gitlab.com/gitlab-org/gitlab-foss.git
Diffstat (limited to 'spec/lib')
-rw-r--r--  spec/lib/api/entities/ci/job_artifact_file_spec.rb  18
-rw-r--r--  spec/lib/api/entities/ci/job_request/dependency_spec.rb  27
-rw-r--r--  spec/lib/api/entities/user_spec.rb  57
-rw-r--r--  spec/lib/api/entities/wiki_page_spec.rb  56
-rw-r--r--  spec/lib/api/helpers_spec.rb  20
-rw-r--r--  spec/lib/atlassian/jira_connect/client_spec.rb  22
-rw-r--r--  spec/lib/atlassian/jira_connect/serializers/build_entity_spec.rb  2
-rw-r--r--  spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb  23
-rw-r--r--  spec/lib/atlassian/jira_connect_spec.rb  29
-rw-r--r--  spec/lib/backup/artifacts_spec.rb  2
-rw-r--r--  spec/lib/backup/database_spec.rb  47
-rw-r--r--  spec/lib/backup/files_spec.rb  38
-rw-r--r--  spec/lib/backup/gitaly_backup_spec.rb  120
-rw-r--r--  spec/lib/backup/gitaly_rpc_backup_spec.rb  23
-rw-r--r--  spec/lib/backup/lfs_spec.rb  2
-rw-r--r--  spec/lib/backup/manager_spec.rb  844
-rw-r--r--  spec/lib/backup/object_backup_spec.rb  2
-rw-r--r--  spec/lib/backup/pages_spec.rb  2
-rw-r--r--  spec/lib/backup/repositories_spec.rb  53
-rw-r--r--  spec/lib/backup/task_spec.rb  27
-rw-r--r--  spec/lib/backup/uploads_spec.rb  2
-rw-r--r--  spec/lib/banzai/filter/front_matter_filter_spec.rb  53
-rw-r--r--  spec/lib/banzai/filter/image_link_filter_spec.rb  62
-rw-r--r--  spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb  2
-rw-r--r--  spec/lib/banzai/filter/reference_redactor_filter_spec.rb  3
-rw-r--r--  spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb  19
-rw-r--r--  spec/lib/banzai/filter/references/label_reference_filter_spec.rb  2
-rw-r--r--  spec/lib/banzai/filter/task_list_filter_spec.rb  13
-rw-r--r--  spec/lib/banzai/reference_redactor_spec.rb  3
-rw-r--r--  spec/lib/bulk_imports/clients/http_spec.rb  4
-rw-r--r--  spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb  2
-rw-r--r--  spec/lib/container_registry/client_spec.rb  100
-rw-r--r--  spec/lib/container_registry/gitlab_api_client_spec.rb  52
-rw-r--r--  spec/lib/container_registry/registry_spec.rb  7
-rw-r--r--  spec/lib/feature_spec.rb  4
-rw-r--r--  spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb  4
-rw-r--r--  spec/lib/gitlab/auth/ldap/access_spec.rb  2
-rw-r--r--  spec/lib/gitlab/auth/ldap/authentication_spec.rb  2
-rw-r--r--  spec/lib/gitlab/auth/o_auth/provider_spec.rb  8
-rw-r--r--  spec/lib/gitlab/auth/o_auth/user_spec.rb  132
-rw-r--r--  spec/lib/gitlab/auth/request_authenticator_spec.rb  32
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_issue_search_data_spec.rb  57
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_member_namespace_for_group_members_spec.rb  50
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb  12
-rw-r--r--  spec/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy_spec.rb  2
-rw-r--r--  spec/lib/gitlab/background_migration/batching_strategies/base_strategy_spec.rb  15
-rw-r--r--  spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb  4
-rw-r--r--  spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb  63
-rw-r--r--  spec/lib/gitlab/background_migration/job_coordinator_spec.rb  60
-rw-r--r--  spec/lib/gitlab/background_migration/migrate_personal_namespace_project_maintainer_to_owner_spec.rb  82
-rw-r--r--  spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb  49
-rw-r--r--  spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb  53
-rw-r--r--  spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_on_projects_spec.rb  37
-rw-r--r--  spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_on_projects_spec.rb  37
-rw-r--r--  spec/lib/gitlab/ci/build/policy/refs_spec.rb  23
-rw-r--r--  spec/lib/gitlab/ci/config/entry/bridge_spec.rb  24
-rw-r--r--  spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb  4
-rw-r--r--  spec/lib/gitlab/ci/config/entry/job_spec.rb  24
-rw-r--r--  spec/lib/gitlab/ci/config/entry/policy_spec.rb  48
-rw-r--r--  spec/lib/gitlab/ci/config/entry/reports/coverage_report_spec.rb  57
-rw-r--r--  spec/lib/gitlab/ci/config/entry/reports_spec.rb  47
-rw-r--r--  spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb  24
-rw-r--r--  spec/lib/gitlab/ci/config/entry/trigger/forward_spec.rb  64
-rw-r--r--  spec/lib/gitlab/ci/config/entry/trigger_spec.rb  92
-rw-r--r--  spec/lib/gitlab/ci/config/external/file/local_spec.rb  10
-rw-r--r--  spec/lib/gitlab/ci/config/yaml/tags/reference_spec.rb  17
-rw-r--r--  spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb  702
-rw-r--r--  spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb  725
-rw-r--r--  spec/lib/gitlab/ci/parsers/security/common_spec.rb  494
-rw-r--r--  spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb  63
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb  75
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/create_spec.rb  16
-rw-r--r--  spec/lib/gitlab/ci/reports/security/report_spec.rb  10
-rw-r--r--  spec/lib/gitlab/ci/status/build/waiting_for_approval_spec.rb  49
-rw-r--r--  spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb  22
-rw-r--r--  spec/lib/gitlab/ci/variables/builder/group_spec.rb  209
-rw-r--r--  spec/lib/gitlab/ci/variables/builder_spec.rb  100
-rw-r--r--  spec/lib/gitlab/ci/yaml_processor/dag_spec.rb  10
-rw-r--r--  spec/lib/gitlab/ci/yaml_processor_spec.rb  38
-rw-r--r--  spec/lib/gitlab/color_spec.rb  132
-rw-r--r--  spec/lib/gitlab/config/entry/validators_spec.rb  43
-rw-r--r--  spec/lib/gitlab/current_settings_spec.rb  15
-rw-r--r--  spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb  190
-rw-r--r--  spec/lib/gitlab/database/background_migration/batched_job_spec.rb  112
-rw-r--r--  spec/lib/gitlab/database/background_migration/batched_job_transition_log_spec.rb  2
-rw-r--r--  spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb  23
-rw-r--r--  spec/lib/gitlab/database/background_migration/batched_migration_spec.rb  10
-rw-r--r--  spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb  1
-rw-r--r--  spec/lib/gitlab/database/each_database_spec.rb  82
-rw-r--r--  spec/lib/gitlab/database/load_balancing/configuration_spec.rb  7
-rw-r--r--  spec/lib/gitlab/database/load_balancing/setup_spec.rb  2
-rw-r--r--  spec/lib/gitlab/database/load_balancing_spec.rb  12
-rw-r--r--  spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb  561
-rw-r--r--  spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb  12
-rw-r--r--  spec/lib/gitlab/database/migrations/observers/query_details_spec.rb  2
-rw-r--r--  spec/lib/gitlab/database/migrations/observers/query_log_spec.rb  2
-rw-r--r--  spec/lib/gitlab/database/migrations/observers/transaction_duration_spec.rb  2
-rw-r--r--  spec/lib/gitlab/database/migrations/runner_spec.rb  18
-rw-r--r--  spec/lib/gitlab/database/migrations/test_background_runner_spec.rb  120
-rw-r--r--  spec/lib/gitlab/database/partitioning_spec.rb  14
-rw-r--r--  spec/lib/gitlab/database/query_analyzer_spec.rb  17
-rw-r--r--  spec/lib/gitlab/database/query_analyzers/restrict_allowed_schemas_spec.rb  161
-rw-r--r--  spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb  2
-rw-r--r--  spec/lib/gitlab/database/transaction/context_spec.rb  20
-rw-r--r--  spec/lib/gitlab/database/transaction/observer_spec.rb  67
-rw-r--r--  spec/lib/gitlab/database/type/color_spec.rb  41
-rw-r--r--  spec/lib/gitlab/database_spec.rb  71
-rw-r--r--  spec/lib/gitlab/diff/file_spec.rb  44
-rw-r--r--  spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb  107
-rw-r--r--  spec/lib/gitlab/email/attachment_uploader_spec.rb  24
-rw-r--r--  spec/lib/gitlab/email/handler/create_issue_handler_spec.rb  31
-rw-r--r--  spec/lib/gitlab/email/handler/service_desk_handler_spec.rb  15
-rw-r--r--  spec/lib/gitlab/email/receiver_spec.rb  34
-rw-r--r--  spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb  142
-rw-r--r--  spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb  126
-rw-r--r--  spec/lib/gitlab/error_tracking_spec.rb  131
-rw-r--r--  spec/lib/gitlab/etag_caching/middleware_spec.rb  3
-rw-r--r--  spec/lib/gitlab/etag_caching/router/rails_spec.rb (renamed from spec/lib/gitlab/etag_caching/router/restful_spec.rb)  8
-rw-r--r--  spec/lib/gitlab/etag_caching/router_spec.rb  2
-rw-r--r--  spec/lib/gitlab/experiment/rollout/feature_spec.rb  19
-rw-r--r--  spec/lib/gitlab/experimentation/controller_concern_spec.rb  2
-rw-r--r--  spec/lib/gitlab/experimentation/experiment_spec.rb  2
-rw-r--r--  spec/lib/gitlab/fips_spec.rb  51
-rw-r--r--  spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb  23
-rw-r--r--  spec/lib/gitlab/git/wiki_spec.rb  16
-rw-r--r--  spec/lib/gitlab/git_access_snippet_spec.rb  32
-rw-r--r--  spec/lib/gitlab/gitaly_client/operation_service_spec.rb  163
-rw-r--r--  spec/lib/gitlab/gitaly_client/repository_service_spec.rb  42
-rw-r--r--  spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb  22
-rw-r--r--  spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb  9
-rw-r--r--  spec/lib/gitlab/github_import/parallel_scheduling_spec.rb  64
-rw-r--r--  spec/lib/gitlab/graphql/loaders/batch_commit_loader_spec.rb  48
-rw-r--r--  spec/lib/gitlab/graphql/markdown_field_spec.rb  12
-rw-r--r--  spec/lib/gitlab/graphql/mount_mutation_spec.rb  8
-rw-r--r--  spec/lib/gitlab/harbor/client_spec.rb  28
-rw-r--r--  spec/lib/gitlab/health_checks/db_check_spec.rb  17
-rw-r--r--  spec/lib/gitlab/highlight_spec.rb  76
-rw-r--r--  spec/lib/gitlab/hook_data/issue_builder_spec.rb  8
-rw-r--r--  spec/lib/gitlab/import_export/all_models.yml  8
-rw-r--r--  spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb  132
-rw-r--r--  spec/lib/gitlab/import_export/command_line_util_spec.rb  153
-rw-r--r--  spec/lib/gitlab/import_export/file_importer_spec.rb  25
-rw-r--r--  spec/lib/gitlab/import_export/group/object_builder_spec.rb  12
-rw-r--r--  spec/lib/gitlab/import_export/group/tree_restorer_spec.rb  282
-rw-r--r--  spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb  22
-rw-r--r--  spec/lib/gitlab/import_export/project/relation_factory_spec.rb  16
-rw-r--r--  spec/lib/gitlab/import_export/project/tree_restorer_spec.rb  32
-rw-r--r--  spec/lib/gitlab/import_export/safe_model_attributes.yml  1
-rw-r--r--  spec/lib/gitlab/integrations/sti_type_spec.rb  114
-rw-r--r--  spec/lib/gitlab/json_cache_spec.rb  88
-rw-r--r--  spec/lib/gitlab/kubernetes/kubeconfig/template_spec.rb  45
-rw-r--r--  spec/lib/gitlab/mail_room/authenticator_spec.rb  20
-rw-r--r--  spec/lib/gitlab/mail_room/mail_room_spec.rb  184
-rw-r--r--  spec/lib/gitlab/merge_requests/commit_message_generator_spec.rb  88
-rw-r--r--  spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb  4
-rw-r--r--  spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb  18
-rw-r--r--  spec/lib/gitlab/metrics/dashboard/cache_spec.rb  2
-rw-r--r--  spec/lib/gitlab/null_request_store_spec.rb  2
-rw-r--r--  spec/lib/gitlab/omniauth_initializer_spec.rb  197
-rw-r--r--  spec/lib/gitlab/pages/settings_spec.rb  2
-rw-r--r--  spec/lib/gitlab/patch/action_cable_redis_listener_spec.rb  28
-rw-r--r--  spec/lib/gitlab/path_regex_spec.rb  5
-rw-r--r--  spec/lib/gitlab/process_supervisor_spec.rb  170
-rw-r--r--  spec/lib/gitlab/profiler_spec.rb  24
-rw-r--r--  spec/lib/gitlab/project_authorizations_spec.rb  2
-rw-r--r--  spec/lib/gitlab/regex_spec.rb  15
-rw-r--r--  spec/lib/gitlab/runtime_spec.rb  20
-rw-r--r--  spec/lib/gitlab/safe_request_loader_spec.rb  180
-rw-r--r--  spec/lib/gitlab/safe_request_store_spec.rb  4
-rw-r--r--  spec/lib/gitlab/sanitizers/exif_spec.rb  118
-rw-r--r--  spec/lib/gitlab/seeder_spec.rb  20
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb  86
-rw-r--r--  spec/lib/gitlab/untrusted_regexp/ruby_syntax_spec.rb  40
-rw-r--r--  spec/lib/gitlab/url_blocker_spec.rb  104
-rw-r--r--  spec/lib/gitlab/usage/metric_definition_spec.rb  22
-rw-r--r--  spec/lib/gitlab/usage/metrics/instrumentations/cert_based_clusters_ff_metric_spec.rb  21
-rw-r--r--  spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb  22
-rw-r--r--  spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb  4
-rw-r--r--  spec/lib/gitlab/usage/service_ping/instrumented_payload_spec.rb  49
-rw-r--r--  spec/lib/gitlab/usage/service_ping/payload_keys_processor_spec.rb  56
-rw-r--r--  spec/lib/gitlab/usage/service_ping_report_spec.rb  222
-rw-r--r--  spec/lib/gitlab/usage_counters/pod_logs_spec.rb  7
-rw-r--r--  spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb  5
-rw-r--r--  spec/lib/gitlab/usage_data_counters/service_usage_data_counter_spec.rb  7
-rw-r--r--  spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb  63
-rw-r--r--  spec/lib/gitlab/usage_data_queries_spec.rb  13
-rw-r--r--  spec/lib/gitlab/usage_data_spec.rb  16
-rw-r--r--  spec/lib/gitlab/utils/strong_memoize_spec.rb  30
-rw-r--r--  spec/lib/gitlab/utils_spec.rb  71
-rw-r--r--  spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb  29
-rw-r--r--  spec/lib/gitlab_spec.rb  119
-rw-r--r--  spec/lib/google_api/cloud_platform/client_spec.rb  16
-rw-r--r--  spec/lib/learn_gitlab/onboarding_spec.rb  2
-rw-r--r--  spec/lib/learn_gitlab/project_spec.rb  3
-rw-r--r--  spec/lib/peek/views/active_record_spec.rb  12
-rw-r--r--  spec/lib/security/ci_configuration/sast_build_action_spec.rb  8
-rw-r--r--  spec/lib/security/ci_configuration/sast_iac_build_action_spec.rb  120
-rw-r--r--  spec/lib/serializers/unsafe_json_spec.rb  27
-rw-r--r--  spec/lib/sidebars/concerns/work_item_hierarchy_spec.rb  21
-rw-r--r--  spec/lib/sidebars/groups/menus/kubernetes_menu_spec.rb  10
-rw-r--r--  spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb  35
-rw-r--r--  spec/lib/sidebars/groups/menus/settings_menu_spec.rb  12
-rw-r--r--  spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb  8
-rw-r--r--  spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb  21
-rw-r--r--  spec/lib/sidebars/projects/menus/project_information_menu_spec.rb  6
205 files changed, 8925 insertions, 2835 deletions
diff --git a/spec/lib/api/entities/ci/job_artifact_file_spec.rb b/spec/lib/api/entities/ci/job_artifact_file_spec.rb
new file mode 100644
index 00000000000..9e4ec272518
--- /dev/null
+++ b/spec/lib/api/entities/ci/job_artifact_file_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::Ci::JobArtifactFile do
+ let(:artifact_file) { instance_double(JobArtifactUploader, filename: 'ci_build_artifacts.zip', cached_size: 42) }
+ let(:entity) { described_class.new(artifact_file) }
+
+ subject { entity.as_json }
+
+ it 'returns the filename' do
+ expect(subject[:filename]).to eq('ci_build_artifacts.zip')
+ end
+
+ it 'returns the size' do
+ expect(subject[:size]).to eq(42)
+ end
+end
diff --git a/spec/lib/api/entities/ci/job_request/dependency_spec.rb b/spec/lib/api/entities/ci/job_request/dependency_spec.rb
new file mode 100644
index 00000000000..fa5f3da554c
--- /dev/null
+++ b/spec/lib/api/entities/ci/job_request/dependency_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::Ci::JobRequest::Dependency do
+ let(:job) { create(:ci_build, :artifacts) }
+ let(:entity) { described_class.new(job) }
+
+ subject { entity.as_json }
+
+ it 'returns the dependency id' do
+ expect(subject[:id]).to eq(job.id)
+ end
+
+ it 'returns the dependency name' do
+ expect(subject[:name]).to eq(job.name)
+ end
+
+ it 'returns the dependency token' do
+ expect(subject[:token]).to eq(job.token)
+ end
+
+ it 'returns the dependency artifacts_file', :aggregate_failures do
+ expect(subject[:artifacts_file][:filename]).to eq('ci_build_artifacts.zip')
+ expect(subject[:artifacts_file][:size]).to eq(job.artifacts_size)
+ end
+end
diff --git a/spec/lib/api/entities/user_spec.rb b/spec/lib/api/entities/user_spec.rb
index 14dc60e1a5f..be5e8e8e8c2 100644
--- a/spec/lib/api/entities/user_spec.rb
+++ b/spec/lib/api/entities/user_spec.rb
@@ -78,6 +78,63 @@ RSpec.describe API::Entities::User do
end
end
+ context 'with group bot user' do
+ let(:group) { create(:group) }
+ let(:user) { create(:user, :project_bot, name: 'group bot') }
+
+ before do
+ group.add_maintainer(user)
+ end
+
+ it 'exposes user as a bot' do
+ expect(subject[:bot]).to eq(true)
+ end
+
+ context 'when the requester is not a group member' do
+ context 'with a public group' do
+ it 'exposes group bot user name' do
+ expect(subject[:name]).to eq('group bot')
+ end
+ end
+
+ context 'with a private group' do
+ let(:group) { create(:group, :private) }
+
+ it 'does not expose group bot user name' do
+ expect(subject[:name]).to eq('****')
+ end
+ end
+ end
+
+ context 'when the requester is nil' do
+ let(:current_user) { nil }
+
+ it 'does not expose group bot user name' do
+ expect(subject[:name]).to eq('****')
+ end
+ end
+
+ context 'when the requester is a group maintainer' do
+ let(:current_user) { create(:user) }
+
+ before do
+ group.add_maintainer(current_user)
+ end
+
+ it 'exposes group bot user name' do
+ expect(subject[:name]).to eq('group bot')
+ end
+ end
+
+ context 'when the requester is an admin' do
+ let(:current_user) { create(:user, :admin) }
+
+ it 'exposes group bot user name', :enable_admin_mode do
+ expect(subject[:name]).to eq('group bot')
+ end
+ end
+ end
+
it 'exposes local_time' do
local_time = '2:30 PM'
expect(entity).to receive(:local_time).with(timezone).and_return(local_time)
diff --git a/spec/lib/api/entities/wiki_page_spec.rb b/spec/lib/api/entities/wiki_page_spec.rb
new file mode 100644
index 00000000000..238c8233a14
--- /dev/null
+++ b/spec/lib/api/entities/wiki_page_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::WikiPage do
+ let_it_be_with_reload(:wiki_page) { create(:wiki_page) }
+
+ let(:params) { {} }
+ let(:entity) { described_class.new(wiki_page, params) }
+
+ subject { entity.as_json }
+
+ it 'returns the proper encoding for the wiki page content' do
+ expect(entity.as_json[:encoding]).to eq 'UTF-8'
+
+ wiki_page.update_attributes(content: 'new_content'.encode('ISO-8859-1')) # rubocop:disable Rails/ActiveRecordAliases, Rails/SaveBang
+
+ expect(entity.as_json[:encoding]).to eq 'ISO-8859-1'
+ end
+
+ it 'returns the raw wiki page content' do
+ expect(subject[:content]).to eq wiki_page.content
+ end
+
+ context 'when render_html param is passed' do
+ context 'when it is true' do
+ let(:params) { { render_html: true } }
+
+ it 'returns the wiki page content rendered' do
+ expect(subject[:content]).to eq "<p data-sourcepos=\"1:1-1:#{wiki_page.content.size}\" dir=\"auto\">#{wiki_page.content}</p>"
+ end
+
+ it 'includes the wiki page version in the render context' do
+ expect(entity).to receive(:render_wiki_content).with(anything, hash_including(ref: wiki_page.version.id)).and_call_original
+
+ subject[:content]
+ end
+
+ context 'when page is an Ascii document' do
+ let(:wiki_page) { create(:wiki_page, content: "*Test* _content_", format: :asciidoc) }
+
+ it 'renders the page without errors' do
+ expect(subject[:content]).to eq("<div>&#x000A;<p><strong>Test</strong> <em>content</em></p>&#x000A;</div>")
+ end
+ end
+ end
+
+ context 'when it is false' do
+ let(:params) { { render_html: false } }
+
+ it 'returns the raw wiki page content' do
+ expect(subject[:content]).to eq wiki_page.content
+ end
+ end
+ end
+end
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
index b2d4a3094af..2afe5a1a9d7 100644
--- a/spec/lib/api/helpers_spec.rb
+++ b/spec/lib/api/helpers_spec.rb
@@ -109,6 +109,26 @@ RSpec.describe API::Helpers do
end
end
end
+
+ context 'when project is pending delete' do
+ let(:project_pending_delete) { create(:project, pending_delete: true) }
+
+ it 'does not return the project pending delete' do
+ expect(Project).not_to receive(:find_by_full_path)
+
+ expect(subject.find_project(project_pending_delete.id)).to be_nil
+ end
+ end
+
+ context 'when project is hidden' do
+ let(:hidden_project) { create(:project, :hidden) }
+
+ it 'does not return the hidden project' do
+ expect(Project).not_to receive(:find_by_full_path)
+
+ expect(subject.find_project(hidden_project.id)).to be_nil
+ end
+ end
end
describe '#find_project!' do
diff --git a/spec/lib/atlassian/jira_connect/client_spec.rb b/spec/lib/atlassian/jira_connect/client_spec.rb
index 9201d1c5dcb..dd3130c78bf 100644
--- a/spec/lib/atlassian/jira_connect/client_spec.rb
+++ b/spec/lib/atlassian/jira_connect/client_spec.rb
@@ -127,11 +127,19 @@ RSpec.describe Atlassian::JiraConnect::Client do
end
end
+ context 'the response is 202 accepted' do
+ let(:response) { double(code: 202, parsed_response: :foo) }
+
+ it 'yields to the block' do
+ expect(processed).to eq [:data, :foo]
+ end
+ end
+
context 'the response is 400 bad request' do
let(:response) { double(code: 400, parsed_response: errors) }
it 'extracts the errors messages' do
- expect(processed).to eq('errorMessages' => %w(X Y))
+ expect(processed).to eq('errorMessages' => %w(X Y), 'responseCode' => 400)
end
end
@@ -139,7 +147,7 @@ RSpec.describe Atlassian::JiraConnect::Client do
let(:response) { double(code: 401, parsed_response: nil) }
it 'reports that our JWT is wrong' do
- expect(processed).to eq('errorMessages' => ['Invalid JWT'])
+ expect(processed).to eq('errorMessages' => ['Invalid JWT'], 'responseCode' => 401)
end
end
@@ -147,7 +155,7 @@ RSpec.describe Atlassian::JiraConnect::Client do
let(:response) { double(code: 403, parsed_response: nil) }
it 'reports that the App is misconfigured' do
- expect(processed).to eq('errorMessages' => ['App does not support foo'])
+ expect(processed).to eq('errorMessages' => ['App does not support foo'], 'responseCode' => 403)
end
end
@@ -155,7 +163,7 @@ RSpec.describe Atlassian::JiraConnect::Client do
let(:response) { double(code: 413, parsed_response: errors) }
it 'extracts the errors messages' do
- expect(processed).to eq('errorMessages' => ['Data too large', 'X', 'Y'])
+ expect(processed).to eq('errorMessages' => ['Data too large', 'X', 'Y'], 'responseCode' => 413)
end
end
@@ -163,7 +171,7 @@ RSpec.describe Atlassian::JiraConnect::Client do
let(:response) { double(code: 429, parsed_response: nil) }
it 'reports that we exceeded the rate limit' do
- expect(processed).to eq('errorMessages' => ['Rate limit exceeded'])
+ expect(processed).to eq('errorMessages' => ['Rate limit exceeded'], 'responseCode' => 429)
end
end
@@ -171,7 +179,7 @@ RSpec.describe Atlassian::JiraConnect::Client do
let(:response) { double(code: 503, parsed_response: nil) }
it 'reports that the service is unavailable' do
- expect(processed).to eq('errorMessages' => ['Service unavailable'])
+ expect(processed).to eq('errorMessages' => ['Service unavailable'], 'responseCode' => 503)
end
end
@@ -179,7 +187,7 @@ RSpec.describe Atlassian::JiraConnect::Client do
let(:response) { double(code: 1000, parsed_response: :something) }
it 'reports that this was unanticipated' do
- expect(processed).to eq('errorMessages' => ['Unknown error'], 'response' => :something)
+ expect(processed).to eq('errorMessages' => ['Unknown error'], 'responseCode' => 1000, 'response' => :something)
end
end
end
diff --git a/spec/lib/atlassian/jira_connect/serializers/build_entity_spec.rb b/spec/lib/atlassian/jira_connect/serializers/build_entity_spec.rb
index 4bbd654655d..a29f32d35b8 100644
--- a/spec/lib/atlassian/jira_connect/serializers/build_entity_spec.rb
+++ b/spec/lib/atlassian/jira_connect/serializers/build_entity_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Atlassian::JiraConnect::Serializers::BuildEntity do
context 'when the pipeline does belong to a Jira issue' do
let(:pipeline) { create(:ci_pipeline, merge_request: merge_request) }
- %i[jira_branch jira_title].each do |trait|
+ %i[jira_branch jira_title jira_description].each do |trait|
context "because it belongs to an MR with a #{trait}" do
let(:merge_request) { create(:merge_request, trait) }
diff --git a/spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb b/spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb
index 8ccc3253a46..40b9e83719b 100644
--- a/spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb
+++ b/spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb
@@ -45,33 +45,18 @@ RSpec.describe Atlassian::JiraConnect::Serializers::DeploymentEntity do
describe 'environment type' do
using RSpec::Parameterized::TableSyntax
- where(:env_name, :env_type) do
- 'PRODUCTION' | 'production'
- 'prod' | 'production'
- 'prod-east-2' | 'production'
- 'us-prod-east' | 'production'
- 'fe-production' | 'production'
- 'test' | 'testing'
- 'qa-env-2' | 'testing'
- 'staging' | 'staging'
- 'pre-prod' | 'staging'
- 'blue-kit-stage' | 'staging'
- 'pre-prod' | 'staging'
- 'dev' | 'development'
- 'review/app' | 'development'
- 'something-else' | 'unmapped'
- 'store-produce' | 'unmapped'
- 'unproductive' | 'unmapped'
+ where(:tier, :env_type) do
+ 'other' | 'unmapped'
end
with_them do
before do
- environment.update!(name: env_name)
+ subject.environment.update!(tier: tier)
end
let(:exposed_type) { subject.send(:environment_entity).send(:type) }
- it 'has the correct environment type' do
+ it 'has the same type as the environment tier' do
expect(exposed_type).to eq(env_type)
end
end
diff --git a/spec/lib/atlassian/jira_connect_spec.rb b/spec/lib/atlassian/jira_connect_spec.rb
new file mode 100644
index 00000000000..d9c34e938b4
--- /dev/null
+++ b/spec/lib/atlassian/jira_connect_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Atlassian::JiraConnect do
+ describe '.app_name' do
+ subject { described_class.app_name }
+
+ it { is_expected.to eq('GitLab for Jira (localhost)') }
+ end
+
+ describe '.app_key' do
+ subject(:app_key) { described_class.app_key }
+
+ it { is_expected.to eq('gitlab-jira-connect-localhost') }
+
+ context 'host name is too long' do
+ before do
+ hostname = 'x' * 100
+
+ stub_config(gitlab: { host: hostname })
+ end
+
+ it 'truncates the key to be no longer than 64 characters', :aggregate_failures do
+ expect(app_key).to eq('gitlab-jira-connect-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx')
+ end
+ end
+ end
+end
diff --git a/spec/lib/backup/artifacts_spec.rb b/spec/lib/backup/artifacts_spec.rb
index e65dc79b65b..d830692d96b 100644
--- a/spec/lib/backup/artifacts_spec.rb
+++ b/spec/lib/backup/artifacts_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Backup::Artifacts do
expect(backup).to receive(:tar).and_return('blabla-tar')
expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./tmp -C /var/gitlab-artifacts -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(backup).to receive(:pipeline_succeeded?).and_return(true)
- backup.dump
+ backup.dump('artifacts.tar.gz')
end
end
end
diff --git a/spec/lib/backup/database_spec.rb b/spec/lib/backup/database_spec.rb
index 4345778ba92..53db7f0f149 100644
--- a/spec/lib/backup/database_spec.rb
+++ b/spec/lib/backup/database_spec.rb
@@ -6,25 +6,49 @@ RSpec.describe Backup::Database do
let(:progress) { StringIO.new }
let(:output) { progress.string }
- before do
- allow(Gitlab::TaskHelpers).to receive(:ask_to_continue)
+ before(:all) do
+ Rake.application.rake_require 'active_record/railties/databases'
+ Rake.application.rake_require 'tasks/gitlab/backup'
+ Rake.application.rake_require 'tasks/gitlab/shell'
+ Rake.application.rake_require 'tasks/gitlab/db'
+ Rake.application.rake_require 'tasks/cache'
end
describe '#restore' do
let(:cmd) { %W[#{Gem.ruby} -e $stdout.puts(1)] }
let(:data) { Rails.root.join("spec/fixtures/pages_empty.tar.gz").to_s }
+ let(:force) { true }
- subject { described_class.new(progress, filename: data) }
+ subject { described_class.new(progress, force: force) }
before do
allow(subject).to receive(:pg_restore_cmd).and_return(cmd)
end
+ context 'when not forced' do
+ let(:force) { false }
+
+ it 'warns the user and waits' do
+ expect(subject).to receive(:sleep)
+ expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+
+ subject.restore(data)
+
+ expect(output).to include('Removing all tables. Press `Ctrl-C` within 5 seconds to abort')
+ end
+
+ it 'has a pre restore warning' do
+ expect(subject.pre_restore_warning).not_to be_nil
+ end
+ end
+
context 'with an empty .gz file' do
let(:data) { Rails.root.join("spec/fixtures/pages_empty.tar.gz").to_s }
it 'returns successfully' do
- subject.restore
+ expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+
+ subject.restore(data)
expect(output).to include("Restoring PostgreSQL database")
expect(output).to include("[DONE]")
@@ -36,7 +60,9 @@ RSpec.describe Backup::Database do
let(:data) { Rails.root.join("spec/fixtures/big-image.png").to_s }
it 'raises a backup error' do
- expect { subject.restore }.to raise_error(Backup::Error)
+ expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+
+ expect { subject.restore(data) }.to raise_error(Backup::Error)
end
end
@@ -45,12 +71,15 @@ RSpec.describe Backup::Database do
let(:noise) { "Table projects does not exist\nmust be owner of extension pg_trgm\nWARNING: no privileges could be revoked for public\n" }
let(:cmd) { %W[#{Gem.ruby} -e $stderr.write("#{noise}#{visible_error}")] }
- it 'filters out noise from errors' do
- subject.restore
+ it 'filters out noise from errors and has a post restore warning' do
+ expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+
+ subject.restore(data)
expect(output).to include("ERRORS")
expect(output).not_to include(noise)
expect(output).to include(visible_error)
+ expect(subject.post_restore_warning).not_to be_nil
end
end
@@ -66,7 +95,9 @@ RSpec.describe Backup::Database do
end
it 'overrides default config values' do
- subject.restore
+ expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+
+ subject.restore(data)
expect(output).to include(%("PGHOST"=>"test.example.com"))
expect(output).to include(%("PGPASSWORD"=>"donotchange"))
diff --git a/spec/lib/backup/files_spec.rb b/spec/lib/backup/files_spec.rb
index 6bff0919293..bbc465a26c9 100644
--- a/spec/lib/backup/files_spec.rb
+++ b/spec/lib/backup/files_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe Backup::Files do
end
describe '#restore' do
- subject { described_class.new('registry', '/var/gitlab-registry') }
+ subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
let(:timestamp) { Time.utc(2017, 3, 22) }
@@ -58,11 +58,11 @@ RSpec.describe Backup::Files do
it 'moves all necessary files' do
allow(subject).to receive(:backup_existing_files).and_call_original
expect(FileUtils).to receive(:mv).with(["/var/gitlab-registry/sample1"], File.join(Gitlab.config.backup.path, "tmp", "registry.#{Time.now.to_i}"))
- subject.restore
+ subject.restore('registry.tar.gz')
end
it 'raises no errors' do
- expect { subject.restore }.not_to raise_error
+ expect { subject.restore('registry.tar.gz') }.not_to raise_error
end
it 'calls tar command with unlink' do
@@ -70,13 +70,13 @@ RSpec.describe Backup::Files do
expect(subject).to receive(:run_pipeline!).with([%w(gzip -cd), %w(blabla-tar --unlink-first --recursive-unlink -C /var/gitlab-registry -xf -)], any_args)
expect(subject).to receive(:pipeline_succeeded?).and_return(true)
- subject.restore
+ subject.restore('registry.tar.gz')
end
it 'raises an error on failure' do
expect(subject).to receive(:pipeline_succeeded?).and_return(false)
- expect { subject.restore }.to raise_error(/Restore operation failed:/)
+ expect { subject.restore('registry.tar.gz') }.to raise_error(/Restore operation failed:/)
end
end
@@ -89,7 +89,7 @@ RSpec.describe Backup::Files do
it 'shows error message' do
expect(subject).to receive(:access_denied_error).with("/var/gitlab-registry")
- subject.restore
+ subject.restore('registry.tar.gz')
end
end
@@ -104,13 +104,13 @@ RSpec.describe Backup::Files do
expect(subject).to receive(:resource_busy_error).with("/var/gitlab-registry")
.and_call_original
- expect { subject.restore }.to raise_error(/is a mountpoint/)
+ expect { subject.restore('registry.tar.gz') }.to raise_error(/is a mountpoint/)
end
end
end
describe '#dump' do
- subject { described_class.new('pages', '/var/gitlab-pages', excludes: ['@pages.tmp']) }
+ subject { described_class.new(progress, 'pages', '/var/gitlab-pages', excludes: ['@pages.tmp']) }
before do
allow(subject).to receive(:run_pipeline!).and_return([[true, true], ''])
@@ -118,14 +118,14 @@ RSpec.describe Backup::Files do
end
it 'raises no errors' do
- expect { subject.dump }.not_to raise_error
+ expect { subject.dump('registry.tar.gz') }.not_to raise_error
end
it 'excludes tmp dirs from archive' do
expect(subject).to receive(:tar).and_return('blabla-tar')
expect(subject).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./@pages.tmp -C /var/gitlab-pages -cf - .), 'gzip -c -1'], any_args)
- subject.dump
+ subject.dump('registry.tar.gz')
end
it 'raises an error on failure' do
@@ -133,7 +133,7 @@ RSpec.describe Backup::Files do
expect(subject).to receive(:pipeline_succeeded?).and_return(false)
expect do
- subject.dump
+ subject.dump('registry.tar.gz')
end.to raise_error(/Failed to create compressed file/)
end
@@ -149,7 +149,7 @@ RSpec.describe Backup::Files do
.with(%w(rsync -a --delete --exclude=lost+found --exclude=/gitlab-pages/@pages.tmp /var/gitlab-pages /var/gitlab-backup))
.and_return(['', 0])
- subject.dump
+ subject.dump('registry.tar.gz')
end
it 'retries if rsync fails due to vanishing files' do
@@ -158,7 +158,7 @@ RSpec.describe Backup::Files do
.and_return(['rsync failed', 24], ['', 0])
expect do
- subject.dump
+ subject.dump('registry.tar.gz')
end.to output(/files vanished during rsync, retrying/).to_stdout
end
@@ -168,7 +168,7 @@ RSpec.describe Backup::Files do
.and_return(['rsync failed', 1])
expect do
- subject.dump
+ subject.dump('registry.tar.gz')
end.to output(/rsync failed/).to_stdout
.and raise_error(/Failed to create compressed file/)
end
@@ -176,7 +176,7 @@ RSpec.describe Backup::Files do
end
describe '#exclude_dirs' do
- subject { described_class.new('pages', '/var/gitlab-pages', excludes: ['@pages.tmp']) }
+ subject { described_class.new(progress, 'pages', '/var/gitlab-pages', excludes: ['@pages.tmp']) }
it 'prepends a leading dot slash to tar excludes' do
expect(subject.exclude_dirs(:tar)).to eq(['--exclude=lost+found', '--exclude=./@pages.tmp'])
@@ -188,7 +188,7 @@ RSpec.describe Backup::Files do
end
describe '#run_pipeline!' do
- subject { described_class.new('registry', '/var/gitlab-registry') }
+ subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
it 'executes an Open3.pipeline for cmd_list' do
expect(Open3).to receive(:pipeline).with(%w[whew command], %w[another cmd], any_args)
@@ -222,7 +222,7 @@ RSpec.describe Backup::Files do
end
describe '#pipeline_succeeded?' do
- subject { described_class.new('registry', '/var/gitlab-registry') }
+ subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
it 'returns true if both tar and gzip succeeeded' do
expect(
@@ -262,7 +262,7 @@ RSpec.describe Backup::Files do
end
describe '#tar_ignore_non_success?' do
- subject { described_class.new('registry', '/var/gitlab-registry') }
+ subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
context 'if `tar` command exits with 1 exitstatus' do
it 'returns true' do
@@ -310,7 +310,7 @@ RSpec.describe Backup::Files do
end
describe '#noncritical_warning?' do
- subject { described_class.new('registry', '/var/gitlab-registry') }
+ subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
it 'returns true if given text matches noncritical warnings list' do
expect(
diff --git a/spec/lib/backup/gitaly_backup_spec.rb b/spec/lib/backup/gitaly_backup_spec.rb
index 6bf4f833c1f..f5295c2b04c 100644
--- a/spec/lib/backup/gitaly_backup_spec.rb
+++ b/spec/lib/backup/gitaly_backup_spec.rb
@@ -5,6 +5,8 @@ require 'spec_helper'
RSpec.describe Backup::GitalyBackup do
let(:max_parallelism) { nil }
let(:storage_parallelism) { nil }
+ let(:destination) { File.join(Gitlab.config.backup.path, 'repositories') }
+ let(:backup_id) { '20220101' }
let(:progress) do
Tempfile.new('progress').tap do |progress|
@@ -23,11 +25,11 @@ RSpec.describe Backup::GitalyBackup do
progress.close
end
- subject { described_class.new(progress, max_parallelism: max_parallelism, storage_parallelism: storage_parallelism) }
+ subject { described_class.new(progress, max_parallelism: max_parallelism, storage_parallelism: storage_parallelism, backup_id: backup_id) }
context 'unknown' do
it 'fails to start unknown' do
- expect { subject.start(:unknown) }.to raise_error(::Backup::Error, 'unknown backup type: unknown')
+ expect { subject.start(:unknown, destination) }.to raise_error(::Backup::Error, 'unknown backup type: unknown')
end
end
@@ -40,9 +42,9 @@ RSpec.describe Backup::GitalyBackup do
project_snippet = create(:project_snippet, :repository, project: project)
personal_snippet = create(:personal_snippet, :repository, author: project.first_owner)
- expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything).and_call_original
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-layout', 'pointer', '-id', backup_id).and_call_original
- subject.start(:create)
+ subject.start(:create, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI)
subject.enqueue(project, Gitlab::GlRepository::DESIGN)
@@ -50,20 +52,20 @@ RSpec.describe Backup::GitalyBackup do
subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET)
subject.finish!
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.wiki.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.design.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', personal_snippet.disk_path + '.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project_snippet.disk_path + '.bundle'))
+ expect(File).to exist(File.join(destination, project.disk_path, backup_id, '001.bundle'))
+ expect(File).to exist(File.join(destination, project.disk_path + '.wiki', backup_id, '001.bundle'))
+ expect(File).to exist(File.join(destination, project.disk_path + '.design', backup_id, '001.bundle'))
+ expect(File).to exist(File.join(destination, personal_snippet.disk_path, backup_id, '001.bundle'))
+ expect(File).to exist(File.join(destination, project_snippet.disk_path, backup_id, '001.bundle'))
end
context 'parallel option set' do
let(:max_parallelism) { 3 }
it 'passes parallel option through' do
- expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel', '3').and_call_original
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel', '3', '-layout', 'pointer', '-id', backup_id).and_call_original
- subject.start(:create)
+ subject.start(:create, destination)
subject.finish!
end
end
@@ -72,9 +74,9 @@ RSpec.describe Backup::GitalyBackup do
let(:storage_parallelism) { 3 }
it 'passes parallel option through' do
- expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel-storage', '3').and_call_original
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel-storage', '3', '-layout', 'pointer', '-id', backup_id).and_call_original
- subject.start(:create)
+ subject.start(:create, destination)
subject.finish!
end
end
@@ -82,9 +84,39 @@ RSpec.describe Backup::GitalyBackup do
it 'raises when the exit code not zero' do
expect(subject).to receive(:bin_path).and_return(Gitlab::Utils.which('false'))
- subject.start(:create)
+ subject.start(:create, destination)
expect { subject.finish! }.to raise_error(::Backup::Error, 'gitaly-backup exit status 1')
end
+
+ context 'feature flag incremental_repository_backup disabled' do
+ before do
+ stub_feature_flags(incremental_repository_backup: false)
+ end
+
+ it 'creates repository bundles', :aggregate_failures do
+ # Add data to the wiki, design repositories, and snippets, so they will be included in the dump.
+ create(:wiki_page, container: project)
+ create(:design, :with_file, issue: create(:issue, project: project))
+ project_snippet = create(:project_snippet, :repository, project: project)
+ personal_snippet = create(:personal_snippet, :repository, author: project.first_owner)
+
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything).and_call_original
+
+ subject.start(:create, destination)
+ subject.enqueue(project, Gitlab::GlRepository::PROJECT)
+ subject.enqueue(project, Gitlab::GlRepository::WIKI)
+ subject.enqueue(project, Gitlab::GlRepository::DESIGN)
+ subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET)
+ subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET)
+ subject.finish!
+
+ expect(File).to exist(File.join(destination, project.disk_path + '.bundle'))
+ expect(File).to exist(File.join(destination, project.disk_path + '.wiki.bundle'))
+ expect(File).to exist(File.join(destination, project.disk_path + '.design.bundle'))
+ expect(File).to exist(File.join(destination, personal_snippet.disk_path + '.bundle'))
+ expect(File).to exist(File.join(destination, project_snippet.disk_path + '.bundle'))
+ end
+ end
end
context 'hashed storage' do
@@ -112,9 +144,9 @@ RSpec.describe Backup::GitalyBackup do
end
it 'passes through SSL envs' do
- expect(Open3).to receive(:popen2).with(ssl_env, anything, 'create', '-path', anything).and_call_original
+ expect(Open3).to receive(:popen2).with(ssl_env, anything, 'create', '-path', anything, '-layout', 'pointer', '-id', backup_id).and_call_original
- subject.start(:create)
+ subject.start(:create, destination)
subject.finish!
end
end
@@ -137,9 +169,9 @@ RSpec.describe Backup::GitalyBackup do
copy_bundle_to_backup_path('personal_snippet_repo.bundle', personal_snippet.disk_path + '.bundle')
copy_bundle_to_backup_path('project_snippet_repo.bundle', project_snippet.disk_path + '.bundle')
- expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything).and_call_original
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-layout', 'pointer').and_call_original
- subject.start(:restore)
+ subject.start(:restore, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI)
subject.enqueue(project, Gitlab::GlRepository::DESIGN)
@@ -149,20 +181,20 @@ RSpec.describe Backup::GitalyBackup do
collect_commit_shas = -> (repo) { repo.commits('master', limit: 10).map(&:sha) }
- expect(collect_commit_shas.call(project.repository)).to eq(['393a7d860a5a4c3cc736d7eb00604e3472bb95ec'])
- expect(collect_commit_shas.call(project.wiki.repository)).to eq(['c74b9948d0088d703ee1fafeddd9ed9add2901ea'])
- expect(collect_commit_shas.call(project.design_repository)).to eq(['c3cd4d7bd73a51a0f22045c3a4c871c435dc959d'])
- expect(collect_commit_shas.call(personal_snippet.repository)).to eq(['3b3c067a3bc1d1b695b51e2be30c0f8cf698a06e'])
- expect(collect_commit_shas.call(project_snippet.repository)).to eq(['6e44ba56a4748be361a841e759c20e421a1651a1'])
+ expect(collect_commit_shas.call(project.repository)).to match_array(['393a7d860a5a4c3cc736d7eb00604e3472bb95ec'])
+ expect(collect_commit_shas.call(project.wiki.repository)).to match_array(['c74b9948d0088d703ee1fafeddd9ed9add2901ea'])
+ expect(collect_commit_shas.call(project.design_repository)).to match_array(['c3cd4d7bd73a51a0f22045c3a4c871c435dc959d'])
+ expect(collect_commit_shas.call(personal_snippet.repository)).to match_array(['3b3c067a3bc1d1b695b51e2be30c0f8cf698a06e'])
+ expect(collect_commit_shas.call(project_snippet.repository)).to match_array(['6e44ba56a4748be361a841e759c20e421a1651a1'])
end
context 'parallel option set' do
let(:max_parallelism) { 3 }
it 'passes parallel option through' do
- expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel', '3').and_call_original
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel', '3', '-layout', 'pointer').and_call_original
- subject.start(:restore)
+ subject.start(:restore, destination)
subject.finish!
end
end
@@ -171,17 +203,49 @@ RSpec.describe Backup::GitalyBackup do
let(:storage_parallelism) { 3 }
it 'passes parallel option through' do
- expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel-storage', '3').and_call_original
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel-storage', '3', '-layout', 'pointer').and_call_original
- subject.start(:restore)
+ subject.start(:restore, destination)
subject.finish!
end
end
+ context 'feature flag incremental_repository_backup disabled' do
+ before do
+ stub_feature_flags(incremental_repository_backup: false)
+ end
+
+ it 'restores from repository bundles', :aggregate_failures do
+ copy_bundle_to_backup_path('project_repo.bundle', project.disk_path + '.bundle')
+ copy_bundle_to_backup_path('wiki_repo.bundle', project.disk_path + '.wiki.bundle')
+ copy_bundle_to_backup_path('design_repo.bundle', project.disk_path + '.design.bundle')
+ copy_bundle_to_backup_path('personal_snippet_repo.bundle', personal_snippet.disk_path + '.bundle')
+ copy_bundle_to_backup_path('project_snippet_repo.bundle', project_snippet.disk_path + '.bundle')
+
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything).and_call_original
+
+ subject.start(:restore, destination)
+ subject.enqueue(project, Gitlab::GlRepository::PROJECT)
+ subject.enqueue(project, Gitlab::GlRepository::WIKI)
+ subject.enqueue(project, Gitlab::GlRepository::DESIGN)
+ subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET)
+ subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET)
+ subject.finish!
+
+ collect_commit_shas = -> (repo) { repo.commits('master', limit: 10).map(&:sha) }
+
+ expect(collect_commit_shas.call(project.repository)).to match_array(['393a7d860a5a4c3cc736d7eb00604e3472bb95ec'])
+ expect(collect_commit_shas.call(project.wiki.repository)).to match_array(['c74b9948d0088d703ee1fafeddd9ed9add2901ea'])
+ expect(collect_commit_shas.call(project.design_repository)).to match_array(['c3cd4d7bd73a51a0f22045c3a4c871c435dc959d'])
+ expect(collect_commit_shas.call(personal_snippet.repository)).to match_array(['3b3c067a3bc1d1b695b51e2be30c0f8cf698a06e'])
+ expect(collect_commit_shas.call(project_snippet.repository)).to match_array(['6e44ba56a4748be361a841e759c20e421a1651a1'])
+ end
+ end
+
it 'raises when the exit code not zero' do
expect(subject).to receive(:bin_path).and_return(Gitlab::Utils.which('false'))
- subject.start(:restore)
+ subject.start(:restore, destination)
expect { subject.finish! }.to raise_error(::Backup::Error, 'gitaly-backup exit status 1')
end
end
diff --git a/spec/lib/backup/gitaly_rpc_backup_spec.rb b/spec/lib/backup/gitaly_rpc_backup_spec.rb
index 4829d51ac9d..6cba8c5c9b1 100644
--- a/spec/lib/backup/gitaly_rpc_backup_spec.rb
+++ b/spec/lib/backup/gitaly_rpc_backup_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Backup::GitalyRpcBackup do
let(:progress) { spy(:stdout) }
+ let(:destination) { File.join(Gitlab.config.backup.path, 'repositories') }
subject { described_class.new(progress) }
@@ -14,7 +15,7 @@ RSpec.describe Backup::GitalyRpcBackup do
context 'unknown' do
it 'fails to start unknown' do
- expect { subject.start(:unknown) }.to raise_error(::Backup::Error, 'unknown backup type: unknown')
+ expect { subject.start(:unknown, destination) }.to raise_error(::Backup::Error, 'unknown backup type: unknown')
end
end
@@ -27,7 +28,7 @@ RSpec.describe Backup::GitalyRpcBackup do
project_snippet = create(:project_snippet, :repository, project: project)
personal_snippet = create(:personal_snippet, :repository, author: project.first_owner)
- subject.start(:create)
+ subject.start(:create, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI)
subject.enqueue(project, Gitlab::GlRepository::DESIGN)
@@ -35,11 +36,11 @@ RSpec.describe Backup::GitalyRpcBackup do
subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET)
subject.finish!
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.wiki.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.design.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', personal_snippet.disk_path + '.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project_snippet.disk_path + '.bundle'))
+ expect(File).to exist(File.join(destination, project.disk_path + '.bundle'))
+ expect(File).to exist(File.join(destination, project.disk_path + '.wiki.bundle'))
+ expect(File).to exist(File.join(destination, project.disk_path + '.design.bundle'))
+ expect(File).to exist(File.join(destination, personal_snippet.disk_path + '.bundle'))
+ expect(File).to exist(File.join(destination, project_snippet.disk_path + '.bundle'))
end
context 'failure' do
@@ -50,7 +51,7 @@ RSpec.describe Backup::GitalyRpcBackup do
end
it 'logs an appropriate message', :aggregate_failures do
- subject.start(:create)
+ subject.start(:create, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.finish!
@@ -90,7 +91,7 @@ RSpec.describe Backup::GitalyRpcBackup do
copy_bundle_to_backup_path('personal_snippet_repo.bundle', personal_snippet.disk_path + '.bundle')
copy_bundle_to_backup_path('project_snippet_repo.bundle', project_snippet.disk_path + '.bundle')
- subject.start(:restore)
+ subject.start(:restore, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI)
subject.enqueue(project, Gitlab::GlRepository::DESIGN)
@@ -123,7 +124,7 @@ RSpec.describe Backup::GitalyRpcBackup do
repository
end
- subject.start(:restore)
+ subject.start(:restore, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI)
subject.enqueue(project, Gitlab::GlRepository::DESIGN)
@@ -141,7 +142,7 @@ RSpec.describe Backup::GitalyRpcBackup do
end
it 'logs an appropriate message', :aggregate_failures do
- subject.start(:restore)
+ subject.start(:restore, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.finish!
diff --git a/spec/lib/backup/lfs_spec.rb b/spec/lib/backup/lfs_spec.rb
index 6525019d9ac..a27f60f20d0 100644
--- a/spec/lib/backup/lfs_spec.rb
+++ b/spec/lib/backup/lfs_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Backup::Lfs do
expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found -C /var/lfs-objects -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(backup).to receive(:pipeline_succeeded?).and_return(true)
- backup.dump
+ backup.dump('lfs.tar.gz')
end
end
end
diff --git a/spec/lib/backup/manager_spec.rb b/spec/lib/backup/manager_spec.rb
index 9c186205067..9cf78a11bc7 100644
--- a/spec/lib/backup/manager_spec.rb
+++ b/spec/lib/backup/manager_spec.rb
@@ -6,16 +6,149 @@ RSpec.describe Backup::Manager do
include StubENV
let(:progress) { StringIO.new }
+ let(:definitions) { nil }
- subject { described_class.new(progress) }
+ subject { described_class.new(progress, definitions: definitions) }
before do
+ # Rspec fails with `uninitialized constant RSpec::Support::Differ` when it
+ # is trying to display a diff and `File.exist?` is stubbed. Adding a
+ # default stub fixes this.
+ allow(File).to receive(:exist?).and_call_original
+
allow(progress).to receive(:puts)
allow(progress).to receive(:print)
end
- describe '#pack' do
- let(:expected_backup_contents) { %w(repositories db uploads.tar.gz builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz terraform_state.tar.gz packages.tar.gz backup_information.yml) }
+ describe '#run_create_task' do
+ let(:enabled) { true }
+ let(:task) { instance_double(Backup::Task, human_name: 'my task', enabled: enabled) }
+ let(:definitions) { { 'my_task' => Backup::Manager::TaskDefinition.new(task: task, destination_path: 'my_task.tar.gz') } }
+
+ it 'calls the named task' do
+ expect(task).to receive(:dump)
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping my task ... ')
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done')
+
+ subject.run_create_task('my_task')
+ end
+
+ describe 'disabled' do
+ let(:enabled) { false }
+
+ it 'informs the user' do
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping my task ... ')
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: '[DISABLED]')
+
+ subject.run_create_task('my_task')
+ end
+ end
+
+ describe 'skipped' do
+ it 'informs the user' do
+ stub_env('SKIP', 'my_task')
+
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping my task ... ')
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: '[SKIPPED]')
+
+ subject.run_create_task('my_task')
+ end
+ end
+ end
+
+ describe '#run_restore_task' do
+ let(:enabled) { true }
+ let(:pre_restore_warning) { nil }
+ let(:post_restore_warning) { nil }
+ let(:definitions) { { 'my_task' => Backup::Manager::TaskDefinition.new(task: task, destination_path: 'my_task.tar.gz') } }
+ let(:backup_information) { {} }
+ let(:task) do
+ instance_double(Backup::Task,
+ human_name: 'my task',
+ enabled: enabled,
+ pre_restore_warning: pre_restore_warning,
+ post_restore_warning: post_restore_warning)
+ end
+
+ before do
+ allow(YAML).to receive(:load_file).with(File.join(Gitlab.config.backup.path, 'backup_information.yml'))
+ .and_return(backup_information)
+ end
+
+ it 'calls the named task' do
+ expect(task).to receive(:restore)
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done').ordered
+
+ subject.run_restore_task('my_task')
+ end
+
+ describe 'disabled' do
+ let(:enabled) { false }
+
+ it 'informs the user' do
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: '[DISABLED]').ordered
+
+ subject.run_restore_task('my_task')
+ end
+ end
+
+ describe 'pre_restore_warning' do
+ let(:pre_restore_warning) { 'Watch out!' }
+
+ it 'displays and waits for the user' do
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Watch out!').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done').ordered
+ expect(Gitlab::TaskHelpers).to receive(:ask_to_continue)
+ expect(task).to receive(:restore)
+
+ subject.run_restore_task('my_task')
+ end
+
+ it 'does not continue when the user quits' do
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Watch out!').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Quitting...').ordered
+ expect(Gitlab::TaskHelpers).to receive(:ask_to_continue).and_raise(Gitlab::TaskAbortedByUserError)
+
+ expect do
+ subject.run_restore_task('my_task')
+ end.to raise_error(SystemExit)
+ end
+ end
+
+ describe 'post_restore_warning' do
+ let(:post_restore_warning) { 'Watch out!' }
+
+ it 'displays and waits for the user' do
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Watch out!').ordered
+ expect(Gitlab::TaskHelpers).to receive(:ask_to_continue)
+ expect(task).to receive(:restore)
+
+ subject.run_restore_task('my_task')
+ end
+
+ it 'does not continue when the user quits' do
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Watch out!').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Quitting...').ordered
+ expect(task).to receive(:restore)
+ expect(Gitlab::TaskHelpers).to receive(:ask_to_continue).and_raise(Gitlab::TaskAbortedByUserError)
+
+ expect do
+ subject.run_restore_task('my_task')
+ end.to raise_error(SystemExit)
+ end
+ end
+ end
+
+ describe '#create' do
+ let(:expected_backup_contents) { %w{backup_information.yml task1.tar.gz task2.tar.gz} }
let(:tar_file) { '1546300800_2019_01_01_12.3_gitlab_backup.tar' }
let(:tar_system_options) { { out: [tar_file, 'w', Gitlab.config.backup.archive_permissions] } }
let(:tar_cmdline) { ['tar', '-cf', '-', *expected_backup_contents, tar_system_options] }
@@ -26,21 +159,28 @@ RSpec.describe Backup::Manager do
}
end
+ let(:task1) { instance_double(Backup::Task, human_name: 'task 1', enabled: true) }
+ let(:task2) { instance_double(Backup::Task, human_name: 'task 2', enabled: true) }
+ let(:definitions) do
+ {
+ 'task1' => Backup::Manager::TaskDefinition.new(task: task1, destination_path: 'task1.tar.gz'),
+ 'task2' => Backup::Manager::TaskDefinition.new(task: task2, destination_path: 'task2.tar.gz')
+ }
+ end
+
before do
allow(ActiveRecord::Base.connection).to receive(:reconnect!)
allow(Kernel).to receive(:system).and_return(true)
- allow(YAML).to receive(:load_file).and_return(backup_information)
-
- ::Backup::Manager::FOLDERS_TO_BACKUP.each do |folder|
- allow(Dir).to receive(:exist?).with(File.join(Gitlab.config.backup.path, folder)).and_return(true)
- end
+ allow(YAML).to receive(:load_file).with(File.join(Gitlab.config.backup.path, 'backup_information.yml'))
+ .and_return(backup_information)
allow(subject).to receive(:backup_information).and_return(backup_information)
- allow(subject).to receive(:upload)
+ allow(task1).to receive(:dump).with(File.join(Gitlab.config.backup.path, 'task1.tar.gz'))
+ allow(task2).to receive(:dump).with(File.join(Gitlab.config.backup.path, 'task2.tar.gz'))
end
it 'executes tar' do
- subject.pack
+ subject.create # rubocop:disable Rails/SaveBang
expect(Kernel).to have_received(:system).with(*tar_cmdline)
end
@@ -50,247 +190,401 @@ RSpec.describe Backup::Manager do
it 'uses the given value as tar file name' do
stub_env('BACKUP', '/ignored/path/custom')
- subject.pack
+ subject.create # rubocop:disable Rails/SaveBang
expect(Kernel).to have_received(:system).with(*tar_cmdline)
end
end
context 'when skipped is set in backup_information.yml' do
- let(:expected_backup_contents) { %w{db uploads.tar.gz builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz terraform_state.tar.gz packages.tar.gz backup_information.yml} }
+ let(:expected_backup_contents) { %w{backup_information.yml task1.tar.gz} }
let(:backup_information) do
{
backup_created_at: Time.zone.parse('2019-01-01'),
gitlab_version: '12.3',
- skipped: ['repositories']
+ skipped: ['task2']
}
end
it 'executes tar' do
- subject.pack
+ subject.create # rubocop:disable Rails/SaveBang
expect(Kernel).to have_received(:system).with(*tar_cmdline)
end
end
- context 'when a directory does not exist' do
- let(:expected_backup_contents) { %w{db uploads.tar.gz builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz terraform_state.tar.gz packages.tar.gz backup_information.yml} }
-
- before do
- expect(Dir).to receive(:exist?).with(File.join(Gitlab.config.backup.path, 'repositories')).and_return(false)
+ context 'when the destination is optional' do
+ let(:expected_backup_contents) { %w{backup_information.yml task1.tar.gz} }
+ let(:definitions) do
+ {
+ 'task1' => Backup::Manager::TaskDefinition.new(task: task1, destination_path: 'task1.tar.gz'),
+ 'task2' => Backup::Manager::TaskDefinition.new(task: task2, destination_path: 'task2.tar.gz', destination_optional: true)
+ }
end
it 'executes tar' do
- subject.pack
+ expect(File).to receive(:exist?).with(File.join(Gitlab.config.backup.path, 'task2.tar.gz')).and_return(false)
+
+ subject.create # rubocop:disable Rails/SaveBang
expect(Kernel).to have_received(:system).with(*tar_cmdline)
end
end
- end
- describe '#remove_tmp' do
- let(:path) { File.join(Gitlab.config.backup.path, 'tmp') }
+ context 'many backup files' do
+ let(:files) do
+ [
+ '1451606400_2016_01_01_1.2.3_gitlab_backup.tar',
+ '1451520000_2015_12_31_4.5.6_gitlab_backup.tar',
+ '1451520000_2015_12_31_4.5.6-pre_gitlab_backup.tar',
+ '1451520000_2015_12_31_4.5.6-rc1_gitlab_backup.tar',
+ '1451520000_2015_12_31_4.5.6-pre-ee_gitlab_backup.tar',
+ '1451510000_2015_12_30_gitlab_backup.tar',
+ '1450742400_2015_12_22_gitlab_backup.tar',
+ '1449878400_gitlab_backup.tar',
+ '1449014400_gitlab_backup.tar',
+ 'manual_gitlab_backup.tar'
+ ]
+ end
- before do
- allow(FileUtils).to receive(:rm_rf).and_return(true)
- end
+ before do
+ allow(Dir).to receive(:chdir).and_yield
+ allow(Dir).to receive(:glob).and_return(files)
+ allow(FileUtils).to receive(:rm)
+ allow(Time).to receive(:now).and_return(Time.utc(2016))
+ end
- it 'removes backups/tmp dir' do
- subject.remove_tmp
+ context 'when keep_time is zero' do
+ before do
+ allow(Gitlab.config.backup).to receive(:keep_time).and_return(0)
- expect(FileUtils).to have_received(:rm_rf).with(path)
- end
+ subject.create # rubocop:disable Rails/SaveBang
+ end
- it 'prints running task with a done confirmation' do
- subject.remove_tmp
+ it 'removes no files' do
+ expect(FileUtils).not_to have_received(:rm)
+ end
- expect(progress).to have_received(:print).with('Deleting backups/tmp ... ')
- expect(progress).to have_received(:puts).with('done')
- end
- end
+ it 'prints a skipped message' do
+ expect(progress).to have_received(:puts).with('skipping')
+ end
+ end
- describe '#remove_old' do
- let(:files) do
- [
- '1451606400_2016_01_01_1.2.3_gitlab_backup.tar',
- '1451520000_2015_12_31_4.5.6_gitlab_backup.tar',
- '1451520000_2015_12_31_4.5.6-pre_gitlab_backup.tar',
- '1451520000_2015_12_31_4.5.6-rc1_gitlab_backup.tar',
- '1451520000_2015_12_31_4.5.6-pre-ee_gitlab_backup.tar',
- '1451510000_2015_12_30_gitlab_backup.tar',
- '1450742400_2015_12_22_gitlab_backup.tar',
- '1449878400_gitlab_backup.tar',
- '1449014400_gitlab_backup.tar',
- 'manual_gitlab_backup.tar'
- ]
- end
+ context 'when no valid file is found' do
+ let(:files) do
+ [
+ '14516064000_2016_01_01_1.2.3_gitlab_backup.tar',
+ 'foo_1451520000_2015_12_31_4.5.6_gitlab_backup.tar',
+ '1451520000_2015_12_31_4.5.6-foo_gitlab_backup.tar'
+ ]
+ end
- before do
- allow(Dir).to receive(:chdir).and_yield
- allow(Dir).to receive(:glob).and_return(files)
- allow(FileUtils).to receive(:rm)
- allow(Time).to receive(:now).and_return(Time.utc(2016))
- end
+ before do
+ allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
- context 'when keep_time is zero' do
- before do
- allow(Gitlab.config.backup).to receive(:keep_time).and_return(0)
+ subject.create # rubocop:disable Rails/SaveBang
+ end
- subject.remove_old
- end
+ it 'removes no files' do
+ expect(FileUtils).not_to have_received(:rm)
+ end
- it 'removes no files' do
- expect(FileUtils).not_to have_received(:rm)
+ it 'prints a done message' do
+ expect(progress).to have_received(:puts).with('done. (0 removed)')
+ end
end
- it 'prints a skipped message' do
- expect(progress).to have_received(:puts).with('skipping')
- end
- end
+ context 'when there are no files older than keep_time' do
+ before do
+ # Set to 30 days
+ allow(Gitlab.config.backup).to receive(:keep_time).and_return(2592000)
- context 'when no valid file is found' do
- let(:files) do
- [
- '14516064000_2016_01_01_1.2.3_gitlab_backup.tar',
- 'foo_1451520000_2015_12_31_4.5.6_gitlab_backup.tar',
- '1451520000_2015_12_31_4.5.6-foo_gitlab_backup.tar'
- ]
- end
+ subject.create # rubocop:disable Rails/SaveBang
+ end
- before do
- allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
+ it 'removes no files' do
+ expect(FileUtils).not_to have_received(:rm)
+ end
- subject.remove_old
+ it 'prints a done message' do
+ expect(progress).to have_received(:puts).with('done. (0 removed)')
+ end
end
- it 'removes no files' do
- expect(FileUtils).not_to have_received(:rm)
- end
+ context 'when keep_time is set to remove files' do
+ before do
+ # Set to 1 second
+ allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
- it 'prints a done message' do
- expect(progress).to have_received(:puts).with('done. (0 removed)')
- end
- end
+ subject.create # rubocop:disable Rails/SaveBang
+ end
- context 'when there are no files older than keep_time' do
- before do
- # Set to 30 days
- allow(Gitlab.config.backup).to receive(:keep_time).and_return(2592000)
+ it 'removes matching files with a human-readable versioned timestamp' do
+ expect(FileUtils).to have_received(:rm).with(files[1])
+ expect(FileUtils).to have_received(:rm).with(files[2])
+ expect(FileUtils).to have_received(:rm).with(files[3])
+ end
- subject.remove_old
- end
+ it 'removes matching files with a human-readable versioned timestamp with tagged EE' do
+ expect(FileUtils).to have_received(:rm).with(files[4])
+ end
- it 'removes no files' do
- expect(FileUtils).not_to have_received(:rm)
- end
+ it 'removes matching files with a human-readable non-versioned timestamp' do
+ expect(FileUtils).to have_received(:rm).with(files[5])
+ expect(FileUtils).to have_received(:rm).with(files[6])
+ end
- it 'prints a done message' do
- expect(progress).to have_received(:puts).with('done. (0 removed)')
- end
- end
+ it 'removes matching files without a human-readable timestamp' do
+ expect(FileUtils).to have_received(:rm).with(files[7])
+ expect(FileUtils).to have_received(:rm).with(files[8])
+ end
- context 'when keep_time is set to remove files' do
- before do
- # Set to 1 second
- allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
+ it 'does not remove files that are not old enough' do
+ expect(FileUtils).not_to have_received(:rm).with(files[0])
+ end
- subject.remove_old
- end
+ it 'does not remove non-matching files' do
+ expect(FileUtils).not_to have_received(:rm).with(files[9])
+ end
- it 'removes matching files with a human-readable versioned timestamp' do
- expect(FileUtils).to have_received(:rm).with(files[1])
- expect(FileUtils).to have_received(:rm).with(files[2])
- expect(FileUtils).to have_received(:rm).with(files[3])
+ it 'prints a done message' do
+ expect(progress).to have_received(:puts).with('done. (8 removed)')
+ end
end
- it 'removes matching files with a human-readable versioned timestamp with tagged EE' do
- expect(FileUtils).to have_received(:rm).with(files[4])
- end
+ context 'when removing a file fails' do
+ let(:file) { files[1] }
+ let(:message) { "Permission denied @ unlink_internal - #{file}" }
- it 'removes matching files with a human-readable non-versioned timestamp' do
- expect(FileUtils).to have_received(:rm).with(files[5])
- expect(FileUtils).to have_received(:rm).with(files[6])
- end
+ before do
+ allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
+ allow(FileUtils).to receive(:rm).with(file).and_raise(Errno::EACCES, message)
- it 'removes matching files without a human-readable timestamp' do
- expect(FileUtils).to have_received(:rm).with(files[7])
- expect(FileUtils).to have_received(:rm).with(files[8])
- end
+ subject.create # rubocop:disable Rails/SaveBang
+ end
- it 'does not remove files that are not old enough' do
- expect(FileUtils).not_to have_received(:rm).with(files[0])
- end
+ it 'removes the remaining expected files' do
+ expect(FileUtils).to have_received(:rm).with(files[4])
+ expect(FileUtils).to have_received(:rm).with(files[5])
+ expect(FileUtils).to have_received(:rm).with(files[6])
+ expect(FileUtils).to have_received(:rm).with(files[7])
+ expect(FileUtils).to have_received(:rm).with(files[8])
+ end
- it 'does not remove non-matching files' do
- expect(FileUtils).not_to have_received(:rm).with(files[9])
- end
+ it 'sets the correct removed count' do
+ expect(progress).to have_received(:puts).with('done. (7 removed)')
+ end
- it 'prints a done message' do
- expect(progress).to have_received(:puts).with('done. (8 removed)')
+ it 'prints the error from the file that could not be removed' do
+ expect(progress).to have_received(:puts).with(a_string_matching(message))
+ end
end
end
- context 'when removing a file fails' do
- let(:file) { files[1] }
- let(:message) { "Permission denied @ unlink_internal - #{file}" }
+ describe 'cloud storage' do
+ let(:backup_file) { Tempfile.new('backup', Gitlab.config.backup.path) }
+ let(:backup_filename) { File.basename(backup_file.path) }
before do
- allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
- allow(FileUtils).to receive(:rm).with(file).and_raise(Errno::EACCES, message)
+ allow(subject).to receive(:tar_file).and_return(backup_filename)
- subject.remove_old
- end
+ stub_backup_setting(
+ upload: {
+ connection: {
+ provider: 'AWS',
+ aws_access_key_id: 'id',
+ aws_secret_access_key: 'secret'
+ },
+ remote_directory: 'directory',
+ multipart_chunk_size: 104857600,
+ encryption: nil,
+ encryption_key: nil,
+ storage_class: nil
+ }
+ )
- it 'removes the remaining expected files' do
- expect(FileUtils).to have_received(:rm).with(files[4])
- expect(FileUtils).to have_received(:rm).with(files[5])
- expect(FileUtils).to have_received(:rm).with(files[6])
- expect(FileUtils).to have_received(:rm).with(files[7])
- expect(FileUtils).to have_received(:rm).with(files[8])
- end
+ Fog.mock!
- it 'sets the correct removed count' do
- expect(progress).to have_received(:puts).with('done. (7 removed)')
+ # the Fog mock only knows about directories we create explicitly
+ connection = ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys)
+ connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
end
- it 'prints the error from file that could not be removed' do
- expect(progress).to have_received(:puts).with(a_string_matching(message))
+ context 'target path' do
+ it 'uses the tar filename by default' do
+ expect_any_instance_of(Fog::Collection).to receive(:create)
+ .with(hash_including(key: backup_filename, public: false))
+ .and_call_original
+
+ subject.create # rubocop:disable Rails/SaveBang
+ end
+
+ it 'adds the DIRECTORY environment variable if present' do
+ stub_env('DIRECTORY', 'daily')
+
+ expect_any_instance_of(Fog::Collection).to receive(:create)
+ .with(hash_including(key: "daily/#{backup_filename}", public: false))
+ .and_call_original
+
+ subject.create # rubocop:disable Rails/SaveBang
+ end
end
- end
- end
- describe 'verify_backup_version' do
- context 'on version mismatch' do
- let(:gitlab_version) { Gitlab::VERSION }
+ context 'with AWS with server side encryption' do
+ let(:connection) { ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys) }
+ let(:encryption_key) { nil }
+ let(:encryption) { nil }
+ let(:storage_options) { nil }
+
+ before do
+ stub_backup_setting(
+ upload: {
+ connection: {
+ provider: 'AWS',
+ aws_access_key_id: 'AWS_ACCESS_KEY_ID',
+ aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY'
+ },
+ remote_directory: 'directory',
+ multipart_chunk_size: Gitlab.config.backup.upload.multipart_chunk_size,
+ encryption: encryption,
+ encryption_key: encryption_key,
+ storage_options: storage_options,
+ storage_class: nil
+ }
+ )
+
+ connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
+ end
+
+ context 'with SSE-S3 without using storage_options' do
+ let(:encryption) { 'AES256' }
+
+ it 'sets encryption attributes' do
+ subject.create # rubocop:disable Rails/SaveBang
+
+ expect(progress).to have_received(:puts).with("done (encrypted with AES256)")
+ end
+ end
+
+ context 'with SSE-C (customer-provided keys) options' do
+ let(:encryption) { 'AES256' }
+ let(:encryption_key) { SecureRandom.hex }
- it 'stops the process' do
- allow(YAML).to receive(:load_file)
- .and_return({ gitlab_version: "not #{gitlab_version}" })
+ it 'sets encryption attributes' do
+ subject.create # rubocop:disable Rails/SaveBang
- expect { subject.verify_backup_version }.to raise_error SystemExit
+ expect(progress).to have_received(:puts).with("done (encrypted with AES256)")
+ end
+ end
+
+ context 'with SSE-KMS options' do
+ let(:storage_options) do
+ {
+ server_side_encryption: 'aws:kms',
+ server_side_encryption_kms_key_id: 'arn:aws:kms:12345'
+ }
+ end
+
+ it 'sets encryption attributes' do
+ subject.create # rubocop:disable Rails/SaveBang
+
+ expect(progress).to have_received(:puts).with("done (encrypted with aws:kms)")
+ end
+ end
end
- end
- context 'on version match' do
- let(:gitlab_version) { Gitlab::VERSION }
+ context 'with Google provider' do
+ before do
+ stub_backup_setting(
+ upload: {
+ connection: {
+ provider: 'Google',
+ google_storage_access_key_id: 'test-access-id',
+ google_storage_secret_access_key: 'secret'
+ },
+ remote_directory: 'directory',
+ multipart_chunk_size: Gitlab.config.backup.upload.multipart_chunk_size,
+ encryption: nil,
+ encryption_key: nil,
+ storage_class: nil
+ }
+ )
+
+ connection = ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys)
+ connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
+ end
- it 'does nothing' do
- allow(YAML).to receive(:load_file)
- .and_return({ gitlab_version: "#{gitlab_version}" })
+ it 'does not attempt to set ACL' do
+ expect_any_instance_of(Fog::Collection).to receive(:create)
+ .with(hash_excluding(public: false))
+ .and_call_original
- expect { subject.verify_backup_version }.not_to raise_error
+ subject.create # rubocop:disable Rails/SaveBang
+ end
+ end
+
+ context 'with AzureRM provider' do
+ before do
+ stub_backup_setting(
+ upload: {
+ connection: {
+ provider: 'AzureRM',
+ azure_storage_account_name: 'test-access-id',
+ azure_storage_access_key: 'secret'
+ },
+ remote_directory: 'directory',
+ multipart_chunk_size: nil,
+ encryption: nil,
+ encryption_key: nil,
+ storage_class: nil
+ }
+ )
+ end
+
+ it 'loads the provider' do
+ expect { subject.create }.not_to raise_error # rubocop:disable Rails/SaveBang
+ end
end
end
end
- describe '#unpack' do
+ describe '#restore' do
+ let(:task1) { instance_double(Backup::Task, human_name: 'task 1', enabled: true, pre_restore_warning: nil, post_restore_warning: nil) }
+ let(:task2) { instance_double(Backup::Task, human_name: 'task 2', enabled: true, pre_restore_warning: nil, post_restore_warning: nil) }
+ let(:definitions) do
+ {
+ 'task1' => Backup::Manager::TaskDefinition.new(task: task1, destination_path: 'task1.tar.gz'),
+ 'task2' => Backup::Manager::TaskDefinition.new(task: task2, destination_path: 'task2.tar.gz')
+ }
+ end
+
+ let(:gitlab_version) { Gitlab::VERSION }
+ let(:backup_information) do
+ {
+ backup_created_at: Time.zone.parse('2019-01-01'),
+ gitlab_version: gitlab_version
+ }
+ end
+
+ before do
+ Rake.application.rake_require 'tasks/gitlab/shell'
+ Rake.application.rake_require 'tasks/cache'
+
+ allow(task1).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'task1.tar.gz'))
+ allow(task2).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'task2.tar.gz'))
+ allow(YAML).to receive(:load_file).with(File.join(Gitlab.config.backup.path, 'backup_information.yml'))
+ .and_return(backup_information)
+ allow(Rake::Task['gitlab:shell:setup']).to receive(:invoke)
+ allow(Rake::Task['cache:clear']).to receive(:invoke)
+ end
+
context 'when there are no backup files in the directory' do
before do
allow(Dir).to receive(:glob).and_return([])
end
it 'fails the operation and prints an error' do
- expect { subject.unpack }.to raise_error SystemExit
+ expect { subject.restore }.to raise_error SystemExit
expect(progress).to have_received(:puts)
.with(a_string_matching('No backups found'))
end
@@ -307,13 +601,13 @@ RSpec.describe Backup::Manager do
end
it 'prints the list of available backups' do
- expect { subject.unpack }.to raise_error SystemExit
+ expect { subject.restore }.to raise_error SystemExit
expect(progress).to have_received(:puts)
.with(a_string_matching('1451606400_2016_01_01_1.2.3\n 1451520000_2015_12_31'))
end
it 'fails the operation and prints an error' do
- expect { subject.unpack }.to raise_error SystemExit
+ expect { subject.restore }.to raise_error SystemExit
expect(progress).to have_received(:puts)
.with(a_string_matching('Found more than one backup'))
end
@@ -332,7 +626,7 @@ RSpec.describe Backup::Manager do
end
it 'fails the operation and prints an error' do
- expect { subject.unpack }.to raise_error SystemExit
+ expect { subject.restore }.to raise_error SystemExit
expect(File).to have_received(:exist?).with('wrong_gitlab_backup.tar')
expect(progress).to have_received(:puts)
.with(a_string_matching('The backup file wrong_gitlab_backup.tar does not exist'))
@@ -348,17 +642,46 @@ RSpec.describe Backup::Manager do
)
allow(File).to receive(:exist?).and_return(true)
allow(Kernel).to receive(:system).and_return(true)
- allow(YAML).to receive(:load_file).and_return(gitlab_version: Gitlab::VERSION)
stub_env('BACKUP', '/ignored/path/1451606400_2016_01_01_1.2.3')
end
it 'unpacks the file' do
- subject.unpack
+ subject.restore
expect(Kernel).to have_received(:system)
.with("tar", "-xf", "1451606400_2016_01_01_1.2.3_gitlab_backup.tar")
- expect(progress).to have_received(:puts).with(a_string_matching('done'))
+ end
+
+ context 'on version mismatch' do
+ let(:backup_information) do
+ {
+ backup_created_at: Time.zone.parse('2019-01-01'),
+ gitlab_version: "not #{gitlab_version}"
+ }
+ end
+
+ it 'stops the process' do
+ expect { subject.restore }.to raise_error SystemExit
+ expect(progress).to have_received(:puts)
+ .with(a_string_matching('GitLab version mismatch'))
+ end
+ end
+
+ describe 'tmp files' do
+ let(:path) { File.join(Gitlab.config.backup.path, 'tmp') }
+
+ before do
+ allow(FileUtils).to receive(:rm_rf).and_call_original
+ end
+
+ it 'removes backups/tmp dir' do
+ expect(FileUtils).to receive(:rm_rf).with(path).and_call_original
+
+ subject.restore
+
+ expect(progress).to have_received(:print).with('Deleting backups/tmp ... ')
+ end
end
end
@@ -375,184 +698,41 @@ RSpec.describe Backup::Manager do
it 'selects the non-tarred backup to restore from' do
expect(Kernel).not_to receive(:system)
- subject.unpack
+ subject.restore
expect(progress).to have_received(:puts)
.with(a_string_matching('Non tarred backup found '))
end
- end
- end
-
- describe '#upload' do
- let(:backup_file) { Tempfile.new('backup', Gitlab.config.backup.path) }
- let(:backup_filename) { File.basename(backup_file.path) }
-
- before do
- allow(subject).to receive(:tar_file).and_return(backup_filename)
-
- stub_backup_setting(
- upload: {
- connection: {
- provider: 'AWS',
- aws_access_key_id: 'id',
- aws_secret_access_key: 'secret'
- },
- remote_directory: 'directory',
- multipart_chunk_size: 104857600,
- encryption: nil,
- encryption_key: nil,
- storage_class: nil
- }
- )
-
- Fog.mock!
-
- # the Fog mock only knows about directories we create explicitly
- connection = ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys)
- connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
- end
- context 'target path' do
- it 'uses the tar filename by default' do
- expect_any_instance_of(Fog::Collection).to receive(:create)
- .with(hash_including(key: backup_filename, public: false))
- .and_return(true)
-
- subject.upload
- end
-
- it 'adds the DIRECTORY environment variable if present' do
- stub_env('DIRECTORY', 'daily')
-
- expect_any_instance_of(Fog::Collection).to receive(:create)
- .with(hash_including(key: "daily/#{backup_filename}", public: false))
- .and_return(true)
-
- subject.upload
- end
- end
-
- context 'with AWS with server side encryption' do
- let(:connection) { ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys) }
- let(:encryption_key) { nil }
- let(:encryption) { nil }
- let(:storage_options) { nil }
-
- before do
- stub_backup_setting(
- upload: {
- connection: {
- provider: 'AWS',
- aws_access_key_id: 'AWS_ACCESS_KEY_ID',
- aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY'
- },
- remote_directory: 'directory',
- multipart_chunk_size: Gitlab.config.backup.upload.multipart_chunk_size,
- encryption: encryption,
- encryption_key: encryption_key,
- storage_options: storage_options,
- storage_class: nil
- }
- )
-
- connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
- end
-
- context 'with SSE-S3 without using storage_options' do
- let(:encryption) { 'AES256' }
-
- it 'sets encryption attributes' do
- result = subject.upload
-
- expect(result.key).to be_present
- expect(result.encryption).to eq('AES256')
- expect(result.encryption_key).to be_nil
- expect(result.kms_key_id).to be_nil
- end
- end
-
- context 'with SSE-C (customer-provided keys) options' do
- let(:encryption) { 'AES256' }
- let(:encryption_key) { SecureRandom.hex }
-
- it 'sets encryption attributes' do
- result = subject.upload
-
- expect(result.key).to be_present
- expect(result.encryption).to eq(encryption)
- expect(result.encryption_key).to eq(encryption_key)
- expect(result.kms_key_id).to be_nil
- end
- end
-
- context 'with SSE-KMS options' do
- let(:storage_options) do
+ context 'on version mismatch' do
+ let(:backup_information) do
{
- server_side_encryption: 'aws:kms',
- server_side_encryption_kms_key_id: 'arn:aws:kms:12345'
+ backup_created_at: Time.zone.parse('2019-01-01'),
+ gitlab_version: "not #{gitlab_version}"
}
end
- it 'sets encryption attributes' do
- result = subject.upload
-
- expect(result.key).to be_present
- expect(result.encryption).to eq('aws:kms')
- expect(result.kms_key_id).to eq('arn:aws:kms:12345')
+ it 'stops the process' do
+ expect { subject.restore }.to raise_error SystemExit
+ expect(progress).to have_received(:puts)
+ .with(a_string_matching('GitLab version mismatch'))
end
end
- end
- context 'with Google provider' do
- before do
- stub_backup_setting(
- upload: {
- connection: {
- provider: 'Google',
- google_storage_access_key_id: 'test-access-id',
- google_storage_secret_access_key: 'secret'
- },
- remote_directory: 'directory',
- multipart_chunk_size: Gitlab.config.backup.upload.multipart_chunk_size,
- encryption: nil,
- encryption_key: nil,
- storage_class: nil
- }
- )
-
- connection = ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys)
- connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
- end
+ describe 'tmp files' do
+ let(:path) { File.join(Gitlab.config.backup.path, 'tmp') }
- it 'does not attempt to set ACL' do
- expect_any_instance_of(Fog::Collection).to receive(:create)
- .with(hash_excluding(public: false))
- .and_return(true)
+ before do
+ allow(FileUtils).to receive(:rm_rf).and_call_original
+ end
- subject.upload
- end
- end
+ it 'removes backups/tmp dir' do
+ expect(FileUtils).to receive(:rm_rf).with(path).and_call_original
- context 'with AzureRM provider' do
- before do
- stub_backup_setting(
- upload: {
- connection: {
- provider: 'AzureRM',
- azure_storage_account_name: 'test-access-id',
- azure_storage_access_key: 'secret'
- },
- remote_directory: 'directory',
- multipart_chunk_size: nil,
- encryption: nil,
- encryption_key: nil,
- storage_class: nil
- }
- )
- end
+ subject.restore
- it 'loads the provider' do
- expect { subject.upload }.not_to raise_error
+ expect(progress).to have_received(:print).with('Deleting backups/tmp ... ')
+ end
end
end
end
diff --git a/spec/lib/backup/object_backup_spec.rb b/spec/lib/backup/object_backup_spec.rb
index 4d34dc0ade7..85658173b0e 100644
--- a/spec/lib/backup/object_backup_spec.rb
+++ b/spec/lib/backup/object_backup_spec.rb
@@ -21,7 +21,7 @@ RSpec.shared_examples 'backup object' do |setting|
expect(backup).to receive(:run_pipeline!).with([%W(blabla-tar --exclude=lost+found --exclude=./tmp -C #{backup_path} -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(backup).to receive(:pipeline_succeeded?).and_return(true)
- backup.dump
+ backup.dump('backup_object.tar.gz')
end
end
end
diff --git a/spec/lib/backup/pages_spec.rb b/spec/lib/backup/pages_spec.rb
index f9ee4bbdc41..095dda61cf4 100644
--- a/spec/lib/backup/pages_spec.rb
+++ b/spec/lib/backup/pages_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Backup::Pages do
expect(subject).to receive(:tar).and_return('blabla-tar')
expect(subject).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./@pages.tmp -C /var/gitlab-pages -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(subject).to receive(:pipeline_succeeded?).and_return(true)
- subject.dump
+ subject.dump('pages.tar.gz')
end
end
end
diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/repositories_spec.rb
index 0b29a25360d..db3e507596f 100644
--- a/spec/lib/backup/repositories_spec.rb
+++ b/spec/lib/backup/repositories_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Backup::Repositories do
let(:strategy) { spy(:strategy, parallel_enqueue?: parallel_enqueue) }
let(:max_concurrency) { 1 }
let(:max_storage_concurrency) { 1 }
+ let(:destination) { 'repositories' }
subject do
described_class.new(
@@ -26,9 +27,9 @@ RSpec.describe Backup::Repositories do
project_snippet = create(:project_snippet, :repository, project: project)
personal_snippet = create(:personal_snippet, :repository, author: project.first_owner)
- subject.dump
+ subject.dump(destination)
- expect(strategy).to have_received(:start).with(:create)
+ expect(strategy).to have_received(:start).with(:create, destination)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
@@ -54,38 +55,38 @@ RSpec.describe Backup::Repositories do
it 'creates the expected number of threads' do
expect(Thread).not_to receive(:new)
- expect(strategy).to receive(:start).with(:create)
+ expect(strategy).to receive(:start).with(:create, destination)
projects.each do |project|
expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
end
expect(strategy).to receive(:finish!)
- subject.dump
+ subject.dump(destination)
end
describe 'command failure' do
it 'enqueue_project raises an error' do
allow(strategy).to receive(:enqueue).with(anything, Gitlab::GlRepository::PROJECT).and_raise(IOError)
- expect { subject.dump }.to raise_error(IOError)
+ expect { subject.dump(destination) }.to raise_error(IOError)
end
it 'project query raises an error' do
allow(Project).to receive_message_chain(:includes, :find_each).and_raise(ActiveRecord::StatementTimeout)
- expect { subject.dump }.to raise_error(ActiveRecord::StatementTimeout)
+ expect { subject.dump(destination) }.to raise_error(ActiveRecord::StatementTimeout)
end
end
it 'avoids N+1 database queries' do
control_count = ActiveRecord::QueryRecorder.new do
- subject.dump
+ subject.dump(destination)
end.count
create_list(:project, 2, :repository)
expect do
- subject.dump
+ subject.dump(destination)
end.not_to exceed_query_limit(control_count)
end
end
@@ -98,13 +99,13 @@ RSpec.describe Backup::Repositories do
it 'enqueues all projects sequentially' do
expect(Thread).not_to receive(:new)
- expect(strategy).to receive(:start).with(:create)
+ expect(strategy).to receive(:start).with(:create, destination)
projects.each do |project|
expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
end
expect(strategy).to receive(:finish!)
- subject.dump
+ subject.dump(destination)
end
end
@@ -122,13 +123,13 @@ RSpec.describe Backup::Repositories do
.exactly(storage_keys.length * (max_storage_concurrency + 1)).times
.and_call_original
- expect(strategy).to receive(:start).with(:create)
+ expect(strategy).to receive(:start).with(:create, destination)
projects.each do |project|
expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
end
expect(strategy).to receive(:finish!)
- subject.dump
+ subject.dump(destination)
end
context 'with extra max concurrency' do
@@ -139,13 +140,13 @@ RSpec.describe Backup::Repositories do
.exactly(storage_keys.length * (max_storage_concurrency + 1)).times
.and_call_original
- expect(strategy).to receive(:start).with(:create)
+ expect(strategy).to receive(:start).with(:create, destination)
projects.each do |project|
expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
end
expect(strategy).to receive(:finish!)
- subject.dump
+ subject.dump(destination)
end
end
@@ -153,33 +154,33 @@ RSpec.describe Backup::Repositories do
it 'enqueue_project raises an error' do
allow(strategy).to receive(:enqueue).and_raise(IOError)
- expect { subject.dump }.to raise_error(IOError)
+ expect { subject.dump(destination) }.to raise_error(IOError)
end
it 'project query raises an error' do
allow(Project).to receive_message_chain(:for_repository_storage, :includes, :find_each).and_raise(ActiveRecord::StatementTimeout)
- expect { subject.dump }.to raise_error(ActiveRecord::StatementTimeout)
+ expect { subject.dump(destination) }.to raise_error(ActiveRecord::StatementTimeout)
end
context 'misconfigured storages' do
let(:storage_keys) { %w[test_second_storage] }
it 'raises an error' do
- expect { subject.dump }.to raise_error(Backup::Error, 'repositories.storages in gitlab.yml is misconfigured')
+ expect { subject.dump(destination) }.to raise_error(Backup::Error, 'repositories.storages in gitlab.yml is misconfigured')
end
end
end
it 'avoids N+1 database queries' do
control_count = ActiveRecord::QueryRecorder.new do
- subject.dump
+ subject.dump(destination)
end.count
create_list(:project, 2, :repository)
expect do
- subject.dump
+ subject.dump(destination)
end.not_to exceed_query_limit(control_count)
end
end
@@ -192,9 +193,9 @@ RSpec.describe Backup::Repositories do
let_it_be(:project_snippet) { create(:project_snippet, project: project, author: project.first_owner) }
it 'calls enqueue for each repository type', :aggregate_failures do
- subject.restore
+ subject.restore(destination)
- expect(strategy).to have_received(:start).with(:restore)
+ expect(strategy).to have_received(:start).with(:restore, destination)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
@@ -208,7 +209,7 @@ RSpec.describe Backup::Repositories do
pool_repository = create(:pool_repository, :failed)
pool_repository.delete_object_pool
- subject.restore
+ subject.restore(destination)
pool_repository.reload
expect(pool_repository).not_to be_failed
@@ -219,7 +220,7 @@ RSpec.describe Backup::Repositories do
pool_repository = create(:pool_repository, state: :obsolete)
pool_repository.update_column(:source_project_id, nil)
- subject.restore
+ subject.restore(destination)
pool_repository.reload
expect(pool_repository).to be_obsolete
@@ -236,14 +237,14 @@ RSpec.describe Backup::Repositories do
end
it 'shows the appropriate error' do
- subject.restore
+ subject.restore(destination)
expect(progress).to have_received(:puts).with("Snippet #{personal_snippet.full_path} can't be restored: Repository has more than one branch")
expect(progress).to have_received(:puts).with("Snippet #{project_snippet.full_path} can't be restored: Repository has more than one branch")
end
it 'removes the snippets from the DB' do
- expect { subject.restore }.to change(PersonalSnippet, :count).by(-1)
+ expect { subject.restore(destination) }.to change(PersonalSnippet, :count).by(-1)
.and change(ProjectSnippet, :count).by(-1)
.and change(SnippetRepository, :count).by(-2)
end
@@ -253,7 +254,7 @@ RSpec.describe Backup::Repositories do
shard_name = personal_snippet.repository.shard
path = personal_snippet.disk_path + '.git'
- subject.restore
+ subject.restore(destination)
expect(gitlab_shell.repository_exists?(shard_name, path)).to eq false
end
diff --git a/spec/lib/backup/task_spec.rb b/spec/lib/backup/task_spec.rb
new file mode 100644
index 00000000000..b0eb885d3f4
--- /dev/null
+++ b/spec/lib/backup/task_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Backup::Task do
+ let(:progress) { StringIO.new }
+
+ subject { described_class.new(progress) }
+
+ describe '#human_name' do
+ it 'must be implemented by the subclass' do
+ expect { subject.human_name }.to raise_error(NotImplementedError)
+ end
+ end
+
+ describe '#dump' do
+ it 'must be implemented by the subclass' do
+ expect { subject.dump('some/path') }.to raise_error(NotImplementedError)
+ end
+ end
+
+ describe '#restore' do
+ it 'must be implemented by the subclass' do
+ expect { subject.restore('some/path') }.to raise_error(NotImplementedError)
+ end
+ end
+end
diff --git a/spec/lib/backup/uploads_spec.rb b/spec/lib/backup/uploads_spec.rb
index 25ad0c0d3f7..0cfc80a9cb9 100644
--- a/spec/lib/backup/uploads_spec.rb
+++ b/spec/lib/backup/uploads_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Backup::Uploads do
expect(backup).to receive(:tar).and_return('blabla-tar')
expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./tmp -C /var/uploads -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(backup).to receive(:pipeline_succeeded?).and_return(true)
- backup.dump
+ backup.dump('uploads.tar.gz')
end
end
end
diff --git a/spec/lib/banzai/filter/front_matter_filter_spec.rb b/spec/lib/banzai/filter/front_matter_filter_spec.rb
index 1562c388296..f3543ab9582 100644
--- a/spec/lib/banzai/filter/front_matter_filter_spec.rb
+++ b/spec/lib/banzai/filter/front_matter_filter_spec.rb
@@ -105,6 +105,56 @@ RSpec.describe Banzai::Filter::FrontMatterFilter do
end
end
+ context 'source position mapping' do
+ it 'keeps spaces before and after' do
+ content = <<~MD
+
+
+ ---
+
+ foo: :foo_symbol
+
+ ---
+
+
+ # Header
+ MD
+
+ output = filter(content)
+
+ expect(output).to eq <<~MD
+
+
+ ```yaml:frontmatter
+
+ foo: :foo_symbol
+
+ ```
+
+
+ # Header
+ MD
+ end
+
+ it 'keeps an empty line in place of the encoding comment' do
+ content = <<~MD
+ # encoding: UTF-8
+ ---
+ foo: :foo_symbol
+ ---
+ MD
+
+ output = filter(content)
+
+ expect(output).to eq <<~MD
+
+ ```yaml:frontmatter
+ foo: :foo_symbol
+ ```
+ MD
+ end
+ end
+
context 'on content without front matter' do
it 'returns the content unmodified' do
content = <<~MD
@@ -119,7 +169,7 @@ RSpec.describe Banzai::Filter::FrontMatterFilter do
context 'on front matter without content' do
it 'converts YAML front matter to a fenced code block' do
- content = <<~MD
+ content = <<~MD.rstrip
---
foo: :foo_symbol
bar: :bar_symbol
@@ -134,7 +184,6 @@ RSpec.describe Banzai::Filter::FrontMatterFilter do
foo: :foo_symbol
bar: :bar_symbol
```
-
MD
end
end
diff --git a/spec/lib/banzai/filter/image_link_filter_spec.rb b/spec/lib/banzai/filter/image_link_filter_spec.rb
index 5c04f6b2b3e..238c3cdb9c1 100644
--- a/spec/lib/banzai/filter/image_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/image_link_filter_spec.rb
@@ -5,34 +5,82 @@ require 'spec_helper'
RSpec.describe Banzai::Filter::ImageLinkFilter do
include FilterSpecHelper
- def image(path)
- %(<img src="#{path}" />)
+ let(:path) { '/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg' }
+ let(:context) { {} }
+
+ def image(path, alt: nil, data_src: nil)
+ alt_tag = alt ? %Q{alt="#{alt}"} : ""
+ data_src_tag = data_src ? %Q{data-src="#{data_src}"} : ""
+
+ %(<img src="#{path}" #{alt_tag} #{data_src_tag} />)
end
it 'wraps the image with a link to the image src' do
- doc = filter(image('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg'))
+ doc = filter(image(path), context)
+
expect(doc.at_css('img')['src']).to eq doc.at_css('a')['href']
end
it 'does not wrap a duplicate link' do
- doc = filter(%Q(<a href="/whatever">#{image('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg')}</a>))
+ doc = filter(%Q(<a href="/whatever">#{image(path)}</a>), context)
+
expect(doc.to_html).to match %r{^<a href="/whatever"><img[^>]*></a>$}
end
it 'works with external images' do
- doc = filter(image('https://i.imgur.com/DfssX9C.jpg'))
+ doc = filter(image('https://i.imgur.com/DfssX9C.jpg'), context)
+
expect(doc.at_css('img')['src']).to eq doc.at_css('a')['href']
end
it 'works with inline images' do
- doc = filter(%Q(<p>test #{image('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg')} inline</p>))
+ doc = filter(%Q(<p>test #{image(path)} inline</p>), context)
+
expect(doc.to_html).to match %r{^<p>test <a[^>]*><img[^>]*></a> inline</p>$}
end
it 'keeps the data-canonical-src' do
- doc = filter(%q(<img src="http://assets.example.com/6cd/4d7" data-canonical-src="http://example.com/test.png" />))
+ doc = filter(%q(<img src="http://assets.example.com/6cd/4d7" data-canonical-src="http://example.com/test.png" />), context)
expect(doc.at_css('img')['src']).to eq doc.at_css('a')['href']
expect(doc.at_css('img')['data-canonical-src']).to eq doc.at_css('a')['data-canonical-src']
end
+
+ it 'adds no-attachment icon class to the link' do
+ doc = filter(image(path), context)
+
+ expect(doc.at_css('a')['class']).to match(%r{no-attachment-icon})
+ end
+
+ context 'when :link_replaces_image is true' do
+ let(:context) { { link_replaces_image: true } }
+
+ it 'replaces the image with link to image src', :aggregate_failures do
+ doc = filter(image(path), context)
+
+ expect(doc.to_html).to match(%r{^<a[^>]*>#{path}</a>$})
+ expect(doc.at_css('a')['href']).to eq(path)
+ end
+
+ it 'uses the image alt as the link text', :aggregate_failures do
+ doc = filter(image(path, alt: 'My image'), context)
+
+ expect(doc.to_html).to match(%r{^<a[^>]*>My image</a>$})
+ expect(doc.at_css('a')['href']).to eq(path)
+ end
+
+ it 'uses the image data-src as the link text', :aggregate_failures do
+ data_src = '/uploads/data-src.png'
+ doc = filter(image(path, data_src: data_src), context)
+
+ expect(doc.to_html).to match(%r{^<a[^>]*>#{data_src}</a>$})
+ expect(doc.at_css('a')['href']).to eq(data_src)
+ end
+
+ it 'adds attachment icon class to the link' do
+ doc = filter(image(path), context)
+
+ expect(doc.at_css('a')['class']).to match(%r{with-attachment-icon})
+ end
+ end
end
diff --git a/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb b/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb
index 0840ccf19e4..ef23725c790 100644
--- a/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb
+++ b/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe Banzai::Filter::IssuableReferenceExpansionFilter do
end
it 'ignores non-issuable links' do
- link = create_link('text', project: project, reference_type: 'issue')
+ link = create_link('text', project: project.id, reference_type: 'issue')
doc = filter(link, context)
expect(doc.css('a').last.text).to eq('text')
diff --git a/spec/lib/banzai/filter/reference_redactor_filter_spec.rb b/spec/lib/banzai/filter/reference_redactor_filter_spec.rb
index d0336e9e059..a2f34d42814 100644
--- a/spec/lib/banzai/filter/reference_redactor_filter_spec.rb
+++ b/spec/lib/banzai/filter/reference_redactor_filter_spec.rb
@@ -3,7 +3,6 @@
require 'spec_helper'
RSpec.describe Banzai::Filter::ReferenceRedactorFilter do
- include ActionView::Helpers::UrlHelper
include FilterSpecHelper
it 'ignores non-GFM links' do
@@ -14,7 +13,7 @@ RSpec.describe Banzai::Filter::ReferenceRedactorFilter do
end
def reference_link(data)
- link_to('text', '', class: 'gfm', data: data)
+ ActionController::Base.helpers.link_to('text', '', class: 'gfm', data: data)
end
it 'skips when the skip_redaction flag is set' do
diff --git a/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
index d7bcebbbe34..2e811d35662 100644
--- a/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
@@ -256,4 +256,23 @@ RSpec.describe Banzai::Filter::References::ExternalIssueReferenceFilter do
it_behaves_like "external issue tracker"
end
end
+
+ context 'checking N+1' do
+ let_it_be(:integration) { create(:redmine_integration, project: project) }
+ let_it_be(:issue1) { ExternalIssue.new("#123", project) }
+ let_it_be(:issue2) { ExternalIssue.new("YT-123", project) }
+
+ before do
+ project.update!(issues_enabled: false)
+ end
+
+ it 'does not have N+1 queries for multiple references per project', :use_sql_query_cache do
+ single_reference = "External Issue #{issue1.to_reference}"
+ multiple_references = "External Issues #{issue1.to_reference} and #{issue2.to_reference}"
+
+ control_count = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }.count
+
+ expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control_count)
+ end
+ end
end
diff --git a/spec/lib/banzai/filter/references/label_reference_filter_spec.rb b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
index b18d68c8dd4..c342a831d62 100644
--- a/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
@@ -277,7 +277,7 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter do
end
context 'References with html entities' do
- let!(:label) { create(:label, name: '&lt;html&gt;', project: project) }
+ let!(:label) { create(:label, title: '&lt;html&gt;', project: project) }
it 'links to a valid reference' do
doc = reference_filter('See ~"&lt;html&gt;"')
diff --git a/spec/lib/banzai/filter/task_list_filter_spec.rb b/spec/lib/banzai/filter/task_list_filter_spec.rb
new file mode 100644
index 00000000000..c89acd1a643
--- /dev/null
+++ b/spec/lib/banzai/filter/task_list_filter_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Banzai::Filter::TaskListFilter do
+ include FilterSpecHelper
+
+ it 'adds `<task-button></task-button>` to every list item' do
+ doc = filter("<ul data-sourcepos=\"1:1-2:20\">\n<li data-sourcepos=\"1:1-1:20\">[ ] testing item 1</li>\n<li data-sourcepos=\"2:1-2:20\">[x] testing item 2</li>\n</ul>")
+
+ expect(doc.xpath('.//li//task-button').count).to eq(2)
+ end
+end
diff --git a/spec/lib/banzai/reference_redactor_spec.rb b/spec/lib/banzai/reference_redactor_spec.rb
index 78cceedd0e5..45e14032a98 100644
--- a/spec/lib/banzai/reference_redactor_spec.rb
+++ b/spec/lib/banzai/reference_redactor_spec.rb
@@ -106,13 +106,12 @@ RSpec.describe Banzai::ReferenceRedactor do
end
context 'when the user cannot read cross project' do
- include ActionView::Helpers::UrlHelper
let(:project) { create(:project) }
let(:other_project) { create(:project, :public) }
def create_link(issuable)
type = issuable.class.name.underscore.downcase
- link_to(issuable.to_reference, '',
+ ActionController::Base.helpers.link_to(issuable.to_reference, '',
class: 'gfm has-tooltip',
title: issuable.title,
data: {
diff --git a/spec/lib/bulk_imports/clients/http_spec.rb b/spec/lib/bulk_imports/clients/http_spec.rb
index 1bbc96af8ee..c9730e03311 100644
--- a/spec/lib/bulk_imports/clients/http_spec.rb
+++ b/spec/lib/bulk_imports/clients/http_spec.rb
@@ -38,11 +38,11 @@ RSpec.describe BulkImports::Clients::HTTP do
context 'when response is not success' do
it 'raises BulkImports::Error' do
- response_double = double(code: 503, success?: false, request: double(path: double(path: '/test')))
+ response_double = double(code: 503, success?: false, parsed_response: 'Error', request: double(path: double(path: '/test')))
allow(Gitlab::HTTP).to receive(method).and_return(response_double)
- expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::NetworkError, 'Unsuccessful response 503 from /test')
+ expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::NetworkError, 'Unsuccessful response 503 from /test. Body: Error')
end
end
end
diff --git a/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb
index 48db24def48..ac516418ce8 100644
--- a/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe BulkImports::Common::Pipelines::LabelsPipeline do
expect(label.title).to eq('Label 1')
expect(label.description).to eq('Label 1')
- expect(label.color).to eq('#6699cc')
+ expect(label.color).to be_color('#6699cc')
expect(File.directory?(tmpdir)).to eq(false)
end
end
diff --git a/spec/lib/container_registry/client_spec.rb b/spec/lib/container_registry/client_spec.rb
index 974c3478ddc..39a594eba5c 100644
--- a/spec/lib/container_registry/client_spec.rb
+++ b/spec/lib/container_registry/client_spec.rb
@@ -168,24 +168,100 @@ RSpec.describe ContainerRegistry::Client do
expect(subject).to eq('Blob')
end
- it 'follows 307 redirect for GET /v2/:name/blobs/:digest' do
- stub_request(method, url)
- .with(headers: blob_headers)
- .to_return(status: 307, body: '', headers: { Location: 'http://redirected' })
- # We should probably use hash_excluding here, but that requires an update to WebMock:
- # https://github.com/bblimke/webmock/blob/master/lib/webmock/matchers/hash_excluding_matcher.rb
- stub_request(:get, "http://redirected/")
- .with(headers: redirect_header) do |request|
- !request.headers.include?('Authorization')
+ context 'with a 307 redirect' do
+ let(:redirect_location) { 'http://redirected' }
+
+ before do
+ stub_request(method, url)
+ .with(headers: blob_headers)
+ .to_return(status: 307, body: '', headers: { Location: redirect_location })
+
+ # We should probably use hash_excluding here, but that requires an update to WebMock:
+ # https://github.com/bblimke/webmock/blob/master/lib/webmock/matchers/hash_excluding_matcher.rb
+ stub_request(:get, redirect_location)
+ .with(headers: redirect_header) do |request|
+ !request.headers.include?('Authorization')
+ end
+ .to_return(status: 200, body: "Successfully redirected")
+ end
+
+ shared_examples 'handling redirects' do
+ it 'follows the redirect' do
+ expect(Faraday::Utils).not_to receive(:escape).with('signature=')
+ expect_new_faraday
+ expect(subject).to eq('Successfully redirected')
+ end
+ end
+
+ it_behaves_like 'handling redirects'
+
+ context 'with a redirect location with params ending with =' do
+ let(:redirect_location) { 'http://redirect?foo=bar&test=signature=' }
+
+ it_behaves_like 'handling redirects'
+
+ context 'with container_registry_follow_redirects_middleware disabled' do
+ before do
+ stub_feature_flags(container_registry_follow_redirects_middleware: false)
+ end
+
+ it 'follows the redirect' do
+ expect(Faraday::Utils).to receive(:escape).with('foo').and_call_original
+ expect(Faraday::Utils).to receive(:escape).with('bar').and_call_original
+ expect(Faraday::Utils).to receive(:escape).with('test').and_call_original
+ expect(Faraday::Utils).to receive(:escape).with('signature=').and_call_original
+
+ expect_new_faraday(times: 2)
+ expect(subject).to eq('Successfully redirected')
+ end
end
- .to_return(status: 200, body: "Successfully redirected")
+ end
- expect_new_faraday(times: 2)
+ context 'with a redirect location with params ending with %3D' do
+ let(:redirect_location) { 'http://redirect?foo=bar&test=signature%3D' }
- expect(subject).to eq('Successfully redirected')
+ it_behaves_like 'handling redirects'
+
+ context 'with container_registry_follow_redirects_middleware disabled' do
+ before do
+ stub_feature_flags(container_registry_follow_redirects_middleware: false)
+ end
+
+ it 'follows the redirect' do
+ expect(Faraday::Utils).to receive(:escape).with('foo').and_call_original
+ expect(Faraday::Utils).to receive(:escape).with('bar').and_call_original
+ expect(Faraday::Utils).to receive(:escape).with('test').and_call_original
+ expect(Faraday::Utils).to receive(:escape).with('signature=').and_call_original
+
+ expect_new_faraday(times: 2)
+ expect(subject).to eq('Successfully redirected')
+ end
+ end
+ end
end
it_behaves_like 'handling timeouts'
+
+ # TODO Remove this context along with the
+ # container_registry_follow_redirects_middleware feature flag
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/353291
+ context 'faraday blob' do
+ subject { client.send(:faraday_blob) }
+
+ it 'has a follow redirects middleware' do
+ expect(subject.builder.handlers).to include(::FaradayMiddleware::FollowRedirects)
+ end
+
+ context 'with container_registry_follow_redirects_middleware disabled' do
+ before do
+ stub_feature_flags(container_registry_follow_redirects_middleware: false)
+ end
+
+ it 'does not have a follow redirects middleware' do
+ expect(subject.builder.handlers).not_to include(::FaradayMiddleware::FollowRedirects)
+ end
+ end
+ end
end
describe '#upload_blob' do
diff --git a/spec/lib/container_registry/gitlab_api_client_spec.rb b/spec/lib/container_registry/gitlab_api_client_spec.rb
index 292582a8d83..4fe229024e5 100644
--- a/spec/lib/container_registry/gitlab_api_client_spec.rb
+++ b/spec/lib/container_registry/gitlab_api_client_spec.rb
@@ -6,8 +6,11 @@ RSpec.describe ContainerRegistry::GitlabApiClient do
using RSpec::Parameterized::TableSyntax
include_context 'container registry client'
+ include_context 'container registry client stubs'
let(:path) { 'namespace/path/to/repository' }
+ let(:import_token) { 'import_token' }
+ let(:options) { { token: token, import_token: import_token } }
describe '#supports_gitlab_api?' do
subject { client.supports_gitlab_api? }
@@ -121,6 +124,40 @@ RSpec.describe ContainerRegistry::GitlabApiClient do
end
end
+ describe '#repository_details' do
+ let(:path) { 'namespace/path/to/repository' }
+ let(:response) { { foo: :bar, this: :is_a_test } }
+ let(:with_size) { true }
+
+ subject { client.repository_details(path, with_size: with_size) }
+
+ context 'with size' do
+ before do
+ stub_repository_details(path, with_size: with_size, respond_with: response)
+ end
+
+ it { is_expected.to eq(response.stringify_keys.deep_transform_values(&:to_s)) }
+ end
+
+ context 'without size' do
+ let(:with_size) { false }
+
+ before do
+ stub_repository_details(path, with_size: with_size, respond_with: response)
+ end
+
+ it { is_expected.to eq(response.stringify_keys.deep_transform_values(&:to_s)) }
+ end
+
+ context 'with a non-successful response' do
+ before do
+ stub_repository_details(path, with_size: with_size, status_code: 404)
+ end
+
+ it { is_expected.to eq({}) }
+ end
+ end
+
describe '.supports_gitlab_api?' do
subject { described_class.supports_gitlab_api? }
@@ -180,8 +217,9 @@ RSpec.describe ContainerRegistry::GitlabApiClient do
end
def stub_pre_import(path, status_code, pre:)
- stub_request(:put, "#{registry_api_url}/gitlab/v1/import/#{path}/?pre=#{pre}")
- .with(headers: { 'Accept' => described_class::JSON_TYPE })
+ import_type = pre ? 'pre' : 'final'
+ stub_request(:put, "#{registry_api_url}/gitlab/v1/import/#{path}/?import_type=#{import_type}")
+ .with(headers: { 'Accept' => described_class::JSON_TYPE, 'Authorization' => "bearer #{import_token}" })
.to_return(status: status_code, body: '')
end
@@ -194,11 +232,19 @@ RSpec.describe ContainerRegistry::GitlabApiClient do
def stub_import_status(path, status)
stub_request(:get, "#{registry_api_url}/gitlab/v1/import/#{path}/")
- .with(headers: { 'Accept' => described_class::JSON_TYPE })
+ .with(headers: { 'Accept' => described_class::JSON_TYPE, 'Authorization' => "bearer #{import_token}" })
.to_return(
status: 200,
body: { status: status }.to_json,
headers: { content_type: 'application/json' }
)
end
+
+ def stub_repository_details(path, with_size: true, status_code: 200, respond_with: {})
+ url = "#{registry_api_url}/gitlab/v1/repositories/#{path}/"
+ url += "?size=self" if with_size
+ stub_request(:get, url)
+ .with(headers: { 'Accept' => described_class::JSON_TYPE, 'Authorization' => "bearer #{token}" })
+ .to_return(status: status_code, body: respond_with.to_json, headers: { 'Content-Type' => described_class::JSON_TYPE })
+ end
end
diff --git a/spec/lib/container_registry/registry_spec.rb b/spec/lib/container_registry/registry_spec.rb
index c690d96b4f5..86231df5fdb 100644
--- a/spec/lib/container_registry/registry_spec.rb
+++ b/spec/lib/container_registry/registry_spec.rb
@@ -4,10 +4,15 @@ require 'spec_helper'
RSpec.describe ContainerRegistry::Registry do
let(:path) { nil }
- let(:registry) { described_class.new('http://example.com', path: path) }
+ let(:registry_api_url) { 'http://example.com' }
+ let(:registry) { described_class.new(registry_api_url, path: path) }
subject { registry }
+ before do
+ stub_container_registry_config(enabled: true, api_url: registry_api_url, key: 'spec/fixtures/x509_certificate_pk.key')
+ end
+
it { is_expected.to respond_to(:client) }
it { is_expected.to respond_to(:uri) }
it { is_expected.to respond_to(:path) }
diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb
index 5080d21d564..90c0684f8b7 100644
--- a/spec/lib/feature_spec.rb
+++ b/spec/lib/feature_spec.rb
@@ -257,7 +257,7 @@ RSpec.describe Feature, stub_feature_flags: false do
end
it 'caches the status in L2 cache after 2 minutes' do
- Timecop.travel 2.minutes do
+ travel_to 2.minutes.from_now do
expect do
expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original
expect(described_class.send(:l2_cache_backend)).to receive(:fetch).once.and_call_original
@@ -267,7 +267,7 @@ RSpec.describe Feature, stub_feature_flags: false do
end
it 'fetches the status after an hour' do
- Timecop.travel 61.minutes do
+ travel_to 61.minutes.from_now do
expect do
expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original
expect(described_class.send(:l2_cache_backend)).to receive(:fetch).once.and_call_original
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb
index 14768025932..b4aa843bcd7 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb
@@ -30,11 +30,11 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Median do
merge_request1 = create(:merge_request, source_branch: '1', target_project: project, source_project: project)
merge_request2 = create(:merge_request, source_branch: '2', target_project: project, source_project: project)
- Timecop.travel(5.minutes.from_now) do
+ travel_to(5.minutes.from_now) do
merge_request1.metrics.update!(merged_at: Time.zone.now)
end
- Timecop.travel(10.minutes.from_now) do
+ travel_to(10.minutes.from_now) do
merge_request2.metrics.update!(merged_at: Time.zone.now)
end
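The hunks above swap Timecop.travel for ActiveSupport's travel_to test helper; a minimal sketch of that helper on its own (assumes only the activesupport gem):

require 'active_support/all'
require 'active_support/testing/time_helpers'
include ActiveSupport::Testing::TimeHelpers

travel_to(5.minutes.from_now) do
  # Time.now, Time.current and Date.today are shifted inside the block only.
  puts Time.current
end
# The original clock is restored automatically; no explicit reset call is needed.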
diff --git a/spec/lib/gitlab/auth/ldap/access_spec.rb b/spec/lib/gitlab/auth/ldap/access_spec.rb
index 9e269f84b7e..1fcdd678746 100644
--- a/spec/lib/gitlab/auth/ldap/access_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/access_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Auth::Ldap::Access do
include LdapHelpers
- let(:user) { create(:omniauth_user) }
+ let(:user) { create(:omniauth_user, :ldap) }
subject(:access) { described_class.new(user) }
diff --git a/spec/lib/gitlab/auth/ldap/authentication_spec.rb b/spec/lib/gitlab/auth/ldap/authentication_spec.rb
index 42a893417d8..4b0e21da6c6 100644
--- a/spec/lib/gitlab/auth/ldap/authentication_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/authentication_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Auth::Ldap::Authentication do
let(:dn) { 'uid=John Smith, ou=People, dc=example, dc=com' }
- let(:user) { create(:omniauth_user, extern_uid: Gitlab::Auth::Ldap::Person.normalize_dn(dn)) }
+ let(:user) { create(:omniauth_user, :ldap, extern_uid: Gitlab::Auth::Ldap::Person.normalize_dn(dn)) }
let(:login) { 'john' }
let(:password) { 'password' }
diff --git a/spec/lib/gitlab/auth/o_auth/provider_spec.rb b/spec/lib/gitlab/auth/o_auth/provider_spec.rb
index 57f17365190..c1b96819176 100644
--- a/spec/lib/gitlab/auth/o_auth/provider_spec.rb
+++ b/spec/lib/gitlab/auth/o_auth/provider_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe Gitlab::Auth::OAuth::Provider do
context 'for an OmniAuth provider' do
before do
- provider = OpenStruct.new(
+ provider = ActiveSupport::InheritableOptions.new(
name: 'google_oauth2',
app_id: 'asd123',
app_secret: 'asd123'
@@ -74,7 +74,7 @@ RSpec.describe Gitlab::Auth::OAuth::Provider do
subject { described_class.config_for('google_oauth2') }
it 'returns the config' do
- expect(subject).to be_a(OpenStruct)
+ expect(subject).to be_a(ActiveSupport::InheritableOptions)
end
it 'merges defaults with the given configuration' do
@@ -98,7 +98,7 @@ RSpec.describe Gitlab::Auth::OAuth::Provider do
context 'when configuration specifies a custom label' do
let(:name) { 'google_oauth2' }
let(:label) { 'Custom Google Provider' }
- let(:provider) { OpenStruct.new({ 'name' => name, 'label' => label }) }
+ let(:provider) { ActiveSupport::InheritableOptions.new(name: name, label: label) }
before do
stub_omniauth_setting(providers: [provider])
@@ -110,7 +110,7 @@ RSpec.describe Gitlab::Auth::OAuth::Provider do
end
context 'when configuration does not specify a custom label' do
- let(:provider) { OpenStruct.new({ 'name' => name } ) }
+ let(:provider) { ActiveSupport::InheritableOptions.new(name: name) }
before do
stub_omniauth_setting(providers: [provider])
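The OpenStruct replacement above works because ActiveSupport::InheritableOptions offers the same method-style readers over a hash; a small sketch (assumes only the activesupport gem, values are illustrative):

require 'active_support/ordered_options'

provider = ActiveSupport::InheritableOptions.new(name: 'google_oauth2', label: 'Custom Google Provider')
provider.name      # => "google_oauth2"
provider[:label]   # => "Custom Google Provider"
provider.missing   # => nil, mirroring OpenStruct's behaviour for unset fields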
diff --git a/spec/lib/gitlab/auth/o_auth/user_spec.rb b/spec/lib/gitlab/auth/o_auth/user_spec.rb
index 8d36507ec7a..1a9e2f02de6 100644
--- a/spec/lib/gitlab/auth/o_auth/user_spec.rb
+++ b/spec/lib/gitlab/auth/o_auth/user_spec.rb
@@ -577,28 +577,66 @@ RSpec.describe Gitlab::Auth::OAuth::User do
stub_omniauth_config(allow_single_sign_on: ['twitter'])
end
- context 'signup with omniauth only' do
- context 'dont block on create' do
- before do
- stub_omniauth_config(block_auto_created_users: false)
+ shared_examples 'being blocked on creation' do
+ context 'when blocking on creation' do
+ it 'creates a blocked user' do
+ oauth_user.save # rubocop:disable Rails/SaveBang
+ expect(gl_user).to be_valid
+ expect(gl_user).to be_blocked
end
- it do
+ context 'when a sign up user cap has been set up but has not been reached yet' do
+ it 'still creates a blocked user' do
+ stub_application_setting(new_user_signups_cap: 999)
+
+ oauth_user.save # rubocop:disable Rails/SaveBang
+ expect(gl_user).to be_valid
+ expect(gl_user).to be_blocked
+ end
+ end
+ end
+ end
+
+ shared_examples 'not being blocked on creation' do
+ context 'when not blocking on creation' do
+ it 'creates a non-blocked user' do
oauth_user.save # rubocop:disable Rails/SaveBang
expect(gl_user).to be_valid
expect(gl_user).not_to be_blocked
end
end
+ end
+
+ context 'signup with SAML' do
+ let(:provider) { 'saml' }
+
+ before do
+ stub_omniauth_config({
+ allow_single_sign_on: ['saml'],
+ auto_link_saml_user: true,
+ block_auto_created_users: block_auto_created_users
+ })
+ end
+
+ it_behaves_like 'being blocked on creation' do
+ let(:block_auto_created_users) { true }
+ end
+
+ it_behaves_like 'not being blocked on creation' do
+ let(:block_auto_created_users) { false }
+ end
+ end
- context 'block on create' do
+ context 'signup with omniauth only' do
+ it_behaves_like 'being blocked on creation' do
before do
stub_omniauth_config(block_auto_created_users: true)
end
+ end
- it do
- oauth_user.save # rubocop:disable Rails/SaveBang
- expect(gl_user).to be_valid
- expect(gl_user).to be_blocked
+ it_behaves_like 'not being blocked on creation' do
+ before do
+ stub_omniauth_config(block_auto_created_users: false)
end
end
end
@@ -614,64 +652,40 @@ RSpec.describe Gitlab::Auth::OAuth::User do
end
context "and no account for the LDAP user" do
- context 'dont block on create (LDAP)' do
+ it_behaves_like 'being blocked on creation' do
before do
allow_next_instance_of(Gitlab::Auth::Ldap::Config) do |instance|
- allow(instance).to receive_messages(block_auto_created_users: false)
+ allow(instance).to receive_messages(block_auto_created_users: true)
end
end
-
- it do
- oauth_user.save # rubocop:disable Rails/SaveBang
- expect(gl_user).to be_valid
- expect(gl_user).not_to be_blocked
- end
end
- context 'block on create (LDAP)' do
+ it_behaves_like 'not being blocked on creation' do
before do
allow_next_instance_of(Gitlab::Auth::Ldap::Config) do |instance|
- allow(instance).to receive_messages(block_auto_created_users: true)
+ allow(instance).to receive_messages(block_auto_created_users: false)
end
end
-
- it do
- oauth_user.save # rubocop:disable Rails/SaveBang
- expect(gl_user).to be_valid
- expect(gl_user).to be_blocked
- end
end
end
context 'and LDAP user has an account already' do
let!(:existing_user) { create(:omniauth_user, email: 'john@example.com', extern_uid: dn, provider: 'ldapmain', username: 'john') }
- context 'dont block on create (LDAP)' do
+ it_behaves_like 'not being blocked on creation' do
before do
allow_next_instance_of(Gitlab::Auth::Ldap::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: false)
end
end
-
- it do
- oauth_user.save # rubocop:disable Rails/SaveBang
- expect(gl_user).to be_valid
- expect(gl_user).not_to be_blocked
- end
end
- context 'block on create (LDAP)' do
+ it_behaves_like 'not being blocked on creation' do
before do
allow_next_instance_of(Gitlab::Auth::Ldap::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: true)
end
end
-
- it do
- oauth_user.save # rubocop:disable Rails/SaveBang
- expect(gl_user).to be_valid
- expect(gl_user).not_to be_blocked
- end
end
end
end
@@ -682,56 +696,32 @@ RSpec.describe Gitlab::Auth::OAuth::User do
oauth_user.gl_user.activate
end
- context 'dont block on create' do
+ it_behaves_like 'not being blocked on creation' do
before do
stub_omniauth_config(block_auto_created_users: false)
end
-
- it do
- oauth_user.save # rubocop:disable Rails/SaveBang
- expect(gl_user).to be_valid
- expect(gl_user).not_to be_blocked
- end
end
- context 'block on create' do
+ it_behaves_like 'not being blocked on creation' do
before do
stub_omniauth_config(block_auto_created_users: true)
end
-
- it do
- oauth_user.save # rubocop:disable Rails/SaveBang
- expect(gl_user).to be_valid
- expect(gl_user).not_to be_blocked
- end
end
- context 'dont block on create (LDAP)' do
+ it_behaves_like 'not being blocked on creation' do
before do
allow_next_instance_of(Gitlab::Auth::Ldap::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: false)
end
end
-
- it do
- oauth_user.save # rubocop:disable Rails/SaveBang
- expect(gl_user).to be_valid
- expect(gl_user).not_to be_blocked
- end
end
- context 'block on create (LDAP)' do
+ it_behaves_like 'not being blocked on creation' do
before do
allow_next_instance_of(Gitlab::Auth::Ldap::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: true)
end
end
-
- it do
- oauth_user.save # rubocop:disable Rails/SaveBang
- expect(gl_user).to be_valid
- expect(gl_user).not_to be_blocked
- end
end
end
end
@@ -1057,4 +1047,10 @@ RSpec.describe Gitlab::Auth::OAuth::User do
expect(oauth_user.bypass_two_factor?).to be_falsey
end
end
+
+ describe '#protocol_name' do
+ it 'is OAuth' do
+ expect(oauth_user.protocol_name).to eq('OAuth')
+ end
+ end
end
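The refactor above moves the repeated save-and-assert examples into shared example groups; a minimal sketch of the RSpec mechanism it relies on (names and values are illustrative):

RSpec.shared_examples 'a blocked-on-creation policy' do
  # `settings` and `expected_blocked` come from whichever group includes these examples.
  it 'reflects the configured flag' do
    expect(settings[:block_auto_created_users]).to eq(expected_blocked)
  end
end

RSpec.describe 'signup policy' do
  let(:settings) { { block_auto_created_users: blocked } }

  # The block passed to it_behaves_like is evaluated in the nested group,
  # so each inclusion can supply its own `let` definitions or `before` hooks.
  it_behaves_like 'a blocked-on-creation policy' do
    let(:blocked) { true }
    let(:expected_blocked) { true }
  end

  it_behaves_like 'a blocked-on-creation policy' do
    let(:blocked) { false }
    let(:expected_blocked) { false }
  end
end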
diff --git a/spec/lib/gitlab/auth/request_authenticator_spec.rb b/spec/lib/gitlab/auth/request_authenticator_spec.rb
index 5e9d07a8bf7..2bc80edb98c 100644
--- a/spec/lib/gitlab/auth/request_authenticator_spec.rb
+++ b/spec/lib/gitlab/auth/request_authenticator_spec.rb
@@ -44,6 +44,38 @@ RSpec.describe Gitlab::Auth::RequestAuthenticator do
end
end
+ describe '#can_sign_in_bot?' do
+ context 'the user is nil' do
+ it { is_expected.not_to be_can_sign_in_bot(nil) }
+ end
+
+ context 'the user is a bot, but for a web request' do
+ let(:user) { build(:user, :project_bot) }
+
+ it { is_expected.not_to be_can_sign_in_bot(user) }
+ end
+
+ context 'the user is a regular user, for an API request' do
+ let(:user) { build(:user) }
+
+ before do
+ env['SCRIPT_NAME'] = '/api/some_resource'
+ end
+
+ it { is_expected.not_to be_can_sign_in_bot(user) }
+ end
+
+ context 'the user is a project bot, for an API request' do
+ let(:user) { build(:user, :project_bot) }
+
+ before do
+ env['SCRIPT_NAME'] = '/api/some_resource'
+ end
+
+ it { is_expected.to be_can_sign_in_bot(user) }
+ end
+ end
+
describe '#find_sessionless_user' do
let_it_be(:dependency_proxy_user) { build(:user) }
let_it_be(:access_token_user) { build(:user) }
diff --git a/spec/lib/gitlab/background_migration/backfill_issue_search_data_spec.rb b/spec/lib/gitlab/background_migration/backfill_issue_search_data_spec.rb
new file mode 100644
index 00000000000..b29d4c3583b
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_issue_search_data_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillIssueSearchData do
+ let(:namespaces_table) { table(:namespaces) }
+ let(:projects_table) { table(:projects) }
+ let(:issue_search_data_table) { table(:issue_search_data) }
+
+ let!(:namespace) { namespaces_table.create!(name: 'gitlab-org', path: 'gitlab-org') }
+ let!(:project) { projects_table.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce', namespace_id: namespace.id) }
+ let!(:issues) { Array.new(10) { table(:issues).create!(project_id: project.id, title: 'test title', description: 'test description') } }
+
+ let(:migration) { described_class.new }
+
+ before do
+ allow(migration).to receive(:sleep)
+ end
+
+ it 'backfills search data for the specified records' do
+ # sleeps for every sub-batch
+ expect(migration).to receive(:sleep).with(0.05).exactly(3).times
+
+ migration.perform(issues[0].id, issues[5].id, :issues, :id, 2, 50)
+
+ expect(issue_search_data_table.count).to eq(6)
+ end
+
+ it 'skips issues that already have search data' do
+ old_time = Time.new(2019, 1, 1).in_time_zone
+ issue_search_data_table.create!(project_id: project.id, issue_id: issues[0].id, updated_at: old_time)
+
+ migration.perform(issues[0].id, issues[5].id, :issues, :id, 2, 50)
+
+ expect(issue_search_data_table.count).to eq(6)
+ expect(issue_search_data_table.find_by_issue_id(issues[0].id).updated_at).to be_like_time(old_time)
+ end
+
+ it 'rescues batch with bad data and inserts other rows' do
+ issues[1].update!(description: Array.new(30_000) { SecureRandom.hex }.join(' '))
+
+ expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |logger|
+ expect(logger).to receive(:error).with(a_hash_including(message: /string is too long for tsvector/, model_id: issues[1].id))
+ end
+
+ expect { migration.perform(issues[0].id, issues[5].id, :issues, :id, 2, 50) }.not_to raise_error
+
+ expect(issue_search_data_table.count).to eq(5)
+ expect(issue_search_data_table.find_by_issue_id(issues[1].id)).to eq(nil)
+ end
+
+ it 're-raises other errors' do
+ allow(migration).to receive(:update_search_data).and_raise(ActiveRecord::StatementTimeout)
+
+ expect { migration.perform(issues[0].id, issues[5].id, :issues, :id, 2, 50) }.to raise_error(ActiveRecord::StatementTimeout)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_member_namespace_for_group_members_spec.rb b/spec/lib/gitlab/background_migration/backfill_member_namespace_for_group_members_spec.rb
new file mode 100644
index 00000000000..e1ef12a1479
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_member_namespace_for_group_members_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillMemberNamespaceForGroupMembers, :migration, schema: 20220120211832 do
+ let(:migration) { described_class.new }
+ let(:members_table) { table(:members) }
+ let(:namespaces_table) { table(:namespaces) }
+
+ let(:table_name) { 'members' }
+ let(:batch_column) { :id }
+ let(:sub_batch_size) { 100 }
+ let(:pause_ms) { 0 }
+
+ subject(:perform_migration) { migration.perform(1, 10, table_name, batch_column, sub_batch_size, pause_ms) }
+
+ before do
+ namespaces_table.create!(id: 100, name: 'test1', path: 'test1', type: 'Group')
+ namespaces_table.create!(id: 101, name: 'test2', path: 'test2', type: 'Group')
+ namespaces_table.create!(id: 102, name: 'test3', path: 'test3', type: 'Group')
+ namespaces_table.create!(id: 201, name: 'test4', path: 'test4', type: 'Project')
+
+ members_table.create!(id: 1, source_id: 100, source_type: 'Namespace', type: 'GroupMember', member_namespace_id: nil, access_level: 10, notification_level: 3)
+ members_table.create!(id: 2, source_id: 101, source_type: 'Namespace', type: 'GroupMember', member_namespace_id: nil, access_level: 10, notification_level: 3)
+ members_table.create!(id: 3, source_id: 102, source_type: 'Namespace', type: 'GroupMember', member_namespace_id: 102, access_level: 10, notification_level: 3)
+ members_table.create!(id: 4, source_id: 103, source_type: 'Project', type: 'ProjectMember', member_namespace_id: nil, access_level: 10, notification_level: 3)
+ members_table.create!(id: 5, source_id: 104, source_type: 'Project', type: 'ProjectMember', member_namespace_id: 201, access_level: 10, notification_level: 3)
+ end
+
+ it 'backfills `member_namespace_id` for the selected records', :aggregate_failures do
+ expect(members_table.where(type: 'GroupMember', member_namespace_id: nil).count).to eq 2
+ expect(members_table.where(type: 'ProjectMember', member_namespace_id: nil).count).to eq 1
+
+ queries = ActiveRecord::QueryRecorder.new do
+ perform_migration
+ end
+
+ expect(queries.count).to eq(3)
+ expect(members_table.where(type: 'GroupMember', member_namespace_id: nil).count).to eq 0
+ expect(members_table.where(type: 'GroupMember').pluck(:member_namespace_id)).to match_array([100, 101, 102])
+ expect(members_table.where(type: 'ProjectMember', member_namespace_id: nil).count).to eq 1
+ expect(members_table.where(type: 'ProjectMember').pluck(:member_namespace_id)).to match_array([nil, 201])
+ end
+
+ it 'tracks timings of queries' do
+ expect(migration.batch_metrics.timings).to be_empty
+
+ expect { perform_migration }.to change { migration.batch_metrics.timings }
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
index d22aa86dbe0..cfa03db52fe 100644
--- a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
@@ -78,6 +78,10 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migrat
end
shared_examples 'migration_bot user commits files' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:default_branch_name).and_return('main')
+ end
+
it do
subject
@@ -89,6 +93,10 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migrat
end
shared_examples 'commits the file to the repository' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:default_branch_name).and_return('main')
+ end
+
context 'when author can update snippet and use git' do
it 'creates the repository and commit the file' do
subject
@@ -269,6 +277,10 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migrat
let!(:snippet) { snippets.create!(id: 5, type: 'PersonalSnippet', author_id: other_user.id, file_name: file_name, content: content) }
let(:ids) { [4, 5] }
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:default_branch_name).and_return('main')
+ end
+
after do
raw_repository(snippet).remove
raw_repository(invalid_snippet).remove
diff --git a/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy_spec.rb b/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy_spec.rb
index 7b8a466b37c..b01dd5b410e 100644
--- a/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy_spec.rb
+++ b/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::BackfillProjectN
let!(:project2) { projects.create!(name: 'project2', path: 'project2', namespace_id: namespace2.id, visibility_level: 20) }
let!(:project3) { projects.create!(name: 'project3', path: 'project3', namespace_id: namespace3.id, visibility_level: 20) }
let!(:project4) { projects.create!(name: 'project4', path: 'project4', namespace_id: namespace3.id, visibility_level: 20) }
- let!(:batching_strategy) { described_class.new }
+ let!(:batching_strategy) { described_class.new(connection: ActiveRecord::Base.connection) }
let(:job_arguments) { [namespace1.id, 'up'] }
diff --git a/spec/lib/gitlab/background_migration/batching_strategies/base_strategy_spec.rb b/spec/lib/gitlab/background_migration/batching_strategies/base_strategy_spec.rb
new file mode 100644
index 00000000000..56ed1f23799
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/batching_strategies/base_strategy_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::BaseStrategy, '#next_batch' do
+ let(:connection) { double(:connection) }
+ let(:base_strategy_class) { Class.new(described_class) }
+ let(:base_strategy) { base_strategy_class.new(connection: connection) }
+
+ describe '#next_batch' do
+ it 'raises an error if not overridden by a subclass' do
+ expect { base_strategy.next_batch }.to raise_error(NotImplementedError, /does not implement next_batch/)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb b/spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb
index 39030039125..4e0ebd4b692 100644
--- a/spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb
+++ b/spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy, '#next_batch' do
- let(:batching_strategy) { described_class.new }
+ let(:batching_strategy) { described_class.new(connection: ActiveRecord::Base.connection) }
let(:namespaces) { table(:namespaces) }
let!(:namespace1) { namespaces.create!(name: 'batchtest1', path: 'batch-test1') }
@@ -11,6 +11,8 @@ RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchi
let!(:namespace3) { namespaces.create!(name: 'batchtest3', path: 'batch-test3') }
let!(:namespace4) { namespaces.create!(name: 'batchtest4', path: 'batch-test4') }
+ it { expect(described_class).to be < Gitlab::BackgroundMigration::BatchingStrategies::BaseStrategy }
+
context 'when starting on the first batch' do
it 'returns the bounds of the next batch' do
batch_bounds = batching_strategy.next_batch(:namespaces, :id, batch_min_value: namespace1.id, batch_size: 3, job_arguments: nil)
diff --git a/spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb b/spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb
new file mode 100644
index 00000000000..7334867e8fb
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::EncryptIntegrationProperties do
+ let(:integrations) do
+ table(:integrations) do |integrations|
+ integrations.send :attr_encrypted, :encrypted_properties_tmp,
+ attribute: :encrypted_properties,
+ mode: :per_attribute_iv,
+ key: ::Settings.attr_encrypted_db_key_base_32,
+ algorithm: 'aes-256-gcm',
+ marshal: true,
+ marshaler: ::Gitlab::Json,
+ encode: false,
+ encode_iv: false
+ end
+ end
+
+ let!(:no_properties) { integrations.create! }
+ let!(:with_plaintext_1) { integrations.create!(properties: json_props(1)) }
+ let!(:with_plaintext_2) { integrations.create!(properties: json_props(2)) }
+ let!(:with_encrypted) do
+ x = integrations.new
+ x.properties = nil
+ x.encrypted_properties_tmp = some_props(3)
+ x.save!
+ x
+ end
+
+ let(:start_id) { integrations.minimum(:id) }
+ let(:end_id) { integrations.maximum(:id) }
+
+ it 'ensures all properties are encrypted', :aggregate_failures do
+ described_class.new.perform(start_id, end_id)
+
+ props = integrations.all.to_h do |record|
+ [record.id, [Gitlab::Json.parse(record.properties), record.encrypted_properties_tmp]]
+ end
+
+ expect(integrations.count).to eq(4)
+
+ expect(props).to match(
+ no_properties.id => both(be_nil),
+ with_plaintext_1.id => both(eq some_props(1)),
+ with_plaintext_2.id => both(eq some_props(2)),
+ with_encrypted.id => match([be_nil, eq(some_props(3))])
+ )
+ end
+
+ private
+
+ def both(obj)
+ match [obj, obj]
+ end
+
+ def some_props(id)
+ HashWithIndifferentAccess.new({ id: id, foo: 1, bar: true, baz: %w[a string array] })
+ end
+
+ def json_props(id)
+ some_props(id).to_json
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/job_coordinator_spec.rb b/spec/lib/gitlab/background_migration/job_coordinator_spec.rb
index 43d41408e66..c1351481505 100644
--- a/spec/lib/gitlab/background_migration/job_coordinator_spec.rb
+++ b/spec/lib/gitlab/background_migration/job_coordinator_spec.rb
@@ -38,13 +38,67 @@ RSpec.describe Gitlab::BackgroundMigration::JobCoordinator do
end
end
+ describe '#pending_jobs' do
+ context 'when there are enqueued jobs' do
+ let(:queue) do
+ [
+ instance_double(Sidekiq::JobRecord, args: [1, 'queue'], klass: worker_class.name),
+ instance_double(Sidekiq::JobRecord, args: [2, 'queue'], klass: worker_class.name)
+ ]
+ end
+
+ let(:queue_incorrect_job_class) do
+ [
+ instance_double(Sidekiq::JobRecord, args: [1, 'queue'], klass: 'SomeOtherClass')
+ ]
+ end
+
+ let(:scheduled_set) do
+ [instance_double(Sidekiq::JobRecord, args: [3, 'scheduled'], klass: worker_class.name)]
+ end
+
+ let(:retry_set) do
+ [instance_double(Sidekiq::JobRecord, args: [4, 'retry'], klass: worker_class.name)]
+ end
+
+ let(:dead_set) do
+ [instance_double(Sidekiq::JobRecord, args: [5, 'dead'], klass: worker_class.name)]
+ end
+
+ before do
+ allow(Sidekiq::Queue).to receive(:new)
+ .with(coordinator.queue)
+ .and_return(queue + queue_incorrect_job_class)
+ allow(Sidekiq::ScheduledSet).to receive(:new).and_return(scheduled_set)
+ allow(Sidekiq::RetrySet).to receive(:new).and_return(retry_set)
+ allow(Sidekiq::DeadSet).to receive(:new).and_return(dead_set)
+ end
+
+ it 'does not include jobs for other workers' do
+ expect(coordinator.pending_jobs).not_to include(queue_incorrect_job_class.first)
+ end
+
+ context 'when not including dead jobs' do
+ it 'includes current and future jobs' do
+ expect(coordinator.pending_jobs(include_dead_jobs: false).to_a).to match_array(queue + scheduled_set)
+ end
+ end
+
+ context 'when including dead jobs' do
+ it 'includes current and future jobs, and also dead and retry jobs' do
+ expect(coordinator.pending_jobs(include_dead_jobs: true).to_a).to match_array(queue + scheduled_set + retry_set + dead_set)
+ end
+ end
+ end
+ end
+
describe '#steal' do
context 'when there are enqueued jobs present' do
let(:queue) do
[
- double(args: ['Foo', [10, 20]], klass: worker_class.name),
- double(args: ['Bar', [20, 30]], klass: worker_class.name),
- double(args: ['Foo', [20, 30]], klass: 'MergeWorker')
+ instance_double(Sidekiq::JobRecord, args: ['Foo', [10, 20]], klass: worker_class.name),
+ instance_double(Sidekiq::JobRecord, args: ['Bar', [20, 30]], klass: worker_class.name),
+ instance_double(Sidekiq::JobRecord, args: ['Foo', [20, 30]], klass: 'MergeWorker')
]
end
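The change from bare double to instance_double above adds method verification against Sidekiq::JobRecord; a small sketch of the difference (assumes the rspec-mocks and sidekiq gems, worker name is illustrative):

require 'sidekiq/api'

RSpec.describe 'verified doubles' do
  it 'only allows methods that exist on the doubled class' do
    job = instance_double(Sidekiq::JobRecord, args: [1, 'queue'], klass: 'MyWorker')

    expect(job.args).to eq([1, 'queue'])
    # Stubbing or calling a method Sidekiq::JobRecord does not define would
    # fail immediately, which a bare `double` silently allows.
  end
end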
diff --git a/spec/lib/gitlab/background_migration/migrate_personal_namespace_project_maintainer_to_owner_spec.rb b/spec/lib/gitlab/background_migration/migrate_personal_namespace_project_maintainer_to_owner_spec.rb
new file mode 100644
index 00000000000..07e77bdbc13
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/migrate_personal_namespace_project_maintainer_to_owner_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::MigratePersonalNamespaceProjectMaintainerToOwner, :migration, schema: 20220208080921 do
+ let(:migration) { described_class.new }
+ let(:users_table) { table(:users) }
+ let(:members_table) { table(:members) }
+ let(:namespaces_table) { table(:namespaces) }
+ let(:projects_table) { table(:projects) }
+
+ let(:table_name) { 'members' }
+ let(:batch_column) { :id }
+ let(:sub_batch_size) { 10 }
+ let(:pause_ms) { 0 }
+
+ let(:owner_access) { 50 }
+ let(:maintainer_access) { 40 }
+ let(:developer_access) { 30 }
+
+ subject(:perform_migration) { migration.perform(1, 10, table_name, batch_column, sub_batch_size, pause_ms) }
+
+ before do
+ users_table.create!(id: 101, name: "user1", email: "user1@example.com", projects_limit: 5)
+ users_table.create!(id: 102, name: "user2", email: "user2@example.com", projects_limit: 5)
+
+ namespaces_table.create!(id: 201, name: 'user1s-namespace', path: 'user1s-namespace-path', type: 'User', owner_id: 101)
+ namespaces_table.create!(id: 202, name: 'user2s-namespace', path: 'user2s-namespace-path', type: 'User', owner_id: 102)
+ namespaces_table.create!(id: 203, name: 'group', path: 'group', type: 'Group')
+ namespaces_table.create!(id: 204, name: 'project-namespace', path: 'project-namespace-path', type: 'Project')
+
+ projects_table.create!(id: 301, name: 'user1-namespace-project', path: 'project-path-1', namespace_id: 201)
+ projects_table.create!(id: 302, name: 'user2-namespace-project', path: 'project-path-2', namespace_id: 202)
+ projects_table.create!(id: 303, name: 'user2s-namespace-project2', path: 'project-path-3', namespace_id: 202)
+ projects_table.create!(id: 304, name: 'group-project3', path: 'group-project-path-3', namespace_id: 203)
+
+ # user1 member of their own namespace project, maintainer access (change)
+ create_project_member(id: 1, user_id: 101, project_id: 301, level: maintainer_access)
+
+ # user2 member of their own namespace project, owner access (no change)
+ create_project_member(id: 2, user_id: 102, project_id: 302, level: owner_access)
+
+ # user1 member of user2's personal namespace project, maintainer access (no change)
+ create_project_member(id: 3, user_id: 101, project_id: 302, level: maintainer_access)
+
+ # user1 member of group project, maintainer access (no change)
+ create_project_member(id: 4, user_id: 101, project_id: 304, level: maintainer_access)
+
+ # user1 member of group, Maintainer role (no change)
+ create_group_member(id: 5, user_id: 101, group_id: 203, level: maintainer_access)
+
+ # user2 member of their own namespace project, maintainer access, but out of batch range (no change)
+ create_project_member(id: 601, user_id: 102, project_id: 303, level: maintainer_access)
+ end
+
+ it 'migrates MAINTAINER membership records for personal namespaces to OWNER', :aggregate_failures do
+ expect(members_table.where(access_level: owner_access).count).to eq 1
+ expect(members_table.where(access_level: maintainer_access).count).to eq 5
+
+ queries = ActiveRecord::QueryRecorder.new do
+ perform_migration
+ end
+
+ expect(queries.count).to eq(3)
+ expect(members_table.where(access_level: owner_access).pluck(:id)).to match_array([1, 2])
+ expect(members_table.where(access_level: maintainer_access).pluck(:id)).to match_array([3, 4, 5, 601])
+ end
+
+ it 'tracks timings of queries' do
+ expect(migration.batch_metrics.timings).to be_empty
+
+ expect { perform_migration }.to change { migration.batch_metrics.timings }
+ end
+
+ def create_group_member(id:, user_id:, group_id:, level:)
+ members_table.create!(id: id, user_id: user_id, source_id: group_id, access_level: level, source_type: "Namespace", type: "GroupMember", notification_level: 3)
+ end
+
+ def create_project_member(id:, user_id:, project_id:, level:)
+ members_table.create!(id: id, user_id: user_id, source_id: project_id, access_level: level, source_type: "Namespace", type: "ProjectMember", notification_level: 3)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb b/spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb
new file mode 100644
index 00000000000..90dd3e14606
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::NullifyOrphanRunnerIdOnCiBuilds, :migration, schema: 20220223112304 do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:ci_runners) { table(:ci_runners) }
+ let(:ci_pipelines) { table(:ci_pipelines) }
+ let(:ci_builds) { table(:ci_builds) }
+
+ subject { described_class.new }
+
+ let(:helpers) do
+ ActiveRecord::Migration.new.extend(Gitlab::Database::MigrationHelpers)
+ end
+
+ before do
+ helpers.remove_foreign_key_if_exists(:ci_builds, column: :runner_id)
+ end
+
+ after do
+ helpers.add_concurrent_foreign_key(:ci_builds, :ci_runners, column: :runner_id, on_delete: :nullify, validate: false)
+ end
+
+ describe '#perform' do
+ let(:namespace) { namespaces.create!(name: 'test', path: 'test', type: 'Group') }
+ let(:project) { projects.create!(namespace_id: namespace.id, name: 'test') }
+ let(:pipeline) { ci_pipelines.create!(project_id: project.id, ref: 'master', sha: 'adf43c3a', status: 'success') }
+
+ it 'nullifies runner_id for orphan ci_builds in range' do
+ ci_runners.create!(id: 2, runner_type: 'project_type')
+
+ ci_builds.create!(id: 5, type: 'Ci::Build', commit_id: pipeline.id, runner_id: 2)
+ ci_builds.create!(id: 7, type: 'Ci::Build', commit_id: pipeline.id, runner_id: 4)
+ ci_builds.create!(id: 8, type: 'Ci::Build', commit_id: pipeline.id, runner_id: 5)
+ ci_builds.create!(id: 9, type: 'Ci::Build', commit_id: pipeline.id, runner_id: 6)
+
+ subject.perform(4, 8, :ci_builds, :id, 10, 0)
+
+ expect(ci_builds.all).to contain_exactly(
+ an_object_having_attributes(id: 5, runner_id: 2),
+ an_object_having_attributes(id: 7, runner_id: nil),
+ an_object_having_attributes(id: 8, runner_id: nil),
+ an_object_having_attributes(id: 9, runner_id: 6)
+ )
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb b/spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb
new file mode 100644
index 00000000000..8cdcec9621c
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::RemoveAllTraceExpirationDates, :migration, schema: 20220131000001 do
+ subject(:perform) { migration.perform(1, 99) }
+
+ let(:migration) { described_class.new }
+
+ let(:trace_in_range) { create_trace!(id: 10, created_at: Date.new(2020, 06, 20), expire_at: Date.new(2021, 01, 22)) }
+ let(:trace_outside_range) { create_trace!(id: 40, created_at: Date.new(2020, 06, 22), expire_at: Date.new(2021, 01, 22)) }
+ let(:trace_without_expiry) { create_trace!(id: 30, created_at: Date.new(2020, 06, 21), expire_at: nil) }
+ let(:archive_in_range) { create_archive!(id: 10, created_at: Date.new(2020, 06, 20), expire_at: Date.new(2021, 01, 22)) }
+ let(:trace_outside_id_range) { create_trace!(id: 100, created_at: Date.new(2020, 06, 20), expire_at: Date.new(2021, 01, 22)) }
+
+ before do
+ table(:namespaces).create!(id: 1, name: 'the-namespace', path: 'the-path')
+ table(:projects).create!(id: 1, name: 'the-project', namespace_id: 1)
+ table(:ci_builds).create!(id: 1, allow_failure: false)
+ end
+
+ context 'for self-hosted instances' do
+ it 'sets expire_at for artifacts in range to nil' do
+ expect { perform }.not_to change { trace_in_range.reload.expire_at }
+ end
+
+ it 'does not change expire_at timestamps that are not set to midnight' do
+ expect { perform }.not_to change { trace_outside_range.reload.expire_at }
+ end
+
+ it 'does not change expire_at timestamps that are set to midnight on a day other than the 22nd' do
+ expect { perform }.not_to change { trace_without_expiry.reload.expire_at }
+ end
+
+ it 'does not touch artifacts outside id range' do
+ expect { perform }.not_to change { archive_in_range.reload.expire_at }
+ end
+
+ it 'does not touch artifacts outside date range' do
+ expect { perform }.not_to change { trace_outside_id_range.reload.expire_at }
+ end
+ end
+
+ private
+
+ def create_trace!(**args)
+ table(:ci_job_artifacts).create!(**args, project_id: 1, job_id: 1, file_type: 3)
+ end
+
+ def create_archive!(**args)
+ table(:ci_job_artifacts).create!(**args, project_id: 1, job_id: 1, file_type: 1)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_on_projects_spec.rb b/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_on_projects_spec.rb
new file mode 100644
index 00000000000..6aea549b136
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_on_projects_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::ResetDuplicateCiRunnersTokenEncryptedValuesOnProjects do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+
+ let(:perform) { described_class.new.perform(1, 4) }
+
+ before do
+ namespaces.create!(id: 123, name: 'sample', path: 'sample')
+
+ projects.create!(id: 1, namespace_id: 123, runners_token_encrypted: 'duplicate')
+ projects.create!(id: 2, namespace_id: 123, runners_token_encrypted: 'a-runners-token')
+ projects.create!(id: 3, namespace_id: 123, runners_token_encrypted: 'duplicate')
+ projects.create!(id: 4, namespace_id: 123, runners_token_encrypted: nil)
+ projects.create!(id: 5, namespace_id: 123, runners_token_encrypted: 'duplicate-2')
+ projects.create!(id: 6, namespace_id: 123, runners_token_encrypted: 'duplicate-2')
+ end
+
+ describe '#up' do
+ before do
+ stub_const("#{described_class}::SUB_BATCH_SIZE", 2)
+ end
+
+ it 'nullifies duplicate tokens', :aggregate_failures do
+ perform
+
+ expect(projects.count).to eq(6)
+ expect(projects.all.pluck(:id, :runners_token_encrypted).to_h).to eq(
+ { 1 => nil, 2 => 'a-runners-token', 3 => nil, 4 => nil, 5 => 'duplicate-2', 6 => 'duplicate-2' }
+ )
+ expect(projects.pluck(:runners_token_encrypted).uniq).to match_array [nil, 'a-runners-token', 'duplicate-2']
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_on_projects_spec.rb b/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_on_projects_spec.rb
new file mode 100644
index 00000000000..cbe762c2680
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_on_projects_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::ResetDuplicateCiRunnersTokenValuesOnProjects do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+
+ let(:perform) { described_class.new.perform(1, 4) }
+
+ before do
+ namespaces.create!(id: 123, name: 'sample', path: 'sample')
+
+ projects.create!(id: 1, namespace_id: 123, runners_token: 'duplicate')
+ projects.create!(id: 2, namespace_id: 123, runners_token: 'a-runners-token')
+ projects.create!(id: 3, namespace_id: 123, runners_token: 'duplicate')
+ projects.create!(id: 4, namespace_id: 123, runners_token: nil)
+ projects.create!(id: 5, namespace_id: 123, runners_token: 'duplicate-2')
+ projects.create!(id: 6, namespace_id: 123, runners_token: 'duplicate-2')
+ end
+
+ describe '#up' do
+ before do
+ stub_const("#{described_class}::SUB_BATCH_SIZE", 2)
+ end
+
+ it 'nullifies duplicate tokens', :aggregate_failures do
+ perform
+
+ expect(projects.count).to eq(6)
+ expect(projects.all.pluck(:id, :runners_token).to_h).to eq(
+ { 1 => nil, 2 => 'a-runners-token', 3 => nil, 4 => nil, 5 => 'duplicate-2', 6 => 'duplicate-2' }
+ )
+ expect(projects.pluck(:runners_token).uniq).to match_array [nil, 'a-runners-token', 'duplicate-2']
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/build/policy/refs_spec.rb b/spec/lib/gitlab/ci/build/policy/refs_spec.rb
index 7fd51102d71..2924b175fef 100644
--- a/spec/lib/gitlab/ci/build/policy/refs_spec.rb
+++ b/spec/lib/gitlab/ci/build/policy/refs_spec.rb
@@ -149,26 +149,9 @@ RSpec.describe Gitlab::Ci::Build::Policy::Refs do
context 'when unsafe regexp is used' do
let(:subject) { described_class.new(['/^(?!master).+/']) }
- context 'when allow_unsafe_ruby_regexp is disabled' do
- before do
- stub_feature_flags(allow_unsafe_ruby_regexp: false)
- end
-
- it 'ignores invalid regexp' do
- expect(subject)
- .not_to be_satisfied_by(pipeline)
- end
- end
-
- context 'when allow_unsafe_ruby_regexp is enabled' do
- before do
- stub_feature_flags(allow_unsafe_ruby_regexp: true)
- end
-
- it 'is satisfied by regexp' do
- expect(subject)
- .to be_satisfied_by(pipeline)
- end
+ it 'ignores invalid regexp' do
+ expect(subject)
+ .not_to be_satisfied_by(pipeline)
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
index 62feed3dda0..c56f2d25074 100644
--- a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
@@ -293,6 +293,30 @@ RSpec.describe Gitlab::Ci::Config::Entry::Bridge do
end
end
end
+
+ context 'when bridge trigger contains forward' do
+ let(:config) do
+ { trigger: { project: 'some/project', forward: { pipeline_variables: true } } }
+ end
+
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+
+ describe '#value' do
+ it 'returns a bridge job configuration hash' do
+ expect(subject.value).to eq(name: :my_bridge,
+ trigger: { project: 'some/project',
+ forward: { pipeline_variables: true } },
+ ignore: false,
+ stage: 'test',
+ only: { refs: %w[branches tags] },
+ job_variables: {},
+ root_variables_inheritance: true,
+ scheduling_type: :stage)
+ end
+ end
+ end
end
describe '#manual_action?' do
diff --git a/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb b/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
index e83d4974bb7..6116fbced2b 100644
--- a/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
@@ -59,9 +59,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule do
context 'when using an if: clause with lookahead regex character "?"' do
let(:config) { { if: '$CI_COMMIT_REF =~ /^(?!master).+/' } }
- context 'when allow_unsafe_ruby_regexp is disabled' do
- it_behaves_like 'an invalid config', /invalid expression syntax/
- end
+ it_behaves_like 'an invalid config', /invalid expression syntax/
end
context 'when specifying unknown policy' do
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index 885f3eaff79..97691504abd 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -420,7 +420,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
end
end
- context 'when has dependencies' do
+ context 'when it has dependencies' do
context 'that are not a array of strings' do
let(:config) do
{ script: 'echo', dependencies: 'build-job' }
@@ -433,8 +433,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
end
end
- context 'when has needs' do
- context 'when have dependencies that are not subset of needs' do
+ context 'when the job has needs' do
+ context 'and there are dependencies that are not included in needs' do
let(:config) do
{
stage: 'test',
@@ -448,6 +448,24 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
expect(entry).not_to be_valid
expect(entry.errors).to include 'job dependencies the another-job should be part of needs'
end
+
+ context 'and they are only cross pipeline needs' do
+ let(:config) do
+ {
+ script: 'echo',
+ dependencies: ['rspec'],
+ needs: [{
+ job: 'rspec',
+ pipeline: 'other'
+ }]
+ }
+ end
+
+ it 'adds an error for dependency keyword usage' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include 'job needs corresponding to dependencies must be from the same pipeline'
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/policy_spec.rb b/spec/lib/gitlab/ci/config/entry/policy_spec.rb
index e5de0fb38e3..378c0947e8a 100644
--- a/spec/lib/gitlab/ci/config/entry/policy_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/policy_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
RSpec.describe Gitlab::Ci::Config::Entry::Policy do
let(:entry) { described_class.new(config) }
@@ -45,29 +45,10 @@ RSpec.describe Gitlab::Ci::Config::Entry::Policy do
end
context 'when using unsafe regexp' do
- # When removed we could use `require 'fast_spec_helper'` again.
- include StubFeatureFlags
-
let(:config) { ['/^(?!master).+/'] }
- context 'when allow_unsafe_ruby_regexp is disabled' do
- before do
- stub_feature_flags(allow_unsafe_ruby_regexp: false)
- end
-
- it 'is not valid' do
- expect(entry).not_to be_valid
- end
- end
-
- context 'when allow_unsafe_ruby_regexp is enabled' do
- before do
- stub_feature_flags(allow_unsafe_ruby_regexp: true)
- end
-
- it 'is valid' do
- expect(entry).to be_valid
- end
+ it 'is not valid' do
+ expect(entry).not_to be_valid
end
end
@@ -106,29 +87,10 @@ RSpec.describe Gitlab::Ci::Config::Entry::Policy do
end
context 'when using unsafe regexp' do
- # When removed we could use `require 'fast_spec_helper'` again.
- include StubFeatureFlags
-
let(:config) { { refs: ['/^(?!master).+/'] } }
- context 'when allow_unsafe_ruby_regexp is disabled' do
- before do
- stub_feature_flags(allow_unsafe_ruby_regexp: false)
- end
-
- it 'is not valid' do
- expect(entry).not_to be_valid
- end
- end
-
- context 'when allow_unsafe_ruby_regexp is enabled' do
- before do
- stub_feature_flags(allow_unsafe_ruby_regexp: true)
- end
-
- it 'is valid' do
- expect(entry).to be_valid
- end
+ it 'is not valid' do
+ expect(entry).not_to be_valid
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/reports/coverage_report_spec.rb b/spec/lib/gitlab/ci/config/entry/reports/coverage_report_spec.rb
new file mode 100644
index 00000000000..588f53150ff
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/reports/coverage_report_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Entry::Reports::CoverageReport do
+ let(:entry) { described_class.new(config) }
+
+ describe 'validations' do
+ context 'when it is valid' do
+ let(:config) { { coverage_format: 'cobertura', path: 'cobertura-coverage.xml' } }
+
+ it { expect(entry).to be_valid }
+
+ it { expect(entry.value).to eq(config) }
+ end
+
+ context 'with unsupported coverage format' do
+ let(:config) { { coverage_format: 'jacoco', path: 'jacoco.xml' } }
+
+ it { expect(entry).not_to be_valid }
+
+ it { expect(entry.errors).to include /format must be one of supported formats/ }
+ end
+
+ context 'without coverage format' do
+ let(:config) { { path: 'cobertura-coverage.xml' } }
+
+ it { expect(entry).not_to be_valid }
+
+ it { expect(entry.errors).to include /format can't be blank/ }
+ end
+
+ context 'without path' do
+ let(:config) { { coverage_format: 'cobertura' } }
+
+ it { expect(entry).not_to be_valid }
+
+ it { expect(entry.errors).to include /path can't be blank/ }
+ end
+
+ context 'with invalid path' do
+ let(:config) { { coverage_format: 'cobertura', path: 123 } }
+
+ it { expect(entry).not_to be_valid }
+
+ it { expect(entry.errors).to include /path should be a string/ }
+ end
+
+ context 'with unknown keys' do
+ let(:config) { { coverage_format: 'cobertura', path: 'cobertura-coverage.xml', foo: :bar } }
+
+ it { expect(entry).not_to be_valid }
+
+ it { expect(entry.errors).to include /contains unknown keys/ }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/reports_spec.rb b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
index 12b8960eb32..061d8f34c8d 100644
--- a/spec/lib/gitlab/ci/config/entry/reports_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
@@ -6,12 +6,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Reports do
let(:entry) { described_class.new(config) }
describe 'validates ALLOWED_KEYS' do
- let(:artifact_file_types) { Ci::JobArtifact.file_types }
-
- described_class::ALLOWED_KEYS.each do |keyword, _|
- it "expects #{keyword} to be an artifact file_type" do
- expect(artifact_file_types).to include(keyword)
- end
+ it "expects ALLOWED_KEYS to be an artifact file_type or coverage_report" do
+ expect(Ci::JobArtifact.file_types.keys.map(&:to_sym) + [:coverage_report]).to include(*described_class::ALLOWED_KEYS)
end
end
@@ -68,6 +64,45 @@ RSpec.describe Gitlab::Ci::Config::Entry::Reports do
it_behaves_like 'a valid entry', params[:keyword], params[:file]
end
end
+
+ context 'when coverage_report is specified' do
+ let(:coverage_format) { :cobertura }
+ let(:filename) { 'cobertura-coverage.xml' }
+ let(:coverage_report) { { path: filename, coverage_format: coverage_format } }
+ let(:config) { { coverage_report: coverage_report } }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+
+ it 'returns artifacts configuration' do
+ expect(entry.value).to eq(config)
+ end
+
+ context 'and another report is specified' do
+ let(:config) { { coverage_report: coverage_report, dast: 'gl-dast-report.json' } }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+
+ it 'returns artifacts configuration' do
+ expect(entry.value).to eq({ coverage_report: coverage_report, dast: ['gl-dast-report.json'] })
+ end
+ end
+
+ context 'and a direct coverage report format is specified' do
+ let(:config) { { coverage_report: coverage_report, cobertura: 'cobertura-coverage.xml' } }
+
+ it 'is not valid' do
+ expect(entry).not_to be_valid
+ end
+
+ it 'reports error' do
+ expect(entry.errors).to include /please use only one the following keys: coverage_report, cobertura/
+ end
+ end
+ end
end
context 'when entry value is not correct' do
diff --git a/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb b/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
index d1bd22e5573..86270788431 100644
--- a/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
@@ -92,12 +92,10 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
context 'when using an if: clause with lookahead regex character "?"' do
let(:config) { { if: '$CI_COMMIT_REF =~ /^(?!master).+/' } }
- context 'when allow_unsafe_ruby_regexp is disabled' do
- it { is_expected.not_to be_valid }
+ it { is_expected.not_to be_valid }
- it 'reports an error about invalid expression syntax' do
- expect(subject.errors).to include(/invalid expression syntax/)
- end
+ it 'reports an error about invalid expression syntax' do
+ expect(subject.errors).to include(/invalid expression syntax/)
end
end
@@ -174,13 +172,13 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
end
context 'specifying a delayed job' do
- let(:config) { { if: '$THIS || $THAT', when: 'delayed', start_in: '15 minutes' } }
+ let(:config) { { if: '$THIS || $THAT', when: 'delayed', start_in: '2 days' } }
it { is_expected.to be_valid }
it 'sets attributes for the job delay' do
expect(entry.when).to eq('delayed')
- expect(entry.start_in).to eq('15 minutes')
+ expect(entry.start_in).to eq('2 days')
end
context 'without a when: key' do
@@ -198,10 +196,20 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
it { is_expected.not_to be_valid }
- it 'returns an error about tstart_in being blank' do
+ it 'returns an error about start_in being blank' do
expect(entry.errors).to include(/start in can't be blank/)
end
end
+
+ context 'when start_in value is longer than a week' do
+ let(:config) { { if: '$THIS || $THAT', when: 'delayed', start_in: '2 weeks' } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns an error about start_in exceeding the limit' do
+ expect(entry.errors).to include(/start in should not exceed the limit/)
+ end
+ end
end
context 'when specifying unknown policy' do
diff --git a/spec/lib/gitlab/ci/config/entry/trigger/forward_spec.rb b/spec/lib/gitlab/ci/config/entry/trigger/forward_spec.rb
new file mode 100644
index 00000000000..b47a27c9025
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/trigger/forward_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Entry::Trigger::Forward do
+ subject(:entry) { described_class.new(config) }
+
+ context 'when entry config is correct' do
+ let(:config) do
+ {
+ yaml_variables: false,
+ pipeline_variables: false
+ }
+ end
+
+ it 'returns set values' do
+ expect(entry.value).to eq(yaml_variables: false, pipeline_variables: false)
+ end
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when entry config value is empty' do
+ let(:config) do
+ {}
+ end
+
+ it 'returns empty' do
+ expect(entry.value).to eq({})
+ end
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when entry value is not correct' do
+ context 'invalid attribute' do
+ let(:config) do
+ {
+ xxx_variables: true
+ }
+ end
+
+ it { is_expected.not_to be_valid }
+
+ it 'reports error' do
+ expect(entry.errors).to include 'forward config contains unknown keys: xxx_variables'
+ end
+ end
+
+ context 'non-boolean value' do
+ let(:config) do
+ {
+ yaml_variables: 'okay'
+ }
+ end
+
+ it { is_expected.not_to be_valid }
+
+ it 'reports error' do
+ expect(entry.errors).to include 'forward yaml variables should be a boolean value'
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/trigger_spec.rb b/spec/lib/gitlab/ci/config/entry/trigger_spec.rb
index 5b4289741f3..d0116c961d7 100644
--- a/spec/lib/gitlab/ci/config/entry/trigger_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/trigger_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Trigger do
end
end
- context 'when trigger is a hash' do
+ context 'when trigger is a hash - cross-project' do
context 'when branch is provided' do
let(:config) { { project: 'some/project', branch: 'feature' } }
@@ -82,52 +82,84 @@ RSpec.describe Gitlab::Ci::Config::Entry::Trigger do
end
end
- describe '#include' do
- context 'with simple include' do
- let(:config) { { include: 'path/to/config.yml' } }
+ context 'when config contains unknown keys' do
+ let(:config) { { project: 'some/project', unknown: 123 } }
- it { is_expected.to be_valid }
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
- it 'returns a trigger configuration hash' do
- expect(subject.value).to eq(include: 'path/to/config.yml' )
+ describe '#errors' do
+ it 'returns an error about unknown config key' do
+ expect(subject.errors.first)
+ .to match /config contains unknown keys: unknown/
end
end
+ end
- context 'with project' do
- let(:config) { { project: 'some/project', include: 'path/to/config.yml' } }
+ context 'with forward' do
+ let(:config) { { project: 'some/project', forward: { pipeline_variables: true } } }
- it { is_expected.not_to be_valid }
+ before do
+ subject.compose!
+ end
- it 'returns an error' do
- expect(subject.errors.first)
- .to match /config contains unknown keys: project/
- end
+ it { is_expected.to be_valid }
+
+ it 'returns a trigger configuration hash' do
+ expect(subject.value).to eq(
+ project: 'some/project', forward: { pipeline_variables: true }
+ )
end
+ end
+ end
- context 'with branch' do
- let(:config) { { branch: 'feature', include: 'path/to/config.yml' } }
+ context 'when trigger is a hash - parent-child' do
+ context 'with simple include' do
+ let(:config) { { include: 'path/to/config.yml' } }
- it { is_expected.not_to be_valid }
+ it { is_expected.to be_valid }
- it 'returns an error' do
- expect(subject.errors.first)
- .to match /config contains unknown keys: branch/
- end
+ it 'returns a trigger configuration hash' do
+ expect(subject.value).to eq(include: 'path/to/config.yml' )
end
end
- context 'when config contains unknown keys' do
- let(:config) { { project: 'some/project', unknown: 123 } }
+ context 'with project' do
+ let(:config) { { project: 'some/project', include: 'path/to/config.yml' } }
- describe '#valid?' do
- it { is_expected.not_to be_valid }
+ it { is_expected.not_to be_valid }
+
+ it 'returns an error' do
+ expect(subject.errors.first)
+ .to match /config contains unknown keys: project/
end
+ end
- describe '#errors' do
- it 'returns an error about unknown config key' do
- expect(subject.errors.first)
- .to match /config contains unknown keys: unknown/
- end
+ context 'with branch' do
+ let(:config) { { branch: 'feature', include: 'path/to/config.yml' } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns an error' do
+ expect(subject.errors.first)
+ .to match /config contains unknown keys: branch/
+ end
+ end
+
+ context 'with forward' do
+ let(:config) { { include: 'path/to/config.yml', forward: { yaml_variables: false } } }
+
+ before do
+ subject.compose!
+ end
+
+ it { is_expected.to be_valid }
+
+ it 'returns a trigger configuration hash' do
+ expect(subject.value).to eq(
+ include: 'path/to/config.yml', forward: { yaml_variables: false }
+ )
end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/file/local_spec.rb b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
index 3d1fc32a62d..dec3eebe7b1 100644
--- a/spec/lib/gitlab/ci/config/external/file/local_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
@@ -81,6 +81,16 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local do
expect(local_file.valid?).to be_falsy
end
end
+
+ context 'when the given sha is not valid' do
+ let(:location) { '/lib/gitlab/ci/templates/existent-file.yml' }
+ let(:sha) { ':' }
+
+ it 'returns false and adds an error message stating that included file does not exist' do
+ expect(local_file).not_to be_valid
+ expect(local_file.errors).to include("Sha #{sha} is not valid!")
+ end
+ end
end
describe '#content' do
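
Note: the new example above asserts that a malformed sha (here ':') makes the local include invalid with the message "Sha : is not valid!". A hypothetical sketch of that kind of guard, purely illustrative and not the actual class (the real pattern and method name may differ):

  # Hypothetical helper mirroring the behaviour the spec exercises.
  VALID_SHA = /\A\h{6,40}\z/.freeze   # assumed hex commit-id format

  def sha_valid?(sha)
    sha.to_s.match?(VALID_SHA)
  end

  sha_valid?(':')  # => false, so the include is rejected before any blob lookup
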
diff --git a/spec/lib/gitlab/ci/config/yaml/tags/reference_spec.rb b/spec/lib/gitlab/ci/config/yaml/tags/reference_spec.rb
index c68dccd3455..bf89942bf14 100644
--- a/spec/lib/gitlab/ci/config/yaml/tags/reference_spec.rb
+++ b/spec/lib/gitlab/ci/config/yaml/tags/reference_spec.rb
@@ -69,6 +69,23 @@ RSpec.describe Gitlab::Ci::Config::Yaml::Tags::Reference do
end
end
+ context 'when the references are valid but do not match the config' do
+ let(:yaml) do
+ <<~YML
+ a: [1, 2]
+ b: [3, 4]
+ c: !reference [a, b]
+ YML
+ end
+
+ it 'raises a MissingReferenceError' do
+ expect { subject }.to raise_error(
+ Gitlab::Ci::Config::Yaml::Tags::Reference::MissingReferenceError,
+ '!reference ["a", "b"] could not be found'
+ )
+ end
+ end
+
context 'with arrays' do
let(:yaml) do
<<~YML
diff --git a/spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb b/spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb
index 546de2bee5c..65d85c7f1c0 100644
--- a/spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb
@@ -1,700 +1,24 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
RSpec.describe Gitlab::Ci::Parsers::Coverage::Cobertura do
- describe '#parse!' do
- subject(:parse_report) { described_class.new.parse!(cobertura, coverage_report, project_path: project_path, worktree_paths: paths) }
+ let(:xml_data) { double }
+ let(:coverage_report) { double }
+ let(:project_path) { double }
+ let(:paths) { double }
- let(:coverage_report) { Gitlab::Ci::Reports::CoverageReports.new }
- let(:project_path) { 'foo/bar' }
- let(:paths) { ['app/user.rb'] }
+ subject(:parse_report) { described_class.new.parse!(xml_data, coverage_report, project_path: project_path, worktree_paths: paths) }
- let(:cobertura) do
- <<~EOF
- <coverage>
- #{sources_xml}
- #{classes_xml}
- </coverage>
- EOF
- end
-
- context 'when data is Cobertura style XML' do
- shared_examples_for 'ignoring sources, project_path, and worktree_paths' do
- context 'when there is no <class>' do
- let(:classes_xml) { '' }
-
- it 'parses XML and returns empty coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({})
- end
- end
-
- context 'when there is a single <class>' do
- context 'with no lines' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="app.rb"></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns empty coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({})
- end
- end
-
- context 'with a single line' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="app.rb"><lines>
- <line number="1" hits="2"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns a single file with coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2 } })
- end
- end
-
- context 'without a package parent' do
- let(:classes_xml) do
- <<~EOF
- <packages>
- <class filename="app.rb"><lines>
- <line number="1" hits="2"/>
- </lines></class>
- </packages>
- EOF
- end
-
- it 'parses XML and returns a single file with coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2 } })
- end
- end
-
- context 'with multiple lines and methods info' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="app.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns a single file with coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2, 2 => 0 } })
- end
- end
- end
-
- context 'when there are multiple <class>' do
- context 'without a package parent' do
- let(:classes_xml) do
- <<~EOF
- <packages>
- <class filename="app.rb"><methods/><lines>
- <line number="1" hits="2"/>
- </lines></class>
- <class filename="foo.rb"><methods/><lines>
- <line number="6" hits="1"/>
- </lines></class>
- </packages>
- EOF
- end
-
- it 'parses XML and returns coverage information per class' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2 }, 'foo.rb' => { 6 => 1 } })
- end
- end
-
- context 'with the same filename and different lines' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="app.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- <class filename="app.rb"><methods/><lines>
- <line number="6" hits="1"/>
- <line number="7" hits="1"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns a single file with merged coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2, 2 => 0, 6 => 1, 7 => 1 } })
- end
- end
-
- context 'with the same filename and lines' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="app.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- <class filename="app.rb"><methods/><lines>
- <line number="1" hits="1"/>
- <line number="2" hits="1"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns a single file with summed-up coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 3, 2 => 1 } })
- end
- end
-
- context 'with missing filename' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="app.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- <class><methods/><lines>
- <line number="6" hits="1"/>
- <line number="7" hits="1"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and ignores class with missing name' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2, 2 => 0 } })
- end
- end
-
- context 'with invalid line information' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="app.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- <class filename="app.rb"><methods/><lines>
- <line null="test" hits="1"/>
- <line number="7" hits="1"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'raises an error' do
- expect { parse_report }.to raise_error(described_class::InvalidLineInformationError)
- end
- end
- end
- end
-
- context 'when there is no <sources>' do
- let(:sources_xml) { '' }
-
- it_behaves_like 'ignoring sources, project_path, and worktree_paths'
- end
-
- context 'when there is an empty <sources>' do
- let(:sources_xml) { '<sources />' }
-
- it_behaves_like 'ignoring sources, project_path, and worktree_paths'
- end
-
- context 'when there is a <sources>' do
- context 'and has a single source with a pattern for Go projects' do
- let(:project_path) { 'local/go' } # Make sure we're not making false positives
- let(:sources_xml) do
- <<~EOF
- <sources>
- <source>/usr/local/go/src</source>
- </sources>
- EOF
- end
-
- it_behaves_like 'ignoring sources, project_path, and worktree_paths'
- end
-
- context 'and has multiple sources with a pattern for Go projects' do
- let(:project_path) { 'local/go' } # Make sure we're not making false positives
- let(:sources_xml) do
- <<~EOF
- <sources>
- <source>/usr/local/go/src</source>
- <source>/go/src</source>
- </sources>
- EOF
- end
-
- it_behaves_like 'ignoring sources, project_path, and worktree_paths'
- end
-
- context 'and has a single source but already is at the project root path' do
- let(:sources_xml) do
- <<~EOF
- <sources>
- <source>builds/#{project_path}</source>
- </sources>
- EOF
- end
-
- it_behaves_like 'ignoring sources, project_path, and worktree_paths'
- end
-
- context 'and has multiple sources but already are at the project root path' do
- let(:sources_xml) do
- <<~EOF
- <sources>
- <source>builds/#{project_path}/</source>
- <source>builds/somewhere/#{project_path}</source>
- </sources>
- EOF
- end
-
- it_behaves_like 'ignoring sources, project_path, and worktree_paths'
- end
-
- context 'and has a single source that is not at the project root path' do
- let(:sources_xml) do
- <<~EOF
- <sources>
- <source>builds/#{project_path}/app</source>
- </sources>
- EOF
- end
-
- context 'when there is no <class>' do
- let(:classes_xml) { '' }
-
- it 'parses XML and returns empty coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({})
- end
- end
-
- context 'when there is a single <class>' do
- context 'with no lines' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="user.rb"></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns empty coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({})
- end
- end
-
- context 'with a single line but the filename cannot be determined based on extracted source and worktree paths' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="member.rb"><lines>
- <line number="1" hits="2"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns empty coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({})
- end
- end
-
- context 'with a single line' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="user.rb"><lines>
- <line number="1" hits="2"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns a single file with the filename relative to project root' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2 } })
- end
- end
-
- context 'with multiple lines and methods info' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="user.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns a single file with the filename relative to project root' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2, 2 => 0 } })
- end
- end
- end
-
- context 'when there are multiple <class>' do
- context 'with the same filename but the filename cannot be determined based on extracted source and worktree paths' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="member.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- <class filename="member.rb"><methods/><lines>
- <line number="6" hits="1"/>
- <line number="7" hits="1"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns empty coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({})
- end
- end
-
- context 'without a parent package' do
- let(:classes_xml) do
- <<~EOF
- <packages>
- <class filename="user.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- <class filename="user.rb"><methods/><lines>
- <line number="6" hits="1"/>
- <line number="7" hits="1"/>
- </lines></class>
- </packages>
- EOF
- end
-
- it 'parses XML and returns coverage information with the filename relative to project root' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2, 2 => 0, 6 => 1, 7 => 1 } })
- end
- end
-
- context 'with the same filename and different lines' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="user.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- <class filename="user.rb"><methods/><lines>
- <line number="6" hits="1"/>
- <line number="7" hits="1"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns a single file with merged coverage, and with the filename relative to project root' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2, 2 => 0, 6 => 1, 7 => 1 } })
- end
- end
-
- context 'with the same filename and lines' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="user.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- <class filename="user.rb"><methods/><lines>
- <line number="1" hits="1"/>
- <line number="2" hits="1"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns a single file with summed-up coverage, and with the filename relative to project root' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 3, 2 => 1 } })
- end
- end
-
- context 'with missing filename' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="user.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- <class><methods/><lines>
- <line number="6" hits="1"/>
- <line number="7" hits="1"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and ignores class with missing name' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2, 2 => 0 } })
- end
- end
-
- context 'with filename that cannot be determined based on extracted source and worktree paths' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="user.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- <class filename="member.rb"><methods/><lines>
- <line number="6" hits="1"/>
- <line number="7" hits="1"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and ignores class with undetermined filename' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2, 2 => 0 } })
- end
- end
-
- context 'with invalid line information' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="user.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- <class filename="user.rb"><methods/><lines>
- <line null="test" hits="1"/>
- <line number="7" hits="1"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'raises an error' do
- expect { parse_report }.to raise_error(described_class::InvalidLineInformationError)
- end
- end
- end
- end
-
- context 'and has multiple sources that are not at the project root path' do
- let(:sources_xml) do
- <<~EOF
- <sources>
- <source>builds/#{project_path}/app1/</source>
- <source>builds/#{project_path}/app2/</source>
- </sources>
- EOF
- end
-
- context 'and a class filename is available under multiple extracted sources' do
- let(:paths) { ['app1/user.rb', 'app2/user.rb'] }
-
- let(:classes_xml) do
- <<~EOF
- <package name="app1">
- <classes>
- <class filename="user.rb"><lines>
- <line number="1" hits="2"/>
- </lines></class>
- </classes>
- </package>
- <package name="app2">
- <classes>
- <class filename="user.rb"><lines>
- <line number="2" hits="3"/>
- </lines></class>
- </classes>
- </package>
- EOF
- end
-
- it 'parses XML and returns the files with the filename relative to project root' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({
- 'app1/user.rb' => { 1 => 2 },
- 'app2/user.rb' => { 2 => 3 }
- })
- end
- end
-
- context 'and a class filename is available under one of the extracted sources' do
- let(:paths) { ['app1/member.rb', 'app2/user.rb', 'app2/pet.rb'] }
-
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="user.rb"><lines>
- <line number="1" hits="2"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns a single file with the filename relative to project root using the extracted source where it is first found under' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app2/user.rb' => { 1 => 2 } })
- end
- end
-
- context 'and a class filename is not found under any of the extracted sources' do
- let(:paths) { ['app1/member.rb', 'app2/pet.rb'] }
-
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="user.rb"><lines>
- <line number="1" hits="2"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns empty coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({})
- end
- end
-
- context 'and a class filename is not found under any of the extracted sources within the iteratable limit' do
- let(:paths) { ['app2/user.rb'] }
-
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="record.rb"><lines>
- <line number="1" hits="2"/>
- </lines></class>
- <class filename="user.rb"><lines>
- <line number="1" hits="2"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- before do
- stub_const("#{described_class}::MAX_SOURCES", 1)
- end
-
- it 'parses XML and returns empty coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({})
- end
- end
- end
- end
-
- shared_examples_for 'non-smart parsing' do
- let(:sources_xml) do
- <<~EOF
- <sources>
- <source>builds/foo/bar/app</source>
- </sources>
- EOF
- end
-
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="user.rb"><lines>
- <line number="1" hits="2"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns filenames unchanged just as how they are found in the class node' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'user.rb' => { 1 => 2 } })
- end
- end
-
- context 'when project_path is not present' do
- let(:project_path) { nil }
- let(:paths) { ['app/user.rb'] }
-
- it_behaves_like 'non-smart parsing'
- end
-
- context 'when worktree_paths is not present' do
- let(:project_path) { 'foo/bar' }
- let(:paths) { nil }
-
- it_behaves_like 'non-smart parsing'
- end
+ before do
+ allow_next_instance_of(Nokogiri::XML::SAX::Parser) do |document|
+ allow(document).to receive(:parse)
end
+ end
- context 'when data is not Cobertura style XML' do
- let(:cobertura) { { coverage: '12%' }.to_json }
+ it 'uses Sax parser' do
+ expect(Gitlab::Ci::Parsers::Coverage::SaxDocument).to receive(:new)
- it 'raises an error' do
- expect { parse_report }.to raise_error(described_class::InvalidXMLError)
- end
- end
+ parse_report
end
end
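
Note: the rewritten cobertura_spec above now only verifies delegation to the SAX parser. A minimal sketch of the #parse! shape that this spec and the new SaxDocument spec below imply (an assumption about the implementation, not a copy of it; the argument order follows the spec's own subject lines):

  def parse!(xml_data, coverage_report, project_path: nil, worktree_paths: nil)
    # Build the SAX document with the report accumulator and path hints,
    # then let Nokogiri stream the XML through it.
    document = Gitlab::Ci::Parsers::Coverage::SaxDocument.new(coverage_report, project_path, worktree_paths)
    Nokogiri::XML::SAX::Parser.new(document).parse(xml_data)
  end
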
diff --git a/spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb b/spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb
new file mode 100644
index 00000000000..0580cb9922b
--- /dev/null
+++ b/spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb
@@ -0,0 +1,725 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::Parsers::Coverage::SaxDocument do
+ subject(:parse_report) { Nokogiri::XML::SAX::Parser.new(described_class.new(coverage_report, project_path, paths)).parse(cobertura) }
+
+ describe '#parse!' do
+ let(:coverage_report) { Gitlab::Ci::Reports::CoverageReports.new }
+ let(:project_path) { 'foo/bar' }
+ let(:paths) { ['app/user.rb'] }
+
+ let(:cobertura) do
+ <<~EOF
+ <coverage>
+ #{sources_xml}
+ #{classes_xml}
+ </coverage>
+ EOF
+ end
+
+ context 'when data is Cobertura style XML' do
+ shared_examples_for 'ignoring sources, project_path, and worktree_paths' do
+ context 'when there is no <class>' do
+ let(:classes_xml) { '' }
+
+ it 'parses XML and returns empty coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({})
+ end
+ end
+
+ context 'when there is a single <class>' do
+ context 'with no lines' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="app.rb"></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns empty coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({})
+ end
+ end
+
+ context 'with a single line' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="app.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2 } })
+ end
+ end
+
+ context 'without a package parent' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages>
+ <class filename="app.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ </packages>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2 } })
+ end
+ end
+
+ context 'with multiple lines and methods info' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2, 2 => 0 } })
+ end
+ end
+ end
+
+ context 'when there are multiple packages' do
+ let(:cobertura) do
+ <<~EOF
+ <coverage>
+ <packages><package name="app1"><classes>
+ <class filename="app1.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ </classes></package></packages>
+ <packages><package name="app2"><classes>
+ <class filename="app2.rb"><lines>
+ <line number="11" hits="3"/>
+ </lines></class>
+ </classes></package></packages>
+ </coverage>
+ EOF
+ end
+
+ it 'parses XML and returns coverage information per class' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app1.rb' => { 1 => 2 }, 'app2.rb' => { 11 => 3 } })
+ end
+ end
+
+ context 'when there are multiple <class>' do
+ context 'without a package parent' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ <class filename="foo.rb"><methods/><lines>
+ <line number="6" hits="1"/>
+ </lines></class>
+ </packages>
+ EOF
+ end
+
+ it 'parses XML and returns coverage information per class' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2 }, 'foo.rb' => { 6 => 1 } })
+ end
+ end
+
+ context 'with the same filename and different lines' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="app.rb"><methods/><lines>
+ <line number="6" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with merged coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2, 2 => 0, 6 => 1, 7 => 1 } })
+ end
+ end
+
+ context 'with the same filename and lines' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="1"/>
+ <line number="2" hits="1"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with summed-up coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 3, 2 => 1 } })
+ end
+ end
+
+ context 'with missing filename' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class><methods/><lines>
+ <line number="6" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and ignores class with missing name' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2, 2 => 0 } })
+ end
+ end
+
+ context 'with invalid line information' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="app.rb"><methods/><lines>
+ <line null="test" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'raises an error' do
+ expect { parse_report }.to raise_error(Gitlab::Ci::Parsers::Coverage::Cobertura::InvalidLineInformationError)
+ end
+ end
+ end
+ end
+
+ context 'when there is no <sources>' do
+ let(:sources_xml) { '' }
+
+ it_behaves_like 'ignoring sources, project_path, and worktree_paths'
+ end
+
+ context 'when there is an empty <sources>' do
+ let(:sources_xml) { '<sources />' }
+
+ it_behaves_like 'ignoring sources, project_path, and worktree_paths'
+ end
+
+ context 'when there is a <sources>' do
+ context 'and has a single source with a pattern for Go projects' do
+ let(:project_path) { 'local/go' } # Make sure we're not making false positives
+ let(:sources_xml) do
+ <<~EOF
+ <sources>
+ <source>/usr/local/go/src</source>
+ </sources>
+ EOF
+ end
+
+ it_behaves_like 'ignoring sources, project_path, and worktree_paths'
+ end
+
+ context 'and has multiple sources with a pattern for Go projects' do
+ let(:project_path) { 'local/go' } # Make sure we're not making false positives
+ let(:sources_xml) do
+ <<~EOF
+ <sources>
+ <source>/usr/local/go/src</source>
+ <source>/go/src</source>
+ </sources>
+ EOF
+ end
+
+ it_behaves_like 'ignoring sources, project_path, and worktree_paths'
+ end
+
+ context 'and has a single source but already is at the project root path' do
+ let(:sources_xml) do
+ <<~EOF
+ <sources>
+ <source>builds/#{project_path}</source>
+ </sources>
+ EOF
+ end
+
+ it_behaves_like 'ignoring sources, project_path, and worktree_paths'
+ end
+
+ context 'and has multiple sources but already are at the project root path' do
+ let(:sources_xml) do
+ <<~EOF
+ <sources>
+ <source>builds/#{project_path}/</source>
+ <source>builds/somewhere/#{project_path}</source>
+ </sources>
+ EOF
+ end
+
+ it_behaves_like 'ignoring sources, project_path, and worktree_paths'
+ end
+
+ context 'and has a single source that is not at the project root path' do
+ let(:sources_xml) do
+ <<~EOF
+ <sources>
+ <source>builds/#{project_path}/app</source>
+ </sources>
+ EOF
+ end
+
+ context 'when there is no <class>' do
+ let(:classes_xml) { '' }
+
+ it 'parses XML and returns empty coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({})
+ end
+ end
+
+ context 'when there is a single <class>' do
+ context 'with no lines' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="user.rb"></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns empty coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({})
+ end
+ end
+
+ context 'with a single line but the filename cannot be determined based on extracted source and worktree paths' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="member.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns empty coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({})
+ end
+ end
+
+ context 'with a single line' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="user.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with the filename relative to project root' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2 } })
+ end
+ end
+
+ context 'with multiple lines and methods info' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="user.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with the filename relative to project root' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2, 2 => 0 } })
+ end
+ end
+ end
+
+ context 'when there are multiple <class>' do
+ context 'with the same filename but the filename cannot be determined based on extracted source and worktree paths' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="member.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="member.rb"><methods/><lines>
+ <line number="6" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns empty coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({})
+ end
+ end
+
+ context 'without a parent package' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages>
+ <class filename="user.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="user.rb"><methods/><lines>
+ <line number="6" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </packages>
+ EOF
+ end
+
+ it 'parses XML and returns coverage information with the filename relative to project root' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2, 2 => 0, 6 => 1, 7 => 1 } })
+ end
+ end
+
+ context 'with the same filename and different lines' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="user.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="user.rb"><methods/><lines>
+ <line number="6" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with merged coverage, and with the filename relative to project root' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2, 2 => 0, 6 => 1, 7 => 1 } })
+ end
+ end
+
+ context 'with the same filename and lines' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="user.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="user.rb"><methods/><lines>
+ <line number="1" hits="1"/>
+ <line number="2" hits="1"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with summed-up coverage, and with the filename relative to project root' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 3, 2 => 1 } })
+ end
+ end
+
+ context 'with missing filename' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="user.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class><methods/><lines>
+ <line number="6" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and ignores class with missing name' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2, 2 => 0 } })
+ end
+ end
+
+ context 'with filename that cannot be determined based on extracted source and worktree paths' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="user.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="member.rb"><methods/><lines>
+ <line number="6" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and ignores class with undetermined filename' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2, 2 => 0 } })
+ end
+ end
+
+ context 'with invalid line information' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="user.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="user.rb"><methods/><lines>
+ <line null="test" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'raises an error' do
+ expect { parse_report }.to raise_error(Gitlab::Ci::Parsers::Coverage::Cobertura::InvalidLineInformationError)
+ end
+ end
+ end
+ end
+
+ context 'and has multiple sources that are not at the project root path' do
+ let(:sources_xml) do
+ <<~EOF
+ <sources>
+ <source>builds/#{project_path}/app1/</source>
+ <source>builds/#{project_path}/app2/</source>
+ </sources>
+ EOF
+ end
+
+ context 'and a class filename is available under multiple extracted sources' do
+ let(:paths) { ['app1/user.rb', 'app2/user.rb'] }
+
+ let(:classes_xml) do
+ <<~EOF
+ <package name="app1">
+ <classes>
+ <class filename="user.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ </classes>
+ </package>
+ <package name="app2">
+ <classes>
+ <class filename="user.rb"><lines>
+ <line number="2" hits="3"/>
+ </lines></class>
+ </classes>
+ </package>
+ EOF
+ end
+
+ it 'parses XML and returns the files with the filename relative to project root' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({
+ 'app1/user.rb' => { 1 => 2 },
+ 'app2/user.rb' => { 2 => 3 }
+ })
+ end
+ end
+
+ context 'and a class filename is available under one of the extracted sources' do
+ let(:paths) { ['app1/member.rb', 'app2/user.rb', 'app2/pet.rb'] }
+
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="user.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with the filename relative to project root using the extracted source where it is first found under' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app2/user.rb' => { 1 => 2 } })
+ end
+ end
+
+ context 'and a class filename is not found under any of the extracted sources' do
+ let(:paths) { ['app1/member.rb', 'app2/pet.rb'] }
+
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="user.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns empty coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({})
+ end
+ end
+
+ context 'and a class filename is not found under any of the extracted sources within the iterable limit' do
+ let(:paths) { ['app2/user.rb'] }
+
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="record.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ <class filename="user.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ before do
+ stub_const("#{described_class}::MAX_SOURCES", 1)
+ end
+
+ it 'parses XML and returns empty coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({})
+ end
+ end
+ end
+ end
+
+ shared_examples_for 'non-smart parsing' do
+ let(:sources_xml) do
+ <<~EOF
+ <sources>
+ <source>builds/foo/bar/app</source>
+ </sources>
+ EOF
+ end
+
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="user.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns filenames unchanged, just as they appear in the class node' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'user.rb' => { 1 => 2 } })
+ end
+ end
+
+ context 'when project_path is not present' do
+ let(:project_path) { nil }
+ let(:paths) { ['app/user.rb'] }
+
+ it_behaves_like 'non-smart parsing'
+ end
+
+ context 'when worktree_paths is not present' do
+ let(:project_path) { 'foo/bar' }
+ let(:paths) { nil }
+
+ it_behaves_like 'non-smart parsing'
+ end
+ end
+
+ context 'when data is not Cobertura style XML' do
+ let(:cobertura) { { coverage: '12%' }.to_json }
+
+ it 'raises an error' do
+ expect { parse_report }.to raise_error(Gitlab::Ci::Parsers::Coverage::Cobertura::InvalidXMLError)
+ end
+ end
+ end
+end
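
Note: driving the new SaxDocument directly, mirroring the spec's subject and its single-line fixture above (assumes a loaded GitLab environment that provides these classes; values are taken from the spec data):

  require 'nokogiri'

  xml = <<~XML
    <coverage>
      <sources><source>builds/foo/bar/app</source></sources>
      <packages><package name="app"><classes>
        <class filename="user.rb"><lines><line number="1" hits="2"/></lines></class>
      </classes></package></packages>
    </coverage>
  XML

  report   = Gitlab::Ci::Reports::CoverageReports.new
  document = Gitlab::Ci::Parsers::Coverage::SaxDocument.new(report, 'foo/bar', ['app/user.rb'])
  Nokogiri::XML::SAX::Parser.new(document).parse(xml)

  report.files # => { 'app/user.rb' => { 1 => 2 } }, matching the expectation above
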
diff --git a/spec/lib/gitlab/ci/parsers/security/common_spec.rb b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
index 7eec78ff186..1e96c717a4f 100644
--- a/spec/lib/gitlab/ci/parsers/security/common_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
@@ -26,8 +26,6 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do
allow(parser).to receive(:tracking_data).and_return(tracking_data)
allow(parser).to receive(:create_flags).and_return(vulnerability_flags_data)
end
-
- artifact.each_blob { |blob| described_class.parse!(blob, report, vulnerability_finding_signatures_enabled) }
end
describe 'schema validation' do
@@ -40,40 +38,50 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do
allow(validator_class).to receive(:new).and_call_original
end
- context 'when enforce_security_report_validation is enabled' do
+ context 'when show_report_validation_warnings is enabled' do
before do
- stub_feature_flags(enforce_security_report_validation: true)
+ stub_feature_flags(show_report_validation_warnings: true)
end
- context 'when the validate flag is set as `true`' do
- let(:validate) { true }
+ context 'when the validate flag is set to `false`' do
+ let(:validate) { false }
+ let(:valid?) { false }
+ let(:errors) { ['foo'] }
- it 'instantiates the validator with correct params' do
- parse_report
+ before do
+ allow_next_instance_of(validator_class) do |instance|
+ allow(instance).to receive(:valid?).and_return(valid?)
+ allow(instance).to receive(:errors).and_return(errors)
+ end
- expect(validator_class).to have_received(:new).with(report.type, {})
+ allow(parser).to receive_messages(create_scanner: true, create_scan: true)
end
- context 'when the report data is valid according to the schema' do
- let(:valid?) { true }
+ it 'instantiates the validator with correct params' do
+ parse_report
- before do
- allow_next_instance_of(validator_class) do |instance|
- allow(instance).to receive(:valid?).and_return(valid?)
- allow(instance).to receive(:errors).and_return([])
- end
+ expect(validator_class).to have_received(:new).with(report.type, {}, report.version)
+ end
- allow(parser).to receive_messages(create_scanner: true, create_scan: true)
+ context 'when the report data is not valid according to the schema' do
+ it 'adds warnings to the report' do
+ expect { parse_report }.to change { report.warnings }.from([]).to([{ message: 'foo', type: 'Schema' }])
end
- it 'does not add errors to the report' do
- expect { parse_report }.not_to change { report.errors }.from([])
+ it 'keeps the execution flow as normal' do
+ parse_report
+
+ expect(parser).to have_received(:create_scanner)
+ expect(parser).to have_received(:create_scan)
end
+ end
- it 'adds the schema validation status to the report' do
- parse_report
+ context 'when the report data is valid according to the schema' do
+ let(:valid?) { true }
+ let(:errors) { [] }
- expect(report.schema_validation_status).to eq(:valid_schema)
+ it 'does not add warnings to the report' do
+ expect { parse_report }.not_to change { report.errors }
end
it 'keeps the execution flow as normal' do
@@ -83,42 +91,62 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do
expect(parser).to have_received(:create_scan)
end
end
+ end
- context 'when the report data is not valid according to the schema' do
- let(:valid?) { false }
-
- before do
- allow_next_instance_of(validator_class) do |instance|
- allow(instance).to receive(:valid?).and_return(valid?)
- allow(instance).to receive(:errors).and_return(['foo'])
- end
+ context 'when the validate flag is set to `true`' do
+ let(:validate) { true }
+ let(:valid?) { false }
+ let(:errors) { ['foo'] }
- allow(parser).to receive_messages(create_scanner: true, create_scan: true)
+ before do
+ allow_next_instance_of(validator_class) do |instance|
+ allow(instance).to receive(:valid?).and_return(valid?)
+ allow(instance).to receive(:errors).and_return(errors)
end
+ allow(parser).to receive_messages(create_scanner: true, create_scan: true)
+ end
+
+ it 'instantiates the validator with correct params' do
+ parse_report
+
+ expect(validator_class).to have_received(:new).with(report.type, {}, report.version)
+ end
+
+ context 'when the report data is not valid according to the schema' do
it 'adds errors to the report' do
expect { parse_report }.to change { report.errors }.from([]).to([{ message: 'foo', type: 'Schema' }])
end
- it 'adds the schema validation status to the report' do
+ it 'does not try to create report entities' do
parse_report
- expect(report.schema_validation_status).to eq(:invalid_schema)
+ expect(parser).not_to have_received(:create_scanner)
+ expect(parser).not_to have_received(:create_scan)
+ end
+ end
+
+ context 'when the report data is valid according to the schema' do
+ let(:valid?) { true }
+ let(:errors) { [] }
+
+ it 'does not add errors to the report' do
+ expect { parse_report }.not_to change { report.errors }.from([])
end
- it 'does not try to create report entities' do
+ it 'keeps the execution flow as normal' do
parse_report
- expect(parser).not_to have_received(:create_scanner)
- expect(parser).not_to have_received(:create_scan)
+ expect(parser).to have_received(:create_scanner)
+ expect(parser).to have_received(:create_scan)
end
end
end
end
- context 'when enforce_security_report_validation is disabled' do
+ context 'when show_report_validation_warnings is disabled' do
before do
- stub_feature_flags(enforce_security_report_validation: false)
+ stub_feature_flags(show_report_validation_warnings: false)
end
context 'when the validate flag is set as `false`' do
@@ -147,7 +175,7 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do
it 'instantiates the validator with correct params' do
parse_report
- expect(validator_class).to have_received(:new).with(report.type, {})
+ expect(validator_class).to have_received(:new).with(report.type, {}, report.version)
end
context 'when the report data is not valid according to the schema' do
@@ -181,265 +209,283 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do
end
end
- describe 'parsing finding.name' do
- let(:artifact) { build(:ci_job_artifact, :common_security_report_with_blank_names) }
-
- context 'when message is provided' do
- it 'sets message from the report as a finding name' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-1020' }
- expected_name = Gitlab::Json.parse(finding.raw_metadata)['message']
-
- expect(finding.name).to eq(expected_name)
- end
+ context 'report parsing' do
+ before do
+ artifact.each_blob { |blob| described_class.parse!(blob, report, vulnerability_finding_signatures_enabled) }
end
- context 'when message is not provided' do
- context 'and name is provided' do
- it 'sets name from the report as a name' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-1030' }
- expected_name = Gitlab::Json.parse(finding.raw_metadata)['name']
+ describe 'parsing finding.name' do
+ let(:artifact) { build(:ci_job_artifact, :common_security_report_with_blank_names) }
+
+ context 'when message is provided' do
+ it 'sets message from the report as a finding name' do
+ finding = report.findings.find { |x| x.compare_key == 'CVE-1020' }
+ expected_name = Gitlab::Json.parse(finding.raw_metadata)['message']
expect(finding.name).to eq(expected_name)
end
end
- context 'and name is not provided' do
- context 'when CVE identifier exists' do
- it 'combines identifier with location to create name' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-2017-11429' }
- expect(finding.name).to eq("CVE-2017-11429 in yarn.lock")
+ context 'when message is not provided' do
+ context 'and name is provided' do
+ it 'sets name from the report as a name' do
+ finding = report.findings.find { |x| x.compare_key == 'CVE-1030' }
+ expected_name = Gitlab::Json.parse(finding.raw_metadata)['name']
+
+ expect(finding.name).to eq(expected_name)
end
end
- context 'when CWE identifier exists' do
- it 'combines identifier with location to create name' do
- finding = report.findings.find { |x| x.compare_key == 'CWE-2017-11429' }
- expect(finding.name).to eq("CWE-2017-11429 in yarn.lock")
+ context 'and name is not provided' do
+ context 'when CVE identifier exists' do
+ it 'combines identifier with location to create name' do
+ finding = report.findings.find { |x| x.compare_key == 'CVE-2017-11429' }
+ expect(finding.name).to eq("CVE-2017-11429 in yarn.lock")
+ end
end
- end
- context 'when neither CVE nor CWE identifier exist' do
- it 'combines identifier with location to create name' do
- finding = report.findings.find { |x| x.compare_key == 'OTHER-2017-11429' }
- expect(finding.name).to eq("other-2017-11429 in yarn.lock")
+ context 'when CWE identifier exists' do
+ it 'combines identifier with location to create name' do
+ finding = report.findings.find { |x| x.compare_key == 'CWE-2017-11429' }
+ expect(finding.name).to eq("CWE-2017-11429 in yarn.lock")
+ end
+ end
+
+ context 'when neither CVE nor CWE identifier exist' do
+ it 'combines identifier with location to create name' do
+ finding = report.findings.find { |x| x.compare_key == 'OTHER-2017-11429' }
+ expect(finding.name).to eq("other-2017-11429 in yarn.lock")
+ end
end
end
end
end
- end
- describe 'parsing finding.details' do
- context 'when details are provided' do
- it 'sets details from the report' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-1020' }
- expected_details = Gitlab::Json.parse(finding.raw_metadata)['details']
+ describe 'parsing finding.details' do
+ context 'when details are provided' do
+ it 'sets details from the report' do
+ finding = report.findings.find { |x| x.compare_key == 'CVE-1020' }
+ expected_details = Gitlab::Json.parse(finding.raw_metadata)['details']
- expect(finding.details).to eq(expected_details)
+ expect(finding.details).to eq(expected_details)
+ end
end
- end
- context 'when details are not provided' do
- it 'sets empty hash' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-1030' }
- expect(finding.details).to eq({})
+ context 'when details are not provided' do
+ it 'sets empty hash' do
+ finding = report.findings.find { |x| x.compare_key == 'CVE-1030' }
+ expect(finding.details).to eq({})
+ end
end
end
- end
- describe 'top-level scanner' do
- it 'is the primary scanner' do
- expect(report.primary_scanner.external_id).to eq('gemnasium')
- expect(report.primary_scanner.name).to eq('Gemnasium')
- expect(report.primary_scanner.vendor).to eq('GitLab')
- expect(report.primary_scanner.version).to eq('2.18.0')
- end
+ describe 'top-level scanner' do
+ it 'is the primary scanner' do
+ expect(report.primary_scanner.external_id).to eq('gemnasium')
+ expect(report.primary_scanner.name).to eq('Gemnasium')
+ expect(report.primary_scanner.vendor).to eq('GitLab')
+ expect(report.primary_scanner.version).to eq('2.18.0')
+ end
- it 'returns nil report has no scanner' do
- empty_report = Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, 2.weeks.ago)
- described_class.parse!({}.to_json, empty_report)
+ it 'returns nil when the report has no scanner' do
+ empty_report = Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, 2.weeks.ago)
+ described_class.parse!({}.to_json, empty_report)
- expect(empty_report.primary_scanner).to be_nil
+ expect(empty_report.primary_scanner).to be_nil
+ end
end
- end
- describe 'parsing scanners' do
- subject(:scanner) { report.findings.first.scanner }
+ describe 'parsing scanners' do
+ subject(:scanner) { report.findings.first.scanner }
- context 'when vendor is not missing in scanner' do
- it 'returns scanner with parsed vendor value' do
- expect(scanner.vendor).to eq('GitLab')
+ context 'when vendor is not missing in scanner' do
+ it 'returns scanner with parsed vendor value' do
+ expect(scanner.vendor).to eq('GitLab')
+ end
end
end
- end
- describe 'parsing scan' do
- it 'returns scan object for each finding' do
- scans = report.findings.map(&:scan)
+ describe 'parsing scan' do
+ it 'returns scan object for each finding' do
+ scans = report.findings.map(&:scan)
- expect(scans.map(&:status).all?('success')).to be(true)
- expect(scans.map(&:start_time).all?('placeholder-value')).to be(true)
- expect(scans.map(&:end_time).all?('placeholder-value')).to be(true)
- expect(scans.size).to eq(3)
- expect(scans.first).to be_a(::Gitlab::Ci::Reports::Security::Scan)
- end
+ expect(scans.map(&:status).all?('success')).to be(true)
+ expect(scans.map(&:start_time).all?('placeholder-value')).to be(true)
+ expect(scans.map(&:end_time).all?('placeholder-value')).to be(true)
+ expect(scans.size).to eq(3)
+ expect(scans.first).to be_a(::Gitlab::Ci::Reports::Security::Scan)
+ end
- it 'returns nil when scan is not a hash' do
- empty_report = Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, 2.weeks.ago)
- described_class.parse!({}.to_json, empty_report)
+ it 'returns nil when scan is not a hash' do
+ empty_report = Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, 2.weeks.ago)
+ described_class.parse!({}.to_json, empty_report)
- expect(empty_report.scan).to be(nil)
+ expect(empty_report.scan).to be(nil)
+ end
end
- end
- describe 'parsing schema version' do
- it 'parses the version' do
- expect(report.version).to eq('14.0.2')
- end
+ describe 'parsing schema version' do
+ it 'parses the version' do
+ expect(report.version).to eq('14.0.2')
+ end
- it 'returns nil when there is no version' do
- empty_report = Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, 2.weeks.ago)
- described_class.parse!({}.to_json, empty_report)
+ it 'returns nil when there is no version' do
+ empty_report = Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, 2.weeks.ago)
+ described_class.parse!({}.to_json, empty_report)
- expect(empty_report.version).to be_nil
+ expect(empty_report.version).to be_nil
+ end
end
- end
- describe 'parsing analyzer' do
- it 'associates analyzer with report' do
- expect(report.analyzer.id).to eq('common-analyzer')
- expect(report.analyzer.name).to eq('Common Analyzer')
- expect(report.analyzer.version).to eq('2.0.1')
- expect(report.analyzer.vendor).to eq('Common')
- end
+ describe 'parsing analyzer' do
+ it 'associates analyzer with report' do
+ expect(report.analyzer.id).to eq('common-analyzer')
+ expect(report.analyzer.name).to eq('Common Analyzer')
+ expect(report.analyzer.version).to eq('2.0.1')
+ expect(report.analyzer.vendor).to eq('Common')
+ end
- it 'returns nil when analyzer data is not available' do
- empty_report = Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, 2.weeks.ago)
- described_class.parse!({}.to_json, empty_report)
+ it 'returns nil when analyzer data is not available' do
+ empty_report = Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, 2.weeks.ago)
+ described_class.parse!({}.to_json, empty_report)
- expect(empty_report.analyzer).to be_nil
+ expect(empty_report.analyzer).to be_nil
+ end
end
- end
- describe 'parsing flags' do
- it 'returns flags object for each finding' do
- flags = report.findings.first.flags
+ describe 'parsing flags' do
+ it 'returns flags object for each finding' do
+ flags = report.findings.first.flags
- expect(flags).to contain_exactly(
- have_attributes(type: 'flagged-as-likely-false-positive', origin: 'post analyzer X', description: 'static string to sink'),
+ expect(flags).to contain_exactly(
+ have_attributes(type: 'flagged-as-likely-false-positive', origin: 'post analyzer X', description: 'static string to sink'),
have_attributes(type: 'flagged-as-likely-false-positive', origin: 'post analyzer Y', description: 'integer to sink')
- )
+ )
+ end
end
- end
- describe 'parsing links' do
- it 'returns links object for each finding', :aggregate_failures do
- links = report.findings.flat_map(&:links)
+ describe 'parsing links' do
+ it 'returns links object for each finding', :aggregate_failures do
+ links = report.findings.flat_map(&:links)
- expect(links.map(&:url)).to match_array(['https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-1020', 'https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-1030'])
- expect(links.map(&:name)).to match_array([nil, 'CVE-1030'])
- expect(links.size).to eq(2)
- expect(links.first).to be_a(::Gitlab::Ci::Reports::Security::Link)
+ expect(links.map(&:url)).to match_array(['https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-1020', 'https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-1030'])
+ expect(links.map(&:name)).to match_array([nil, 'CVE-1030'])
+ expect(links.size).to eq(2)
+ expect(links.first).to be_a(::Gitlab::Ci::Reports::Security::Link)
+ end
end
- end
- describe 'setting the uuid' do
- let(:finding_uuids) { report.findings.map(&:uuid) }
- let(:uuid_1) do
- Security::VulnerabilityUUID.generate(
- report_type: "sast",
- primary_identifier_fingerprint: report.findings[0].identifiers.first.fingerprint,
- location_fingerprint: location.fingerprint,
- project_id: pipeline.project_id
- )
- end
+ describe 'parsing evidence' do
+ it 'returns evidence object for each finding', :aggregate_failures do
+ evidences = report.findings.map(&:evidence)
- let(:uuid_2) do
- Security::VulnerabilityUUID.generate(
- report_type: "sast",
- primary_identifier_fingerprint: report.findings[1].identifiers.first.fingerprint,
- location_fingerprint: location.fingerprint,
- project_id: pipeline.project_id
- )
+ expect(evidences.first.data).not_to be_empty
+ expect(evidences.first.data["summary"]).to match(/The Origin header was changed/)
+ expect(evidences.size).to eq(3)
+ expect(evidences.compact.size).to eq(2)
+ expect(evidences.first).to be_a(::Gitlab::Ci::Reports::Security::Evidence)
+ end
end
- let(:expected_uuids) { [uuid_1, uuid_2, nil] }
+ describe 'setting the uuid' do
+ let(:finding_uuids) { report.findings.map(&:uuid) }
+ let(:uuid_1) do
+ Security::VulnerabilityUUID.generate(
+ report_type: "sast",
+ primary_identifier_fingerprint: report.findings[0].identifiers.first.fingerprint,
+ location_fingerprint: location.fingerprint,
+ project_id: pipeline.project_id
+ )
+ end
+
+ let(:uuid_2) do
+ Security::VulnerabilityUUID.generate(
+ report_type: "sast",
+ primary_identifier_fingerprint: report.findings[1].identifiers.first.fingerprint,
+ location_fingerprint: location.fingerprint,
+ project_id: pipeline.project_id
+ )
+ end
- it 'sets the UUIDv5 for findings', :aggregate_failures do
- allow_next_instance_of(Gitlab::Ci::Reports::Security::Report) do |report|
- allow(report).to receive(:type).and_return('sast')
+ let(:expected_uuids) { [uuid_1, uuid_2, nil] }
- expect(finding_uuids).to match_array(expected_uuids)
+ it 'sets the UUIDv5 for findings', :aggregate_failures do
+ allow_next_instance_of(Gitlab::Ci::Reports::Security::Report) do |report|
+ allow(report).to receive(:type).and_return('sast')
+
+ expect(finding_uuids).to match_array(expected_uuids)
+ end
end
end
- end
- describe 'parsing tracking' do
- let(:tracking_data) do
- {
+ describe 'parsing tracking' do
+ let(:tracking_data) do
+ {
'type' => 'source',
'items' => [
- 'signatures' => [
- { 'algorithm' => 'hash', 'value' => 'hash_value' },
- { 'algorithm' => 'location', 'value' => 'location_value' },
- { 'algorithm' => 'scope_offset', 'value' => 'scope_offset_value' }
- ]
+ 'signatures' => [
+ { 'algorithm' => 'hash', 'value' => 'hash_value' },
+ { 'algorithm' => 'location', 'value' => 'location_value' },
+ { 'algorithm' => 'scope_offset', 'value' => 'scope_offset_value' }
]
- }
- end
+ ]
+ }
+ end
- context 'with valid tracking information' do
- it 'creates signatures for each algorithm' do
- finding = report.findings.first
- expect(finding.signatures.size).to eq(3)
- expect(finding.signatures.map(&:algorithm_type).to_set).to eq(Set['hash', 'location', 'scope_offset'])
+ context 'with valid tracking information' do
+ it 'creates signatures for each algorithm' do
+ finding = report.findings.first
+ expect(finding.signatures.size).to eq(3)
+ expect(finding.signatures.map(&:algorithm_type).to_set).to eq(Set['hash', 'location', 'scope_offset'])
+ end
end
- end
- context 'with invalid tracking information' do
- let(:tracking_data) do
- {
+ context 'with invalid tracking information' do
+ let(:tracking_data) do
+ {
'type' => 'source',
'items' => [
- 'signatures' => [
- { 'algorithm' => 'hash', 'value' => 'hash_value' },
- { 'algorithm' => 'location', 'value' => 'location_value' },
- { 'algorithm' => 'INVALID', 'value' => 'scope_offset_value' }
- ]
+ 'signatures' => [
+ { 'algorithm' => 'hash', 'value' => 'hash_value' },
+ { 'algorithm' => 'location', 'value' => 'location_value' },
+ { 'algorithm' => 'INVALID', 'value' => 'scope_offset_value' }
]
- }
- end
+ ]
+ }
+ end
- it 'ignores invalid algorithm types' do
- finding = report.findings.first
- expect(finding.signatures.size).to eq(2)
- expect(finding.signatures.map(&:algorithm_type).to_set).to eq(Set['hash', 'location'])
+ it 'ignores invalid algorithm types' do
+ finding = report.findings.first
+ expect(finding.signatures.size).to eq(2)
+ expect(finding.signatures.map(&:algorithm_type).to_set).to eq(Set['hash', 'location'])
+ end
end
- end
- context 'with valid tracking information' do
- it 'creates signatures for each signature algorithm' do
- finding = report.findings.first
- expect(finding.signatures.size).to eq(3)
- expect(finding.signatures.map(&:algorithm_type)).to eq(%w[hash location scope_offset])
-
- signatures = finding.signatures.index_by(&:algorithm_type)
- expected_values = tracking_data['items'][0]['signatures'].index_by { |x| x['algorithm'] }
- expect(signatures['hash'].signature_value).to eq(expected_values['hash']['value'])
- expect(signatures['location'].signature_value).to eq(expected_values['location']['value'])
- expect(signatures['scope_offset'].signature_value).to eq(expected_values['scope_offset']['value'])
- end
+ context 'with valid tracking information' do
+ it 'creates signatures for each signature algorithm' do
+ finding = report.findings.first
+ expect(finding.signatures.size).to eq(3)
+ expect(finding.signatures.map(&:algorithm_type)).to eq(%w[hash location scope_offset])
+
+ signatures = finding.signatures.index_by(&:algorithm_type)
+ expected_values = tracking_data['items'][0]['signatures'].index_by { |x| x['algorithm'] }
+ expect(signatures['hash'].signature_value).to eq(expected_values['hash']['value'])
+ expect(signatures['location'].signature_value).to eq(expected_values['location']['value'])
+ expect(signatures['scope_offset'].signature_value).to eq(expected_values['scope_offset']['value'])
+ end
- it 'sets the uuid according to the higest priority signature' do
- finding = report.findings.first
- highest_signature = finding.signatures.max_by(&:priority)
+ it 'sets the uuid according to the highest priority signature' do
+ finding = report.findings.first
+ highest_signature = finding.signatures.max_by(&:priority)
- identifiers = if vulnerability_finding_signatures_enabled
- "#{finding.report_type}-#{finding.primary_identifier.fingerprint}-#{highest_signature.signature_hex}-#{report.project_id}"
- else
- "#{finding.report_type}-#{finding.primary_identifier.fingerprint}-#{finding.location.fingerprint}-#{report.project_id}"
- end
+ identifiers = if vulnerability_finding_signatures_enabled
+ "#{finding.report_type}-#{finding.primary_identifier.fingerprint}-#{highest_signature.signature_hex}-#{report.project_id}"
+ else
+ "#{finding.report_type}-#{finding.primary_identifier.fingerprint}-#{finding.location.fingerprint}-#{report.project_id}"
+ end
- expect(finding.uuid).to eq(Gitlab::UUID.v5(identifiers))
+ expect(finding.uuid).to eq(Gitlab::UUID.v5(identifiers))
+ end
end
end
end
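# Illustrative sketch (not GitLab's implementation): the expectations above derive a
# deterministic UUIDv5 for each finding by hashing the dash-joined tuple of report type,
# primary identifier fingerprint, location (or highest-priority signature) fingerprint and
# project id. The namespace below is an arbitrary choice for demonstration only.
require 'active_support/core_ext/digest/uuid'

def demo_finding_uuid(report_type:, identifier_fingerprint:, location_fingerprint:, project_id:)
  name = [report_type, identifier_fingerprint, location_fingerprint, project_id].join('-')
  Digest::UUID.uuid_v5(Digest::UUID::DNS_NAMESPACE, name) # namespace is an assumption
end

demo_finding_uuid(report_type: 'sast',
                  identifier_fingerprint: 'abc123',
                  location_fingerprint: 'def456',
                  project_id: 42)
# => the same UUID every time the same report is parsed for the same project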
diff --git a/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb b/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb
index 951e0576a58..c83427b68ef 100644
--- a/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb
@@ -3,16 +3,60 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
+ describe 'SUPPORTED_VERSIONS' do
+ schema_path = Rails.root.join("lib", "gitlab", "ci", "parsers", "security", "validators", "schemas")
+
+ it 'matches DEPRECATED_VERSIONS keys' do
+ expect(described_class::SUPPORTED_VERSIONS.keys).to eq(described_class::DEPRECATED_VERSIONS.keys)
+ end
+
+ context 'files under schema path are explicitly listed' do
+ # We only care about the part that comes before report-format.json
+ # https://rubular.com/r/N8Juz7r8hYDYgD
+ filename_regex = /(?<report_type>[-\w]*)\-report-format.json/
+
+ versions = Dir.glob(File.join(schema_path, "*", File::SEPARATOR)).map { |path| path.split("/").last }
+
+ versions.each do |version|
+ files = Dir[schema_path.join(version, "*.json")]
+
+ files.each do |file|
+ matches = filename_regex.match(file)
+ report_type = matches[:report_type].tr("-", "_").to_sym
+
+ it "#{report_type} #{version}" do
+ expect(described_class::SUPPORTED_VERSIONS[report_type]).to include(version)
+ end
+ end
+ end
+ end
+
+ context 'every SUPPORTED_VERSION has a corresponding JSON file' do
+ described_class::SUPPORTED_VERSIONS.each_key do |report_type|
+ # api_fuzzing is covered by DAST schema
+ next if report_type == :api_fuzzing
+
+ described_class::SUPPORTED_VERSIONS[report_type].each do |version|
+ it "#{report_type} #{version} schema file is present" do
+ filename = "#{report_type.to_s.tr("_", "-")}-report-format.json"
+ full_path = schema_path.join(version, filename)
+ expect(File.file?(full_path)).to be true
+ end
+ end
+ end
+ end
+ end
+
using RSpec::Parameterized::TableSyntax
- where(:report_type, :expected_errors, :valid_data) do
- 'sast' | ['root is missing required keys: vulnerabilities'] | { 'version' => '10.0.0', 'vulnerabilities' => [] }
- :sast | ['root is missing required keys: vulnerabilities'] | { 'version' => '10.0.0', 'vulnerabilities' => [] }
- :secret_detection | ['root is missing required keys: vulnerabilities'] | { 'version' => '10.0.0', 'vulnerabilities' => [] }
+ where(:report_type, :report_version, :expected_errors, :valid_data) do
+ 'sast' | '10.0.0' | ['root is missing required keys: vulnerabilities'] | { 'version' => '10.0.0', 'vulnerabilities' => [] }
+ :sast | '10.0.0' | ['root is missing required keys: vulnerabilities'] | { 'version' => '10.0.0', 'vulnerabilities' => [] }
+ :secret_detection | '10.0.0' | ['root is missing required keys: vulnerabilities'] | { 'version' => '10.0.0', 'vulnerabilities' => [] }
end
with_them do
- let(:validator) { described_class.new(report_type, report_data) }
+ let(:validator) { described_class.new(report_type, report_data, report_version) }
describe '#valid?' do
subject { validator.valid? }
@@ -28,6 +72,15 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
it { is_expected.to be_truthy }
end
+
+ context 'when no report_version is provided' do
+ let(:report_version) { nil }
+ let(:report_data) { valid_data }
+
+ it 'does not fail' do
+ expect { subject }.not_to raise_error
+ end
+ end
end
describe '#errors' do
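# Standalone illustration of the filename convention the SUPPORTED_VERSIONS examples above
# rely on: schema files are named "<report-type>-report-format.json", and the report type
# symbol is recovered by swapping dashes for underscores (and back when the name is rebuilt).
filename_regex = /(?<report_type>[-\w]*)-report-format\.json/

file = 'secret-detection-report-format.json'
report_type = filename_regex.match(file)[:report_type].tr('-', '_').to_sym
# => :secret_detection

rebuilt = "#{report_type.to_s.tr('_', '-')}-report-format.json"
# => "secret-detection-report-format.json"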
diff --git a/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
index 27a5abf988c..25e81f6d538 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
@@ -55,31 +55,88 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines do
context 'when the previous pipeline has a child pipeline' do
let(:child_pipeline) { create(:ci_pipeline, child_of: prev_pipeline) }
- context 'when the child pipeline has an interruptible job' do
+ context 'when the child pipeline has interruptible running jobs' do
before do
create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
+ create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
end
- it 'cancels interruptible builds of child pipeline' do
- expect(build_statuses(child_pipeline)).to contain_exactly('running')
+ it 'cancels all child pipeline builds' do
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', 'running')
perform
- expect(build_statuses(child_pipeline)).to contain_exactly('canceled')
+ expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'canceled')
+ end
+
+ context 'when the child pipeline includes completed interruptible jobs' do
+ before do
+ create(:ci_build, :interruptible, :failed, pipeline: child_pipeline)
+ create(:ci_build, :interruptible, :success, pipeline: child_pipeline)
+ end
+
+ it 'cancels all child pipeline builds with a cancelable_status' do
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', 'running', 'failed', 'success')
+
+ perform
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'canceled', 'failed', 'success')
+ end
end
end
- context 'when the child pipeline has not an interruptible job' do
+ context 'when the child pipeline has a started non-interruptible job' do
before do
- create(:ci_build, :running, pipeline: child_pipeline)
+ create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
+ # non-interruptible started
+ create(:ci_build, :success, pipeline: child_pipeline)
end
- it 'does not cancel the build of child pipeline' do
- expect(build_statuses(child_pipeline)).to contain_exactly('running')
+ it 'does not cancel any child pipeline builds' do
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
perform
- expect(build_statuses(child_pipeline)).to contain_exactly('running')
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
+ end
+ end
+
+ context 'when the child pipeline has a non-interruptible, non-started job' do
+ before do
+ create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
+ end
+
+ not_started_statuses = Ci::HasStatus::AVAILABLE_STATUSES - Ci::HasStatus::BUILD_STARTED_RUNNING_STATUSES
+ context 'when the jobs are cancelable' do
+ cancelable_not_started_statuses = Set.new(not_started_statuses).intersection(Ci::HasStatus::CANCELABLE_STATUSES)
+ cancelable_not_started_statuses.each do |status|
+ it "cancels all child pipeline builds when build status #{status} included" do
+ # non-interruptible but non-started
+ create(:ci_build, status.to_sym, pipeline: child_pipeline)
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', status)
+
+ perform
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'canceled')
+ end
+ end
+ end
+
+ context 'when the jobs are not cancelable' do
+ not_cancelable_not_started_statuses = not_started_statuses - Ci::HasStatus::CANCELABLE_STATUSES
+ not_cancelable_not_started_statuses.each do |status|
+ it "does not cancel child pipeline builds when build status #{status} included" do
+ # non-interruptible but non-started
+ create(:ci_build, status.to_sym, pipeline: child_pipeline)
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', status)
+
+ perform
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('canceled', status)
+ end
+ end
end
end
end
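# Rough sketch of the status arithmetic the examples above iterate over. The concrete status
# lists below are illustrative assumptions, not the exact contents of the Ci::HasStatus constants.
require 'set'

available       = %w[created waiting_for_resource preparing pending running success failed canceled skipped manual scheduled]
started_running = %w[running success failed]
cancelable      = %w[created waiting_for_resource preparing pending running scheduled]

not_started = available - started_running
# a non-interruptible job in one of these statuses can still be canceled, so it does not
# protect the child pipeline from cancellation
cancelable_not_started = Set.new(not_started) & Set.new(cancelable)
# a non-interruptible job in one of these statuses cannot be canceled and keeps its status
not_cancelable_not_started = not_started - cancelable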
diff --git a/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb
index 1d020d3ea79..9057c4e99df 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb
@@ -106,21 +106,5 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Create do
expect(job.reload.tag_list).to match_array(%w[tag1 tag2])
end
end
-
- context 'when the feature flag is disabled' do
- before do
- job.tag_list = %w[tag1 tag2]
- stub_feature_flags(ci_bulk_insert_tags: false)
- end
-
- it 'follows the old code path' do
- expect(CommitStatus).not_to receive(:bulk_insert_tags!)
-
- step.perform!
-
- expect(job).to be_persisted
- expect(job.reload.tag_list).to match_array(%w[tag1 tag2])
- end
- end
end
end
diff --git a/spec/lib/gitlab/ci/reports/security/report_spec.rb b/spec/lib/gitlab/ci/reports/security/report_spec.rb
index a8b962ee970..4dc1eca3859 100644
--- a/spec/lib/gitlab/ci/reports/security/report_spec.rb
+++ b/spec/lib/gitlab/ci/reports/security/report_spec.rb
@@ -158,6 +158,16 @@ RSpec.describe Gitlab::Ci::Reports::Security::Report do
end
end
+ describe '#add_warning' do
+ context 'when the message is given' do
+ it 'adds a new warning to report' do
+ expect { report.add_warning('foo', 'bar') }.to change { report.warnings }
+ .from([])
+ .to([{ type: 'foo', message: 'bar' }])
+ end
+ end
+ end
+
describe 'errored?' do
subject { report.errored? }
diff --git a/spec/lib/gitlab/ci/status/build/waiting_for_approval_spec.rb b/spec/lib/gitlab/ci/status/build/waiting_for_approval_spec.rb
index b703a8a47ac..b79b78d911b 100644
--- a/spec/lib/gitlab/ci/status/build/waiting_for_approval_spec.rb
+++ b/spec/lib/gitlab/ci/status/build/waiting_for_approval_spec.rb
@@ -5,22 +5,10 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Status::Build::WaitingForApproval do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+ let_it_be(:build) { create(:ci_build, :manual, environment: 'production', project: project) }
subject { described_class.new(Gitlab::Ci::Status::Core.new(build, user)) }
- describe '#illustration' do
- let(:build) { create(:ci_build, :manual, environment: 'production', project: project) }
-
- before do
- environment = create(:environment, name: 'production', project: project)
- create(:deployment, :blocked, project: project, environment: environment, deployable: build)
- end
-
- it { expect(subject.illustration).to include(:image, :size) }
- it { expect(subject.illustration[:title]).to eq('Waiting for approval') }
- it { expect(subject.illustration[:content]).to include('This job deploys to the protected environment "production"') }
- end
-
describe '.matches?' do
subject { described_class.matches?(build, user) }
@@ -46,4 +34,39 @@ RSpec.describe Gitlab::Ci::Status::Build::WaitingForApproval do
end
end
end
+
+ describe '#illustration' do
+ before do
+ environment = create(:environment, name: 'production', project: project)
+ create(:deployment, :blocked, project: project, environment: environment, deployable: build)
+ end
+
+ it { expect(subject.illustration).to include(:image, :size) }
+ it { expect(subject.illustration[:title]).to eq('Waiting for approval') }
+ it { expect(subject.illustration[:content]).to include('This job deploys to the protected environment "production"') }
+ end
+
+ describe '#has_action?' do
+ it { expect(subject.has_action?).to be_truthy }
+ end
+
+ describe '#action_icon' do
+ it { expect(subject.action_icon).to be_nil }
+ end
+
+ describe '#action_title' do
+ it { expect(subject.action_title).to be_nil }
+ end
+
+ describe '#action_button_title' do
+ it { expect(subject.action_button_title).to eq('Go to environments page to approve or reject') }
+ end
+
+ describe '#action_path' do
+ it { expect(subject.action_path).to include('environments') }
+ end
+
+ describe '#action_method' do
+ it { expect(subject.action_method).to eq(:get) }
+ end
end
diff --git a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
index 6a4be1fa072..78d3982a79f 100644
--- a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
@@ -238,14 +238,34 @@ RSpec.describe 'Auto-DevOps.gitlab-ci.yml' do
end
it_behaves_like 'pipeline with Kubernetes jobs'
+
+ context 'when certificate_based_clusters FF is disabled' do
+ before do
+ stub_feature_flags(certificate_based_clusters: false)
+ end
+
+ it 'does not include production job' do
+ expect(build_names).not_to include('production')
+ end
+ end
end
- context 'when project has an Agent is present' do
+ context 'when project has an Agent' do
before do
create(:cluster_agent, project: project)
end
it_behaves_like 'pipeline with Kubernetes jobs'
+
+ context 'when certificate_based_clusters FF is disabled' do
+ before do
+ stub_feature_flags(certificate_based_clusters: false)
+ end
+
+ it 'includes production job' do
+ expect(build_names).to include('production')
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/variables/builder/group_spec.rb b/spec/lib/gitlab/ci/variables/builder/group_spec.rb
new file mode 100644
index 00000000000..72487588cde
--- /dev/null
+++ b/spec/lib/gitlab/ci/variables/builder/group_spec.rb
@@ -0,0 +1,209 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Variables::Builder::Group do
+ let_it_be(:group) { create(:group) }
+
+ let(:builder) { described_class.new(group) }
+
+ describe '#secret_variables' do
+ let(:environment) { '*' }
+ let(:protected_ref) { false }
+
+ let_it_be(:variable) do
+ create(:ci_group_variable,
+ value: 'secret',
+ group: group)
+ end
+
+ let_it_be(:protected_variable) do
+ create(:ci_group_variable, :protected,
+ value: 'protected',
+ group: group)
+ end
+
+ let(:variable_item) { item(variable) }
+ let(:protected_variable_item) { item(protected_variable) }
+
+ subject do
+ builder.secret_variables(
+ environment: environment,
+ protected_ref: protected_ref)
+ end
+
+ context 'when the ref is not protected' do
+ let(:protected_ref) { false }
+
+ it 'contains only the CI variables' do
+ is_expected.to contain_exactly(variable_item)
+ end
+ end
+
+ context 'when the ref is protected' do
+ let(:protected_ref) { true }
+
+ it 'contains all the variables' do
+ is_expected.to contain_exactly(variable_item, protected_variable_item)
+ end
+ end
+
+ context 'when environment name is specified' do
+ let(:environment) { 'review/name' }
+
+ before do
+ Ci::GroupVariable.update_all(environment_scope: environment_scope)
+ end
+
+ context 'when environment scope is exactly matched' do
+ let(:environment_scope) { 'review/name' }
+
+ it { is_expected.to contain_exactly(variable_item) }
+ end
+
+ context 'when environment scope is matched by wildcard' do
+ let(:environment_scope) { 'review/*' }
+
+ it { is_expected.to contain_exactly(variable_item) }
+ end
+
+ context 'when environment scope does not match' do
+ let(:environment_scope) { 'review/*/special' }
+
+ it { is_expected.not_to contain_exactly(variable_item) }
+ end
+
+ context 'when environment scope has _' do
+ let(:environment_scope) { '*_*' }
+
+ it 'does not treat it as wildcard' do
+ is_expected.not_to contain_exactly(variable_item)
+ end
+ end
+
+ context 'when environment name contains underscore' do
+ let(:environment) { 'foo_bar/test' }
+ let(:environment_scope) { 'foo_bar/*' }
+
+ it 'matches literally for _' do
+ is_expected.to contain_exactly(variable_item)
+ end
+ end
+
+ # The environment name and scope cannot have % at the moment,
+ # but we're considering relaxing it and we should also make sure
+ # it doesn't break in case some data sneaked in somehow as we're
+ # not checking this integrity in database level.
+ context 'when environment scope has %' do
+ let(:environment_scope) { '*%*' }
+
+ it 'does not treat it as wildcard' do
+ is_expected.not_to contain_exactly(variable_item)
+ end
+ end
+
+ context 'when environment name contains a percent' do
+ let(:environment) { 'foo%bar/test' }
+ let(:environment_scope) { 'foo%bar/*' }
+
+ it 'matches literally for %' do
+ is_expected.to contain_exactly(variable_item)
+ end
+ end
+ end
+
+ context 'when variables with the same name have different environment scopes' do
+ let(:environment) { 'review/name' }
+
+ let_it_be(:partially_matched_variable) do
+ create(:ci_group_variable,
+ key: variable.key,
+ value: 'partial',
+ environment_scope: 'review/*',
+ group: group)
+ end
+
+ let_it_be(:perfectly_matched_variable) do
+ create(:ci_group_variable,
+ key: variable.key,
+ value: 'perfect',
+ environment_scope: 'review/name',
+ group: group)
+ end
+
+ it 'orders the variables from least to most matched' do
+ variables_collection = Gitlab::Ci::Variables::Collection.new([
+ variable,
+ partially_matched_variable,
+ perfectly_matched_variable
+ ]).to_runner_variables
+
+ expect(subject.to_runner_variables).to eq(variables_collection)
+ end
+ end
+
+ context 'when group has children' do
+ let(:protected_ref) { true }
+
+ let_it_be(:group_child_1) { create(:group, parent: group) }
+ let_it_be(:group_child_2) { create(:group, parent: group_child_1) }
+
+ let_it_be_with_reload(:group_child_3) do
+ create(:group, parent: group_child_2)
+ end
+
+ let_it_be(:variable_child_1) do
+ create(:ci_group_variable, group: group_child_1)
+ end
+
+ let_it_be(:variable_child_2) do
+ create(:ci_group_variable, group: group_child_2)
+ end
+
+ let_it_be(:variable_child_3) do
+ create(:ci_group_variable, group: group_child_3)
+ end
+
+ context 'traversal queries' do
+ shared_examples 'correct ancestor order' do
+ let(:builder) { described_class.new(group_child_3) }
+
+ it 'returns all variables belonging to the group and parent groups' do
+ expected_array1 = Gitlab::Ci::Variables::Collection.new(
+ [protected_variable_item, variable_item])
+ .to_runner_variables
+
+ expected_array2 = Gitlab::Ci::Variables::Collection.new(
+ [variable_child_1, variable_child_2, variable_child_3]
+ ).to_runner_variables
+
+ got_array = subject.to_runner_variables
+
+ expect(got_array.shift(2)).to contain_exactly(*expected_array1)
+ expect(got_array).to eq(expected_array2)
+ end
+ end
+
+ context 'recursive' do
+ before do
+ stub_feature_flags(use_traversal_ids: false)
+ end
+
+ include_examples 'correct ancestor order'
+ end
+
+ context 'linear' do
+ before do
+ stub_feature_flags(use_traversal_ids: true)
+ end
+
+ include_examples 'correct ancestor order'
+ end
+ end
+ end
+ end
+
+ def item(variable)
+ Gitlab::Ci::Variables::Collection::Item.fabricate(variable)
+ end
+end
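# Minimal sketch of the environment-scope matching rule these examples pin down, under the
# assumption that it can be expressed as "only '*' is a wildcard; every other character,
# including '_' and '%', matches literally". Illustrative only, not the production matcher.
def scope_matches?(environment_scope, environment_name)
  pattern = environment_scope.split('*', -1).map { |part| Regexp.escape(part) }.join('.*')
  /\A#{pattern}\z/.match?(environment_name)
end

scope_matches?('review/name', 'review/name')      # => true  (exact match)
scope_matches?('review/*', 'review/name')         # => true  (wildcard)
scope_matches?('review/*/special', 'review/name') # => false
scope_matches?('*_*', 'review/name')               # => false ('_' is literal)
scope_matches?('foo_bar/*', 'foo_bar/test')        # => true
scope_matches?('*%*', 'review/name')               # => false ('%' is literal)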
diff --git a/spec/lib/gitlab/ci/variables/builder_spec.rb b/spec/lib/gitlab/ci/variables/builder_spec.rb
index 6e144d62ac0..8552a06eab3 100644
--- a/spec/lib/gitlab/ci/variables/builder_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder_spec.rb
@@ -158,7 +158,6 @@ RSpec.describe Gitlab::Ci::Variables::Builder do
allow(pipeline).to receive(:predefined_variables) { [var('C', 3), var('D', 3)] }
allow(job).to receive(:runner) { double(predefined_variables: [var('D', 4), var('E', 4)]) }
allow(builder).to receive(:kubernetes_variables) { [var('E', 5), var('F', 5)] }
- allow(builder).to receive(:deployment_variables) { [var('F', 6), var('G', 6)] }
allow(job).to receive(:yaml_variables) { [var('G', 7), var('H', 7)] }
allow(builder).to receive(:user_variables) { [var('H', 8), var('I', 8)] }
allow(job).to receive(:dependency_variables) { [var('I', 9), var('J', 9)] }
@@ -177,7 +176,6 @@ RSpec.describe Gitlab::Ci::Variables::Builder do
var('C', 3), var('D', 3),
var('D', 4), var('E', 4),
var('E', 5), var('F', 5),
- var('F', 6), var('G', 6),
var('G', 7), var('H', 7),
var('H', 8), var('I', 8),
var('I', 9), var('J', 9),
@@ -193,7 +191,7 @@ RSpec.describe Gitlab::Ci::Variables::Builder do
expect(subject.to_hash).to match(
'A' => '1', 'B' => '2',
'C' => '3', 'D' => '4',
- 'E' => '5', 'F' => '6',
+ 'E' => '5', 'F' => '5',
'G' => '7', 'H' => '8',
'I' => '9', 'J' => '10',
'K' => '11', 'L' => '12',
@@ -231,7 +229,7 @@ RSpec.describe Gitlab::Ci::Variables::Builder do
let(:template) { double(to_yaml: 'example-kubeconfig', valid?: template_valid) }
let(:template_valid) { true }
- subject { builder.kubernetes_variables(job) }
+ subject { builder.kubernetes_variables(environment: nil, job: job) }
before do
allow(Ci::GenerateKubeconfigService).to receive(:new).with(job).and_return(service)
@@ -244,6 +242,16 @@ RSpec.describe Gitlab::Ci::Variables::Builder do
it { is_expected.not_to include(key: 'KUBECONFIG', value: 'example-kubeconfig', public: false, file: true) }
end
+
+ it 'includes #deployment_variables and merges the KUBECONFIG values', :aggregate_failures do
+ expect(builder).to receive(:deployment_variables).and_return([
+ { key: 'KUBECONFIG', value: 'deployment-kubeconfig' },
+ { key: 'OTHER', value: 'some value' }
+ ])
+ expect(template).to receive(:merge_yaml).with('deployment-kubeconfig')
+ expect(subject['KUBECONFIG'].value).to eq('example-kubeconfig')
+ expect(subject['OTHER'].value).to eq('some value')
+ end
end
describe '#deployment_variables' do
@@ -342,10 +350,88 @@ RSpec.describe Gitlab::Ci::Variables::Builder do
let_it_be(:protected_variable) { create(:ci_group_variable, protected: true, group: group) }
let_it_be(:unprotected_variable) { create(:ci_group_variable, protected: false, group: group) }
- let(:protected_variable_item) { protected_variable }
- let(:unprotected_variable_item) { unprotected_variable }
+ context 'with ci_variables_builder_memoize_secret_variables disabled' do
+ before do
+ stub_feature_flags(ci_variables_builder_memoize_secret_variables: false)
+ end
+
+ let(:protected_variable_item) { protected_variable }
+ let(:unprotected_variable_item) { unprotected_variable }
- include_examples "secret CI variables"
+ include_examples "secret CI variables"
+ end
+
+ context 'with ci_variables_builder_memoize_secret_variables enabled' do
+ before do
+ stub_feature_flags(ci_variables_builder_memoize_secret_variables: true)
+ end
+
+ let(:protected_variable_item) { Gitlab::Ci::Variables::Collection::Item.fabricate(protected_variable) }
+ let(:unprotected_variable_item) { Gitlab::Ci::Variables::Collection::Item.fabricate(unprotected_variable) }
+
+ include_examples "secret CI variables"
+
+ context 'variables memoization' do
+ let_it_be(:scoped_variable) { create(:ci_group_variable, group: group, environment_scope: 'scoped') }
+
+ let(:ref) { job.git_ref }
+ let(:environment) { job.expanded_environment_name }
+ let(:scoped_variable_item) { Gitlab::Ci::Variables::Collection::Item.fabricate(scoped_variable) }
+
+ context 'with protected environments' do
+ it 'memoizes the result by environment' do
+ expect(pipeline.project)
+ .to receive(:protected_for?)
+ .with(pipeline.jobs_git_ref)
+ .once.and_return(true)
+
+ expect_next_instance_of(described_class::Group) do |group_variables_builder|
+ expect(group_variables_builder)
+ .to receive(:secret_variables)
+ .with(environment: 'production', protected_ref: true)
+ .once
+ .and_call_original
+ end
+
+ 2.times do
+ expect(builder.secret_group_variables(ref: ref, environment: 'production'))
+ .to contain_exactly(unprotected_variable_item, protected_variable_item)
+ end
+ end
+ end
+
+ context 'with unprotected environments' do
+ it 'memoizes the result by environment' do
+ expect(pipeline.project)
+ .to receive(:protected_for?)
+ .with(pipeline.jobs_git_ref)
+ .once.and_return(false)
+
+ expect_next_instance_of(described_class::Group) do |group_variables_builder|
+ expect(group_variables_builder)
+ .to receive(:secret_variables)
+ .with(environment: nil, protected_ref: false)
+ .once
+ .and_call_original
+
+ expect(group_variables_builder)
+ .to receive(:secret_variables)
+ .with(environment: 'scoped', protected_ref: false)
+ .once
+ .and_call_original
+ end
+
+ 2.times do
+ expect(builder.secret_group_variables(ref: 'other', environment: nil))
+ .to contain_exactly(unprotected_variable_item)
+
+ expect(builder.secret_group_variables(ref: 'other', environment: 'scoped'))
+ .to contain_exactly(unprotected_variable_item, scoped_variable_item)
+ end
+ end
+ end
+ end
+ end
end
describe '#secret_project_variables' do
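# Sketch of the memoization behaviour asserted above: repeated calls for the same environment
# reuse the previously computed collection instead of asking the group builder again. Method
# and instance-variable names here are assumptions for illustration, not the production code.
def secret_group_variables(ref:, environment:)
  @secret_group_variables ||= {}
  @secret_group_variables[environment] ||=
    group_variables_builder.secret_variables(
      environment: environment,
      protected_ref: protected_ref?(ref) # hypothetical helper
    )
end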
diff --git a/spec/lib/gitlab/ci/yaml_processor/dag_spec.rb b/spec/lib/gitlab/ci/yaml_processor/dag_spec.rb
index af1b43f6b01..f815f56543c 100644
--- a/spec/lib/gitlab/ci/yaml_processor/dag_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor/dag_spec.rb
@@ -27,15 +27,13 @@ RSpec.describe Gitlab::Ci::YamlProcessor::Dag do
end
end
- context 'when there is a missing job' do
+ context 'when there are some missing jobs' do
let(:nodes) do
- { 'job_a' => %w(job_d), 'job_b' => %w(job_a) }
+ { 'job_a' => %w(job_d job_f), 'job_b' => %w(job_a job_c job_e) }
end
- it 'raises MissingNodeError' do
- expect { result }.to raise_error(
- Gitlab::Ci::YamlProcessor::Dag::MissingNodeError, 'node job_d is missing'
- )
+ it 'ignores the missing ones and returns in a valid order' do
+ expect(result).to eq(%w(job_d job_f job_a job_c job_e job_b))
end
end
end
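# Standalone sketch of the new behaviour: dependencies that are not defined as jobs are
# treated as leaf nodes instead of raising MissingNodeError, so the topological sort still
# succeeds and yields them first. This mirrors the ordering asserted in the spec, not the
# exact implementation.
require 'tsort'

class JobDag
  include TSort

  def initialize(nodes)
    @nodes = nodes
  end

  def tsort_each_node(&block)
    @nodes.each_key(&block)
  end

  def tsort_each_child(node, &block)
    # a job that was never defined has no dependencies of its own
    (@nodes[node] || []).each(&block)
  end
end

JobDag.new('job_a' => %w[job_d job_f], 'job_b' => %w[job_a job_c job_e]).tsort
# => ["job_d", "job_f", "job_a", "job_c", "job_e", "job_b"]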
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index 5f46607b042..ebb5c91ebad 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -9,10 +9,6 @@ module Gitlab
subject { described_class.new(config, user: nil).execute }
- before do
- stub_feature_flags(allow_unsafe_ruby_regexp: false)
- end
-
shared_examples 'returns errors' do |error_message|
it 'adds a message when an error is encountered' do
expect(subject.errors).to include(error_message)
@@ -329,6 +325,40 @@ module Gitlab
end
end
end
+
+ describe 'bridge job' do
+ let(:config) do
+ YAML.dump(rspec: {
+ trigger: {
+ project: 'namespace/project',
+ branch: 'main'
+ }
+ })
+ end
+
+ it 'has the attributes' do
+ expect(subject[:options]).to eq(
+ trigger: { project: 'namespace/project', branch: 'main' }
+ )
+ end
+
+ context 'with forward' do
+ let(:config) do
+ YAML.dump(rspec: {
+ trigger: {
+ project: 'namespace/project',
+ forward: { pipeline_variables: true }
+ }
+ })
+ end
+
+ it 'has the attributes' do
+ expect(subject[:options]).to eq(
+ trigger: { project: 'namespace/project', forward: { pipeline_variables: true } }
+ )
+ end
+ end
+ end
end
describe '#stages_attributes' do
diff --git a/spec/lib/gitlab/color_spec.rb b/spec/lib/gitlab/color_spec.rb
new file mode 100644
index 00000000000..8b16e13fa4d
--- /dev/null
+++ b/spec/lib/gitlab/color_spec.rb
@@ -0,0 +1,132 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Color do
+ describe ".of" do
+ described_class::Constants::COLOR_NAME_TO_HEX.each do |name, value|
+ it "parses #{name} to #{value}" do
+ expect(described_class.of(name)).to eq(value)
+ end
+ end
+
+ it 'parses hex literals as colors' do
+ expect(described_class.of('#fff')).to eq(described_class.new('#fff'))
+ expect(described_class.of('#fefefe')).to eq(described_class.new('#fefefe'))
+ end
+
+ it 'raises if the input is nil' do
+ expect { described_class.of(nil) }.to raise_error(ArgumentError)
+ end
+
+ it 'returns an invalid color if the input is not valid' do
+ expect(described_class.of('unknown color')).not_to be_valid
+ end
+ end
+
+ describe '#new' do
+ it 'handles nil values' do
+ expect(described_class.new(nil)).to eq(described_class.new(nil))
+ end
+
+ it 'strips input' do
+ expect(described_class.new(' abc ')).to eq(described_class.new('abc'))
+ end
+ end
+
+ describe '#valid?' do
+ described_class::Constants::COLOR_NAME_TO_HEX.each_key do |name|
+ specify "#{name} is a valid color" do
+ expect(described_class.of(name)).to be_valid
+ end
+ end
+
+ specify '#fff is a valid color' do
+ expect(described_class.new('#fff')).to be_valid
+ end
+
+ specify '#ffffff is a valid color' do
+ expect(described_class.new('#ffffff')).to be_valid
+ end
+
+ specify '#ABCDEF is a valid color' do
+ expect(described_class.new('#ABCDEF')).to be_valid
+ end
+
+ specify '#123456 is a valid color' do
+ expect(described_class.new('#123456')).to be_valid
+ end
+
+ specify '#1234567 is not a valid color' do
+ expect(described_class.new('#1234567')).not_to be_valid
+ end
+
+ specify 'fff is not a valid color' do
+ expect(described_class.new('fff')).not_to be_valid
+ end
+
+ specify '#deadbeaf is not a valid color' do
+ expect(described_class.new('#deadbeaf')).not_to be_valid
+ end
+
+ specify '#a1b2c3 is a valid color' do
+ expect(described_class.new('#a1b2c3')).to be_valid
+ end
+
+ specify 'nil is not a valid color' do
+ expect(described_class.new(nil)).not_to be_valid
+ end
+ end
+
+ describe '#light?' do
+ specify '#fff is light' do
+ expect(described_class.new('#fff')).to be_light
+ end
+
+ specify '#a7a7a7 is light' do
+ expect(described_class.new('#a7a7a7')).to be_light
+ end
+
+ specify '#a6a7a7 is dark' do
+ expect(described_class.new('#a6a7a7')).not_to be_light
+ end
+
+ specify '#000 is dark' do
+ expect(described_class.new('#000')).not_to be_light
+ end
+
+ specify 'invalid colors are not light' do
+ expect(described_class.new('not-a-color')).not_to be_light
+ end
+ end
+
+ describe '#contrast' do
+ context 'with light colors' do
+ it 'is dark' do
+ %w[#fff #fefefe #a7a7a7].each do |hex|
+ expect(described_class.new(hex)).to have_attributes(
+ contrast: described_class::Constants::DARK,
+ luminosity: :light
+ )
+ end
+ end
+ end
+
+ context 'with dark colors' do
+ it 'is light' do
+ %w[#000 #a6a7a7].each do |hex|
+ expect(described_class.new(hex)).to have_attributes(
+ contrast: described_class::Constants::LIGHT,
+ luminosity: :dark
+ )
+ end
+ end
+ end
+ end
+
+ describe 'as_json' do
+ it 'serializes correctly' do
+ expect(described_class.new('#f0f1f2').as_json).to eq('#f0f1f2')
+ end
+ end
+end
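# Back-of-the-envelope check of the light/dark boundary in the examples above: one simple
# rule consistent with them is "a colour is light when the sum of its RGB components exceeds
# 500". The threshold is an assumption made to illustrate the spec data, not necessarily the
# production formula.
def light?(hex)
  hex = hex.delete_prefix('#')
  hex = hex.chars.map { |c| c * 2 }.join if hex.length == 3 # expand '#fff' to 'ffffff'
  r, g, b = hex.scan(/../).map { |component| component.to_i(16) }
  (r + g + b) > 500
end

light?('#fff')    # => true  (255 * 3 = 765)
light?('#a7a7a7') # => true  (167 * 3 = 501)
light?('#a6a7a7') # => false (166 + 167 + 167 = 500)
light?('#000')    # => false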
diff --git a/spec/lib/gitlab/config/entry/validators_spec.rb b/spec/lib/gitlab/config/entry/validators_spec.rb
new file mode 100644
index 00000000000..cbc09aac586
--- /dev/null
+++ b/spec/lib/gitlab/config/entry/validators_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Config::Entry::Validators do
+ let(:klass) do
+ Class.new do
+ include ActiveModel::Validations
+ include Gitlab::Config::Entry::Validators
+ end
+ end
+
+ let(:instance) { klass.new }
+
+ describe described_class::MutuallyExclusiveKeysValidator do
+ using RSpec::Parameterized::TableSyntax
+
+ before do
+ klass.instance_eval do
+ validates :config, mutually_exclusive_keys: [:foo, :bar]
+ end
+
+ allow(instance).to receive(:config).and_return(config)
+ end
+
+ where(:context, :config, :valid_result) do
+ 'with mutually exclusive keys' | { foo: 1, bar: 2 } | false
+ 'without mutually exclusive keys' | { foo: 1 } | true
+ 'without mutually exclusive keys' | { bar: 1 } | true
+ 'with other keys' | { foo: 1, baz: 2 } | true
+ end
+
+ with_them do
+ it 'validates the instance' do
+ expect(instance.valid?).to be(valid_result)
+
+ unless valid_result
+ expect(instance.errors.messages_for(:config)).to include /please use only one the following keys: foo, bar/
+ end
+ end
+ end
+ end
+end
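# Minimal sketch of a validator that satisfies the table above: the config is flagged only
# when more than one of the mutually exclusive keys is present. The real validator lives in
# Gitlab::Config::Entry::Validators; this only illustrates the contract the spec asserts
# (the message text mirrors the expectation above verbatim).
require 'active_model'

class MutuallyExclusiveKeysValidator < ActiveModel::EachValidator
  def validate_each(record, attribute, value)
    # `validates :config, mutually_exclusive_keys: [:foo, :bar]` arrives as options[:in]
    present_keys = options[:in] & value.keys

    if present_keys.size > 1
      record.errors.add(attribute, "please use only one the following keys: #{options[:in].join(', ')}")
    end
  end
end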
diff --git a/spec/lib/gitlab/current_settings_spec.rb b/spec/lib/gitlab/current_settings_spec.rb
index 73540a9b0f3..fda3b07eb82 100644
--- a/spec/lib/gitlab/current_settings_spec.rb
+++ b/spec/lib/gitlab/current_settings_spec.rb
@@ -179,6 +179,21 @@ RSpec.describe Gitlab::CurrentSettings do
expect(settings).to have_attributes(settings_from_defaults)
end
+ context 'when we hit a recursive loop' do
+ before do
+ expect(ApplicationSetting).to receive(:create_from_defaults) do
+ raise ApplicationSetting::Recursion
+ end
+ end
+
+ it 'recovers and returns in-memory settings' do
+ settings = described_class.current_application_settings
+
+ expect(settings).to be_a(ApplicationSetting)
+ expect(settings).not_to be_persisted
+ end
+ end
+
context 'when ApplicationSettings does not have a primary key' do
before do
allow(ApplicationSetting.connection).to receive(:primary_key).with('application_settings').and_return(nil)
diff --git a/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb b/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb
index eb16a8ccfa5..9ba3dad72b3 100644
--- a/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb
@@ -16,45 +16,29 @@ RSpec.describe Gitlab::Database::AsyncIndexes::MigrationHelpers do
describe '#unprepare_async_index' do
let!(:async_index) { create(:postgres_async_index, name: index_name) }
- context 'when the flag is enabled' do
- before do
- stub_feature_flags(database_async_index_creation: true)
- end
+ it 'destroys the record' do
+ expect do
+ migration.unprepare_async_index(table_name, 'id')
+ end.to change { index_model.where(name: index_name).count }.by(-1)
+ end
+
+ context 'when an explicit name is given' do
+ let(:index_name) { 'my_test_async_index' }
it 'destroys the record' do
expect do
- migration.unprepare_async_index(table_name, 'id')
+ migration.unprepare_async_index(table_name, 'id', name: index_name)
end.to change { index_model.where(name: index_name).count }.by(-1)
end
-
- context 'when an explicit name is given' do
- let(:index_name) { 'my_test_async_index' }
-
- it 'destroys the record' do
- expect do
- migration.unprepare_async_index(table_name, 'id', name: index_name)
- end.to change { index_model.where(name: index_name).count }.by(-1)
- end
- end
-
- context 'when the async index table does not exist' do
- it 'does not raise an error' do
- connection.drop_table(:postgres_async_indexes)
-
- expect(index_model).not_to receive(:find_by)
-
- expect { migration.unprepare_async_index(table_name, 'id') }.not_to raise_error
- end
- end
end
- context 'when the feature flag is disabled' do
- it 'does not destroy the record' do
- stub_feature_flags(database_async_index_creation: false)
+ context 'when the async index table does not exist' do
+ it 'does not raise an error' do
+ connection.drop_table(:postgres_async_indexes)
- expect do
- migration.unprepare_async_index(table_name, 'id')
- end.not_to change { index_model.where(name: index_name).count }
+ expect(index_model).not_to receive(:find_by)
+
+ expect { migration.unprepare_async_index(table_name, 'id') }.not_to raise_error
end
end
end
@@ -63,35 +47,19 @@ RSpec.describe Gitlab::Database::AsyncIndexes::MigrationHelpers do
let(:index_name) { "index_#{table_name}_on_id" }
let!(:async_index) { create(:postgres_async_index, name: index_name) }
- context 'when the flag is enabled' do
- before do
- stub_feature_flags(database_async_index_creation: true)
- end
-
- it 'destroys the record' do
- expect do
- migration.unprepare_async_index_by_name(table_name, index_name)
- end.to change { index_model.where(name: index_name).count }.by(-1)
- end
-
- context 'when the async index table does not exist' do
- it 'does not raise an error' do
- connection.drop_table(:postgres_async_indexes)
-
- expect(index_model).not_to receive(:find_by)
-
- expect { migration.unprepare_async_index_by_name(table_name, index_name) }.not_to raise_error
- end
- end
+ it 'destroys the record' do
+ expect do
+ migration.unprepare_async_index_by_name(table_name, index_name)
+ end.to change { index_model.where(name: index_name).count }.by(-1)
end
- context 'when the feature flag is disabled' do
- it 'does not destroy the record' do
- stub_feature_flags(database_async_index_creation: false)
+ context 'when the async index table does not exist' do
+ it 'does not raise an error' do
+ connection.drop_table(:postgres_async_indexes)
- expect do
- migration.unprepare_async_index_by_name(table_name, index_name)
- end.not_to change { index_model.where(name: index_name).count }
+ expect(index_model).not_to receive(:find_by)
+
+ expect { migration.unprepare_async_index_by_name(table_name, index_name) }.not_to raise_error
end
end
end
@@ -101,14 +69,23 @@ RSpec.describe Gitlab::Database::AsyncIndexes::MigrationHelpers do
connection.create_table(table_name)
end
- context 'when the feature flag is enabled' do
- before do
- stub_feature_flags(database_async_index_creation: true)
- end
+ it 'creates the record for the async index' do
+ expect do
+ migration.prepare_async_index(table_name, 'id')
+ end.to change { index_model.where(name: index_name).count }.by(1)
+
+ record = index_model.find_by(name: index_name)
- it 'creates the record for the async index' do
+ expect(record.table_name).to eq(table_name)
+ expect(record.definition).to match(/CREATE INDEX CONCURRENTLY "#{index_name}"/)
+ end
+
+ context 'when an explicit name is given' do
+ let(:index_name) { 'my_async_index_name' }
+
+ it 'creates the record with the given name' do
expect do
- migration.prepare_async_index(table_name, 'id')
+ migration.prepare_async_index(table_name, 'id', name: index_name)
end.to change { index_model.where(name: index_name).count }.by(1)
record = index_model.find_by(name: index_name)
@@ -116,77 +93,52 @@ RSpec.describe Gitlab::Database::AsyncIndexes::MigrationHelpers do
expect(record.table_name).to eq(table_name)
expect(record.definition).to match(/CREATE INDEX CONCURRENTLY "#{index_name}"/)
end
+ end
- context 'when an explicit name is given' do
- let(:index_name) { 'my_async_index_name' }
-
- it 'creates the record with the given name' do
- expect do
- migration.prepare_async_index(table_name, 'id', name: index_name)
- end.to change { index_model.where(name: index_name).count }.by(1)
-
- record = index_model.find_by(name: index_name)
+ context 'when the index already exists' do
+ it 'does not create the record' do
+ connection.add_index(table_name, 'id', name: index_name)
- expect(record.table_name).to eq(table_name)
- expect(record.definition).to match(/CREATE INDEX CONCURRENTLY "#{index_name}"/)
- end
+ expect do
+ migration.prepare_async_index(table_name, 'id')
+ end.not_to change { index_model.where(name: index_name).count }
end
+ end
- context 'when the index already exists' do
- it 'does not create the record' do
- connection.add_index(table_name, 'id', name: index_name)
+ context 'when the record already exists' do
+ it 'does not attempt to create the record' do
+ create(:postgres_async_index, table_name: table_name, name: index_name)
- expect do
- migration.prepare_async_index(table_name, 'id')
- end.not_to change { index_model.where(name: index_name).count }
- end
+ expect do
+ migration.prepare_async_index(table_name, 'id')
+ end.not_to change { index_model.where(name: index_name).count }
end
- context 'when the record already exists' do
- it 'does attempt to create the record' do
- create(:postgres_async_index, table_name: table_name, name: index_name)
-
- expect do
- migration.prepare_async_index(table_name, 'id')
- end.not_to change { index_model.where(name: index_name).count }
- end
-
- it 'updates definition if changed' do
- index = create(:postgres_async_index, table_name: table_name, name: index_name, definition: '...')
-
- expect do
- migration.prepare_async_index(table_name, 'id', name: index_name)
- end.to change { index.reload.definition }
- end
+ it 'updates definition if changed' do
+ index = create(:postgres_async_index, table_name: table_name, name: index_name, definition: '...')
- it 'does not update definition if not changed' do
- definition = "CREATE INDEX CONCURRENTLY \"index_#{table_name}_on_id\" ON \"#{table_name}\" (\"id\")"
- index = create(:postgres_async_index, table_name: table_name, name: index_name, definition: definition)
-
- expect do
- migration.prepare_async_index(table_name, 'id', name: index_name)
- end.not_to change { index.reload.updated_at }
- end
+ expect do
+ migration.prepare_async_index(table_name, 'id', name: index_name)
+ end.to change { index.reload.definition }
end
- context 'when the async index table does not exist' do
- it 'does not raise an error' do
- connection.drop_table(:postgres_async_indexes)
-
- expect(index_model).not_to receive(:safe_find_or_create_by!)
+ it 'does not update definition if not changed' do
+ definition = "CREATE INDEX CONCURRENTLY \"index_#{table_name}_on_id\" ON \"#{table_name}\" (\"id\")"
+ index = create(:postgres_async_index, table_name: table_name, name: index_name, definition: definition)
- expect { migration.prepare_async_index(table_name, 'id') }.not_to raise_error
- end
+ expect do
+ migration.prepare_async_index(table_name, 'id', name: index_name)
+ end.not_to change { index.reload.updated_at }
end
end
- context 'when the feature flag is disabled' do
- it 'does not create the record' do
- stub_feature_flags(database_async_index_creation: false)
+ context 'when the async index table does not exist' do
+ it 'does not raise an error' do
+ connection.drop_table(:postgres_async_indexes)
- expect do
- migration.prepare_async_index(table_name, 'id')
- end.not_to change { index_model.where(name: index_name).count }
+ expect(index_model).not_to receive(:safe_find_or_create_by!)
+
+ expect { migration.prepare_async_index(table_name, 'id') }.not_to raise_error
end
end
end
diff --git a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
index 7338ea657b9..8c663ff9f8a 100644
--- a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
@@ -5,6 +5,10 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model do
it_behaves_like 'having unique enum values'
+ it { is_expected.to be_a Gitlab::Database::SharedModel }
+
+ it { expect(described_class::TIMEOUT_EXCEPTIONS).to match_array [ActiveRecord::StatementTimeout, ActiveRecord::ConnectionTimeoutError, ActiveRecord::AdapterTimeout, ActiveRecord::LockWaitTimeout] }
+
describe 'associations' do
it { is_expected.to belong_to(:batched_migration).with_foreign_key(:batched_background_migration_id) }
it { is_expected.to have_many(:batched_job_transition_logs).with_foreign_key(:batched_background_migration_job_id) }
@@ -13,6 +17,8 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
describe 'state machine' do
let_it_be(:job) { create(:batched_background_migration_job, :failed) }
+ it { expect(described_class.state_machine.states.map(&:name)).to eql(%i(pending running failed succeeded)) }
+
context 'when a job is running' do
it 'logs the transition' do
expect(Gitlab::AppLogger).to receive(:info).with( { batched_job_id: job.id, message: 'BatchedJob transition', new_state: :running, previous_state: :failed } )
@@ -45,6 +51,51 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
end
end
+ context 'when a job fails the number of max times' do
+ let(:max_times) { described_class::MAX_ATTEMPTS }
+ let!(:job) { create(:batched_background_migration_job, :running, batch_size: 10, min_value: 6, max_value: 15, attempts: max_times) }
+
+ context 'when job can be split' do
+ let(:exception) { ActiveRecord::StatementTimeout.new('Timeout!') }
+
+ before do
+ allow_next_instance_of(Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy) do |batch_class|
+ allow(batch_class).to receive(:next_batch).and_return([6, 10])
+ end
+ end
+
+ it 'splits the job into two retriable jobs' do
+ expect { job.failure!(error: exception) }.to change { job.batched_migration.batched_jobs.retriable.count }.from(0).to(2)
+ end
+ end
+
+ context 'when the job cannot be split' do
+ let(:exception) { ActiveRecord::StatementTimeout.new('Timeout!') }
+ let(:max_times) { described_class::MAX_ATTEMPTS }
+ let!(:job) { create(:batched_background_migration_job, :running, batch_size: 50, sub_batch_size: 20, min_value: 6, max_value: 15, attempts: max_times) }
+ let(:error_message) { 'Job cannot be split further' }
+ let(:split_and_retry_exception) { Gitlab::Database::BackgroundMigration::SplitAndRetryError.new(error_message) }
+
+ before do
+ allow(job).to receive(:split_and_retry!).and_raise(split_and_retry_exception)
+ end
+
+ it 'does not split the job' do
+ expect { job.failure!(error: exception) }.not_to change { job.batched_migration.batched_jobs.retriable.count }
+ end
+
+ it 'keeps the same job attributes' do
+ expect { job.failure!(error: exception) }.not_to change { job }
+ end
+
+ it 'logs the error' do
+ expect(Gitlab::AppLogger).to receive(:error).with( { message: error_message, batched_job_id: job.id } )
+
+ job.failure!(error: exception)
+ end
+ end
+ end
+
context 'when a job fails' do
let(:job) { create(:batched_background_migration_job, :running) }
@@ -145,6 +196,49 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
end
end
+ describe '#can_split?' do
+ subject { job.can_split?(exception) }
+
+ context 'when the number of attempts is greater than the limit and the batch_size is greater than the sub_batch_size' do
+ let(:job) { create(:batched_background_migration_job, :failed, batch_size: 4, sub_batch_size: 2, attempts: described_class::MAX_ATTEMPTS + 1) }
+
+ context 'when is a timeout exception' do
+ let(:exception) { ActiveRecord::StatementTimeout.new }
+
+ it { expect(subject).to be_truthy }
+ end
+
+ context 'when is not a timeout exception' do
+ let(:exception) { RuntimeError.new }
+
+ it { expect(subject).to be_falsey }
+ end
+ end
+
+ context 'when the number of attempts is lower than the limit and the batch_size is greater than the sub_batch_size' do
+ let(:job) { create(:batched_background_migration_job, :failed, batch_size: 4, sub_batch_size: 2, attempts: described_class::MAX_ATTEMPTS - 1) }
+
+ context 'when is a timeout exception' do
+ let(:exception) { ActiveRecord::StatementTimeout.new }
+
+ it { expect(subject).to be_falsey }
+ end
+
+ context 'when is not a timeout exception' do
+ let(:exception) { RuntimeError.new }
+
+ it { expect(subject).to be_falsey }
+ end
+ end
+
+ context 'when the batch_size is lower than the sub_batch_size' do
+ let(:job) { create(:batched_background_migration_job, :failed, batch_size: 2, sub_batch_size: 4) }
+ let(:exception) { ActiveRecord::StatementTimeout.new }
+
+ it { expect(subject).to be_falsey }
+ end
+ end
+
describe '#time_efficiency' do
subject { job.time_efficiency }
@@ -197,15 +291,17 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
end
describe '#split_and_retry!' do
- let!(:job) { create(:batched_background_migration_job, :failed, batch_size: 10, min_value: 6, max_value: 15, attempts: 3) }
+ let_it_be(:migration) { create(:batched_background_migration, table_name: :events) }
+ let_it_be(:job) { create(:batched_background_migration_job, :failed, batched_migration: migration, batch_size: 10, min_value: 6, max_value: 15, attempts: 3) }
+ let_it_be(:project) { create(:project) }
- context 'when job can be split' do
- before do
- allow_next_instance_of(Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy) do |batch_class|
- allow(batch_class).to receive(:next_batch).with(anything, anything, batch_min_value: 6, batch_size: 5).and_return([6, 10])
- end
+ before_all do
+ (6..16).each do |id|
+ create(:event, id: id, project: project)
end
+ end
+ context 'when job can be split' do
it 'sets the correct attributes' do
expect { job.split_and_retry! }.to change { described_class.count }.by(1)
@@ -261,9 +357,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
context 'when computed midpoint is larger than the max value of the batch' do
before do
- allow_next_instance_of(Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy) do |batch_class|
- allow(batch_class).to receive(:next_batch).with(anything, anything, batch_min_value: 6, batch_size: 5).and_return([6, 16])
- end
+ Event.where(id: 6..12).delete_all
end
it 'lowers the batch size and resets the number of attempts' do
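# Condensed sketch of the #can_split? rule these examples pin down: a failed job is split
# only when it has exhausted its attempts on a timeout-class error and its batch is still
# larger than the sub-batch size. The timeout list matches TIMEOUT_EXCEPTIONS above;
# MAX_ATTEMPTS, the struct and the exact comparison are assumptions for illustration.
require 'active_record'

TIMEOUT_EXCEPTIONS = [
  ActiveRecord::StatementTimeout, ActiveRecord::ConnectionTimeoutError,
  ActiveRecord::AdapterTimeout, ActiveRecord::LockWaitTimeout
].freeze
MAX_ATTEMPTS = 3 # assumed value

DemoBatchedJob = Struct.new(:attempts, :batch_size, :sub_batch_size, keyword_init: true) do
  def can_split?(exception)
    attempts >= MAX_ATTEMPTS &&
      TIMEOUT_EXCEPTIONS.any? { |klass| exception.is_a?(klass) } &&
      batch_size > sub_batch_size
  end
end

job = DemoBatchedJob.new(attempts: 4, batch_size: 4, sub_batch_size: 2)
job.can_split?(ActiveRecord::StatementTimeout.new) # => true
job.can_split?(RuntimeError.new)                   # => false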
diff --git a/spec/lib/gitlab/database/background_migration/batched_job_transition_log_spec.rb b/spec/lib/gitlab/database/background_migration/batched_job_transition_log_spec.rb
index c42a0fc5e05..59f4f40c0ef 100644
--- a/spec/lib/gitlab/database/background_migration/batched_job_transition_log_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_job_transition_log_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJobTransitionLog, type: :model do
+ it { is_expected.to be_a Gitlab::Database::SharedModel }
+
describe 'associations' do
it { is_expected.to belong_to(:batched_job).with_foreign_key(:batched_background_migration_job_id) }
end
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
index bb2c6b9a3ae..124d204cb62 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
@@ -428,4 +428,27 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
end
end
end
+
+ describe '.finalize' do
+ context 'when the connection is passed' do
+ let(:connection) { double('connection') }
+
+ let(:table_name) { :_test_batched_migrations_test_table }
+ let(:column_name) { :some_id }
+ let(:job_arguments) { [:some, :other, :arguments] }
+ let(:batched_migration) { create(:batched_background_migration, table_name: table_name, column_name: column_name) }
+
+ it 'initializes the object with the given connection' do
+ expect(described_class).to receive(:new).with(connection: connection).and_call_original
+
+ described_class.finalize(
+ batched_migration.job_class_name,
+ table_name,
+ column_name,
+ job_arguments,
+ connection: connection
+ )
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
index ea4ba4dd137..803123e8e34 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
@@ -5,6 +5,8 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :model do
it_behaves_like 'having unique enum values'
+ it { is_expected.to be_a Gitlab::Database::SharedModel }
+
describe 'associations' do
it { is_expected.to have_many(:batched_jobs).with_foreign_key(:batched_background_migration_id) }
@@ -272,7 +274,13 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
before do
allow_next_instance_of(Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy) do |batch_class|
- allow(batch_class).to receive(:next_batch).with(anything, anything, batch_min_value: 6, batch_size: 5).and_return([6, 10])
+ allow(batch_class).to receive(:next_batch).with(
+ anything,
+ anything,
+ batch_min_value: 6,
+ batch_size: 5,
+ job_arguments: batched_migration.job_arguments
+ ).and_return([6, 10])
end
end
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
index 4f5536d8771..d6c984c7adb 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
@@ -193,6 +193,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
it_behaves_like 'an error is raised', RuntimeError.new('Something broke!')
it_behaves_like 'an error is raised', SignalException.new('SIGTERM')
+ it_behaves_like 'an error is raised', ActiveRecord::StatementTimeout.new('Timeout!')
end
context 'when the batched background migration does not inherit from BaseJob' do
diff --git a/spec/lib/gitlab/database/each_database_spec.rb b/spec/lib/gitlab/database/each_database_spec.rb
index d526b3bc1ac..d46c1ca8681 100644
--- a/spec/lib/gitlab/database/each_database_spec.rb
+++ b/spec/lib/gitlab/database/each_database_spec.rb
@@ -3,13 +3,13 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::EachDatabase do
- describe '.each_database_connection' do
+ describe '.each_database_connection', :add_ci_connection do
before do
allow(Gitlab::Database).to receive(:database_base_models)
.and_return({ main: ActiveRecord::Base, ci: Ci::ApplicationRecord }.with_indifferent_access)
end
- it 'yields each connection after connecting SharedModel', :add_ci_connection do
+ it 'yields each connection after connecting SharedModel' do
expect(Gitlab::Database::SharedModel).to receive(:using_connection)
.with(ActiveRecord::Base.connection).ordered.and_yield
@@ -22,6 +22,42 @@ RSpec.describe Gitlab::Database::EachDatabase do
[Ci::ApplicationRecord.connection, 'ci']
)
end
+
+ context 'when only certain databases are selected' do
+ it 'yields the selected connections after connecting SharedModel' do
+ expect(Gitlab::Database::SharedModel).to receive(:using_connection)
+ .with(Ci::ApplicationRecord.connection).ordered.and_yield
+
+ expect { |b| described_class.each_database_connection(only: 'ci', &b) }
+ .to yield_successive_args([Ci::ApplicationRecord.connection, 'ci'])
+ end
+
+ context 'when the selected names are passed as symbols' do
+ it 'yields the selected connections after connecting SharedModel' do
+ expect(Gitlab::Database::SharedModel).to receive(:using_connection)
+ .with(Ci::ApplicationRecord.connection).ordered.and_yield
+
+ expect { |b| described_class.each_database_connection(only: :ci, &b) }
+ .to yield_successive_args([Ci::ApplicationRecord.connection, 'ci'])
+ end
+ end
+
+ context 'when the selected names are invalid' do
+ it 'does not yield any connections' do
+ expect do |b|
+ described_class.each_database_connection(only: :notvalid, &b)
+ rescue ArgumentError => e
+ expect(e.message).to match(/notvalid is not a valid database name/)
+ end.not_to yield_control
+ end
+
+ it 'raises an error' do
+ expect do
+ described_class.each_database_connection(only: :notvalid) {}
+ end.to raise_error(ArgumentError, /notvalid is not a valid database name/)
+ end
+ end
+ end
end
describe '.each_model_connection' do
@@ -69,8 +105,8 @@ RSpec.describe Gitlab::Database::EachDatabase do
allow(main_model).to receive(:connection).and_return(main_connection)
allow(ci_model).to receive(:connection).and_return(ci_connection)
- allow(main_connection).to receive_message_chain('pool.db_config.name').and_return('main')
- allow(ci_connection).to receive_message_chain('pool.db_config.name').and_return('ci')
+ allow(main_model).to receive_message_chain('connection_db_config.name').and_return('main')
+ allow(ci_model).to receive_message_chain('connection_db_config.name').and_return('ci')
end
it 'yields each model after connecting SharedModel' do
@@ -81,10 +117,44 @@ RSpec.describe Gitlab::Database::EachDatabase do
end
end
- def expect_yielded_models(models_to_iterate, expected_values)
+ context 'when the database connections are limited by the only_on option' do
+ let(:shared_model) { Class.new(Gitlab::Database::SharedModel) }
+ let(:main_model) { Class.new(ActiveRecord::Base) }
+ let(:ci_model) { Class.new(Ci::ApplicationRecord) }
+
+ before do
+ allow(Gitlab::Database).to receive(:database_base_models)
+ .and_return({ main: ActiveRecord::Base, ci: Ci::ApplicationRecord }.with_indifferent_access)
+
+ allow(main_model).to receive_message_chain('connection_db_config.name').and_return('main')
+ allow(ci_model).to receive_message_chain('connection_db_config.name').and_return('ci')
+ end
+
+ context 'when a single name is passed in' do
+ it 'yields models only connected to the given database' do
+ expect_yielded_models([main_model, ci_model, shared_model], [
+ { model: ci_model, connection: Ci::ApplicationRecord.connection, name: 'ci' },
+ { model: shared_model, connection: Ci::ApplicationRecord.connection, name: 'ci' }
+ ], only_on: 'ci')
+ end
+ end
+
+ context 'when a list of names are passed in' do
+ it 'yields models only connected to the given databases' do
+ expect_yielded_models([main_model, ci_model, shared_model], [
+ { model: main_model, connection: ActiveRecord::Base.connection, name: 'main' },
+ { model: ci_model, connection: Ci::ApplicationRecord.connection, name: 'ci' },
+ { model: shared_model, connection: ActiveRecord::Base.connection, name: 'main' },
+ { model: shared_model, connection: Ci::ApplicationRecord.connection, name: 'ci' }
+ ], only_on: %i[main ci])
+ end
+ end
+ end
+
+ def expect_yielded_models(models_to_iterate, expected_values, only_on: nil)
times_yielded = 0
- described_class.each_model_connection(models_to_iterate) do |model, name|
+ described_class.each_model_connection(models_to_iterate, only_on: only_on) do |model, name|
expected = expected_values[times_yielded]
expect(model).to be(expected[:model])
diff --git a/spec/lib/gitlab/database/load_balancing/configuration_spec.rb b/spec/lib/gitlab/database/load_balancing/configuration_spec.rb
index e87c9c20707..77284b4d128 100644
--- a/spec/lib/gitlab/database/load_balancing/configuration_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/configuration_spec.rb
@@ -7,13 +7,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::Configuration, :request_store do
let(:db_config) { ActiveRecord::DatabaseConfigurations::HashConfig.new('test', 'ci', configuration_hash) }
let(:model) { double(:model, connection_db_config: db_config) }
- before do
- # It's confusing to think about these specs with this enabled by default so
- # we make it disabled by default and just write the specific spec for when
- # it's enabled
- stub_feature_flags(force_no_sharing_primary_model: false)
- end
-
describe '.for_model' do
context 'when load balancing is not configured' do
it 'uses the default settings' do
diff --git a/spec/lib/gitlab/database/load_balancing/setup_spec.rb b/spec/lib/gitlab/database/load_balancing/setup_spec.rb
index 20519a759b2..4d565ce137a 100644
--- a/spec/lib/gitlab/database/load_balancing/setup_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/setup_spec.rb
@@ -274,6 +274,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::Setup do
end
before do
+ allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
+
# Rewrite `class_attribute` to use rspec mocking and prevent modifying the objects
allow_next_instance_of(described_class) do |setup|
allow(setup).to receive(:configure_connection)
diff --git a/spec/lib/gitlab/database/load_balancing_spec.rb b/spec/lib/gitlab/database/load_balancing_spec.rb
index 45878b2e266..f320fe0276f 100644
--- a/spec/lib/gitlab/database/load_balancing_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing_spec.rb
@@ -92,6 +92,18 @@ RSpec.describe Gitlab::Database::LoadBalancing do
end
end
+ context 'when an invalid connection is used' do
+ it 'returns :unknown' do
+ expect(described_class.db_role_for_connection(:invalid)).to eq(:unknown)
+ end
+ end
+
+ context 'when a null connection is used' do
+ it 'returns :unknown' do
+ expect(described_class.db_role_for_connection(nil)).to eq(:unknown)
+ end
+ end
+
context 'when a read connection is used' do
it 'returns :replica' do
load_balancer.read do |connection|
diff --git a/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb b/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb
new file mode 100644
index 00000000000..ad9a3a6e257
--- /dev/null
+++ b/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb
@@ -0,0 +1,561 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_analyzers: false, stub_feature_flags: false do
+ let(:schema_class) { Class.new(Gitlab::Database::Migration[1.0]).include(described_class) }
+
+ describe '#restrict_gitlab_migration' do
+    it 'raises an exception for an invalid schema' do
+ expect { schema_class.restrict_gitlab_migration gitlab_schema: :gitlab_non_exisiting }
+ .to raise_error /Unknown 'gitlab_schema:/
+ end
+
+ it 'does configure allowed_gitlab_schema' do
+ schema_class.restrict_gitlab_migration gitlab_schema: :gitlab_main
+
+ expect(schema_class.allowed_gitlab_schemas).to eq(%i[gitlab_main])
+ end
+ end
+
+ context 'when executing migrations' do
+ using RSpec::Parameterized::TableSyntax
+
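+    # Each entry below defines a migration body, a matcher for the query it issues, and the
+    # expected outcome per database (main/ci) for every restrict_gitlab_migration setting.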
+ where do
+ {
+ "does create table in gitlab_main and gitlab_ci" => {
+ migration: ->(klass) do
+ def change
+ create_table :_test_table do |t|
+ t.references :project, foreign_key: true, null: false
+ t.timestamps_with_timezone null: false
+ end
+ end
+ end,
+ query_matcher: /CREATE TABLE "_test_table"/,
+ expected: {
+ no_gitlab_schema: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :ddl_not_allowed,
+ ci: :ddl_not_allowed
+ },
+ gitlab_schema_gitlab_main: {
+ main: :ddl_not_allowed,
+ ci: :skipped
+ }
+ }
+ },
+ "does add column to projects in gitlab_main and gitlab_ci" => {
+ migration: ->(klass) do
+ def change
+ add_column :projects, :__test_column, :integer
+ end
+ end,
+ query_matcher: /ALTER TABLE "projects" ADD "__test_column" integer/,
+ expected: {
+ no_gitlab_schema: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :ddl_not_allowed,
+ ci: :ddl_not_allowed
+ },
+ gitlab_schema_gitlab_main: {
+ main: :ddl_not_allowed,
+ ci: :skipped
+ }
+ }
+ },
+ "does add column to ci_builds in gitlab_main and gitlab_ci" => {
+ migration: ->(klass) do
+ def change
+ add_column :ci_builds, :__test_column, :integer
+ end
+ end,
+ query_matcher: /ALTER TABLE "ci_builds" ADD "__test_column" integer/,
+ expected: {
+ no_gitlab_schema: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :ddl_not_allowed,
+ ci: :ddl_not_allowed
+ },
+ gitlab_schema_gitlab_main: {
+ main: :ddl_not_allowed,
+ ci: :skipped
+ }
+ }
+ },
+ "does add index to projects in gitlab_main and gitlab_ci" => {
+ migration: ->(klass) do
+ def change
+              # Due to running in a transaction we cannot use `add_concurrent_index`
+ add_index :projects, :hidden
+ end
+ end,
+ query_matcher: /CREATE INDEX/,
+ expected: {
+ no_gitlab_schema: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :ddl_not_allowed,
+ ci: :ddl_not_allowed
+ },
+ gitlab_schema_gitlab_main: {
+ main: :ddl_not_allowed,
+ ci: :skipped
+ }
+ }
+ },
+ "does add index to ci_builds in gitlab_main and gitlab_ci" => {
+ migration: ->(klass) do
+ def change
+              # Due to running in a transaction we cannot use `add_concurrent_index`
+ add_index :ci_builds, :tag, where: "type = 'Ci::Build'", name: 'index_ci_builds_on_tag_and_type_eq_ci_build'
+ end
+ end,
+ query_matcher: /CREATE INDEX/,
+ expected: {
+ no_gitlab_schema: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :ddl_not_allowed,
+ ci: :ddl_not_allowed
+ },
+ gitlab_schema_gitlab_main: {
+ main: :ddl_not_allowed,
+ ci: :skipped
+ }
+ }
+ },
+ "does create trigger in gitlab_main and gitlab_ci" => {
+ migration: ->(klass) do
+ include Gitlab::Database::SchemaHelpers
+
+ def up
+ create_trigger_function('_test_trigger_function', replace: true) do
+ <<~SQL
+ RETURN NULL;
+ SQL
+ end
+ end
+
+ def down
+ drop_function('_test_trigger_function')
+ end
+ end,
+ query_matcher: /CREATE OR REPLACE FUNCTION/,
+ expected: {
+ no_gitlab_schema: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :ddl_not_allowed,
+ ci: :ddl_not_allowed
+ },
+ gitlab_schema_gitlab_main: {
+ main: :ddl_not_allowed,
+ ci: :skipped
+ }
+ }
+ },
+ "does create schema in gitlab_main and gitlab_ci" => {
+ migration: ->(klass) do
+ include Gitlab::Database::SchemaHelpers
+
+ def up
+ execute("create schema __test_schema")
+ end
+
+ def down
+ end
+ end,
+ query_matcher: /create schema __test_schema/,
+ expected: {
+ no_gitlab_schema: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_main: {
+ # This is not properly detected today since there are no helpers
+ # available to consider this as a DDL type of change
+ main: :success,
+ ci: :skipped
+ }
+ }
+ },
+ "does attach loose foreign key trigger in gitlab_main and gitlab_ci" => {
+ migration: ->(klass) do
+ include Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers
+
+ enable_lock_retries!
+
+ def up
+ track_record_deletions(:audit_events)
+ end
+
+ def down
+ untrack_record_deletions(:audit_events)
+ end
+ end,
+ query_matcher: /CREATE TRIGGER/,
+ expected: {
+ no_gitlab_schema: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :ddl_not_allowed,
+ ci: :ddl_not_allowed
+ },
+ gitlab_schema_gitlab_main: {
+ main: :ddl_not_allowed,
+ ci: :skipped
+ }
+ }
+ },
+ "does insert into software_licenses" => {
+ migration: ->(klass) do
+ def up
+ software_license_class.create!(name: 'aaa')
+ end
+
+ def down
+ software_license_class.where(name: 'aaa').delete_all
+ end
+
+ def software_license_class
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'software_licenses'
+ end
+ end
+ end,
+ query_matcher: /INSERT INTO "software_licenses"/,
+ expected: {
+ no_gitlab_schema: {
+ main: :dml_not_allowed,
+ ci: :dml_not_allowed
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :dml_access_denied,
+ ci: :dml_access_denied
+ },
+ gitlab_schema_gitlab_main: {
+ main: :success,
+ ci: :skipped
+ }
+ }
+ },
+ "does raise exception when accessing tables outside of gitlab_main" => {
+ migration: ->(klass) do
+ def up
+ ci_instance_variables_class.create!(variable_type: 1, key: 'aaa')
+ end
+
+ def down
+ ci_instance_variables_class.delete_all
+ end
+
+ def ci_instance_variables_class
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'ci_instance_variables'
+ end
+ end
+ end,
+ query_matcher: /INSERT INTO "ci_instance_variables"/,
+ expected: {
+ no_gitlab_schema: {
+ main: :dml_not_allowed,
+ ci: :dml_not_allowed
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :dml_access_denied,
+ ci: :dml_access_denied
+ },
+ gitlab_schema_gitlab_main: {
+ main: :dml_access_denied,
+ ci: :skipped
+ }
+ }
+ },
+ "does allow modifying gitlab_shared" => {
+ migration: ->(klass) do
+ def up
+ detached_partitions_class.create!(drop_after: Time.current, table_name: '_test_table')
+ end
+
+ def down
+ end
+
+ def detached_partitions_class
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'detached_partitions'
+ end
+ end
+ end,
+ query_matcher: /INSERT INTO "detached_partitions"/,
+ expected: {
+ no_gitlab_schema: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_main: {
+              # TBD: This allows selectively modifying shared tables in the context of a specific DB only
+ main: :success,
+ ci: :skipped
+ }
+ }
+ },
+ "does update data in batches of gitlab_main, but skips gitlab_ci" => {
+ migration: ->(klass) do
+ def up
+ update_column_in_batches(:projects, :archived, true) do |table, query|
+ query.where(table[:archived].eq(false)) # rubocop:disable CodeReuse/ActiveRecord
+ end
+ end
+
+ def down
+ # no-op
+ end
+ end,
+ query_matcher: /FROM "projects"/,
+ expected: {
+ no_gitlab_schema: {
+ main: :dml_not_allowed,
+ ci: :dml_not_allowed
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :dml_access_denied,
+ ci: :dml_access_denied
+ },
+ gitlab_schema_gitlab_main: {
+ main: :success,
+ ci: :skipped
+ }
+ }
+ },
+ "does not allow executing mixed DDL and DML migrations" => {
+ migration: ->(klass) do
+ def up
+ execute('UPDATE projects SET hidden=false')
+ add_index(:projects, :hidden, name: 'test_index')
+ end
+
+ def down
+ # no-op
+ end
+ end,
+ expected: {
+ no_gitlab_schema: {
+ main: :dml_not_allowed,
+ ci: :dml_not_allowed
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :dml_access_denied,
+ ci: :dml_access_denied
+ },
+ gitlab_schema_gitlab_main: {
+ main: :ddl_not_allowed,
+ ci: :skipped
+ }
+ }
+ },
+ "does schedule background migrations on gitlab_main" => {
+ migration: ->(klass) do
+ def up
+ queue_background_migration_jobs_by_range_at_intervals(
+ define_batchable_model('vulnerability_occurrences'),
+ 'RemoveDuplicateVulnerabilitiesFindings',
+ 2.minutes.to_i,
+ batch_size: 5_000
+ )
+ end
+
+ def down
+ # no-op
+ end
+ end,
+ query_matcher: /FROM "vulnerability_occurrences"/,
+ expected: {
+ no_gitlab_schema: {
+ main: :dml_not_allowed,
+ ci: :dml_not_allowed
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :dml_access_denied,
+ ci: :dml_access_denied
+ },
+ gitlab_schema_gitlab_main: {
+ main: :success,
+ ci: :skipped
+ }
+ }
+ },
+ "does support prepare_async_index" => {
+ migration: ->(klass) do
+ def up
+ prepare_async_index :projects, :hidden,
+ name: :index_projects_on_hidden
+ end
+
+ def down
+ unprepare_async_index_by_name :projects, :index_projects_on_hidden
+ end
+ end,
+ query_matcher: /INSERT INTO "postgres_async_indexes"/,
+ expected: {
+ no_gitlab_schema: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :dml_not_allowed,
+ ci: :dml_not_allowed
+ },
+ gitlab_schema_gitlab_main: {
+ main: :dml_not_allowed,
+ ci: :skipped
+ }
+ }
+ },
+ "does raise exception when accessing current settings" => {
+ migration: ->(klass) do
+ def up
+ ApplicationSetting.last
+ end
+
+ def down
+ end
+ end,
+ query_matcher: /FROM "application_settings"/,
+ expected: {
+ no_gitlab_schema: {
+ main: :dml_not_allowed,
+ ci: :dml_not_allowed
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :dml_access_denied,
+ ci: :dml_access_denied
+ },
+ gitlab_schema_gitlab_main: {
+ main: :success,
+ ci: :skipped
+ }
+ }
+ },
+ "does raise exception when accessing feature flags" => {
+ migration: ->(klass) do
+ def up
+ Feature.enabled?(:redis_hll_tracking, type: :ops, default_enabled: :yaml)
+ end
+
+ def down
+ end
+ end,
+ query_matcher: /FROM "features"/,
+ expected: {
+ no_gitlab_schema: {
+ main: :dml_not_allowed,
+ ci: :dml_not_allowed
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :dml_access_denied,
+ ci: :dml_access_denied
+ },
+ gitlab_schema_gitlab_main: {
+ main: :success,
+ ci: :skipped
+ }
+ }
+ }
+ }
+ end
+
+ with_them do
+ let(:migration_class) { Class.new(schema_class, &migration) }
+
+ Gitlab::Database.database_base_models.each do |db_config_name, model|
+ context "for db_config_name=#{db_config_name}" do
+ around do |example|
+ with_reestablished_active_record_base do
+ reconfigure_db_connection(model: ActiveRecord::Base, config_model: model)
+
+ example.run
+ end
+ end
+
+ before do
+ allow_next_instance_of(migration_class) do |migration|
+ allow(migration).to receive(:transaction_open?).and_return(false)
+ end
+ end
+
+ %i[no_gitlab_schema gitlab_schema_gitlab_main gitlab_schema_gitlab_shared].each do |restrict_gitlab_migration|
+ context "while restrict_gitlab_migration=#{restrict_gitlab_migration}" do
+ it "does run migrate :up and :down" do
+ expected_result = expected.fetch(restrict_gitlab_migration)[db_config_name.to_sym]
+ skip "not configured" unless expected_result
+
+ case restrict_gitlab_migration
+ when :no_gitlab_schema
+ # no-op
+ when :gitlab_schema_gitlab_main
+ migration_class.restrict_gitlab_migration gitlab_schema: :gitlab_main
+ when :gitlab_schema_gitlab_shared
+ migration_class.restrict_gitlab_migration gitlab_schema: :gitlab_shared
+ end
+
+                  # In some cases (for :down) we ignore the error and expect no other errors
+ case expected_result
+ when :success
+ expect { migration_class.migrate(:up) }.to make_queries_matching(query_matcher)
+ expect { migration_class.migrate(:down) }.not_to make_queries_matching(query_matcher)
+
+ when :dml_not_allowed
+ expect { migration_class.migrate(:up) }.to raise_error(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas::DMLNotAllowedError)
+ expect { ignore_error(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas::DMLNotAllowedError) { migration_class.migrate(:down) } }.not_to raise_error
+
+ when :dml_access_denied
+ expect { migration_class.migrate(:up) }.to raise_error(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas::DMLAccessDeniedError)
+ expect { ignore_error(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas::DMLAccessDeniedError) { migration_class.migrate(:down) } }.not_to raise_error
+
+ when :ddl_not_allowed
+ expect { migration_class.migrate(:up) }.to raise_error(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas::DDLNotAllowedError)
+ expect { ignore_error(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas::DDLNotAllowedError) { migration_class.migrate(:down) } }.not_to raise_error
+
+ when :skipped
+ expect { migration_class.migrate(:up) }.to raise_error(Gitlab::Database::MigrationHelpers::RestrictGitlabSchema::MigrationSkippedError)
+ expect { migration_class.migrate(:down) }.to raise_error(Gitlab::Database::MigrationHelpers::RestrictGitlabSchema::MigrationSkippedError)
+ end
+ end
+ end
+ end
+
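+        # Runs the given block, swallowing only the specified error class.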
+ def ignore_error(error)
+ yield
+ rescue error
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
index 96dc3a0fc28..e64f5807385 100644
--- a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
@@ -164,11 +164,19 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
end
end
- context "when the primary_column_name is not an integer" do
+ context 'when the primary_column_name is a string' do
+ it 'does not raise error' do
+ expect do
+ model.queue_background_migration_jobs_by_range_at_intervals(ContainerExpirationPolicy, 'FooJob', 10.minutes, primary_column_name: :name_regex)
+ end.not_to raise_error
+ end
+ end
+
+ context "when the primary_column_name is not an integer or a string" do
it 'raises error' do
expect do
model.queue_background_migration_jobs_by_range_at_intervals(ContainerExpirationPolicy, 'FooJob', 10.minutes, primary_column_name: :enabled)
- end.to raise_error(StandardError, /is not an integer column/)
+ end.to raise_error(StandardError, /is not an integer or string column/)
end
end
diff --git a/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb b/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb
index a757cac0a2a..35e4cef6da5 100644
--- a/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb
+++ b/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Gitlab::Database::Migrations::Observers::QueryDetails do
let(:query) { "select date_trunc('day', $1::timestamptz) + $2 * (interval '1 hour')" }
let(:query_binds) { [Time.current, 3] }
let(:directory_path) { Dir.mktmpdir }
- let(:log_file) { "#{directory_path}/#{migration_version}_#{migration_name}-query-details.json" }
+ let(:log_file) { "#{directory_path}/query-details.json" }
let(:query_details) { Gitlab::Json.parse(File.read(log_file)) }
let(:migration_version) { 20210422152437 }
let(:migration_name) { 'test' }
diff --git a/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb b/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb
index eb66972e5ab..34678b77a0f 100644
--- a/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb
+++ b/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Gitlab::Database::Migrations::Observers::QueryLog do
it 'writes a file with the query log' do
observe
- expect(File.read("#{directory_path}/#{migration_version}_#{migration_name}.log")).to include(query)
+ expect(File.read("#{directory_path}/migration.log")).to include(query)
end
it 'does not change the default logger' do
diff --git a/spec/lib/gitlab/database/migrations/observers/transaction_duration_spec.rb b/spec/lib/gitlab/database/migrations/observers/transaction_duration_spec.rb
index f433e25b2ba..51b19e7f2da 100644
--- a/spec/lib/gitlab/database/migrations/observers/transaction_duration_spec.rb
+++ b/spec/lib/gitlab/database/migrations/observers/transaction_duration_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::Database::Migrations::Observers::TransactionDuration do
let(:connection) { ActiveRecord::Migration.connection }
let(:observation) { Gitlab::Database::Migrations::Observation.new(version: migration_version, name: migration_name) }
let(:directory_path) { Dir.mktmpdir }
- let(:log_file) { "#{directory_path}/#{migration_version}_#{migration_name}-transaction-duration.json" }
+ let(:log_file) { "#{directory_path}/transaction-duration.json" }
let(:transaction_duration) { Gitlab::Json.parse(File.read(log_file)) }
let(:migration_version) { 20210422152437 }
let(:migration_name) { 'test' }
diff --git a/spec/lib/gitlab/database/migrations/runner_spec.rb b/spec/lib/gitlab/database/migrations/runner_spec.rb
index 7dc965c84fa..84482e6b450 100644
--- a/spec/lib/gitlab/database/migrations/runner_spec.rb
+++ b/spec/lib/gitlab/database/migrations/runner_spec.rb
@@ -79,6 +79,15 @@ RSpec.describe Gitlab::Database::Migrations::Runner do
expect(migration_runs.map(&:dir)).to match_array([:up, :up])
expect(migration_runs.map(&:version_to_migrate)).to eq(pending_migrations.map(&:version))
end
+
+ it 'writes a metadata file with the current schema version' do
+ up.run
+
+ metadata_file = result_dir.join('up', described_class::METADATA_FILENAME)
+ expect(metadata_file.exist?).to be_truthy
+ metadata = Gitlab::Json.parse(File.read(metadata_file))
+ expect(metadata).to match('version' => described_class::SCHEMA_VERSION)
+ end
end
end
@@ -105,5 +114,14 @@ RSpec.describe Gitlab::Database::Migrations::Runner do
expect(migration_runs.map(&:version_to_migrate)).to eq(applied_migrations_this_branch.reverse.map(&:version))
end
end
+
+ it 'writes a metadata file with the current schema version' do
+ down.run
+
+ metadata_file = result_dir.join('down', described_class::METADATA_FILENAME)
+ expect(metadata_file.exist?).to be_truthy
+ metadata = Gitlab::Json.parse(File.read(metadata_file))
+ expect(metadata).to match('version' => described_class::SCHEMA_VERSION)
+ end
end
end
diff --git a/spec/lib/gitlab/database/migrations/test_background_runner_spec.rb b/spec/lib/gitlab/database/migrations/test_background_runner_spec.rb
new file mode 100644
index 00000000000..c6fe88a7c2d
--- /dev/null
+++ b/spec/lib/gitlab/database/migrations/test_background_runner_spec.rb
@@ -0,0 +1,120 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Migrations::TestBackgroundRunner, :redis do
+ include Gitlab::Database::Migrations::BackgroundMigrationHelpers
+
+ # In order to test the interaction between queueing sidekiq jobs and seeing those jobs in queues,
+ # we need to disable sidekiq's testing mode and actually send our jobs to redis
+ around do |ex|
+ Sidekiq::Testing.disable! { ex.run }
+ end
+
+ context 'without jobs to run' do
+ it 'returns immediately' do
+ runner = described_class.new
+ expect(runner).not_to receive(:run_job)
+      runner.run_jobs(for_duration: 1.second)
+ end
+ end
+
+ context 'with jobs to run' do
+ let(:migration_name) { 'TestBackgroundMigration' }
+
+ before do
+ (1..5).each do |i|
+ migrate_in(i.minutes, migration_name, [i])
+ end
+ end
+
+ context 'finding pending background jobs' do
+ it 'finds all the migrations' do
+ expect(described_class.new.traditional_background_migrations.to_a.size).to eq(5)
+ end
+ end
+
+ context 'running migrations', :freeze_time do
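+      # Defines a temporary background migration class (stubbed under Gitlab::BackgroundMigration)
+      # whose #perform delegates to the given block.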
+ def define_background_migration(name)
+ klass = Class.new do
+ # Can't simply def perform here as we won't have access to the block,
+ # similarly can't define_method(:perform, &block) here as it would change the block receiver
+ define_method(:perform) { |*args| yield(*args) }
+ end
+ stub_const("Gitlab::BackgroundMigration::#{name}", klass)
+ klass
+ end
+
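+      # Expects each migration class to be instantiated and to perform the given number of times.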
+ def expect_migration_call_counts(migrations_to_calls)
+ migrations_to_calls.each do |migration, calls|
+ expect_next_instances_of(migration, calls) do |m|
+ expect(m).to receive(:perform).and_call_original
+ end
+ end
+ end
+
+ it 'runs the migration class correctly' do
+ calls = []
+ define_background_migration(migration_name) do |i|
+ calls << i
+ end
+ described_class.new.run_jobs(for_duration: 1.second) # Any time would work here as we do not advance time
+ expect(calls).to contain_exactly(1, 2, 3, 4, 5)
+ end
+
+ it 'runs the migration for a uniform amount of time' do
+ migration = define_background_migration(migration_name) do |i|
+ travel(1.minute)
+ end
+
+ expect_migration_call_counts(migration => 3)
+
+ described_class.new.run_jobs(for_duration: 3.minutes)
+ end
+
+ context 'with multiple migrations to run' do
+ let(:other_migration_name) { 'OtherBackgroundMigration' }
+
+ before do
+ (1..5).each do |i|
+ migrate_in(i.minutes, other_migration_name, [i])
+ end
+ end
+
+ it 'splits the time between migrations when all migrations use all their time' do
+ migration = define_background_migration(migration_name) do |i|
+ travel(1.minute)
+ end
+
+ other_migration = define_background_migration(other_migration_name) do |i|
+ travel(2.minutes)
+ end
+
+ expect_migration_call_counts(
+ migration => 2, # 1 minute jobs for 90 seconds, can finish the first and start the second
+ other_migration => 1 # 2 minute jobs for 90 seconds, past deadline after a single job
+ )
+
+ described_class.new.run_jobs(for_duration: 3.minutes)
+ end
+
+ it 'does not give leftover time to extra migrations' do
+ # This is currently implemented this way for simplicity, but it could make sense to change this behavior.
+
+ migration = define_background_migration(migration_name) do
+ travel(1.second)
+ end
+ other_migration = define_background_migration(other_migration_name) do
+ travel(1.minute)
+ end
+ expect_migration_call_counts(
+ migration => 5,
+ other_migration => 2
+ )
+
+ described_class.new.run_jobs(for_duration: 3.minutes)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/partitioning_spec.rb b/spec/lib/gitlab/database/partitioning_spec.rb
index 154cc2b7972..7c69f639aab 100644
--- a/spec/lib/gitlab/database/partitioning_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_spec.rb
@@ -109,6 +109,20 @@ RSpec.describe Gitlab::Database::Partitioning do
.and change { find_partitions(table_names.last).size }.from(0)
end
end
+
+ context 'when only a specific database is requested' do
+ before do
+ allow(models.first).to receive_message_chain('connection_db_config.name').and_return('main')
+ allow(models.last).to receive_message_chain('connection_db_config.name').and_return('ci')
+ end
+
+ it 'manages partitions for models for the given database', :aggregate_failures do
+ expect { described_class.sync_partitions(models, only_on: 'ci') }
+ .to change { find_partitions(table_names.last).size }.from(0)
+
+ expect(find_partitions(table_names.first).size).to eq(0)
+ end
+ end
end
describe '.report_metrics' do
diff --git a/spec/lib/gitlab/database/query_analyzer_spec.rb b/spec/lib/gitlab/database/query_analyzer_spec.rb
index 34c72893c53..3b4cbc79de2 100644
--- a/spec/lib/gitlab/database/query_analyzer_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzer_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::QueryAnalyzer, query_analyzers: false do
let(:analyzer) { double(:query_analyzer) }
+ let(:user_analyzer) { double(:query_analyzer) }
let(:disabled_analyzer) { double(:disabled_query_analyzer) }
before do
@@ -53,6 +54,10 @@ RSpec.describe Gitlab::Database::QueryAnalyzer, query_analyzers: false do
expect { |b| described_class.instance.within(&b) }.to yield_control
end
+
+ it 'raises exception when trying to re-define analyzers' do
+ expect { |b| described_class.instance.within([user_analyzer], &b) }.to raise_error /Query analyzers are already defined, cannot re-define them/
+ end
end
context 'when initializer is enabled' do
@@ -75,6 +80,18 @@ RSpec.describe Gitlab::Database::QueryAnalyzer, query_analyzers: false do
expect { |b| described_class.instance.within(&b) }.to yield_control
end
end
+
+ context 'when user analyzers are used' do
+ it 'calls begin! and end!' do
+ expect(analyzer).not_to receive(:begin!)
+ allow(user_analyzer).to receive(:enabled?).and_return(true)
+ allow(user_analyzer).to receive(:suppressed?).and_return(false)
+ expect(user_analyzer).to receive(:begin!)
+ expect(user_analyzer).to receive(:end!)
+
+ expect { |b| described_class.instance.within([user_analyzer], &b) }.to yield_control
+ end
+ end
end
describe '#process_sql' do
diff --git a/spec/lib/gitlab/database/query_analyzers/restrict_allowed_schemas_spec.rb b/spec/lib/gitlab/database/query_analyzers/restrict_allowed_schemas_spec.rb
new file mode 100644
index 00000000000..a2c7916fa01
--- /dev/null
+++ b/spec/lib/gitlab/database/query_analyzers/restrict_allowed_schemas_spec.rb
@@ -0,0 +1,161 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas, query_analyzers: false do
+ let(:analyzer) { described_class }
+
+ context 'properly analyzes queries' do
+ using RSpec::Parameterized::TableSyntax
+
+ where do
+ examples = {
+ "for SELECT on projects" => {
+ sql: "SELECT 1 FROM projects",
+ expected_allowed_gitlab_schemas: {
+ no_schema: :dml_not_allowed,
+ gitlab_main: :success,
+ gitlab_ci: :dml_access_denied # cross-schema access
+ }
+ },
+ "for INSERT" => {
+ sql: "INSERT INTO projects VALUES (1)",
+ expected_allowed_gitlab_schemas: {
+ no_schema: :dml_not_allowed,
+ gitlab_main: :success,
+ gitlab_ci: :dml_access_denied # cross-schema access
+ }
+ },
+ "for CREATE INDEX" => {
+ sql: "CREATE INDEX index_projects_on_hidden ON projects (hidden)",
+ expected_allowed_gitlab_schemas: {
+ no_schema: :success,
+ gitlab_main: :ddl_not_allowed,
+ gitlab_ci: :ddl_not_allowed
+ }
+ },
+ "for CREATE SCHEMA" => {
+ sql: "CREATE SCHEMA __test_schema",
+ expected_allowed_gitlab_schemas: {
+ no_schema: :success,
+ # TODO: This is currently not properly detected
+ gitlab_main: :success,
+ gitlab_ci: :success
+ }
+ },
+ "for CREATE FUNCTION" => {
+ sql: "CREATE FUNCTION add(integer, integer) RETURNS integer AS 'select $1 + $2;' LANGUAGE SQL",
+ expected_allowed_gitlab_schemas: {
+ no_schema: :success,
+ gitlab_main: :ddl_not_allowed,
+ gitlab_ci: :ddl_not_allowed
+ }
+ },
+ "for CREATE TRIGGER" => {
+ sql: "CREATE TRIGGER check_projects BEFORE UPDATE ON projects FOR EACH ROW EXECUTE PROCEDURE check_projects_update()",
+ expected_allowed_gitlab_schemas: {
+ no_schema: :success,
+ gitlab_main: :ddl_not_allowed,
+ gitlab_ci: :ddl_not_allowed
+ }
+ }
+ }
+
+ # Expands all examples into individual tests
+ examples.flat_map do |name, configuration|
+ configuration[:expected_allowed_gitlab_schemas].map do |allowed_gitlab_schema, expectation|
+ [
+ "#{name} for allowed_gitlab_schema=#{allowed_gitlab_schema}",
+ {
+ sql: configuration[:sql],
+              allowed_gitlab_schema: allowed_gitlab_schema, # no_schema, gitlab_main, gitlab_ci
+ expectation: expectation # success, dml_access_denied, ...
+ }
+ ]
+ end
+ end.to_h
+ end
+
+ with_them do
+ subject do
+ process_sql(sql) do
+ analyzer.allowed_gitlab_schemas = [allowed_gitlab_schema] unless allowed_gitlab_schema == :no_schema
+ end
+ end
+
+ it do
+ case expectation
+ when :success
+ expect { subject }.not_to raise_error
+ when :ddl_not_allowed
+ expect { subject }.to raise_error(described_class::DDLNotAllowedError)
+ when :dml_not_allowed
+ expect { subject }.to raise_error(described_class::DMLNotAllowedError)
+ when :dml_access_denied
+ expect { subject }.to raise_error(described_class::DMLAccessDeniedError)
+ else
+ raise "invalid expectation: #{expectation}"
+ end
+ end
+ end
+ end
+
+ describe '.require_ddl_mode!' do
+ subject { described_class.require_ddl_mode! }
+
+    it "does not raise an exception when not configured" do
+ expect { subject }.not_to raise_error
+ end
+
+    it "does not raise an exception when no schemas are configured (DDL mode)" do
+ with_analyzer do
+ expect { subject }.not_to raise_error
+ end
+ end
+
+    it "raises an exception when schemas are configured (DML mode)" do
+ with_analyzer do
+ analyzer.allowed_gitlab_schemas = %i[gitlab_main]
+
+ expect { subject }.to raise_error(described_class::DMLNotAllowedError)
+ end
+ end
+ end
+
+ describe '.require_dml_mode!' do
+ subject { described_class.require_dml_mode! }
+
+    it "does not raise an exception when not configured" do
+ expect { subject }.not_to raise_error
+ end
+
+    it "raises an exception when no schemas are configured (DDL mode)" do
+ with_analyzer do
+ expect { subject }.to raise_error(described_class::DDLNotAllowedError)
+ end
+ end
+
+    it "does not raise an exception when schemas are configured (DML mode)" do
+ with_analyzer do
+ analyzer.allowed_gitlab_schemas = %i[gitlab_main]
+
+ expect { subject }.not_to raise_error
+ end
+ end
+ end
+
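+  # Runs the given block with this analyzer registered in the query analyzer instance.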
+ def with_analyzer
+ Gitlab::Database::QueryAnalyzer.instance.within([analyzer]) do
+ yield
+ end
+ end
+
+ def process_sql(sql, model = ActiveRecord::Base)
+ with_analyzer do
+ yield if block_given?
+
+ # Skip load balancer and retrieve connection assigned to model
+ Gitlab::Database::QueryAnalyzer.instance.process_sql(sql, model.retrieve_connection)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
index 3799fe3c316..50071e3e22b 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProject
it 'invalidates the markdown cache of related projects' do
expect(subject).to receive(:remove_cached_html_for_projects)
- .with(projects.map(&:id))
+ .with(a_collection_containing_exactly(*projects.map(&:id)))
subject.rename_projects
end
diff --git a/spec/lib/gitlab/database/transaction/context_spec.rb b/spec/lib/gitlab/database/transaction/context_spec.rb
index 37cfc841d48..33a47150060 100644
--- a/spec/lib/gitlab/database/transaction/context_spec.rb
+++ b/spec/lib/gitlab/database/transaction/context_spec.rb
@@ -135,4 +135,24 @@ RSpec.describe Gitlab::Database::Transaction::Context do
it_behaves_like 'logs transaction data'
end
+
+ context 'when there are too many external HTTP requests' do
+ before do
+ allow(::Gitlab::Metrics::Subscribers::ExternalHttp)
+ .to receive(:request_count)
+ .and_return(100)
+ end
+
+ it_behaves_like 'logs transaction data'
+ end
+
+  context 'when there are too many long-running external HTTP requests' do
+ before do
+ allow(::Gitlab::Metrics::Subscribers::ExternalHttp)
+ .to receive(:duration)
+ .and_return(5.5)
+ end
+
+ it_behaves_like 'logs transaction data'
+ end
end
diff --git a/spec/lib/gitlab/database/transaction/observer_spec.rb b/spec/lib/gitlab/database/transaction/observer_spec.rb
index e5cc0106c9b..074c18d406e 100644
--- a/spec/lib/gitlab/database/transaction/observer_spec.rb
+++ b/spec/lib/gitlab/database/transaction/observer_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Gitlab::Database::Transaction::Observer do
User.first
expect(transaction_context).to be_a(::Gitlab::Database::Transaction::Context)
- expect(context.keys).to match_array(%i(start_time depth savepoints queries backtraces))
+ expect(context.keys).to match_array(%i(start_time depth savepoints queries backtraces external_http_count_start external_http_duration_start))
expect(context[:depth]).to eq(2)
expect(context[:savepoints]).to eq(1)
expect(context[:queries].length).to eq(1)
@@ -38,6 +38,71 @@ RSpec.describe Gitlab::Database::Transaction::Observer do
expect(context[:backtraces].length).to eq(1)
end
+ describe 'tracking external network requests', :request_store do
+ it 'tracks external requests' do
+ perform_stubbed_external_http_request(duration: 0.25)
+ perform_stubbed_external_http_request(duration: 1.25)
+
+ ActiveRecord::Base.transaction do
+ User.first
+
+ expect(context[:external_http_count_start]).to eq(2)
+ expect(context[:external_http_duration_start]).to eq(1.5)
+
+ perform_stubbed_external_http_request(duration: 1)
+ perform_stubbed_external_http_request(duration: 3)
+
+ expect(transaction_context.external_http_requests_count).to eq 2
+ expect(transaction_context.external_http_requests_duration).to eq 4
+ end
+ end
+
+    context 'when the external HTTP request duration threshold has been exceeded' do
+ it 'logs transaction details including exceeding thresholds' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(
+ hash_including(
+ external_http_requests_count: 2,
+ external_http_requests_duration: 12
+ )
+ )
+
+ ActiveRecord::Base.transaction do
+ User.first
+
+ perform_stubbed_external_http_request(duration: 2)
+ perform_stubbed_external_http_request(duration: 10)
+ end
+ end
+ end
+
+    context 'when the external HTTP request count threshold has been exceeded' do
+ it 'logs transaction details including exceeding thresholds' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(
+ hash_including(external_http_requests_count: 55)
+ )
+
+ ActiveRecord::Base.transaction do
+ User.first
+
+ 55.times { perform_stubbed_external_http_request(duration: 0.01) }
+ end
+ end
+ end
+
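+    # Simulates an instrumented external HTTP request by feeding a stubbed
+    # ActiveSupport::Notifications event to the ExternalHttp subscriber.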
+ def perform_stubbed_external_http_request(duration:)
+ ::Gitlab::Metrics::Subscribers::ExternalHttp.new.request(
+ instance_double(
+ 'ActiveSupport::Notifications::Event',
+ payload: {
+ method: 'GET', code: '200', duration: duration,
+ scheme: 'http', host: 'example.gitlab.com', port: 80, path: '/'
+ },
+ time: Time.current
+ )
+ )
+ end
+ end
+
describe '.extract_sql_command' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/lib/gitlab/database/type/color_spec.rb b/spec/lib/gitlab/database/type/color_spec.rb
new file mode 100644
index 00000000000..84fd8d0bbce
--- /dev/null
+++ b/spec/lib/gitlab/database/type/color_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Gitlab::Database::Type::Color do
+ subject(:type) { described_class.new }
+
+ let(:color) { ::Gitlab::Color.of('red') }
+
+ it 'serializes by calling #to_s' do
+ expect(type.serialize(color)).to eq(color.to_s)
+ end
+
+ it 'serializes nil to nil' do
+ expect(type.serialize(nil)).to be_nil
+ end
+
+ it 'casts by calling Color::new' do
+ expect(type.cast('#fff')).to eq(::Gitlab::Color.new('#fff'))
+ end
+
+ it 'accepts colors as arguments to cast' do
+ expect(type.cast(color)).to eq(color)
+ end
+
+ it 'allows nil database values' do
+ expect(type.cast(nil)).to be_nil
+ end
+
+ it 'tells us what is serializable' do
+ [nil, 'foo', color].each do |value|
+ expect(type.serializable?(value)).to be true
+ end
+ end
+
+ it 'tells us what is not serializable' do
+ [0, 3.2, true, Time.current, { some: 'hash' }].each do |value|
+ expect(type.serializable?(value)).to be false
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index b3b7c81e9e7..c58dba213ee 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -205,12 +205,12 @@ RSpec.describe Gitlab::Database do
end
context 'when the connection is LoadBalancing::ConnectionProxy' do
- it 'returns nil' do
+ it 'returns primary_db_config' do
lb_config = ::Gitlab::Database::LoadBalancing::Configuration.new(ActiveRecord::Base)
lb = ::Gitlab::Database::LoadBalancing::LoadBalancer.new(lb_config)
proxy = ::Gitlab::Database::LoadBalancing::ConnectionProxy.new(lb)
- expect(described_class.db_config_for_connection(proxy)).to be_nil
+ expect(described_class.db_config_for_connection(proxy)).to eq(lb_config.primary_db_config)
end
end
@@ -229,7 +229,7 @@ RSpec.describe Gitlab::Database do
# This is a ConnectionProxy
expect(described_class.db_config_name(model.connection))
- .to eq('unknown')
+ .to eq('main')
# This is an actual connection
expect(described_class.db_config_name(model.retrieve_connection))
@@ -245,6 +245,31 @@ RSpec.describe Gitlab::Database do
end
end
+ describe '.gitlab_schemas_for_connection' do
+ it 'does raise exception for invalid connection' do
+ expect { described_class.gitlab_schemas_for_connection(:invalid) }.to raise_error /key not found: "unknown"/
+ end
+
+ it 'does return a valid schema depending on a base model used', :request_store do
+ # This is currently required as otherwise the `Ci::Build.connection` == `Project.connection`
+ # ENV due to lib/gitlab/database/load_balancing/setup.rb:93
+ stub_env('GITLAB_USE_MODEL_LOAD_BALANCING', '1')
+ # FF due to lib/gitlab/database/load_balancing/configuration.rb:92
+ stub_feature_flags(force_no_sharing_primary_model: true)
+
+ expect(described_class.gitlab_schemas_for_connection(Project.connection)).to include(:gitlab_main, :gitlab_shared)
+ expect(described_class.gitlab_schemas_for_connection(Ci::Build.connection)).to include(:gitlab_ci, :gitlab_shared)
+ end
+
+  it 'does return gitlab_ci when ActiveRecord::Base is using the CI connection' do
+ with_reestablished_active_record_base do
+ reconfigure_db_connection(model: ActiveRecord::Base, config_model: Ci::Build)
+
+ expect(described_class.gitlab_schemas_for_connection(ActiveRecord::Base.connection)).to include(:gitlab_ci, :gitlab_shared)
+ end
+ end
+ end
+
describe '#true_value' do
it 'returns correct value' do
expect(described_class.true_value).to eq "'t'"
@@ -279,6 +304,46 @@ RSpec.describe Gitlab::Database do
end
end
+ describe '.all_uncached' do
+ let(:base_model) do
+ Class.new do
+ def self.uncached
+ @uncached = true
+
+ yield
+ end
+ end
+ end
+
+ let(:model1) { Class.new(base_model) }
+ let(:model2) { Class.new(base_model) }
+
+ before do
+ allow(described_class).to receive(:database_base_models)
+ .and_return({ model1: model1, model2: model2 }.with_indifferent_access)
+ end
+
+ it 'wraps the given block in uncached calls for each primary connection', :aggregate_failures do
+ expect(model1.instance_variable_get(:@uncached)).to be_nil
+ expect(model2.instance_variable_get(:@uncached)).to be_nil
+
+ expect(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_primary).and_yield
+
+ expect(model2).to receive(:uncached).and_call_original
+ expect(model1).to receive(:uncached).and_call_original
+
+ yielded_to_block = false
+ described_class.all_uncached do
+ expect(model1.instance_variable_get(:@uncached)).to be(true)
+ expect(model2.instance_variable_get(:@uncached)).to be(true)
+
+ yielded_to_block = true
+ end
+
+ expect(yielded_to_block).to be(true)
+ end
+ end
+
describe '.read_only?' do
it 'returns false' do
expect(described_class.read_only?).to eq(false)
diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb
index 7c1a8f4c3c8..f2212ec9b09 100644
--- a/spec/lib/gitlab/diff/file_spec.rb
+++ b/spec/lib/gitlab/diff/file_spec.rb
@@ -51,45 +51,29 @@ RSpec.describe Gitlab::Diff::File do
project.commit(branch_name).diffs.diff_files.first
end
- describe 'initialize' do
- context 'when file is ipynb with a change after transformation' do
+ describe '#has_renderable?' do
+ context 'file is ipynb' do
let(:commit) { project.commit("532c837") }
- let(:diff) { commit.raw_diffs.first }
- let(:diff_file) { described_class.new(diff, diff_refs: commit.diff_refs, repository: project.repository) }
- context 'and :jupyter_clean_diffs is enabled' do
- before do
- stub_feature_flags(jupyter_clean_diffs: true)
- end
-
- it 'recreates the diff by transforming the files' do
- expect(diff_file.diff.diff).not_to include('cell_type')
- end
+ it 'has renderable viewer' do
+ expect(diff_file.has_renderable?).to be_truthy
end
+ end
- context 'but :jupyter_clean_diffs is disabled' do
- before do
- stub_feature_flags(jupyter_clean_diffs: false)
- end
+ context 'file is not ipynb' do
+ let(:commit) { project.commit("d59c60028b053793cecfb4022de34602e1a9218e") }
- it 'does not recreate the diff' do
- expect(diff_file.diff.diff).to include('cell_type')
- end
+ it 'does not have renderable viewer' do
+ expect(diff_file.has_renderable?).to be_falsey
end
end
+ end
- context 'when file is ipynb, but there only changes that are removed' do
- let(:commit) { project.commit("2b5ef814") }
- let(:diff) { commit.raw_diffs.first }
- let(:diff_file) { described_class.new(diff, diff_refs: commit.diff_refs, repository: project.repository) }
-
- before do
- stub_feature_flags(jupyter_clean_diffs: true)
- end
+ describe '#rendered' do
+ let(:commit) { project.commit("532c837") }
- it 'does not recreate the diff' do
- expect(diff_file.diff.diff).to include('execution_count')
- end
+ it 'creates a NotebookDiffFile for rendering' do
+ expect(diff_file.rendered).to be_kind_of(Gitlab::Diff::Rendered::Notebook::DiffFile)
end
end
diff --git a/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb b/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb
new file mode 100644
index 00000000000..15edbc22460
--- /dev/null
+++ b/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFile do
+ include RepoHelpers
+
+ let(:project) { create(:project, :repository) }
+ let(:commit) { project.commit("5d6ed1503801ca9dc28e95eeb85a7cf863527aee") }
+ let(:diffs) { commit.raw_diffs.to_a }
+ let(:diff) { diffs.first }
+ let(:source) { Gitlab::Diff::File.new(diff, diff_refs: commit.diff_refs, repository: project.repository) }
+ let(:nb_file) { described_class.new(source) }
+
+ describe '#old_blob and #new_blob' do
+ context 'when file is changed' do
+ it 'transforms the old blob' do
+ expect(nb_file.old_blob.data).to include('%%')
+ end
+
+ it 'transforms the new blob' do
+ expect(nb_file.new_blob.data).to include('%%')
+ end
+ end
+
+ context 'when file is added' do
+ let(:diff) { diffs[1] }
+
+ it 'old_blob is empty' do
+ expect(nb_file.old_blob).to be_nil
+ end
+
+ it 'new_blob is transformed' do
+ expect(nb_file.new_blob.data).to include('%%')
+ end
+ end
+
+ context 'when file is removed' do
+ let(:diff) { diffs[2] }
+
+ it 'old_blob is transformed' do
+ expect(nb_file.old_blob.data).to include('%%')
+ end
+
+ it 'new_blob is empty' do
+ expect(nb_file.new_blob).to be_nil
+ end
+ end
+ end
+
+ describe '#diff' do
+ context 'for valid notebooks' do
+ it 'returns the transformed diff' do
+ expect(nb_file.diff.diff).to include('%%')
+ end
+ end
+
+ context 'for invalid notebooks' do
+ let(:commit) { project.commit("6d85bb693dddaee631ec0c2f697c52c62b93f6d3") }
+ let(:diff) { diffs[1] }
+
+ it 'returns nil' do
+ expect(nb_file.diff).to be_nil
+ end
+ end
+ end
+
+ describe '#has_renderable?' do
+ context 'notebook diff is empty' do
+ let(:commit) { project.commit("a867a602d2220e5891b310c07d174fbe12122830") }
+
+ it 'is false' do
+ expect(nb_file.has_renderable?).to be_falsey
+ end
+ end
+
+ context 'notebook is valid' do
+ it 'is true' do
+ expect(nb_file.has_renderable?).to be_truthy
+ end
+ end
+ end
+
+  describe '#highlighted_diff_lines' do
+    context 'when the transformed line is not part of the diff' do
+ it 'line is not discussable' do
+ expect(nb_file.highlighted_diff_lines[0].discussable?).to be_falsey
+ end
+ end
+
+    context 'when the transformed line is part of the diff' do
+      it 'line is discussable' do
+ expect(nb_file.highlighted_diff_lines[12].discussable?).to be_truthy
+ end
+ end
+
+ context 'assigns the correct position' do
+      it 'computes the first line where the removal would appear' do
+ expect(nb_file.highlighted_diff_lines[0].old_pos).to eq(3)
+ expect(nb_file.highlighted_diff_lines[0].new_pos).to eq(3)
+
+ expect(nb_file.highlighted_diff_lines[12].new_pos).to eq(15)
+ expect(nb_file.highlighted_diff_lines[12].old_pos).to eq(18)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/email/attachment_uploader_spec.rb b/spec/lib/gitlab/email/attachment_uploader_spec.rb
index 4b4e671f001..40b94df6ee3 100644
--- a/spec/lib/gitlab/email/attachment_uploader_spec.rb
+++ b/spec/lib/gitlab/email/attachment_uploader_spec.rb
@@ -8,7 +8,27 @@ RSpec.describe Gitlab::Email::AttachmentUploader do
let(:message_raw) { fixture_file("emails/attachment.eml") }
let(:message) { Mail::Message.new(message_raw) }
+ before do
+ allow_next_instance_of(Gitlab::Sanitizers::Exif) do |instance|
+ allow(instance).to receive(:clean_existing_path).and_call_original
+ end
+ end
+
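+  # Asserts that the EXIF sanitizer receives the attachment path and its contents before upload.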
+ def expect_exif_sanitizer_called
+ expect_next_instance_of(Gitlab::Sanitizers::Exif) do |sanitizer|
+ expect(sanitizer).to receive(:clean_existing_path) do |path, **options|
+ expect(File.exist?(path)).to be true
+
+ file = File.open(path, "rb")
+ expect(options).to eql(content: file.read, skip_unallowed_types: true)
+ file.close
+ end
+ end
+ end
+
it "uploads all attachments and returns their links" do
+ expect_exif_sanitizer_called
+
links = described_class.new(message).execute(upload_parent: project, uploader_class: FileUploader)
link = links.first
@@ -21,6 +41,8 @@ RSpec.describe Gitlab::Email::AttachmentUploader do
let(:message_raw) { fixture_file("emails/valid_reply_signed_smime.eml") }
it 'uploads all attachments except the signature' do
+ expect_exif_sanitizer_called
+
links = described_class.new(message).execute(upload_parent: project, uploader_class: FileUploader)
expect(links).not_to include(a_hash_including(alt: 'smime.p7s'))
@@ -36,6 +58,8 @@ RSpec.describe Gitlab::Email::AttachmentUploader do
let(:message_raw) { fixture_file("emails/valid_reply_signed_smime_mixed_protocol_prefix.eml") }
it 'uploads all attachments except the signature' do
+ expect_exif_sanitizer_called
+
links = described_class.new(message).execute(upload_parent: project, uploader_class: FileUploader)
expect(links).not_to include(a_hash_including(alt: 'smime.p7s'))
diff --git a/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb b/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
index 10098a66ae9..75538baf07f 100644
--- a/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
@@ -148,34 +148,11 @@ RSpec.describe Gitlab::Email::Handler::CreateIssueHandler do
end
end
- context 'rate limiting' do
- let(:rate_limited_service_feature_enabled) { nil }
+ it 'raises a RateLimitedService::RateLimitedError' do
+ allow(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true)
- before do
- stub_feature_flags(rate_limited_service_issues_create: rate_limited_service_feature_enabled)
- end
-
- context 'when :rate_limited_service Feature is disabled' do
- let(:rate_limited_service_feature_enabled) { false }
-
- it 'does not attempt to throttle' do
- expect(::Gitlab::ApplicationRateLimiter).not_to receive(:throttled?)
-
- setup_attachment
- receiver.execute
- end
- end
-
- context 'when :rate_limited_service Feature is enabled' do
- let(:rate_limited_service_feature_enabled) { true }
-
- it 'raises a RateLimitedService::RateLimitedError' do
- allow(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true)
-
- setup_attachment
- expect { receiver.execute }.to raise_error(RateLimitedService::RateLimitedError, _('This endpoint has been requested too many times. Try again later.'))
- end
- end
+ setup_attachment
+ expect { receiver.execute }.to raise_error(RateLimitedService::RateLimitedError, _('This endpoint has been requested too many times. Try again later.'))
end
end
diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
index 7c34fb1a926..913e197708f 100644
--- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
@@ -382,7 +382,6 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
subject { 2.times { receiver.execute } }
before do
- stub_feature_flags(rate_limited_service_issues_create: true)
stub_application_setting(issues_create_limit: 1)
end
@@ -478,6 +477,20 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
end
end
+ context 'when there is a reply-to address and a from address' do
+ let(:email_raw) { email_fixture('emails/service_desk_reply_to_and_from.eml') }
+
+ it 'shows both from and reply-to addresses in the issue header' do
+ setup_attachment
+
+ expect { receiver.execute }.to change { Issue.count }.by(1)
+
+ new_issue = Issue.last
+
+ expect(new_issue.external_author).to eq('finn@adventuretime.ooo (reply to: marceline@adventuretime.ooo)')
+ end
+ end
+
context 'when service desk is not enabled for project' do
before do
allow(Gitlab::ServiceDesk).to receive(:enabled?).and_return(false)
diff --git a/spec/lib/gitlab/email/receiver_spec.rb b/spec/lib/gitlab/email/receiver_spec.rb
index b1a04f0592a..9040731d8fd 100644
--- a/spec/lib/gitlab/email/receiver_spec.rb
+++ b/spec/lib/gitlab/email/receiver_spec.rb
@@ -32,12 +32,21 @@ RSpec.describe Gitlab::Email::Receiver do
metadata = receiver.mail_metadata
- expect(metadata.keys).to match_array(%i(mail_uid from_address to_address mail_key references delivered_to envelope_to x_envelope_to meta))
+ expect(metadata.keys).to match_array(%i(mail_uid from_address to_address mail_key references delivered_to envelope_to x_envelope_to meta received_recipients))
expect(metadata[:meta]).to include(client_id: 'email/jake@example.com', project: project.full_path)
expect(metadata[meta_key]).to eq(meta_value)
end
end
+ shared_examples 'failed receive' do
+ it 'adds metric event' do
+ expect(::Gitlab::Metrics::BackgroundTransaction).to receive(:current).and_return(metric_transaction)
+ expect(metric_transaction).to receive(:add_event).with('email_receiver_error', { error: expected_error.name })
+
+ expect { receiver.execute }.to raise_error(expected_error)
+ end
+ end
+
context 'when the email contains a valid email address in a header' do
before do
stub_incoming_email_setting(enabled: true, address: "incoming+%{key}@appmail.example.com")
@@ -74,14 +83,25 @@ RSpec.describe Gitlab::Email::Receiver do
it_behaves_like 'successful receive'
end
- end
- shared_examples 'failed receive' do
- it 'adds metric event' do
- expect(::Gitlab::Metrics::BackgroundTransaction).to receive(:current).and_return(metric_transaction)
- expect(metric_transaction).to receive(:add_event).with('email_receiver_error', { error: expected_error.name })
+ context 'when all other headers are missing' do
+ let(:email_raw) { fixture_file('emails/missing_delivered_to_header.eml') }
+ let(:meta_key) { :received_recipients }
+ let(:meta_value) { ['incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com', 'incoming+gitlabhq/gitlabhq@example.com'] }
- expect { receiver.execute }.to raise_error(expected_error)
+ context 'when use_received_header_for_incoming_emails is enabled' do
+ it_behaves_like 'successful receive'
+ end
+
+ context 'when use_received_header_for_incoming_emails is disabled' do
+ let(:expected_error) { Gitlab::Email::UnknownIncomingEmail }
+
+ before do
+ stub_feature_flags(use_received_header_for_incoming_emails: false)
+ end
+
+ it_behaves_like 'failed receive'
+ end
end
end
diff --git a/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
index 9acc7fd04be..33d322d0d44 100644
--- a/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
+++ b/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
@@ -2,9 +2,9 @@
require 'spec_helper'
-RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
+RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor, :sentry do
describe '.call' do
- let(:required_options) do
+ let(:raven_required_options) do
{
configuration: Raven.configuration,
context: Raven.context,
@@ -12,7 +12,15 @@ RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
}
end
- let(:event) { Raven::Event.from_exception(exception, required_options.merge(data)) }
+ let(:raven_event) do
+ Raven::Event
+ .from_exception(exception, raven_required_options.merge(data))
+ end
+
+ let(:sentry_event) do
+ Sentry.get_current_client.event_from_exception(exception)
+ end
+
let(:result_hash) { described_class.call(event).to_hash }
let(:data) do
@@ -27,36 +35,43 @@ RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
}
end
+ before do
+ Sentry.get_current_scope.update_from_options(**data)
+ Sentry.get_current_scope.apply_to_event(sentry_event)
+ end
+
+ after do
+ Sentry.get_current_scope.clear
+ end
+
context 'when there is no GRPC exception' do
let(:exception) { RuntimeError.new }
let(:data) { { fingerprint: ['ArgumentError', 'Missing arguments'] } }
- it 'leaves data unchanged' do
- expect(result_hash).to include(data)
+ shared_examples 'leaves data unchanged' do
+ it { expect(result_hash).to include(data) }
end
- end
- context 'when there is a GRPC exception with a debug string' do
- let(:exception) { GRPC::DeadlineExceeded.new('Deadline Exceeded', {}, '{"hello":1}') }
+ context 'with Raven event' do
+ let(:event) { raven_event }
- it 'removes the debug error string and stores it as an extra field' do
- expect(result_hash[:fingerprint])
- .to eq(['GRPC::DeadlineExceeded', '4:Deadline Exceeded.'])
+ it_behaves_like 'leaves data unchanged'
+ end
- expect(result_hash[:exception][:values].first)
- .to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
+ context 'with Sentry event' do
+ let(:event) { sentry_event }
- expect(result_hash[:extra])
- .to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
+ it_behaves_like 'leaves data unchanged'
end
+ end
- context 'with no custom fingerprint' do
- let(:data) do
- { extra: { caller: 'test' } }
- end
+ context 'when there is a GRPC exception with a debug string' do
+ let(:exception) { GRPC::DeadlineExceeded.new('Deadline Exceeded', {}, '{"hello":1}') }
+ shared_examples 'processes the exception' do
it 'removes the debug error string and stores it as an extra field' do
- expect(result_hash).not_to include(:fingerprint)
+ expect(result_hash[:fingerprint])
+ .to eq(['GRPC::DeadlineExceeded', '4:Deadline Exceeded.'])
expect(result_hash[:exception][:values].first)
.to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
@@ -64,11 +79,42 @@ RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
expect(result_hash[:extra])
.to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
end
+
+ context 'with no custom fingerprint' do
+ let(:data) do
+ { extra: { caller: 'test' } }
+ end
+
+ it 'removes the debug error string and stores it as an extra field' do
+ expect(result_hash[:fingerprint]).to be_blank
+
+ expect(result_hash[:exception][:values].first)
+ .to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
+
+ expect(result_hash[:extra])
+ .to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
+ end
+ end
+ end
+
+ context 'with Raven event' do
+ let(:event) { raven_event }
+
+ it_behaves_like 'processes the exception'
+ end
+
+ context 'with Sentry event' do
+ let(:event) { sentry_event }
+
+ it_behaves_like 'processes the exception'
end
end
context 'when there is a wrapped GRPC exception with a debug string' do
- let(:inner_exception) { GRPC::DeadlineExceeded.new('Deadline Exceeded', {}, '{"hello":1}') }
+ let(:inner_exception) do
+ GRPC::DeadlineExceeded.new('Deadline Exceeded', {}, '{"hello":1}')
+ end
+
let(:exception) do
begin
raise inner_exception
@@ -79,27 +125,10 @@ RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
e
end
- it 'removes the debug error string and stores it as an extra field' do
- expect(result_hash[:fingerprint])
- .to eq(['GRPC::DeadlineExceeded', '4:Deadline Exceeded.'])
-
- expect(result_hash[:exception][:values].first)
- .to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
-
- expect(result_hash[:exception][:values].second)
- .to include(type: 'StandardError', value: '4:Deadline Exceeded.')
-
- expect(result_hash[:extra])
- .to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
- end
-
- context 'with no custom fingerprint' do
- let(:data) do
- { extra: { caller: 'test' } }
- end
-
+ shared_examples 'processes the exception' do
it 'removes the debug error string and stores it as an extra field' do
- expect(result_hash).not_to include(:fingerprint)
+ expect(result_hash[:fingerprint])
+ .to eq(['GRPC::DeadlineExceeded', '4:Deadline Exceeded.'])
expect(result_hash[:exception][:values].first)
.to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
@@ -110,6 +139,37 @@ RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
expect(result_hash[:extra])
.to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
end
+
+ context 'with no custom fingerprint' do
+ let(:data) do
+ { extra: { caller: 'test' } }
+ end
+
+ it 'removes the debug error string and stores it as an extra field' do
+ expect(result_hash[:fingerprint]).to be_blank
+
+ expect(result_hash[:exception][:values].first)
+ .to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
+
+ expect(result_hash[:exception][:values].second)
+ .to include(type: 'StandardError', value: '4:Deadline Exceeded.')
+
+ expect(result_hash[:extra])
+ .to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
+ end
+ end
+ end
+
+ context 'with Raven event' do
+ let(:event) { raven_event }
+
+ it_behaves_like 'processes the exception'
+ end
+
+ context 'with Sentry event' do
+ let(:event) { sentry_event }
+
+ it_behaves_like 'processes the exception'
end
end
end
diff --git a/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
index 3febc10831a..d33f8393904 100644
--- a/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
+++ b/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require 'rspec-parameterized'
-RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor do
+RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor, :sentry do
after do
if described_class.instance_variable_defined?(:@permitted_arguments_for_worker)
described_class.remove_instance_variable(:@permitted_arguments_for_worker)
@@ -95,7 +95,9 @@ RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor do
end
describe '.call' do
- let(:required_options) do
+ let(:exception) { StandardError.new('Test exception') }
+
+ let(:raven_required_options) do
{
configuration: Raven.configuration,
context: Raven.context,
@@ -103,9 +105,25 @@ RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor do
}
end
- let(:event) { Raven::Event.new(required_options.merge(wrapped_value)) }
+ let(:raven_event) do
+ Raven::Event.new(raven_required_options.merge(wrapped_value))
+ end
+
+ let(:sentry_event) do
+ Sentry.get_current_client.event_from_exception(exception)
+ end
+
let(:result_hash) { described_class.call(event).to_hash }
+ before do
+ Sentry.get_current_scope.update_from_options(**wrapped_value)
+ Sentry.get_current_scope.apply_to_event(sentry_event)
+ end
+
+ after do
+ Sentry.get_current_scope.clear
+ end
+
context 'when there is Sidekiq data' do
let(:wrapped_value) { { extra: { sidekiq: value } } }
@@ -140,42 +158,90 @@ RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor do
end
context 'when processing via the default error handler' do
- include_examples 'Sidekiq arguments', args_in_job_hash: true
+ context 'with Raven events' do
+ let(:event) { raven_event }
+
+ include_examples 'Sidekiq arguments', args_in_job_hash: true
+ end
+
+ context 'with Sentry events' do
+ let(:event) { sentry_event }
+
+ include_examples 'Sidekiq arguments', args_in_job_hash: true
+ end
end
context 'when processing via Gitlab::ErrorTracking' do
- include_examples 'Sidekiq arguments', args_in_job_hash: false
- end
+ context 'with Raven events' do
+ let(:event) { raven_event }
- context 'when a jobstr field is present' do
- let(:value) do
- {
- job: { 'args' => [1] },
- jobstr: { 'args' => [1] }.to_json
- }
+ include_examples 'Sidekiq arguments', args_in_job_hash: false
end
- it 'removes the jobstr' do
- expect(result_hash.dig(:extra, :sidekiq)).to eq(value.except(:jobstr))
+ context 'with Sentry events' do
+ let(:event) { sentry_event }
+
+ include_examples 'Sidekiq arguments', args_in_job_hash: false
end
end
- context 'when no jobstr value is present' do
- let(:value) { { job: { 'args' => [1] } } }
+ shared_examples 'handles jobstr fields' do
+ context 'when a jobstr field is present' do
+ let(:value) do
+ {
+ job: { 'args' => [1] },
+ jobstr: { 'args' => [1] }.to_json
+ }
+ end
+
+ it 'removes the jobstr' do
+ expect(result_hash.dig(:extra, :sidekiq)).to eq(value.except(:jobstr))
+ end
+ end
+
+ context 'when no jobstr value is present' do
+ let(:value) { { job: { 'args' => [1] } } }
- it 'does nothing' do
- expect(result_hash.dig(:extra, :sidekiq)).to eq(value)
+ it 'does nothing' do
+ expect(result_hash.dig(:extra, :sidekiq)).to eq(value)
+ end
end
end
+
+ context 'with Raven events' do
+ let(:event) { raven_event }
+
+ it_behaves_like 'handles jobstr fields'
+ end
+
+ context 'with Sentry events' do
+ let(:event) { sentry_event }
+
+ it_behaves_like 'handles jobstr fields'
+ end
end
context 'when there is no Sidekiq data' do
let(:value) { { tags: { foo: 'bar', baz: 'quux' } } }
let(:wrapped_value) { value }
- it 'does nothing' do
- expect(result_hash).to include(value)
- expect(result_hash.dig(:extra, :sidekiq)).to be_nil
+ shared_examples 'does nothing' do
+ it 'does nothing' do
+ expect(result_hash).to include(value)
+ expect(result_hash.dig(:extra, :sidekiq)).to be_nil
+ end
+ end
+
+ context 'with Raven events' do
+ let(:event) { raven_event }
+
+ it_behaves_like 'does nothing'
+ end
+
+ context 'with Sentry events' do
+ let(:event) { sentry_event }
+
+ it_behaves_like 'does nothing'
end
end
@@ -183,8 +249,22 @@ RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor do
let(:value) { { other: 'foo' } }
let(:wrapped_value) { { extra: { sidekiq: value } } }
- it 'does nothing' do
- expect(result_hash.dig(:extra, :sidekiq)).to eq(value)
+ shared_examples 'does nothing' do
+ it 'does nothing' do
+ expect(result_hash.dig(:extra, :sidekiq)).to eq(value)
+ end
+ end
+
+ context 'with Raven events' do
+ let(:event) { raven_event }
+
+ it_behaves_like 'does nothing'
+ end
+
+ context 'with Sentry events' do
+ let(:event) { sentry_event }
+
+ it_behaves_like 'does nothing'
end
end
end
diff --git a/spec/lib/gitlab/error_tracking_spec.rb b/spec/lib/gitlab/error_tracking_spec.rb
index a5d44963f4b..936954fc1b6 100644
--- a/spec/lib/gitlab/error_tracking_spec.rb
+++ b/spec/lib/gitlab/error_tracking_spec.rb
@@ -3,13 +3,14 @@
require 'spec_helper'
require 'raven/transports/dummy'
+require 'sentry/transport/dummy_transport'
RSpec.describe Gitlab::ErrorTracking do
let(:exception) { RuntimeError.new('boom') }
let(:issue_url) { 'http://gitlab.com/gitlab-org/gitlab-foss/issues/1' }
let(:extra) { { issue_url: issue_url, some_other_info: 'info' } }
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
let(:sentry_payload) do
{
@@ -43,17 +44,28 @@ RSpec.describe Gitlab::ErrorTracking do
}
end
- let(:sentry_event) { Gitlab::Json.parse(Raven.client.transport.events.last[1]) }
+ let(:raven_event) do
+ event = Raven.client.transport.events.last[1]
+ Gitlab::Json.parse(event)
+ end
+
+ let(:sentry_event) do
+ Sentry.get_current_client.transport.events.last
+ end
before do
+ stub_feature_flags(enable_old_sentry_integration: true)
+ stub_feature_flags(enable_new_sentry_integration: true)
stub_sentry_settings
- allow(described_class).to receive(:sentry_dsn).and_return(Gitlab.config.sentry.dsn)
+ allow(described_class).to receive(:sentry_configurable?) { true }
+
allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('cid')
allow(I18n).to receive(:locale).and_return('en')
described_class.configure do |config|
- config.encoding = 'json'
+ config.encoding = 'json' if config.respond_to?(:encoding=)
+ config.transport.transport_class = Sentry::DummyTransport if config.respond_to?(:transport)
end
end
@@ -63,6 +75,10 @@ RSpec.describe Gitlab::ErrorTracking do
end
end
+ after do
+ Sentry.get_current_scope.clear
+ end
+
describe '.track_and_raise_for_dev_exception' do
context 'when exceptions for dev should be raised' do
before do
@@ -71,6 +87,7 @@ RSpec.describe Gitlab::ErrorTracking do
it 'raises the exception' do
expect(Raven).to receive(:capture_exception).with(exception, sentry_payload)
+ expect(Sentry).to receive(:capture_exception).with(exception, sentry_payload)
expect do
described_class.track_and_raise_for_dev_exception(
@@ -89,6 +106,7 @@ RSpec.describe Gitlab::ErrorTracking do
it 'logs the exception with all attributes passed' do
expect(Raven).to receive(:capture_exception).with(exception, sentry_payload)
+ expect(Sentry).to receive(:capture_exception).with(exception, sentry_payload)
described_class.track_and_raise_for_dev_exception(
exception,
@@ -112,6 +130,7 @@ RSpec.describe Gitlab::ErrorTracking do
describe '.track_and_raise_exception' do
it 'always raises the exception' do
expect(Raven).to receive(:capture_exception).with(exception, sentry_payload)
+ expect(Sentry).to receive(:capture_exception).with(exception, sentry_payload)
expect do
described_class.track_and_raise_for_dev_exception(
@@ -136,20 +155,24 @@ RSpec.describe Gitlab::ErrorTracking do
end
describe '.track_exception' do
- subject(:track_exception) { described_class.track_exception(exception, extra) }
+ subject(:track_exception) do
+ described_class.track_exception(exception, extra)
+ end
before do
allow(Raven).to receive(:capture_exception).and_call_original
+ allow(Sentry).to receive(:capture_exception).and_call_original
allow(Gitlab::ErrorTracking::Logger).to receive(:error)
end
it 'calls Raven.capture_exception' do
track_exception
- expect(Raven).to have_received(:capture_exception).with(
- exception,
- sentry_payload
- )
+ expect(Raven)
+ .to have_received(:capture_exception).with(exception, sentry_payload)
+
+ expect(Sentry)
+ .to have_received(:capture_exception).with(exception, sentry_payload)
end
it 'calls Gitlab::ErrorTracking::Logger.error with formatted payload' do
@@ -172,7 +195,10 @@ RSpec.describe Gitlab::ErrorTracking do
context 'the exception implements :sentry_extra_data' do
let(:extra_info) { { event: 'explosion', size: :massive } }
- let(:exception) { double(message: 'bang!', sentry_extra_data: extra_info, backtrace: caller, cause: nil) }
+
+ before do
+ allow(exception).to receive(:sentry_extra_data).and_return(extra_info)
+ end
it 'includes the extra data from the exception in the tracking information' do
track_exception
@@ -180,29 +206,30 @@ RSpec.describe Gitlab::ErrorTracking do
expect(Raven).to have_received(:capture_exception).with(
exception, a_hash_including(extra: a_hash_including(extra_info))
)
+
+ expect(Sentry).to have_received(:capture_exception).with(
+ exception, a_hash_including(extra: a_hash_including(extra_info))
+ )
end
end
context 'the exception implements :sentry_extra_data, which returns nil' do
- let(:exception) { double(message: 'bang!', sentry_extra_data: nil, backtrace: caller, cause: nil) }
let(:extra) { { issue_url: issue_url } }
+ before do
+ allow(exception).to receive(:sentry_extra_data).and_return(nil)
+ end
+
it 'just includes the other extra info' do
track_exception
expect(Raven).to have_received(:capture_exception).with(
exception, a_hash_including(extra: a_hash_including(extra))
)
- end
- end
-
- context 'when the error is kind of an `ActiveRecord::StatementInvalid`' do
- let(:exception) { ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = $1') }
- it 'injects the normalized sql query into extra' do
- track_exception
-
- expect(sentry_event.dig('extra', 'sql')).to eq('SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1')
+ expect(Sentry).to have_received(:capture_exception).with(
+ exception, a_hash_including(extra: a_hash_including(extra))
+ )
end
end
end
@@ -212,32 +239,65 @@ RSpec.describe Gitlab::ErrorTracking do
before do
allow(Raven).to receive(:capture_exception).and_call_original
+ allow(Sentry).to receive(:capture_exception).and_call_original
allow(Gitlab::ErrorTracking::Logger).to receive(:error)
end
context 'custom GitLab context when using Raven.capture_exception directly' do
- subject(:raven_capture_exception) { Raven.capture_exception(exception) }
+ subject(:track_exception) { Raven.capture_exception(exception) }
it 'merges a default set of tags into the existing tags' do
allow(Raven.context).to receive(:tags).and_return(foo: 'bar')
- raven_capture_exception
+ track_exception
- expect(sentry_event['tags']).to include('correlation_id', 'feature_category', 'foo', 'locale', 'program')
+ expect(raven_event['tags']).to include('correlation_id', 'feature_category', 'foo', 'locale', 'program')
end
it 'merges the current user information into the existing user information' do
Raven.user_context(id: -1)
- raven_capture_exception
+ track_exception
- expect(sentry_event['user']).to eq('id' => -1, 'username' => user.username)
+ expect(raven_event['user']).to eq('id' => -1, 'username' => user.username)
+ end
+ end
+
+ context 'custom GitLab context when using Sentry.capture_exception directly' do
+ subject(:track_exception) { Sentry.capture_exception(exception) }
+
+ it 'merges a default set of tags into the existing tags' do
+ Sentry.set_tags(foo: 'bar')
+
+ track_exception
+
+ expect(sentry_event.tags).to include(:correlation_id, :feature_category, :foo, :locale, :program)
+ end
+
+ it 'merges the current user information into the existing user information' do
+ Sentry.set_user(id: -1)
+
+ track_exception
+
+ expect(sentry_event.user).to eq(id: -1, username: user.username)
end
end
context 'with sidekiq args' do
context 'when the args does not have anything sensitive' do
- let(:extra) { { sidekiq: { 'class' => 'PostReceive', 'args' => [1, { 'id' => 2, 'name' => 'hello' }, 'some-value', 'another-value'] } } }
+ let(:extra) do
+ {
+ sidekiq: {
+ 'class' => 'PostReceive',
+ 'args' => [
+ 1,
+ { 'id' => 2, 'name' => 'hello' },
+ 'some-value',
+ 'another-value'
+ ]
+ }
+ }
+ end
it 'ensures extra.sidekiq.args is a string' do
track_exception
@@ -254,8 +314,10 @@ RSpec.describe Gitlab::ErrorTracking do
it 'does not filter parameters when sending to Sentry' do
track_exception
+ expected_data = [1, { 'id' => 2, 'name' => 'hello' }, 'some-value', 'another-value']
- expect(sentry_event.dig('extra', 'sidekiq', 'args')).to eq([1, { 'id' => 2, 'name' => 'hello' }, 'some-value', 'another-value'])
+ expect(raven_event.dig('extra', 'sidekiq', 'args')).to eq(expected_data)
+ expect(sentry_event.extra[:sidekiq]['args']).to eq(expected_data)
end
end
@@ -265,7 +327,8 @@ RSpec.describe Gitlab::ErrorTracking do
it 'filters sensitive arguments before sending and logging' do
track_exception
- expect(sentry_event.dig('extra', 'sidekiq', 'args')).to eq(['[FILTERED]', 1, 2])
+ expect(raven_event.dig('extra', 'sidekiq', 'args')).to eq(['[FILTERED]', 1, 2])
+ expect(sentry_event.extra[:sidekiq]['args']).to eq(['[FILTERED]', 1, 2])
expect(Gitlab::ErrorTracking::Logger).to have_received(:error).with(
hash_including(
'extra.sidekiq' => {
@@ -285,8 +348,10 @@ RSpec.describe Gitlab::ErrorTracking do
it 'sets the GRPC debug error string in the Sentry event and adds a custom fingerprint' do
track_exception
- expect(sentry_event.dig('extra', 'grpc_debug_error_string')).to eq('{"hello":1}')
- expect(sentry_event['fingerprint']).to eq(['GRPC::DeadlineExceeded', '4:unknown cause.'])
+ expect(raven_event.dig('extra', 'grpc_debug_error_string')).to eq('{"hello":1}')
+ expect(raven_event['fingerprint']).to eq(['GRPC::DeadlineExceeded', '4:unknown cause.'])
+ expect(sentry_event.extra[:grpc_debug_error_string]).to eq('{"hello":1}')
+ expect(sentry_event.fingerprint).to eq(['GRPC::DeadlineExceeded', '4:unknown cause.'])
end
end
@@ -296,8 +361,10 @@ RSpec.describe Gitlab::ErrorTracking do
it 'does not do any processing on the event' do
track_exception
- expect(sentry_event['extra']).not_to include('grpc_debug_error_string')
- expect(sentry_event['fingerprint']).to eq(['GRPC::DeadlineExceeded', '4:unknown cause'])
+ expect(raven_event['extra']).not_to include('grpc_debug_error_string')
+ expect(raven_event['fingerprint']).to eq(['GRPC::DeadlineExceeded', '4:unknown cause'])
+ expect(sentry_event.extra).not_to include(:grpc_debug_error_string)
+ expect(sentry_event.fingerprint).to eq(['GRPC::DeadlineExceeded', '4:unknown cause'])
end
end
end
diff --git a/spec/lib/gitlab/etag_caching/middleware_spec.rb b/spec/lib/gitlab/etag_caching/middleware_spec.rb
index 982c0d911bc..8228f95dd5e 100644
--- a/spec/lib/gitlab/etag_caching/middleware_spec.rb
+++ b/spec/lib/gitlab/etag_caching/middleware_spec.rb
@@ -174,7 +174,8 @@ RSpec.describe Gitlab::EtagCaching::Middleware, :clean_gitlab_redis_shared_state
it "pushes route's feature category to the context" do
expect(Gitlab::ApplicationContext).to receive(:push).with(
- feature_category: 'team_planning'
+ feature_category: 'team_planning',
+ caller_id: 'Projects::NotesController#index'
)
_, _, _ = middleware.call(build_request(path, if_none_match))
diff --git a/spec/lib/gitlab/etag_caching/router/restful_spec.rb b/spec/lib/gitlab/etag_caching/router/rails_spec.rb
index a0fc480369c..da6c11e3cb1 100644
--- a/spec/lib/gitlab/etag_caching/router/restful_spec.rb
+++ b/spec/lib/gitlab/etag_caching/router/rails_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::EtagCaching::Router::Restful do
+RSpec.describe Gitlab::EtagCaching::Router::Rails do
it 'matches issue notes endpoint' do
result = match_route('/my-group/and-subgroup/here-comes-the-project/noteable/issue/1/notes')
@@ -114,6 +114,12 @@ RSpec.describe Gitlab::EtagCaching::Router::Restful do
end
end
+ it 'has a caller_id for every route', :aggregate_failures do
+ described_class::ROUTES.each do |route|
+ expect(route.caller_id).to include('#'), "#{route.name} has caller_id #{route.caller_id}, which is not valid"
+ end
+ end
+
def match_route(path)
described_class.match(double(path_info: path))
end
diff --git a/spec/lib/gitlab/etag_caching/router_spec.rb b/spec/lib/gitlab/etag_caching/router_spec.rb
index ce728c41f48..8d2183bc03d 100644
--- a/spec/lib/gitlab/etag_caching/router_spec.rb
+++ b/spec/lib/gitlab/etag_caching/router_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Gitlab::EtagCaching::Router do
expect(result).to be_present
expect(result.name).to eq 'project_pipelines'
- expect(result.router).to eq Gitlab::EtagCaching::Router::Restful
+ expect(result.router).to eq Gitlab::EtagCaching::Router::Rails
end
end
diff --git a/spec/lib/gitlab/experiment/rollout/feature_spec.rb b/spec/lib/gitlab/experiment/rollout/feature_spec.rb
index d73757be79b..82603e6fe0f 100644
--- a/spec/lib/gitlab/experiment/rollout/feature_spec.rb
+++ b/spec/lib/gitlab/experiment/rollout/feature_spec.rb
@@ -9,9 +9,10 @@ RSpec.describe Gitlab::Experiment::Rollout::Feature, :experiment do
describe "#enabled?" do
before do
- allow(Feature::Definition).to receive(:get).and_return('_instance_')
- allow(Gitlab).to receive(:dev_env_or_com?).and_return(true)
- allow(Feature).to receive(:get).and_return(double(state: :on))
+ stub_feature_flags(gitlab_experiment: true)
+ allow(subject).to receive(:feature_flag_defined?).and_return(true)
+ allow(Gitlab).to receive(:com?).and_return(true)
+ allow(subject).to receive(:feature_flag_instance).and_return(double(state: :on))
end
it "is enabled when all criteria are met" do
@@ -19,19 +20,25 @@ RSpec.describe Gitlab::Experiment::Rollout::Feature, :experiment do
end
it "isn't enabled if the feature definition doesn't exist" do
- expect(Feature::Definition).to receive(:get).with('namespaced_stub').and_return(nil)
+ expect(subject).to receive(:feature_flag_defined?).and_return(false)
expect(subject).not_to be_enabled
end
it "isn't enabled if we're not in dev or dotcom environments" do
- expect(Gitlab).to receive(:dev_env_or_com?).and_return(false)
+ expect(Gitlab).to receive(:com?).and_return(false)
expect(subject).not_to be_enabled
end
it "isn't enabled if the feature flag state is :off" do
- expect(Feature).to receive(:get).with('namespaced_stub').and_return(double(state: :off))
+ expect(subject).to receive(:feature_flag_instance).and_return(double(state: :off))
+
+ expect(subject).not_to be_enabled
+ end
+
+ it "isn't enabled if the gitlab_experiment feature flag is false" do
+ stub_feature_flags(gitlab_experiment: false)
expect(subject).not_to be_enabled
end
diff --git a/spec/lib/gitlab/experimentation/controller_concern_spec.rb b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
index 8a96771eeb8..435a0d56301 100644
--- a/spec/lib/gitlab/experimentation/controller_concern_spec.rb
+++ b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
}
)
- allow(Gitlab).to receive(:dev_env_or_com?).and_return(is_gitlab_com)
+ allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
Feature.enable_percentage_of_time(:test_experiment_experiment_percentage, enabled_percentage)
end
diff --git a/spec/lib/gitlab/experimentation/experiment_spec.rb b/spec/lib/gitlab/experimentation/experiment_spec.rb
index d9bf85460b3..a5cc69b9538 100644
--- a/spec/lib/gitlab/experimentation/experiment_spec.rb
+++ b/spec/lib/gitlab/experimentation/experiment_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Gitlab::Experimentation::Experiment do
describe '#active?' do
before do
- allow(Gitlab).to receive(:dev_env_or_com?).and_return(on_gitlab_com)
+ allow(Gitlab).to receive(:com?).and_return(on_gitlab_com)
end
subject { experiment.active? }
diff --git a/spec/lib/gitlab/fips_spec.rb b/spec/lib/gitlab/fips_spec.rb
new file mode 100644
index 00000000000..4d19a44f617
--- /dev/null
+++ b/spec/lib/gitlab/fips_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Gitlab::FIPS do
+ describe ".enabled?" do
+ subject { described_class.enabled? }
+
+ let(:openssl_fips_mode) { false }
+ let(:fips_mode_env_var) { nil }
+
+ before do
+ expect(OpenSSL).to receive(:fips_mode).and_return(openssl_fips_mode)
+ stub_env("FIPS_MODE", fips_mode_env_var)
+ end
+
+ describe "OpenSSL auto-detection" do
+ context "OpenSSL is in FIPS mode" do
+ let(:openssl_fips_mode) { true }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context "OpenSSL is not in FIPS mode" do
+ let(:openssl_fips_mode) { false }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe "manual configuration via env var" do
+ context "env var is not set" do
+ let(:fips_mode_env_var) { nil }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context "env var is set to true" do
+ let(:fips_mode_env_var) { "true" }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context "env var is set to false" do
+ let(:fips_mode_env_var) { "false" }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb b/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
index e160e88487b..a5f26a212ab 100644
--- a/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
+++ b/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
@@ -78,6 +78,29 @@ RSpec.describe Gitlab::FormBuilders::GitlabUiFormBuilder do
expect(fake_template).to have_received(:label).with(:user, :view_diffs_file_by_file, { class: %w(custom-control-label label-foo-bar), object: user, value: nil })
end
end
+
+ context 'with checkbox_options: { multiple: true }' do
+ let(:optional_args) do
+ {
+ checkbox_options: { multiple: true },
+ checked_value: 'one',
+ unchecked_value: false
+ }
+ end
+
+ it 'renders labels with correct for attributes' do
+ expected_html = <<~EOS
+ <div class="gl-form-checkbox custom-control custom-checkbox">
+ <input class="custom-control-input" type="checkbox" value="one" name="user[view_diffs_file_by_file][]" id="user_view_diffs_file_by_file_one" />
+ <label class="custom-control-label" for="user_view_diffs_file_by_file_one">
+ Show one file at a time on merge request&#39;s Changes tab
+ </label>
+ </div>
+ EOS
+
+ expect(checkbox_html).to eq(html_strip_whitespace(expected_html))
+ end
+ end
end
describe '#gitlab_ui_radio_component' do
diff --git a/spec/lib/gitlab/git/wiki_spec.rb b/spec/lib/gitlab/git/wiki_spec.rb
index ee0c0e2708e..dddcf8c40fc 100644
--- a/spec/lib/gitlab/git/wiki_spec.rb
+++ b/spec/lib/gitlab/git/wiki_spec.rb
@@ -48,14 +48,26 @@ RSpec.describe Gitlab::Git::Wiki do
end
it 'returns the right page' do
- expect(subject.page(title: 'page1', dir: '').url_path).to eq 'page1'
- expect(subject.page(title: 'page1', dir: 'foo').url_path).to eq 'foo/page1'
+ page = subject.page(title: 'page1', dir: '')
+ expect(page.url_path).to eq 'page1'
+ expect(page.raw_data).to eq 'content'
+
+ page = subject.page(title: 'page1', dir: 'foo')
+ expect(page.url_path).to eq 'foo/page1'
+ expect(page.raw_data).to eq 'content foo/page1'
end
it 'returns nil for invalid arguments' do
expect(subject.page(title: '')).to be_nil
expect(subject.page(title: 'foo', version: ':')).to be_nil
end
+
+ it 'does not return content if load_content param is set to false' do
+ page = subject.page(title: 'page1', dir: '', load_content: false)
+
+ expect(page.url_path).to eq 'page1'
+ expect(page.raw_data).to be_empty
+ end
end
describe '#preview_slug' do
diff --git a/spec/lib/gitlab/git_access_snippet_spec.rb b/spec/lib/gitlab/git_access_snippet_spec.rb
index d690a4b2db4..b6a61de87a6 100644
--- a/spec/lib/gitlab/git_access_snippet_spec.rb
+++ b/spec/lib/gitlab/git_access_snippet_spec.rb
@@ -397,38 +397,6 @@ RSpec.describe Gitlab::GitAccessSnippet do
end
end
- describe 'HEAD realignment' do
- let_it_be(:snippet) { create(:project_snippet, :private, :repository, project: project) }
-
- shared_examples 'HEAD is updated to the snippet default branch' do
- let(:actor) { snippet.author }
-
- specify do
- expect(snippet).to receive(:change_head_to_default_branch).and_call_original
-
- subject
- end
-
- context 'when an error is raised' do
- let(:actor) { nil }
-
- it 'does not realign HEAD' do
- expect(snippet).not_to receive(:change_head_to_default_branch).and_call_original
-
- expect { subject }.to raise_error(described_class::ForbiddenError)
- end
- end
- end
-
- it_behaves_like 'HEAD is updated to the snippet default branch' do
- subject { push_access_check }
- end
-
- it_behaves_like 'HEAD is updated to the snippet default branch' do
- subject { pull_access_check }
- end
- end
-
private
def raise_not_found(message_key)
diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
index f0115aa6b2b..0c04863f466 100644
--- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
@@ -386,6 +386,73 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
it_behaves_like 'cherry pick and revert errors'
end
+ describe '#rebase' do
+ let(:response) { Gitaly::UserRebaseConfirmableResponse.new }
+
+ subject do
+ client.rebase(
+ user,
+ '',
+ branch: 'master',
+ branch_sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0',
+ remote_repository: repository,
+ remote_branch: 'master'
+ )
+ end
+
+ shared_examples '#rebase with an error' do
+ it 'raises a GitError exception' do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_rebase_confirmable)
+ .and_raise(raised_error)
+
+ expect { subject }.to raise_error(expected_error)
+ end
+ end
+
+ context 'when AccessError is raised' do
+ let(:raised_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::INTERNAL,
+ 'something failed',
+ Gitaly::UserRebaseConfirmableError.new(
+ access_check: Gitaly::AccessCheckError.new(
+ error_message: 'something went wrong'
+ )))
+ end
+
+ let(:expected_error) { Gitlab::Git::PreReceiveError }
+
+ it_behaves_like '#rebase with an error'
+ end
+
+ context 'when RebaseConflictError is raised' do
+ let(:raised_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::INTERNAL,
+ 'something failed',
+ Gitaly::UserSquashError.new(
+ rebase_conflict: Gitaly::MergeConflictError.new(
+ conflicting_files: ['conflicting-file']
+ )))
+ end
+
+ let(:expected_error) { Gitlab::Git::Repository::GitError }
+
+ it_behaves_like '#rebase with an error'
+ end
+
+ context 'when non-detailed gRPC error is raised' do
+ let(:raised_error) do
+ GRPC::Internal.new('non-detailed error')
+ end
+
+ let(:expected_error) { GRPC::Internal }
+
+ it_behaves_like '#rebase with an error'
+ end
+ end
+
describe '#user_squash' do
let(:start_sha) { 'b83d6e391c22777fca1ed3012fce84f633d7fed0' }
let(:end_sha) { '54cec5282aa9f21856362fe321c800c236a61615' }
@@ -437,41 +504,93 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
end
end
- describe '#user_commit_files' do
- subject do
- client.user_commit_files(
- gitaly_user, 'my-branch', 'Commit files message', [], 'janedoe@example.com', 'Jane Doe',
- 'master', repository)
+ shared_examples '#user_squash with an error' do
+ it 'raises a GitError exception' do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_squash).with(request, kind_of(Hash))
+ .and_raise(raised_error)
+
+ expect { subject }.to raise_error(expected_error)
end
+ end
- before do
- expect_any_instance_of(Gitaly::OperationService::Stub)
- .to receive(:user_commit_files).with(kind_of(Enumerator), kind_of(Hash))
- .and_return(response)
+ context 'when ResolveRevisionError is raised' do
+ let(:raised_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::INVALID_ARGUMENT,
+ 'something failed',
+ Gitaly::UserSquashError.new(
+ resolve_revision: Gitaly::ResolveRevisionError.new(
+ revision: start_sha
+ )))
end
- context 'when a pre_receive_error is present' do
- let(:response) { Gitaly::UserCommitFilesResponse.new(pre_receive_error: "GitLab: something failed") }
+ let(:expected_error) { Gitlab::Git::Repository::GitError }
- it 'raises a PreReceiveError' do
- expect { subject }.to raise_error(Gitlab::Git::PreReceiveError, "something failed")
- end
+ it_behaves_like '#user_squash with an error'
+ end
+
+ context 'when RebaseConflictError is raised' do
+ let(:raised_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::INTERNAL,
+ 'something failed',
+ Gitaly::UserSquashError.new(
+ rebase_conflict: Gitaly::MergeConflictError.new(
+ conflicting_files: ['conflicting-file']
+ )))
end
- context 'when an index_error is present' do
- let(:response) { Gitaly::UserCommitFilesResponse.new(index_error: "something failed") }
+ let(:expected_error) { Gitlab::Git::Repository::GitError }
- it 'raises a PreReceiveError' do
- expect { subject }.to raise_error(Gitlab::Git::Index::IndexError, "something failed")
- end
+ it_behaves_like '#user_squash with an error'
+ end
+
+ context 'when non-detailed gRPC error is raised' do
+ let(:raised_error) do
+ GRPC::Internal.new('non-detailed error')
+ end
+
+ let(:expected_error) { GRPC::Internal }
+
+ it_behaves_like '#user_squash with an error'
+ end
+ end
+
+ describe '#user_commit_files' do
+ subject do
+ client.user_commit_files(
+ gitaly_user, 'my-branch', 'Commit files message', [], 'janedoe@example.com', 'Jane Doe',
+ 'master', repository)
+ end
+
+ before do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_commit_files).with(kind_of(Enumerator), kind_of(Hash))
+ .and_return(response)
+ end
+
+ context 'when a pre_receive_error is present' do
+ let(:response) { Gitaly::UserCommitFilesResponse.new(pre_receive_error: "GitLab: something failed") }
+
+ it 'raises a PreReceiveError' do
+ expect { subject }.to raise_error(Gitlab::Git::PreReceiveError, "something failed")
end
+ end
- context 'when branch_update is nil' do
- let(:response) { Gitaly::UserCommitFilesResponse.new }
+ context 'when an index_error is present' do
+ let(:response) { Gitaly::UserCommitFilesResponse.new(index_error: "something failed") }
- it { expect(subject).to be_nil }
+ it 'raises a PreReceiveError' do
+ expect { subject }.to raise_error(Gitlab::Git::Index::IndexError, "something failed")
end
end
+
+ context 'when branch_update is nil' do
+ let(:response) { Gitaly::UserCommitFilesResponse.new }
+
+ it { expect(subject).to be_nil }
+ end
end
describe '#user_commit_patches' do
diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
index 353726b56f6..39de9a65390 100644
--- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
@@ -54,6 +54,28 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService do
end
end
+ describe '#optimize_repository' do
+ it 'sends an optimize_repository message' do
+ expect_any_instance_of(Gitaly::RepositoryService::Stub)
+ .to receive(:optimize_repository)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_return(double(:optimize_repository))
+
+ client.optimize_repository
+ end
+ end
+
+ describe '#prune_unreachable_objects' do
+ it 'sends a prune_unreachable_objects message' do
+ expect_any_instance_of(Gitaly::RepositoryService::Stub)
+ .to receive(:prune_unreachable_objects)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_return(double(:prune_unreachable_objects))
+
+ client.prune_unreachable_objects
+ end
+ end
+
describe '#repository_size' do
it 'sends a repository_size message' do
expect_any_instance_of(Gitaly::RepositoryService::Stub)
@@ -196,6 +218,26 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService do
end
end
+ describe '#create_repository' do
+ it 'sends a create_repository message without arguments' do
+ expect_any_instance_of(Gitaly::RepositoryService::Stub)
+ .to receive(:create_repository)
+ .with(gitaly_request_with_path(storage_name, relative_path).and(gitaly_request_with_params(default_branch: '')), kind_of(Hash))
+ .and_return(double)
+
+ client.create_repository
+ end
+
+ it 'sends a create_repository message with default branch' do
+ expect_any_instance_of(Gitaly::RepositoryService::Stub)
+ .to receive(:create_repository)
+ .with(gitaly_request_with_path(storage_name, relative_path).and(gitaly_request_with_params(default_branch: 'default-branch-name')), kind_of(Hash))
+ .and_return(double)
+
+ client.create_repository('default-branch-name')
+ end
+ end
+
describe '#create_from_snapshot' do
it 'sends a create_repository_from_snapshot message' do
expect_any_instance_of(Gitaly::RepositoryService::Stub)
diff --git a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
index c8e744ab262..321ad7d3238 100644
--- a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter, :aggregate_fail
let(:updated_at) { Time.new(2017, 1, 1, 12, 15).utc }
let(:note_body) { 'Hello' }
let(:file_path) { 'files/ruby/popen.rb' }
+ let(:end_line) { 15 }
let(:diff_hunk) do
'@@ -14 +14 @@
@@ -31,7 +32,7 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter, :aggregate_fail
created_at: created_at,
updated_at: updated_at,
start_line: nil,
- end_line: 15,
+ end_line: end_line,
github_id: 1,
diff_hunk: diff_hunk,
side: 'RIGHT'
@@ -173,7 +174,24 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter, :aggregate_fail
NOTE
end
- context 'when the note diff file creation fails' do
+ context 'when the note diff file creation fails with DiffNoteCreationError due to outdated suggestion' do
+ let(:end_line) { nil }
+
+ it 'falls back to the LegacyDiffNote' do
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:warn)
+ .with(
+ message: "Validation failed: Line code can't be blank, Line code must be a valid line code, Position is incomplete",
+ 'error.class': 'Gitlab::GithubImport::Importer::DiffNoteImporter::DiffNoteCreationError'
+ )
+
+ expect { subject.execute }
+ .to change(LegacyDiffNote, :count)
+ .and not_change(DiffNote, :count)
+ end
+ end
+
+ context 'when the note diff file creation fails with NoteDiffFileCreationError' do
it 'falls back to the LegacyDiffNote' do
exception = ::DiffNote::NoteDiffFileCreationError.new('Failed to create diff note file')
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
index a70ff0bd82d..c1b0f4df29a 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
@@ -104,8 +104,13 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter do
.and_yield(pull_request)
expect(Gitlab::GithubImport::ImportPullRequestWorker)
- .to receive(:perform_async)
- .with(project.id, an_instance_of(Hash), an_instance_of(String))
+ .to receive(:bulk_perform_in)
+ .with(
+ 1.second,
+ [[project.id, an_instance_of(Hash), an_instance_of(String)]],
+ batch_delay: 1.minute,
+ batch_size: 200
+ )
waiter = importer.parallel_import
diff --git a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
index f375e84e0fd..6a19afbc60d 100644
--- a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
+++ b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
@@ -22,6 +22,10 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
def collection_method
:issues
end
+
+ def parallel_import_batch
+ { size: 10, delay: 1.minute }
+ end
end
end
@@ -254,35 +258,61 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
describe '#parallel_import' do
let(:importer) { importer_class.new(project, client) }
+ let(:repr_class) { double(:representation) }
+ let(:worker_class) { double(:worker) }
+ let(:object) { double(:object) }
+ let(:batch_size) { 200 }
+ let(:batch_delay) { 1.minute }
- it 'imports data in parallel' do
- repr_class = double(:representation)
- worker_class = double(:worker)
- object = double(:object)
-
- expect(importer)
- .to receive(:each_object_to_import)
- .and_yield(object)
-
- expect(importer)
+ before do
+ allow(importer)
.to receive(:representation_class)
.and_return(repr_class)
- expect(importer)
+ allow(importer)
.to receive(:sidekiq_worker_class)
.and_return(worker_class)
- expect(repr_class)
+ allow(repr_class)
.to receive(:from_api_response)
.with(object)
.and_return({ title: 'Foo' })
+ end
+
+ context 'with multiple objects' do
+ before do
+ allow(importer).to receive(:parallel_import_batch) { { size: batch_size, delay: batch_delay } }
+ expect(importer).to receive(:each_object_to_import).and_yield(object).and_yield(object).and_yield(object)
+ end
- expect(worker_class)
- .to receive(:perform_async)
- .with(project.id, { title: 'Foo' }, an_instance_of(String))
+ it 'imports data in parallel batches with delays' do
+ expect(worker_class).to receive(:bulk_perform_in).with(1.second, [
+ [project.id, { title: 'Foo' }, an_instance_of(String)],
+ [project.id, { title: 'Foo' }, an_instance_of(String)],
+ [project.id, { title: 'Foo' }, an_instance_of(String)]
+ ], batch_size: batch_size, batch_delay: batch_delay)
+
+ importer.parallel_import
+ end
+ end
- expect(importer.parallel_import)
- .to be_an_instance_of(Gitlab::JobWaiter)
+ context 'when FF is disabled' do
+ before do
+ stub_feature_flags(spread_parallel_import: false)
+ end
+
+ it 'imports data in parallel' do
+ expect(importer)
+ .to receive(:each_object_to_import)
+ .and_yield(object)
+
+ expect(worker_class)
+ .to receive(:perform_async)
+ .with(project.id, { title: 'Foo' }, an_instance_of(String))
+
+ expect(importer.parallel_import)
+ .to be_an_instance_of(Gitlab::JobWaiter)
+ end
end
end
diff --git a/spec/lib/gitlab/graphql/loaders/batch_commit_loader_spec.rb b/spec/lib/gitlab/graphql/loaders/batch_commit_loader_spec.rb
new file mode 100644
index 00000000000..c7e8b34bbe0
--- /dev/null
+++ b/spec/lib/gitlab/graphql/loaders/batch_commit_loader_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Graphql::Loaders::BatchCommitLoader do
+ include RepoHelpers
+
+ describe '#find' do
+ let_it_be(:first_project) { create(:project, :repository) }
+ let_it_be(:second_project) { create(:project, :repository) }
+
+ let_it_be(:first_commit) { first_project.commit(sample_commit.id) }
+ let_it_be(:second_commit) { first_project.commit(another_sample_commit.id) }
+ let_it_be(:third_commit) { second_project.commit(sample_big_commit.id) }
+
+ it 'finds a commit by id' do
+ result = described_class.new(
+ container_class: Project,
+ container_id: first_project.id,
+ oid: first_commit.id
+ ).find
+
+ expect(result.force).to eq(first_commit)
+ end
+
+ it 'only queries once' do
+ expect do
+ [
+ described_class.new(
+ container_class: Project,
+ container_id: first_project.id,
+ oid: first_commit.id
+ ).find,
+ described_class.new(
+ container_class: Project,
+ container_id: first_project.id,
+ oid: second_commit.id
+ ).find,
+ described_class.new(
+ container_class: Project,
+ container_id: second_project.id,
+ oid: third_commit.id
+ ).find
+ ].map(&:force)
+ end.not_to exceed_query_limit(2)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/graphql/markdown_field_spec.rb b/spec/lib/gitlab/graphql/markdown_field_spec.rb
index c2253811e91..ed3f19d8cf2 100644
--- a/spec/lib/gitlab/graphql/markdown_field_spec.rb
+++ b/spec/lib/gitlab/graphql/markdown_field_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Gitlab::Graphql::MarkdownField do
expect(field.name).to eq('testHtml')
expect(field.description).to eq('The GitLab Flavored Markdown rendering of `hello`')
expect(field.type).to eq(GraphQL::Types::String)
- expect(field.to_graphql.complexity).to eq(5)
+ expect(field.complexity).to eq(5)
end
context 'developer warnings' do
@@ -43,7 +43,7 @@ RSpec.describe Gitlab::Graphql::MarkdownField do
let(:field) { type_class.fields['noteHtml'] }
it 'renders markdown from the same property as the field name without the `_html` suffix' do
- expect(field.to_graphql.resolve(type_instance, {}, context)).to eq(expected_markdown)
+ expect(field.resolve(type_instance, {}, context)).to eq(expected_markdown)
end
context 'when a `method` argument is passed' do
@@ -51,7 +51,7 @@ RSpec.describe Gitlab::Graphql::MarkdownField do
let(:field) { type_class.fields['testHtml'] }
it 'renders markdown from a specific property' do
- expect(field.to_graphql.resolve(type_instance, {}, context)).to eq(expected_markdown)
+ expect(field.resolve(type_instance, {}, context)).to eq(expected_markdown)
end
end
@@ -62,21 +62,21 @@ RSpec.describe Gitlab::Graphql::MarkdownField do
let(:note) { build(:note, note: "Referencing #{issue.to_reference(full: true)}") }
it 'renders markdown correctly' do
- expect(field.to_graphql.resolve(type_instance, {}, context)).to include(issue_path(issue))
+ expect(field.resolve(type_instance, {}, context)).to include(issue_path(issue))
end
context 'when the issue is not publicly accessible' do
let_it_be(:project) { create(:project, :private) }
it 'hides the references from users that are not allowed to see the reference' do
- expect(field.to_graphql.resolve(type_instance, {}, context)).not_to include(issue_path(issue))
+ expect(field.resolve(type_instance, {}, context)).not_to include(issue_path(issue))
end
it 'shows the reference to users that are allowed to see it' do
context = GraphQL::Query::Context.new(query: query, values: { current_user: project.first_owner }, object: nil)
type_instance = type_class.authorized_new(note, context)
- expect(field.to_graphql.resolve(type_instance, {}, context)).to include(issue_path(issue))
+ expect(field.resolve(type_instance, {}, context)).to include(issue_path(issue))
end
end
end
diff --git a/spec/lib/gitlab/graphql/mount_mutation_spec.rb b/spec/lib/gitlab/graphql/mount_mutation_spec.rb
index fe25e923506..09fd9eac714 100644
--- a/spec/lib/gitlab/graphql/mount_mutation_spec.rb
+++ b/spec/lib/gitlab/graphql/mount_mutation_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Gitlab::Graphql::MountMutation do
f.mount_mutation(mutation)
end
- mutation_type.get_field('testMutation').to_graphql
+ mutation_type.get_field('testMutation')
end
it 'mounts a mutation' do
@@ -31,7 +31,7 @@ RSpec.describe Gitlab::Graphql::MountMutation do
f.mount_aliased_mutation('MyAlias', mutation)
end
- mutation_type.get_field('myAlias').to_graphql
+ mutation_type.get_field('myAlias')
end
it 'mounts a mutation' do
@@ -43,11 +43,11 @@ RSpec.describe Gitlab::Graphql::MountMutation do
end
it 'has a correct type' do
- expect(field.type.name).to eq('MyAliasPayload')
+ expect(field.type.to_type_signature).to eq('MyAliasPayload')
end
it 'has a correct input argument' do
- expect(field.arguments['input'].type.unwrap.name).to eq('MyAliasInput')
+ expect(field.arguments['input'].type.unwrap.to_type_signature).to eq('MyAliasInput')
end
end
diff --git a/spec/lib/gitlab/harbor/client_spec.rb b/spec/lib/gitlab/harbor/client_spec.rb
new file mode 100644
index 00000000000..bc5b593370a
--- /dev/null
+++ b/spec/lib/gitlab/harbor/client_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Harbor::Client do
+ let(:harbor_integration) { build(:harbor_integration) }
+
+ subject(:client) { described_class.new(harbor_integration) }
+
+ describe '#ping' do
+ let!(:harbor_ping_request) { stub_harbor_request("https://demo.goharbor.io/api/v2.0/ping") }
+
+ it "calls api/v2.0/ping successfully" do
+ expect(client.ping).to eq(success: true)
+ end
+ end
+
+ private
+
+ def stub_harbor_request(url, body: {}, status: 200, headers: {})
+ stub_request(:get, url)
+ .to_return(
+ status: status,
+ headers: { 'Content-Type' => 'application/json' }.merge(headers),
+ body: body.to_json
+ )
+ end
+end
diff --git a/spec/lib/gitlab/health_checks/db_check_spec.rb b/spec/lib/gitlab/health_checks/db_check_spec.rb
index 60ebc596a0f..09b2650eae8 100644
--- a/spec/lib/gitlab/health_checks/db_check_spec.rb
+++ b/spec/lib/gitlab/health_checks/db_check_spec.rb
@@ -4,5 +4,20 @@ require 'spec_helper'
require_relative './simple_check_shared'
RSpec.describe Gitlab::HealthChecks::DbCheck do
- include_examples 'simple_check', 'db_ping', 'Db', '1'
+ include_examples 'simple_check', 'db_ping', 'Db', Gitlab::Database.database_base_models.size
+
+ context 'with multiple databases' do
+ subject { described_class.readiness }
+
+ before do
+ allow(Gitlab::Database).to receive(:database_base_models)
+ .and_return({ main: ApplicationRecord, ci: Ci::ApplicationRecord }.with_indifferent_access)
+ end
+
+ it 'checks multiple databases' do
+ expect(ApplicationRecord.connection).to receive(:select_value).with('SELECT 1').and_call_original
+ expect(Ci::ApplicationRecord.connection).to receive(:select_value).with('SELECT 1').and_call_original
+ expect(subject).to have_attributes(success: true)
+ end
+ end
end
diff --git a/spec/lib/gitlab/highlight_spec.rb b/spec/lib/gitlab/highlight_spec.rb
index 1f06019c929..65d8c59fea7 100644
--- a/spec/lib/gitlab/highlight_spec.rb
+++ b/spec/lib/gitlab/highlight_spec.rb
@@ -53,10 +53,6 @@ RSpec.describe Gitlab::Highlight do
stub_config(extra: { 'maximum_text_highlight_size_kilobytes' => 0.0001 }) # 0.1024 bytes
end
- it 'increments the metric for oversized files' do
- expect { result }.to change { over_highlight_size_limit('file size: 0.0001') }.by(1)
- end
-
it 'returns plain version for long content' do
expect(result).to eq(%[<span id="LC1" class="line" lang="">(make-pathname :defaults name</span>\n<span id="LC2" class="line" lang="">:type "assem")</span>])
end
@@ -126,79 +122,29 @@ RSpec.describe Gitlab::Highlight do
end
context 'timeout' do
- subject { described_class.new('file.name', 'Contents') }
+ subject(:highlight) { described_class.new('file.rb', 'begin', language: 'ruby').highlight('Content') }
it 'utilizes timeout for web' do
expect(Timeout).to receive(:timeout).with(described_class::TIMEOUT_FOREGROUND).and_call_original
- subject.highlight("Content")
+ highlight
end
- it 'utilizes longer timeout for sidekiq' do
- allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
- expect(Timeout).to receive(:timeout).with(described_class::TIMEOUT_BACKGROUND).and_call_original
+ it 'falls back to plaintext on timeout' do
+ allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+ expect(Timeout).to receive(:timeout).and_raise(Timeout::Error)
- subject.highlight("Content")
- end
- end
+ expect(Rouge::Lexers::PlainText).to receive(:lex).and_call_original
- describe 'highlight timeouts' do
- let(:result) { described_class.highlight(file_name, content, language: "ruby") }
-
- context 'when there is an attempt' do
- it "increments the attempt counter with a defined language" do
- expect { result }.to change { highlight_attempt_total("ruby") }
- end
-
- it "increments the attempt counter with an undefined language" do
- expect do
- described_class.highlight(file_name, content)
- end.to change { highlight_attempt_total("undefined") }
- end
+ highlight
end
- context 'when there is a timeout error while highlighting' do
- before do
- allow(Timeout).to receive(:timeout).twice.and_raise(Timeout::Error)
- # This is done twice because it's rescued first and then
- # calls the original exception
- end
-
- it "increments the foreground counter if it's in the foreground" do
- expect { result }
- .to raise_error(Timeout::Error)
- .and change { highlight_timeout_total('foreground') }.by(1)
- .and not_change { highlight_timeout_total('background') }
- end
-
- it "increments the background counter if it's in the background" do
- allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
+ it 'utilizes longer timeout for sidekiq' do
+ allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
+ expect(Timeout).to receive(:timeout).with(described_class::TIMEOUT_BACKGROUND).and_call_original
- expect { result }
- .to raise_error(Timeout::Error)
- .and change { highlight_timeout_total('background') }.by(1)
- .and not_change { highlight_timeout_total('foreground') }
- end
+ highlight
end
end
end
-
- def highlight_timeout_total(source)
- Gitlab::Metrics
- .counter(:highlight_timeout, 'Counts the times highlights have timed out')
- .get(source: source)
- end
-
- def highlight_attempt_total(source)
- Gitlab::Metrics
- .counter(:file_highlighting_attempt, 'Counts the times highlighting has been attempted on a file')
- .get(source: source)
- end
-
- def over_highlight_size_limit(source)
- Gitlab::Metrics
- .counter(:over_highlight_size_limit,
- 'Count the times text has been over the highlight size limit')
- .get(source: source)
- end
end
diff --git a/spec/lib/gitlab/hook_data/issue_builder_spec.rb b/spec/lib/gitlab/hook_data/issue_builder_spec.rb
index 039b4c19522..b9490306410 100644
--- a/spec/lib/gitlab/hook_data/issue_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/issue_builder_spec.rb
@@ -63,5 +63,13 @@ RSpec.describe Gitlab::HookData::IssueBuilder do
.to eq("test![Issue_Image](#{Settings.gitlab.url}/#{expected_path})")
end
end
+
+ context 'for incident' do
+ let_it_be(:issue) { create(:incident, :with_escalation_status) }
+
+ it 'includes additional attr' do
+ expect(data).to include(:escalation_status)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index ce13f405459..29a19e4cafd 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -34,6 +34,7 @@ issues:
- issuable_severity
- issuable_sla
- issue_assignees
+- search_data
- closed_by
- epic_issue
- epic
@@ -54,6 +55,7 @@ issues:
- status_page_published_incident
- namespace
- note_authors
+- user_note_authors
- issue_email_participants
- test_reports
- requirement
@@ -199,6 +201,7 @@ merge_requests:
- user_mentions
- system_note_metadata
- note_authors
+- user_note_authors
- cleanup_schedule
- compliance_violations
external_pull_requests:
@@ -392,6 +395,7 @@ project:
- mattermost_slash_commands_integration
- shimo_integration
- slack_slash_commands_integration
+- harbor_integration
- irker_integration
- packagist_integration
- pivotaltracker_integration
@@ -607,6 +611,7 @@ project:
- sync_events
- secure_files
- security_trainings
+- vulnerability_reads
award_emoji:
- awardable
- user
@@ -627,6 +632,8 @@ issuable_severity:
issue_assignees:
- issue
- assignee
+search_data:
+- issue
merge_request_assignees:
- merge_request
- assignee
@@ -771,6 +778,7 @@ epic:
- resource_state_events
- user_mentions
- note_authors
+- user_note_authors
- boards_epic_user_preferences
- epic_board_positions
epic_issue:
diff --git a/spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb b/spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb
new file mode 100644
index 00000000000..7c84b9604a6
--- /dev/null
+++ b/spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb
@@ -0,0 +1,132 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ImportExport::Base::RelationObjectSaver do
+ let(:project) { create(:project) }
+ let(:relation_object) { build(:issue, project: project) }
+ let(:relation_definition) { {} }
+ let(:importable) { project }
+ let(:relation_key) { 'issues' }
+
+ subject(:saver) do
+ described_class.new(
+ relation_object: relation_object,
+ relation_key: relation_key,
+ relation_definition: relation_definition,
+ importable: importable
+ )
+ end
+
+ describe '#save' do
+ before do
+ expect(relation_object).to receive(:save!).and_call_original
+ end
+
+ it 'saves relation object' do
+ expect { saver.execute }.to change(project.issues, :count).by(1)
+ end
+
+ context 'when subrelation is present' do
+ let(:notes) { build_list(:note, 6, project: project, importing: true) }
+ let(:relation_object) { build(:issue, project: project, notes: notes) }
+ let(:relation_definition) { { 'notes' => {} } }
+
+ it 'saves relation object with subrelations' do
+ expect(relation_object.notes).to receive(:<<).and_call_original
+
+ saver.execute
+
+ issue = project.issues.last
+ expect(issue.notes.count).to eq(6)
+ end
+ end
+
+ context 'when subrelation is not a collection' do
+ let(:sentry_issue) { build(:sentry_issue, importing: true) }
+ let(:relation_object) { build(:issue, project: project, sentry_issue: sentry_issue) }
+ let(:relation_definition) { { 'sentry_issue' => {} } }
+
+ it 'saves subrelation as part of the relation object itself' do
+ expect(relation_object.notes).not_to receive(:<<)
+
+ saver.execute
+
+ issue = project.issues.last
+ expect(issue.sentry_issue.persisted?).to eq(true)
+ end
+ end
+
+ context 'when subrelation collection count is small' do
+ let(:notes) { build_list(:note, 2, project: project, importing: true) }
+ let(:relation_object) { build(:issue, project: project, notes: notes) }
+ let(:relation_definition) { { 'notes' => {} } }
+
+ it 'saves subrelation as part of the relation object itself' do
+ expect(relation_object.notes).not_to receive(:<<)
+
+ saver.execute
+
+ issue = project.issues.last
+ expect(issue.notes.count).to eq(2)
+ end
+ end
+
+ context 'when some subrelations are invalid' do
+ let(:notes) { build_list(:note, 5, project: project, importing: true) }
+ let(:invalid_note) { build(:note) }
+ let(:relation_object) { build(:issue, project: project, notes: notes + [invalid_note]) }
+ let(:relation_definition) { { 'notes' => {} } }
+
+ it 'saves valid subrelations and logs invalid subrelation' do
+ expect(relation_object.notes).to receive(:<<).and_call_original
+ expect(Gitlab::Import::Logger)
+ .to receive(:info)
+ .with(
+ message: '[Project/Group Import] Invalid subrelation',
+ project_id: project.id,
+ relation_key: 'issues',
+ error_messages: "Noteable can't be blank and Project does not match noteable project"
+ )
+
+ saver.execute
+
+ issue = project.issues.last
+ import_failure = project.import_failures.last
+
+ expect(issue.notes.count).to eq(5)
+ expect(import_failure.source).to eq('RelationObjectSaver#save!')
+ expect(import_failure.exception_message).to eq("Noteable can't be blank and Project does not match noteable project")
+ end
+
+ context 'when importable is group' do
+ let(:relation_key) { 'labels' }
+ let(:relation_definition) { { 'priorities' => {} } }
+ let(:importable) { create(:group) }
+ let(:valid_priorities) { build_list(:label_priority, 5, importing: true) }
+ let(:invalid_priority) { build(:label_priority, priority: -1) }
+ let(:relation_object) { build(:group_label, group: importable, title: 'test', priorities: valid_priorities + [invalid_priority]) }
+
+ it 'logs invalid subrelation for a group' do
+ expect(Gitlab::Import::Logger)
+ .to receive(:info)
+ .with(
+ message: '[Project/Group Import] Invalid subrelation',
+ group_id: importable.id,
+ relation_key: 'labels',
+ error_messages: 'Priority must be greater than or equal to 0'
+ )
+
+ saver.execute
+
+ label = importable.labels.last
+ import_failure = importable.import_failures.last
+
+ expect(label.priorities.count).to eq(5)
+ expect(import_failure.source).to eq('RelationObjectSaver#save!')
+ expect(import_failure.exception_message).to eq('Priority must be greater than or equal to 0')
+ end
+ end
+ end
+ end
+end
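
Editor's note: the new RelationObjectSaver spec above exercises a pattern where the imported relation object is saved first and its subrelations are then appended individually, with invalid records skipped and their error messages collected for logging. A much-simplified sketch of that idea follows (hypothetical class, not the production Gitlab::ImportExport::Base::RelationObjectSaver):

    # Hypothetical, simplified version of the save-then-append pattern the spec
    # exercises; invalid subrelation records are dropped and their messages kept
    # so the caller can log them.
    class SimpleRelationSaver
      attr_reader :invalid_messages

      def initialize(relation_object:, subrelation_names:)
        @relation_object = relation_object
        @subrelation_names = subrelation_names
        @invalid_messages = []
      end

      def execute
        @subrelation_names.each do |name|
          collection = @relation_object.public_send(name)
          next unless collection.is_a?(Enumerable)

          valid, invalid = collection.partition(&:valid?)
          @invalid_messages.concat(invalid.map { |record| record.errors.full_messages.to_sentence })
          @relation_object.public_send("#{name}=", valid)
        end

        @relation_object.save!
      end
    end
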
diff --git a/spec/lib/gitlab/import_export/command_line_util_spec.rb b/spec/lib/gitlab/import_export/command_line_util_spec.rb
index 738a76d3360..f5913da08ba 100644
--- a/spec/lib/gitlab/import_export/command_line_util_spec.rb
+++ b/spec/lib/gitlab/import_export/command_line_util_spec.rb
@@ -17,6 +17,9 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil do
def initialize
@shared = Gitlab::ImportExport::Shared.new(nil)
end
+
+ # Make the included methods public for testing
+ public :download_or_copy_upload, :download
end.new
end
@@ -38,6 +41,156 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil do
expect(file_permissions("#{path}/uploads")).to eq(0755) # originally 555
end
+ describe '#download_or_copy_upload' do
+ let(:upload) { instance_double(Upload, local?: local) }
+ let(:uploader) { instance_double(ImportExportUploader, path: :path, url: :url, upload: upload) }
+ let(:upload_path) { '/some/path' }
+
+ context 'when the upload is local' do
+ let(:local) { true }
+
+ it 'copies the file' do
+ expect(subject).to receive(:copy_files).with(:path, upload_path)
+
+ subject.download_or_copy_upload(uploader, upload_path)
+ end
+ end
+
+ context 'when the upload is remote' do
+ let(:local) { false }
+
+ it 'downloads the file' do
+ expect(subject).to receive(:download).with(:url, upload_path, size_limit: nil)
+
+ subject.download_or_copy_upload(uploader, upload_path)
+ end
+ end
+ end
+
+ describe '#download' do
+ let(:content) { File.open('spec/fixtures/rails_sample.tif') }
+
+ context 'a non-localhost uri' do
+ before do
+ stub_request(:get, url)
+ .to_return(
+ status: status,
+ body: content
+ )
+ end
+
+ let(:url) { 'https://gitlab.com/file' }
+
+ context 'with ok status code' do
+ let(:status) { HTTP::Status::OK }
+
+ it 'gets the contents' do
+ Tempfile.create('test') do |file|
+ subject.download(url, file.path)
+ expect(file.read).to eq(File.open('spec/fixtures/rails_sample.tif').read)
+ end
+ end
+
+ it 'streams the contents via Gitlab::HTTP' do
+ expect(Gitlab::HTTP).to receive(:get).with(url, hash_including(stream_body: true))
+
+ Tempfile.create('test') do |file|
+ subject.download(url, file.path)
+ end
+ end
+
+ it 'does not get the content over the size_limit' do
+ Tempfile.create('test') do |file|
+ subject.download(url, file.path, size_limit: 300.kilobytes)
+ expect(file.read).to eq('')
+ end
+ end
+
+ it 'gets the content within the size_limit' do
+ Tempfile.create('test') do |file|
+ subject.download(url, file.path, size_limit: 400.kilobytes)
+ expect(file.read).to eq(File.open('spec/fixtures/rails_sample.tif').read)
+ end
+ end
+ end
+
+ %w[MOVED_PERMANENTLY FOUND TEMPORARY_REDIRECT].each do |code|
+ context "with a redirect status code #{code}" do
+ let(:status) { HTTP::Status.const_get(code, false) }
+
+ it 'logs the redirect' do
+ expect(Gitlab::Import::Logger).to receive(:warn)
+
+ Tempfile.create('test') do |file|
+ subject.download(url, file.path)
+ end
+ end
+ end
+ end
+
+ %w[ACCEPTED UNAUTHORIZED BAD_REQUEST].each do |code|
+ context "with an invalid status code #{code}" do
+ let(:status) { HTTP::Status.const_get(code, false) }
+
+ it 'throws an error' do
+ Tempfile.create('test') do |file|
+ expect { subject.download(url, file.path) }.to raise_error(Gitlab::ImportExport::Error)
+ end
+ end
+ end
+ end
+ end
+
+ context 'a localhost uri' do
+ include StubRequests
+
+ let(:status) { HTTP::Status::OK }
+ let(:url) { "#{host}/foo/bar" }
+ let(:host) { 'http://localhost:8081' }
+
+ before do
+ # Note: the hostname gets changed to an ip address due to dns_rebind_protection
+ stub_dns(url, ip_address: '127.0.0.1')
+ stub_request(:get, 'http://127.0.0.1:8081/foo/bar')
+ .to_return(
+ status: status,
+ body: content
+ )
+ end
+
+ it 'throws a blocked url error' do
+ Tempfile.create('test') do |file|
+ expect { subject.download(url, file.path) }.to raise_error(Gitlab::HTTP::BlockedUrlError)
+ end
+ end
+
+ context 'for object_storage uri' do
+ let(:enabled_object_storage_setting) do
+ {
+ 'object_store' =>
+ {
+ 'enabled' => true,
+ 'connection' => {
+ 'endpoint' => host
+ }
+ }
+ }
+ end
+
+ before do
+ allow(Settings).to receive(:external_diffs).and_return(enabled_object_storage_setting)
+ end
+
+ it 'gets the content' do
+ Tempfile.create('test') do |file|
+ subject.download(url, file.path)
+ expect(file.read).to eq(File.open('spec/fixtures/rails_sample.tif').read)
+ end
+ end
+ end
+ end
+ end
+
describe '#gzip' do
it 'compresses specified file' do
tempfile = Tempfile.new('test', path)
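
Editor's note: the #download examples above stub Gitlab::HTTP with stream_body: true and assert that nothing is kept once an optional size_limit is exceeded. As a generic illustration of that behaviour (plain Net::HTTP, not the GitLab helper itself), a streaming download with a byte cap might look like:

    require 'net/http'
    require 'uri'

    # Generic illustration only: stream a response body to disk and discard the
    # file contents if an optional byte limit is exceeded, mirroring the
    # "does not get the content over the size_limit" expectation above.
    def download_with_limit(url, destination, size_limit: nil)
      uri = URI.parse(url)
      written = 0

      Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == 'https') do |http|
        http.request_get(uri.request_uri) do |response|
          raise "unexpected status #{response.code}" unless response.is_a?(Net::HTTPSuccess)

          File.open(destination, 'wb') do |file|
            response.read_body do |chunk|
              written += chunk.bytesize

              if size_limit && written > size_limit
                file.truncate(0) # keep the file empty rather than partially written
                break
              end

              file.write(chunk)
            end
          end
        end
      end
    end
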
diff --git a/spec/lib/gitlab/import_export/file_importer_spec.rb b/spec/lib/gitlab/import_export/file_importer_spec.rb
index ed4436b7257..7b27f7183b0 100644
--- a/spec/lib/gitlab/import_export/file_importer_spec.rb
+++ b/spec/lib/gitlab/import_export/file_importer_spec.rb
@@ -72,6 +72,25 @@ RSpec.describe Gitlab::ImportExport::FileImporter do
expect(shared.export_path).to include('test/abcd')
end
+ context 'when the import file is not remote' do
+ include AfterNextHelpers
+
+ it 'downloads the file from a remote object storage' do
+ import_export_upload = build(:import_export_upload)
+ project = build(:project, import_export_upload: import_export_upload)
+
+ expect_next(described_class)
+ .to receive(:download_or_copy_upload)
+ .with(
+ import_export_upload.import_file,
+ kind_of(String),
+ size_limit: ::Import::GitlabProjects::RemoteFileValidator::FILE_SIZE_LIMIT
+ )
+
+ described_class.import(importable: project, archive_file: nil, shared: shared)
+ end
+ end
+
context 'when the import file is remote' do
include AfterNextHelpers
@@ -82,7 +101,11 @@ RSpec.describe Gitlab::ImportExport::FileImporter do
expect_next(described_class)
.to receive(:download)
- .with(file_url, kind_of(String))
+ .with(
+ file_url,
+ kind_of(String),
+ size_limit: ::Import::GitlabProjects::RemoteFileValidator::FILE_SIZE_LIMIT
+ )
described_class.import(importable: project, archive_file: nil, shared: shared)
end
diff --git a/spec/lib/gitlab/import_export/group/object_builder_spec.rb b/spec/lib/gitlab/import_export/group/object_builder_spec.rb
index 028bd5463a1..09f40199b31 100644
--- a/spec/lib/gitlab/import_export/group/object_builder_spec.rb
+++ b/spec/lib/gitlab/import_export/group/object_builder_spec.rb
@@ -51,16 +51,4 @@ RSpec.describe Gitlab::ImportExport::Group::ObjectBuilder do
expect(milestone.persisted?).to be true
end
end
-
- describe '#initialize' do
- context 'when attributes contain description as empty string' do
- let(:attributes) { base_attributes.merge('description' => '') }
-
- it 'converts empty string to nil' do
- builder = described_class.new(Label, attributes)
-
- expect(builder.send(:attributes)).to include({ 'description' => nil })
- end
- end
- end
end
diff --git a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
index b67d42d1b71..9b01005c2e9 100644
--- a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
@@ -5,116 +5,117 @@ require 'spec_helper'
RSpec.describe Gitlab::ImportExport::Group::TreeRestorer do
include ImportExport::CommonUtil
- describe 'restore group tree' do
- before_all do
- # Using an admin for import, so we can check assignment of existing members
- user = create(:admin, email: 'root@gitlabexample.com')
- create(:user, email: 'adriene.mcclure@gitlabexample.com')
- create(:user, email: 'gwendolyn_robel@gitlabexample.com')
+ shared_examples 'group restoration' do
+ describe 'restore group tree' do
+ before_all do
+ # Using an admin for import, so we can check assignment of existing members
+ user = create(:admin, email: 'root@gitlabexample.com')
+ create(:user, email: 'adriene.mcclure@gitlabexample.com')
+ create(:user, email: 'gwendolyn_robel@gitlabexample.com')
- RSpec::Mocks.with_temporary_scope do
- @group = create(:group, name: 'group', path: 'group')
- @shared = Gitlab::ImportExport::Shared.new(@group)
+ RSpec::Mocks.with_temporary_scope do
+ @group = create(:group, name: 'group', path: 'group')
+ @shared = Gitlab::ImportExport::Shared.new(@group)
- setup_import_export_config('group_exports/complex')
+ setup_import_export_config('group_exports/complex')
- group_tree_restorer = described_class.new(user: user, shared: @shared, group: @group)
+ group_tree_restorer = described_class.new(user: user, shared: @shared, group: @group)
- expect(group_tree_restorer.restore).to be_truthy
- expect(group_tree_restorer.groups_mapping).not_to be_empty
+ expect(group_tree_restorer.restore).to be_truthy
+ expect(group_tree_restorer.groups_mapping).not_to be_empty
+ end
end
- end
-
- it 'has the group description' do
- expect(Group.find_by_path('group').description).to eq('Group Description')
- end
- it 'has group labels' do
- expect(@group.labels.count).to eq(10)
- end
+ it 'has the group description' do
+ expect(Group.find_by_path('group').description).to eq('Group Description')
+ end
- context 'issue boards' do
- it 'has issue boards' do
- expect(@group.boards.count).to eq(1)
+ it 'has group labels' do
+ expect(@group.labels.count).to eq(10)
end
- it 'has board label lists' do
- lists = @group.boards.find_by(name: 'first board').lists
+ context 'issue boards' do
+ it 'has issue boards' do
+ expect(@group.boards.count).to eq(1)
+ end
+
+ it 'has board label lists' do
+ lists = @group.boards.find_by(name: 'first board').lists
- expect(lists.count).to eq(3)
- expect(lists.first.label.title).to eq('TSL')
- expect(lists.second.label.title).to eq('Sosync')
+ expect(lists.count).to eq(3)
+ expect(lists.first.label.title).to eq('TSL')
+ expect(lists.second.label.title).to eq('Sosync')
+ end
end
- end
- it 'has badges' do
- expect(@group.badges.count).to eq(1)
- end
+ it 'has badges' do
+ expect(@group.badges.count).to eq(1)
+ end
- it 'has milestones' do
- expect(@group.milestones.count).to eq(5)
- end
+ it 'has milestones' do
+ expect(@group.milestones.count).to eq(5)
+ end
- it 'has group children' do
- expect(@group.children.count).to eq(2)
- end
+ it 'has group children' do
+ expect(@group.children.count).to eq(2)
+ end
- it 'has group members' do
- expect(@group.members.map(&:user).map(&:email)).to contain_exactly(
- 'root@gitlabexample.com',
- 'adriene.mcclure@gitlabexample.com',
- 'gwendolyn_robel@gitlabexample.com'
- )
+ it 'has group members' do
+ expect(@group.members.map(&:user).map(&:email)).to contain_exactly(
+ 'root@gitlabexample.com',
+ 'adriene.mcclure@gitlabexample.com',
+ 'gwendolyn_robel@gitlabexample.com'
+ )
+ end
end
- end
- context 'child with no parent' do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:shared) { Gitlab::ImportExport::Shared.new(group) }
- let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
+ context 'child with no parent' do
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
- before do
- setup_import_export_config('group_exports/child_with_no_parent')
- end
+ before do
+ setup_import_export_config('group_exports/child_with_no_parent')
+ end
- it 'captures import failures when a child group does not have a valid parent_id' do
- group_tree_restorer.restore
+ it 'captures import failures when a child group does not have a valid parent_id' do
+ group_tree_restorer.restore
- expect(group.import_failures.first.exception_message).to eq('Parent group not found')
+ expect(group.import_failures.first.exception_message).to eq('Parent group not found')
+ end
end
- end
- context 'when child group creation fails' do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:shared) { Gitlab::ImportExport::Shared.new(group) }
- let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
+ context 'when child group creation fails' do
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
- before do
- setup_import_export_config('group_exports/child_short_name')
- end
+ before do
+ setup_import_export_config('group_exports/child_short_name')
+ end
- it 'captures import failure' do
- exception_message = 'Validation failed: Group URL is too short (minimum is 2 characters)'
+ it 'captures import failure' do
+ exception_message = 'Validation failed: Group URL is too short (minimum is 2 characters)'
- group_tree_restorer.restore
+ group_tree_restorer.restore
- expect(group.import_failures.first.exception_message).to eq(exception_message)
+ expect(group.import_failures.first.exception_message).to eq(exception_message)
+ end
end
- end
- context 'excluded attributes' do
- let!(:source_user) { create(:user, id: 123) }
- let!(:importer_user) { create(:user) }
- let(:group) { create(:group, name: 'user-inputed-name', path: 'user-inputed-path') }
- let(:shared) { Gitlab::ImportExport::Shared.new(group) }
- let(:group_tree_restorer) { described_class.new(user: importer_user, shared: shared, group: group) }
- let(:exported_file) { File.join(shared.export_path, 'tree/groups/4352.json') }
- let(:group_json) { Gitlab::Json.parse(IO.read(exported_file)) }
-
- shared_examples 'excluded attributes' do
- excluded_attributes = %w[
+ context 'excluded attributes' do
+ let!(:source_user) { create(:user, id: 123) }
+ let!(:importer_user) { create(:user) }
+ let(:group) { create(:group, name: 'user-inputed-name', path: 'user-inputed-path') }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: importer_user, shared: shared, group: group) }
+ let(:exported_file) { File.join(shared.export_path, 'tree/groups/4352.json') }
+ let(:group_json) { Gitlab::Json.parse(IO.read(exported_file)) }
+
+ shared_examples 'excluded attributes' do
+ excluded_attributes = %w[
id
parent_id
owner_id
@@ -125,80 +126,97 @@ RSpec.describe Gitlab::ImportExport::Group::TreeRestorer do
saml_discovery_token
]
- before do
- group.add_owner(importer_user)
+ before do
+ group.add_owner(importer_user)
- setup_import_export_config('group_exports/complex')
+ setup_import_export_config('group_exports/complex')
- expect(File.exist?(exported_file)).to be_truthy
+ expect(File.exist?(exported_file)).to be_truthy
- group_tree_restorer.restore
- group.reload
- end
+ group_tree_restorer.restore
+ group.reload
+ end
- it 'does not import root group name' do
- expect(group.name).to eq('user-inputed-name')
- end
+ it 'does not import root group name' do
+ expect(group.name).to eq('user-inputed-name')
+ end
- it 'does not import root group path' do
- expect(group.path).to eq('user-inputed-path')
- end
+ it 'does not import root group path' do
+ expect(group.path).to eq('user-inputed-path')
+ end
- excluded_attributes.each do |excluded_attribute|
- it 'does not allow override of excluded attributes' do
- unless group.public_send(excluded_attribute).nil?
- expect(group_json[excluded_attribute]).not_to eq(group.public_send(excluded_attribute))
+ excluded_attributes.each do |excluded_attribute|
+ it 'does not allow override of excluded attributes' do
+ unless group.public_send(excluded_attribute).nil?
+ expect(group_json[excluded_attribute]).not_to eq(group.public_send(excluded_attribute))
+ end
end
end
end
- end
- include_examples 'excluded attributes'
- end
+ include_examples 'excluded attributes'
+ end
- context 'group.json file access check' do
- let(:user) { create(:user) }
- let!(:group) { create(:group, name: 'group2', path: 'group2') }
- let(:shared) { Gitlab::ImportExport::Shared.new(group) }
- let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
+ context 'group.json file access check' do
+ let(:user) { create(:user) }
+ let!(:group) { create(:group, name: 'group2', path: 'group2') }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
- it 'does not read a symlink' do
- Dir.mktmpdir do |tmpdir|
- FileUtils.mkdir_p(File.join(tmpdir, 'tree', 'groups'))
- setup_symlink(tmpdir, 'tree/groups/_all.ndjson')
+ it 'does not read a symlink' do
+ Dir.mktmpdir do |tmpdir|
+ FileUtils.mkdir_p(File.join(tmpdir, 'tree', 'groups'))
+ setup_symlink(tmpdir, 'tree/groups/_all.ndjson')
- allow(shared).to receive(:export_path).and_return(tmpdir)
+ allow(shared).to receive(:export_path).and_return(tmpdir)
- expect(group_tree_restorer.restore).to eq(false)
- expect(shared.errors).to include('Incorrect JSON format')
+ expect(group_tree_restorer.restore).to eq(false)
+ expect(shared.errors).to include('Incorrect JSON format')
+ end
end
end
- end
- context 'group visibility levels' do
- let(:user) { create(:user) }
- let(:shared) { Gitlab::ImportExport::Shared.new(group) }
- let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
+ context 'group visibility levels' do
+ let(:user) { create(:user) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
- before do
- setup_import_export_config(filepath)
+ before do
+ setup_import_export_config(filepath)
- group_tree_restorer.restore
- end
+ group_tree_restorer.restore
+ end
- shared_examples 'with visibility level' do |visibility_level, expected_visibilities|
- context "when visibility level is #{visibility_level}" do
- let(:group) { create(:group, visibility_level) }
- let(:filepath) { "group_exports/visibility_levels/#{visibility_level}" }
+ shared_examples 'with visibility level' do |visibility_level, expected_visibilities|
+ context "when visibility level is #{visibility_level}" do
+ let(:group) { create(:group, visibility_level) }
+ let(:filepath) { "group_exports/visibility_levels/#{visibility_level}" }
- it "imports all subgroups as #{visibility_level}" do
- expect(group.children.map(&:visibility_level)).to match_array(expected_visibilities)
+ it "imports all subgroups as #{visibility_level}" do
+ expect(group.children.map(&:visibility_level)).to match_array(expected_visibilities)
+ end
end
end
+
+ include_examples 'with visibility level', :public, [20, 10, 0]
+ include_examples 'with visibility level', :private, [0, 0, 0]
+ include_examples 'with visibility level', :internal, [10, 10, 0]
+ end
+ end
+
+ context 'when import_relation_object_persistence feature flag is enabled' do
+ before do
+ stub_feature_flags(import_relation_object_persistence: true)
+ end
+
+ include_examples 'group restoration'
+ end
+
+ context 'when import_relation_object_persistence feature flag is disabled' do
+ before do
+ stub_feature_flags(import_relation_object_persistence: false)
end
- include_examples 'with visibility level', :public, [20, 10, 0]
- include_examples 'with visibility level', :private, [0, 0, 0]
- include_examples 'with visibility level', :internal, [10, 10, 0]
+ include_examples 'group restoration'
end
end
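
Editor's note: the tree restorer examples above are wrapped in a 'group restoration' shared example group so the same assertions run with the import_relation_object_persistence flag both enabled and disabled. In isolation, that both-states pattern (using GitLab's stub_feature_flags spec helper) reduces to something like:

    # Minimal sketch of re-running one set of examples under both flag states.
    RSpec.shared_examples 'restoration behaviour' do
      it 'meets the shared expectations' do
        expect(subject).to be_truthy # placeholder assertion
      end
    end

    RSpec.describe 'a flag-guarded restore' do
      subject { true } # placeholder for the behaviour under test

      [true, false].each do |state|
        context "when the feature flag is #{state ? 'enabled' : 'disabled'}" do
          before do
            stub_feature_flags(import_relation_object_persistence: state)
          end

          include_examples 'restoration behaviour'
        end
      end
    end
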
diff --git a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
index 352af18c822..ba1cccf87ce 100644
--- a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
+++ b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
@@ -158,26 +158,10 @@ RSpec.describe Gitlab::ImportExport::Json::StreamingSerializer do
end
describe 'load balancing' do
- context 'when feature flag load_balancing_for_export_workers is enabled' do
- before do
- stub_feature_flags(load_balancing_for_export_workers: true)
- end
-
- it 'reads from replica' do
- expect(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_replicas_for_read_queries).and_call_original
-
- subject.execute
- end
- end
+ it 'reads from replica' do
+ expect(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_replicas_for_read_queries).and_call_original
- context 'when feature flag load_balancing_for_export_workers is disabled' do
- it 'reads from primary' do
- stub_feature_flags(load_balancing_for_export_workers: false)
-
- expect(Gitlab::Database::LoadBalancing::Session.current).not_to receive(:use_replicas_for_read_queries)
-
- subject.execute
- end
+ subject.execute
end
end
end
diff --git a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
index ffbbf9326ec..240d86077c4 100644
--- a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
@@ -401,4 +401,20 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory, :use_clean_rails_
expect(created_object.value).to be_nil
end
end
+
+ context 'event object' do
+ let(:relation_sym) { :events }
+ let(:relation_hash) do
+ {
+ 'project_id' => project.id,
+ 'author_id' => admin.id,
+ 'action' => 'created',
+ 'target_type' => 'Issue'
+ }
+ end
+
+ it 'has preloaded project' do
+ expect(created_object.project).to equal(project)
+ end
+ end
end
diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
index 8884722254d..fdf8260c058 100644
--- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
@@ -1058,13 +1058,35 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
end
end
- context 'enable ndjson import' do
- it_behaves_like 'project tree restorer work properly', :legacy_reader, true
+ context 'when import_relation_object_persistence feature flag is enabled' do
+ before do
+ stub_feature_flags(import_relation_object_persistence: true)
+ end
+
+ context 'enable ndjson import' do
+ it_behaves_like 'project tree restorer work properly', :legacy_reader, true
+
+ it_behaves_like 'project tree restorer work properly', :ndjson_reader, true
+ end
- it_behaves_like 'project tree restorer work properly', :ndjson_reader, true
+ context 'disable ndjson import' do
+ it_behaves_like 'project tree restorer work properly', :legacy_reader, false
+ end
end
- context 'disable ndjson import' do
- it_behaves_like 'project tree restorer work properly', :legacy_reader, false
+ context 'when import_relation_object_persistence feature flag is disabled' do
+ before do
+ stub_feature_flags(import_relation_object_persistence: false)
+ end
+
+ context 'enable ndjson import' do
+ it_behaves_like 'project tree restorer work properly', :legacy_reader, true
+
+ it_behaves_like 'project tree restorer work properly', :ndjson_reader, true
+ end
+
+ context 'disable ndjson import' do
+ it_behaves_like 'project tree restorer work properly', :legacy_reader, false
+ end
end
end
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index f019883a91e..e06fcb0cd3f 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -857,6 +857,7 @@ Epic:
- health_status
- external_key
- confidential
+ - color
EpicIssue:
- id
- relative_position
diff --git a/spec/lib/gitlab/integrations/sti_type_spec.rb b/spec/lib/gitlab/integrations/sti_type_spec.rb
deleted file mode 100644
index 1205b74dc9d..00000000000
--- a/spec/lib/gitlab/integrations/sti_type_spec.rb
+++ /dev/null
@@ -1,114 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Integrations::StiType do
- let(:types) { ['AsanaService', 'Integrations::Asana', Integrations::Asana] }
-
- describe '#serialize' do
- context 'SQL SELECT' do
- let(:expected_sql) do
- <<~SQL.strip
- SELECT "integrations".* FROM "integrations" WHERE "integrations"."type" = 'AsanaService'
- SQL
- end
-
- it 'forms SQL SELECT statements correctly' do
- sql_statements = types.map do |type|
- Integration.where(type: type).to_sql
- end
-
- expect(sql_statements).to all(eq(expected_sql))
- end
- end
-
- context 'SQL CREATE' do
- let(:expected_sql) do
- <<~SQL.strip
- INSERT INTO "integrations" ("type") VALUES ('AsanaService')
- SQL
- end
-
- it 'forms SQL CREATE statements correctly' do
- sql_statements = types.map do |type|
- record = ActiveRecord::QueryRecorder.new { Integration.insert({ type: type }) }
- record.log.first
- end
-
- expect(sql_statements).to all(include(expected_sql))
- end
- end
-
- context 'SQL UPDATE' do
- let(:expected_sql) do
- <<~SQL.strip
- UPDATE "integrations" SET "type" = 'AsanaService'
- SQL
- end
-
- let_it_be(:integration) { create(:integration) }
-
- it 'forms SQL UPDATE statements correctly' do
- sql_statements = types.map do |type|
- record = ActiveRecord::QueryRecorder.new { integration.update_column(:type, type) }
- record.log.first
- end
-
- expect(sql_statements).to all(include(expected_sql))
- end
- end
-
- context 'SQL DELETE' do
- let(:expected_sql) do
- <<~SQL.strip
- DELETE FROM "integrations" WHERE "integrations"."type" = 'AsanaService'
- SQL
- end
-
- it 'forms SQL DELETE statements correctly' do
- sql_statements = types.map do |type|
- record = ActiveRecord::QueryRecorder.new { Integration.delete_by(type: type) }
- record.log.first
- end
-
- expect(sql_statements).to all(match(expected_sql))
- end
- end
- end
-
- describe '#deserialize' do
- specify 'it deserializes type correctly', :aggregate_failures do
- types.each do |type|
- service = create(:integration, type: type)
-
- expect(service.type).to eq('AsanaService')
- end
- end
- end
-
- describe '#cast' do
- it 'casts type as model correctly', :aggregate_failures do
- create(:integration, type: 'AsanaService')
-
- types.each do |type|
- expect(Integration.find_by(type: type)).to be_kind_of(Integrations::Asana)
- end
- end
- end
-
- describe '#changed?' do
- it 'detects changes correctly', :aggregate_failures do
- service = create(:integration, type: 'AsanaService')
-
- types.each do |type|
- service.type = type
-
- expect(service).not_to be_changed
- end
-
- service.type = 'NewType'
-
- expect(service).to be_changed
- end
- end
-end
diff --git a/spec/lib/gitlab/json_cache_spec.rb b/spec/lib/gitlab/json_cache_spec.rb
index 7899d01b475..d7d28a94cfe 100644
--- a/spec/lib/gitlab/json_cache_spec.rb
+++ b/spec/lib/gitlab/json_cache_spec.rb
@@ -1,4 +1,5 @@
# frozen_string_literal: true
+# rubocop:disable Style/RedundantFetchBlock
require 'spec_helper'
@@ -8,7 +9,7 @@ RSpec.describe Gitlab::JsonCache do
let(:backend) { double('backend').as_null_object }
let(:namespace) { 'geo' }
let(:key) { 'foo' }
- let(:expanded_key) { "#{namespace}:#{key}:#{Gitlab::VERSION}:#{Rails.version}" }
+ let(:expanded_key) { "#{namespace}:#{key}:#{Gitlab.revision}" }
subject(:cache) { described_class.new(namespace: namespace, backend: backend) }
@@ -35,69 +36,63 @@ RSpec.describe Gitlab::JsonCache do
end
describe '#cache_key' do
- context 'when namespace is not defined' do
- context 'when cache_key_with_version is true' do
- it 'expands out the key with GitLab, and Rails versions' do
- cache = described_class.new(cache_key_with_version: true)
+ using RSpec::Parameterized::TableSyntax
- cache_key = cache.cache_key(key)
-
- expect(cache_key).to eq("#{key}:#{Gitlab::VERSION}:#{Rails.version}")
- end
- end
+ where(:namespace, :cache_key_strategy, :expanded_key) do
+ nil | :revision | "#{key}:#{Gitlab.revision}"
+ nil | :version | "#{key}:#{Gitlab::VERSION}:#{Rails.version}"
+ namespace | :revision | "#{namespace}:#{key}:#{Gitlab.revision}"
+ namespace | :version | "#{namespace}:#{key}:#{Gitlab::VERSION}:#{Rails.version}"
+ end
- context 'when cache_key_with_version is false' do
- it 'returns the key' do
- cache = described_class.new(namespace: nil, cache_key_with_version: false)
+ with_them do
+ let(:cache) { described_class.new(namespace: namespace, cache_key_strategy: cache_key_strategy) }
- cache_key = cache.cache_key(key)
+ subject { cache.cache_key(key) }
- expect(cache_key).to eq(key)
- end
- end
+ it { is_expected.to eq expanded_key }
end
- context 'when namespace is nil' do
- context 'when cache_key_with_version is true' do
- it 'expands out the key with GitLab, and Rails versions' do
- cache = described_class.new(cache_key_with_version: true)
-
- cache_key = cache.cache_key(key)
+ context 'when cache_key_strategy is unknown' do
+ let(:cache) { described_class.new(namespace: namespace, cache_key_strategy: 'unknown') }
- expect(cache_key).to eq("#{key}:#{Gitlab::VERSION}:#{Rails.version}")
- end
+ it 'raises KeyError' do
+ expect { cache.cache_key('key') }.to raise_error(KeyError)
end
+ end
+ end
- context 'when cache_key_with_version is false' do
- it 'returns the key' do
- cache = described_class.new(namespace: nil, cache_key_with_version: false)
+ describe '#namespace' do
+ it 'defaults to nil' do
+ cache = described_class.new
+ expect(cache.namespace).to be_nil
+ end
+ end
- cache_key = cache.cache_key(key)
+ describe '#strategy_key_component' do
+ subject { cache.strategy_key_component }
- expect(cache_key).to eq(key)
- end
- end
+ it 'defaults to Gitlab.revision' do
+ expect(described_class.new.strategy_key_component).to eq Gitlab.revision
end
- context 'when namespace is set' do
- context 'when cache_key_with_version is true' do
- it 'expands out the key with namespace and Rails version' do
- cache = described_class.new(namespace: namespace, cache_key_with_version: true)
+ context 'when cache_key_strategy is :revision' do
+ let(:cache) { described_class.new(cache_key_strategy: :revision) }
- cache_key = cache.cache_key(key)
+ it { is_expected.to eq Gitlab.revision }
+ end
- expect(cache_key).to eq("#{namespace}:#{key}:#{Gitlab::VERSION}:#{Rails.version}")
- end
- end
+ context 'when cache_key_strategy is :version' do
+ let(:cache) { described_class.new(cache_key_strategy: :version) }
- context 'when cache_key_with_version is false' do
- it 'expands out the key with namespace' do
- cache = described_class.new(namespace: namespace, cache_key_with_version: false)
+ it { is_expected.to eq [Gitlab::VERSION, Rails.version] }
+ end
- cache_key = cache.cache_key(key)
+ context 'when cache_key_strategy is invalid' do
+ let(:cache) { described_class.new(cache_key_strategy: 'unknown') }
- expect(cache_key).to eq("#{namespace}:#{key}")
- end
+ it 'raises KeyError' do
+ expect { subject }.to raise_error(KeyError)
end
end
end
@@ -553,3 +548,4 @@ RSpec.describe Gitlab::JsonCache do
end
end
end
+# rubocop:enable Style/RedundantFetchBlock
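
Editor's note: the rewritten #cache_key examples rely on RSpec::Parameterized::TableSyntax (from the rspec-parameterized gem bundled with GitLab) to express each namespace/strategy combination as one table row. A standalone sketch of that syntax:

    require 'rspec-parameterized'

    RSpec.describe 'key composition' do
      using RSpec::Parameterized::TableSyntax

      # Each row becomes its own example, with the columns bound as lets.
      where(:prefix, :key, :expected) do
        'geo'   | 'foo' | 'geo:foo'
        'cache' | 'bar' | 'cache:bar'
      end

      with_them do
        it 'joins prefix and key with a colon' do
          expect("#{prefix}:#{key}").to eq(expected)
        end
      end
    end
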
diff --git a/spec/lib/gitlab/kubernetes/kubeconfig/template_spec.rb b/spec/lib/gitlab/kubernetes/kubeconfig/template_spec.rb
index 057c4373329..7d1f1aea291 100644
--- a/spec/lib/gitlab/kubernetes/kubeconfig/template_spec.rb
+++ b/spec/lib/gitlab/kubernetes/kubeconfig/template_spec.rb
@@ -39,6 +39,51 @@ RSpec.describe Gitlab::Kubernetes::Kubeconfig::Template do
it { is_expected.to eq(YAML.dump(template.to_h.deep_stringify_keys)) }
end
+ describe '#merge_yaml' do
+ it 'appends to the configuration and overwrites the current context' do
+ template.add_cluster(name: 'hello-cluster', url: 'hello-url')
+ template.add_context(name: 'hello-context', cluster: 'hello-cluster', user: 'hello-user')
+ template.add_user(name: 'hello-user', token: 'hello-token')
+ ca_pem = Base64.strict_encode64('a certificate')
+ template.merge_yaml(<<~YAML)
+ apiVersion: v1
+ kind: Config
+ clusters:
+ - name: 'gitlab-deploy'
+ cluster:
+ server: url
+ certificate-authority-data: #{ca_pem.inspect}
+ contexts:
+ - name: gitlab-deploy
+ context:
+ cluster: gitlab-deploy
+ namespace: namespace
+ user: gitlab-deploy
+ current-context: gitlab-deploy
+ users:
+ - name: 'gitlab-deploy'
+ user: { token: token }
+ YAML
+ expect(template.to_h).to eq({
+ apiVersion: 'v1',
+ kind: 'Config',
+ clusters: [
+ { name: 'hello-cluster', cluster: { server: 'hello-url' } },
+ { name: 'gitlab-deploy', cluster: { server: 'url', 'certificate-authority-data': ca_pem } }
+ ],
+ contexts: [
+ { name: 'hello-context', context: { cluster: 'hello-cluster', user: 'hello-user' } },
+ { name: 'gitlab-deploy', context: { cluster: 'gitlab-deploy', namespace: 'namespace', user: 'gitlab-deploy' } }
+ ],
+ users: [
+ { name: 'hello-user', user: { token: 'hello-token' } },
+ { name: 'gitlab-deploy', user: { token: 'token' } }
+ ],
+ 'current-context': 'gitlab-deploy'
+ })
+ end
+ end
+
describe 'adding entries' do
let(:entry) { instance_double(entry_class, to_h: attributes) }
let(:attributes) do
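
Editor's note: the new #merge_yaml example appends the clusters, contexts and users from an existing kubeconfig and takes over its current-context. The same merge, reduced to plain hashes (an illustration, not the Gitlab::Kubernetes::Kubeconfig::Template implementation):

    require 'yaml'

    # Illustrative merge: list sections are concatenated, current-context is
    # overwritten by the incoming document when present.
    def merge_kubeconfig(base, yaml_string)
      other = YAML.safe_load(yaml_string)

      %w[clusters contexts users].each do |section|
        base[section] = Array(base[section]) + Array(other[section])
      end

      base['current-context'] = other['current-context'] if other['current-context']
      base
    end

    base = { 'clusters' => [{ 'name' => 'hello-cluster' }] }
    extra = <<~YAML
      clusters:
      - name: gitlab-deploy
      current-context: gitlab-deploy
    YAML

    merge_kubeconfig(base, extra)
    # => base now lists both clusters and uses 'gitlab-deploy' as current-context
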
diff --git a/spec/lib/gitlab/mail_room/authenticator_spec.rb b/spec/lib/gitlab/mail_room/authenticator_spec.rb
index 44120902661..2e62ed2d386 100644
--- a/spec/lib/gitlab/mail_room/authenticator_spec.rb
+++ b/spec/lib/gitlab/mail_room/authenticator_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe Gitlab::MailRoom::Authenticator do
describe '#verify_api_request' do
let(:incoming_email_secret) { SecureRandom.hex(16) }
let(:service_desk_email_secret) { SecureRandom.hex(16) }
- let(:payload) { { iss: described_class::INTERNAL_API_REQUEST_JWT_ISSUER, iat: (Time.current - 5.minutes + 1.second).to_i } }
+ let(:payload) { { iss: Gitlab::MailRoom::INTERNAL_API_REQUEST_JWT_ISSUER, iat: (Time.current - 5.minutes + 1.second).to_i } }
before do
allow(described_class).to receive(:secret).with(:incoming_email).and_return(incoming_email_secret)
@@ -54,7 +54,7 @@ RSpec.describe Gitlab::MailRoom::Authenticator do
context 'verify a valid token' do
it 'returns the decoded payload' do
encoded_token = JWT.encode(payload, incoming_email_secret, 'HS256')
- headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+ headers = { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => encoded_token }
expect(described_class.verify_api_request(headers, 'incoming_email')[0]).to match a_hash_including(
"iss" => "gitlab-mailroom",
@@ -62,7 +62,7 @@ RSpec.describe Gitlab::MailRoom::Authenticator do
)
encoded_token = JWT.encode(payload, service_desk_email_secret, 'HS256')
- headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+ headers = { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => encoded_token }
expect(described_class.verify_api_request(headers, 'service_desk_email')[0]).to match a_hash_including(
"iss" => "gitlab-mailroom",
@@ -74,7 +74,7 @@ RSpec.describe Gitlab::MailRoom::Authenticator do
context 'verify an invalid token' do
it 'returns false' do
encoded_token = JWT.encode(payload, 'wrong secret', 'HS256')
- headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+ headers = { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => encoded_token }
expect(described_class.verify_api_request(headers, 'incoming_email')).to eq(false)
end
@@ -83,7 +83,7 @@ RSpec.describe Gitlab::MailRoom::Authenticator do
context 'verify a valid token but wrong mailbox type' do
it 'returns false' do
encoded_token = JWT.encode(payload, incoming_email_secret, 'HS256')
- headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+ headers = { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => encoded_token }
expect(described_class.verify_api_request(headers, 'service_desk_email')).to eq(false)
end
@@ -94,18 +94,18 @@ RSpec.describe Gitlab::MailRoom::Authenticator do
it 'returns false' do
encoded_token = JWT.encode(payload, incoming_email_secret, 'HS256')
- headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+ headers = { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => encoded_token }
expect(described_class.verify_api_request(headers, 'incoming_email')).to eq(false)
end
end
context 'verify a valid token but expired' do
- let(:payload) { { iss: described_class::INTERNAL_API_REQUEST_JWT_ISSUER, iat: (Time.current - 5.minutes - 1.second).to_i } }
+ let(:payload) { { iss: Gitlab::MailRoom::INTERNAL_API_REQUEST_JWT_ISSUER, iat: (Time.current - 5.minutes - 1.second).to_i } }
it 'returns false' do
encoded_token = JWT.encode(payload, incoming_email_secret, 'HS256')
- headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+ headers = { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => encoded_token }
expect(described_class.verify_api_request(headers, 'incoming_email')).to eq(false)
end
@@ -125,7 +125,7 @@ RSpec.describe Gitlab::MailRoom::Authenticator do
it 'returns false' do
encoded_token = JWT.encode(payload, incoming_email_secret, 'HS256')
- headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+ headers = { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => encoded_token }
expect(described_class.verify_api_request(headers, 'incoming_email')).to eq(false)
end
@@ -133,7 +133,7 @@ RSpec.describe Gitlab::MailRoom::Authenticator do
context 'verify headers for a non-existing mailbox type' do
it 'returns false' do
- headers = { described_class::INTERNAL_API_REQUEST_HEADER => 'something' }
+ headers = { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => 'something' }
expect(described_class.verify_api_request(headers, 'invalid_mailbox_type')).to eq(false)
end
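
Editor's note: the authenticator examples encode their payloads with JWT.encode(payload, secret, 'HS256') and expect verification to reject wrong secrets, wrong mailbox secrets, and tokens issued more than five minutes ago. A minimal sketch of the verifying side with the jwt gem (illustrative; the helper name and false-on-failure convention are assumptions, not the production method):

    require 'jwt'

    ISSUED_AT_WINDOW = 5 * 60 # seconds

    # Returns the decoded payload, or false when the signature, issuer or age
    # of the token is not acceptable.
    def verify_mailroom_token(token, secret, issuer: 'gitlab-mailroom')
      payload, _header = JWT.decode(token, secret, true, iss: issuer, verify_iss: true, algorithm: 'HS256')

      return false if payload['iat'].nil?
      return false if Time.now.to_i - payload['iat'] > ISSUED_AT_WINDOW

      payload
    rescue JWT::DecodeError
      false
    end
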
diff --git a/spec/lib/gitlab/mail_room/mail_room_spec.rb b/spec/lib/gitlab/mail_room/mail_room_spec.rb
index a4fcf71a012..12fb12ebd87 100644
--- a/spec/lib/gitlab/mail_room/mail_room_spec.rb
+++ b/spec/lib/gitlab/mail_room/mail_room_spec.rb
@@ -4,16 +4,30 @@ require 'spec_helper'
RSpec.describe Gitlab::MailRoom do
let(:default_port) { 143 }
+ let(:log_path) { Rails.root.join('log', 'mail_room_json.log').to_s }
+
+ let(:fake_redis_queues) do
+ double(
+ url: "localhost",
+ db: 99,
+ sentinels: [{ host: 'localhost', port: 1234 }],
+ sentinels?: true
+ )
+ end
+
let(:yml_config) do
{
enabled: true,
+ host: 'mail.example.com',
address: 'address@example.com',
+ user: 'user@example.com',
+ password: 'password',
port: default_port,
ssl: false,
start_tls: false,
mailbox: 'inbox',
idle_timeout: 60,
- log_path: Rails.root.join('log', 'mail_room_json.log').to_s,
+ log_path: log_path,
expunge_deleted: false
}
end
@@ -30,6 +44,7 @@ RSpec.describe Gitlab::MailRoom do
end
before do
+ allow(Gitlab::Redis::Queues).to receive(:new).and_return(fake_redis_queues)
allow(described_class).to receive(:load_yaml).and_return(configs)
described_class.instance_variable_set(:@enabled_configs, nil)
end
@@ -39,6 +54,8 @@ RSpec.describe Gitlab::MailRoom do
end
describe '#enabled_configs' do
+ let(:first_value) { described_class.enabled_configs.each_value.first }
+
context 'when both email and address is set' do
it 'returns email configs' do
expect(described_class.enabled_configs.size).to eq(2)
@@ -76,7 +93,7 @@ RSpec.describe Gitlab::MailRoom do
let(:custom_config) { { enabled: true, address: 'address@example.com' } }
it 'overwrites missing values with the default' do
- expect(described_class.enabled_configs.each_value.first[:port]).to eq(Gitlab::MailRoom::DEFAULT_CONFIG[:port])
+ expect(first_value[:port]).to eq(Gitlab::MailRoom::DEFAULT_CONFIG[:port])
end
end
@@ -85,23 +102,24 @@ RSpec.describe Gitlab::MailRoom do
it 'returns only incoming_email' do
expect(described_class.enabled_configs.size).to eq(1)
- expect(described_class.enabled_configs.each_value.first[:worker]).to eq('EmailReceiverWorker')
+ expect(first_value[:worker]).to eq('EmailReceiverWorker')
end
end
describe 'setting up redis settings' do
- let(:fake_redis_queues) { double(url: "localhost", db: 99, sentinels: "yes, them", sentinels?: true) }
-
- before do
- allow(Gitlab::Redis::Queues).to receive(:new).and_return(fake_redis_queues)
+ it 'sets delivery method to Sidekiq by default' do
+ config = first_value
+ expect(config).to include(
+ delivery_method: 'sidekiq'
+ )
end
it 'sets redis config' do
- config = described_class.enabled_configs.each_value.first
+ config = first_value
expect(config).to include(
redis_url: 'localhost',
redis_db: 99,
- sentinels: 'yes, them'
+ sentinels: [{ host: 'localhost', port: 1234 }]
)
end
end
@@ -111,7 +129,7 @@ RSpec.describe Gitlab::MailRoom do
let(:custom_config) { { log_path: 'tiny_log.log' } }
it 'expands the log path to an absolute value' do
- new_path = Pathname.new(described_class.enabled_configs.each_value.first[:log_path])
+ new_path = Pathname.new(first_value[:log_path])
expect(new_path.absolute?).to be_truthy
end
end
@@ -120,7 +138,7 @@ RSpec.describe Gitlab::MailRoom do
let(:custom_config) { { log_path: '/dev/null' } }
it 'leaves the path as-is' do
- expect(described_class.enabled_configs.each_value.first[:log_path]).to eq '/dev/null'
+ expect(first_value[:log_path]).to eq '/dev/null'
end
end
end
@@ -164,4 +182,148 @@ RSpec.describe Gitlab::MailRoom do
end
end
end
+
+ describe 'config/mail_room.yml' do
+ let(:mail_room_template) { ERB.new(File.read(Rails.root.join("./config/mail_room.yml"))).result }
+ let(:mail_room_yml) { YAML.safe_load(mail_room_template, permitted_classes: [Symbol]) }
+
+ shared_examples 'renders mail-specific config file correctly' do
+ it 'renders mail room config file correctly' do
+ expect(mail_room_yml[:mailboxes]).to be_an(Array)
+ expect(mail_room_yml[:mailboxes].length).to eq(2)
+
+ expect(mail_room_yml[:mailboxes]).to all(
+ match(
+ a_hash_including(
+ host: 'mail.example.com',
+ port: default_port,
+ ssl: false,
+ start_tls: false,
+ email: 'user@example.com',
+ password: 'password',
+ idle_timeout: 60,
+ logger: {
+ log_path: log_path
+ },
+ name: 'inbox',
+
+ delete_after_delivery: true,
+ expunge_deleted: false
+ )
+ )
+ )
+ end
+ end
+
+ shared_examples 'renders arbitration options correctly' do
+ it 'renders arbitration options correctly' do
+ expect(mail_room_yml[:mailboxes]).to be_an(Array)
+ expect(mail_room_yml[:mailboxes].length).to eq(2)
+ expect(mail_room_yml[:mailboxes]).to all(
+ match(
+ a_hash_including(
+ arbitration_method: "redis",
+ arbitration_options: {
+ redis_url: "localhost",
+ namespace: "mail_room:gitlab",
+ sentinels: [{ host: "localhost", port: 1234 }]
+ }
+ )
+ )
+ )
+ end
+ end
+
+ shared_examples 'renders the sidekiq delivery method and options correctly' do
+ it 'renders the sidekiq delivery method and options correctly' do
+ expect(mail_room_yml[:mailboxes]).to be_an(Array)
+ expect(mail_room_yml[:mailboxes].length).to eq(2)
+
+ expect(mail_room_yml[:mailboxes][0]).to match(
+ a_hash_including(
+ delivery_method: 'sidekiq',
+ delivery_options: {
+ redis_url: "localhost",
+ redis_db: 99,
+ namespace: "resque:gitlab",
+ queue: "email_receiver",
+ worker: "EmailReceiverWorker",
+ sentinels: [{ host: "localhost", port: 1234 }]
+ }
+ )
+ )
+ expect(mail_room_yml[:mailboxes][1]).to match(
+ a_hash_including(
+ delivery_method: 'sidekiq',
+ delivery_options: {
+ redis_url: "localhost",
+ redis_db: 99,
+ namespace: "resque:gitlab",
+ queue: "service_desk_email_receiver",
+ worker: "ServiceDeskEmailReceiverWorker",
+ sentinels: [{ host: "localhost", port: 1234 }]
+ }
+ )
+ )
+ end
+ end
+
+ context 'when delivery_method is implicit' do
+ it_behaves_like 'renders mail-specific config file correctly'
+ it_behaves_like 'renders arbitration options correctly'
+ it_behaves_like 'renders the sidekiq delivery method and options correctly'
+ end
+
+ context 'when delivery_method is explicitly sidekiq' do
+ let(:custom_config) { { delivery_method: 'sidekiq' } }
+
+ it_behaves_like 'renders mail-specific config file correctly'
+ it_behaves_like 'renders arbitration options correctly'
+ it_behaves_like 'renders the sidekiq delivery method and options correctly'
+ end
+
+ context 'when delivery_method is webhook (internally postback in mail_room)' do
+ let(:custom_config) do
+ {
+ delivery_method: 'webhook',
+ gitlab_url: 'http://gitlab.example',
+ secret_file: '/path/to/secret/file'
+ }
+ end
+
+ it_behaves_like 'renders mail-specific config file correctly'
+ it_behaves_like 'renders arbitration options correctly'
+
+ it 'renders the webhook (postback) delivery method and options correctly' do
+ expect(mail_room_yml[:mailboxes]).to be_an(Array)
+ expect(mail_room_yml[:mailboxes].length).to eq(2)
+
+ expect(mail_room_yml[:mailboxes][0]).to match(
+ a_hash_including(
+ delivery_method: 'postback',
+ delivery_options: {
+ delivery_url: "http://gitlab.example/api/v4/internal/mail_room/incoming_email",
+ jwt_auth_header: Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER,
+ jwt_issuer: Gitlab::MailRoom::INTERNAL_API_REQUEST_JWT_ISSUER,
+ jwt_algorithm: 'HS256',
+ jwt_secret_path: '/path/to/secret/file'
+ }
+ )
+ )
+
+ expect(mail_room_yml[:mailboxes][1]).to match(
+ a_hash_including(
+ delivery_method: 'postback',
+ delivery_options: {
+ delivery_url: "http://gitlab.example/api/v4/internal/mail_room/service_desk_email",
+ jwt_auth_header: Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER,
+ jwt_issuer: Gitlab::MailRoom::INTERNAL_API_REQUEST_JWT_ISSUER,
+ jwt_algorithm: 'HS256',
+ jwt_secret_path: '/path/to/secret/file'
+ }
+ )
+ )
+ end
+ end
+ end
end
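
Editor's note: the 'config/mail_room.yml' examples render the ERB template and parse the result with YAML.safe_load(..., permitted_classes: [Symbol]) before asserting on the generated mailboxes. That render-then-parse step in isolation (the path below is a placeholder):

    require 'erb'
    require 'yaml'

    # Render an ERB-templated YAML file and parse it with symbol keys allowed;
    # 'config/example.yml' is a placeholder path for this illustration.
    template = ERB.new(File.read('config/example.yml'))
    config = YAML.safe_load(template.result, permitted_classes: [Symbol])

    # config[:mailboxes] is then a plain Array of Hashes that specs can match
    # with a_hash_including(...), as done above.
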
diff --git a/spec/lib/gitlab/merge_requests/commit_message_generator_spec.rb b/spec/lib/gitlab/merge_requests/commit_message_generator_spec.rb
index 2407b497249..ad528dca81a 100644
--- a/spec/lib/gitlab/merge_requests/commit_message_generator_spec.rb
+++ b/spec/lib/gitlab/merge_requests/commit_message_generator_spec.rb
@@ -403,6 +403,90 @@ RSpec.describe Gitlab::MergeRequests::CommitMessageGenerator do
end
end
+ context 'when project has commit template with all_commits' do
+ let(:message_template_name) { "All commits:\n%{all_commits}" }
+
+ it 'returns all commit messages' do
+ expect(result_message).to eq <<~MSG.rstrip
+ All commits:
+ * Feature added
+
+ Signed-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>
+ MSG
+ end
+
+ context 'with 2 commits' do
+ let(:source_branch) { 'fix' }
+
+ it 'returns both messages' do
+ expect(result_message).to eq <<~MSG.rstrip
+ All commits:
+ * Test file for directories with a leading dot
+
+ * JS fix
+
+ Signed-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>
+ MSG
+ end
+ end
+
+ context 'with over 100 commits' do
+ let(:source_branch) { 'signed-commits' }
+
+ it 'returns first 100 commits skipping merge commit' do
+ expected_message = <<~MSG
+ All commits:
+ * Multiple signatures commit
+
+ * Add conflicting file
+
+ * Add conflicting file
+
+ MSG
+ expected_message += (5..100).to_a.reverse
+ .map { |n| "* Unrelated signed commit #{n} to exceed page size of endpoint\n\n" }
+ .join.rstrip
+ expect(result_message).to eq expected_message
+ end
+ end
+
+ context 'when branch has no unmerged commits' do
+ let(:source_branch) { 'v1.1.0' }
+
+ it 'is an empty string' do
+ expect(result_message).to eq "All commits:\n"
+ end
+ end
+
+ context 'when branch has commit with message over 100kb' do
+ let(:source_branch) { 'add_commit_with_5mb_subject' }
+
+ it 'skips commit body' do
+ expect(result_message).to eq <<~MSG.rstrip
+ All commits:
+ * Commit with 5MB text subject
+
+ -- Skipped commit body exceeding 100KiB in size.
+
+ * Correct test_env.rb path for adding branch
+
+ * Add file with a _flattable_ path
+
+
+ (cherry picked from commit ce369011c189f62c815f5971d096b26759bab0d1)
+
+ * Add file larger than 1 mb
+
+ In order to test Max File Size push rule we need a file larger than 1 MB
+
+ * LFS tracks "*.lfs" through .gitattributes
+
+ * Update README.md to include `Usage in testing and development`
+ MSG
+ end
+ end
+ end
+
context 'user' do
subject { described_class.new(merge_request: merge_request, current_user: nil) }
@@ -466,6 +550,7 @@ RSpec.describe Gitlab::MergeRequests::CommitMessageGenerator do
approved_by:%{approved_by}
merged_by:%{merged_by}
co_authored_by:%{co_authored_by}
+ all_commits:%{all_commits}
MSG
it 'uses custom template' do
@@ -486,6 +571,9 @@ RSpec.describe Gitlab::MergeRequests::CommitMessageGenerator do
approved_by:
merged_by:#{current_user.name} <#{current_user.commit_email_or_default}>
co_authored_by:Co-authored-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>
+ all_commits:* Feature added
+
+ Signed-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>
MSG
end
end
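
Editor's note: the %{all_commits} placeholder in the template above is filled with named format-string substitution, which is why the expected messages are written as heredocs with .rstrip. The mechanism on its own (plain Ruby, not the generator class):

    # Named placeholders in a message template are expanded via format/String#%
    # with a hash of substitutions.
    template = <<~MSG
      All commits:
      %{all_commits}
    MSG

    all_commits = ['* Feature added', '* JS fix'].join("\n\n")

    puts format(template, all_commits: all_commits).rstrip
    # All commits:
    # * Feature added
    #
    # * JS fix
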
diff --git a/spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb b/spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb
index 4f437e57600..50cfa6b64ea 100644
--- a/spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb
+++ b/spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb
@@ -70,8 +70,8 @@ RSpec.describe Gitlab::MergeRequests::Mergeability::CheckResult do
let(:payload) { { test: 'test' } }
let(:hash) do
{
- status: status,
- payload: payload
+ 'status' => status,
+ 'payload' => payload
}
end
diff --git a/spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb b/spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb
index d376dcb5b18..ed11f8ea6bb 100644
--- a/spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb
+++ b/spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb
@@ -10,10 +10,22 @@ RSpec.describe Gitlab::MergeRequests::Mergeability::ResultsStore do
let(:merge_request) { double }
describe '#read' do
- it 'calls #retrieve on the interface' do
- expect(interface).to receive(:retrieve_check).with(merge_check: merge_check)
+ let(:result_hash) { { 'status' => 'success', 'payload' => {} } }
- results_store.read(merge_check: merge_check)
+ it 'calls #retrieve_check on the interface' do
+ expect(interface).to receive(:retrieve_check).with(merge_check: merge_check).and_return(result_hash)
+
+ cached_result = results_store.read(merge_check: merge_check)
+
+ expect(cached_result.status).to eq(result_hash['status'].to_sym)
+ expect(cached_result.payload).to eq(result_hash['payload'])
+ end
+
+ context 'when #retrieve_check returns nil' do
+ it 'returns nil' do
+ expect(interface).to receive(:retrieve_check).with(merge_check: merge_check).and_return(nil)
+ expect(results_store.read(merge_check: merge_check)).to be_nil
+ end
end
end
diff --git a/spec/lib/gitlab/metrics/dashboard/cache_spec.rb b/spec/lib/gitlab/metrics/dashboard/cache_spec.rb
index 9467d441ae1..8c2edc85c35 100644
--- a/spec/lib/gitlab/metrics/dashboard/cache_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/cache_spec.rb
@@ -1,4 +1,5 @@
# frozen_string_literal: true
+# rubocop:disable Style/RedundantFetchBlock
require 'spec_helper'
@@ -84,3 +85,4 @@ RSpec.describe Gitlab::Metrics::Dashboard::Cache, :use_clean_rails_memory_store_
end
end
end
+# rubocop:enable Style/RedundantFetchBlock
diff --git a/spec/lib/gitlab/null_request_store_spec.rb b/spec/lib/gitlab/null_request_store_spec.rb
index f600af2e31f..66700313c9a 100644
--- a/spec/lib/gitlab/null_request_store_spec.rb
+++ b/spec/lib/gitlab/null_request_store_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe Gitlab::NullRequestStore do
describe '#fetch' do
it 'returns the block result' do
- expect(null_store.fetch('key') { 'block result' }).to eq('block result')
+ expect(null_store.fetch('key') { 'block result' }).to eq('block result') # rubocop:disable Style/RedundantFetchBlock
end
end
diff --git a/spec/lib/gitlab/omniauth_initializer_spec.rb b/spec/lib/gitlab/omniauth_initializer_spec.rb
index 42ae5844b95..8b959cf787f 100644
--- a/spec/lib/gitlab/omniauth_initializer_spec.rb
+++ b/spec/lib/gitlab/omniauth_initializer_spec.rb
@@ -5,7 +5,161 @@ require 'spec_helper'
RSpec.describe Gitlab::OmniauthInitializer do
let(:devise_config) { class_double(Devise) }
- subject { described_class.new(devise_config) }
+ subject(:initializer) { described_class.new(devise_config) }
+
+ describe '.arguments_for' do
+ let(:devise_config) { nil }
+
+ let(:arguments) { initializer.send(:arguments_for, provider) }
+
+ context 'when there are no args at all' do
+ let(:provider) { { 'name' => 'unknown' } }
+
+ it 'returns an empty array' do
+ expect(arguments).to eq []
+ end
+ end
+
+ context 'when there is an app_id and an app_secret' do
+ let(:provider) { { 'name' => 'unknown', 'app_id' => 1, 'app_secret' => 2 } }
+
+ it 'includes both of them, in positional order' do
+ expect(arguments).to eq [1, 2]
+ end
+ end
+
+ context 'when there is an app_id and an app_secret, and an array of args' do
+ let(:provider) do
+ {
+ 'name' => 'unknown',
+ 'app_id' => 1,
+ 'app_secret' => 2,
+ 'args' => %w[one two three]
+ }
+ end
+
+ it 'concatenates the args on the end' do
+ expect(arguments).to eq [1, 2, 'one', 'two', 'three']
+ end
+ end
+
+ context 'when there is an app_id and an app_secret, and an array of args, and default values' do
+ let(:provider) do
+ {
+ 'name' => 'unknown',
+ 'app_id' => 1,
+ 'app_secret' => 2,
+ 'args' => %w[one two three]
+ }
+ end
+
+ before do
+ expect(described_class)
+ .to receive(:default_arguments_for).with('unknown')
+ .and_return({ default_arg: :some_value })
+ end
+
+ it 'concatenates the args on the end' do
+ expect(arguments)
+ .to eq [1, 2, 'one', 'two', 'three', { default_arg: :some_value }]
+ end
+ end
+
+ context 'when there is an app_id and an app_secret, and a hash of args' do
+ let(:provider) do
+ {
+ 'name' => 'unknown',
+ 'app_id' => 1,
+ 'app_secret' => 2,
+ 'args' => { 'foo' => 100, 'bar' => 200, 'nested' => { 'value' => 300 } }
+ }
+ end
+
+ it 'concatenates the args on the end' do
+ expect(arguments)
+ .to eq [1, 2, { foo: 100, bar: 200, nested: { value: 300 } }]
+ end
+ end
+
+ context 'when there is an app_id and an app_secret, and a hash of args, and default arguments' do
+ let(:provider) do
+ {
+ 'name' => 'unknown',
+ 'app_id' => 1,
+ 'app_secret' => 2,
+ 'args' => { 'foo' => 100, 'bar' => 200, 'nested' => { 'value' => 300 } }
+ }
+ end
+
+ before do
+ expect(described_class)
+ .to receive(:default_arguments_for).with('unknown')
+ .and_return({ default_arg: :some_value })
+ end
+
+ it 'concatenates the args on the end' do
+ expect(arguments)
+ .to eq [1, 2, { default_arg: :some_value, foo: 100, bar: 200, nested: { value: 300 } }]
+ end
+ end
+
+ context 'when there is an app_id and an app_secret, no args, and default values' do
+ let(:provider) do
+ {
+ 'name' => 'unknown',
+ 'app_id' => 1,
+ 'app_secret' => 2
+ }
+ end
+
+ before do
+ expect(described_class)
+ .to receive(:default_arguments_for).with('unknown')
+ .and_return({ default_arg: :some_value })
+ end
+
+ it 'concatenates the args on the end' do
+ expect(arguments)
+ .to eq [1, 2, { default_arg: :some_value }]
+ end
+ end
+
+ context 'when there are args, of an unsupported type' do
+ let(:provider) do
+ {
+ 'name' => 'unknown',
+ 'args' => 1
+ }
+ end
+
+ context 'when there are default arguments' do
+ before do
+ expect(described_class)
+ .to receive(:default_arguments_for).with('unknown')
+ .and_return({ default_arg: :some_value })
+ end
+
+ it 'tracks a configuration error' do
+ expect(::Gitlab::ErrorTracking)
+ .to receive(:track_and_raise_for_dev_exception)
+ .with(described_class::ConfigurationError, provider_name: 'unknown', arguments_type: 'Integer')
+
+ expect(arguments)
+ .to eq [{ default_arg: :some_value }]
+ end
+ end
+
+ context 'when there are no default arguments' do
+ it 'tracks a configuration error' do
+ expect(::Gitlab::ErrorTracking)
+ .to receive(:track_and_raise_for_dev_exception)
+ .with(described_class::ConfigurationError, provider_name: 'unknown', arguments_type: 'Integer')
+
+ expect(arguments).to be_empty
+ end
+ end
+ end
+ end
describe '#execute' do
it 'configures providers from array' do
@@ -105,11 +259,50 @@ RSpec.describe Gitlab::OmniauthInitializer do
it 'configures defaults for gitlab' do
conf = {
'name' => 'gitlab',
- "args" => {}
+ "args" => { 'client_options' => { 'site' => generate(:url) } }
}
expect(devise_config).to receive(:omniauth).with(
:gitlab,
+ client_options: { site: conf.dig('args', 'client_options', 'site') },
+ authorize_params: { gl_auth_type: 'login' }
+ )
+
+ subject.execute([conf])
+ end
+
+ it 'configures defaults for gitlab, when arguments are not provided' do
+ conf = { 'name' => 'gitlab' }
+
+ expect(devise_config).to receive(:omniauth).with(
+ :gitlab,
+ authorize_params: { gl_auth_type: 'login' }
+ )
+
+ subject.execute([conf])
+ end
+
+ it 'configures defaults for gitlab, when array arguments are provided' do
+ conf = { 'name' => 'gitlab', 'args' => ['a'] }
+
+ expect(devise_config).to receive(:omniauth).with(
+ :gitlab,
+ 'a',
+ authorize_params: { gl_auth_type: 'login' }
+ )
+
+ subject.execute([conf])
+ end
+
+ it 'tracks a configuration error if the arguments are neither a hash nor an array' do
+ conf = { 'name' => 'gitlab', 'args' => 17 }
+
+ expect(::Gitlab::ErrorTracking)
+ .to receive(:track_and_raise_for_dev_exception)
+ .with(described_class::ConfigurationError, provider_name: 'gitlab', arguments_type: 'Integer')
+
+ expect(devise_config).to receive(:omniauth).with(
+ :gitlab,
authorize_params: { gl_auth_type: 'login' }
)
diff --git a/spec/lib/gitlab/pages/settings_spec.rb b/spec/lib/gitlab/pages/settings_spec.rb
index 1a7c808d1bf..9cfcded6196 100644
--- a/spec/lib/gitlab/pages/settings_spec.rb
+++ b/spec/lib/gitlab/pages/settings_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Gitlab::Pages::Settings do
context 'when running under a web server outside of test mode' do
before do
allow(::Gitlab::Runtime).to receive(:test_suite?).and_return(false)
- allow(::Gitlab::Runtime).to receive(:web_server?).and_return(true)
+ allow(::Gitlab::Runtime).to receive(:puma?).and_return(true)
end
it 'logs a DiskAccessDenied error' do
diff --git a/spec/lib/gitlab/patch/action_cable_redis_listener_spec.rb b/spec/lib/gitlab/patch/action_cable_redis_listener_spec.rb
new file mode 100644
index 00000000000..14f556ff348
--- /dev/null
+++ b/spec/lib/gitlab/patch/action_cable_redis_listener_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Patch::ActionCableRedisListener do
+ let(:adapter) { instance_double('ActionCable::SubscriptionAdapter::Redis') }
+ let(:connection) { instance_double('Redis') }
+ let(:listener) { ActionCable::SubscriptionAdapter::Redis::Listener.new(adapter, nil) }
+
+ before do
+ allow(Thread).to receive(:new).and_yield
+ allow(adapter).to receive(:redis_connection_for_subscriptions).and_return(connection)
+ end
+
+ it 'catches Redis connection errors and restarts Action Cable' do
+ expect(connection).to receive(:without_reconnect).and_raise Redis::ConnectionError
+ expect(ActionCable).to receive_message_chain(:server, :restart)
+
+ expect { listener.add_channel('test_channel', nil) }.not_to raise_error
+ end
+
+ it 're-raises other exceptions' do
+ expect(connection).to receive(:without_reconnect).and_raise StandardError
+ expect(ActionCable).not_to receive(:server)
+
+ expect { listener.add_channel('test_channel', nil) }.to raise_error(StandardError)
+ end
+end
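Note: the spec above exercises a patch whose effect is to catch a dropped Redis connection in the Action Cable listener thread and restart the server rather than letting the thread die. A minimal sketch of that pattern is shown below; the module and method names are assumptions for illustration, not the actual GitLab patch.

    # Sketch only: assumed shape of a listener patch that survives Redis connection drops.
    require 'redis'

    module ResilientListenerPatch
      def add_channel(channel, on_success)
        super
      rescue ::Redis::ConnectionError
        # Assumption: restarting Action Cable re-establishes the subscription connection.
        ::ActionCable.server.restart
      end
    end

    # Usage sketch (requires Action Cable to be loaded):
    #   ActionCable::SubscriptionAdapter::Redis::Listener.prepend(ResilientListenerPatch)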
diff --git a/spec/lib/gitlab/path_regex_spec.rb b/spec/lib/gitlab/path_regex_spec.rb
index f0ba0f0459d..9876387512b 100644
--- a/spec/lib/gitlab/path_regex_spec.rb
+++ b/spec/lib/gitlab/path_regex_spec.rb
@@ -549,10 +549,11 @@ RSpec.describe Gitlab::PathRegex do
it { is_expected.to match('gitlab-foss') }
it { is_expected.to match('gitlab_foss') }
it { is_expected.to match('gitlab-org/gitlab-foss') }
+ it { is_expected.to match('a/b/c/d/e') }
it { is_expected.to match('100px.com/100px.ruby') }
- it 'only matches at most one slash' do
- expect(subject.match('foo/bar/baz')[0]).to eq('foo/bar')
+ it 'does not match beyond 4 slashes' do
+ expect(subject.match('foo/bar/baz/buz/zip/zap/zoo')[0]).to eq('foo/bar/baz/buz/zip')
end
it 'does not match other non-word characters' do
diff --git a/spec/lib/gitlab/process_supervisor_spec.rb b/spec/lib/gitlab/process_supervisor_spec.rb
new file mode 100644
index 00000000000..60b127dadda
--- /dev/null
+++ b/spec/lib/gitlab/process_supervisor_spec.rb
@@ -0,0 +1,170 @@
+# frozen_string_literal: true
+
+require_relative '../../../lib/gitlab/process_supervisor'
+
+RSpec.describe Gitlab::ProcessSupervisor do
+ let(:health_check_interval_seconds) { 0.1 }
+ let(:check_terminate_interval_seconds) { 1 }
+ let(:forwarded_signals) { [] }
+ let(:process_ids) { [spawn_process, spawn_process] }
+
+ def spawn_process
+ Process.spawn('while true; do sleep 1; done').tap do |pid|
+ Process.detach(pid)
+ end
+ end
+
+ subject(:supervisor) do
+ described_class.new(
+ health_check_interval_seconds: health_check_interval_seconds,
+ check_terminate_interval_seconds: check_terminate_interval_seconds,
+ terminate_timeout_seconds: 1 + check_terminate_interval_seconds,
+ forwarded_signals: forwarded_signals
+ )
+ end
+
+ after do
+ process_ids.each do |pid|
+ Process.kill('KILL', pid)
+ rescue Errno::ESRCH
+ # Ignore if a process wasn't actually alive.
+ end
+ end
+
+ describe '#supervise' do
+ context 'while supervised processes are alive' do
+ it 'does not invoke callback' do
+ expect(Gitlab::ProcessManagement.all_alive?(process_ids)).to be(true)
+ pids_killed = []
+
+ supervisor.supervise(process_ids) do |dead_pids|
+ pids_killed = dead_pids
+ []
+ end
+
+ # Wait several times the poll frequency of the supervisor.
+ sleep health_check_interval_seconds * 10
+
+ expect(pids_killed).to be_empty
+ expect(Gitlab::ProcessManagement.all_alive?(process_ids)).to be(true)
+ end
+ end
+
+ context 'when a supervised process dies' do
+ it 'triggers callback with the dead PIDs and adds new PIDs to supervised PIDs' do
+ expect(Gitlab::ProcessManagement.all_alive?(process_ids)).to be(true)
+ pids_killed = []
+
+ supervisor.supervise(process_ids) do |dead_pids|
+ pids_killed = dead_pids
+ [42] # Fake starting a new process in place of the terminated one.
+ end
+
+ # Terminate the supervised process.
+ Process.kill('TERM', process_ids.first)
+
+ await_condition(sleep_sec: health_check_interval_seconds) do
+ pids_killed == [process_ids.first]
+ end
+
+ expect(Gitlab::ProcessManagement.process_alive?(process_ids.first)).to be(false)
+ expect(Gitlab::ProcessManagement.process_alive?(process_ids.last)).to be(true)
+ expect(supervisor.supervised_pids).to match_array([process_ids.last, 42])
+ end
+ end
+
+ context 'signal handling' do
+ before do
+ allow(supervisor).to receive(:sleep)
+ allow(Gitlab::ProcessManagement).to receive(:trap_signals)
+ allow(Gitlab::ProcessManagement).to receive(:all_alive?).and_return(false)
+ allow(Gitlab::ProcessManagement).to receive(:signal_processes).with(process_ids, anything)
+ end
+
+ context 'termination signals' do
+ context 'when TERM results in timely shutdown of processes' do
+ it 'forwards them to observed processes without waiting for grace period to expire' do
+ allow(Gitlab::ProcessManagement).to receive(:any_alive?).and_return(false)
+
+ expect(Gitlab::ProcessManagement).to receive(:trap_signals).ordered.with(%i(INT TERM)).and_yield(:TERM)
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with(process_ids, :TERM)
+ expect(supervisor).not_to receive(:sleep).with(check_terminate_interval_seconds)
+
+ supervisor.supervise(process_ids) { [] }
+ end
+ end
+
+ context 'when TERM does not result in timely shutdown of processes' do
+ it 'issues a KILL signal after the grace period expires' do
+ expect(Gitlab::ProcessManagement).to receive(:trap_signals).with(%i(INT TERM)).and_yield(:TERM)
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with(process_ids, :TERM)
+ expect(supervisor).to receive(:sleep).ordered.with(check_terminate_interval_seconds).at_least(:once)
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with(process_ids, '-KILL')
+
+ supervisor.supervise(process_ids) { [] }
+ end
+ end
+ end
+
+ context 'forwarded signals' do
+ let(:forwarded_signals) { %i(USR1) }
+
+ it 'forwards given signals to the observed processes' do
+ expect(Gitlab::ProcessManagement).to receive(:trap_signals).with(%i(USR1)).and_yield(:USR1)
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with(process_ids, :USR1)
+
+ supervisor.supervise(process_ids) { [] }
+ end
+ end
+ end
+ end
+
+ describe '#shutdown' do
+ context 'when supervisor is supervising processes' do
+ before do
+ supervisor.supervise(process_ids)
+ end
+
+ context 'when supervisor is alive' do
+ it 'signals TERM then KILL to all supervised processes' do
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with(process_ids, :TERM)
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with(process_ids, '-KILL')
+
+ supervisor.shutdown
+ end
+
+ it 'stops the supervisor' do
+ expect { supervisor.shutdown }.to change { supervisor.alive }.from(true).to(false)
+ end
+ end
+
+ context 'when supervisor has already shut down' do
+ before do
+ supervisor.shutdown
+ end
+
+ it 'does nothing' do
+ expect(supervisor.alive).to be(false)
+ expect(Gitlab::ProcessManagement).not_to receive(:signal_processes)
+
+ supervisor.shutdown
+ end
+ end
+ end
+
+ context 'when supervisor never started' do
+ it 'does nothing' do
+ expect(supervisor.alive).to be(false)
+ expect(Gitlab::ProcessManagement).not_to receive(:signal_processes)
+
+ supervisor.shutdown
+ end
+ end
+ end
+
+ def await_condition(timeout_sec: 5, sleep_sec: 0.1)
+ Timeout.timeout(timeout_sec) do
+ sleep sleep_sec until yield
+ end
+ end
+end
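For orientation, the supervision behaviour covered above boils down to: poll the watched PIDs, hand any that have died to the caller's block, and fold whatever replacement PIDs the block returns back into the supervised set. A simplified, blocking sketch under assumed names (not the real Gitlab::ProcessSupervisor) follows.

    # Illustrative supervision loop; a deliberate simplification of the behaviour under test.
    class TinySupervisor
      attr_reader :supervised_pids

      def initialize(interval: 0.1)
        @interval = interval
        @alive = true
      end

      # Blocks the current thread; the real supervisor also handles signals, grace periods and shutdown.
      def supervise(pids)
        @supervised_pids = pids.dup
        while @alive
          dead = @supervised_pids.reject { |pid| alive?(pid) }
          unless dead.empty?
            replacements = yield(dead)                      # caller decides what to restart
            @supervised_pids = (@supervised_pids - dead) + replacements
          end
          sleep @interval
        end
      end

      def shutdown
        @alive = false
      end

      private

      def alive?(pid)
        Process.kill(0, pid)                                # signal 0 only checks for existence
        true
      rescue Errno::ESRCH
        false
      end
    end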
diff --git a/spec/lib/gitlab/profiler_spec.rb b/spec/lib/gitlab/profiler_spec.rb
index 5187c96b511..bfe1a588489 100644
--- a/spec/lib/gitlab/profiler_spec.rb
+++ b/spec/lib/gitlab/profiler_spec.rb
@@ -58,6 +58,30 @@ RSpec.describe Gitlab::Profiler do
described_class.profile('/', user: user, private_token: private_token)
end
+
+ context 'with sampling profiler' do
+ it 'generates sampling data' do
+ user = double(:user)
+ temp_data = Tempfile.new
+
+ expect(described_class).to receive(:with_user).with(user).and_call_original
+ described_class.profile('/', user: user, sampling_mode: true, profiler_options: { out: temp_data.path })
+
+ expect(File.stat(temp_data).size).to be > 0
+ File.unlink(temp_data)
+ end
+
+ it 'saves sampling data with a randomly-generated filename' do
+ user = double(:user)
+
+ expect(described_class).to receive(:with_user).with(user).and_call_original
+ result = described_class.profile('/', user: user, sampling_mode: true)
+
+ expect(result).to be_a(File)
+ expect(File.stat(result.path).size).to be > 0
+ File.unlink(result.path)
+ end
+ end
end
describe '.create_custom_logger' do
diff --git a/spec/lib/gitlab/project_authorizations_spec.rb b/spec/lib/gitlab/project_authorizations_spec.rb
index 7852470196b..640cf9be453 100644
--- a/spec/lib/gitlab/project_authorizations_spec.rb
+++ b/spec/lib/gitlab/project_authorizations_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe Gitlab::ProjectAuthorizations do
it 'includes the correct access levels' do
mapping = map_access_levels(authorizations)
- expect(mapping[owned_project.id]).to eq(Gitlab::Access::MAINTAINER)
+ expect(mapping[owned_project.id]).to eq(Gitlab::Access::OWNER)
expect(mapping[other_project.id]).to eq(Gitlab::Access::REPORTER)
expect(mapping[group_project.id]).to eq(Gitlab::Access::DEVELOPER)
end
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index 54a0b282e99..f3e8c440fba 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -990,4 +990,19 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('../../../../../1.2.3') }
it { is_expected.not_to match('%2e%2e%2f1.2.3') }
end
+
+ describe '.saved_reply_name_regex' do
+ subject { described_class.saved_reply_name_regex }
+
+ it { is_expected.to match('test') }
+ it { is_expected.to match('test123') }
+ it { is_expected.to match('test-test') }
+ it { is_expected.to match('test-test_0123') }
+ it { is_expected.not_to match('test test') }
+ it { is_expected.not_to match('test-') }
+ it { is_expected.not_to match('/z/test_') }
+ it { is_expected.not_to match('.xtest_') }
+ it { is_expected.not_to match('.xt.est_') }
+ it { is_expected.not_to match('0test1') }
+ end
end
diff --git a/spec/lib/gitlab/runtime_spec.rb b/spec/lib/gitlab/runtime_spec.rb
index 402b72b9220..86640efed5a 100644
--- a/spec/lib/gitlab/runtime_spec.rb
+++ b/spec/lib/gitlab/runtime_spec.rb
@@ -80,6 +80,10 @@ RSpec.describe Gitlab::Runtime do
it_behaves_like "valid runtime", :puma, 3 + Gitlab::ActionCable::Config.worker_pool_size
+ it 'identifies as an application runtime' do
+ expect(Gitlab::Runtime.application?).to be true
+ end
+
context "when ActionCable worker pool size is configured" do
before do
stub_env('ACTION_CABLE_WORKER_POOL_SIZE', 10)
@@ -113,6 +117,10 @@ RSpec.describe Gitlab::Runtime do
end
it_behaves_like "valid runtime", :sidekiq, 5
+
+ it 'identifies as an application runtime' do
+ expect(Gitlab::Runtime.application?).to be true
+ end
end
context "console" do
@@ -121,6 +129,10 @@ RSpec.describe Gitlab::Runtime do
end
it_behaves_like "valid runtime", :console, 1
+
+ it 'does not identify as an application runtime' do
+ expect(Gitlab::Runtime.application?).to be false
+ end
end
context "test suite" do
@@ -129,6 +141,10 @@ RSpec.describe Gitlab::Runtime do
end
it_behaves_like "valid runtime", :test_suite, 1
+
+ it 'does not identify as an application runtime' do
+ expect(Gitlab::Runtime.application?).to be false
+ end
end
context "geo log cursor" do
@@ -145,5 +161,9 @@ RSpec.describe Gitlab::Runtime do
end
it_behaves_like "valid runtime", :rails_runner, 1
+
+ it 'does not identify as an application runtime' do
+ expect(Gitlab::Runtime.application?).to be false
+ end
end
end
diff --git a/spec/lib/gitlab/safe_request_loader_spec.rb b/spec/lib/gitlab/safe_request_loader_spec.rb
new file mode 100644
index 00000000000..504ce233e4d
--- /dev/null
+++ b/spec/lib/gitlab/safe_request_loader_spec.rb
@@ -0,0 +1,180 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SafeRequestLoader, :aggregate_failures do
+ let(:resource_key) { '_key_' }
+ let(:resource_ids) { [] }
+ let(:args) { { resource_key: resource_key, resource_ids: resource_ids } }
+ let(:block) { proc { {} } }
+
+ describe '.execute', :request_store do
+ let(:resource_data) { { 'foo' => 'bar' } }
+
+ before do
+ Gitlab::SafeRequestStore[resource_key] = resource_data
+ end
+
+ subject(:execute_instance) { described_class.execute(**args, &block) }
+
+ it 'gets data from the store and returns it' do
+ expect(execute_instance.keys).to contain_exactly(*resource_data.keys)
+ expect(execute_instance).to match(a_hash_including(resource_data))
+ expect_store_to_be_updated
+ end
+ end
+
+ describe '#execute' do
+ subject(:execute_instance) { described_class.new(**args).execute(&block) }
+
+ context 'without a block' do
+ let(:block) { nil }
+
+ it 'raises an error' do
+ expect { execute_instance }.to raise_error(ArgumentError, 'Block is mandatory')
+ end
+ end
+
+ context 'when a resource_id is nil' do
+ let(:block) { proc { {} } }
+ let(:resource_ids) { [nil] }
+
+ it 'contains resource_data with nil key' do
+ expect(execute_instance.keys).to contain_exactly(nil)
+ expect(execute_instance).to match(a_hash_including(nil => nil))
+ end
+ end
+
+ context 'with SafeRequestStore considerations' do
+ let(:resource_data) { { 'foo' => 'bar' } }
+
+ before do
+ Gitlab::SafeRequestStore[resource_key] = resource_data
+ end
+
+ context 'when request store is active', :request_store do
+ it 'gets data from the store' do
+ expect(execute_instance.keys).to contain_exactly(*resource_data.keys)
+ expect(execute_instance).to match(a_hash_including(resource_data))
+ expect_store_to_be_updated
+ end
+
+ context 'with already loaded resource_ids', :request_store do
+ let(:resource_key) { 'foo_data' }
+ let(:existing_resource_data) { { 'foo' => 'zoo' } }
+ let(:block) { proc { { 'foo' => 'bar' } } }
+ let(:resource_ids) { ['foo'] }
+
+ before do
+ Gitlab::SafeRequestStore[resource_key] = existing_resource_data
+ end
+
+ it 'does not re-fetch data if resource_id already exists' do
+ expect(execute_instance.keys).to contain_exactly(*resource_ids)
+ expect(execute_instance).to match(a_hash_including(existing_resource_data))
+ expect_store_to_be_updated
+ end
+
+ context 'with mixture of new and existing resource_ids' do
+ let(:existing_resource_data) { { 'foo' => 'bar' } }
+ let(:resource_ids) { %w[foo bar] }
+
+ context 'when block does not filter for only the missing resource_ids' do
+ let(:block) { proc { { 'foo' => 'zoo', 'bar' => 'foo' } } }
+
+ it 'overwrites existing keyed data with results from the block' do
+ expect(execute_instance.keys).to contain_exactly(*resource_ids)
+ expect(execute_instance).to match(a_hash_including(block.call))
+ expect_store_to_be_updated
+ end
+ end
+
+ context 'when passing the missing resource_ids to a block that filters for them' do
+ let(:block) { proc { |rids| { 'foo' => 'zoo', 'bar' => 'foo' }.select { |k, _v| rids.include?(k) } } }
+
+ it 'only updates resource_data with keyed items that did not exist' do
+ expect(execute_instance.keys).to contain_exactly(*resource_ids)
+ expect(execute_instance).to match(a_hash_including({ 'foo' => 'bar', 'bar' => 'foo' }))
+ expect_store_to_be_updated
+ end
+ end
+
+ context 'with default_value for resource_ids that did not exist in the results' do
+ context 'when default_value is provided' do
+ let(:args) { { resource_key: resource_key, resource_ids: resource_ids, default_value: '_value_' } }
+
+ it 'populates a default value' do
+ expect(execute_instance.keys).to contain_exactly(*resource_ids)
+ expect(execute_instance).to match(a_hash_including({ 'foo' => 'bar', 'bar' => '_value_' }))
+ expect_store_to_be_updated
+ end
+ end
+
+ context 'when default_value is not provided' do
+ it 'populates a default_value of nil' do
+ expect(execute_instance.keys).to contain_exactly(*resource_ids)
+ expect(execute_instance).to match(a_hash_including({ 'foo' => 'bar', 'bar' => nil }))
+ expect_store_to_be_updated
+ end
+ end
+ end
+ end
+ end
+ end
+
+ context 'when request store is not active' do
+ let(:block) { proc { { 'foo' => 'bar' } } }
+ let(:resource_ids) { ['foo'] }
+
+ it 'has no data added from the store' do
+ expect(execute_instance).to eq(block.call)
+ end
+
+ context 'with mixture of new and existing resource_ids' do
+ let(:resource_ids) { %w[foo bar] }
+
+ context 'when block does not filter out existing resource_data keys' do
+ let(:block) { proc { { 'foo' => 'zoo', 'bar' => 'foo' } } }
+
+ it 'overwrites existing keyed data with results from the block' do
+ expect(execute_instance.keys).to contain_exactly(*resource_ids)
+ expect(execute_instance).to match(a_hash_including(block.call))
+ end
+ end
+
+ context 'when passing the missing resource_ids to a block that filters for them' do
+ let(:block) { proc { |rids| { 'foo' => 'zoo', 'bar' => 'foo' }.select { |k, _v| rids.include?(k) } } }
+
+ it 'only updates resource_data with keyed items that did not exist' do
+ expect(execute_instance.keys).to contain_exactly(*resource_ids)
+ expect(execute_instance).to match(a_hash_including({ 'foo' => 'zoo', 'bar' => 'foo' }))
+ end
+ end
+
+ context 'with default_value for resource_ids that did not exist in the results' do
+ context 'when default_value is provided' do
+ let(:args) { { resource_key: resource_key, resource_ids: resource_ids, default_value: '_value_' } }
+
+ it 'populates a default value' do
+ expect(execute_instance.keys).to contain_exactly(*resource_ids)
+ expect(execute_instance).to match(a_hash_including({ 'foo' => 'bar', 'bar' => '_value_' }))
+ end
+ end
+
+ context 'when default_value is not provided' do
+ it 'populates a default_value of nil' do
+ expect(execute_instance.keys).to contain_exactly(*resource_ids)
+ expect(execute_instance).to match(a_hash_including({ 'foo' => 'bar', 'bar' => nil }))
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ def expect_store_to_be_updated
+ expect(execute_instance).to match(a_hash_including(Gitlab::SafeRequestStore[resource_key]))
+ expect(execute_instance.keys).to contain_exactly(*Gitlab::SafeRequestStore[resource_key].keys)
+ end
+end
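The loader behaviour those examples describe can be summed up as: reuse whatever is already cached for the current request, invoke the block only for the ids that are still missing, and backfill a default for any ids the block did not return. A rough sketch under assumed names (not the production class) is:

    # Illustrative request-scoped loader; `store` stands in for a per-request hash.
    def load_with_request_cache(store, key, ids, default: nil)
      cached  = store[key] ||= {}
      missing = ids - cached.keys
      cached.merge!(yield(missing)) if missing.any?          # block fetches only what is absent
      missing.each { |id| cached[id] = default unless cached.key?(id) }
      cached
    end

    # Usage sketch (hypothetical lookup helper):
    #   store = {}
    #   load_with_request_cache(store, :statuses, %w[foo bar], default: 'unknown') { |ids| lookup(ids) }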
diff --git a/spec/lib/gitlab/safe_request_store_spec.rb b/spec/lib/gitlab/safe_request_store_spec.rb
index 704102ccaee..accc491fbb7 100644
--- a/spec/lib/gitlab/safe_request_store_spec.rb
+++ b/spec/lib/gitlab/safe_request_store_spec.rb
@@ -183,7 +183,7 @@ RSpec.describe Gitlab::SafeRequestStore do
context 'when RequestStore is active', :request_store do
it 'uses RequestStore' do
expect do
- described_class.fetch('foo') { 'block result' }
+ described_class.fetch('foo') { 'block result' } # rubocop:disable Style/RedundantFetchBlock
end.to change { described_class.read('foo') }.from(nil).to('block result')
end
end
@@ -193,7 +193,7 @@ RSpec.describe Gitlab::SafeRequestStore do
RequestStore.clear! # Ensure clean
expect do
- described_class.fetch('foo') { 'block result' }
+ described_class.fetch('foo') { 'block result' } # rubocop:disable Style/RedundantFetchBlock
end.not_to change { described_class.read('foo') }.from(nil)
RequestStore.clear! # Clean up
diff --git a/spec/lib/gitlab/sanitizers/exif_spec.rb b/spec/lib/gitlab/sanitizers/exif_spec.rb
index fbda9e6d0be..623fa4bc48a 100644
--- a/spec/lib/gitlab/sanitizers/exif_spec.rb
+++ b/spec/lib/gitlab/sanitizers/exif_spec.rb
@@ -131,6 +131,124 @@ RSpec.describe Gitlab::Sanitizers::Exif do
end
end
+ describe '#clean_existing_path' do
+ let(:dry_run) { false }
+
+ let(:tmp_file) { Tempfile.new("rails_sample.jpg") }
+
+ subject { sanitizer.clean_existing_path(tmp_file.path, dry_run: dry_run) }
+
+ context "no dry run" do
+ let(:file_content) { fixture_file_upload('spec/fixtures/rails_sample.jpg') }
+
+ before do
+ File.open(tmp_file.path, "w+b") { |f| f.write file_content }
+ end
+
+ it "removes exif from the image" do
+ expected_args = ["exiftool", "-all=", "-tagsFromFile", "@", *Gitlab::Sanitizers::Exif::EXCLUDE_PARAMS, "--IPTC:all", "--XMP-iptcExt:all", kind_of(String)]
+
+ expect(sanitizer).to receive(:extra_tags).and_return(["", 0])
+ expect(sanitizer).to receive(:exec_remove_exif!).once.and_call_original
+ expect(Gitlab::Popen).to receive(:popen).with(expected_args) do |args|
+ File.write("#{args.last}_original", "foo") if args.last.start_with?(Dir.tmpdir)
+
+ [expected_args, 0]
+ end
+
+ subject
+ end
+
+ it "ignores image without exif" do
+ expected_args = ["exiftool", "-all", "-j", "-sort", "--IPTC:all", "--XMP-iptcExt:all", kind_of(String)]
+
+ expect(Gitlab::Popen).to receive(:popen).with(expected_args).and_return(["[{}]", 0])
+ expect(sanitizer).not_to receive(:exec_remove_exif!)
+
+ subject
+ end
+
+ it "raises an error if the exiftool fails with an error" do
+ expect(Gitlab::Popen).to receive(:popen).and_return(["error", 1])
+
+ expect { subject }.to raise_exception(RuntimeError, "failed to get exif tags: error")
+ end
+
+ context 'for files that do not have the correct MIME type from file' do
+ let(:mime_type) { 'text/plain' }
+
+ it 'cleans only jpg/tiff images with the correct mime types' do
+ expect(sanitizer).not_to receive(:extra_tags)
+
+ expect { subject }.to raise_error(RuntimeError, %r{File type text/plain not supported})
+ end
+ end
+
+ context 'skip_unallowed_types is false' do
+ context 'for files that do not have the correct MIME type from input content' do
+ let(:mime_type) { 'text/plain' }
+
+          it 'raises an error for files that are not jpg/tiff images with the correct mime types' do
+ expect(sanitizer).not_to receive(:extra_tags)
+
+ expect do
+ sanitizer.clean_existing_path(tmp_file.path, content: file_content)
+ end.to raise_error(RuntimeError, %r{File type text/plain not supported})
+ end
+ end
+
+ context 'for files that do not have the correct MIME type from input content' do
+ let(:mime_type) { 'text/plain' }
+
+          it 'raises an error for files that are not jpg/tiff images with the correct mime types' do
+ expect(sanitizer).not_to receive(:extra_tags)
+
+ expect do
+ sanitizer.clean_existing_path(tmp_file.path, content: file_content)
+ end.to raise_error(RuntimeError, %r{File type text/plain not supported})
+ end
+ end
+ end
+
+ context 'skip_unallowed_types is true' do
+ context 'for files that do not have the correct MIME type from input content' do
+ let(:mime_type) { 'text/plain' }
+
+ it 'cleans only jpg/tiff images with the correct mime types' do
+ expect(sanitizer).not_to receive(:extra_tags)
+
+ expect do
+ sanitizer.clean_existing_path(tmp_file.path, content: file_content, skip_unallowed_types: true)
+ end.not_to raise_error
+ end
+ end
+
+ context 'for files that do not have the correct MIME type from input content' do
+ let(:mime_type) { 'text/plain' }
+
+ it 'cleans only jpg/tiff images with the correct mime types' do
+ expect(sanitizer).not_to receive(:extra_tags)
+
+ expect do
+ sanitizer.clean_existing_path(tmp_file.path, content: file_content, skip_unallowed_types: true)
+ end.not_to raise_error
+ end
+ end
+ end
+ end
+
+ context "dry run" do
+ let(:dry_run) { true }
+
+ it "doesn't change the image" do
+ expect(sanitizer).to receive(:extra_tags).and_return({ 'foo' => 'bar' })
+ expect(sanitizer).not_to receive(:exec_remove_exif!)
+
+ subject
+ end
+ end
+ end
+
describe "#extra_tags" do
it "returns a list of keys for exif file" do
tags = '[{
diff --git a/spec/lib/gitlab/seeder_spec.rb b/spec/lib/gitlab/seeder_spec.rb
index 877461a7064..71d0a41ef98 100644
--- a/spec/lib/gitlab/seeder_spec.rb
+++ b/spec/lib/gitlab/seeder_spec.rb
@@ -4,6 +4,26 @@ require 'spec_helper'
RSpec.describe Gitlab::Seeder do
describe '.quiet' do
+ let(:database_base_models) do
+ {
+ main: ApplicationRecord,
+ ci: Ci::ApplicationRecord
+ }
+ end
+
+ it 'disables database logging' do
+ allow(Gitlab::Database).to receive(:database_base_models)
+ .and_return(database_base_models.with_indifferent_access)
+
+ described_class.quiet do
+ expect(ApplicationRecord.logger).to be_nil
+ expect(Ci::ApplicationRecord.logger).to be_nil
+ end
+
+ expect(ApplicationRecord.logger).not_to be_nil
+ expect(Ci::ApplicationRecord.logger).not_to be_nil
+ end
+
it 'disables mail deliveries' do
expect(ActionMailer::Base.perform_deliveries).to eq(true)
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
index 833de6ae624..8d46845548a 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
@@ -122,20 +122,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
it_behaves_like 'sets Redis keys with correct TTL'
end
- context 'when preserve_latest_wal_locations_for_idempotent_jobs feature flag is disabled' do
- before do
- stub_feature_flags(preserve_latest_wal_locations_for_idempotent_jobs: false)
- end
-
- it "does not change the existing wal locations key's TTL" do
- expect { duplicate_job.check! }
- .to not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) }
- .from([nil, -2])
- .and not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) }
- .from([nil, -2])
- end
- end
-
it "adds the idempotency key to the jobs payload" do
expect { duplicate_job.check! }.to change { job['idempotency_key'] }.from(nil).to(idempotency_key)
end
@@ -186,28 +172,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
duplicate_job.check!
end
- context 'when preserve_latest_wal_locations_for_idempotent_jobs feature flag is disabled' do
- let(:existing_wal) { {} }
-
- before do
- stub_feature_flags(preserve_latest_wal_locations_for_idempotent_jobs: false)
- end
-
- it "doesn't call Sidekiq.redis" do
- expect(Sidekiq).not_to receive(:redis)
-
- duplicate_job.update_latest_wal_location!
- end
-
- it "doesn't update a wal location to redis with an offset" do
- expect { duplicate_job.update_latest_wal_location! }
- .to not_change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
- .from([])
- .and not_change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
- .from([])
- end
- end
-
context "when the key doesn't exists in redis" do
let(:existing_wal) do
{
@@ -328,20 +292,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
context 'when job is not deduplication and wal locations were not persisted' do
it { expect(duplicate_job.latest_wal_locations).to be_empty }
end
-
- context 'when preserve_latest_wal_locations_for_idempotent_jobs feature flag is disabled' do
- before do
- stub_feature_flags(preserve_latest_wal_locations_for_idempotent_jobs: false)
- end
-
- it "doesn't call Sidekiq.redis" do
- expect(Sidekiq).not_to receive(:redis)
-
- duplicate_job.latest_wal_locations
- end
-
- it { expect(duplicate_job.latest_wal_locations).to eq({}) }
- end
end
describe '#delete!' do
@@ -406,32 +356,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
let(:key) { wal_location_key(idempotency_key, :ci) }
let(:from_value) { wal_locations[:ci] }
end
-
- context 'when preserve_latest_wal_locations_for_idempotent_jobs feature flag is disabled' do
- before do
- stub_feature_flags(preserve_latest_wal_locations_for_idempotent_jobs: false)
- end
-
- it_behaves_like 'does not delete key from redis', 'latest wal location keys for main database' do
- let(:key) { existing_wal_location_key(idempotency_key, :main) }
- let(:from_value) { wal_locations[:main] }
- end
-
- it_behaves_like 'does not delete key from redis', 'latest wal location keys for ci database' do
- let(:key) { existing_wal_location_key(idempotency_key, :ci) }
- let(:from_value) { wal_locations[:ci] }
- end
-
- it_behaves_like 'does not delete key from redis', 'latest wal location keys for main database' do
- let(:key) { wal_location_key(idempotency_key, :main) }
- let(:from_value) { wal_locations[:main] }
- end
-
- it_behaves_like 'does not delete key from redis', 'latest wal location keys for ci database' do
- let(:key) { wal_location_key(idempotency_key, :ci) }
- let(:from_value) { wal_locations[:ci] }
- end
- end
end
context 'when the idempotency key is not part of the job' do
@@ -666,16 +590,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
it 'returns true' do
expect(duplicate_job).to be_idempotent
end
-
- context 'when preserve_latest_wal_locations_for_idempotent_jobs feature flag is disabled' do
- before do
- stub_feature_flags(preserve_latest_wal_locations_for_idempotent_jobs: false)
- end
-
- it 'returns false' do
- expect(duplicate_job).not_to be_idempotent
- end
- end
end
end
diff --git a/spec/lib/gitlab/untrusted_regexp/ruby_syntax_spec.rb b/spec/lib/gitlab/untrusted_regexp/ruby_syntax_spec.rb
index b021abc9f25..43f155091ad 100644
--- a/spec/lib/gitlab/untrusted_regexp/ruby_syntax_spec.rb
+++ b/spec/lib/gitlab/untrusted_regexp/ruby_syntax_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
RSpec.describe Gitlab::UntrustedRegexp::RubySyntax do
describe '.matches_syntax?' do
@@ -71,44 +71,6 @@ RSpec.describe Gitlab::UntrustedRegexp::RubySyntax do
end
end
- context 'when unsafe regexp is used' do
- include StubFeatureFlags
-
- before do
- # When removed we could use `require 'fast_spec_helper'` again.
- stub_feature_flags(allow_unsafe_ruby_regexp: true)
-
- allow(Gitlab::UntrustedRegexp).to receive(:new).and_raise(RegexpError)
- end
-
- context 'when no fallback is enabled' do
- it 'raises an exception' do
- expect { described_class.fabricate!('/something/') }
- .to raise_error(RegexpError)
- end
- end
-
- context 'when fallback is used' do
- it 'fabricates regexp with a single flag' do
- regexp = described_class.fabricate!('/something/i', fallback: true)
-
- expect(regexp).to eq Regexp.new('something', Regexp::IGNORECASE)
- end
-
- it 'fabricates regexp with multiple flags' do
- regexp = described_class.fabricate!('/something/im', fallback: true)
-
- expect(regexp).to eq Regexp.new('something', Regexp::IGNORECASE | Regexp::MULTILINE)
- end
-
- it 'fabricates regexp without flags' do
- regexp = described_class.fabricate!('/something/', fallback: true)
-
- expect(regexp).to eq Regexp.new('something')
- end
- end
- end
-
context 'when regexp is a raw pattern' do
it 'raises an error' do
expect { described_class.fabricate!('some .* thing') }
diff --git a/spec/lib/gitlab/url_blocker_spec.rb b/spec/lib/gitlab/url_blocker_spec.rb
index 5b77290ce2e..57b0297a0a0 100644
--- a/spec/lib/gitlab/url_blocker_spec.rb
+++ b/spec/lib/gitlab/url_blocker_spec.rb
@@ -39,6 +39,73 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
end
end
+ context 'when URI is for a local object storage' do
+ let(:import_url) { "#{host}/external-diffs/merge_request_diffs/mr-1/diff-1" }
+ let(:enabled_object_storage_setting) do
+ {
+ 'object_store' =>
+ {
+ 'enabled' => true,
+ 'connection' => {
+ 'endpoint' => host
+ }
+ }
+ }
+ end
+
+ before do
+ allow(Settings).to receive(:external_diffs).and_return(enabled_object_storage_setting)
+ end
+
+ context 'when allow_object_storage is true' do
+ subject { described_class.validate!(import_url, allow_object_storage: true) }
+
+ context 'with a local domain name' do
+ let(:host) { 'http://review-minio-svc.svc:9000' }
+
+ before do
+ stub_dns(host, ip_address: '127.0.0.1')
+ end
+
+ it_behaves_like 'validates URI and hostname' do
+ let(:expected_uri) { 'http://127.0.0.1:9000/external-diffs/merge_request_diffs/mr-1/diff-1' }
+ let(:expected_hostname) { 'review-minio-svc.svc' }
+ end
+ end
+
+ context 'with an IP address' do
+ let(:host) { 'http://127.0.0.1:9000' }
+
+ it_behaves_like 'validates URI and hostname' do
+ let(:expected_uri) { 'http://127.0.0.1:9000/external-diffs/merge_request_diffs/mr-1/diff-1' }
+ let(:expected_hostname) { nil }
+ end
+ end
+ end
+
+ context 'when allow_object_storage is false' do
+ context 'with a local domain name' do
+ let(:host) { 'http://review-minio-svc.svc:9000' }
+
+ before do
+ stub_dns(host, ip_address: '127.0.0.1')
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::BlockedUrlError)
+ end
+ end
+
+ context 'with an IP address' do
+ let(:host) { 'http://127.0.0.1:9000' }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::BlockedUrlError)
+ end
+ end
+ end
+ end
+
context 'when the URL hostname is a domain' do
context 'when domain can be resolved' do
let(:import_url) { 'https://example.org' }
@@ -299,6 +366,21 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
]
end
+ let(:limited_broadcast_address_variants) do
+ [
+ '255.255.255.255', # "normal" dotted decimal
+ '0377.0377.0377.0377', # Octal
+ '0377.00000000377.00377.0000377', # Still octal
+ '0xff.0xff.0xff.0xff', # hex
+ '0xffffffff', # still hex
+ '0xBaaaaaaaaaaaaaaaaffffffff', # padded hex
+ '255.255.255.255:65535', # with a port
+ '4294967295', # as an integer / dword
+ '[::ffff:ffff:ffff]', # short IPv6
+ '[0000:0000:0000:0000:0000:ffff:ffff:ffff]' # long IPv6
+ ]
+ end
+
let(:fake_domain) { 'www.fakedomain.fake' }
shared_examples 'allows local requests' do |url_blocker_attributes|
@@ -336,6 +418,12 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
expect(described_class).not_to be_blocked_url('http://[::ffff:a9fe:a864]', **url_blocker_attributes)
expect(described_class).not_to be_blocked_url('http://[fe80::c800:eff:fe74:8]', **url_blocker_attributes)
end
+
+ it 'allows limited broadcast address 255.255.255.255 and variants' do
+ limited_broadcast_address_variants.each do |variant|
+ expect(described_class).not_to be_blocked_url("https://#{variant}", **url_blocker_attributes), "Expected #{variant} to be allowed"
+ end
+ end
end
context 'true (default)' do
@@ -368,6 +456,17 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
expect(described_class).to be_blocked_url('http://[fe80::c800:eff:fe74:8]', allow_local_network: false)
end
+ it 'blocks limited broadcast address 255.255.255.255 and variants' do
+ # Raise BlockedUrlError for invalid URLs.
+ # The padded hex version, for example, is a valid URL on Mac but
+ # not on Ubuntu.
+ stub_env('RSPEC_ALLOW_INVALID_URLS', 'false')
+
+ limited_broadcast_address_variants.each do |variant|
+ expect(described_class).to be_blocked_url("https://#{variant}", allow_local_network: false), "Expected #{variant} to be blocked"
+ end
+ end
+
context 'when local domain/IP is allowed' do
let(:url_blocker_attributes) do
{
@@ -394,6 +493,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
'::ffff:169.254.168.100',
'::ffff:a9fe:a864',
'fe80::c800:eff:fe74:8',
+ '255.255.255.255',
# garbage IPs
'45645632345',
@@ -415,6 +515,10 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
expect(described_class).to be_blocked_url(url, **attrs)
end
end
+
+ it 'allows the limited broadcast address 255.255.255.255' do
+ expect(described_class).not_to be_blocked_url('http://255.255.255.255', **url_blocker_attributes)
+ end
end
context 'with domains in allowlist' do
diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb
index a22b3a733bd..1127d1cd477 100644
--- a/spec/lib/gitlab/usage/metric_definition_spec.rb
+++ b/spec/lib/gitlab/usage/metric_definition_spec.rb
@@ -50,6 +50,28 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
expect { described_class.definitions }.not_to raise_error
end
+ describe 'not_removed' do
+ let(:all_definitions) do
+ metrics_definitions = [
+ { key_path: 'metric1', instrumentation_class: 'RedisHLLMetric', status: 'active' },
+ { key_path: 'metric2', instrumentation_class: 'RedisHLLMetric', status: 'broken' },
+ { key_path: 'metric3', instrumentation_class: 'RedisHLLMetric', status: 'active' },
+ { key_path: 'metric4', instrumentation_class: 'RedisHLLMetric', status: 'removed' }
+ ]
+ metrics_definitions.map { |definition| described_class.new(definition[:key_path], definition.symbolize_keys) }
+ end
+
+ before do
+ allow(described_class).to receive(:all).and_return(all_definitions)
+ end
+
+ it 'includes metrics that are not removed' do
+ expect(described_class.not_removed.count).to eq(3)
+
+ expect(described_class.not_removed.keys).to match_array(%w(metric1 metric2 metric3))
+ end
+ end
+
describe '#with_instrumentation_class' do
let(:metric_status) { 'active' }
let(:all_definitions) do
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/cert_based_clusters_ff_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/cert_based_clusters_ff_metric_spec.rb
new file mode 100644
index 00000000000..09cc6ae71d4
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/cert_based_clusters_ff_metric_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CertBasedClustersFfMetric do
+ context 'with FF enabled' do
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '7d', data_source: 'database' } do
+ let(:expected_value) { true }
+ end
+ end
+
+ context 'with FF disabled' do
+ before do
+ stub_feature_flags(certificate_based_clusters: false)
+ end
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '7d', data_source: 'database' } do
+ let(:expected_value) { false }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb
index 4d84423cde4..ea5ae1970de 100644
--- a/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb
@@ -36,6 +36,28 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
expect(Rails.cache.read('metric_instrumentation/special_issue_count_maximum_id')).to eq(nil)
end
+ context 'with metric options specified with custom batch_size' do
+ subject do
+ described_class.tap do |metric_class|
+ metric_class.relation { Issue }
+ metric_class.operation :count
+ metric_class.start { metric_class.relation.minimum(:id) }
+ metric_class.finish { metric_class.relation.maximum(:id) }
+ metric_class.metric_options { { batch_size: 12345 } }
+ end.new(time_frame: 'all')
+ end
+
+ it 'calls metric with customized batch_size' do
+ expect(subject).to receive(:count).with(any_args, hash_including(batch_size: 12345, start: issues.min_by(&:id).id, finish: issues.max_by(&:id).id)).and_call_original
+
+ subject.value
+ end
+
+ it 'calculates a correct result' do
+ expect(subject.value).to eq(3)
+ end
+ end
+
context 'with start and finish not called' do
subject do
described_class.tap do |metric_class|
diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
index 0f95da74ff9..f81ad9b193d 100644
--- a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
@@ -27,8 +27,8 @@ RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator do
context 'for count with default column metrics' do
it_behaves_like 'name suggestion' do
# corresponding metric is collected with count(Board)
- let(:key_path) { 'counts.boards' }
- let(:name_suggestion) { /count_boards/ }
+ let(:key_path) { 'counts.issues' }
+ let(:name_suggestion) { /count_issues/ }
end
end
diff --git a/spec/lib/gitlab/usage/service_ping/instrumented_payload_spec.rb b/spec/lib/gitlab/usage/service_ping/instrumented_payload_spec.rb
new file mode 100644
index 00000000000..76548483cfa
--- /dev/null
+++ b/spec/lib/gitlab/usage/service_ping/instrumented_payload_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::ServicePing::InstrumentedPayload do
+ let(:uuid) { "0000-0000-0000" }
+
+ before do
+ allow(ApplicationRecord.connection).to receive(:transaction_open?).and_return(false)
+ allow(Gitlab::CurrentSettings).to receive(:uuid).and_return(uuid)
+ end
+
+ context 'when building service ping with values' do
+ let(:metrics_key_paths) { %w(counts.boards uuid redis_hll_counters.search.i_search_total_monthly) }
+ let(:expected_payload) do
+ {
+ counts: { boards: 0 },
+ redis_hll_counters: { search: { i_search_total_monthly: 0 } },
+ uuid: uuid
+ }
+ end
+
+ it 'builds the service ping payload for the metrics key_paths' do
+ expect(described_class.new(metrics_key_paths, :with_value).build).to eq(expected_payload)
+ end
+ end
+
+ context 'when building service ping with instrumentations' do
+ let(:metrics_key_paths) { %w(counts.boards uuid redis_hll_counters.search.i_search_total_monthly) }
+ let(:expected_payload) do
+ {
+ counts: { boards: "SELECT COUNT(\"boards\".\"id\") FROM \"boards\"" },
+ redis_hll_counters: { search: { i_search_total_monthly: 0 } },
+ uuid: uuid
+ }
+ end
+
+ it 'builds the service ping payload for the metrics key_paths' do
+ expect(described_class.new(metrics_key_paths, :with_instrumentation).build).to eq(expected_payload)
+ end
+ end
+
+ context 'when missing instrumentation class' do
+ it 'returns empty hash' do
+ expect(described_class.new(['counts.ci_builds'], :with_instrumentation).build).to eq({})
+ expect(described_class.new(['counts.ci_builds'], :with_value).build).to eq({})
+ end
+ end
+end
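The payload in these examples is just a nested hash keyed by the segments of each dotted key path ('counts.boards' becomes { counts: { boards: ... } }). A small helper that performs that nesting, written here purely for illustration rather than taken from the library, could look like:

    # Illustrative helper: turn dotted key paths with values into a nested symbol-keyed hash.
    def nest_key_paths(values_by_path)
      values_by_path.each_with_object({}) do |(path, value), payload|
        *parents, leaf = path.split('.').map(&:to_sym)
        parents.reduce(payload) { |node, key| node[key] ||= {} }[leaf] = value
      end
    end

    # nest_key_paths('counts.boards' => 0, 'uuid' => '0000-0000-0000')
    #   => { counts: { boards: 0 }, uuid: "0000-0000-0000" }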
diff --git a/spec/lib/gitlab/usage/service_ping/payload_keys_processor_spec.rb b/spec/lib/gitlab/usage/service_ping/payload_keys_processor_spec.rb
new file mode 100644
index 00000000000..dd4349b99df
--- /dev/null
+++ b/spec/lib/gitlab/usage/service_ping/payload_keys_processor_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::ServicePing::PayloadKeysProcessor do
+ context 'with an object metric' do
+ let(:payload) { { counts: { issues: 1, boards: 1 }, topology: { duration_d: 100 }, redis_hll_counters: { search: { i_search_total_monthly: 1 } } } }
+
+ it 'returns the payload keys that have a metric definition' do
+ expect(described_class.new(payload).key_paths).to match_array(['counts.issues', 'counts.boards', 'topology', 'redis_hll_counters.search.i_search_total_monthly'])
+ end
+ end
+
+ context 'with a missing metric definition' do
+ let(:payload) { { counts: { issues: 1, boards: 1 }, missing_definition: 1, topology: { duration_d: 100 } } }
+
+ it 'returns the payload keys that have a metric definition' do
+ expect(described_class.new(payload).key_paths).to match_array(['counts.issues', 'counts.boards', 'topology'])
+ end
+ end
+
+ context 'with array metric' do
+ let(:payload) { { counts: { issues: 1, boards: 1 }, settings: { collected_data_categories: ['standard'] }, topology: { duration_d: 100 } } }
+
+ it 'returns the payload keys that have a metric definition' do
+ expect(described_class.new(payload).key_paths).to match_array(['counts.issues', 'counts.boards', 'topology', 'settings.collected_data_categories'])
+ end
+ end
+
+ context 'missing_instrumented_metrics_key_paths' do
+ let(:payload) do
+ {
+ counts: { issues: 1, boards: 1 },
+ topology: { duration_d: 100 },
+ redis_hll_counters: { search: { i_search_total_monthly: 1 } }
+ }
+ end
+
+ let(:metrics_definitions) do
+ [
+ instance_double(::Gitlab::Usage::MetricDefinition, key: 'counts.issues'),
+ instance_double(::Gitlab::Usage::MetricDefinition, key: 'topology'),
+ instance_double(::Gitlab::Usage::MetricDefinition, key: 'redis_hll_counters.search.i_search_total_monthly'),
+ instance_double(::Gitlab::Usage::MetricDefinition, key: 'settings.collected_data_categories')
+ ]
+ end
+
+ before do
+ allow(::Gitlab::Usage::MetricDefinition).to receive(:with_instrumentation_class).and_return(metrics_definitions)
+ end
+
+ it 'returns the missing keys' do
+ expect(described_class.new(payload).missing_instrumented_metrics_key_paths).to match_array(['settings.collected_data_categories'])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/service_ping_report_spec.rb b/spec/lib/gitlab/usage/service_ping_report_spec.rb
index 9b9b24ad128..1f62ddd0bbb 100644
--- a/spec/lib/gitlab/usage/service_ping_report_spec.rb
+++ b/spec/lib/gitlab/usage/service_ping_report_spec.rb
@@ -3,66 +3,216 @@
require 'spec_helper'
RSpec.describe Gitlab::Usage::ServicePingReport, :use_clean_rails_memory_store_caching do
- let(:usage_data) { { uuid: "1111" } }
+ include UsageDataHelpers
- context 'for output: :all_metrics_values' do
- it 'generates the service ping' do
- expect(Gitlab::UsageData).to receive(:data)
+ let(:usage_data) { { uuid: "1111", counts: { issue: 0 } } }
- described_class.for(output: :all_metrics_values)
+ context 'when feature merge_service_ping_instrumented_metrics enabled' do
+ before do
+ stub_feature_flags(merge_service_ping_instrumented_metrics: true)
+
+ allow_next_instance_of(Gitlab::Usage::ServicePing::PayloadKeysProcessor) do |instance|
+ allow(instance).to receive(:missing_key_paths).and_return([])
+ end
+
+ allow_next_instance_of(Gitlab::Usage::ServicePing::InstrumentedPayload) do |instance|
+ allow(instance).to receive(:build).and_return({})
+ end
end
- end
- context 'for output: :metrics_queries' do
- it 'generates the service ping' do
- expect(Gitlab::UsageDataQueries).to receive(:data)
+ context 'all_metrics_values' do
+ it 'generates the service ping when there are no missing values' do
+ expect(Gitlab::UsageData).to receive(:data).and_return(usage_data)
+ expect(described_class.for(output: :all_metrics_values)).to eq({ uuid: "1111", counts: { issue: 0 } })
+ end
- described_class.for(output: :metrics_queries)
+ it 'generates the service ping with the missing values' do
+ expect_next_instance_of(Gitlab::Usage::ServicePing::PayloadKeysProcessor, usage_data) do |instance|
+ expect(instance).to receive(:missing_instrumented_metrics_key_paths).and_return(['counts.boards'])
+ end
+
+ expect_next_instance_of(Gitlab::Usage::ServicePing::InstrumentedPayload, ['counts.boards'], :with_value) do |instance|
+ expect(instance).to receive(:build).and_return({ counts: { boards: 1 } })
+ end
+
+ expect(Gitlab::UsageData).to receive(:data).and_return(usage_data)
+ expect(described_class.for(output: :all_metrics_values)).to eq({ uuid: "1111", counts: { issue: 0, boards: 1 } })
+ end
end
- end
- context 'for output: :non_sql_metrics_values' do
- it 'generates the service ping' do
- expect(Gitlab::UsageDataNonSqlMetrics).to receive(:data)
+ context 'for output: :metrics_queries' do
+ it 'generates the service ping' do
+ expect(Gitlab::UsageData).to receive(:data).and_return(usage_data)
+
+ described_class.for(output: :metrics_queries)
+ end
+ end
+
+ context 'for output: :non_sql_metrics_values' do
+ it 'generates the service ping' do
+ expect(Gitlab::UsageData).to receive(:data).and_return(usage_data)
- described_class.for(output: :non_sql_metrics_values)
+ described_class.for(output: :non_sql_metrics_values)
+ end
+ end
+
+ context 'when using cached' do
+ context 'for cached: true' do
+ let(:new_usage_data) { { uuid: "1112" } }
+
+ it 'caches the values' do
+ allow(Gitlab::UsageData).to receive(:data).and_return(usage_data, new_usage_data)
+
+ expect(described_class.for(output: :all_metrics_values)).to eq(usage_data)
+ expect(described_class.for(output: :all_metrics_values, cached: true)).to eq(usage_data)
+
+ expect(Rails.cache.fetch('usage_data')).to eq(usage_data)
+ end
+
+ it 'writes to cache and returns fresh data' do
+ allow(Gitlab::UsageData).to receive(:data).and_return(usage_data, new_usage_data)
+
+ expect(described_class.for(output: :all_metrics_values)).to eq(usage_data)
+ expect(described_class.for(output: :all_metrics_values)).to eq(new_usage_data)
+ expect(described_class.for(output: :all_metrics_values, cached: true)).to eq(new_usage_data)
+
+ expect(Rails.cache.fetch('usage_data')).to eq(new_usage_data)
+ end
+ end
+
+ context 'when no caching' do
+ let(:new_usage_data) { { uuid: "1112" } }
+
+ it 'returns fresh data' do
+ allow(Gitlab::UsageData).to receive(:data).and_return(usage_data, new_usage_data)
+
+ expect(described_class.for(output: :all_metrics_values)).to eq(usage_data)
+ expect(described_class.for(output: :all_metrics_values)).to eq(new_usage_data)
+
+ expect(Rails.cache.fetch('usage_data')).to eq(new_usage_data)
+ end
+ end
end
end
- context 'when using cached' do
- context 'for cached: true' do
- let(:new_usage_data) { { uuid: "1112" } }
+ context 'when feature merge_service_ping_instrumented_metrics disabled' do
+ before do
+ stub_feature_flags(merge_service_ping_instrumented_metrics: false)
+ end
- it 'caches the values' do
- allow(Gitlab::UsageData).to receive(:data).and_return(usage_data, new_usage_data)
+ context 'all_metrics_values' do
+ it 'generates the service ping when there are no missing values' do
+ expect(Gitlab::UsageData).to receive(:data).and_return(usage_data)
+ expect(described_class.for(output: :all_metrics_values)).to eq({ uuid: "1111", counts: { issue: 0 } })
+ end
+ end
- expect(described_class.for(output: :all_metrics_values)).to eq(usage_data)
- expect(described_class.for(output: :all_metrics_values, cached: true)).to eq(usage_data)
+ context 'for output: :metrics_queries' do
+ it 'generates the service ping' do
+ expect(Gitlab::UsageData).to receive(:data).and_return(usage_data)
- expect(Rails.cache.fetch('usage_data')).to eq(usage_data)
+ described_class.for(output: :metrics_queries)
end
+ end
- it 'writes to cache and returns fresh data' do
- allow(Gitlab::UsageData).to receive(:data).and_return(usage_data, new_usage_data)
+ context 'for output: :non_sql_metrics_values' do
+ it 'generates the service ping' do
+ expect(Gitlab::UsageData).to receive(:data).and_return(usage_data)
- expect(described_class.for(output: :all_metrics_values)).to eq(usage_data)
- expect(described_class.for(output: :all_metrics_values)).to eq(new_usage_data)
- expect(described_class.for(output: :all_metrics_values, cached: true)).to eq(new_usage_data)
+ described_class.for(output: :non_sql_metrics_values)
+ end
+ end
+ end
+
+ context 'cross-check values against queries' do
+ # TODO: fix failing metrics https://gitlab.com/gitlab-org/gitlab/-/issues/353559
+ let(:failing_todo_metrics) do
+ ["counts.labels",
+ "counts.jira_imports_total_imported_issues_count",
+ "counts.in_product_marketing_email_create_0_sent",
+ "counts.in_product_marketing_email_create_0_cta_clicked",
+ "counts.in_product_marketing_email_create_1_sent",
+ "counts.in_product_marketing_email_create_1_cta_clicked",
+ "counts.in_product_marketing_email_create_2_sent",
+ "counts.in_product_marketing_email_create_2_cta_clicked",
+ "counts.in_product_marketing_email_verify_0_sent",
+ "counts.in_product_marketing_email_verify_0_cta_clicked",
+ "counts.in_product_marketing_email_verify_1_sent",
+ "counts.in_product_marketing_email_verify_1_cta_clicked",
+ "counts.in_product_marketing_email_verify_2_sent",
+ "counts.in_product_marketing_email_verify_2_cta_clicked",
+ "counts.in_product_marketing_email_trial_0_sent",
+ "counts.in_product_marketing_email_trial_0_cta_clicked",
+ "counts.in_product_marketing_email_trial_1_sent",
+ "counts.in_product_marketing_email_trial_1_cta_clicked",
+ "counts.in_product_marketing_email_trial_2_sent",
+ "counts.in_product_marketing_email_trial_2_cta_clicked",
+ "counts.in_product_marketing_email_team_0_sent",
+ "counts.in_product_marketing_email_team_0_cta_clicked",
+ "counts.in_product_marketing_email_team_1_sent",
+ "counts.in_product_marketing_email_team_1_cta_clicked",
+ "counts.in_product_marketing_email_team_2_sent",
+ "counts.in_product_marketing_email_team_2_cta_clicked",
+ "counts.in_product_marketing_email_experience_0_sent",
+ "counts.in_product_marketing_email_team_short_0_sent",
+ "counts.in_product_marketing_email_team_short_0_cta_clicked",
+ "counts.in_product_marketing_email_trial_short_0_sent",
+ "counts.in_product_marketing_email_trial_short_0_cta_clicked",
+ "counts.in_product_marketing_email_admin_verify_0_sent",
+ "counts.in_product_marketing_email_admin_verify_0_cta_clicked",
+ "counts.ldap_users",
+ "usage_activity_by_stage.create.projects_with_sectional_code_owner_rules",
+ "usage_activity_by_stage.monitor.clusters_integrations_prometheus",
+ "usage_activity_by_stage.monitor.projects_with_enabled_alert_integrations_histogram",
+ "usage_activity_by_stage_monthly.create.projects_with_sectional_code_owner_rules",
+ "usage_activity_by_stage_monthly.monitor.clusters_integrations_prometheus"]
+ end
- expect(Rails.cache.fetch('usage_data')).to eq(new_usage_data)
+ def fetch_value_by_query(query)
+ # Because test cases are run inside a transaction, if any query raises an error, all queries that follow
+ # it are automatically canceled by PostgreSQL. To avoid that problem, and to provide exhaustive information
+ # about every metric, queries are explicitly wrapped in sub-transactions.
+ ApplicationRecord.transaction do
+ ApplicationRecord.connection.execute(query)&.first&.values&.first
end
+ rescue ActiveRecord::StatementInvalid => e
+ e.message
+ end
+
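+ # Recursively walks the :metrics_queries payload and, for every leaf value that looks like a
+ # SQL SELECT statement, collects a [key_path, query, result] triple, where the result comes
+ # from executing the query through fetch_value_by_query.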
+ def build_payload_from_queries(payload, accumulator = [], key_path = [])
+ payload.each do |key, value|
+ if value.is_a?(Hash)
+ build_payload_from_queries(value, accumulator, key_path.dup << key)
+ elsif value.is_a?(String) && /SELECT .* FROM.*/ =~ value
+ accumulator << [key_path.dup << key, value, fetch_value_by_query(value)]
+ end
+ end
+ accumulator
+ end
+
+ before do
+ stub_usage_data_connections
+ stub_object_store_settings
+ stub_prometheus_queries
+ memoized_constants = Gitlab::UsageData::CE_MEMOIZED_VALUES
+ memoized_constants += Gitlab::UsageData::EE_MEMOIZED_VALUES if defined? Gitlab::UsageData::EE_MEMOIZED_VALUES
+ memoized_constants.each { |v| Gitlab::UsageData.clear_memoization(v) }
+ stub_database_flavor_check('Cloud SQL for PostgreSQL')
end
- context 'when no caching' do
- let(:new_usage_data) { { uuid: "1112" } }
+ let(:service_ping_payload) { described_class.for(output: :all_metrics_values) }
+ let(:metrics_queries_with_values) { build_payload_from_queries(described_class.for(output: :metrics_queries)) }
- it 'returns fresh data' do
- allow(Gitlab::UsageData).to receive(:data).and_return(usage_data, new_usage_data)
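+ # Each recorded SQL query is executed and its result is compared with the value present in the
+ # full Service Ping payload for the same key path, skipping the known-failing metrics listed above.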
+ it 'generates queries that match collected data', :aggregate_failures do
+ message = "Expected %{query} result to match %{value} for %{key_path} metric"
- expect(described_class.for(output: :all_metrics_values)).to eq(usage_data)
- expect(described_class.for(output: :all_metrics_values)).to eq(new_usage_data)
+ metrics_queries_with_values.each do |key_path, query, value|
+ next if failing_todo_metrics.include?(key_path.join('.'))
- expect(Rails.cache.fetch('usage_data')).to eq(new_usage_data)
+ expect(value).to(
+ eq(service_ping_payload.dig(*key_path)),
+ message % { query: query, value: (value || 'NULL'), key_path: key_path.join('.') }
+ )
end
end
end
diff --git a/spec/lib/gitlab/usage_counters/pod_logs_spec.rb b/spec/lib/gitlab/usage_counters/pod_logs_spec.rb
new file mode 100644
index 00000000000..1059c519b19
--- /dev/null
+++ b/spec/lib/gitlab/usage_counters/pod_logs_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageCounters::PodLogs, :clean_gitlab_redis_shared_state do
+ it_behaves_like 'a usage counter'
+end
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index 5e74ea3293c..77cf94daa3f 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -50,7 +50,10 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
'importer',
'network_policies',
'geo',
- 'growth'
+ 'growth',
+ 'work_items',
+ 'ci_users',
+ 'error_tracking'
)
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/service_usage_data_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/service_usage_data_counter_spec.rb
new file mode 100644
index 00000000000..ca6df5b260f
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_counters/service_usage_data_counter_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageDataCounters::ServiceUsageDataCounter do
+ it_behaves_like 'a redis usage counter', 'Service Usage Data', :download_payload_click
+end
diff --git a/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
new file mode 100644
index 00000000000..4561d898479
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter, :clean_gitlab_redis_shared_state do
+ let(:user) { build(:user, id: 1) }
+
+ shared_examples 'counter that does not track the event' do
+ it 'does not track the event' do
+ expect { 3.times { track_event } }.to not_change {
+ Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(
+ event_names: event_name,
+ start_date: 2.weeks.ago,
+ end_date: 2.weeks.from_now
+ )
+ }
+ end
+ end
+
+ shared_examples 'work item unique counter' do
+ context 'when track_work_items_activity FF is enabled' do
+ it 'tracks a unique event only once' do
+ expect { 3.times { track_event } }.to change {
+ Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(
+ event_names: event_name,
+ start_date: 2.weeks.ago,
+ end_date: 2.weeks.from_now
+ )
+ }.by(1)
+ end
+
+ context 'when author is nil' do
+ let(:user) { nil }
+
+ it_behaves_like 'counter that does not track the event'
+ end
+ end
+
+ context 'when track_work_items_activity FF is disabled' do
+ before do
+ stub_feature_flags(track_work_items_activity: false)
+ end
+
+ it_behaves_like 'counter that does not track the event'
+ end
+ end
+
+ describe '.track_work_item_created_action' do
+ subject(:track_event) { described_class.track_work_item_created_action(author: user) }
+
+ let(:event_name) { described_class::WORK_ITEM_CREATED }
+
+ it_behaves_like 'work item unique counter'
+ end
+
+ describe '.track_work_item_title_changed_action' do
+ subject(:track_event) { described_class.track_work_item_title_changed_action(author: user) }
+
+ let(:event_name) { described_class::WORK_ITEM_TITLE_CHANGED }
+
+ it_behaves_like 'work item unique counter'
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_queries_spec.rb b/spec/lib/gitlab/usage_data_queries_spec.rb
index a8cf87d9364..c3ac9d7db90 100644
--- a/spec/lib/gitlab/usage_data_queries_spec.rb
+++ b/spec/lib/gitlab/usage_data_queries_spec.rb
@@ -45,6 +45,19 @@ RSpec.describe Gitlab::UsageDataQueries do
end
end
+ describe '.alt_usage_data' do
+ subject(:alt_usage_data) { described_class.alt_usage_data { 42 } }
+
+ it 'returns the value when called with a value' do
+ expect(described_class.alt_usage_data(1))
+ .to eq(alt_usage_data_value: 1)
+ end
+
+ it 'returns a stringified block for alt_usage_data with a block' do
+ expect(alt_usage_data[:alt_usage_data_block]).to start_with('#<Proc:')
+ end
+ end
+
describe '.sum' do
it 'returns the raw SQL' do
expect(described_class.sum(Issue, :weight)).to eq('SELECT SUM("issues"."weight") FROM "issues"')
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index bea07dd9c43..958df7baf72 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -507,6 +507,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
it 'gathers usage counts', :aggregate_failures do
+ stub_feature_flags(merge_service_ping_instrumented_metrics: false)
+
count_data = subject[:counts]
expect(count_data[:boards]).to eq(1)
expect(count_data[:projects]).to eq(4)
@@ -1098,6 +1100,20 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(subject[:settings][:user_cap_feature_enabled]).to eq(Gitlab::CurrentSettings.new_user_signups_cap)
end
+ it 'reports status of the certificate_based_clusters feature flag as true' do
+ expect(subject[:settings][:certificate_based_clusters_ff]).to eq(true)
+ end
+
+ context 'with certificate_based_clusters disabled' do
+ before do
+ stub_feature_flags(certificate_based_clusters: false)
+ end
+
+ it 'reports status of the certificate_based_clusters feature flag as false' do
+ expect(subject[:settings][:certificate_based_clusters_ff]).to eq(false)
+ end
+ end
+
context 'snowplow stats' do
before do
stub_feature_flags(usage_data_instrumentation: false)
diff --git a/spec/lib/gitlab/utils/strong_memoize_spec.rb b/spec/lib/gitlab/utils/strong_memoize_spec.rb
index d9fa2e516e1..5350e090e2b 100644
--- a/spec/lib/gitlab/utils/strong_memoize_spec.rb
+++ b/spec/lib/gitlab/utils/strong_memoize_spec.rb
@@ -48,6 +48,36 @@ RSpec.describe Gitlab::Utils::StrongMemoize do
let(:value) { value }
it_behaves_like 'caching the value'
+
+ it 'raises exception for invalid key' do
+ expect { object.strong_memoize(10) { 20 } }.to raise_error /Invalid type of '10'/
+ end
+ end
+ end
+
+ context "memory allocation", type: :benchmark do
+ let(:value) { 'aaa' }
+
+ before do
+ object.method_name # warmup
+ end
+
+ [:method_name, "method_name"].each do |argument|
+ context "for #{argument.class}" do
+ it 'allocates exactly one string when fetching the value' do
+ expect do
+ object.strong_memoize(argument) { 10 }
+ end.to perform_allocation(1)
+ end
+
+ it 'allocates exactly one string when storing the value' do
+ object.clear_memoization(:method_name) # clear to force set
+
+ expect do
+ object.strong_memoize(argument) { 10 }
+ end.to perform_allocation(1)
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb
index ba6997adbf6..6b12fb4a84a 100644
--- a/spec/lib/gitlab/utils_spec.rb
+++ b/spec/lib/gitlab/utils_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::Utils do
delegate :to_boolean, :boolean_to_yes_no, :slugify, :random_string, :which,
:ensure_array_from_string, :to_exclusive_sentence, :bytes_to_megabytes,
- :append_path, :check_path_traversal!, :allowlisted?, :check_allowed_absolute_path!, :decode_path, :ms_to_round_sec, to: :described_class
+ :append_path, :check_path_traversal!, :allowlisted?, :check_allowed_absolute_path!, :decode_path, :ms_to_round_sec, :check_allowed_absolute_path_and_path_traversal!, to: :described_class
describe '.check_path_traversal!' do
it 'detects path traversal in string without any separators' do
@@ -53,11 +53,80 @@ RSpec.describe Gitlab::Utils do
expect(check_path_traversal!('dir/.foo.rb')).to eq('dir/.foo.rb')
end
+ it 'logs potential path traversal attempts' do
+ expect(Gitlab::AppLogger).to receive(:warn).with(message: "Potential path traversal attempt detected", path: "..")
+ expect { check_path_traversal!('..') }.to raise_error(/Invalid path/)
+ end
+
+ it 'does not log for a safe string' do
+ expect(Gitlab::AppLogger).not_to receive(:warn).with(message: "Potential path traversal attempt detected", path: "dir/.foo.rb")
+ expect(check_path_traversal!('dir/.foo.rb')).to eq('dir/.foo.rb')
+ end
+
it 'does nothing for a non-string' do
expect(check_path_traversal!(nil)).to be_nil
end
end
+ describe '.check_allowed_absolute_path_and_path_traversal!' do
+ let(:allowed_paths) { %w[/home/foo ./foo .test/foo ..test/foo dir/..foo.rb dir/.foo.rb] }
+
+ it 'detects path traversal in string without any separators' do
+ expect { check_allowed_absolute_path_and_path_traversal!('.', allowed_paths) }.to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('..', allowed_paths) }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the start of the string' do
+ expect { check_allowed_absolute_path_and_path_traversal!('../foo', allowed_paths) }.to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('..\\foo', allowed_paths) }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the start of the string, even to just the subdirectory' do
+ expect { check_allowed_absolute_path_and_path_traversal!('../', allowed_paths) }.to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('..\\', allowed_paths) }.to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('/../', allowed_paths) }.to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('\\..\\', allowed_paths) }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal in the middle of the string' do
+ expect { check_allowed_absolute_path_and_path_traversal!('foo/../../bar', allowed_paths) }.to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo\\..\\..\\bar', allowed_paths) }.to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo/..\\bar', allowed_paths) }.to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo\\../bar', allowed_paths) }.to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo/..\\..\\..\\..\\../bar', allowed_paths) }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the end of the string when slash-terminates' do
+ expect { check_allowed_absolute_path_and_path_traversal!('foo/../', allowed_paths) }.to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo\\..\\', allowed_paths) }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the end of the string' do
+ expect { check_allowed_absolute_path_and_path_traversal!('foo/..', allowed_paths) }.to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo\\..', allowed_paths) }.to raise_error(/Invalid path/)
+ end
+
+ it 'does not return errors for a safe string' do
+ expect(check_allowed_absolute_path_and_path_traversal!('./foo', allowed_paths)).to be_nil
+ expect(check_allowed_absolute_path_and_path_traversal!('.test/foo', allowed_paths)).to be_nil
+ expect(check_allowed_absolute_path_and_path_traversal!('..test/foo', allowed_paths)).to be_nil
+ expect(check_allowed_absolute_path_and_path_traversal!('dir/..foo.rb', allowed_paths)).to be_nil
+ expect(check_allowed_absolute_path_and_path_traversal!('dir/.foo.rb', allowed_paths)).to be_nil
+ end
+
+ it 'raises error for a non-string' do
+ expect { check_allowed_absolute_path_and_path_traversal!(nil, allowed_paths) }.to raise_error(StandardError)
+ end
+
+ it 'raises an exception if an absolute path is not allowed' do
+ expect { check_allowed_absolute_path!('/etc/passwd', allowed_paths) }.to raise_error(StandardError)
+ end
+
+ it 'does nothing for an allowed absolute path' do
+ expect(check_allowed_absolute_path!('/home/foo', allowed_paths)).to be_nil
+ end
+ end
+
describe '.allowlisted?' do
let(:allowed_paths) { ['/home/foo', '/foo/bar', '/etc/passwd']}
diff --git a/spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb b/spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb
index 3152dc2ad2f..c0629c8d795 100644
--- a/spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb
+++ b/spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb
@@ -3,11 +3,10 @@
require 'spec_helper'
RSpec.describe Gitlab::WikiPages::FrontMatterParser do
- subject(:parser) { described_class.new(raw_content, gate) }
+ subject(:parser) { described_class.new(raw_content) }
let(:content) { 'This is the content' }
let(:end_divider) { '---' }
- let(:gate) { stub_feature_flag_gate('Gate') }
let(:with_front_matter) do
<<~MD
@@ -62,32 +61,6 @@ RSpec.describe Gitlab::WikiPages::FrontMatterParser do
it { is_expected.to have_attributes(reason: :no_match) }
end
- context 'the feature flag is disabled' do
- let(:raw_content) { with_front_matter }
-
- before do
- stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => false)
- end
-
- it { is_expected.to have_attributes(front_matter: be_empty, content: raw_content) }
- end
-
- context 'the feature flag is enabled for the gated object' do
- let(:raw_content) { with_front_matter }
-
- before do
- stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => gate)
- end
-
- it do
- is_expected.to have_attributes(
- front_matter: have_correct_front_matter,
- content: content + "\n",
- reason: be_nil
- )
- end
- end
-
context 'the end divider is ...' do
let(:end_divider) { '...' }
let(:raw_content) { with_front_matter }
diff --git a/spec/lib/gitlab_spec.rb b/spec/lib/gitlab_spec.rb
index 57a4bdc9bb5..c44bb64a5c0 100644
--- a/spec/lib/gitlab_spec.rb
+++ b/spec/lib/gitlab_spec.rb
@@ -80,34 +80,53 @@ RSpec.describe Gitlab do
end
describe '.com?' do
- it "is true when on #{Gitlab::Saas.com_url}" do
- stub_config_setting(url: Gitlab::Saas.com_url)
+ context 'when not simulating SaaS' do
+ before do
+ stub_env('GITLAB_SIMULATE_SAAS', '0')
+ end
- expect(described_class.com?).to eq true
- end
+ it "is true when on #{Gitlab::Saas.com_url}" do
+ stub_config_setting(url: Gitlab::Saas.com_url)
- it "is true when on #{Gitlab::Saas.staging_com_url}" do
- stub_config_setting(url: Gitlab::Saas.staging_com_url)
+ expect(described_class.com?).to eq true
+ end
- expect(described_class.com?).to eq true
- end
+ it "is true when on #{Gitlab::Saas.staging_com_url}" do
+ stub_config_setting(url: Gitlab::Saas.staging_com_url)
- it 'is true when on other gitlab subdomain' do
- url_with_subdomain = Gitlab::Saas.com_url.gsub('https://', 'https://example.')
- stub_config_setting(url: url_with_subdomain)
+ expect(described_class.com?).to eq true
+ end
- expect(described_class.com?).to eq true
+ it 'is true when on other gitlab subdomain' do
+ url_with_subdomain = Gitlab::Saas.com_url.gsub('https://', 'https://example.')
+ stub_config_setting(url: url_with_subdomain)
+
+ expect(described_class.com?).to eq true
+ end
+
+ it 'is true when on other gitlab subdomain with hyphen' do
+ url_with_subdomain = Gitlab::Saas.com_url.gsub('https://', 'https://test-example.')
+ stub_config_setting(url: url_with_subdomain)
+
+ expect(described_class.com?).to eq true
+ end
+
+ it 'is false when not on GitLab.com' do
+ stub_config_setting(url: 'http://example.com')
+
+ expect(described_class.com?).to eq false
+ end
end
- it 'is true when on other gitlab subdomain with hyphen' do
- url_with_subdomain = Gitlab::Saas.com_url.gsub('https://', 'https://test-example.')
- stub_config_setting(url: url_with_subdomain)
+ it 'is true when GITLAB_SIMULATE_SAAS is true and in development' do
+ stub_rails_env('development')
+ stub_env('GITLAB_SIMULATE_SAAS', '1')
expect(described_class.com?).to eq true
end
- it 'is false when not on GitLab.com' do
- stub_config_setting(url: 'http://example.com')
+ it 'is false when GITLAB_SIMULATE_SAAS is true and in test' do
+ stub_env('GITLAB_SIMULATE_SAAS', '1')
expect(described_class.com?).to eq false
end
@@ -197,51 +216,71 @@ RSpec.describe Gitlab do
end
end
- describe '.dev_env_org_or_com?' do
+ describe '.org_or_com?' do
it 'is true when on .com' do
allow(described_class).to receive_messages(com?: true, org?: false)
- expect(described_class.dev_env_org_or_com?).to eq true
+ expect(described_class.org_or_com?).to eq true
end
it 'is true when org' do
allow(described_class).to receive_messages(com?: false, org?: true)
- expect(described_class.dev_env_org_or_com?).to eq true
- end
-
- it 'is true when dev env' do
- allow(described_class).to receive_messages(com?: false, org?: false)
- stub_rails_env('development')
-
- expect(described_class.dev_env_org_or_com?).to eq true
+ expect(described_class.org_or_com?).to eq true
end
it 'is false when not dev, org or com' do
allow(described_class).to receive_messages(com?: false, org?: false)
- expect(described_class.dev_env_org_or_com?).to eq false
+ expect(described_class.org_or_com?).to eq false
end
end
- describe '.dev_env_or_com?' do
- it 'is true when on .com' do
- allow(described_class).to receive(:com?).and_return(true)
+ describe '.simulate_com?' do
+ subject { described_class.simulate_com? }
- expect(described_class.dev_env_or_com?).to eq true
- end
+ context 'when GITLAB_SIMULATE_SAAS is true' do
+ before do
+ stub_env('GITLAB_SIMULATE_SAAS', '1')
+ end
- it 'is true when dev env' do
- allow(described_class).to receive(:com?).and_return(false)
- allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('development'))
+ it 'is false when test env' do
+ expect(subject).to eq false
+ end
+
+ it 'is true when dev env' do
+ stub_rails_env('development')
+
+ expect(subject).to eq true
+ end
+
+ it 'is false when env is not dev' do
+ stub_rails_env('production')
- expect(described_class.dev_env_or_com?).to eq true
+ expect(subject).to eq false
+ end
end
- it 'is false when not dev or com' do
- allow(described_class).to receive(:com?).and_return(false)
+ context 'when GITLAB_SIMULATE_SAAS is false' do
+ before do
+ stub_env('GITLAB_SIMULATE_SAAS', '0')
+ end
+
+ it 'is false when test env' do
+ expect(subject).to eq false
+ end
+
+ it 'is false when dev env' do
+ stub_rails_env('development')
+
+ expect(subject).to eq false
+ end
+
+ it 'is false when env is not dev or test' do
+ stub_rails_env('production')
- expect(described_class.dev_env_or_com?).to eq false
+ expect(subject).to eq false
+ end
end
end
diff --git a/spec/lib/google_api/cloud_platform/client_spec.rb b/spec/lib/google_api/cloud_platform/client_spec.rb
index 29e5445cfaa..a81ed38382b 100644
--- a/spec/lib/google_api/cloud_platform/client_spec.rb
+++ b/spec/lib/google_api/cloud_platform/client_spec.rb
@@ -334,4 +334,22 @@ RSpec.describe GoogleApi::CloudPlatform::Client do
is_expected.to eq(operation)
end
end
+
+ describe '#revoke_authorizations' do
+ subject { client.revoke_authorizations }
+
+ it 'calls the revoke endpoint' do
+ stub_request(:post, "https://oauth2.googleapis.com/revoke")
+ .with(
+ body: "token=token",
+ headers: {
+ 'Accept' => '*/*',
+ 'Accept-Encoding' => 'gzip;q=1.0,deflate;q=0.6,identity;q=0.3',
+ 'User-Agent' => 'Ruby'
+ })
+ .to_return(status: 200, body: "", headers: {})
+
+ subject # trigger the call so the stubbed revoke endpoint is actually exercised
+ end
+ end
end
diff --git a/spec/lib/learn_gitlab/onboarding_spec.rb b/spec/lib/learn_gitlab/onboarding_spec.rb
index 6b4be65f3b2..8c7284ed7f5 100644
--- a/spec/lib/learn_gitlab/onboarding_spec.rb
+++ b/spec/lib/learn_gitlab/onboarding_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe LearnGitlab::Onboarding do
let(:namespace) { build(:namespace) }
let_it_be(:tracked_action_columns) do
- tracked_actions = described_class::ACTION_ISSUE_IDS.keys + described_class::ACTION_DOC_URLS.keys
+ tracked_actions = described_class::ACTION_ISSUE_IDS.keys + described_class::ACTION_PATHS
tracked_actions.map { |key| OnboardingProgress.column_name(key) }
end
diff --git a/spec/lib/learn_gitlab/project_spec.rb b/spec/lib/learn_gitlab/project_spec.rb
index 5d649740c65..23784709817 100644
--- a/spec/lib/learn_gitlab/project_spec.rb
+++ b/spec/lib/learn_gitlab/project_spec.rb
@@ -5,7 +5,6 @@ require 'spec_helper'
RSpec.describe LearnGitlab::Project do
let_it_be(:current_user) { create(:user) }
let_it_be(:learn_gitlab_project) { create(:project, name: LearnGitlab::Project::PROJECT_NAME) }
- let_it_be(:learn_gitlab_ultimate_trial_project) { create(:project, name: LearnGitlab::Project::PROJECT_NAME_ULTIMATE_TRIAL) }
let_it_be(:learn_gitlab_board) { create(:board, project: learn_gitlab_project, name: LearnGitlab::Project::BOARD_NAME) }
let_it_be(:learn_gitlab_label) { create(:label, project: learn_gitlab_project, name: LearnGitlab::Project::LABEL_NAME) }
@@ -48,7 +47,7 @@ RSpec.describe LearnGitlab::Project do
it { is_expected.to eq learn_gitlab_project }
context 'when it is created during trial signup' do
- let_it_be(:learn_gitlab_project) { create(:project, name: LearnGitlab::Project::PROJECT_NAME_ULTIMATE_TRIAL) }
+ let_it_be(:learn_gitlab_project) { create(:project, name: LearnGitlab::Project::PROJECT_NAME_ULTIMATE_TRIAL, path: 'learn-gitlab-ultimate-trial') }
it { is_expected.to eq learn_gitlab_project }
end
diff --git a/spec/lib/peek/views/active_record_spec.rb b/spec/lib/peek/views/active_record_spec.rb
index c89f6a21b35..7bc15f40065 100644
--- a/spec/lib/peek/views/active_record_spec.rb
+++ b/spec/lib/peek/views/active_record_spec.rb
@@ -119,16 +119,4 @@ RSpec.describe Peek::Views::ActiveRecord, :request_store do
)
)
end
-
- context 'when the GITLAB_MULTIPLE_DATABASE_METRICS env var is disabled' do
- before do
- stub_env('GITLAB_MULTIPLE_DATABASE_METRICS', nil)
- end
-
- it 'does not include db_config_name field' do
- ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 1.second, '1', event_1)
-
- expect(subject.results[:details][0][:db_config_name]).to be_nil
- end
- end
end
diff --git a/spec/lib/security/ci_configuration/sast_build_action_spec.rb b/spec/lib/security/ci_configuration/sast_build_action_spec.rb
index 6f702e51b73..efb8b0b9984 100644
--- a/spec/lib/security/ci_configuration/sast_build_action_spec.rb
+++ b/spec/lib/security/ci_configuration/sast_build_action_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
let(:default_sast_values) do
{ 'global' =>
[
- { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/gitlab-org/security-products/analyzers', 'value' => 'registry.gitlab.com/gitlab-org/security-products/analyzers' }
+ { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/security-products', 'value' => 'registry.gitlab.com/security-products' }
],
'pipeline' =>
[
@@ -19,7 +19,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
let(:params) do
{ 'global' =>
[
- { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/gitlab-org/security-products/analyzers', 'value' => 'new_registry' }
+ { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/security-products', 'value' => 'new_registry' }
],
'pipeline' =>
[
@@ -164,7 +164,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
let(:params) do
{ 'global' =>
[
- { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/gitlab-org/security-products/analyzers', 'value' => 'registry.gitlab.com/gitlab-org/security-products/analyzers' }
+ { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/security-products', 'value' => 'registry.gitlab.com/security-products' }
],
'pipeline' =>
[
@@ -275,7 +275,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
let(:params) do
{ 'global' =>
[
- { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/gitlab-org/security-products/analyzers', 'value' => '' }
+ { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/security-products', 'value' => '' }
] }
end
diff --git a/spec/lib/security/ci_configuration/sast_iac_build_action_spec.rb b/spec/lib/security/ci_configuration/sast_iac_build_action_spec.rb
index 4c459058368..7b2a0d22918 100644
--- a/spec/lib/security/ci_configuration/sast_iac_build_action_spec.rb
+++ b/spec/lib/security/ci_configuration/sast_iac_build_action_spec.rb
@@ -7,12 +7,13 @@ RSpec.describe Security::CiConfiguration::SastIacBuildAction do
let(:params) { {} }
- context 'with existing .gitlab-ci.yml' do
- let(:auto_devops_enabled) { false }
+ shared_examples 'existing .gitlab-ci.yml tests' do
+ context 'with existing .gitlab-ci.yml' do
+ let(:auto_devops_enabled) { false }
- context 'sast iac has not been included' do
- let(:expected_yml) do
- <<-CI_YML.strip_heredoc
+ context 'sast iac has not been included' do
+ let(:expected_yml) do
+ <<-CI_YML.strip_heredoc
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
@@ -28,39 +29,39 @@ RSpec.describe Security::CiConfiguration::SastIacBuildAction do
include:
- template: existing.yml
- template: Security/SAST-IaC.latest.gitlab-ci.yml
- CI_YML
- end
-
- context 'template includes are an array' do
- let(:gitlab_ci_content) do
- { "stages" => %w(test security),
- "variables" => { "RANDOM" => "make sure this persists" },
- "include" => [{ "template" => "existing.yml" }] }
+ CI_YML
end
- it 'generates the correct YML' do
- expect(result[:action]).to eq('update')
- expect(result[:content]).to eq(expected_yml)
- end
- end
-
- context 'template include is not an array' do
- let(:gitlab_ci_content) do
- { "stages" => %w(test security),
- "variables" => { "RANDOM" => "make sure this persists" },
- "include" => { "template" => "existing.yml" } }
+ context 'template includes are an array' do
+ let(:gitlab_ci_content) do
+ { "stages" => %w(test security),
+ "variables" => { "RANDOM" => "make sure this persists" },
+ "include" => [{ "template" => "existing.yml" }] }
+ end
+
+ it 'generates the correct YML' do
+ expect(result[:action]).to eq('update')
+ expect(result[:content]).to eq(expected_yml)
+ end
end
- it 'generates the correct YML' do
- expect(result[:action]).to eq('update')
- expect(result[:content]).to eq(expected_yml)
+ context 'template include is not an array' do
+ let(:gitlab_ci_content) do
+ { "stages" => %w(test security),
+ "variables" => { "RANDOM" => "make sure this persists" },
+ "include" => { "template" => "existing.yml" } }
+ end
+
+ it 'generates the correct YML' do
+ expect(result[:action]).to eq('update')
+ expect(result[:content]).to eq(expected_yml)
+ end
end
end
- end
- context 'secret_detection has been included' do
- let(:expected_yml) do
- <<-CI_YML.strip_heredoc
+ context 'secret_detection has been included' do
+ let(:expected_yml) do
+ <<-CI_YML.strip_heredoc
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
@@ -74,37 +75,50 @@ RSpec.describe Security::CiConfiguration::SastIacBuildAction do
RANDOM: make sure this persists
include:
- template: Security/SAST-IaC.latest.gitlab-ci.yml
- CI_YML
- end
-
- context 'secret_detection template include are an array' do
- let(:gitlab_ci_content) do
- { "stages" => %w(test),
- "variables" => { "RANDOM" => "make sure this persists" },
- "include" => [{ "template" => "Security/SAST-IaC.latest.gitlab-ci.yml" }] }
+ CI_YML
end
- it 'generates the correct YML' do
- expect(result[:action]).to eq('update')
- expect(result[:content]).to eq(expected_yml)
- end
- end
-
- context 'secret_detection template include is not an array' do
- let(:gitlab_ci_content) do
- { "stages" => %w(test),
- "variables" => { "RANDOM" => "make sure this persists" },
- "include" => { "template" => "Security/SAST-IaC.latest.gitlab-ci.yml" } }
+ context 'secret_detection template include are an array' do
+ let(:gitlab_ci_content) do
+ { "stages" => %w(test),
+ "variables" => { "RANDOM" => "make sure this persists" },
+ "include" => [{ "template" => "Security/SAST-IaC.latest.gitlab-ci.yml" }] }
+ end
+
+ it 'generates the correct YML' do
+ expect(result[:action]).to eq('update')
+ expect(result[:content]).to eq(expected_yml)
+ end
end
- it 'generates the correct YML' do
- expect(result[:action]).to eq('update')
- expect(result[:content]).to eq(expected_yml)
+ context 'secret_detection template include is not an array' do
+ let(:gitlab_ci_content) do
+ { "stages" => %w(test),
+ "variables" => { "RANDOM" => "make sure this persists" },
+ "include" => { "template" => "Security/SAST-IaC.latest.gitlab-ci.yml" } }
+ end
+
+ it 'generates the correct YML' do
+ expect(result[:action]).to eq('update')
+ expect(result[:content]).to eq(expected_yml)
+ end
end
end
end
end
+ context 'with existing .gitlab-ci.yml and when the ci config file configuration was not set' do
+ subject(:result) { described_class.new(auto_devops_enabled, gitlab_ci_content).generate }
+
+ it_behaves_like 'existing .gitlab-ci.yml tests'
+ end
+
+ context 'with existing .gitlab-ci.yml and when the ci config file configuration was deleted' do
+ subject(:result) { described_class.new(auto_devops_enabled, gitlab_ci_content, ci_config_path: '').generate }
+
+ it_behaves_like 'existing .gitlab-ci.yml tests'
+ end
+
context 'with no .gitlab-ci.yml' do
let(:gitlab_ci_content) { nil }
diff --git a/spec/lib/serializers/unsafe_json_spec.rb b/spec/lib/serializers/unsafe_json_spec.rb
new file mode 100644
index 00000000000..9bf04f8f4aa
--- /dev/null
+++ b/spec/lib/serializers/unsafe_json_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'oj'
+
+RSpec.describe Serializers::UnsafeJson do
+ let(:result) { double(:result) }
+
+ describe '.dump' do
+ let(:obj) { { key: "value" } }
+
+ it 'calls object#to_json with unsafe: true and returns the result' do
+ expect(obj).to receive(:to_json).with(unsafe: true).and_return(result)
+ expect(described_class.dump(obj)).to eq(result)
+ end
+ end
+
+ describe '.load' do
+ let(:data_string) { '{"key":"value","variables":[{"key":"VAR1","value":"VALUE1"}]}' }
+ let(:data_hash) { Gitlab::Json.parse(data_string) }
+
+ it 'calls JSON.load and returns the result' do
+ expect(JSON).to receive(:load).with(data_hash).and_return(result)
+ expect(described_class.load(data_hash)).to eq(result)
+ end
+ end
+end
diff --git a/spec/lib/sidebars/concerns/work_item_hierarchy_spec.rb b/spec/lib/sidebars/concerns/work_item_hierarchy_spec.rb
deleted file mode 100644
index 2120341bf23..00000000000
--- a/spec/lib/sidebars/concerns/work_item_hierarchy_spec.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Sidebars::Concerns::WorkItemHierarchy do
- shared_examples 'hierarchy menu' do
- let(:item_id) { :hierarchy }
- specify { is_expected.not_to be_nil }
- end
-
- describe 'Project hierarchy menu item' do
- let_it_be_with_reload(:project) { create(:project, :repository) }
-
- let(:user) { project.owner }
- let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
-
- subject { Sidebars::Projects::Menus::ProjectInformationMenu.new(context).renderable_items.index { |e| e.item_id == item_id } }
-
- it_behaves_like 'hierarchy menu'
- end
-end
diff --git a/spec/lib/sidebars/groups/menus/kubernetes_menu_spec.rb b/spec/lib/sidebars/groups/menus/kubernetes_menu_spec.rb
index 76e58367c9d..36d5b3376b7 100644
--- a/spec/lib/sidebars/groups/menus/kubernetes_menu_spec.rb
+++ b/spec/lib/sidebars/groups/menus/kubernetes_menu_spec.rb
@@ -28,5 +28,15 @@ RSpec.describe Sidebars::Groups::Menus::KubernetesMenu do
expect(menu.render?).to eq false
end
end
+
+ context ':certificate_based_clusters feature flag is disabled' do
+ before do
+ stub_feature_flags(certificate_based_clusters: false)
+ end
+
+ it 'returns false' do
+ expect(menu.render?).to eq false
+ end
+ end
end
end
diff --git a/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb b/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb
index bc1fa3e88ff..d3cb18222b5 100644
--- a/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb
+++ b/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb
@@ -23,6 +23,7 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do
context 'when menu does not have any menu item to show' do
it 'returns false' do
+ stub_feature_flags(harbor_registry_integration: false)
stub_container_registry_config(enabled: false)
stub_config(packages: { enabled: false })
stub_config(dependency_proxy: { enabled: false })
@@ -35,11 +36,13 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do
describe '#link' do
let(:registry_enabled) { true }
let(:packages_enabled) { true }
+ let(:harbor_registry_integration) { true }
before do
stub_container_registry_config(enabled: registry_enabled)
stub_config(packages: { enabled: packages_enabled })
stub_config(dependency_proxy: { enabled: true })
+ stub_feature_flags(harbor_registry_integration: harbor_registry_integration)
end
subject { menu.link }
@@ -60,8 +63,16 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do
context 'when Container Registry is not visible' do
let(:registry_enabled) { false }
- it 'menu link points to Dependency Proxy page' do
- expect(subject).to eq find_menu(menu, :dependency_proxy).link
+ it 'menu link points to Harbor Registry page' do
+ expect(subject).to eq find_menu(menu, :harbor_registry).link
+ end
+
+ context 'when Harbor Registry is not visible' do
+ let(:harbor_registry_integration) { false }
+
+ it 'menu link points to Dependency Proxy page' do
+ expect(subject).to eq find_menu(menu, :dependency_proxy).link
+ end
end
end
end
@@ -175,6 +186,26 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do
it_behaves_like 'the menu entry is not available'
end
end
+
+ describe 'Harbor Registry' do
+ let(:item_id) { :harbor_registry }
+
+ before do
+ stub_feature_flags(harbor_registry_integration: harbor_registry_enabled)
+ end
+
+ context 'when the harbor_registry_integration feature flag is disabled' do
+ let(:harbor_registry_enabled) { false }
+
+ it_behaves_like 'the menu entry is not available'
+ end
+
+ context 'when the harbor_registry_integration feature flag is enabled' do
+ let(:harbor_registry_enabled) { true }
+
+ it_behaves_like 'the menu entry is available'
+ end
+ end
end
private
diff --git a/spec/lib/sidebars/groups/menus/settings_menu_spec.rb b/spec/lib/sidebars/groups/menus/settings_menu_spec.rb
index 252da8ea699..71b696516b6 100644
--- a/spec/lib/sidebars/groups/menus/settings_menu_spec.rb
+++ b/spec/lib/sidebars/groups/menus/settings_menu_spec.rb
@@ -72,6 +72,18 @@ RSpec.describe Sidebars::Groups::Menus::SettingsMenu do
let(:item_id) { :ci_cd }
it_behaves_like 'access rights checks'
+
+ describe 'when runner list group view is disabled' do
+ before do
+ stub_feature_flags(runner_list_group_view_vue_ui: false)
+ end
+
+ it_behaves_like 'access rights checks'
+
+ it 'has group runners as active_routes' do
+ expect(subject.active_routes[:path]).to match_array %w[ci_cd#show groups/runners#show groups/runners#edit]
+ end
+ end
end
describe 'Applications menu' do
diff --git a/spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb b/spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb
index 0e415ec6014..8a6b0e4e95d 100644
--- a/spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb
@@ -92,6 +92,14 @@ RSpec.describe Sidebars::Projects::Menus::InfrastructureMenu do
let(:item_id) { :serverless }
it_behaves_like 'access rights checks'
+
+ context 'when feature :deprecated_serverless is disabled' do
+ before do
+ stub_feature_flags(deprecated_serverless: false)
+ end
+
+ it { is_expected.to be_nil }
+ end
end
describe 'Terraform' do
diff --git a/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
index afe0b2a8951..9b78fc807bf 100644
--- a/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
@@ -33,6 +33,7 @@ RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu do
before do
stub_container_registry_config(enabled: registry_enabled)
stub_config(packages: { enabled: packages_enabled })
+ stub_feature_flags(harbor_registry_integration: false)
end
context 'when Packages Registry is visible' do
@@ -144,5 +145,25 @@ RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu do
end
end
end
+
+ describe 'Harbor Registry' do
+ let(:item_id) { :harbor_registry }
+
+ context 'when the harbor_registry_integration feature flag is disabled' do
+ it 'does not add the menu item to the list' do
+ stub_feature_flags(harbor_registry_integration: false)
+
+ is_expected.to be_nil
+ end
+ end
+
+ context 'when the harbor_registry_integration feature flag is enabled' do
+ it 'adds the menu item to the list of menu items' do
+ stub_feature_flags(harbor_registry_integration: true)
+
+ is_expected.not_to be_nil
+ end
+ end
+ end
end
end
diff --git a/spec/lib/sidebars/projects/menus/project_information_menu_spec.rb b/spec/lib/sidebars/projects/menus/project_information_menu_spec.rb
index 24625413ded..7ff06ac229e 100644
--- a/spec/lib/sidebars/projects/menus/project_information_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/project_information_menu_spec.rb
@@ -59,11 +59,5 @@ RSpec.describe Sidebars::Projects::Menus::ProjectInformationMenu do
specify { is_expected.to be_nil }
end
end
-
- describe 'Hierarchy' do
- let(:item_id) { :hierarchy }
-
- specify { is_expected.not_to be_nil }
- end
end
end