
gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2021-03-16 21:18:33 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2021-03-16 21:18:33 +0300
commit    f64a639bcfa1fc2bc89ca7db268f594306edfd7c (patch)
tree      a2c3c2ebcc3b45e596949db485d6ed18ffaacfa1 /spec/lib
parent    bfbc3e0d6583ea1a91f627528bedc3d65ba4b10f (diff)
Add latest changes from gitlab-org/gitlab@13-10-stable-ee (tag: v13.10.0-rc40)
Diffstat (limited to 'spec/lib')
-rw-r--r--  spec/lib/api/entities/plan_limit_spec.rb  24
-rw-r--r--  spec/lib/api/entities/projects/repository_storage_move_spec.rb (renamed from spec/lib/api/entities/project_repository_storage_move_spec.rb)  2
-rw-r--r--  spec/lib/api/entities/public_group_details_spec.rb  24
-rw-r--r--  spec/lib/api/entities/snippets/repository_storage_move_spec.rb (renamed from spec/lib/api/entities/snippet_repository_storage_move_spec.rb)  2
-rw-r--r--  spec/lib/backup/repositories_spec.rb  10
-rw-r--r--  spec/lib/banzai/filter/custom_emoji_filter_spec.rb  29
-rw-r--r--  spec/lib/banzai/filter/emoji_filter_spec.rb  27
-rw-r--r--  spec/lib/banzai/filter/gollum_tags_filter_spec.rb  4
-rw-r--r--  spec/lib/banzai/filter/sanitization_filter_spec.rb  28
-rw-r--r--  spec/lib/banzai/filter/video_link_filter_spec.rb  1
-rw-r--r--  spec/lib/banzai/pipeline/full_pipeline_spec.rb  7
-rw-r--r--  spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb  80
-rw-r--r--  spec/lib/bulk_imports/common/loaders/entity_loader_spec.rb  30
-rw-r--r--  spec/lib/bulk_imports/common/transformers/prohibited_attributes_transformer_spec.rb  6
-rw-r--r--  spec/lib/bulk_imports/common/transformers/user_reference_transformer_spec.rb (renamed from spec/lib/bulk_imports/common/transformers/award_emoji_transformer_spec.rb)  25
-rw-r--r--  spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb  21
-rw-r--r--  spec/lib/bulk_imports/groups/graphql/get_milestones_query_spec.rb  35
-rw-r--r--  spec/lib/bulk_imports/groups/loaders/labels_loader_spec.rb  30
-rw-r--r--  spec/lib/bulk_imports/groups/loaders/members_loader_spec.rb  42
-rw-r--r--  spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb  41
-rw-r--r--  spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb  32
-rw-r--r--  spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb  151
-rw-r--r--  spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb  40
-rw-r--r--  spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb  4
-rw-r--r--  spec/lib/bulk_imports/importers/group_importer_spec.rb  3
-rw-r--r--  spec/lib/bulk_imports/pipeline/runner_spec.rb  162
-rw-r--r--  spec/lib/bulk_imports/pipeline_spec.rb  114
-rw-r--r--  spec/lib/error_tracking/sentry_client/api_urls_spec.rb (renamed from spec/lib/sentry/api_urls_spec.rb)  4
-rw-r--r--  spec/lib/error_tracking/sentry_client/event_spec.rb (renamed from spec/lib/sentry/client/event_spec.rb)  2
-rw-r--r--  spec/lib/error_tracking/sentry_client/issue_link_spec.rb (renamed from spec/lib/sentry/client/issue_link_spec.rb)  2
-rw-r--r--  spec/lib/error_tracking/sentry_client/issue_spec.rb (renamed from spec/lib/sentry/client/issue_spec.rb)  10
-rw-r--r--  spec/lib/error_tracking/sentry_client/pagination_parser_spec.rb (renamed from spec/lib/sentry/pagination_parser_spec.rb)  2
-rw-r--r--  spec/lib/error_tracking/sentry_client/projects_spec.rb (renamed from spec/lib/sentry/client/projects_spec.rb)  6
-rw-r--r--  spec/lib/error_tracking/sentry_client/repo_spec.rb (renamed from spec/lib/sentry/client/repo_spec.rb)  4
-rw-r--r--  spec/lib/error_tracking/sentry_client_spec.rb (renamed from spec/lib/sentry/client_spec.rb)  4
-rw-r--r--  spec/lib/expand_variables_spec.rb  7
-rw-r--r--  spec/lib/feature_spec.rb  2
-rw-r--r--  spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb  75
-rw-r--r--  spec/lib/gitlab/alert_management/payload/generic_spec.rb  2
-rw-r--r--  spec/lib/gitlab/analytics/cycle_analytics/average_spec.rb  66
-rw-r--r--  spec/lib/gitlab/analytics/cycle_analytics/sorting_spec.rb  58
-rw-r--r--  spec/lib/gitlab/analytics/usage_trends/workers_argument_builder_spec.rb (renamed from spec/lib/gitlab/analytics/instance_statistics/workers_argument_builder_spec.rb)  12
-rw-r--r--  spec/lib/gitlab/application_context_spec.rb  30
-rw-r--r--  spec/lib/gitlab/auth/o_auth/user_spec.rb  17
-rw-r--r--  spec/lib/gitlab/avatar_cache_spec.rb  101
-rw-r--r--  spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb  45
-rw-r--r--  spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb  29
-rw-r--r--  spec/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check_spec.rb  99
-rw-r--r--  spec/lib/gitlab/background_migration/migrate_legacy_artifacts_spec.rb  2
-rw-r--r--  spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb  86
-rw-r--r--  spec/lib/gitlab/background_migration/populate_finding_uuid_for_vulnerability_feedback_spec.rb  33
-rw-r--r--  spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb  149
-rw-r--r--  spec/lib/gitlab/background_migration/set_default_iteration_cadences_spec.rb  80
-rw-r--r--  spec/lib/gitlab/checks/branch_check_spec.rb  76
-rw-r--r--  spec/lib/gitlab/checks/lfs_check_spec.rb  23
-rw-r--r--  spec/lib/gitlab/ci/artifacts/metrics_spec.rb  22
-rw-r--r--  spec/lib/gitlab/ci/build/cache_spec.rb  105
-rw-r--r--  spec/lib/gitlab/ci/build/context/build_spec.rb  4
-rw-r--r--  spec/lib/gitlab/ci/build/context/global_spec.rb  4
-rw-r--r--  spec/lib/gitlab/ci/build/policy/variables_spec.rb  4
-rw-r--r--  spec/lib/gitlab/ci/build/rules/rule_spec.rb  2
-rw-r--r--  spec/lib/gitlab/ci/build/rules_spec.rb  2
-rw-r--r--  spec/lib/gitlab/ci/charts_spec.rb  7
-rw-r--r--  spec/lib/gitlab/ci/config/entry/bridge_spec.rb  46
-rw-r--r--  spec/lib/gitlab/ci/config/entry/cache_spec.rb  344
-rw-r--r--  spec/lib/gitlab/ci/config/entry/environment_spec.rb  33
-rw-r--r--  spec/lib/gitlab/ci/config/entry/job_spec.rb  40
-rw-r--r--  spec/lib/gitlab/ci/config/entry/need_spec.rb  86
-rw-r--r--  spec/lib/gitlab/ci/config/entry/needs_spec.rb  16
-rw-r--r--  spec/lib/gitlab/ci/config/entry/product/parallel_spec.rb  34
-rw-r--r--  spec/lib/gitlab/ci/config/entry/root_spec.rb  229
-rw-r--r--  spec/lib/gitlab/ci/jwt_spec.rb  11
-rw-r--r--  spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb  17
-rw-r--r--  spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb  287
-rw-r--r--  spec/lib/gitlab/ci/pipeline/seed/build_spec.rb  282
-rw-r--r--  spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb  49
-rw-r--r--  spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb  56
-rw-r--r--  spec/lib/gitlab/ci/reports/reports_comparer_spec.rb  34
-rw-r--r--  spec/lib/gitlab/ci/reports/test_suite_summary_spec.rb  34
-rw-r--r--  spec/lib/gitlab/ci/status/composite_spec.rb  21
-rw-r--r--  spec/lib/gitlab/ci/status/factory_spec.rb  10
-rw-r--r--  spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb  372
-rw-r--r--  spec/lib/gitlab/ci/trace_spec.rb  2
-rw-r--r--  spec/lib/gitlab/ci/variables/collection/item_spec.rb  104
-rw-r--r--  spec/lib/gitlab/ci/variables/collection/sort_spec.rb  185
-rw-r--r--  spec/lib/gitlab/ci/variables/collection/sorted_spec.rb  259
-rw-r--r--  spec/lib/gitlab/ci/variables/collection_spec.rb  386
-rw-r--r--  spec/lib/gitlab/ci/yaml_processor_spec.rb  263
-rw-r--r--  spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb  33
-rw-r--r--  spec/lib/gitlab/data_builder/build_spec.rb  8
-rw-r--r--  spec/lib/gitlab/data_builder/pipeline_spec.rb  8
-rw-r--r--  spec/lib/gitlab/database/background_migration/batched_job_spec.rb  50
-rw-r--r--  spec/lib/gitlab/database/background_migration/batched_migration_spec.rb  160
-rw-r--r--  spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb  70
-rw-r--r--  spec/lib/gitlab/database/background_migration/scheduler_spec.rb  182
-rw-r--r--  spec/lib/gitlab/database/bulk_update_spec.rb  36
-rw-r--r--  spec/lib/gitlab/database/migration_helpers_spec.rb  28
-rw-r--r--  spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb  116
-rw-r--r--  spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb  68
-rw-r--r--  spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb  31
-rw-r--r--  spec/lib/gitlab/database/similarity_score_spec.rb  2
-rw-r--r--  spec/lib/gitlab/database_spec.rb  108
-rw-r--r--  spec/lib/gitlab/diff/highlight_cache_spec.rb  8
-rw-r--r--  spec/lib/gitlab/diff/highlight_spec.rb  16
-rw-r--r--  spec/lib/gitlab/diff/inline_diff_markdown_marker_spec.rb  4
-rw-r--r--  spec/lib/gitlab/diff/inline_diff_spec.rb  11
-rw-r--r--  spec/lib/gitlab/diff/pair_selector_spec.rb  84
-rw-r--r--  spec/lib/gitlab/email/handler/service_desk_handler_spec.rb  2
-rw-r--r--  spec/lib/gitlab/error_tracking/context_payload_generator_spec.rb  176
-rw-r--r--  spec/lib/gitlab/error_tracking/log_formatter_spec.rb  71
-rw-r--r--  spec/lib/gitlab/error_tracking/processor/context_payload_processor_spec.rb  45
-rw-r--r--  spec/lib/gitlab/error_tracking_spec.rb  219
-rw-r--r--  spec/lib/gitlab/etag_caching/router/graphql_spec.rb  50
-rw-r--r--  spec/lib/gitlab/etag_caching/router/restful_spec.rb  124
-rw-r--r--  spec/lib/gitlab/etag_caching/router_spec.rb  147
-rw-r--r--  spec/lib/gitlab/etag_caching/store_spec.rb  84
-rw-r--r--  spec/lib/gitlab/experimentation/controller_concern_spec.rb  74
-rw-r--r--  spec/lib/gitlab/experimentation_spec.rb  6
-rw-r--r--  spec/lib/gitlab/git/push_spec.rb  2
-rw-r--r--  spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb  41
-rw-r--r--  spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb  14
-rw-r--r--  spec/lib/gitlab/graphql/calls_gitaly/field_extension_spec.rb  87
-rw-r--r--  spec/lib/gitlab/graphql/calls_gitaly/instrumentation_spec.rb  23
-rw-r--r--  spec/lib/gitlab/graphql/docs/renderer_spec.rb  234
-rw-r--r--  spec/lib/gitlab/graphql/pagination/keyset/last_items_spec.rb  2
-rw-r--r--  spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb  12
-rw-r--r--  spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb  38
-rw-r--r--  spec/lib/gitlab/graphql/present/field_extension_spec.rb  143
-rw-r--r--  spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb  56
-rw-r--r--  spec/lib/gitlab/hook_data/project_member_builder_spec.rb  58
-rw-r--r--  spec/lib/gitlab/http_connection_adapter_spec.rb  231
-rw-r--r--  spec/lib/gitlab/import_export/all_models.yml  1
-rw-r--r--  spec/lib/gitlab/import_export/import_export_spec.rb  4
-rw-r--r--  spec/lib/gitlab/import_export/project/tree_saver_spec.rb  20
-rw-r--r--  spec/lib/gitlab/import_export/safe_model_attributes.yml  3
-rw-r--r--  spec/lib/gitlab/marker_range_spec.rb  71
-rw-r--r--  spec/lib/gitlab/metrics/background_transaction_spec.rb  67
-rw-r--r--  spec/lib/gitlab/metrics/subscribers/action_cable_spec.rb  106
-rw-r--r--  spec/lib/gitlab/metrics/subscribers/active_record_spec.rb  315
-rw-r--r--  spec/lib/gitlab/object_hierarchy_spec.rb  260
-rw-r--r--  spec/lib/gitlab/optimistic_locking_spec.rb  109
-rw-r--r--  spec/lib/gitlab/pagination/keyset/column_order_definition_spec.rb  188
-rw-r--r--  spec/lib/gitlab/pagination/keyset/order_spec.rb  420
-rw-r--r--  spec/lib/gitlab/query_limiting/active_support_subscriber_spec.rb  12
-rw-r--r--  spec/lib/gitlab/query_limiting/transaction_spec.rb  24
-rw-r--r--  spec/lib/gitlab/query_limiting_spec.rb  14
-rw-r--r--  spec/lib/gitlab/regex_spec.rb  29
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb  10
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/size_limiter/client_spec.rb  99
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/size_limiter/exceed_limit_error_spec.rb  35
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb  253
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware_spec.rb  1
-rw-r--r--  spec/lib/gitlab/string_range_marker_spec.rb  2
-rw-r--r--  spec/lib/gitlab/string_regex_marker_spec.rb  4
-rw-r--r--  spec/lib/gitlab/tracking/standard_context_spec.rb  20
-rw-r--r--  spec/lib/gitlab/tracking_spec.rb  4
-rw-r--r--  spec/lib/gitlab/tree_summary_spec.rb  31
-rw-r--r--  spec/lib/gitlab/url_blocker_spec.rb  4
-rw-r--r--  spec/lib/gitlab/usage/docs/renderer_spec.rb  10
-rw-r--r--  spec/lib/gitlab/usage/docs/value_formatter_spec.rb  6
-rw-r--r--  spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb  243
-rw-r--r--  spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb  6
-rw-r--r--  spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb  67
-rw-r--r--  spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/constraints_spec.rb  17
-rw-r--r--  spec/lib/gitlab/usage_data_counters/aggregated_metrics_spec.rb  24
-rw-r--r--  spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb  23
-rw-r--r--  spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb  12
-rw-r--r--  spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb  52
-rw-r--r--  spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb  102
-rw-r--r--  spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb  2
-rw-r--r--  spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb  20
-rw-r--r--  spec/lib/gitlab/usage_data_queries_spec.rb  8
-rw-r--r--  spec/lib/gitlab/usage_data_spec.rb  72
-rw-r--r--  spec/lib/gitlab/utils/usage_data_spec.rb  166
-rw-r--r--  spec/lib/gitlab/visibility_level_spec.rb  25
-rw-r--r--  spec/lib/gitlab/word_diff/chunk_collection_spec.rb  44
-rw-r--r--  spec/lib/gitlab/word_diff/line_processor_spec.rb  46
-rw-r--r--  spec/lib/gitlab/word_diff/parser_spec.rb  67
-rw-r--r--  spec/lib/gitlab/word_diff/positions_counter_spec.rb  35
-rw-r--r--  spec/lib/gitlab/word_diff/segments/chunk_spec.rb  53
-rw-r--r--  spec/lib/gitlab/word_diff/segments/diff_hunk_spec.rb  51
-rw-r--r--  spec/lib/gitlab/word_diff/segments/newline_spec.rb  13
-rw-r--r--  spec/lib/gitlab/x509/signature_spec.rb  122
-rw-r--r--  spec/lib/marginalia_spec.rb  83
-rw-r--r--  spec/lib/object_storage/direct_upload_spec.rb  11
-rw-r--r--  spec/lib/pager_duty/webhook_payload_parser_spec.rb  3
-rw-r--r--  spec/lib/peek/views/active_record_spec.rb  71
-rw-r--r--  spec/lib/quality/test_level_spec.rb  4
-rw-r--r--  spec/lib/release_highlights/validator/entry_spec.rb  19
-rw-r--r--  spec/lib/release_highlights/validator_spec.rb  5
-rw-r--r--  spec/lib/rspec_flaky/config_spec.rb  106
-rw-r--r--  spec/lib/rspec_flaky/example_spec.rb  92
-rw-r--r--  spec/lib/rspec_flaky/flaky_example_spec.rb  165
-rw-r--r--  spec/lib/rspec_flaky/flaky_examples_collection_spec.rb  74
-rw-r--r--  spec/lib/rspec_flaky/listener_spec.rb  219
-rw-r--r--  spec/lib/rspec_flaky/report_spec.rb  129
-rw-r--r--  spec/lib/system_check/sidekiq_check_spec.rb  81
197 files changed, 9357 insertions, 3409 deletions
diff --git a/spec/lib/api/entities/plan_limit_spec.rb b/spec/lib/api/entities/plan_limit_spec.rb
new file mode 100644
index 00000000000..ee42c67f9b6
--- /dev/null
+++ b/spec/lib/api/entities/plan_limit_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::PlanLimit do
+ let(:plan_limits) { create(:plan_limits) }
+
+ subject { described_class.new(plan_limits).as_json }
+
+ it 'exposes correct attributes' do
+ expect(subject).to include(
+ :conan_max_file_size,
+ :generic_packages_max_file_size,
+ :maven_max_file_size,
+ :npm_max_file_size,
+ :nuget_max_file_size,
+ :pypi_max_file_size
+ )
+ end
+
+ it 'does not expose id and plan_id' do
+ expect(subject).not_to include(:id, :plan_id)
+ end
+end
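
Note: the new spec above fully describes the entity's surface: six per-package-type size limits exposed, :id and :plan_id hidden. A minimal Grape-style sketch that would satisfy it could look like the following; this is an illustrative sketch only, assuming API::Entities::PlanLimit is a Grape::Entity (the usual base for classes under API::Entities), not a copy of the actual GitLab implementation.

module API
  module Entities
    class PlanLimit < Grape::Entity
      # Sketch only. Expose just the size-limit attributes checked by the spec;
      # :id and :plan_id stay hidden simply by never being exposed.
      expose :conan_max_file_size
      expose :generic_packages_max_file_size
      expose :maven_max_file_size
      expose :npm_max_file_size
      expose :nuget_max_file_size
      expose :pypi_max_file_size
    end
  end
end
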
diff --git a/spec/lib/api/entities/project_repository_storage_move_spec.rb b/spec/lib/api/entities/projects/repository_storage_move_spec.rb
index b0102dc376a..81f5d98b713 100644
--- a/spec/lib/api/entities/project_repository_storage_move_spec.rb
+++ b/spec/lib/api/entities/projects/repository_storage_move_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Entities::ProjectRepositoryStorageMove do
+RSpec.describe API::Entities::Projects::RepositoryStorageMove do
describe '#as_json' do
subject { entity.as_json }
diff --git a/spec/lib/api/entities/public_group_details_spec.rb b/spec/lib/api/entities/public_group_details_spec.rb
new file mode 100644
index 00000000000..34162ed00ca
--- /dev/null
+++ b/spec/lib/api/entities/public_group_details_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::PublicGroupDetails do
+ subject(:entity) { described_class.new(group) }
+
+ let(:group) { create(:group, :with_avatar) }
+
+ describe '#as_json' do
+ subject { entity.as_json }
+
+ it 'includes public group fields' do
+ is_expected.to eq(
+ id: group.id,
+ name: group.name,
+ web_url: group.web_url,
+ avatar_url: group.avatar_url(only_path: false),
+ full_name: group.full_name,
+ full_path: group.full_path
+ )
+ end
+ end
+end
diff --git a/spec/lib/api/entities/snippet_repository_storage_move_spec.rb b/spec/lib/api/entities/snippets/repository_storage_move_spec.rb
index 8086be3ffa7..a848afbcff9 100644
--- a/spec/lib/api/entities/snippet_repository_storage_move_spec.rb
+++ b/spec/lib/api/entities/snippets/repository_storage_move_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Entities::SnippetRepositoryStorageMove do
+RSpec.describe API::Entities::Snippets::RepositoryStorageMove do
describe '#as_json' do
subject { entity.as_json }
diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/repositories_spec.rb
index 492058c6a00..7a8cc713e4f 100644
--- a/spec/lib/backup/repositories_spec.rb
+++ b/spec/lib/backup/repositories_spec.rb
@@ -230,6 +230,16 @@ RSpec.describe Backup::Repositories do
expect(pool_repository).not_to be_failed
expect(pool_repository.object_pool.exists?).to be(true)
end
+
+ it 'skips pools with no source project, :sidekiq_might_not_need_inline' do
+ pool_repository = create(:pool_repository, state: :obsolete)
+ pool_repository.update_column(:source_project_id, nil)
+
+ subject.restore
+
+ pool_repository.reload
+ expect(pool_repository).to be_obsolete
+ end
end
it 'cleans existing repositories' do
diff --git a/spec/lib/banzai/filter/custom_emoji_filter_spec.rb b/spec/lib/banzai/filter/custom_emoji_filter_spec.rb
index ca8c9750e7f..5e76e8164dd 100644
--- a/spec/lib/banzai/filter/custom_emoji_filter_spec.rb
+++ b/spec/lib/banzai/filter/custom_emoji_filter_spec.rb
@@ -10,6 +10,10 @@ RSpec.describe Banzai::Filter::CustomEmojiFilter do
let_it_be(:custom_emoji) { create(:custom_emoji, name: 'tanuki', group: group) }
let_it_be(:custom_emoji2) { create(:custom_emoji, name: 'happy_tanuki', group: group, file: 'https://foo.bar/happy.png') }
+ it_behaves_like 'emoji filter' do
+ let(:emoji_name) { ':tanuki:' }
+ end
+
it 'replaces supported name custom emoji' do
doc = filter('<p>:tanuki:</p>', project: project)
@@ -17,25 +21,12 @@ RSpec.describe Banzai::Filter::CustomEmojiFilter do
expect(doc.css('gl-emoji img').size).to eq 1
end
- it 'ignores non existent custom emoji' do
- exp = act = '<p>:foo:</p>'
- doc = filter(act)
-
- expect(doc.to_html).to match Regexp.escape(exp)
- end
-
it 'correctly uses the custom emoji URL' do
doc = filter('<p>:tanuki:</p>')
expect(doc.css('img').first.attributes['src'].value).to eq(custom_emoji.file)
end
- it 'matches with adjacent text' do
- doc = filter('tanuki (:tanuki:)')
-
- expect(doc.css('img').size).to eq 1
- end
-
it 'matches multiple same custom emoji' do
doc = filter(':tanuki: :tanuki:')
@@ -54,18 +45,6 @@ RSpec.describe Banzai::Filter::CustomEmojiFilter do
expect(doc.css('img').size).to be 0
end
- it 'keeps whitespace intact' do
- doc = filter('This deserves a :tanuki:, big time.')
-
- expect(doc.to_html).to match(/^This deserves a <gl-emoji.+>, big time\.\z/)
- end
-
- it 'does not match emoji in a string' do
- doc = filter("'2a00:tanuki:100::1'")
-
- expect(doc.css('gl-emoji').size).to eq 0
- end
-
it 'does not do N+1 query' do
create(:custom_emoji, name: 'party-parrot', group: group)
diff --git a/spec/lib/banzai/filter/emoji_filter_spec.rb b/spec/lib/banzai/filter/emoji_filter_spec.rb
index 9005b4401b7..cb0b470eaa1 100644
--- a/spec/lib/banzai/filter/emoji_filter_spec.rb
+++ b/spec/lib/banzai/filter/emoji_filter_spec.rb
@@ -5,6 +5,10 @@ require 'spec_helper'
RSpec.describe Banzai::Filter::EmojiFilter do
include FilterSpecHelper
+ it_behaves_like 'emoji filter' do
+ let(:emoji_name) { ':+1:' }
+ end
+
it 'replaces supported name emoji' do
doc = filter('<p>:heart:</p>')
expect(doc.css('gl-emoji').first.text).to eq '❤'
@@ -15,12 +19,6 @@ RSpec.describe Banzai::Filter::EmojiFilter do
expect(doc.css('gl-emoji').first.text).to eq '❤'
end
- it 'ignores unsupported emoji' do
- exp = act = '<p>:foo:</p>'
- doc = filter(act)
- expect(doc.to_html).to match Regexp.escape(exp)
- end
-
it 'ignores unicode versions of trademark, copyright, and registered trademark' do
exp = act = '<p>™ © ®</p>'
doc = filter(act)
@@ -65,11 +63,6 @@ RSpec.describe Banzai::Filter::EmojiFilter do
expect(doc.css('gl-emoji').size).to eq 1
end
- it 'matches with adjacent text' do
- doc = filter('+1 (:+1:)')
- expect(doc.css('gl-emoji').size).to eq 1
- end
-
it 'unicode matches with adjacent text' do
doc = filter('+1 (👍)')
expect(doc.css('gl-emoji').size).to eq 1
@@ -90,12 +83,6 @@ RSpec.describe Banzai::Filter::EmojiFilter do
expect(doc.css('gl-emoji').size).to eq 6
end
- it 'does not match emoji in a string' do
- doc = filter("'2a00:a4c0:100::1'")
-
- expect(doc.css('gl-emoji').size).to eq 0
- end
-
it 'has a data-name attribute' do
doc = filter(':-1:')
expect(doc.css('gl-emoji').first.attr('data-name')).to eq 'thumbsdown'
@@ -106,12 +93,6 @@ RSpec.describe Banzai::Filter::EmojiFilter do
expect(doc.css('gl-emoji').first.attr('data-unicode-version')).to eq '6.0'
end
- it 'keeps whitespace intact' do
- doc = filter('This deserves a :+1:, big time.')
-
- expect(doc.to_html).to match(/^This deserves a <gl-emoji.+>, big time\.\z/)
- end
-
it 'unicode keeps whitespace intact' do
doc = filter('This deserves a 🎱, big time.')
diff --git a/spec/lib/banzai/filter/gollum_tags_filter_spec.rb b/spec/lib/banzai/filter/gollum_tags_filter_spec.rb
index f39b5280490..ec17bb26346 100644
--- a/spec/lib/banzai/filter/gollum_tags_filter_spec.rb
+++ b/spec/lib/banzai/filter/gollum_tags_filter_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Banzai::Filter::GollumTagsFilter do
path: 'images/image.jpg',
raw_data: '')
wiki_file = Gitlab::Git::WikiFile.new(gollum_file_double)
- expect(wiki).to receive(:find_file).with('images/image.jpg').and_return(wiki_file)
+ expect(wiki).to receive(:find_file).with('images/image.jpg', load_content: false).and_return(wiki_file)
tag = '[[images/image.jpg]]'
doc = filter("See #{tag}", wiki: wiki)
@@ -31,7 +31,7 @@ RSpec.describe Banzai::Filter::GollumTagsFilter do
end
it 'does not creates img tag if image does not exist' do
- expect(wiki).to receive(:find_file).with('images/image.jpg').and_return(nil)
+ expect(wiki).to receive(:find_file).with('images/image.jpg', load_content: false).and_return(nil)
tag = '[[images/image.jpg]]'
doc = filter("See #{tag}", wiki: wiki)
diff --git a/spec/lib/banzai/filter/sanitization_filter_spec.rb b/spec/lib/banzai/filter/sanitization_filter_spec.rb
index bc4b60dfe60..f880fe06ce3 100644
--- a/spec/lib/banzai/filter/sanitization_filter_spec.rb
+++ b/spec/lib/banzai/filter/sanitization_filter_spec.rb
@@ -33,14 +33,14 @@ RSpec.describe Banzai::Filter::SanitizationFilter do
end
it 'sanitizes `class` attribute from all elements' do
- act = %q{<pre class="code highlight white c"><code>&lt;span class="k"&gt;def&lt;/span&gt;</code></pre>}
- exp = %q{<pre><code>&lt;span class="k"&gt;def&lt;/span&gt;</code></pre>}
+ act = %q(<pre class="code highlight white c"><code>&lt;span class="k"&gt;def&lt;/span&gt;</code></pre>)
+ exp = %q(<pre><code>&lt;span class="k"&gt;def&lt;/span&gt;</code></pre>)
expect(filter(act).to_html).to eq exp
end
it 'sanitizes `class` attribute from non-highlight spans' do
- act = %q{<span class="k">def</span>}
- expect(filter(act).to_html).to eq %q{<span>def</span>}
+ act = %q(<span class="k">def</span>)
+ expect(filter(act).to_html).to eq %q(<span>def</span>)
end
it 'allows `text-align` property in `style` attribute on table elements' do
@@ -82,12 +82,12 @@ RSpec.describe Banzai::Filter::SanitizationFilter do
end
it 'allows `span` elements' do
- exp = act = %q{<span>Hello</span>}
+ exp = act = %q(<span>Hello</span>)
expect(filter(act).to_html).to eq exp
end
it 'allows `abbr` elements' do
- exp = act = %q{<abbr title="HyperText Markup Language">HTML</abbr>}
+ exp = act = %q(<abbr title="HyperText Markup Language">HTML</abbr>)
expect(filter(act).to_html).to eq exp
end
@@ -132,7 +132,7 @@ RSpec.describe Banzai::Filter::SanitizationFilter do
end
it 'allows the `data-sourcepos` attribute globally' do
- exp = %q{<p data-sourcepos="1:1-1:10">foo/bar.md</p>}
+ exp = %q(<p data-sourcepos="1:1-1:10">foo/bar.md</p>)
act = filter(exp)
expect(act.to_html).to eq exp
@@ -140,41 +140,41 @@ RSpec.describe Banzai::Filter::SanitizationFilter do
describe 'footnotes' do
it 'allows correct footnote id property on links' do
- exp = %q{<a href="#fn1" id="fnref1">foo/bar.md</a>}
+ exp = %q(<a href="#fn1" id="fnref1">foo/bar.md</a>)
act = filter(exp)
expect(act.to_html).to eq exp
end
it 'allows correct footnote id property on li element' do
- exp = %q{<ol><li id="fn1">footnote</li></ol>}
+ exp = %q(<ol><li id="fn1">footnote</li></ol>)
act = filter(exp)
expect(act.to_html).to eq exp
end
it 'removes invalid id for footnote links' do
- exp = %q{<a href="#fn1">link</a>}
+ exp = %q(<a href="#fn1">link</a>)
%w[fnrefx test xfnref1].each do |id|
- act = filter(%Q{<a href="#fn1" id="#{id}">link</a>})
+ act = filter(%(<a href="#fn1" id="#{id}">link</a>))
expect(act.to_html).to eq exp
end
end
it 'removes invalid id for footnote li' do
- exp = %q{<ol><li>footnote</li></ol>}
+ exp = %q(<ol><li>footnote</li></ol>)
%w[fnx test xfn1].each do |id|
- act = filter(%Q{<ol><li id="#{id}">footnote</li></ol>})
+ act = filter(%(<ol><li id="#{id}">footnote</li></ol>))
expect(act.to_html).to eq exp
end
end
it 'allows footnotes numbered higher than 9' do
- exp = %q{<a href="#fn15" id="fnref15">link</a><ol><li id="fn15">footnote</li></ol>}
+ exp = %q(<a href="#fn15" id="fnref15">link</a><ol><li id="fn15">footnote</li></ol>)
act = filter(exp)
expect(act.to_html).to eq exp
diff --git a/spec/lib/banzai/filter/video_link_filter_spec.rb b/spec/lib/banzai/filter/video_link_filter_spec.rb
index 32fbc6b687f..ec954aa9163 100644
--- a/spec/lib/banzai/filter/video_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/video_link_filter_spec.rb
@@ -33,6 +33,7 @@ RSpec.describe Banzai::Filter::VideoLinkFilter do
expect(video.name).to eq 'video'
expect(video['src']).to eq src
expect(video['width']).to eq "400"
+ expect(video['preload']).to eq 'metadata'
expect(paragraph.name).to eq 'p'
diff --git a/spec/lib/banzai/pipeline/full_pipeline_spec.rb b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
index bcee6f8f65d..989e06a992d 100644
--- a/spec/lib/banzai/pipeline/full_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
@@ -142,5 +142,12 @@ RSpec.describe Banzai::Pipeline::FullPipeline do
expect(output).to include("<span>#</span>#{issue.iid}")
end
+
+ it 'converts user reference with escaped underscore because of italics' do
+ markdown = '_@test\__'
+ output = described_class.to_html(markdown, project: project)
+
+ expect(output).to include('<em>@test_</em>')
+ end
end
end
diff --git a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb
index 241d6db4f11..5f31ad0c8f6 100644
--- a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb
@@ -31,11 +31,13 @@ RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline do
end
end
- # Test strings taken from https://spec.commonmark.org/0.29/#backslash-escapes
describe 'CommonMark tests', :aggregate_failures do
- it 'converts all ASCII punctuation to literals' do
- markdown = %q(\!\"\#\$\%\&\'\*\+\,\-\.\/\:\;\<\=\>\?\@\[\]\^\_\`\{\|\}\~) + %q[\(\)\\\\]
- punctuation = %w(! " # $ % &amp; ' * + , - . / : ; &lt; = &gt; ? @ [ \\ ] ^ _ ` { | } ~) + %w[( )]
+ it 'converts all reference punctuation to literals' do
+ reference_chars = Banzai::Filter::MarkdownPreEscapeFilter::REFERENCE_CHARACTERS
+ markdown = reference_chars.split('').map {|char| char.prepend("\\") }.join
+ punctuation = Banzai::Filter::MarkdownPreEscapeFilter::REFERENCE_CHARACTERS.split('')
+ punctuation = punctuation.delete_if {|char| char == '&' }
+ punctuation << '&amp;'
result = described_class.call(markdown, project: project)
output = result[:output].to_html
@@ -44,57 +46,45 @@ RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline do
expect(result[:escaped_literals]).to be_truthy
end
- it 'does not convert other characters to literals' do
- markdown = %q(\→\A\a\ \3\φ\«)
- expected = '\→\A\a\ \3\φ\«'
-
- result = correct_html_included(markdown, expected)
- expect(result[:escaped_literals]).to be_falsey
- end
+ it 'ensure we handle all the GitLab reference characters' do
+ reference_chars = ObjectSpace.each_object(Class).map do |klass|
+ next unless klass.included_modules.include?(Referable)
+ next unless klass.respond_to?(:reference_prefix)
+ next unless klass.reference_prefix.length == 1
- describe 'escaped characters are treated as regular characters and do not have their usual Markdown meanings' do
- where(:markdown, :expected) do
- %q(\*not emphasized*) | %q(<span>*</span>not emphasized*)
- %q(\<br/> not a tag) | %q(<span>&lt;</span>br/&gt; not a tag)
- %q!\[not a link](/foo)! | %q!<span>[</span>not a link](/foo)!
- %q(\`not code`) | %q(<span>`</span>not code`)
- %q(1\. not a list) | %q(1<span>.</span> not a list)
- %q(\# not a heading) | %q(<span>#</span> not a heading)
- %q(\[foo]: /url "not a reference") | %q(<span>[</span>foo]: /url "not a reference")
- %q(\&ouml; not a character entity) | %q(<span>&amp;</span>ouml; not a character entity)
- end
+ klass.reference_prefix
+ end.compact
- with_them do
- it 'keeps them as literals' do
- correct_html_included(markdown, expected)
- end
+ reference_chars.all? do |char|
+ Banzai::Filter::MarkdownPreEscapeFilter::REFERENCE_CHARACTERS.include?(char)
end
end
- it 'backslash is itself escaped, the following character is not' do
- markdown = %q(\\\\*emphasis*)
- expected = %q(<span>\</span><em>emphasis</em>)
+ it 'does not convert non-reference punctuation to spans' do
+ markdown = %q(\"\'\*\+\,\-\.\/\:\;\<\=\>\?\[\]\_\`\{\|\}) + %q[\(\)\\\\]
- correct_html_included(markdown, expected)
+ result = described_class.call(markdown, project: project)
+ output = result[:output].to_html
+
+ expect(output).not_to include('<span>')
+ expect(result[:escaped_literals]).to be_falsey
end
- it 'backslash at the end of the line is a hard line break' do
- markdown = <<~MARKDOWN
- foo\\
- bar
- MARKDOWN
- expected = "foo<br>\nbar"
+ it 'does not convert other characters to literals' do
+ markdown = %q(\→\A\a\ \3\φ\«)
+ expected = '\→\A\a\ \3\φ\«'
- correct_html_included(markdown, expected)
+ result = correct_html_included(markdown, expected)
+ expect(result[:escaped_literals]).to be_falsey
end
describe 'backslash escapes do not work in code blocks, code spans, autolinks, or raw HTML' do
where(:markdown, :expected) do
- %q(`` \[\` ``) | %q(<code>\[\`</code>)
- %q( \[\]) | %Q(<code>\\[\\]\n</code>)
- %Q(~~~\n\\[\\]\n~~~) | %Q(<code>\\[\\]\n</code>)
- %q(<http://example.com?find=\*>) | %q(<a href="http://example.com?find=%5C*">http://example.com?find=\*</a>)
- %q[<a href="/bar\/)">] | %q[<a href="/bar%5C/)">]
+ %q(`` \@\! ``) | %q(<code>\@\!</code>)
+ %q( \@\!) | %Q(<code>\\@\\!\n</code>)
+ %Q(~~~\n\\@\\!\n~~~) | %Q(<code>\\@\\!\n</code>)
+ %q(<http://example.com?find=\@>) | %q(<a href="http://example.com?find=%5C@">http://example.com?find=\@</a>)
+ %q[<a href="/bar\@)">] | %q[<a href="/bar%5C@)">]
end
with_them do
@@ -104,9 +94,9 @@ RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline do
describe 'work in all other contexts, including URLs and link titles, link references, and info strings in fenced code blocks' do
where(:markdown, :expected) do
- %q![foo](/bar\* "ti\*tle")! | %q(<a href="/bar*" title="ti*tle">foo</a>)
- %Q![foo]\n\n[foo]: /bar\\* "ti\\*tle"! | %q(<a href="/bar*" title="ti*tle">foo</a>)
- %Q(``` foo\\+bar\nfoo\n```) | %Q(<code lang="foo+bar">foo\n</code>)
+ %q![foo](/bar\@ "\@title")! | %q(<a href="/bar@" title="@title">foo</a>)
+ %Q![foo]\n\n[foo]: /bar\\@ "\\@title"! | %q(<a href="/bar@" title="@title">foo</a>)
+ %Q(``` foo\\@bar\nfoo\n```) | %Q(<code lang="foo@bar">foo\n</code>)
end
with_them do
diff --git a/spec/lib/bulk_imports/common/loaders/entity_loader_spec.rb b/spec/lib/bulk_imports/common/loaders/entity_loader_spec.rb
deleted file mode 100644
index 57ffdfa9aee..00000000000
--- a/spec/lib/bulk_imports/common/loaders/entity_loader_spec.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Common::Loaders::EntityLoader do
- describe '#load' do
- it "creates entities for the given data" do
- group = create(:group, path: "imported-group")
- parent_entity = create(:bulk_import_entity, group: group, bulk_import: create(:bulk_import))
- context = BulkImports::Pipeline::Context.new(parent_entity)
-
- data = {
- source_type: :group_entity,
- source_full_path: "parent/subgroup",
- destination_name: "subgroup",
- destination_namespace: parent_entity.group.full_path,
- parent_id: parent_entity.id
- }
-
- expect { subject.load(context, data) }.to change(BulkImports::Entity, :count).by(1)
-
- subgroup_entity = BulkImports::Entity.last
-
- expect(subgroup_entity.source_full_path).to eq 'parent/subgroup'
- expect(subgroup_entity.destination_namespace).to eq 'imported-group'
- expect(subgroup_entity.destination_name).to eq 'subgroup'
- expect(subgroup_entity.parent_id).to eq parent_entity.id
- end
- end
-end
diff --git a/spec/lib/bulk_imports/common/transformers/prohibited_attributes_transformer_spec.rb b/spec/lib/bulk_imports/common/transformers/prohibited_attributes_transformer_spec.rb
index 03d138b227c..08a82bc84ed 100644
--- a/spec/lib/bulk_imports/common/transformers/prohibited_attributes_transformer_spec.rb
+++ b/spec/lib/bulk_imports/common/transformers/prohibited_attributes_transformer_spec.rb
@@ -68,5 +68,11 @@ RSpec.describe BulkImports::Common::Transformers::ProhibitedAttributesTransforme
expect(transformed_hash).to eq(expected_hash)
end
+
+ context 'when there is no data to transform' do
+ it 'returns' do
+ expect(subject.transform(nil, nil)).to be_nil
+ end
+ end
end
end
diff --git a/spec/lib/bulk_imports/common/transformers/award_emoji_transformer_spec.rb b/spec/lib/bulk_imports/common/transformers/user_reference_transformer_spec.rb
index 5b560a30bf5..ff11a10bfe9 100644
--- a/spec/lib/bulk_imports/common/transformers/award_emoji_transformer_spec.rb
+++ b/spec/lib/bulk_imports/common/transformers/user_reference_transformer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::Common::Transformers::AwardEmojiTransformer do
+RSpec.describe BulkImports::Common::Transformers::UserReferenceTransformer do
describe '#transform' do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
@@ -12,7 +12,6 @@ RSpec.describe BulkImports::Common::Transformers::AwardEmojiTransformer do
let(:hash) do
{
- 'name' => 'thumbs up',
'user' => {
'public_email' => email
}
@@ -44,5 +43,27 @@ RSpec.describe BulkImports::Common::Transformers::AwardEmojiTransformer do
include_examples 'sets user_id and removes user key'
end
+
+ context 'when there is no data to transform' do
+ it 'returns' do
+ expect(subject.transform(nil, nil)).to be_nil
+ end
+ end
+
+ context 'when custom reference is provided' do
+ it 'updates provided reference' do
+ hash = {
+ 'author' => {
+ 'public_email' => user.email
+ }
+ }
+
+ transformer = described_class.new(reference: 'author')
+ result = transformer.transform(context, hash)
+
+ expect(result['author']).to be_nil
+ expect(result['author_id']).to eq(user.id)
+ end
+ end
end
end
diff --git a/spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb b/spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb
index 247da200d68..85f82be7d18 100644
--- a/spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb
+++ b/spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb
@@ -3,15 +3,18 @@
require 'spec_helper'
RSpec.describe BulkImports::Groups::Graphql::GetLabelsQuery do
- describe '#variables' do
- let(:entity) { double(source_full_path: 'test', next_page_for: 'next_page', bulk_import: nil) }
- let(:context) { BulkImports::Pipeline::Context.new(entity) }
-
- it 'returns query variables based on entity information' do
- expected = { full_path: entity.source_full_path, cursor: entity.next_page_for }
-
- expect(described_class.variables(context)).to eq(expected)
- end
+ it 'has a valid query' do
+ entity = create(:bulk_import_entity)
+ context = BulkImports::Pipeline::Context.new(entity)
+
+ query = GraphQL::Query.new(
+ GitlabSchema,
+ described_class.to_s,
+ variables: described_class.variables(context)
+ )
+ result = GitlabSchema.static_validator.validate(query)
+
+ expect(result[:errors]).to be_empty
end
describe '#data_path' do
diff --git a/spec/lib/bulk_imports/groups/graphql/get_milestones_query_spec.rb b/spec/lib/bulk_imports/groups/graphql/get_milestones_query_spec.rb
new file mode 100644
index 00000000000..a38505fbf85
--- /dev/null
+++ b/spec/lib/bulk_imports/groups/graphql/get_milestones_query_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Groups::Graphql::GetMilestonesQuery do
+ it 'has a valid query' do
+ entity = create(:bulk_import_entity)
+ context = BulkImports::Pipeline::Context.new(entity)
+
+ query = GraphQL::Query.new(
+ GitlabSchema,
+ described_class.to_s,
+ variables: described_class.variables(context)
+ )
+ result = GitlabSchema.static_validator.validate(query)
+
+ expect(result[:errors]).to be_empty
+ end
+
+ describe '#data_path' do
+ it 'returns data path' do
+ expected = %w[data group milestones nodes]
+
+ expect(described_class.data_path).to eq(expected)
+ end
+ end
+
+ describe '#page_info_path' do
+ it 'returns pagination information path' do
+ expected = %w[data group milestones page_info]
+
+ expect(described_class.page_info_path).to eq(expected)
+ end
+ end
+end
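
Note: the new spec above pins down the query module's contract: described_class.to_s must be a statically valid query against GitlabSchema, variables(context) supplies its variables, and data_path/page_info_path point at data.group.milestones.nodes and data.group.milestones.page_info. A sketch of a module meeting that contract might look as follows; the GraphQL field selection and the context/entity accessors are assumptions inferred from the specs in this commit, not the actual GitLab source.

module BulkImports
  module Groups
    module Graphql
      module GetMilestonesQuery
        extend self

        # Assumed field selection, mirroring the attributes the milestones
        # pipeline spec consumes (title, description, state, dates, timestamps).
        def to_s
          <<~GRAPHQL
            query($full_path: ID!, $cursor: String) {
              group(fullPath: $full_path) {
                milestones(after: $cursor) {
                  page_info: pageInfo {
                    end_cursor: endCursor
                    has_next_page: hasNextPage
                  }
                  nodes {
                    title
                    description
                    state
                    start_date: startDate
                    due_date: dueDate
                    created_at: createdAt
                    updated_at: updatedAt
                  }
                }
              }
            }
          GRAPHQL
        end

        def variables(context)
          # `context.entity` and the `next_page_for` signature are assumptions.
          {
            full_path: context.entity.source_full_path,
            cursor: context.entity.next_page_for(:milestones)
          }
        end

        def data_path
          %w[data group milestones nodes]
        end

        def page_info_path
          %w[data group milestones page_info]
        end
      end
    end
  end
end
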
diff --git a/spec/lib/bulk_imports/groups/loaders/labels_loader_spec.rb b/spec/lib/bulk_imports/groups/loaders/labels_loader_spec.rb
deleted file mode 100644
index ac2f9c8cb1d..00000000000
--- a/spec/lib/bulk_imports/groups/loaders/labels_loader_spec.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Groups::Loaders::LabelsLoader do
- describe '#load' do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:entity) { create(:bulk_import_entity, group: group) }
- let(:context) { BulkImports::Pipeline::Context.new(entity) }
-
- let(:data) do
- {
- 'title' => 'label',
- 'description' => 'description',
- 'color' => '#FFFFFF'
- }
- end
-
- it 'creates the label' do
- expect { subject.load(context, data) }.to change(Label, :count).by(1)
-
- label = group.labels.first
-
- expect(label.title).to eq(data['title'])
- expect(label.description).to eq(data['description'])
- expect(label.color).to eq(data['color'])
- end
- end
-end
diff --git a/spec/lib/bulk_imports/groups/loaders/members_loader_spec.rb b/spec/lib/bulk_imports/groups/loaders/members_loader_spec.rb
deleted file mode 100644
index d552578e7be..00000000000
--- a/spec/lib/bulk_imports/groups/loaders/members_loader_spec.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Groups::Loaders::MembersLoader do
- describe '#load' do
- let_it_be(:user_importer) { create(:user) }
- let_it_be(:user_member) { create(:user) }
- let_it_be(:group) { create(:group) }
- let_it_be(:bulk_import) { create(:bulk_import, user: user_importer) }
- let_it_be(:entity) { create(:bulk_import_entity, bulk_import: bulk_import, group: group) }
- let_it_be(:context) { BulkImports::Pipeline::Context.new(entity) }
-
- let_it_be(:data) do
- {
- 'user_id' => user_member.id,
- 'created_by_id' => user_importer.id,
- 'access_level' => 30,
- 'created_at' => '2020-01-01T00:00:00Z',
- 'updated_at' => '2020-01-01T00:00:00Z',
- 'expires_at' => nil
- }
- end
-
- it 'does nothing when there is no data' do
- expect { subject.load(context, nil) }.not_to change(GroupMember, :count)
- end
-
- it 'creates the member' do
- expect { subject.load(context, data) }.to change(GroupMember, :count).by(1)
-
- member = group.members.last
-
- expect(member.user).to eq(user_member)
- expect(member.created_by).to eq(user_importer)
- expect(member.access_level).to eq(30)
- expect(member.created_at).to eq('2020-01-01T00:00:00Z')
- expect(member.updated_at).to eq('2020-01-01T00:00:00Z')
- expect(member.expires_at).to eq(nil)
- end
- end
-end
diff --git a/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb
index 63f28916d9a..3327a30f1d5 100644
--- a/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:cursor) { 'cursor' }
+ let(:timestamp) { Time.new(2020, 01, 01).utc }
let(:entity) do
create(
:bulk_import_entity,
@@ -20,21 +21,23 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
subject { described_class.new(context) }
- def extractor_data(title:, has_next_page:, cursor: nil)
- data = [
- {
- 'title' => title,
- 'description' => 'desc',
- 'color' => '#428BCA'
- }
- ]
+ def label_data(title)
+ {
+ 'title' => title,
+ 'description' => 'desc',
+ 'color' => '#428BCA',
+ 'created_at' => timestamp.to_s,
+ 'updated_at' => timestamp.to_s
+ }
+ end
+ def extractor_data(title:, has_next_page:, cursor: nil)
page_info = {
'end_cursor' => cursor,
'has_next_page' => has_next_page
}
- BulkImports::Pipeline::ExtractedData.new(data: data, page_info: page_info)
+ BulkImports::Pipeline::ExtractedData.new(data: [label_data(title)], page_info: page_info)
end
describe '#run' do
@@ -55,6 +58,8 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
expect(label.title).to eq('label2')
expect(label.description).to eq('desc')
expect(label.color).to eq('#428BCA')
+ expect(label.created_at).to eq(timestamp)
+ expect(label.updated_at).to eq(timestamp)
end
end
@@ -90,6 +95,20 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
end
end
+ describe '#load' do
+ it 'creates the label' do
+ data = label_data('label')
+
+ expect { subject.load(context, data) }.to change(Label, :count).by(1)
+
+ label = group.labels.first
+
+ data.each do |key, value|
+ expect(label[key]).to eq(value)
+ end
+ end
+ end
+
describe 'pipeline parts' do
it { expect(described_class).to include_module(BulkImports::Pipeline) }
it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
@@ -110,9 +129,5 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
{ klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil }
)
end
-
- it 'has loaders' do
- expect(described_class.get_loader).to eq(klass: BulkImports::Groups::Loaders::LabelsLoader, options: nil)
- end
end
end
diff --git a/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb
index 9f498f8154f..74d3e09d263 100644
--- a/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb
@@ -37,6 +37,34 @@ RSpec.describe BulkImports::Groups::Pipelines::MembersPipeline do
end
end
+ describe '#load' do
+ it 'does nothing when there is no data' do
+ expect { subject.load(context, nil) }.not_to change(GroupMember, :count)
+ end
+
+ it 'creates the member' do
+ data = {
+ 'user_id' => member_user1.id,
+ 'created_by_id' => member_user2.id,
+ 'access_level' => 30,
+ 'created_at' => '2020-01-01T00:00:00Z',
+ 'updated_at' => '2020-01-01T00:00:00Z',
+ 'expires_at' => nil
+ }
+
+ expect { subject.load(context, data) }.to change(GroupMember, :count).by(1)
+
+ member = group.members.last
+
+ expect(member.user).to eq(member_user1)
+ expect(member.created_by).to eq(member_user2)
+ expect(member.access_level).to eq(30)
+ expect(member.created_at).to eq('2020-01-01T00:00:00Z')
+ expect(member.updated_at).to eq('2020-01-01T00:00:00Z')
+ expect(member.expires_at).to eq(nil)
+ end
+ end
+
describe 'pipeline parts' do
it { expect(described_class).to include_module(BulkImports::Pipeline) }
it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
@@ -58,10 +86,6 @@ RSpec.describe BulkImports::Groups::Pipelines::MembersPipeline do
{ klass: BulkImports::Groups::Transformers::MemberAttributesTransformer, options: nil }
)
end
-
- it 'has loaders' do
- expect(described_class.get_loader).to eq(klass: BulkImports::Groups::Loaders::MembersLoader, options: nil)
- end
end
def member_data(email:, has_next_page:, cursor: nil)
diff --git a/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb
new file mode 100644
index 00000000000..f0c34c65257
--- /dev/null
+++ b/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb
@@ -0,0 +1,151 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Groups::Pipelines::MilestonesPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:cursor) { 'cursor' }
+ let_it_be(:timestamp) { Time.new(2020, 01, 01).utc }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+
+ let(:entity) do
+ create(
+ :bulk_import_entity,
+ bulk_import: bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Group',
+ destination_namespace: group.full_path,
+ group: group
+ )
+ end
+
+ let(:context) { BulkImports::Pipeline::Context.new(entity) }
+
+ subject { described_class.new(context) }
+
+ def milestone_data(title)
+ {
+ 'title' => title,
+ 'description' => 'desc',
+ 'state' => 'closed',
+ 'start_date' => '2020-10-21',
+ 'due_date' => '2020-10-22',
+ 'created_at' => timestamp.to_s,
+ 'updated_at' => timestamp.to_s
+ }
+ end
+
+ def extracted_data(title:, has_next_page:, cursor: nil)
+ page_info = {
+ 'end_cursor' => cursor,
+ 'has_next_page' => has_next_page
+ }
+
+ BulkImports::Pipeline::ExtractedData.new(data: [milestone_data(title)], page_info: page_info)
+ end
+
+ before do
+ group.add_owner(user)
+ end
+
+ describe '#run' do
+ it 'imports group milestones' do
+ first_page = extracted_data(title: 'milestone1', has_next_page: true, cursor: cursor)
+ last_page = extracted_data(title: 'milestone2', has_next_page: false)
+
+ allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
+ allow(extractor)
+ .to receive(:extract)
+ .and_return(first_page, last_page)
+ end
+
+ expect { subject.run }.to change(Milestone, :count).by(2)
+
+ expect(group.milestones.pluck(:title)).to contain_exactly('milestone1', 'milestone2')
+
+ milestone = group.milestones.last
+
+ expect(milestone.description).to eq('desc')
+ expect(milestone.state).to eq('closed')
+ expect(milestone.start_date.to_s).to eq('2020-10-21')
+ expect(milestone.due_date.to_s).to eq('2020-10-22')
+ expect(milestone.created_at).to eq(timestamp)
+ expect(milestone.updated_at).to eq(timestamp)
+ end
+ end
+
+ describe '#after_run' do
+ context 'when extracted data has next page' do
+ it 'updates tracker information and runs pipeline again' do
+ data = extracted_data(title: 'milestone', has_next_page: true, cursor: cursor)
+
+ expect(subject).to receive(:run)
+
+ subject.after_run(data)
+
+ tracker = entity.trackers.find_by(relation: :milestones)
+
+ expect(tracker.has_next_page).to eq(true)
+ expect(tracker.next_page).to eq(cursor)
+ end
+ end
+
+ context 'when extracted data has no next page' do
+ it 'updates tracker information and does not run pipeline' do
+ data = extracted_data(title: 'milestone', has_next_page: false)
+
+ expect(subject).not_to receive(:run)
+
+ subject.after_run(data)
+
+ tracker = entity.trackers.find_by(relation: :milestones)
+
+ expect(tracker.has_next_page).to eq(false)
+ expect(tracker.next_page).to be_nil
+ end
+ end
+ end
+
+ describe '#load' do
+ it 'creates the milestone' do
+ data = milestone_data('milestone')
+
+ expect { subject.load(context, data) }.to change(Milestone, :count).by(1)
+ end
+
+ context 'when user is not authorized to create the milestone' do
+ before do
+ allow(user).to receive(:can?).with(:admin_milestone, group).and_return(false)
+ end
+
+ it 'raises NotAllowedError' do
+ data = extracted_data(title: 'milestone', has_next_page: false)
+
+ expect { subject.load(context, data) }.to raise_error(::BulkImports::Pipeline::NotAllowedError)
+ end
+ end
+ end
+
+ describe 'pipeline parts' do
+ it { expect(described_class).to include_module(BulkImports::Pipeline) }
+ it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
+
+ it 'has extractors' do
+ expect(described_class.get_extractor)
+ .to eq(
+ klass: BulkImports::Common::Extractors::GraphqlExtractor,
+ options: {
+ query: BulkImports::Groups::Graphql::GetMilestonesQuery
+ }
+ )
+ end
+
+ it 'has transformers' do
+ expect(described_class.transformers)
+ .to contain_exactly(
+ { klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil }
+ )
+ end
+ end
+end
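
Note: taken together, the pipeline spec above exercises three things: the class-level DSL wiring (a GraphqlExtractor configured with GetMilestonesQuery plus the ProhibitedAttributesTransformer), a #load step that persists one milestone per extracted row, and an authorization check that raises BulkImports::Pipeline::NotAllowedError. A condensed sketch of a class with that shape is shown below; the exact DSL keyword arguments and the context accessors (group, current_user) are assumptions based on the specs in this commit, not a copy of the real pipeline.

module BulkImports
  module Groups
    module Pipelines
      class MilestonesPipeline
        include BulkImports::Pipeline

        # Class-level wiring matching what the spec asserts via get_extractor
        # and transformers.
        extractor BulkImports::Common::Extractors::GraphqlExtractor,
                  query: BulkImports::Groups::Graphql::GetMilestonesQuery

        transformer BulkImports::Common::Transformers::ProhibitedAttributesTransformer

        def load(context, data)
          return unless data

          # `context.current_user` / `context.group` accessors are assumed here.
          raise ::BulkImports::Pipeline::NotAllowedError unless authorized?(context)

          context.group.milestones.create!(data)
        end

        private

        def authorized?(context)
          context.current_user.can?(:admin_milestone, context.group)
        end
      end
    end
  end
end
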
diff --git a/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb
index 0404c52b895..2a99646bb4a 100644
--- a/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb
@@ -3,9 +3,14 @@
require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group, path: 'group') }
+ let_it_be(:parent) { create(:group, name: 'imported-group', path: 'imported-group') }
+ let(:context) { BulkImports::Pipeline::Context.new(parent_entity) }
+
+ subject { described_class.new(context) }
+
describe '#run' do
- let_it_be(:user) { create(:user) }
- let(:parent) { create(:group, name: 'imported-group', path: 'imported-group') }
let!(:parent_entity) do
create(
:bulk_import_entity,
@@ -14,8 +19,6 @@ RSpec.describe BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline do
)
end
- let(:context) { BulkImports::Pipeline::Context.new(parent_entity) }
-
let(:subgroup_data) do
[
{
@@ -25,8 +28,6 @@ RSpec.describe BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline do
]
end
- subject { described_class.new(context) }
-
before do
allow_next_instance_of(BulkImports::Groups::Extractors::SubgroupsExtractor) do |extractor|
allow(extractor).to receive(:extract).and_return(subgroup_data)
@@ -47,6 +48,29 @@ RSpec.describe BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline do
end
end
+ describe '#load' do
+ let(:parent_entity) { create(:bulk_import_entity, group: group, bulk_import: create(:bulk_import)) }
+
+ it 'creates entities for the given data' do
+ data = {
+ source_type: :group_entity,
+ source_full_path: 'parent/subgroup',
+ destination_name: 'subgroup',
+ destination_namespace: parent_entity.group.full_path,
+ parent_id: parent_entity.id
+ }
+
+ expect { subject.load(context, data) }.to change(BulkImports::Entity, :count).by(1)
+
+ subgroup_entity = BulkImports::Entity.last
+
+ expect(subgroup_entity.source_full_path).to eq 'parent/subgroup'
+ expect(subgroup_entity.destination_namespace).to eq 'group'
+ expect(subgroup_entity.destination_name).to eq 'subgroup'
+ expect(subgroup_entity.parent_id).to eq parent_entity.id
+ end
+ end
+
describe 'pipeline parts' do
it { expect(described_class).to include_module(BulkImports::Pipeline) }
it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
@@ -61,9 +85,5 @@ RSpec.describe BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline do
{ klass: BulkImports::Groups::Transformers::SubgroupToEntityTransformer, options: nil }
)
end
-
- it 'has loaders' do
- expect(described_class.get_loader).to eq(klass: BulkImports::Common::Loaders::EntityLoader, options: nil)
- end
end
end
diff --git a/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb b/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
index 5a7a51675d6..b3fe8a2ba25 100644
--- a/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
+++ b/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
@@ -80,14 +80,14 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer do
expect(transformed_data['parent_id']).to eq(parent.id)
end
- context 'when destination namespace is user namespace' do
+ context 'when destination namespace is empty' do
it 'does not set parent id' do
entity = create(
:bulk_import_entity,
bulk_import: bulk_import,
source_full_path: 'source/full/path',
destination_name: group.name,
- destination_namespace: user.namespace.full_path
+ destination_namespace: ''
)
context = BulkImports::Pipeline::Context.new(entity)
diff --git a/spec/lib/bulk_imports/importers/group_importer_spec.rb b/spec/lib/bulk_imports/importers/group_importer_spec.rb
index b4fdb7b5e5b..5d501b49e41 100644
--- a/spec/lib/bulk_imports/importers/group_importer_spec.rb
+++ b/spec/lib/bulk_imports/importers/group_importer_spec.rb
@@ -22,10 +22,13 @@ RSpec.describe BulkImports::Importers::GroupImporter do
expect_to_run_pipeline BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline, context: context
expect_to_run_pipeline BulkImports::Groups::Pipelines::MembersPipeline, context: context
expect_to_run_pipeline BulkImports::Groups::Pipelines::LabelsPipeline, context: context
+ expect_to_run_pipeline BulkImports::Groups::Pipelines::MilestonesPipeline, context: context
if Gitlab.ee?
expect_to_run_pipeline('EE::BulkImports::Groups::Pipelines::EpicsPipeline'.constantize, context: context)
expect_to_run_pipeline('EE::BulkImports::Groups::Pipelines::EpicAwardEmojiPipeline'.constantize, context: context)
+ expect_to_run_pipeline('EE::BulkImports::Groups::Pipelines::EpicEventsPipeline'.constantize, context: context)
+ expect_to_run_pipeline('EE::BulkImports::Groups::Pipelines::IterationsPipeline'.constantize, context: context)
end
subject.execute
diff --git a/spec/lib/bulk_imports/pipeline/runner_spec.rb b/spec/lib/bulk_imports/pipeline/runner_spec.rb
index 76e4e64a7d6..59f01c9caaa 100644
--- a/spec/lib/bulk_imports/pipeline/runner_spec.rb
+++ b/spec/lib/bulk_imports/pipeline/runner_spec.rb
@@ -27,29 +27,31 @@ RSpec.describe BulkImports::Pipeline::Runner do
end
end
- describe 'pipeline runner' do
- before do
- stub_const('BulkImports::Extractor', extractor)
- stub_const('BulkImports::Transformer', transformer)
- stub_const('BulkImports::Loader', loader)
-
- pipeline = Class.new do
- include BulkImports::Pipeline
+ before do
+ stub_const('BulkImports::Extractor', extractor)
+ stub_const('BulkImports::Transformer', transformer)
+ stub_const('BulkImports::Loader', loader)
- extractor BulkImports::Extractor
- transformer BulkImports::Transformer
- loader BulkImports::Loader
+ pipeline = Class.new do
+ include BulkImports::Pipeline
- def after_run(_); end
- end
+ extractor BulkImports::Extractor
+ transformer BulkImports::Transformer
+ loader BulkImports::Loader
- stub_const('BulkImports::MyPipeline', pipeline)
+ def after_run(_); end
end
- context 'when entity is not marked as failed' do
- let(:entity) { create(:bulk_import_entity) }
- let(:context) { BulkImports::Pipeline::Context.new(entity) }
+ stub_const('BulkImports::MyPipeline', pipeline)
+ end
+ let_it_be_with_refind(:entity) { create(:bulk_import_entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(entity, extra: :data) }
+
+ subject { BulkImports::MyPipeline.new(context) }
+
+ describe 'pipeline runner' do
+ context 'when entity is not marked as failed' do
it 'runs pipeline extractor, transformer, loader' do
extracted_data = BulkImports::Pipeline::ExtractedData.new(data: { foo: :bar })
@@ -76,58 +78,61 @@ RSpec.describe BulkImports::Pipeline::Runner do
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger).to receive(:info)
.with(
- bulk_import_entity_id: entity.id,
- bulk_import_entity_type: 'group_entity',
- message: 'Pipeline started',
- pipeline_class: 'BulkImports::MyPipeline'
+ log_params(
+ context,
+ message: 'Pipeline started',
+ pipeline_class: 'BulkImports::MyPipeline'
+ )
)
expect(logger).to receive(:info)
.with(
- bulk_import_entity_id: entity.id,
- bulk_import_entity_type: 'group_entity',
- pipeline_class: 'BulkImports::MyPipeline',
- pipeline_step: :extractor,
- step_class: 'BulkImports::Extractor'
+ log_params(
+ context,
+ pipeline_class: 'BulkImports::MyPipeline',
+ pipeline_step: :extractor,
+ step_class: 'BulkImports::Extractor'
+ )
)
expect(logger).to receive(:info)
.with(
- bulk_import_entity_id: entity.id,
- bulk_import_entity_type: 'group_entity',
- pipeline_class: 'BulkImports::MyPipeline',
- pipeline_step: :transformer,
- step_class: 'BulkImports::Transformer'
+ log_params(
+ context,
+ pipeline_class: 'BulkImports::MyPipeline',
+ pipeline_step: :transformer,
+ step_class: 'BulkImports::Transformer'
+ )
)
expect(logger).to receive(:info)
.with(
- bulk_import_entity_id: entity.id,
- bulk_import_entity_type: 'group_entity',
- pipeline_class: 'BulkImports::MyPipeline',
- pipeline_step: :loader,
- step_class: 'BulkImports::Loader'
+ log_params(
+ context,
+ pipeline_class: 'BulkImports::MyPipeline',
+ pipeline_step: :loader,
+ step_class: 'BulkImports::Loader'
+ )
)
expect(logger).to receive(:info)
.with(
- bulk_import_entity_id: entity.id,
- bulk_import_entity_type: 'group_entity',
- pipeline_class: 'BulkImports::MyPipeline',
- pipeline_step: :after_run
+ log_params(
+ context,
+ pipeline_class: 'BulkImports::MyPipeline',
+ pipeline_step: :after_run
+ )
)
expect(logger).to receive(:info)
.with(
- bulk_import_entity_id: entity.id,
- bulk_import_entity_type: 'group_entity',
- message: 'Pipeline finished',
- pipeline_class: 'BulkImports::MyPipeline'
+ log_params(
+ context,
+ message: 'Pipeline finished',
+ pipeline_class: 'BulkImports::MyPipeline'
+ )
)
end
- BulkImports::MyPipeline.new(context).run
+ subject.run
end
context 'when exception is raised' do
- let(:entity) { create(:bulk_import_entity, :created) }
- let(:context) { BulkImports::Pipeline::Context.new(entity) }
-
before do
allow_next_instance_of(BulkImports::Extractor) do |extractor|
allow(extractor).to receive(:extract).with(context).and_raise(StandardError, 'Error!')
@@ -135,7 +140,21 @@ RSpec.describe BulkImports::Pipeline::Runner do
end
it 'logs import failure' do
- BulkImports::MyPipeline.new(context).run
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger).to receive(:error)
+ .with(
+ log_params(
+ context,
+ pipeline_step: :extractor,
+ pipeline_class: 'BulkImports::MyPipeline',
+ exception_class: 'StandardError',
+ exception_message: 'Error!'
+ )
+ )
+ end
+
+ expect { subject.run }
+ .to change(entity.failures, :count).by(1)
failure = entity.failures.first
@@ -152,29 +171,29 @@ RSpec.describe BulkImports::Pipeline::Runner do
end
it 'marks entity as failed' do
- BulkImports::MyPipeline.new(context).run
-
- expect(entity.failed?).to eq(true)
+ expect { subject.run }
+ .to change(entity, :status_name).to(:failed)
end
it 'logs warn message' do
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger).to receive(:warn)
.with(
- message: 'Pipeline failed',
- pipeline_class: 'BulkImports::MyPipeline',
- bulk_import_entity_id: entity.id,
- bulk_import_entity_type: entity.source_type
+ log_params(
+ context,
+ message: 'Pipeline failed',
+ pipeline_class: 'BulkImports::MyPipeline'
+ )
)
end
- BulkImports::MyPipeline.new(context).run
+ subject.run
end
end
context 'when pipeline is not marked to abort on failure' do
- it 'marks entity as failed' do
- BulkImports::MyPipeline.new(context).run
+ it 'does not mark entity as failed' do
+ subject.run
expect(entity.failed?).to eq(false)
end
@@ -183,24 +202,31 @@ RSpec.describe BulkImports::Pipeline::Runner do
end
context 'when entity is marked as failed' do
- let(:entity) { create(:bulk_import_entity) }
- let(:context) { BulkImports::Pipeline::Context.new(entity) }
-
it 'logs and returns without execution' do
- allow(entity).to receive(:failed?).and_return(true)
+ entity.fail_op!
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger).to receive(:info)
.with(
- message: 'Skipping due to failed pipeline status',
- pipeline_class: 'BulkImports::MyPipeline',
- bulk_import_entity_id: entity.id,
- bulk_import_entity_type: 'group_entity'
+ log_params(
+ context,
+ message: 'Skipping due to failed pipeline status',
+ pipeline_class: 'BulkImports::MyPipeline'
+ )
)
end
- BulkImports::MyPipeline.new(context).run
+ subject.run
end
end
end
+
+ def log_params(context, extra = {})
+ {
+ bulk_import_id: context.bulk_import.id,
+ bulk_import_entity_id: context.entity.id,
+ bulk_import_entity_type: context.entity.source_type,
+ context_extra: context.extra
+ }.merge(extra)
+ end
end
diff --git a/spec/lib/bulk_imports/pipeline_spec.rb b/spec/lib/bulk_imports/pipeline_spec.rb
index 3811a02a7fd..c882e3d26ea 100644
--- a/spec/lib/bulk_imports/pipeline_spec.rb
+++ b/spec/lib/bulk_imports/pipeline_spec.rb
@@ -3,25 +3,25 @@
require 'spec_helper'
RSpec.describe BulkImports::Pipeline do
- describe 'pipeline attributes' do
- before do
- stub_const('BulkImports::Extractor', Class.new)
- stub_const('BulkImports::Transformer', Class.new)
- stub_const('BulkImports::Loader', Class.new)
-
- klass = Class.new do
- include BulkImports::Pipeline
+ before do
+ stub_const('BulkImports::Extractor', Class.new)
+ stub_const('BulkImports::Transformer', Class.new)
+ stub_const('BulkImports::Loader', Class.new)
- abort_on_failure!
+ klass = Class.new do
+ include BulkImports::Pipeline
- extractor BulkImports::Extractor, { foo: :bar }
- transformer BulkImports::Transformer, { foo: :bar }
- loader BulkImports::Loader, { foo: :bar }
- end
+ abort_on_failure!
- stub_const('BulkImports::MyPipeline', klass)
+ extractor BulkImports::Extractor, foo: :bar
+ transformer BulkImports::Transformer, foo: :bar
+ loader BulkImports::Loader, foo: :bar
end
+ stub_const('BulkImports::MyPipeline', klass)
+ end
+
+ describe 'pipeline attributes' do
describe 'getters' do
it 'retrieves class attributes' do
expect(BulkImports::MyPipeline.get_extractor).to eq({ klass: BulkImports::Extractor, options: { foo: :bar } })
@@ -29,6 +29,27 @@ RSpec.describe BulkImports::Pipeline do
expect(BulkImports::MyPipeline.get_loader).to eq({ klass: BulkImports::Loader, options: { foo: :bar } })
expect(BulkImports::MyPipeline.abort_on_failure?).to eq(true)
end
+
+ context 'when extractor and loader are defined within the pipeline' do
+ before do
+ klass = Class.new do
+ include BulkImports::Pipeline
+
+ def extract; end
+
+ def load; end
+ end
+
+ stub_const('BulkImports::AnotherPipeline', klass)
+ end
+
+ it 'returns itself when retrieving extractor & loader' do
+ pipeline = BulkImports::AnotherPipeline.new(nil)
+
+ expect(pipeline.send(:extractor)).to eq(pipeline)
+ expect(pipeline.send(:loader)).to eq(pipeline)
+ end
+ end
end
describe 'setters' do
@@ -54,4 +75,69 @@ RSpec.describe BulkImports::Pipeline do
end
end
end
+
+ describe '#instantiate' do
+ context 'when options are present' do
+ it 'instantiates new object with options' do
+ expect(BulkImports::Extractor).to receive(:new).with(foo: :bar)
+ expect(BulkImports::Transformer).to receive(:new).with(foo: :bar)
+ expect(BulkImports::Loader).to receive(:new).with(foo: :bar)
+
+ pipeline = BulkImports::MyPipeline.new(nil)
+
+ pipeline.send(:extractor)
+ pipeline.send(:transformers)
+ pipeline.send(:loader)
+ end
+ end
+
+ context 'when options are missing' do
+ before do
+ klass = Class.new do
+ include BulkImports::Pipeline
+
+ extractor BulkImports::Extractor
+ transformer BulkImports::Transformer
+ loader BulkImports::Loader
+ end
+
+ stub_const('BulkImports::NoOptionsPipeline', klass)
+ end
+
+ it 'instantiates new object without options' do
+ expect(BulkImports::Extractor).to receive(:new).with(no_args)
+ expect(BulkImports::Transformer).to receive(:new).with(no_args)
+ expect(BulkImports::Loader).to receive(:new).with(no_args)
+
+ pipeline = BulkImports::NoOptionsPipeline.new(nil)
+
+ pipeline.send(:extractor)
+ pipeline.send(:transformers)
+ pipeline.send(:loader)
+ end
+ end
+ end
+
+ describe '#transformers' do
+ before do
+ klass = Class.new do
+ include BulkImports::Pipeline
+
+ transformer BulkImports::Transformer
+
+ def transform; end
+ end
+
+ stub_const('BulkImports::TransformersPipeline', klass)
+ end
+
+    it 'returns the instance transform method first, followed by the configured transformer' do
+ transformer = double
+ allow(BulkImports::Transformer).to receive(:new).and_return(transformer)
+
+ pipeline = BulkImports::TransformersPipeline.new(nil)
+
+ expect(pipeline.send(:transformers)).to eq([pipeline, transformer])
+ end
+ end
end
diff --git a/spec/lib/sentry/api_urls_spec.rb b/spec/lib/error_tracking/sentry_client/api_urls_spec.rb
index d56b4397e1c..bd701748dc2 100644
--- a/spec/lib/sentry/api_urls_spec.rb
+++ b/spec/lib/error_tracking/sentry_client/api_urls_spec.rb
@@ -2,13 +2,13 @@
require 'spec_helper'
-RSpec.describe Sentry::ApiUrls do
+RSpec.describe ErrorTracking::SentryClient::ApiUrls do
let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project/' }
let(:token) { 'test-token' }
let(:issue_id) { '123456' }
let(:issue_id_with_reserved_chars) { '123$%' }
let(:escaped_issue_id) { '123%24%25' }
- let(:api_urls) { Sentry::ApiUrls.new(sentry_url) }
+ let(:api_urls) { described_class.new(sentry_url) }
# Sentry API returns 404 if there are extra slashes in the URL!
shared_examples 'correct url with extra slashes' do
diff --git a/spec/lib/sentry/client/event_spec.rb b/spec/lib/error_tracking/sentry_client/event_spec.rb
index 07ed331c44c..64e674f1e9b 100644
--- a/spec/lib/sentry/client/event_spec.rb
+++ b/spec/lib/error_tracking/sentry_client/event_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Sentry::Client do
+RSpec.describe ErrorTracking::SentryClient do
include SentryClientHelpers
let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
diff --git a/spec/lib/sentry/client/issue_link_spec.rb b/spec/lib/error_tracking/sentry_client/issue_link_spec.rb
index fe3abe7cb23..f86d328ef89 100644
--- a/spec/lib/sentry/client/issue_link_spec.rb
+++ b/spec/lib/error_tracking/sentry_client/issue_link_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Sentry::Client::IssueLink do
+RSpec.describe ErrorTracking::SentryClient::IssueLink do
include SentryClientHelpers
let_it_be(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
diff --git a/spec/lib/sentry/client/issue_spec.rb b/spec/lib/error_tracking/sentry_client/issue_spec.rb
index dedef905c95..e54296c58e0 100644
--- a/spec/lib/sentry/client/issue_spec.rb
+++ b/spec/lib/error_tracking/sentry_client/issue_spec.rb
@@ -2,12 +2,12 @@
require 'spec_helper'
-RSpec.describe Sentry::Client::Issue do
+RSpec.describe ErrorTracking::SentryClient::Issue do
include SentryClientHelpers
let(:token) { 'test-token' }
let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0' }
- let(:client) { Sentry::Client.new(sentry_url, token) }
+ let(:client) { ErrorTracking::SentryClient.new(sentry_url, token) }
let(:issue_id) { 11 }
describe '#list_issues' do
@@ -136,7 +136,7 @@ RSpec.describe Sentry::Client::Issue do
subject { client.list_issues(issue_status: issue_status, limit: limit, sort: 'fish') }
it 'throws an error' do
- expect { subject }.to raise_error(Sentry::Client::BadRequestError, 'Invalid value for sort param')
+ expect { subject }.to raise_error(ErrorTracking::SentryClient::BadRequestError, 'Invalid value for sort param')
end
end
@@ -164,7 +164,7 @@ RSpec.describe Sentry::Client::Issue do
end
it 'raises exception' do
- expect { subject }.to raise_error(Sentry::Client::MissingKeysError, 'Sentry API response is missing keys. key not found: "id"')
+ expect { subject }.to raise_error(ErrorTracking::SentryClient::MissingKeysError, 'Sentry API response is missing keys. key not found: "id"')
end
end
@@ -173,7 +173,7 @@ RSpec.describe Sentry::Client::Issue do
deep_size = double('Gitlab::Utils::DeepSize', valid?: false)
allow(Gitlab::Utils::DeepSize).to receive(:new).with(sentry_api_response).and_return(deep_size)
- expect { subject }.to raise_error(Sentry::Client::ResponseInvalidSizeError, 'Sentry API response is too big. Limit is 1 MB.')
+ expect { subject }.to raise_error(ErrorTracking::SentryClient::ResponseInvalidSizeError, 'Sentry API response is too big. Limit is 1 MB.')
end
end
diff --git a/spec/lib/sentry/pagination_parser_spec.rb b/spec/lib/error_tracking/sentry_client/pagination_parser_spec.rb
index c4ed24827bb..c4b771d5b93 100644
--- a/spec/lib/sentry/pagination_parser_spec.rb
+++ b/spec/lib/error_tracking/sentry_client/pagination_parser_spec.rb
@@ -2,7 +2,7 @@
require 'fast_spec_helper'
-RSpec.describe Sentry::PaginationParser do
+RSpec.describe ErrorTracking::SentryClient::PaginationParser do
describe '.parse' do
subject { described_class.parse(headers) }
diff --git a/spec/lib/sentry/client/projects_spec.rb b/spec/lib/error_tracking/sentry_client/projects_spec.rb
index ea2c5ccb81e..247f9c1c085 100644
--- a/spec/lib/sentry/client/projects_spec.rb
+++ b/spec/lib/error_tracking/sentry_client/projects_spec.rb
@@ -2,12 +2,12 @@
require 'spec_helper'
-RSpec.describe Sentry::Client::Projects do
+RSpec.describe ErrorTracking::SentryClient::Projects do
include SentryClientHelpers
let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
let(:token) { 'test-token' }
- let(:client) { Sentry::Client.new(sentry_url, token) }
+ let(:client) { ErrorTracking::SentryClient.new(sentry_url, token) }
let(:projects_sample_response) do
Gitlab::Utils.deep_indifferent_access(
Gitlab::Json.parse(fixture_file('sentry/list_projects_sample_response.json'))
@@ -44,7 +44,7 @@ RSpec.describe Sentry::Client::Projects do
end
it 'raises exception' do
- expect { subject }.to raise_error(Sentry::Client::MissingKeysError, 'Sentry API response is missing keys. key not found: "slug"')
+ expect { subject }.to raise_error(ErrorTracking::SentryClient::MissingKeysError, 'Sentry API response is missing keys. key not found: "slug"')
end
end
diff --git a/spec/lib/sentry/client/repo_spec.rb b/spec/lib/error_tracking/sentry_client/repo_spec.rb
index 956c0b6eee1..9a1c7a69c3d 100644
--- a/spec/lib/sentry/client/repo_spec.rb
+++ b/spec/lib/error_tracking/sentry_client/repo_spec.rb
@@ -2,12 +2,12 @@
require 'spec_helper'
-RSpec.describe Sentry::Client::Repo do
+RSpec.describe ErrorTracking::SentryClient::Repo do
include SentryClientHelpers
let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
let(:token) { 'test-token' }
- let(:client) { Sentry::Client.new(sentry_url, token) }
+ let(:client) { ErrorTracking::SentryClient.new(sentry_url, token) }
let(:repos_sample_response) { Gitlab::Json.parse(fixture_file('sentry/repos_sample_response.json')) }
describe '#repos' do
diff --git a/spec/lib/sentry/client_spec.rb b/spec/lib/error_tracking/sentry_client_spec.rb
index cddcb6e98fa..9ffd756f057 100644
--- a/spec/lib/sentry/client_spec.rb
+++ b/spec/lib/error_tracking/sentry_client_spec.rb
@@ -2,11 +2,11 @@
require 'spec_helper'
-RSpec.describe Sentry::Client do
+RSpec.describe ErrorTracking::SentryClient do
let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
let(:token) { 'test-token' }
- subject { Sentry::Client.new(sentry_url, token) }
+ subject { described_class.new(sentry_url, token) }
it { is_expected.to respond_to :projects }
it { is_expected.to respond_to :list_issues }
diff --git a/spec/lib/expand_variables_spec.rb b/spec/lib/expand_variables_spec.rb
index b603325cdb8..407187ea05f 100644
--- a/spec/lib/expand_variables_spec.rb
+++ b/spec/lib/expand_variables_spec.rb
@@ -82,6 +82,13 @@ RSpec.describe ExpandVariables do
value: 'key$variable',
result: 'keyvalue',
variables: -> { [{ key: 'variable', value: 'value' }] }
+ },
+ "simple expansion using Collection": {
+ value: 'key$variable',
+ result: 'keyvalue',
+ variables: Gitlab::Ci::Variables::Collection.new([
+ { key: 'variable', value: 'value' }
+ ])
}
}
end
diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb
index 1bcb2223012..3e158391d7f 100644
--- a/spec/lib/feature_spec.rb
+++ b/spec/lib/feature_spec.rb
@@ -269,7 +269,7 @@ RSpec.describe Feature, stub_feature_flags: false do
end
it 'when invalid type is used' do
- expect { described_class.enabled?(:my_feature_flag, type: :licensed) }
+ expect { described_class.enabled?(:my_feature_flag, type: :ops) }
.to raise_error(/The `type:` of/)
end
diff --git a/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb b/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb
new file mode 100644
index 00000000000..b62eac14e3e
--- /dev/null
+++ b/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'generator_helper'
+
+RSpec.describe Gitlab::UsageMetricDefinitionGenerator do
+ describe 'Validation' do
+ let(:key_path) { 'counter.category.event' }
+ let(:dir) { '7d' }
+ let(:options) { [key_path, '--dir', dir, '--pretend'] }
+
+ subject { described_class.start(options) }
+
+ it 'does not raise an error' do
+ expect { subject }.not_to raise_error
+ end
+
+ context 'with a missing directory' do
+ let(:options) { [key_path, '--pretend'] }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(RuntimeError)
+ end
+ end
+
+ context 'with an invalid directory' do
+ let(:dir) { '8d' }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(RuntimeError)
+ end
+ end
+
+ context 'with an already existing metric with the same key_path' do
+ before do
+ allow(Gitlab::Usage::MetricDefinition).to receive(:definitions).and_return(Hash[key_path, 'definition'])
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(RuntimeError)
+ end
+ end
+ end
+
+ describe 'Name suggestions' do
+ let(:temp_dir) { Dir.mktmpdir }
+
+ before do
+ stub_const("#{described_class}::TOP_LEVEL_DIR", temp_dir)
+ end
+
+ context 'with product_intelligence_metrics_names_suggestions feature ON' do
+ it 'adds name key to metric definition' do
+ stub_feature_flags(product_intelligence_metrics_names_suggestions: true)
+
+ expect(::Gitlab::Usage::Metrics::NamesSuggestions::Generator).to receive(:generate).and_return('some name')
+ described_class.new(['counts_weekly.test_metric'], { 'dir' => '7d' }).invoke_all
+ metric_definition_path = Dir.glob(File.join(temp_dir, 'metrics/counts_7d/*_test_metric.yml')).first
+
+ expect(YAML.safe_load(File.read(metric_definition_path))).to include("name" => "some name")
+ end
+ end
+
+ context 'with product_intelligence_metrics_names_suggestions feature OFF' do
+      it 'does not add name key to metric definition' do
+ stub_feature_flags(product_intelligence_metrics_names_suggestions: false)
+
+ expect(::Gitlab::Usage::Metrics::NamesSuggestions::Generator).not_to receive(:generate)
+ described_class.new(['counts_weekly.test_metric'], { 'dir' => '7d' }).invoke_all
+ metric_definition_path = Dir.glob(File.join(temp_dir, 'metrics/counts_7d/*_test_metric.yml')).first
+
+        expect(YAML.safe_load(File.read(metric_definition_path)).keys).not_to include('name')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/alert_management/payload/generic_spec.rb b/spec/lib/gitlab/alert_management/payload/generic_spec.rb
index d022c629458..b0c238c62c8 100644
--- a/spec/lib/gitlab/alert_management/payload/generic_spec.rb
+++ b/spec/lib/gitlab/alert_management/payload/generic_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Gitlab::AlertManagement::Payload::Generic do
describe '#title' do
subject { parsed_payload.title }
- it_behaves_like 'parsable alert payload field with fallback', 'New: Incident', 'title'
+ it_behaves_like 'parsable alert payload field with fallback', 'New: Alert', 'title'
end
describe '#severity' do
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/average_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/average_spec.rb
new file mode 100644
index 00000000000..e2fdd4918d5
--- /dev/null
+++ b/spec/lib/gitlab/analytics/cycle_analytics/average_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Analytics::CycleAnalytics::Average do
+ let_it_be(:project) { create(:project) }
+
+ let_it_be(:issue_1) do
+ # Duration: 10 days
+ create(:issue, project: project, created_at: 20.days.ago).tap do |issue|
+ issue.metrics.update!(first_mentioned_in_commit_at: 10.days.ago)
+ end
+ end
+
+ let_it_be(:issue_2) do
+ # Duration: 5 days
+ create(:issue, project: project, created_at: 20.days.ago).tap do |issue|
+ issue.metrics.update!(first_mentioned_in_commit_at: 15.days.ago)
+ end
+ end
+
+ let(:stage) do
+ build(
+ :cycle_analytics_project_stage,
+ start_event_identifier: Gitlab::Analytics::CycleAnalytics::StageEvents::IssueCreated.identifier,
+ end_event_identifier: Gitlab::Analytics::CycleAnalytics::StageEvents::IssueFirstMentionedInCommit.identifier,
+ project: project
+ )
+ end
+
+ let(:query) { Issue.joins(:metrics).in_projects(project.id) }
+
+ around do |example|
+ freeze_time { example.run }
+ end
+
+ subject(:average) { described_class.new(stage: stage, query: query) }
+
+ describe '#seconds' do
+ subject(:average_duration_in_seconds) { average.seconds }
+
+ context 'when no results' do
+ let(:query) { Issue.none }
+
+ it { is_expected.to eq(nil) }
+ end
+
+ context 'returns the average duration in seconds' do
+ it { is_expected.to be_within(0.5).of(7.5.days.to_f) }
+ end
+ end
+
+ describe '#days' do
+ subject(:average_duration_in_days) { average.days }
+
+ context 'when no results' do
+ let(:query) { Issue.none }
+
+ it { is_expected.to eq(nil) }
+ end
+
+ context 'returns the average duration in days' do
+ it { is_expected.to be_within(0.01).of(7.5) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/sorting_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/sorting_spec.rb
new file mode 100644
index 00000000000..8f5be709a11
--- /dev/null
+++ b/spec/lib/gitlab/analytics/cycle_analytics/sorting_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Analytics::CycleAnalytics::Sorting do
+ let(:stage) { build(:cycle_analytics_project_stage, start_event_identifier: :merge_request_created, end_event_identifier: :merge_request_merged) }
+
+ subject(:order_values) { described_class.apply(MergeRequest.joins(:metrics), stage, sort, direction).order_values }
+
+ context 'when invalid sorting params are given' do
+ let(:sort) { :unknown_sort }
+ let(:direction) { :unknown_direction }
+
+ it 'falls back to end_event DESC sorting' do
+ expect(order_values).to eq([stage.end_event.timestamp_projection.desc])
+ end
+ end
+
+ context 'sorting end_event' do
+ let(:sort) { :end_event }
+
+ context 'direction desc' do
+ let(:direction) { :desc }
+
+ specify do
+ expect(order_values).to eq([stage.end_event.timestamp_projection.desc])
+ end
+ end
+
+ context 'direction asc' do
+ let(:direction) { :asc }
+
+ specify do
+ expect(order_values).to eq([stage.end_event.timestamp_projection.asc])
+ end
+ end
+ end
+
+ context 'sorting duration' do
+ let(:sort) { :duration }
+
+ context 'direction desc' do
+ let(:direction) { :desc }
+
+ specify do
+ expect(order_values).to eq([Arel::Nodes::Subtraction.new(stage.end_event.timestamp_projection, stage.start_event.timestamp_projection).desc])
+ end
+ end
+
+ context 'direction asc' do
+ let(:direction) { :asc }
+
+ specify do
+ expect(order_values).to eq([Arel::Nodes::Subtraction.new(stage.end_event.timestamp_projection, stage.start_event.timestamp_projection).asc])
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/analytics/instance_statistics/workers_argument_builder_spec.rb b/spec/lib/gitlab/analytics/usage_trends/workers_argument_builder_spec.rb
index 115c8145f59..34c5bd6c6ae 100644
--- a/spec/lib/gitlab/analytics/instance_statistics/workers_argument_builder_spec.rb
+++ b/spec/lib/gitlab/analytics/usage_trends/workers_argument_builder_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Analytics::InstanceStatistics::WorkersArgumentBuilder do
+RSpec.describe Gitlab::Analytics::UsageTrends::WorkersArgumentBuilder do
context 'when no measurement identifiers are given' do
it 'returns empty array' do
expect(described_class.new(measurement_identifiers: []).execute).to be_empty
@@ -16,8 +16,8 @@ RSpec.describe Gitlab::Analytics::InstanceStatistics::WorkersArgumentBuilder do
let_it_be(:project_3) { create(:project, namespace: user_1.namespace, creator: user_1) }
let(:recorded_at) { 2.days.ago }
- let(:projects_measurement_identifier) { ::Analytics::InstanceStatistics::Measurement.identifiers.fetch(:projects) }
- let(:users_measurement_identifier) { ::Analytics::InstanceStatistics::Measurement.identifiers.fetch(:users) }
+ let(:projects_measurement_identifier) { ::Analytics::UsageTrends::Measurement.identifiers.fetch(:projects) }
+ let(:users_measurement_identifier) { ::Analytics::UsageTrends::Measurement.identifiers.fetch(:users) }
let(:measurement_identifiers) { [projects_measurement_identifier, users_measurement_identifier] }
subject { described_class.new(measurement_identifiers: measurement_identifiers, recorded_at: recorded_at).execute }
@@ -46,19 +46,19 @@ RSpec.describe Gitlab::Analytics::InstanceStatistics::WorkersArgumentBuilder do
context 'when custom min and max queries are present' do
let(:min_id) { User.second.id }
let(:max_id) { User.maximum(:id) }
- let(:users_measurement_identifier) { ::Analytics::InstanceStatistics::Measurement.identifiers.fetch(:users) }
+ let(:users_measurement_identifier) { ::Analytics::UsageTrends::Measurement.identifiers.fetch(:users) }
before do
create_list(:user, 2)
min_max_queries = {
- ::Analytics::InstanceStatistics::Measurement.identifiers[:users] => {
+ ::Analytics::UsageTrends::Measurement.identifiers[:users] => {
minimum_query: -> { min_id },
maximum_query: -> { max_id }
}
}
- allow(::Analytics::InstanceStatistics::Measurement).to receive(:identifier_min_max_queries) { min_max_queries }
+ allow(::Analytics::UsageTrends::Measurement).to receive(:identifier_min_max_queries) { min_max_queries }
end
subject do
diff --git a/spec/lib/gitlab/application_context_spec.rb b/spec/lib/gitlab/application_context_spec.rb
index 88f865adea7..0fbbc67ef6a 100644
--- a/spec/lib/gitlab/application_context_spec.rb
+++ b/spec/lib/gitlab/application_context_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe Gitlab::ApplicationContext do
describe '.push' do
it 'passes the expected context on to labkit' do
fake_proc = duck_type(:call)
- expected_context = { user: fake_proc }
+ expected_context = { user: fake_proc, client_id: fake_proc }
expect(Labkit::Context).to receive(:push).with(expected_context)
@@ -92,6 +92,34 @@ RSpec.describe Gitlab::ApplicationContext do
expect(result(context))
.to include(project: project.full_path, root_namespace: project.full_path_components.first)
end
+
+ describe 'setting the client' do
+ let_it_be(:remote_ip) { '127.0.0.1' }
+ let_it_be(:runner) { create(:ci_runner) }
+ let_it_be(:options) { { remote_ip: remote_ip, runner: runner, user: user } }
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:provided_options, :client) do
+ [:remote_ip] | :remote_ip
+ [:remote_ip, :runner] | :runner
+ [:remote_ip, :runner, :user] | :user
+ end
+
+ with_them do
+ it 'sets the client_id to the expected value' do
+ context = described_class.new(**options.slice(*provided_options))
+
+ client_id = case client
+ when :remote_ip then "ip/#{remote_ip}"
+ when :runner then "runner/#{runner.id}"
+ when :user then "user/#{user.id}"
+ end
+
+ expect(result(context)[:client_id]).to eq(client_id)
+ end
+ end
+ end
end
describe '#use' do
diff --git a/spec/lib/gitlab/auth/o_auth/user_spec.rb b/spec/lib/gitlab/auth/o_auth/user_spec.rb
index 6c6cee9c273..7a8e6e77d52 100644
--- a/spec/lib/gitlab/auth/o_auth/user_spec.rb
+++ b/spec/lib/gitlab/auth/o_auth/user_spec.rb
@@ -995,6 +995,23 @@ RSpec.describe Gitlab::Auth::OAuth::User do
end
end
+ context 'when gl_user is nil' do
+ # We can't use `allow_next_instance_of` here because the stubbed method is called inside `initialize`.
+ # When the class calls `gl_user` during `initialize`, the `nil` value is overwritten and we do not see expected results from the spec.
+ # So we use `allow_any_instance_of` to preserve the `nil` value to test the behavior when `gl_user` is nil.
+
+ # rubocop:disable RSpec/AnyInstanceOf
+ before do
+ allow_any_instance_of(described_class).to receive(:gl_user) { nil }
+ allow_any_instance_of(described_class).to receive(:sync_profile_from_provider?) { true } # to make the code flow proceed until gl_user.build_user_synced_attributes_metadata is called
+ end
+ # rubocop:enable RSpec/AnyInstanceOf
+
+ it 'does not raise NoMethodError' do
+ expect { oauth_user }.not_to raise_error
+ end
+ end
+
describe '._uid_and_provider' do
let!(:existing_user) { create(:omniauth_user, extern_uid: 'my-uid', provider: 'my-provider') }
diff --git a/spec/lib/gitlab/avatar_cache_spec.rb b/spec/lib/gitlab/avatar_cache_spec.rb
new file mode 100644
index 00000000000..ffe6f81b6e7
--- /dev/null
+++ b/spec/lib/gitlab/avatar_cache_spec.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Gitlab::AvatarCache, :clean_gitlab_redis_cache do
+ def with(&blk)
+ Gitlab::Redis::Cache.with(&blk) # rubocop:disable CodeReuse/ActiveRecord
+ end
+
+ def read(key, subkey)
+ with do |redis|
+ redis.hget(key, subkey)
+ end
+ end
+
+ let(:thing) { double("thing", avatar_path: avatar_path) }
+ let(:avatar_path) { "/avatars/my_fancy_avatar.png" }
+ let(:key) { described_class.send(:email_key, "foo@bar.com") }
+
+ let(:perform_fetch) do
+ described_class.by_email("foo@bar.com", 20, 2, true) do
+ thing.avatar_path
+ end
+ end
+
+ describe "#by_email" do
+ it "writes a new value into the cache" do
+ expect(read(key, "20:2:true")).to eq(nil)
+
+ perform_fetch
+
+ expect(read(key, "20:2:true")).to eq(avatar_path)
+ end
+
+ it "finds the cached value and doesn't execute the block" do
+ expect(thing).to receive(:avatar_path).once
+
+ described_class.by_email("foo@bar.com", 20, 2, true) do
+ thing.avatar_path
+ end
+
+ described_class.by_email("foo@bar.com", 20, 2, true) do
+ thing.avatar_path
+ end
+ end
+
+ it "finds the cached value in the request store and doesn't execute the block" do
+ expect(thing).to receive(:avatar_path).once
+
+ Gitlab::WithRequestStore.with_request_store do
+ described_class.by_email("foo@bar.com", 20, 2, true) do
+ thing.avatar_path
+ end
+
+ described_class.by_email("foo@bar.com", 20, 2, true) do
+ thing.avatar_path
+ end
+
+ expect(Gitlab::SafeRequestStore.read([key, "20:2:true"])).to eq(avatar_path)
+ end
+ end
+ end
+
+ describe "#delete_by_email" do
+ subject { described_class.delete_by_email(*emails) }
+
+ before do
+ perform_fetch
+ end
+
+ context "no emails, somehow" do
+ let(:emails) { [] }
+
+ it { is_expected.to eq(0) }
+ end
+
+ context "single email" do
+ let(:emails) { "foo@bar.com" }
+
+ it "removes the email" do
+ expect(read(key, "20:2:true")).to eq(avatar_path)
+
+ expect(subject).to eq(1)
+
+ expect(read(key, "20:2:true")).to eq(nil)
+ end
+ end
+
+ context "multiple emails" do
+ let(:emails) { ["foo@bar.com", "missing@baz.com"] }
+
+ it "removes the emails it finds" do
+ expect(read(key, "20:2:true")).to eq(avatar_path)
+
+ expect(subject).to eq(1)
+
+ expect(read(key, "20:2:true")).to eq(nil)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb b/spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb
new file mode 100644
index 00000000000..8febe850e04
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy, '#next_batch' do
+ let(:batching_strategy) { described_class.new }
+ let(:namespaces) { table(:namespaces) }
+
+ let!(:namespace1) { namespaces.create!(name: 'batchtest1', path: 'batch-test1') }
+ let!(:namespace2) { namespaces.create!(name: 'batchtest2', path: 'batch-test2') }
+ let!(:namespace3) { namespaces.create!(name: 'batchtest3', path: 'batch-test3') }
+ let!(:namespace4) { namespaces.create!(name: 'batchtest4', path: 'batch-test4') }
+
+ context 'when starting on the first batch' do
+ it 'returns the bounds of the next batch' do
+ batch_bounds = batching_strategy.next_batch(:namespaces, :id, batch_min_value: namespace1.id, batch_size: 3)
+
+ expect(batch_bounds).to eq([namespace1.id, namespace3.id])
+ end
+ end
+
+ context 'when additional batches remain' do
+ it 'returns the bounds of the next batch' do
+ batch_bounds = batching_strategy.next_batch(:namespaces, :id, batch_min_value: namespace2.id, batch_size: 3)
+
+ expect(batch_bounds).to eq([namespace2.id, namespace4.id])
+ end
+ end
+
+ context 'when on the final batch' do
+ it 'returns the bounds of the next batch' do
+ batch_bounds = batching_strategy.next_batch(:namespaces, :id, batch_min_value: namespace4.id, batch_size: 3)
+
+ expect(batch_bounds).to eq([namespace4.id, namespace4.id])
+ end
+ end
+
+ context 'when no additional batches remain' do
+ it 'returns nil' do
+ batch_bounds = batching_strategy.next_batch(:namespaces, :id, batch_min_value: namespace4.id + 1, batch_size: 1)
+
+ expect(batch_bounds).to be_nil
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb b/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb
index 110a1ff8a08..7ad93c3124a 100644
--- a/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb
+++ b/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb
@@ -38,22 +38,9 @@ RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJo
describe '#perform' do
let(:migration_class) { described_class.name }
- let!(:job1) do
- table(:background_migration_jobs).create!(
- class_name: migration_class,
- arguments: [1, 10, table_name, 'id', 'id', 'id_convert_to_bigint', sub_batch_size]
- )
- end
-
- let!(:job2) do
- table(:background_migration_jobs).create!(
- class_name: migration_class,
- arguments: [11, 20, table_name, 'id', 'id', 'id_convert_to_bigint', sub_batch_size]
- )
- end
it 'copies all primary keys in range' do
- subject.perform(12, 15, table_name, 'id', 'id', 'id_convert_to_bigint', sub_batch_size)
+ subject.perform(12, 15, table_name, 'id', sub_batch_size, 'id', 'id_convert_to_bigint')
expect(test_table.where('id = id_convert_to_bigint').pluck(:id)).to contain_exactly(12, 15)
expect(test_table.where(id_convert_to_bigint: 0).pluck(:id)).to contain_exactly(11, 19)
@@ -61,7 +48,7 @@ RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJo
end
it 'copies all foreign keys in range' do
- subject.perform(10, 14, table_name, 'id', 'fk', 'fk_convert_to_bigint', sub_batch_size)
+ subject.perform(10, 14, table_name, 'id', sub_batch_size, 'fk', 'fk_convert_to_bigint')
expect(test_table.where('fk = fk_convert_to_bigint').pluck(:id)).to contain_exactly(11, 12)
expect(test_table.where(fk_convert_to_bigint: 0).pluck(:id)).to contain_exactly(15, 19)
@@ -71,21 +58,11 @@ RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJo
it 'copies columns with NULLs' do
expect(test_table.where("name_convert_to_text = 'no name'").count).to eq(4)
- subject.perform(10, 20, table_name, 'id', 'name', 'name_convert_to_text', sub_batch_size)
+ subject.perform(10, 20, table_name, 'id', sub_batch_size, 'name', 'name_convert_to_text')
expect(test_table.where('name = name_convert_to_text').pluck(:id)).to contain_exactly(11, 12, 19)
expect(test_table.where('name is NULL and name_convert_to_text is NULL').pluck(:id)).to contain_exactly(15)
expect(test_table.where("name_convert_to_text = 'no name'").count).to eq(0)
end
-
- it 'tracks completion with BackgroundMigrationJob' do
- expect do
- subject.perform(11, 20, table_name, 'id', 'id', 'id_convert_to_bigint', sub_batch_size)
- end.to change { Gitlab::Database::BackgroundMigrationJob.succeeded.count }.from(0).to(1)
-
- expect(job1.reload.status).to eq(0)
- expect(job2.reload.status).to eq(1)
- expect(test_table.where('id = id_convert_to_bigint').count).to eq(4)
- end
end
end
diff --git a/spec/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check_spec.rb b/spec/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check_spec.rb
deleted file mode 100644
index 85a9c88ebff..00000000000
--- a/spec/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check_spec.rb
+++ /dev/null
@@ -1,99 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::MergeRequestAssigneesMigrationProgressCheck do
- context 'rescheduling' do
- context 'when there are ongoing and no dead jobs' do
- it 'reschedules check' do
- allow(Gitlab::BackgroundMigration).to receive(:exists?)
- .with('PopulateMergeRequestAssigneesTable')
- .and_return(true)
-
- allow(Gitlab::BackgroundMigration).to receive(:dead_jobs?)
- .with('PopulateMergeRequestAssigneesTable')
- .and_return(false)
-
- expect(BackgroundMigrationWorker).to receive(:perform_in).with(described_class::RESCHEDULE_DELAY, described_class.name)
-
- described_class.new.perform
- end
- end
-
- context 'when there are ongoing and dead jobs' do
- it 'reschedules check' do
- allow(Gitlab::BackgroundMigration).to receive(:exists?)
- .with('PopulateMergeRequestAssigneesTable')
- .and_return(true)
-
- allow(Gitlab::BackgroundMigration).to receive(:dead_jobs?)
- .with('PopulateMergeRequestAssigneesTable')
- .and_return(true)
-
- expect(BackgroundMigrationWorker).to receive(:perform_in).with(described_class::RESCHEDULE_DELAY, described_class.name)
-
- described_class.new.perform
- end
- end
-
- context 'when there retrying jobs and no scheduled' do
- it 'reschedules check' do
- allow(Gitlab::BackgroundMigration).to receive(:exists?)
- .with('PopulateMergeRequestAssigneesTable')
- .and_return(false)
-
- allow(Gitlab::BackgroundMigration).to receive(:retrying_jobs?)
- .with('PopulateMergeRequestAssigneesTable')
- .and_return(true)
-
- expect(BackgroundMigrationWorker).to receive(:perform_in).with(described_class::RESCHEDULE_DELAY, described_class.name)
-
- described_class.new.perform
- end
- end
- end
-
- context 'when there are no scheduled, or retrying or dead' do
- before do
- stub_feature_flags(multiple_merge_request_assignees: false)
- end
-
- it 'enables feature' do
- allow(Gitlab::BackgroundMigration).to receive(:exists?)
- .with('PopulateMergeRequestAssigneesTable')
- .and_return(false)
-
- allow(Gitlab::BackgroundMigration).to receive(:retrying_jobs?)
- .with('PopulateMergeRequestAssigneesTable')
- .and_return(false)
-
- allow(Gitlab::BackgroundMigration).to receive(:dead_jobs?)
- .with('PopulateMergeRequestAssigneesTable')
- .and_return(false)
-
- described_class.new.perform
-
- expect(Feature.enabled?(:multiple_merge_request_assignees, type: :licensed)).to eq(true)
- end
- end
-
- context 'when there are only dead jobs' do
- it 'raises DeadJobsError error' do
- allow(Gitlab::BackgroundMigration).to receive(:exists?)
- .with('PopulateMergeRequestAssigneesTable')
- .and_return(false)
-
- allow(Gitlab::BackgroundMigration).to receive(:retrying_jobs?)
- .with('PopulateMergeRequestAssigneesTable')
- .and_return(false)
-
- allow(Gitlab::BackgroundMigration).to receive(:dead_jobs?)
- .with('PopulateMergeRequestAssigneesTable')
- .and_return(true)
-
- expect { described_class.new.perform }
- .to raise_error(described_class::DeadJobsError,
- "Only dead background jobs in the queue for #{described_class::WORKER}")
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/migrate_legacy_artifacts_spec.rb b/spec/lib/gitlab/background_migration/migrate_legacy_artifacts_spec.rb
index 08f2b2a043e..5c93e69b5e5 100644
--- a/spec/lib/gitlab/background_migration/migrate_legacy_artifacts_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_legacy_artifacts_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::MigrateLegacyArtifacts do
+RSpec.describe Gitlab::BackgroundMigration::MigrateLegacyArtifacts, schema: 20210210093901 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:pipelines) { table(:ci_pipelines) }
diff --git a/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb b/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb
new file mode 100644
index 00000000000..1c62d703a34
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::MoveContainerRegistryEnabledToProjectFeature, :migration, schema: 2021_02_26_120851 do
+ let(:enabled) { 20 }
+ let(:disabled) { 0 }
+
+ let(:namespaces) { table(:namespaces) }
+ let(:project_features) { table(:project_features) }
+ let(:projects) { table(:projects) }
+
+ let(:namespace) { namespaces.create!(name: 'user', path: 'user') }
+ let!(:project1) { projects.create!(namespace_id: namespace.id) }
+ let!(:project2) { projects.create!(namespace_id: namespace.id) }
+ let!(:project3) { projects.create!(namespace_id: namespace.id) }
+ let!(:project4) { projects.create!(namespace_id: namespace.id) }
+
+ # pages_access_level cannot be null.
+ let(:non_null_project_features) { { pages_access_level: enabled } }
+ let!(:project_feature1) { project_features.create!(project_id: project1.id, **non_null_project_features) }
+ let!(:project_feature2) { project_features.create!(project_id: project2.id, **non_null_project_features) }
+ let!(:project_feature3) { project_features.create!(project_id: project3.id, **non_null_project_features) }
+
+ describe '#perform' do
+ before do
+ project1.update!(container_registry_enabled: true)
+ project2.update!(container_registry_enabled: false)
+ project3.update!(container_registry_enabled: nil)
+ project4.update!(container_registry_enabled: true)
+ end
+
+ it 'copies values to project_features' do
+ expect(project1.container_registry_enabled).to eq(true)
+ expect(project2.container_registry_enabled).to eq(false)
+ expect(project3.container_registry_enabled).to eq(nil)
+ expect(project4.container_registry_enabled).to eq(true)
+
+ expect(project_feature1.container_registry_access_level).to eq(disabled)
+ expect(project_feature2.container_registry_access_level).to eq(disabled)
+ expect(project_feature3.container_registry_access_level).to eq(disabled)
+
+ expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |logger|
+ expect(logger).to receive(:info)
+ .with(message: "#{described_class}: Copied container_registry_enabled values for projects with IDs between #{project1.id}..#{project4.id}")
+
+ expect(logger).not_to receive(:info)
+ end
+
+ subject.perform(project1.id, project4.id)
+
+ expect(project1.reload.container_registry_enabled).to eq(true)
+ expect(project2.reload.container_registry_enabled).to eq(false)
+ expect(project3.reload.container_registry_enabled).to eq(nil)
+ expect(project4.container_registry_enabled).to eq(true)
+
+ expect(project_feature1.reload.container_registry_access_level).to eq(enabled)
+ expect(project_feature2.reload.container_registry_access_level).to eq(disabled)
+ expect(project_feature3.reload.container_registry_access_level).to eq(disabled)
+ end
+
+ context 'when no projects exist in range' do
+ it 'does not fail' do
+ expect(project1.container_registry_enabled).to eq(true)
+ expect(project_feature1.container_registry_access_level).to eq(disabled)
+
+ expect { subject.perform(-1, -2) }.not_to raise_error
+
+ expect(project1.container_registry_enabled).to eq(true)
+ expect(project_feature1.container_registry_access_level).to eq(disabled)
+ end
+ end
+
+ context 'when projects in range all have nil container_registry_enabled' do
+ it 'does not fail' do
+ expect(project3.container_registry_enabled).to eq(nil)
+ expect(project_feature3.container_registry_access_level).to eq(disabled)
+
+ expect { subject.perform(project3.id, project3.id) }.not_to raise_error
+
+ expect(project3.container_registry_enabled).to eq(nil)
+ expect(project_feature3.container_registry_access_level).to eq(disabled)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/populate_finding_uuid_for_vulnerability_feedback_spec.rb b/spec/lib/gitlab/background_migration/populate_finding_uuid_for_vulnerability_feedback_spec.rb
index 8e74935e127..07b1d99d333 100644
--- a/spec/lib/gitlab/background_migration/populate_finding_uuid_for_vulnerability_feedback_spec.rb
+++ b/spec/lib/gitlab/background_migration/populate_finding_uuid_for_vulnerability_feedback_spec.rb
@@ -27,12 +27,33 @@ RSpec.describe Gitlab::BackgroundMigration::PopulateFindingUuidForVulnerabilityF
let(:finding_1) { finding_creator.call(sast_report, location_fingerprint_1) }
let(:finding_2) { finding_creator.call(dast_report, location_fingerprint_2) }
let(:finding_3) { finding_creator.call(secret_detection_report, location_fingerprint_3) }
- let(:uuid_1_components) { ['sast', identifier.fingerprint, location_fingerprint_1, project.id].join('-') }
- let(:uuid_2_components) { ['dast', identifier.fingerprint, location_fingerprint_2, project.id].join('-') }
- let(:uuid_3_components) { ['secret_detection', identifier.fingerprint, location_fingerprint_3, project.id].join('-') }
- let(:expected_uuid_1) { Gitlab::UUID.v5(uuid_1_components) }
- let(:expected_uuid_2) { Gitlab::UUID.v5(uuid_2_components) }
- let(:expected_uuid_3) { Gitlab::UUID.v5(uuid_3_components) }
+ let(:expected_uuid_1) do
+ Security::VulnerabilityUUID.generate(
+ report_type: 'sast',
+ primary_identifier_fingerprint: identifier.fingerprint,
+ location_fingerprint: location_fingerprint_1,
+ project_id: project.id
+ )
+ end
+
+ let(:expected_uuid_2) do
+ Security::VulnerabilityUUID.generate(
+ report_type: 'dast',
+ primary_identifier_fingerprint: identifier.fingerprint,
+ location_fingerprint: location_fingerprint_2,
+ project_id: project.id
+ )
+ end
+
+ let(:expected_uuid_3) do
+ Security::VulnerabilityUUID.generate(
+ report_type: 'secret_detection',
+ primary_identifier_fingerprint: identifier.fingerprint,
+ location_fingerprint: location_fingerprint_3,
+ project_id: project.id
+ )
+ end
+
let(:finding_creator) do
-> (report_type, location_fingerprint) do
findings.create!(
diff --git a/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb b/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb
new file mode 100644
index 00000000000..990ef4fbe6a
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb
@@ -0,0 +1,149 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrencesUuid, schema: 20201110110454 do
+ let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let(:users) { table(:users) }
+ let(:user) { create_user! }
+ let(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) }
+ let(:scanners) { table(:vulnerability_scanners) }
+ let(:scanner) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
+ let(:different_scanner) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') }
+ let(:vulnerabilities) { table(:vulnerabilities) }
+ let(:vulnerabilities_findings) { table(:vulnerability_occurrences) }
+ let(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
+ let(:vulnerability_identifier) do
+ vulnerability_identifiers.create!(
+ project_id: project.id,
+ external_type: 'uuid-v5',
+ external_id: 'uuid-v5',
+ fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a',
+ name: 'Identifier for UUIDv5')
+ end
+
+ let(:different_vulnerability_identifier) do
+ vulnerability_identifiers.create!(
+ project_id: project.id,
+ external_type: 'uuid-v4',
+ external_id: 'uuid-v4',
+ fingerprint: '772da93d34a1ba010bcb5efa9fb6f8e01bafcc89',
+ name: 'Identifier for UUIDv4')
+ end
+
+ let!(:vulnerability_for_uuidv4) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let!(:vulnerability_for_uuidv5) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let(:known_uuid_v5) { "77211ed6-7dff-5f6b-8c9a-da89ad0a9b60" }
+ let(:known_uuid_v4) { "b3cc2518-5446-4dea-871c-89d5e999c1ac" }
+ let(:desired_uuid_v5) { "3ca8ad45-6344-508b-b5e3-306a3bd6c6ba" }
+
+ subject { described_class.new.perform(finding.id, finding.id) }
+
+ context "when finding has a UUIDv4" do
+ before do
+ @uuid_v4 = create_finding!(
+ vulnerability_id: vulnerability_for_uuidv4.id,
+ project_id: project.id,
+ scanner_id: different_scanner.id,
+ primary_identifier_id: different_vulnerability_identifier.id,
+ report_type: 0, # "sast"
+ location_fingerprint: "fa18f432f1d56675f4098d318739c3cd5b14eb3e",
+ uuid: known_uuid_v4
+ )
+ end
+
+ let(:finding) { @uuid_v4 }
+
+ it "replaces it with UUIDv5" do
+ expect(vulnerabilities_findings.pluck(:uuid)).to eq([known_uuid_v4])
+
+ subject
+
+ expect(vulnerabilities_findings.pluck(:uuid)).to eq([desired_uuid_v5])
+ end
+ end
+
+ context "when finding has a UUIDv5" do
+ before do
+ @uuid_v5 = create_finding!(
+ vulnerability_id: vulnerability_for_uuidv5.id,
+ project_id: project.id,
+ scanner_id: scanner.id,
+ primary_identifier_id: vulnerability_identifier.id,
+ report_type: 0, # "sast"
+ location_fingerprint: "838574be0210968bf6b9f569df9c2576242cbf0a",
+ uuid: known_uuid_v5
+ )
+ end
+
+ let(:finding) { @uuid_v5 }
+
+ it "stays the same" do
+ expect(vulnerabilities_findings.pluck(:uuid)).to eq([known_uuid_v5])
+
+ subject
+
+ expect(vulnerabilities_findings.pluck(:uuid)).to eq([known_uuid_v5])
+ end
+ end
+
+ private
+
+ def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
+ vulnerabilities.create!(
+ project_id: project_id,
+ author_id: author_id,
+ title: title,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type
+ )
+ end
+
+ # rubocop:disable Metrics/ParameterLists
+ def create_finding!(
+ vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:,
+ name: "test", severity: 7, confidence: 7, report_type: 0,
+ project_fingerprint: '123qweasdzxc', location_fingerprint: 'test',
+ metadata_version: 'test', raw_metadata: 'test', uuid: 'test')
+ vulnerabilities_findings.create!(
+ vulnerability_id: vulnerability_id,
+ project_id: project_id,
+ name: name,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type,
+ project_fingerprint: project_fingerprint,
+      scanner_id: scanner_id,
+      primary_identifier_id: primary_identifier_id,
+ location_fingerprint: location_fingerprint,
+ metadata_version: metadata_version,
+ raw_metadata: raw_metadata,
+ uuid: uuid
+ )
+ end
+ # rubocop:enable Metrics/ParameterLists
+
+ def create_user!(name: "Example User", email: "user@example.com", user_type: nil, created_at: Time.zone.now, confirmed_at: Time.zone.now)
+ users.create!(
+ name: name,
+ email: email,
+ username: name,
+ projects_limit: 0,
+ user_type: user_type,
+ confirmed_at: confirmed_at
+ )
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/set_default_iteration_cadences_spec.rb b/spec/lib/gitlab/background_migration/set_default_iteration_cadences_spec.rb
new file mode 100644
index 00000000000..46c919f0854
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/set_default_iteration_cadences_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::SetDefaultIterationCadences, schema: 20201231133921 do
+ let(:namespaces) { table(:namespaces) }
+ let(:iterations) { table(:sprints) }
+ let(:iterations_cadences) { table(:iterations_cadences) }
+
+ describe '#perform' do
+ context 'when no iteration cadences exists' do
+ let!(:group_1) { namespaces.create!(name: 'group 1', path: 'group-1') }
+ let!(:group_2) { namespaces.create!(name: 'group 2', path: 'group-2') }
+ let!(:group_3) { namespaces.create!(name: 'group 3', path: 'group-3') }
+
+ let!(:iteration_1) { iterations.create!(group_id: group_1.id, iid: 1, title: 'Iteration 1', start_date: 10.days.ago, due_date: 8.days.ago) }
+ let!(:iteration_2) { iterations.create!(group_id: group_3.id, iid: 1, title: 'Iteration 2', start_date: 10.days.ago, due_date: 8.days.ago) }
+ let!(:iteration_3) { iterations.create!(group_id: group_3.id, iid: 1, title: 'Iteration 3', start_date: 5.days.ago, due_date: 2.days.ago) }
+
+ subject { described_class.new.perform(group_1.id, group_2.id, group_3.id, namespaces.last.id + 1) }
+
+ before do
+ subject
+ end
+
+ it 'creates iterations_cadence records for the requested groups' do
+ expect(iterations_cadences.count).to eq(2)
+ end
+
+ it 'assigns the iteration cadences to the iterations correctly' do
+ iterations_cadence = iterations_cadences.find_by(group_id: group_1.id)
+ iteration_records = iterations.where(iterations_cadence_id: iterations_cadence.id)
+
+ expect(iterations_cadence.start_date).to eq(iteration_1.start_date)
+ expect(iterations_cadence.last_run_date).to eq(iteration_1.start_date)
+ expect(iterations_cadence.title).to eq('group 1 Iterations')
+ expect(iteration_records.size).to eq(1)
+ expect(iteration_records.first.id).to eq(iteration_1.id)
+
+ iterations_cadence = iterations_cadences.find_by(group_id: group_3.id)
+ iteration_records = iterations.where(iterations_cadence_id: iterations_cadence.id)
+
+ expect(iterations_cadence.start_date).to eq(iteration_3.start_date)
+ expect(iterations_cadence.last_run_date).to eq(iteration_3.start_date)
+ expect(iterations_cadence.title).to eq('group 3 Iterations')
+ expect(iteration_records.size).to eq(2)
+ expect(iteration_records.first.id).to eq(iteration_2.id)
+ expect(iteration_records.second.id).to eq(iteration_3.id)
+ end
+
+ it 'does not call Group class' do
+ expect(::Group).not_to receive(:where)
+
+ subject
+ end
+ end
+
+ context 'when an iteration cadence exists for a group' do
+ let!(:group) { namespaces.create!(name: 'group', path: 'group') }
+
+ let!(:iterations_cadence_1) { iterations_cadences.create!(group_id: group.id, start_date: 2.days.ago, title: 'Cadence 1') }
+
+ let!(:iteration_1) { iterations.create!(group_id: group.id, iid: 1, title: 'Iteration 1', start_date: 10.days.ago, due_date: 8.days.ago) }
+ let!(:iteration_2) { iterations.create!(group_id: group.id, iterations_cadence_id: iterations_cadence_1.id, iid: 2, title: 'Iteration 2', start_date: 5.days.ago, due_date: 3.days.ago) }
+
+ subject { described_class.new.perform(group.id) }
+
+ it 'does not create a new iterations_cadence' do
+ expect { subject }.not_to change { iterations_cadences.count }
+ end
+
+ it 'assigns iteration cadences to iterations if needed' do
+ subject
+
+ expect(iteration_1.reload.iterations_cadence_id).to eq(iterations_cadence_1.id)
+ expect(iteration_2.reload.iterations_cadence_id).to eq(iterations_cadence_1.id)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/checks/branch_check_spec.rb b/spec/lib/gitlab/checks/branch_check_spec.rb
index 822bdc8389d..3086cb1bd33 100644
--- a/spec/lib/gitlab/checks/branch_check_spec.rb
+++ b/spec/lib/gitlab/checks/branch_check_spec.rb
@@ -70,6 +70,82 @@ RSpec.describe Gitlab::Checks::BranchCheck do
expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You are not allowed to push code to protected branches on this project.')
end
+ context 'when user has push access' do
+ before do
+ allow(user_access)
+ .to receive(:can_push_to_branch?)
+ .and_return(true)
+ end
+
+        context 'if the protected branch allows force push' do
+ before do
+ allow(ProtectedBranch)
+ .to receive(:allow_force_push?)
+ .with(project, 'master')
+ .and_return(true)
+ end
+
+ it 'allows force push' do
+ expect(Gitlab::Checks::ForcePush).to receive(:force_push?).and_return(true)
+
+ expect { subject.validate! }.not_to raise_error
+ end
+ end
+
+        context 'if the protected branch does not allow force push' do
+ before do
+ allow(ProtectedBranch)
+ .to receive(:allow_force_push?)
+ .with(project, 'master')
+ .and_return(false)
+ end
+
+ it 'prevents force push' do
+ expect(Gitlab::Checks::ForcePush).to receive(:force_push?).and_return(true)
+
+            expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError)
+ end
+ end
+ end
+
+ context 'when user does not have push access' do
+ before do
+ allow(user_access)
+ .to receive(:can_push_to_branch?)
+ .and_return(false)
+ end
+
+        context 'if the protected branch allows force push' do
+ before do
+ allow(ProtectedBranch)
+ .to receive(:allow_force_push?)
+ .with(project, 'master')
+ .and_return(true)
+ end
+
+ it 'prevents force push' do
+ expect(Gitlab::Checks::ForcePush).to receive(:force_push?).and_return(true)
+
+ expect { subject.validate! }.to raise_error
+ end
+ end
+
+      context 'if the protected branch does not allow force push' do
+ before do
+ allow(ProtectedBranch)
+ .to receive(:allow_force_push?)
+ .with(project, 'master')
+ .and_return(false)
+ end
+
+ it 'prevents force push' do
+ expect(Gitlab::Checks::ForcePush).to receive(:force_push?).and_return(true)
+
+ expect { subject.validate! }.to raise_error
+ end
+ end
+ end
+
context 'when project repository is empty' do
let(:project) { create(:project) }
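
The added contexts cover all four combinations of push access and the branch-level force-push setting; only the case where both hold lets the force push through. A minimal Ruby sketch of that gate, condensing the behaviour asserted above rather than the real BranchCheck internals:

    # Illustrative only: a force push is accepted only when the user can push
    # to the branch AND the protected branch allows force pushes; any other
    # combination raises Gitlab::GitAccess::ForbiddenError in the spec.
    def force_push_permitted?(can_push_to_branch, allow_force_push)
      can_push_to_branch && allow_force_push
    end

    force_push_permitted?(true, true)   # => true, validate! does not raise
    force_push_permitted?(true, false)  # => false
    force_push_permitted?(false, true)  # => false
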
diff --git a/spec/lib/gitlab/checks/lfs_check_spec.rb b/spec/lib/gitlab/checks/lfs_check_spec.rb
index 713858e0e35..19c1d820dff 100644
--- a/spec/lib/gitlab/checks/lfs_check_spec.rb
+++ b/spec/lib/gitlab/checks/lfs_check_spec.rb
@@ -39,13 +39,26 @@ RSpec.describe Gitlab::Checks::LfsCheck do
end
end
- context 'deletion' do
- let(:changes) { { oldrev: oldrev, ref: ref } }
+ context 'with deletion' do
+ shared_examples 'a skipped integrity check' do
+ it 'skips integrity check' do
+ expect(project.repository).not_to receive(:new_objects)
+ expect_any_instance_of(Gitlab::Git::LfsChanges).not_to receive(:new_pointers)
+
+ subject.validate!
+ end
+ end
- it 'skips integrity check' do
- expect(project.repository).not_to receive(:new_objects)
+ context 'with missing newrev' do
+ it_behaves_like 'a skipped integrity check' do
+ let(:changes) { { oldrev: oldrev, ref: ref } }
+ end
+ end
- subject.validate!
+ context 'with blank newrev' do
+ it_behaves_like 'a skipped integrity check' do
+ let(:changes) { { oldrev: oldrev, newrev: Gitlab::Git::BLANK_SHA, ref: ref } }
+ end
end
end
diff --git a/spec/lib/gitlab/ci/artifacts/metrics_spec.rb b/spec/lib/gitlab/ci/artifacts/metrics_spec.rb
new file mode 100644
index 00000000000..3a2095498ec
--- /dev/null
+++ b/spec/lib/gitlab/ci/artifacts/metrics_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Artifacts::Metrics, :prometheus do
+ let(:metrics) { described_class.new }
+
+ describe '#increment_destroyed_artifacts' do
+ context 'when incrementing by more than one' do
+ let(:counter) { metrics.send(:destroyed_artifacts_counter) }
+
+ it 'increments a single counter' do
+ subject.increment_destroyed_artifacts(10)
+ subject.increment_destroyed_artifacts(20)
+ subject.increment_destroyed_artifacts(30)
+
+ expect(counter.get).to eq 60
+ expect(counter.values.count).to eq 1
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/build/cache_spec.rb b/spec/lib/gitlab/ci/build/cache_spec.rb
new file mode 100644
index 00000000000..9188045988b
--- /dev/null
+++ b/spec/lib/gitlab/ci/build/cache_spec.rb
@@ -0,0 +1,105 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Build::Cache do
+ describe '.initialize' do
+ context 'when the multiple cache feature flag is disabled' do
+ before do
+ stub_feature_flags(multiple_cache_per_job: false)
+ end
+
+ it 'instantiates a cache seed' do
+ cache_config = { key: 'key-a' }
+ pipeline = double(::Ci::Pipeline)
+ cache_seed = double(Gitlab::Ci::Pipeline::Seed::Build::Cache)
+ allow(Gitlab::Ci::Pipeline::Seed::Build::Cache).to receive(:new).and_return(cache_seed)
+
+ cache = described_class.new(cache_config, pipeline)
+
+ expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, cache_config)
+ expect(cache.instance_variable_get(:@cache)).to eq(cache_seed)
+ end
+ end
+
+ context 'when the multiple cache feature flag is enabled' do
+ context 'when the cache is an array' do
+ it 'instantiates an array of cache seeds' do
+ cache_config = [{ key: 'key-a' }, { key: 'key-b' }]
+ pipeline = double(::Ci::Pipeline)
+ cache_seed_a = double(Gitlab::Ci::Pipeline::Seed::Build::Cache)
+ cache_seed_b = double(Gitlab::Ci::Pipeline::Seed::Build::Cache)
+ allow(Gitlab::Ci::Pipeline::Seed::Build::Cache).to receive(:new).and_return(cache_seed_a, cache_seed_b)
+
+ cache = described_class.new(cache_config, pipeline)
+
+ expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, { key: 'key-a' })
+ expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, { key: 'key-b' })
+ expect(cache.instance_variable_get(:@cache)).to eq([cache_seed_a, cache_seed_b])
+ end
+ end
+
+ context 'when the cache is a hash' do
+ it 'instantiates a cache seed' do
+ cache_config = { key: 'key-a' }
+ pipeline = double(::Ci::Pipeline)
+ cache_seed = double(Gitlab::Ci::Pipeline::Seed::Build::Cache)
+ allow(Gitlab::Ci::Pipeline::Seed::Build::Cache).to receive(:new).and_return(cache_seed)
+
+ cache = described_class.new(cache_config, pipeline)
+
+ expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, cache_config)
+ expect(cache.instance_variable_get(:@cache)).to eq([cache_seed])
+ end
+ end
+ end
+ end
+
+ describe '#cache_attributes' do
+ context 'when the multiple cache feature flag is disabled' do
+ before do
+ stub_feature_flags(multiple_cache_per_job: false)
+ end
+
+ it "returns the cache seed's build attributes" do
+ cache_config = { key: 'key-a' }
+ pipeline = double(::Ci::Pipeline)
+ cache = described_class.new(cache_config, pipeline)
+
+ attributes = cache.cache_attributes
+
+ expect(attributes).to eq({
+ options: { cache: { key: 'key-a' } }
+ })
+ end
+ end
+
+ context 'when the multiple cache feature flag is enabled' do
+ context 'when there are no caches' do
+ it 'returns an empty hash' do
+ cache_config = []
+ pipeline = double(::Ci::Pipeline)
+ cache = described_class.new(cache_config, pipeline)
+
+ attributes = cache.cache_attributes
+
+ expect(attributes).to eq({})
+ end
+ end
+
+ context 'when there are caches' do
+ it 'returns the structured attributes for the caches' do
+ cache_config = [{ key: 'key-a' }, { key: 'key-b' }]
+ pipeline = double(::Ci::Pipeline)
+ cache = described_class.new(cache_config, pipeline)
+
+ attributes = cache.cache_attributes
+
+ expect(attributes).to eq({
+ options: { cache: cache_config }
+ })
+ end
+ end
+ end
+ end
+end
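
Reconstructed from the examples above, the return shapes expected from #cache_attributes look roughly like the literals below; the keys and values come from the spec, the variable names are only illustrative:

    single_cache    = { key: 'key-a' }
    multiple_caches = [{ key: 'key-a' }, { key: 'key-b' }]

    flag_enabled_attributes  = { options: { cache: multiple_caches } } # array form
    flag_disabled_attributes = { options: { cache: single_cache } }    # legacy single-hash form
    empty_attributes         = {}                                      # no caches configured
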
diff --git a/spec/lib/gitlab/ci/build/context/build_spec.rb b/spec/lib/gitlab/ci/build/context/build_spec.rb
index 61ca8e759b5..46447231424 100644
--- a/spec/lib/gitlab/ci/build/context/build_spec.rb
+++ b/spec/lib/gitlab/ci/build/context/build_spec.rb
@@ -9,7 +9,9 @@ RSpec.describe Gitlab::Ci::Build::Context::Build do
let(:context) { described_class.new(pipeline, seed_attributes) }
describe '#variables' do
- subject { context.variables }
+ subject { context.variables.to_hash }
+
+ it { expect(context.variables).to be_instance_of(Gitlab::Ci::Variables::Collection) }
it { is_expected.to include('CI_COMMIT_REF_NAME' => 'master') }
it { is_expected.to include('CI_PIPELINE_IID' => pipeline.iid.to_s) }
diff --git a/spec/lib/gitlab/ci/build/context/global_spec.rb b/spec/lib/gitlab/ci/build/context/global_spec.rb
index 7394708f9b6..61f2b90426d 100644
--- a/spec/lib/gitlab/ci/build/context/global_spec.rb
+++ b/spec/lib/gitlab/ci/build/context/global_spec.rb
@@ -9,7 +9,9 @@ RSpec.describe Gitlab::Ci::Build::Context::Global do
let(:context) { described_class.new(pipeline, yaml_variables: yaml_variables) }
describe '#variables' do
- subject { context.variables }
+ subject { context.variables.to_hash }
+
+ it { expect(context.variables).to be_instance_of(Gitlab::Ci::Variables::Collection) }
it { is_expected.to include('CI_COMMIT_REF_NAME' => 'master') }
it { is_expected.to include('CI_PIPELINE_IID' => pipeline.iid.to_s) }
diff --git a/spec/lib/gitlab/ci/build/policy/variables_spec.rb b/spec/lib/gitlab/ci/build/policy/variables_spec.rb
index f692aa6146e..6c8c968dc0c 100644
--- a/spec/lib/gitlab/ci/build/policy/variables_spec.rb
+++ b/spec/lib/gitlab/ci/build/policy/variables_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::Ci::Build::Policy::Variables do
let(:seed) do
double('build seed',
to_resource: ci_build,
- variables: ci_build.scoped_variables_hash
+ variables: ci_build.scoped_variables
)
end
@@ -91,7 +91,7 @@ RSpec.describe Gitlab::Ci::Build::Policy::Variables do
let(:seed) do
double('bridge seed',
to_resource: bridge,
- variables: ci_build.scoped_variables_hash
+ variables: ci_build.scoped_variables
)
end
diff --git a/spec/lib/gitlab/ci/build/rules/rule_spec.rb b/spec/lib/gitlab/ci/build/rules/rule_spec.rb
index 5694cd5d0a0..6f3c9278677 100644
--- a/spec/lib/gitlab/ci/build/rules/rule_spec.rb
+++ b/spec/lib/gitlab/ci/build/rules/rule_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule do
let(:seed) do
double('build seed',
to_resource: ci_build,
- variables: ci_build.scoped_variables_hash
+ variables: ci_build.scoped_variables
)
end
diff --git a/spec/lib/gitlab/ci/build/rules_spec.rb b/spec/lib/gitlab/ci/build/rules_spec.rb
index 0b50def05d4..1d5bdf30278 100644
--- a/spec/lib/gitlab/ci/build/rules_spec.rb
+++ b/spec/lib/gitlab/ci/build/rules_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Gitlab::Ci::Build::Rules do
let(:seed) do
double('build seed',
to_resource: ci_build,
- variables: ci_build.scoped_variables_hash
+ variables: ci_build.scoped_variables
)
end
diff --git a/spec/lib/gitlab/ci/charts_spec.rb b/spec/lib/gitlab/ci/charts_spec.rb
index 46d7d4a58f0..3a82d058819 100644
--- a/spec/lib/gitlab/ci/charts_spec.rb
+++ b/spec/lib/gitlab/ci/charts_spec.rb
@@ -98,7 +98,12 @@ RSpec.describe Gitlab::Ci::Charts do
subject { chart.total }
before do
- create(:ci_empty_pipeline, project: project, duration: 120)
+      # The created_at time of the pipeline created below can end up
+      # later than the 'today' time objects built above, which can make
+      # the queried counts drop to zero when the test runs close to
+      # midnight on the CI system, so we explicitly set the time to a
+      # day earlier.
+ create(:ci_empty_pipeline, project: project, duration: 120, created_at: today - 1.day)
end
it 'uses a utc time zone for range times' do
diff --git a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
index b3b7901074a..179578fe0a8 100644
--- a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
@@ -244,6 +244,52 @@ RSpec.describe Gitlab::Ci::Config::Entry::Bridge do
end
end
end
+
+ context 'when bridge config contains parallel' do
+ let(:config) { { trigger: 'some/project', parallel: parallel_config } }
+
+ context 'when parallel config is a number' do
+ let(:parallel_config) { 2 }
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns an error message' do
+ expect(subject.errors)
+ .to include(/cannot use "parallel: <number>"/)
+ end
+ end
+ end
+
+ context 'when parallel config is a matrix' do
+ let(:parallel_config) do
+ { matrix: [{ PROVIDER: 'aws', STACK: %w[monitoring app1] },
+ { PROVIDER: 'gcp', STACK: %w[data] }] }
+ end
+
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+
+ describe '#value' do
+        it 'returns a bridge job configuration' do
+ expect(subject.value).to eq(
+ name: :my_bridge,
+ trigger: { project: 'some/project' },
+ ignore: false,
+ stage: 'test',
+ only: { refs: %w[branches tags] },
+ parallel: { matrix: [{ 'PROVIDER' => ['aws'], 'STACK' => %w(monitoring app1) },
+ { 'PROVIDER' => ['gcp'], 'STACK' => %w(data) }] },
+ variables: {},
+ scheduling_type: :stage
+ )
+ end
+ end
+ end
+ end
end
describe '#manual_action?' do
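
The new bridge contexts above accept parallel only in its matrix form. In the same hash notation the spec uses (trigger target and matrix values are placeholders taken from the examples):

    matrix_bridge  = { trigger: 'some/project',
                       parallel: { matrix: [{ 'PROVIDER' => 'aws', 'STACK' => %w[monitoring app1] }] } } # valid
    numeric_bridge = { trigger: 'some/project', parallel: 2 } # invalid: cannot use "parallel: <number>"
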
diff --git a/spec/lib/gitlab/ci/config/entry/cache_spec.rb b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
index 247f4b63910..064990667d5 100644
--- a/spec/lib/gitlab/ci/config/entry/cache_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
@@ -7,225 +7,285 @@ RSpec.describe Gitlab::Ci::Config::Entry::Cache do
subject(:entry) { described_class.new(config) }
- describe 'validations' do
+ context 'with multiple caches' do
before do
entry.compose!
end
- context 'when entry config value is correct' do
- let(:policy) { nil }
- let(:key) { 'some key' }
- let(:when_config) { nil }
-
- let(:config) do
- {
- key: key,
- untracked: true,
- paths: ['some/path/']
- }.tap do |config|
- config[:policy] = policy if policy
- config[:when] = when_config if when_config
+ describe '#valid?' do
+ context 'when configuration is valid with a single cache' do
+ let(:config) { { key: 'key', paths: ["logs/"], untracked: true } }
+
+ it 'is valid' do
+ expect(entry).to be_valid
end
end
- describe '#value' do
- shared_examples 'hash key value' do
- it 'returns hash value' do
- expect(entry.value).to eq(key: key, untracked: true, paths: ['some/path/'], policy: 'pull-push', when: 'on_success')
- end
+ context 'when configuration is valid with multiple caches' do
+ let(:config) do
+ [
+ { key: 'key', paths: ["logs/"], untracked: true },
+ { key: 'key2', paths: ["logs/"], untracked: true },
+ { key: 'key3', paths: ["logs/"], untracked: true }
+ ]
end
- it_behaves_like 'hash key value'
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
- context 'with files' do
- let(:key) { { files: %w[a-file other-file] } }
+ context 'when configuration is not a Hash or Array' do
+ let(:config) { 'invalid' }
- it_behaves_like 'hash key value'
+ it 'is invalid' do
+ expect(entry).not_to be_valid
end
+ end
- context 'with files and prefix' do
- let(:key) { { files: %w[a-file other-file], prefix: 'prefix-value' } }
+ context 'when entry values contain more than four caches' do
+ let(:config) do
+ [
+ { key: 'key', paths: ["logs/"], untracked: true },
+ { key: 'key2', paths: ["logs/"], untracked: true },
+ { key: 'key3', paths: ["logs/"], untracked: true },
+ { key: 'key4', paths: ["logs/"], untracked: true },
+ { key: 'key5', paths: ["logs/"], untracked: true }
+ ]
+ end
- it_behaves_like 'hash key value'
+ it 'is invalid' do
+ expect(entry.errors).to eq(["caches config no more than 4 caches can be created"])
+ expect(entry).not_to be_valid
end
+ end
+ end
+ end
+
+ context 'with a single cache' do
+ before do
+ stub_feature_flags(multiple_cache_per_job: false)
+ end
+ describe 'validations' do
+ before do
+ entry.compose!
+ end
- context 'with prefix' do
- let(:key) { { prefix: 'prefix-value' } }
+ context 'when entry config value is correct' do
+ let(:policy) { nil }
+ let(:key) { 'some key' }
+ let(:when_config) { nil }
- it 'key is nil' do
- expect(entry.value).to match(a_hash_including(key: nil))
+ let(:config) do
+ {
+ key: key,
+ untracked: true,
+ paths: ['some/path/']
+ }.tap do |config|
+ config[:policy] = policy if policy
+ config[:when] = when_config if when_config
end
end
- context 'with `policy`' do
- where(:policy, :result) do
- 'pull-push' | 'pull-push'
- 'push' | 'push'
- 'pull' | 'pull'
- 'unknown' | 'unknown' # invalid
+ describe '#value' do
+ shared_examples 'hash key value' do
+ it 'returns hash value' do
+ expect(entry.value).to eq(key: key, untracked: true, paths: ['some/path/'], policy: 'pull-push', when: 'on_success')
+ end
end
- with_them do
- it { expect(entry.value).to include(policy: result) }
+ it_behaves_like 'hash key value'
+
+ context 'with files' do
+ let(:key) { { files: %w[a-file other-file] } }
+
+ it_behaves_like 'hash key value'
end
- end
- context 'without `policy`' do
- it 'assigns policy to default' do
- expect(entry.value).to include(policy: 'pull-push')
+ context 'with files and prefix' do
+ let(:key) { { files: %w[a-file other-file], prefix: 'prefix-value' } }
+
+ it_behaves_like 'hash key value'
end
- end
- context 'with `when`' do
- where(:when_config, :result) do
- 'on_success' | 'on_success'
- 'on_failure' | 'on_failure'
- 'always' | 'always'
- 'unknown' | 'unknown' # invalid
+ context 'with prefix' do
+ let(:key) { { prefix: 'prefix-value' } }
+
+ it 'key is nil' do
+ expect(entry.value).to match(a_hash_including(key: nil))
+ end
end
- with_them do
- it { expect(entry.value).to include(when: result) }
+ context 'with `policy`' do
+ where(:policy, :result) do
+ 'pull-push' | 'pull-push'
+ 'push' | 'push'
+ 'pull' | 'pull'
+ 'unknown' | 'unknown' # invalid
+ end
+
+ with_them do
+ it { expect(entry.value).to include(policy: result) }
+ end
end
- end
- context 'without `when`' do
- it 'assigns when to default' do
- expect(entry.value).to include(when: 'on_success')
+ context 'without `policy`' do
+ it 'assigns policy to default' do
+ expect(entry.value).to include(policy: 'pull-push')
+ end
end
- end
- end
- describe '#valid?' do
- it { is_expected.to be_valid }
+ context 'with `when`' do
+ where(:when_config, :result) do
+ 'on_success' | 'on_success'
+ 'on_failure' | 'on_failure'
+ 'always' | 'always'
+ 'unknown' | 'unknown' # invalid
+ end
- context 'with files' do
- let(:key) { { files: %w[a-file other-file] } }
+ with_them do
+ it { expect(entry.value).to include(when: result) }
+ end
+ end
- it { is_expected.to be_valid }
+ context 'without `when`' do
+ it 'assigns when to default' do
+ expect(entry.value).to include(when: 'on_success')
+ end
+ end
end
- end
- context 'with `policy`' do
- where(:policy, :valid) do
- 'pull-push' | true
- 'push' | true
- 'pull' | true
- 'unknown' | false
- end
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+
+ context 'with files' do
+ let(:key) { { files: %w[a-file other-file] } }
- with_them do
- it 'returns expected validity' do
- expect(entry.valid?).to eq(valid)
+ it { is_expected.to be_valid }
end
end
- end
- context 'with `when`' do
- where(:when_config, :valid) do
- 'on_success' | true
- 'on_failure' | true
- 'always' | true
- 'unknown' | false
- end
+ context 'with `policy`' do
+ where(:policy, :valid) do
+ 'pull-push' | true
+ 'push' | true
+ 'pull' | true
+ 'unknown' | false
+ end
- with_them do
- it 'returns expected validity' do
- expect(entry.valid?).to eq(valid)
+ with_them do
+ it 'returns expected validity' do
+ expect(entry.valid?).to eq(valid)
+ end
end
end
- end
- context 'with key missing' do
- let(:config) do
- { untracked: true,
- paths: ['some/path/'] }
+ context 'with `when`' do
+ where(:when_config, :valid) do
+ 'on_success' | true
+ 'on_failure' | true
+ 'always' | true
+ 'unknown' | false
+ end
+
+ with_them do
+ it 'returns expected validity' do
+ expect(entry.valid?).to eq(valid)
+ end
+ end
end
- describe '#value' do
- it 'sets key with the default' do
- expect(entry.value[:key])
- .to eq(Gitlab::Ci::Config::Entry::Key.default)
+ context 'with key missing' do
+ let(:config) do
+ { untracked: true,
+ paths: ['some/path/'] }
+ end
+
+ describe '#value' do
+ it 'sets key with the default' do
+ expect(entry.value[:key])
+ .to eq(Gitlab::Ci::Config::Entry::Key.default)
+ end
end
end
end
- end
- context 'when entry value is not correct' do
- describe '#errors' do
- subject { entry.errors }
+ context 'when entry value is not correct' do
+ describe '#errors' do
+ subject { entry.errors }
- context 'when is not a hash' do
- let(:config) { 'ls' }
+ context 'when is not a hash' do
+ let(:config) { 'ls' }
- it 'reports errors with config value' do
- is_expected.to include 'cache config should be a hash'
+ it 'reports errors with config value' do
+ is_expected.to include 'cache config should be a hash'
+ end
end
- end
- context 'when policy is unknown' do
- let(:config) { { policy: 'unknown' } }
+ context 'when policy is unknown' do
+ let(:config) { { policy: 'unknown' } }
- it 'reports error' do
- is_expected.to include('cache policy should be pull-push, push, or pull')
+ it 'reports error' do
+ is_expected.to include('cache policy should be pull-push, push, or pull')
+ end
end
- end
- context 'when `when` is unknown' do
- let(:config) { { when: 'unknown' } }
+ context 'when `when` is unknown' do
+ let(:config) { { when: 'unknown' } }
- it 'reports error' do
- is_expected.to include('cache when should be on_success, on_failure or always')
+ it 'reports error' do
+ is_expected.to include('cache when should be on_success, on_failure or always')
+ end
end
- end
- context 'when descendants are invalid' do
- context 'with invalid keys' do
- let(:config) { { key: 1 } }
+ context 'when descendants are invalid' do
+ context 'with invalid keys' do
+ let(:config) { { key: 1 } }
- it 'reports error with descendants' do
- is_expected.to include 'key should be a hash, a string or a symbol'
+ it 'reports error with descendants' do
+ is_expected.to include 'key should be a hash, a string or a symbol'
+ end
end
- end
- context 'with empty key' do
- let(:config) { { key: {} } }
+ context 'with empty key' do
+ let(:config) { { key: {} } }
- it 'reports error with descendants' do
- is_expected.to include 'key config missing required keys: files'
+ it 'reports error with descendants' do
+ is_expected.to include 'key config missing required keys: files'
+ end
end
- end
- context 'with invalid files' do
- let(:config) { { key: { files: 'a-file' } } }
+ context 'with invalid files' do
+ let(:config) { { key: { files: 'a-file' } } }
- it 'reports error with descendants' do
- is_expected.to include 'key:files config should be an array of strings'
+ it 'reports error with descendants' do
+ is_expected.to include 'key:files config should be an array of strings'
+ end
end
- end
- context 'with prefix without files' do
- let(:config) { { key: { prefix: 'a-prefix' } } }
+ context 'with prefix without files' do
+ let(:config) { { key: { prefix: 'a-prefix' } } }
- it 'reports error with descendants' do
- is_expected.to include 'key config missing required keys: files'
+ it 'reports error with descendants' do
+ is_expected.to include 'key config missing required keys: files'
+ end
end
- end
- context 'when there is an unknown key present' do
- let(:config) { { key: { unknown: 'a-file' } } }
+ context 'when there is an unknown key present' do
+ let(:config) { { key: { unknown: 'a-file' } } }
- it 'reports error with descendants' do
- is_expected.to include 'key config contains unknown keys: unknown'
+ it 'reports error with descendants' do
+ is_expected.to include 'key config contains unknown keys: unknown'
+ end
end
end
- end
- context 'when there is an unknown key present' do
- let(:config) { { invalid: true } }
+ context 'when there is an unknown key present' do
+ let(:config) { { invalid: true } }
- it 'reports error with descendants' do
- is_expected.to include 'cache config contains unknown keys: invalid'
+ it 'reports error with descendants' do
+ is_expected.to include 'cache config contains unknown keys: invalid'
+ end
end
end
end
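
In the same hash notation, the reworked cache entry accepts either the legacy single hash or an array of up to four caches; a sketch of both forms, with placeholder keys and paths:

    legacy_cache = { key: 'gems', paths: ['vendor/ruby'], untracked: true } # single-hash form

    multiple_caches = [
      { key: 'gems',    paths: ['vendor/ruby'] },
      { key: 'assets',  paths: ['public/'] },
      { key: 'reports', paths: ['coverage/'] }
    ]
    # A fifth entry is rejected with "caches config no more than 4 caches can be created".
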
diff --git a/spec/lib/gitlab/ci/config/entry/environment_spec.rb b/spec/lib/gitlab/ci/config/entry/environment_spec.rb
index 0c18a7fb71e..dd8a79f0d84 100644
--- a/spec/lib/gitlab/ci/config/entry/environment_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/environment_spec.rb
@@ -305,4 +305,37 @@ RSpec.describe Gitlab::Ci::Config::Entry::Environment do
it { expect(entry).to be_valid }
end
end
+
+ describe 'deployment_tier' do
+ let(:config) do
+ { name: 'customer-portal', deployment_tier: deployment_tier }
+ end
+
+ context 'is a string' do
+ let(:deployment_tier) { 'production' }
+
+ it { expect(entry).to be_valid }
+ end
+
+ context 'is a hash' do
+ let(:deployment_tier) { Hash(tier: 'production') }
+
+ it { expect(entry).not_to be_valid }
+ end
+
+ context 'is nil' do
+ let(:deployment_tier) { nil }
+
+ it { expect(entry).to be_valid }
+ end
+
+ context 'is unknown value' do
+ let(:deployment_tier) { 'unknown' }
+
+ it 'is invalid and adds an error' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include("environment deployment tier must be one of #{::Environment.tiers.keys.join(', ')}")
+ end
+ end
+ end
end
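
Based on the validations above, deployment_tier must be one of the known Environment tiers when given as a string; the environment name below is a placeholder:

    valid_tier   = { name: 'customer-portal', deployment_tier: 'production' } # valid
    nil_tier     = { name: 'customer-portal', deployment_tier: nil }          # valid, tier left unset
    unknown_tier = { name: 'customer-portal', deployment_tier: 'unknown' }    # invalid: tier must be a known Environment tier
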
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index a3b5f32b9f9..a4167003987 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -537,7 +537,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
it 'overrides default config' do
expect(entry[:image].value).to eq(name: 'some_image')
- expect(entry[:cache].value).to eq(key: 'test', policy: 'pull-push', when: 'on_success')
+ expect(entry[:cache].value).to eq([key: 'test', policy: 'pull-push', when: 'on_success'])
end
end
@@ -552,7 +552,43 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
it 'uses config from default entry' do
expect(entry[:image].value).to eq 'specified'
- expect(entry[:cache].value).to eq(key: 'test', policy: 'pull-push', when: 'on_success')
+ expect(entry[:cache].value).to eq([key: 'test', policy: 'pull-push', when: 'on_success'])
+ end
+ end
+
+ context 'with multiple_cache_per_job FF disabled' do
+ before do
+ stub_feature_flags(multiple_cache_per_job: false)
+ end
+
+ context 'when job config overrides default config' do
+ before do
+ entry.compose!(deps)
+ end
+
+ let(:config) do
+ { script: 'rspec', image: 'some_image', cache: { key: 'test' } }
+ end
+
+ it 'overrides default config' do
+ expect(entry[:image].value).to eq(name: 'some_image')
+ expect(entry[:cache].value).to eq(key: 'test', policy: 'pull-push', when: 'on_success')
+ end
+ end
+
+ context 'when job config does not override default config' do
+ before do
+ allow(default).to receive('[]').with(:image).and_return(specified)
+
+ entry.compose!(deps)
+ end
+
+ let(:config) { { script: 'ls', cache: { key: 'test' } } }
+
+ it 'uses config from default entry' do
+ expect(entry[:image].value).to eq 'specified'
+ expect(entry[:cache].value).to eq(key: 'test', policy: 'pull-push', when: 'on_success')
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/need_spec.rb b/spec/lib/gitlab/ci/config/entry/need_spec.rb
index 983e95fae42..a0a5dd52ad4 100644
--- a/spec/lib/gitlab/ci/config/entry/need_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/need_spec.rb
@@ -23,7 +23,17 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Need do
describe '#value' do
it 'returns job needs configuration' do
- expect(need.value).to eq(name: 'job_name', artifacts: true)
+ expect(need.value).to eq(name: 'job_name', artifacts: true, optional: false)
+ end
+
+ context 'when the FF ci_needs_optional is disabled' do
+ before do
+ stub_feature_flags(ci_needs_optional: false)
+ end
+
+ it 'returns job needs configuration without `optional`' do
+ expect(need.value).to eq(name: 'job_name', artifacts: true)
+ end
end
end
@@ -58,7 +68,7 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Need do
describe '#value' do
it 'returns job needs configuration' do
- expect(need.value).to eq(name: 'job_name', artifacts: true)
+ expect(need.value).to eq(name: 'job_name', artifacts: true, optional: false)
end
end
@@ -74,7 +84,7 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Need do
describe '#value' do
it 'returns job needs configuration' do
- expect(need.value).to eq(name: 'job_name', artifacts: false)
+ expect(need.value).to eq(name: 'job_name', artifacts: false, optional: false)
end
end
@@ -90,7 +100,7 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Need do
describe '#value' do
it 'returns job needs configuration' do
- expect(need.value).to eq(name: 'job_name', artifacts: true)
+ expect(need.value).to eq(name: 'job_name', artifacts: true, optional: false)
end
end
@@ -106,11 +116,77 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Need do
describe '#value' do
it 'returns job needs configuration' do
- expect(need.value).to eq(name: 'job_name', artifacts: true)
+ expect(need.value).to eq(name: 'job_name', artifacts: true, optional: false)
+ end
+ end
+
+ it_behaves_like 'job type'
+ end
+
+ context 'with job name and optional true' do
+ let(:config) { { job: 'job_name', optional: true } }
+
+ it { is_expected.to be_valid }
+
+ it_behaves_like 'job type'
+
+ describe '#value' do
+ it 'returns job needs configuration' do
+ expect(need.value).to eq(name: 'job_name', artifacts: true, optional: true)
+ end
+
+ context 'when the FF ci_needs_optional is disabled' do
+ before do
+ stub_feature_flags(ci_needs_optional: false)
+ end
+
+ it 'returns job needs configuration without `optional`' do
+ expect(need.value).to eq(name: 'job_name', artifacts: true)
+ end
end
end
+ end
+
+ context 'with job name and optional false' do
+ let(:config) { { job: 'job_name', optional: false } }
+
+ it { is_expected.to be_valid }
it_behaves_like 'job type'
+
+ describe '#value' do
+ it 'returns job needs configuration' do
+ expect(need.value).to eq(name: 'job_name', artifacts: true, optional: false)
+ end
+ end
+ end
+
+ context 'with job name and optional nil' do
+ let(:config) { { job: 'job_name', optional: nil } }
+
+ it { is_expected.to be_valid }
+
+ it_behaves_like 'job type'
+
+ describe '#value' do
+ it 'returns job needs configuration' do
+ expect(need.value).to eq(name: 'job_name', artifacts: true, optional: false)
+ end
+ end
+ end
+
+ context 'without optional key' do
+ let(:config) { { job: 'job_name' } }
+
+ it { is_expected.to be_valid }
+
+ it_behaves_like 'job type'
+
+ describe '#value' do
+ it 'returns job needs configuration' do
+ expect(need.value).to eq(name: 'job_name', artifacts: true, optional: false)
+ end
+ end
end
context 'when job name is empty' do
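
The need entry now normalises an optional flag, defaulting it to false and dropping it entirely when the ci_needs_optional flag is disabled; a sketch of the inputs exercised above and the values the examples expect:

    explicit_need = { job: 'job_name', optional: true }
    default_need  = { job: 'job_name' }

    expected_explicit = { name: 'job_name', artifacts: true, optional: true }
    expected_default  = { name: 'job_name', artifacts: true, optional: false }
    # With ci_needs_optional disabled the value is { name: 'job_name', artifacts: true }.
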
diff --git a/spec/lib/gitlab/ci/config/entry/needs_spec.rb b/spec/lib/gitlab/ci/config/entry/needs_spec.rb
index f11f2a56f5f..489fbac68b2 100644
--- a/spec/lib/gitlab/ci/config/entry/needs_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/needs_spec.rb
@@ -111,8 +111,8 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Needs do
it 'returns key value' do
expect(needs.value).to eq(
job: [
- { name: 'first_job_name', artifacts: true },
- { name: 'second_job_name', artifacts: true }
+ { name: 'first_job_name', artifacts: true, optional: false },
+ { name: 'second_job_name', artifacts: true, optional: false }
]
)
end
@@ -124,8 +124,8 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Needs do
context 'with complex job entries composed' do
let(:config) do
[
- { job: 'first_job_name', artifacts: true },
- { job: 'second_job_name', artifacts: false }
+ { job: 'first_job_name', artifacts: true, optional: false },
+ { job: 'second_job_name', artifacts: false, optional: false }
]
end
@@ -137,8 +137,8 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Needs do
it 'returns key value' do
expect(needs.value).to eq(
job: [
- { name: 'first_job_name', artifacts: true },
- { name: 'second_job_name', artifacts: false }
+ { name: 'first_job_name', artifacts: true, optional: false },
+ { name: 'second_job_name', artifacts: false, optional: false }
]
)
end
@@ -163,8 +163,8 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Needs do
it 'returns key value' do
expect(needs.value).to eq(
job: [
- { name: 'first_job_name', artifacts: true },
- { name: 'second_job_name', artifacts: false }
+ { name: 'first_job_name', artifacts: true, optional: false },
+ { name: 'second_job_name', artifacts: false, optional: false }
]
)
end
diff --git a/spec/lib/gitlab/ci/config/entry/product/parallel_spec.rb b/spec/lib/gitlab/ci/config/entry/product/parallel_spec.rb
index bc09e20d748..937642f07e7 100644
--- a/spec/lib/gitlab/ci/config/entry/product/parallel_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/product/parallel_spec.rb
@@ -4,21 +4,23 @@ require 'fast_spec_helper'
require_dependency 'active_model'
RSpec.describe ::Gitlab::Ci::Config::Entry::Product::Parallel do
- subject(:parallel) { described_class.new(config) }
+ let(:metadata) { {} }
- context 'with invalid config' do
- shared_examples 'invalid config' do |error_message|
- describe '#valid?' do
- it { is_expected.not_to be_valid }
- end
+ subject(:parallel) { described_class.new(config, **metadata) }
- describe '#errors' do
- it 'returns error about invalid type' do
- expect(parallel.errors).to match(a_collection_including(error_message))
- end
+ shared_examples 'invalid config' do |error_message|
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns error about invalid type' do
+ expect(parallel.errors).to match(a_collection_including(error_message))
end
end
+ end
+ context 'with invalid config' do
context 'when it is not a numeric value' do
let(:config) { true }
@@ -63,6 +65,12 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Product::Parallel do
expect(parallel.value).to match(number: config)
end
end
+
+ context 'when :numeric is not allowed' do
+ let(:metadata) { { allowed_strategies: [:matrix] } }
+
+ it_behaves_like 'invalid config', /cannot use "parallel: <number>"/
+ end
end
end
@@ -89,6 +97,12 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Product::Parallel do
])
end
end
+
+ context 'when :matrix is not allowed' do
+ let(:metadata) { { allowed_strategies: [:numeric] } }
+
+ it_behaves_like 'invalid config', /cannot use "parallel: matrix"/
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/root_spec.rb b/spec/lib/gitlab/ci/config/entry/root_spec.rb
index 54c7a5c3602..7b38c21788f 100644
--- a/spec/lib/gitlab/ci/config/entry/root_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/root_spec.rb
@@ -126,49 +126,105 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
expect(root.jobs_value.keys).to eq([:rspec, :spinach, :release])
expect(root.jobs_value[:rspec]).to eq(
{ name: :rspec,
- script: %w[rspec ls],
- before_script: %w(ls pwd),
- image: { name: 'ruby:2.7' },
- services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
- stage: 'test',
- cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
- variables: { 'VAR' => 'root', 'VAR2' => 'val 2' },
- ignore: false,
- after_script: ['make clean'],
- only: { refs: %w[branches tags] },
- scheduling_type: :stage }
+ script: %w[rspec ls],
+ before_script: %w(ls pwd),
+ image: { name: 'ruby:2.7' },
+ services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
+ stage: 'test',
+ cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' }],
+ variables: { 'VAR' => 'root', 'VAR2' => 'val 2' },
+ ignore: false,
+ after_script: ['make clean'],
+ only: { refs: %w[branches tags] },
+ scheduling_type: :stage }
)
expect(root.jobs_value[:spinach]).to eq(
{ name: :spinach,
- before_script: [],
- script: %w[spinach],
- image: { name: 'ruby:2.7' },
- services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
- stage: 'test',
- cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
- variables: { 'VAR' => 'root', 'VAR2' => 'val 2' },
- ignore: false,
- after_script: ['make clean'],
- only: { refs: %w[branches tags] },
- scheduling_type: :stage }
+ before_script: [],
+ script: %w[spinach],
+ image: { name: 'ruby:2.7' },
+ services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
+ stage: 'test',
+ cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' }],
+ variables: { 'VAR' => 'root', 'VAR2' => 'val 2' },
+ ignore: false,
+ after_script: ['make clean'],
+ only: { refs: %w[branches tags] },
+ scheduling_type: :stage }
)
expect(root.jobs_value[:release]).to eq(
{ name: :release,
- stage: 'release',
- before_script: [],
- script: ["make changelog | tee release_changelog.txt"],
- release: { name: "Release $CI_TAG_NAME", tag_name: 'v0.06', description: "./release_changelog.txt" },
- image: { name: "ruby:2.7" },
- services: [{ name: "postgres:9.1" }, { name: "mysql:5.5" }],
- cache: { key: "k", untracked: true, paths: ["public/"], policy: "pull-push", when: 'on_success' },
- only: { refs: %w(branches tags) },
- variables: { 'VAR' => 'job', 'VAR2' => 'val 2' },
- after_script: [],
- ignore: false,
- scheduling_type: :stage }
+ stage: 'release',
+ before_script: [],
+ script: ["make changelog | tee release_changelog.txt"],
+ release: { name: "Release $CI_TAG_NAME", tag_name: 'v0.06', description: "./release_changelog.txt" },
+ image: { name: "ruby:2.7" },
+ services: [{ name: "postgres:9.1" }, { name: "mysql:5.5" }],
+ cache: [{ key: "k", untracked: true, paths: ["public/"], policy: "pull-push", when: 'on_success' }],
+ only: { refs: %w(branches tags) },
+ variables: { 'VAR' => 'job', 'VAR2' => 'val 2' },
+ after_script: [],
+ ignore: false,
+ scheduling_type: :stage }
)
end
end
+
+    context 'with multiple_cache_per_job FF disabled' do
+ before do
+ stub_feature_flags(multiple_cache_per_job: false)
+ root.compose!
+ end
+
+ describe '#jobs_value' do
+ it 'returns jobs configuration' do
+ expect(root.jobs_value.keys).to eq([:rspec, :spinach, :release])
+ expect(root.jobs_value[:rspec]).to eq(
+ { name: :rspec,
+ script: %w[rspec ls],
+ before_script: %w(ls pwd),
+ image: { name: 'ruby:2.7' },
+ services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
+ stage: 'test',
+ cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
+ variables: { 'VAR' => 'root', 'VAR2' => 'val 2' },
+ ignore: false,
+ after_script: ['make clean'],
+ only: { refs: %w[branches tags] },
+ scheduling_type: :stage }
+ )
+ expect(root.jobs_value[:spinach]).to eq(
+ { name: :spinach,
+ before_script: [],
+ script: %w[spinach],
+ image: { name: 'ruby:2.7' },
+ services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
+ stage: 'test',
+ cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
+ variables: { 'VAR' => 'root', 'VAR2' => 'val 2' },
+ ignore: false,
+ after_script: ['make clean'],
+ only: { refs: %w[branches tags] },
+ scheduling_type: :stage }
+ )
+ expect(root.jobs_value[:release]).to eq(
+ { name: :release,
+ stage: 'release',
+ before_script: [],
+ script: ["make changelog | tee release_changelog.txt"],
+ release: { name: "Release $CI_TAG_NAME", tag_name: 'v0.06', description: "./release_changelog.txt" },
+ image: { name: "ruby:2.7" },
+ services: [{ name: "postgres:9.1" }, { name: "mysql:5.5" }],
+ cache: { key: "k", untracked: true, paths: ["public/"], policy: "pull-push", when: 'on_success' },
+ only: { refs: %w(branches tags) },
+ variables: { 'VAR' => 'job', 'VAR2' => 'val 2' },
+ after_script: [],
+ ignore: false,
+ scheduling_type: :stage }
+ )
+ end
+ end
+ end
end
end
@@ -187,6 +243,52 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
spinach: { before_script: [], variables: { VAR: 'job' }, script: 'spinach' } }
end
+ context 'with multiple_cache_per_job FF disabled' do
+ context 'when composed' do
+ before do
+ stub_feature_flags(multiple_cache_per_job: false)
+ root.compose!
+ end
+
+ describe '#errors' do
+ it 'has no errors' do
+ expect(root.errors).to be_empty
+ end
+ end
+
+ describe '#jobs_value' do
+ it 'returns jobs configuration' do
+ expect(root.jobs_value).to eq(
+ rspec: { name: :rspec,
+ script: %w[rspec ls],
+ before_script: %w(ls pwd),
+ image: { name: 'ruby:2.7' },
+ services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
+ stage: 'test',
+ cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
+ variables: { 'VAR' => 'root' },
+ ignore: false,
+ after_script: ['make clean'],
+ only: { refs: %w[branches tags] },
+ scheduling_type: :stage },
+ spinach: { name: :spinach,
+ before_script: [],
+ script: %w[spinach],
+ image: { name: 'ruby:2.7' },
+ services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
+ stage: 'test',
+ cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
+ variables: { 'VAR' => 'job' },
+ ignore: false,
+ after_script: ['make clean'],
+ only: { refs: %w[branches tags] },
+ scheduling_type: :stage }
+ )
+ end
+ end
+ end
+ end
+
context 'when composed' do
before do
root.compose!
@@ -202,29 +304,29 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
it 'returns jobs configuration' do
expect(root.jobs_value).to eq(
rspec: { name: :rspec,
- script: %w[rspec ls],
- before_script: %w(ls pwd),
- image: { name: 'ruby:2.7' },
- services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
- stage: 'test',
- cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
- variables: { 'VAR' => 'root' },
- ignore: false,
- after_script: ['make clean'],
- only: { refs: %w[branches tags] },
- scheduling_type: :stage },
+ script: %w[rspec ls],
+ before_script: %w(ls pwd),
+ image: { name: 'ruby:2.7' },
+ services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
+ stage: 'test',
+ cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' }],
+ variables: { 'VAR' => 'root' },
+ ignore: false,
+ after_script: ['make clean'],
+ only: { refs: %w[branches tags] },
+ scheduling_type: :stage },
spinach: { name: :spinach,
- before_script: [],
- script: %w[spinach],
- image: { name: 'ruby:2.7' },
- services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
- stage: 'test',
- cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
- variables: { 'VAR' => 'job' },
- ignore: false,
- after_script: ['make clean'],
- only: { refs: %w[branches tags] },
- scheduling_type: :stage }
+ before_script: [],
+ script: %w[spinach],
+ image: { name: 'ruby:2.7' },
+ services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
+ stage: 'test',
+ cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' }],
+ variables: { 'VAR' => 'job' },
+ ignore: false,
+ after_script: ['make clean'],
+ only: { refs: %w[branches tags] },
+ scheduling_type: :stage }
)
end
end
@@ -265,7 +367,20 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
describe '#cache_value' do
it 'returns correct cache definition' do
- expect(root.cache_value).to eq(key: 'a', policy: 'pull-push', when: 'on_success')
+ expect(root.cache_value).to eq([key: 'a', policy: 'pull-push', when: 'on_success'])
+ end
+ end
+
+ context 'with multiple_cache_per_job FF disabled' do
+ before do
+ stub_feature_flags(multiple_cache_per_job: false)
+ root.compose!
+ end
+
+ describe '#cache_value' do
+ it 'returns correct cache definition' do
+ expect(root.cache_value).to eq(key: 'a', policy: 'pull-push', when: 'on_success')
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/jwt_spec.rb b/spec/lib/gitlab/ci/jwt_spec.rb
index 342ca6b8b75..480a4a05379 100644
--- a/spec/lib/gitlab/ci/jwt_spec.rb
+++ b/spec/lib/gitlab/ci/jwt_spec.rb
@@ -114,17 +114,6 @@ RSpec.describe Gitlab::Ci::Jwt do
expect(payload[:environment]).to eq('production')
expect(payload[:environment_protected]).to eq('false')
end
-
- context ':ci_jwt_include_environment feature flag is disabled' do
- before do
- stub_feature_flags(ci_jwt_include_environment: false)
- end
-
- it 'does not include environment attributes' do
- expect(payload).not_to have_key(:environment)
- expect(payload).not_to have_key(:environment_protected)
- end
- end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb
index cf3644c9ad5..ec7eebdc056 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb
@@ -3,17 +3,16 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Expression::Statement do
- subject do
- described_class.new(text, variables)
+ let(:variables) do
+ Gitlab::Ci::Variables::Collection.new
+ .append(key: 'PRESENT_VARIABLE', value: 'my variable')
+ .append(key: 'PATH_VARIABLE', value: 'a/path/variable/value')
+ .append(key: 'FULL_PATH_VARIABLE', value: '/a/full/path/variable/value')
+ .append(key: 'EMPTY_VARIABLE', value: '')
end
- let(:variables) do
- {
- 'PRESENT_VARIABLE' => 'my variable',
- 'PATH_VARIABLE' => 'a/path/variable/value',
- 'FULL_PATH_VARIABLE' => '/a/full/path/variable/value',
- 'EMPTY_VARIABLE' => ''
- }
+ subject do
+ described_class.new(text, variables)
end
describe '.new' do
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb
index 570706bfaac..773cb61b946 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb
@@ -9,8 +9,255 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build::Cache do
let(:processor) { described_class.new(pipeline, config) }
- describe '#build_attributes' do
- subject { processor.build_attributes }
+ context 'with multiple_cache_per_job ff disabled' do
+ before do
+ stub_feature_flags(multiple_cache_per_job: false)
+ end
+
+ describe '#build_attributes' do
+ subject { processor.build_attributes }
+
+ context 'with cache:key' do
+ let(:config) do
+ {
+ key: 'a-key',
+ paths: ['vendor/ruby']
+ }
+ end
+
+ it { is_expected.to include(options: { cache: config }) }
+ end
+
+ context 'with cache:key as a symbol' do
+ let(:config) do
+ {
+ key: :a_key,
+ paths: ['vendor/ruby']
+ }
+ end
+
+ it { is_expected.to include(options: { cache: config.merge(key: "a_key") }) }
+ end
+
+ context 'with cache:key:files' do
+ shared_examples 'default key' do
+ let(:config) do
+ { key: { files: files } }
+ end
+
+ it 'uses default key' do
+ expected = { options: { cache: { key: 'default' } } }
+
+ is_expected.to include(expected)
+ end
+ end
+
+ shared_examples 'version and gemfile files' do
+ let(:config) do
+ {
+ key: {
+ files: files
+ },
+ paths: ['vendor/ruby']
+ }
+ end
+
+ it 'builds a string key' do
+ expected = {
+ options: {
+ cache: {
+ key: '703ecc8fef1635427a1f86a8a1a308831c122392',
+ paths: ['vendor/ruby']
+ }
+ }
+ }
+
+ is_expected.to include(expected)
+ end
+ end
+
+ context 'with existing files' do
+ let(:files) { ['VERSION', 'Gemfile.zip'] }
+
+ it_behaves_like 'version and gemfile files'
+ end
+
+ context 'with files starting with ./' do
+ let(:files) { ['Gemfile.zip', './VERSION'] }
+
+ it_behaves_like 'version and gemfile files'
+ end
+
+ context 'with files ending with /' do
+ let(:files) { ['Gemfile.zip/'] }
+
+ it_behaves_like 'default key'
+ end
+
+ context 'with new line in filenames' do
+ let(:files) { ["Gemfile.zip\nVERSION"] }
+
+ it_behaves_like 'default key'
+ end
+
+ context 'with missing files' do
+ let(:files) { ['project-gemfile.lock', ''] }
+
+ it_behaves_like 'default key'
+ end
+
+ context 'with directories' do
+ shared_examples 'foo/bar directory key' do
+ let(:config) do
+ {
+ key: {
+ files: files
+ }
+ }
+ end
+
+ it 'builds a string key' do
+ expected = {
+ options: {
+ cache: { key: '74bf43fb1090f161bdd4e265802775dbda2f03d1' }
+ }
+ }
+
+ is_expected.to include(expected)
+ end
+ end
+
+ context 'with directory' do
+ let(:files) { ['foo/bar'] }
+
+ it_behaves_like 'foo/bar directory key'
+ end
+
+ context 'with directory ending in slash' do
+ let(:files) { ['foo/bar/'] }
+
+ it_behaves_like 'foo/bar directory key'
+ end
+
+ context 'with directories ending in slash star' do
+ let(:files) { ['foo/bar/*'] }
+
+ it_behaves_like 'foo/bar directory key'
+ end
+ end
+ end
+
+ context 'with cache:key:prefix' do
+ context 'without files' do
+ let(:config) do
+ {
+ key: {
+ prefix: 'a-prefix'
+ },
+ paths: ['vendor/ruby']
+ }
+ end
+
+ it 'adds prefix to default key' do
+ expected = {
+ options: {
+ cache: {
+ key: 'a-prefix-default',
+ paths: ['vendor/ruby']
+ }
+ }
+ }
+
+ is_expected.to include(expected)
+ end
+ end
+
+ context 'with existing files' do
+ let(:config) do
+ {
+ key: {
+ files: ['VERSION', 'Gemfile.zip'],
+ prefix: 'a-prefix'
+ },
+ paths: ['vendor/ruby']
+ }
+ end
+
+ it 'adds prefix key' do
+ expected = {
+ options: {
+ cache: {
+ key: 'a-prefix-703ecc8fef1635427a1f86a8a1a308831c122392',
+ paths: ['vendor/ruby']
+ }
+ }
+ }
+
+ is_expected.to include(expected)
+ end
+ end
+
+ context 'with missing files' do
+ let(:config) do
+ {
+ key: {
+ files: ['project-gemfile.lock', ''],
+ prefix: 'a-prefix'
+ },
+ paths: ['vendor/ruby']
+ }
+ end
+
+ it 'adds prefix to default key' do
+ expected = {
+ options: {
+ cache: {
+ key: 'a-prefix-default',
+ paths: ['vendor/ruby']
+ }
+ }
+ }
+
+ is_expected.to include(expected)
+ end
+ end
+ end
+
+ context 'with all cache option keys' do
+ let(:config) do
+ {
+ key: 'a-key',
+ paths: ['vendor/ruby'],
+ untracked: true,
+ policy: 'push',
+ when: 'on_success'
+ }
+ end
+
+ it { is_expected.to include(options: { cache: config }) }
+ end
+
+ context 'with unknown cache option keys' do
+ let(:config) do
+ {
+ key: 'a-key',
+ unknown_key: true
+ }
+ end
+
+ it { expect { subject }.to raise_error(ArgumentError, /unknown_key/) }
+ end
+
+ context 'with empty config' do
+ let(:config) { {} }
+
+ it { is_expected.to include(options: {}) }
+ end
+ end
+ end
+
+ describe '#attributes' do
+ subject { processor.attributes }
context 'with cache:key' do
let(:config) do
@@ -20,7 +267,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build::Cache do
}
end
- it { is_expected.to include(options: { cache: config }) }
+ it { is_expected.to include(config) }
end
context 'with cache:key as a symbol' do
@@ -31,7 +278,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build::Cache do
}
end
- it { is_expected.to include(options: { cache: config.merge(key: "a_key") }) }
+ it { is_expected.to include(config.merge(key: "a_key")) }
end
context 'with cache:key:files' do
@@ -41,7 +288,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build::Cache do
end
it 'uses default key' do
- expected = { options: { cache: { key: 'default' } } }
+ expected = { key: 'default' }
is_expected.to include(expected)
end
@@ -59,13 +306,9 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build::Cache do
it 'builds a string key' do
expected = {
- options: {
- cache: {
key: '703ecc8fef1635427a1f86a8a1a308831c122392',
paths: ['vendor/ruby']
- }
}
- }
is_expected.to include(expected)
end
@@ -112,11 +355,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build::Cache do
end
it 'builds a string key' do
- expected = {
- options: {
- cache: { key: '74bf43fb1090f161bdd4e265802775dbda2f03d1' }
- }
- }
+ expected = { key: '74bf43fb1090f161bdd4e265802775dbda2f03d1' }
is_expected.to include(expected)
end
@@ -155,13 +394,9 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build::Cache do
it 'adds prefix to default key' do
expected = {
- options: {
- cache: {
key: 'a-prefix-default',
paths: ['vendor/ruby']
}
- }
- }
is_expected.to include(expected)
end
@@ -180,13 +415,9 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build::Cache do
it 'adds prefix key' do
expected = {
- options: {
- cache: {
key: 'a-prefix-703ecc8fef1635427a1f86a8a1a308831c122392',
paths: ['vendor/ruby']
}
- }
- }
is_expected.to include(expected)
end
@@ -205,13 +436,9 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build::Cache do
it 'adds prefix to default key' do
expected = {
- options: {
- cache: {
key: 'a-prefix-default',
paths: ['vendor/ruby']
}
- }
- }
is_expected.to include(expected)
end
@@ -229,7 +456,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build::Cache do
}
end
- it { is_expected.to include(options: { cache: config }) }
+ it { is_expected.to include(config) }
end
context 'with unknown cache option keys' do
@@ -242,11 +469,5 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build::Cache do
it { expect { subject }.to raise_error(ArgumentError, /unknown_key/) }
end
-
- context 'with empty config' do
- let(:config) { {} }
-
- it { is_expected.to include(options: {}) }
- end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index 0efc7484699..7ec6949f852 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -85,99 +85,169 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
{ key: 'VAR2', value: 'var 2', public: true },
{ key: 'VAR3', value: 'var 3', public: true }])
end
+ end
- context 'when FF ci_rules_variables is disabled' do
- before do
- stub_feature_flags(ci_rules_variables: false)
- end
+ context 'with multiple_cache_per_job FF disabled' do
+ before do
+ stub_feature_flags(multiple_cache_per_job: false)
+ end
- it do
- is_expected.to include(yaml_variables: [{ key: 'VAR1', value: 'var 1', public: true },
- { key: 'VAR2', value: 'var 2', public: true }])
+ context 'with cache:key' do
+ let(:attributes) do
+ {
+ name: 'rspec',
+ ref: 'master',
+ cache: {
+ key: 'a-value'
+ }
+ }
end
+
+ it { is_expected.to include(options: { cache: { key: 'a-value' } }) }
end
- end
- context 'with cache:key' do
- let(:attributes) do
- {
- name: 'rspec',
- ref: 'master',
- cache: {
- key: 'a-value'
+ context 'with cache:key:files' do
+ let(:attributes) do
+ {
+ name: 'rspec',
+ ref: 'master',
+ cache: {
+ key: {
+ files: ['VERSION']
+ }
+ }
}
- }
- end
+ end
- it { is_expected.to include(options: { cache: { key: 'a-value' } }) }
- end
+ it 'includes cache options' do
+ cache_options = {
+ options: {
+ cache: { key: 'f155568ad0933d8358f66b846133614f76dd0ca4' }
+ }
+ }
- context 'with cache:key:files' do
- let(:attributes) do
- {
- name: 'rspec',
- ref: 'master',
- cache: {
- key: {
- files: ['VERSION']
+ is_expected.to include(cache_options)
+ end
+ end
+
+ context 'with cache:key:prefix' do
+ let(:attributes) do
+ {
+ name: 'rspec',
+ ref: 'master',
+ cache: {
+ key: {
+ prefix: 'something'
+ }
}
}
- }
+ end
+
+ it { is_expected.to include(options: { cache: { key: 'something-default' } }) }
end
- it 'includes cache options' do
- cache_options = {
- options: {
+ context 'with cache:key:files and prefix' do
+ let(:attributes) do
+ {
+ name: 'rspec',
+ ref: 'master',
cache: {
- key: 'f155568ad0933d8358f66b846133614f76dd0ca4'
+ key: {
+ files: ['VERSION'],
+ prefix: 'something'
+ }
}
}
- }
+ end
- is_expected.to include(cache_options)
+ it 'includes cache options' do
+ cache_options = {
+ options: {
+ cache: { key: 'something-f155568ad0933d8358f66b846133614f76dd0ca4' }
+ }
+ }
+
+ is_expected.to include(cache_options)
+ end
end
end
- context 'with cache:key:prefix' do
+ context 'with cache:key' do
let(:attributes) do
{
name: 'rspec',
ref: 'master',
- cache: {
- key: {
- prefix: 'something'
- }
- }
+ cache: [{
+ key: 'a-value'
+ }]
}
end
- it { is_expected.to include(options: { cache: { key: 'something-default' } }) }
- end
+ it { is_expected.to include(options: { cache: [a_hash_including(key: 'a-value')] }) }
- context 'with cache:key:files and prefix' do
- let(:attributes) do
- {
- name: 'rspec',
- ref: 'master',
- cache: {
- key: {
- files: ['VERSION'],
- prefix: 'something'
+ context 'with cache:key:files' do
+ let(:attributes) do
+ {
+ name: 'rspec',
+ ref: 'master',
+ cache: [{
+ key: {
+ files: ['VERSION']
+ }
+ }]
+ }
+ end
+
+ it 'includes cache options' do
+ cache_options = {
+ options: {
+ cache: [a_hash_including(key: 'f155568ad0933d8358f66b846133614f76dd0ca4')]
}
}
- }
+
+ is_expected.to include(cache_options)
+ end
end
- it 'includes cache options' do
- cache_options = {
- options: {
- cache: {
- key: 'something-f155568ad0933d8358f66b846133614f76dd0ca4'
+ context 'with cache:key:prefix' do
+ let(:attributes) do
+ {
+ name: 'rspec',
+ ref: 'master',
+ cache: [{
+ key: {
+ prefix: 'something'
+ }
+ }]
+ }
+ end
+
+        it { is_expected.to include(options: { cache: [a_hash_including(key: 'something-default')] }) }
+ end
+
+ context 'with cache:key:files and prefix' do
+ let(:attributes) do
+ {
+ name: 'rspec',
+ ref: 'master',
+ cache: [{
+ key: {
+ files: ['VERSION'],
+ prefix: 'something'
+ }
+ }]
+ }
+ end
+
+ it 'includes cache options' do
+ cache_options = {
+ options: {
+ cache: [a_hash_including(key: 'something-f155568ad0933d8358f66b846133614f76dd0ca4')]
}
}
- }
- is_expected.to include(cache_options)
+ is_expected.to include(cache_options)
+ end
end
end
@@ -190,7 +260,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
}
end
- it { is_expected.to include(options: {}) }
+ it { is_expected.to include({}) }
end
context 'with allow_failure' do
@@ -307,7 +377,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
it 'does not have environment' do
expect(subject).not_to be_has_environment
expect(subject.environment).to be_nil
- expect(subject.metadata.expanded_environment_name).to be_nil
+ expect(subject.metadata).to be_nil
expect(Environment.exists?(name: expected_environment_name)).to eq(false)
end
end
@@ -979,6 +1049,25 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
expect(subject.errors).to contain_exactly(
"'rspec' job needs 'build' job, but it was not added to the pipeline")
end
+
+ context 'when the needed job is optional' do
+ let(:needs_attributes) { [{ name: 'build', optional: true }] }
+
+ it "does not return an error" do
+ expect(subject.errors).to be_empty
+ end
+
+ context 'when the FF ci_needs_optional is disabled' do
+ before do
+ stub_feature_flags(ci_needs_optional: false)
+ end
+
+ it "returns an error" do
+ expect(subject.errors).to contain_exactly(
+ "'rspec' job needs 'build' job, but it was not added to the pipeline")
+ end
+ end
+ end
end
context 'when build job is part of prior stages' do
@@ -1036,4 +1125,75 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
end
end
end
+
+ describe 'applying pipeline variables' do
+ subject { seed_build }
+
+ let(:pipeline_variables) { [] }
+ let(:pipeline) do
+ build(:ci_empty_pipeline, project: project, sha: head_sha, variables: pipeline_variables)
+ end
+
+ context 'containing variable references' do
+ let(:pipeline_variables) do
+ [
+ build(:ci_pipeline_variable, key: 'A', value: '$B'),
+ build(:ci_pipeline_variable, key: 'B', value: '$C')
+ ]
+ end
+
+ context 'when FF :variable_inside_variable is enabled' do
+ before do
+ stub_feature_flags(variable_inside_variable: [project])
+ end
+
+ it "does not have errors" do
+ expect(subject.errors).to be_empty
+ end
+ end
+ end
+
+ context 'containing cyclic reference' do
+ let(:pipeline_variables) do
+ [
+ build(:ci_pipeline_variable, key: 'A', value: '$B'),
+ build(:ci_pipeline_variable, key: 'B', value: '$C'),
+ build(:ci_pipeline_variable, key: 'C', value: '$A')
+ ]
+ end
+
+ context 'when FF :variable_inside_variable is disabled' do
+ before do
+ stub_feature_flags(variable_inside_variable: false)
+ end
+
+ it "does not have errors" do
+ expect(subject.errors).to be_empty
+ end
+ end
+
+ context 'when FF :variable_inside_variable is enabled' do
+ before do
+ stub_feature_flags(variable_inside_variable: [project])
+ end
+
+ it "returns an error" do
+ expect(subject.errors).to contain_exactly(
+ 'rspec: circular variable reference detected: ["A", "B", "C"]')
+ end
+
+ context 'with job:rules:[if:]' do
+ let(:attributes) { { name: 'rspec', ref: 'master', rules: [{ if: '$C != null', when: 'always' }] } }
+
+ it "included? does not raise" do
+ expect { subject.included? }.not_to raise_error
+ end
+
+ it "included? returns true" do
+ expect(subject.included?).to eq(true)
+ end
+ end
+ end
+ end
+ end
end
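
The assertions above match the seed build's cache options as an array of hashes by composing RSpec's a_hash_including matcher inside an array literal. A minimal, self-contained sketch of that matcher style, against a made-up attributes hash rather than anything produced by the seed class:

# frozen_string_literal: true

require 'rspec/autorun'

RSpec.describe 'matching an array of cache hashes' do
  # Stand-in for the attributes hash a seed build might expose.
  let(:attributes) do
    {
      name: 'rspec',
      options: {
        cache: [
          { key: 'a-value', policy: 'pull-push', when: 'on_success' }
        ]
      }
    }
  end

  it 'matches one cache entry by key while ignoring its other keys' do
    expect(attributes).to include(
      options: { cache: [a_hash_including(key: 'a-value')] }
    )
  end
end

Because a_hash_including is composable, the array form still pins down how many cache entries exist while only the keys of interest are asserted.
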
diff --git a/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb
index 664aaaedf7b..99196d393c6 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb
@@ -88,6 +88,55 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Environment do
end
end
+ context 'when job has deployment tier attribute' do
+ let(:attributes) do
+ {
+ environment: 'customer-portal',
+ options: {
+ environment: {
+ name: 'customer-portal',
+ deployment_tier: deployment_tier
+ }
+ }
+ }
+ end
+
+ let(:deployment_tier) { 'production' }
+
+ context 'when environment has not been created yet' do
+ it 'sets the specified deployment tier' do
+ is_expected.to be_production
+ end
+
+ context 'when deployment tier is staging' do
+ let(:deployment_tier) { 'staging' }
+
+ it 'sets the specified deployment tier' do
+ is_expected.to be_staging
+ end
+ end
+
+ context 'when deployment tier is unknown' do
+ let(:deployment_tier) { 'unknown' }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ArgumentError, "'unknown' is not a valid tier")
+ end
+ end
+ end
+
+ context 'when environment has already been created' do
+ before do
+ create(:environment, :staging, project: project, name: 'customer-portal')
+ end
+
+ it 'does not overwrite the specified deployment tier' do
+ # This is to be updated when a deployment succeeds, i.e. in Deployments::UpdateEnvironmentService.
+ is_expected.to be_staging
+ end
+ end
+ end
+
context 'when job starts a review app' do
let(:environment_name) { 'review/$CI_COMMIT_REF_NAME' }
let(:expected_environment_name) { "review/#{job.ref}" }
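
The deployment tier examples above expect an unrecognised tier to raise ArgumentError with "'unknown' is not a valid tier", while known tiers map onto predicate methods such as production? and staging?. A plain-Ruby sketch of that contract; the real Environment model presumably backs this with an ActiveRecord enum, so the class name and tier list here are stand-ins:

class EnvironmentTierSketch
  TIERS = %w[production staging testing development other].freeze

  attr_reader :tier

  def initialize(tier)
    # Mirrors the "'unknown' is not a valid tier" error asserted above.
    raise ArgumentError, "'#{tier}' is not a valid tier" unless TIERS.include?(tier)

    @tier = tier
  end

  def production?
    tier == 'production'
  end

  def staging?
    tier == 'staging'
  end
end

EnvironmentTierSketch.new('staging').staging? # => true

begin
  EnvironmentTierSketch.new('unknown')
rescue ArgumentError => e
  e.message # => "'unknown' is not a valid tier"
end
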
diff --git a/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb b/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb
index 90188b56f5a..b322e55cb5a 100644
--- a/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb
+++ b/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb
@@ -27,6 +27,22 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
expect(report_status).to eq(described_class::STATUS_SUCCESS)
end
end
+
+ context 'when head report does not exist' do
+ let(:head_report) { nil }
+
+ it 'returns status not found' do
+ expect(report_status).to eq(described_class::STATUS_NOT_FOUND)
+ end
+ end
+
+ context 'when base report does not exist' do
+ let(:base_report) { nil }
+
+ it 'returns status not found' do
+ expect(report_status).to eq(described_class::STATUS_NOT_FOUND)
+ end
+ end
end
describe '#errors_count' do
@@ -93,6 +109,14 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
expect(resolved_count).to be_zero
end
end
+
+ context 'when base report is nil' do
+ let(:base_report) { nil }
+
+ it 'returns zero' do
+ expect(resolved_count).to be_zero
+ end
+ end
end
describe '#total_count' do
@@ -140,6 +164,14 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
expect(total_count).to eq(2)
end
end
+
+ context 'when base report is nil' do
+ let(:base_report) { nil }
+
+ it 'returns zero' do
+ expect(total_count).to be_zero
+ end
+ end
end
describe '#existing_errors' do
@@ -177,6 +209,14 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
expect(existing_errors).to be_empty
end
end
+
+ context 'when base report is nil' do
+ let(:base_report) { nil }
+
+ it 'returns an empty array' do
+ expect(existing_errors).to be_empty
+ end
+ end
end
describe '#new_errors' do
@@ -213,6 +253,14 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
expect(new_errors).to eq([degradation_1])
end
end
+
+ context 'when base report is nil' do
+ let(:base_report) { nil }
+
+ it 'returns an empty array' do
+ expect(new_errors).to be_empty
+ end
+ end
end
describe '#resolved_errors' do
@@ -250,5 +298,13 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
expect(resolved_errors).to be_empty
end
end
+
+ context 'when base report is nil' do
+ let(:base_report) { nil }
+
+ it 'returns an empty array' do
+ expect(resolved_errors).to be_empty
+ end
+ end
end
end
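
The new examples pin down how the comparer degrades when either report is missing: status becomes not found, and the error collections fall back to empty rather than raising on nil. A rough sketch of that guard pattern, with arrays of error identifiers standing in for the real report objects:

class ReportComparerSketch
  STATUS_SUCCESS   = 'success'
  STATUS_FAILED    = 'failed'
  STATUS_NOT_FOUND = 'not_found'

  # base_errors / head_errors: arrays of error identifiers, or nil when the
  # corresponding report was never produced.
  def initialize(base_errors, head_errors)
    @base = base_errors
    @head = head_errors
  end

  def status
    return STATUS_NOT_FOUND if @base.nil? || @head.nil?

    new_errors.empty? ? STATUS_SUCCESS : STATUS_FAILED
  end

  # Without both reports there is nothing to diff, so these degrade to
  # empty collections instead of raising NoMethodError on nil.
  def new_errors
    return [] if @base.nil? || @head.nil?

    @head - @base
  end

  def resolved_errors
    return [] if @base.nil? || @head.nil?

    @base - @head
  end
end

ReportComparerSketch.new(nil, ['lint/error']).status         # => "not_found"
ReportComparerSketch.new(['lint/error'], []).resolved_errors # => ["lint/error"]
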
diff --git a/spec/lib/gitlab/ci/reports/reports_comparer_spec.rb b/spec/lib/gitlab/ci/reports/reports_comparer_spec.rb
index 1e5e4766583..7ed9270e9a0 100644
--- a/spec/lib/gitlab/ci/reports/reports_comparer_spec.rb
+++ b/spec/lib/gitlab/ci/reports/reports_comparer_spec.rb
@@ -45,6 +45,22 @@ RSpec.describe Gitlab::Ci::Reports::ReportsComparer do
expect(status).to eq('failed')
end
end
+
+ context 'when base_report is nil' do
+ let(:base_report) { nil }
+
+ it 'returns status not_found' do
+ expect(status).to eq('not_found')
+ end
+ end
+
+ context 'when head_report is nil' do
+ let(:head_report) { nil }
+
+ it 'returns status not_found' do
+ expect(status).to eq('not_found')
+ end
+ end
end
describe '#success?' do
@@ -94,4 +110,22 @@ RSpec.describe Gitlab::Ci::Reports::ReportsComparer do
expect { total_count }.to raise_error(NotImplementedError)
end
end
+
+ describe '#not_found?' do
+ subject(:not_found) { comparer.not_found? }
+
+ context 'when base report is nil' do
+ let(:base_report) { nil }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when base report exists' do
+ before do
+ allow(comparer).to receive(:success?).and_return(true)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/reports/test_suite_summary_spec.rb b/spec/lib/gitlab/ci/reports/test_suite_summary_spec.rb
index a98d3db4e82..9acea852832 100644
--- a/spec/lib/gitlab/ci/reports/test_suite_summary_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_suite_summary_spec.rb
@@ -87,12 +87,44 @@ RSpec.describe Gitlab::Ci::Reports::TestSuiteSummary do
end
end
+ describe '#suite_error' do
+ subject(:suite_error) { test_suite_summary.suite_error }
+
+ context 'when there are no build report results with suite errors' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'when there are build report results with suite errors' do
+ let(:build_report_result_1) do
+ build(
+ :ci_build_report_result,
+ :with_junit_suite_error,
+ test_suite_name: 'karma',
+ test_suite_error: 'karma parsing error'
+ )
+ end
+
+ let(:build_report_result_2) do
+ build(
+ :ci_build_report_result,
+ :with_junit_suite_error,
+ test_suite_name: 'karma',
+ test_suite_error: 'another karma parsing error'
+ )
+ end
+
+ it 'includes the first suite error from the collection of build report results' do
+ expect(suite_error).to eq('karma parsing error')
+ end
+ end
+ end
+
describe '#to_h' do
subject { test_suite_summary.to_h }
context 'when test suite summary has several build report results' do
it 'returns the total as a hash' do
- expect(subject).to include(:time, :count, :success, :failed, :skipped, :error)
+ expect(subject).to include(:time, :count, :success, :failed, :skipped, :error, :suite_error)
end
end
end
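
The #suite_error examples above state that the summary surfaces the first suite error found across the collected build report results, and that #to_h now carries a :suite_error key. The selection itself is a first-non-nil scan; a tiny sketch with stand-in structs:

# Stand-in for a build report result; the real objects come from the
# ci_build_report_result factory.
BuildReportResult = Struct.new(:test_suite_name, :suite_error, keyword_init: true)

results = [
  BuildReportResult.new(test_suite_name: 'karma', suite_error: 'karma parsing error'),
  BuildReportResult.new(test_suite_name: 'karma', suite_error: 'another karma parsing error')
]

results.map(&:suite_error).compact.first # => "karma parsing error"

[BuildReportResult.new(test_suite_name: 'rspec', suite_error: nil)]
  .map(&:suite_error).compact.first      # => nil when no result carries an error
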
diff --git a/spec/lib/gitlab/ci/status/composite_spec.rb b/spec/lib/gitlab/ci/status/composite_spec.rb
index bcfb9f19792..543cfe874ca 100644
--- a/spec/lib/gitlab/ci/status/composite_spec.rb
+++ b/spec/lib/gitlab/ci/status/composite_spec.rb
@@ -69,6 +69,8 @@ RSpec.describe Gitlab::Ci::Status::Composite do
%i(manual) | false | 'skipped' | false
%i(skipped failed) | false | 'success' | true
%i(skipped failed) | true | 'skipped' | true
+ %i(success manual) | true | 'skipped' | false
+ %i(success manual) | false | 'success' | false
%i(created failed) | false | 'created' | true
%i(preparing manual) | false | 'preparing' | false
end
@@ -80,6 +82,25 @@ RSpec.describe Gitlab::Ci::Status::Composite do
it_behaves_like 'compares status and warnings'
end
+
+ context 'when FF ci_fix_pipeline_status_for_dag_needs_manual is disabled' do
+ before do
+ stub_feature_flags(ci_fix_pipeline_status_for_dag_needs_manual: false)
+ end
+
+ where(:build_statuses, :dag, :result, :has_warnings) do
+ %i(success manual) | true | 'pending' | false
+ %i(success manual) | false | 'success' | false
+ end
+
+ with_them do
+ let(:all_statuses) do
+ build_statuses.map { |status| @statuses_with_allow_failure[status] }
+ end
+
+ it_behaves_like 'compares status and warnings'
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/status/factory_spec.rb b/spec/lib/gitlab/ci/status/factory_spec.rb
index 641cb0183d3..94a6255f1e2 100644
--- a/spec/lib/gitlab/ci/status/factory_spec.rb
+++ b/spec/lib/gitlab/ci/status/factory_spec.rb
@@ -134,4 +134,14 @@ RSpec.describe Gitlab::Ci::Status::Factory do
it_behaves_like 'compound decorator factory'
end
end
+
+ context 'behaviour of FactoryBot traits that create associations' do
+ context 'creating a namespace with an associated aggregation_schedule record' do
+ it 'creates only one Namespace record and one Namespace::AggregationSchedule record' do
+ expect { create(:namespace, :with_aggregation_schedule) }
+ .to change { Namespace.count }.by(1)
+ .and change { Namespace::AggregationSchedule.count }.by(1)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
index f9d6fe24e70..6dfcecb853a 100644
--- a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
@@ -3,252 +3,260 @@
require 'spec_helper'
RSpec.describe 'Auto-DevOps.gitlab-ci.yml' do
+ using RSpec::Parameterized::TableSyntax
+
subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps') }
- describe 'the created pipeline' do
- let(:default_branch) { 'master' }
- let(:pipeline_branch) { default_branch }
- let(:project) { create(:project, :auto_devops, :custom_repo, files: { 'README.md' => '' }) }
- let(:user) { project.owner }
- let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
- let(:pipeline) { service.execute!(:push) }
- let(:build_names) { pipeline.builds.pluck(:name) }
-
- before do
- stub_ci_pipeline_yaml_file(template.content)
- allow_any_instance_of(Ci::BuildScheduleWorker).to receive(:perform).and_return(true)
- allow(project).to receive(:default_branch).and_return(default_branch)
- end
+ where(:default_branch) do
+ %w[master main]
+ end
- shared_examples 'no Kubernetes deployment job' do
- it 'does not create any Kubernetes deployment-related builds' do
- expect(build_names).not_to include('production')
- expect(build_names).not_to include('production_manual')
- expect(build_names).not_to include('staging')
- expect(build_names).not_to include('canary')
- expect(build_names).not_to include('review')
- expect(build_names).not_to include(a_string_matching(/rollout \d+%/))
- end
- end
+ with_them do
+ describe 'the created pipeline' do
+ let(:pipeline_branch) { default_branch }
+ let(:project) { create(:project, :auto_devops, :custom_repo, files: { 'README.md' => '' }) }
+ let(:user) { project.owner }
+ let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch) }
+ let(:pipeline) { service.execute!(:push) }
+ let(:build_names) { pipeline.builds.pluck(:name) }
- it 'creates a build and a test job' do
- expect(build_names).to include('build', 'test')
- end
+ before do
+ stub_application_setting(default_branch_name: default_branch)
+ stub_ci_pipeline_yaml_file(template.content)
+ allow_any_instance_of(Ci::BuildScheduleWorker).to receive(:perform).and_return(true)
+ end
- context 'when the project is set for deployment to AWS' do
- let(:platform_value) { 'ECS' }
- let(:review_prod_build_names) { build_names.select {|n| n.include?('review') || n.include?('production')} }
+ shared_examples 'no Kubernetes deployment job' do
+ it 'does not create any Kubernetes deployment-related builds' do
+ expect(build_names).not_to include('production')
+ expect(build_names).not_to include('production_manual')
+ expect(build_names).not_to include('staging')
+ expect(build_names).not_to include('canary')
+ expect(build_names).not_to include('review')
+ expect(build_names).not_to include(a_string_matching(/rollout \d+%/))
+ end
+ end
- before do
- create(:ci_variable, project: project, key: 'AUTO_DEVOPS_PLATFORM_TARGET', value: platform_value)
+ it 'creates a build and a test job' do
+ expect(build_names).to include('build', 'test')
end
- shared_examples 'no ECS job when AUTO_DEVOPS_PLATFORM_TARGET is not present' do |job_name|
- context 'when AUTO_DEVOPS_PLATFORM_TARGET is nil' do
- let(:platform_value) { nil }
+ context 'when the project is set for deployment to AWS' do
+ let(:platform_value) { 'ECS' }
+ let(:review_prod_build_names) { build_names.select { |n| n.include?('review') || n.include?('production') } }
- it 'does not trigger the job' do
- expect(build_names).not_to include(job_name)
- end
+ before do
+ create(:ci_variable, project: project, key: 'AUTO_DEVOPS_PLATFORM_TARGET', value: platform_value)
end
- context 'when AUTO_DEVOPS_PLATFORM_TARGET is empty' do
- let(:platform_value) { '' }
+ shared_examples 'no ECS job when AUTO_DEVOPS_PLATFORM_TARGET is not present' do |job_name|
+ context 'when AUTO_DEVOPS_PLATFORM_TARGET is nil' do
+ let(:platform_value) { nil }
- it 'does not trigger the job' do
- expect(build_names).not_to include(job_name)
+ it 'does not trigger the job' do
+ expect(build_names).not_to include(job_name)
+ end
end
- end
- end
- it_behaves_like 'no Kubernetes deployment job'
+ context 'when AUTO_DEVOPS_PLATFORM_TARGET is empty' do
+ let(:platform_value) { '' }
- it_behaves_like 'no ECS job when AUTO_DEVOPS_PLATFORM_TARGET is not present' do
- let(:job_name) { 'production_ecs' }
- end
+ it 'does not trigger the job' do
+ expect(build_names).not_to include(job_name)
+ end
+ end
+ end
- it 'creates an ECS deployment job for production only' do
- expect(review_prod_build_names).to contain_exactly('production_ecs')
- end
+ it_behaves_like 'no Kubernetes deployment job'
- context 'with FARGATE as a launch type' do
- let(:platform_value) { 'FARGATE' }
+ it_behaves_like 'no ECS job when AUTO_DEVOPS_PLATFORM_TARGET is not present' do
+ let(:job_name) { 'production_ecs' }
+ end
- it 'creates a FARGATE deployment job for production only' do
- expect(review_prod_build_names).to contain_exactly('production_fargate')
+ it 'creates an ECS deployment job for production only' do
+ expect(review_prod_build_names).to contain_exactly('production_ecs')
end
- end
- context 'and we are not on the default branch' do
- let(:platform_value) { 'ECS' }
- let(:pipeline_branch) { 'patch-1' }
+ context 'with FARGATE as a launch type' do
+ let(:platform_value) { 'FARGATE' }
- before do
- project.repository.create_branch(pipeline_branch)
+ it 'creates a FARGATE deployment job for production only' do
+ expect(review_prod_build_names).to contain_exactly('production_fargate')
+ end
end
- %w(review_ecs review_fargate).each do |job|
- it_behaves_like 'no ECS job when AUTO_DEVOPS_PLATFORM_TARGET is not present' do
- let(:job_name) { job }
+ context 'and we are not on the default branch' do
+ let(:platform_value) { 'ECS' }
+ let(:pipeline_branch) { 'patch-1' }
+
+ before do
+ project.repository.create_branch(pipeline_branch, default_branch)
end
- end
- it 'creates an ECS deployment job for review only' do
- expect(review_prod_build_names).to contain_exactly('review_ecs', 'stop_review_ecs')
- end
+ %w(review_ecs review_fargate).each do |job|
+ it_behaves_like 'no ECS job when AUTO_DEVOPS_PLATFORM_TARGET is not present' do
+ let(:job_name) { job }
+ end
+ end
- context 'with FARGATE as a launch type' do
- let(:platform_value) { 'FARGATE' }
+ it 'creates an ECS deployment job for review only' do
+ expect(review_prod_build_names).to contain_exactly('review_ecs', 'stop_review_ecs')
+ end
+
+ context 'with FARGATE as a launch type' do
+ let(:platform_value) { 'FARGATE' }
- it 'creates an FARGATE deployment job for review only' do
- expect(review_prod_build_names).to contain_exactly('review_fargate', 'stop_review_fargate')
+ it 'creates a FARGATE deployment job for review only' do
+ expect(review_prod_build_names).to contain_exactly('review_fargate', 'stop_review_fargate')
+ end
end
end
- end
- context 'and when the project has an active cluster' do
- let(:cluster) { create(:cluster, :project, :provided_by_gcp, projects: [project]) }
+ context 'and when the project has an active cluster' do
+ let(:cluster) { create(:cluster, :project, :provided_by_gcp, projects: [project]) }
- before do
- allow(cluster).to receive(:active?).and_return(true)
- end
+ before do
+ allow(cluster).to receive(:active?).and_return(true)
+ end
- context 'on default branch' do
- it 'triggers the deployment to Kubernetes, not to ECS' do
- expect(build_names).not_to include('review')
- expect(build_names).to include('production')
- expect(build_names).not_to include('production_ecs')
- expect(build_names).not_to include('review_ecs')
+ context 'on default branch' do
+ it 'triggers the deployment to Kubernetes, not to ECS' do
+ expect(build_names).not_to include('review')
+ expect(build_names).to include('production')
+ expect(build_names).not_to include('production_ecs')
+ expect(build_names).not_to include('review_ecs')
+ end
end
end
- end
- context 'when the platform target is EC2' do
- let(:platform_value) { 'EC2' }
+ context 'when the platform target is EC2' do
+ let(:platform_value) { 'EC2' }
- it 'contains the build_artifact job, not the build job' do
- expect(build_names).to include('build_artifact')
- expect(build_names).not_to include('build')
+ it 'contains the build_artifact job, not the build job' do
+ expect(build_names).to include('build_artifact')
+ expect(build_names).not_to include('build')
+ end
end
end
- end
-
- context 'when the project has no active cluster' do
- it 'only creates a build and a test stage' do
- expect(pipeline.stages_names).to eq(%w(build test))
- end
- it_behaves_like 'no Kubernetes deployment job'
- end
+ context 'when the project has no active cluster' do
+ it 'only creates a build and a test stage' do
+ expect(pipeline.stages_names).to eq(%w(build test))
+ end
- context 'when the project has an active cluster' do
- let!(:cluster) { create(:cluster, :project, :provided_by_gcp, projects: [project]) }
-
- describe 'deployment-related builds' do
- context 'on default branch' do
- it 'does not include rollout jobs besides production' do
- expect(build_names).to include('production')
- expect(build_names).not_to include('production_manual')
- expect(build_names).not_to include('staging')
- expect(build_names).not_to include('canary')
- expect(build_names).not_to include('review')
- expect(build_names).not_to include(a_string_matching(/rollout \d+%/))
- end
+ it_behaves_like 'no Kubernetes deployment job'
+ end
- context 'when STAGING_ENABLED=1' do
- before do
- create(:ci_variable, project: project, key: 'STAGING_ENABLED', value: '1')
- end
+ context 'when the project has an active cluster' do
+ let!(:cluster) { create(:cluster, :project, :provided_by_gcp, projects: [project]) }
- it 'includes a staging job and a production_manual job' do
- expect(build_names).not_to include('production')
- expect(build_names).to include('production_manual')
- expect(build_names).to include('staging')
+ describe 'deployment-related builds' do
+ context 'on default branch' do
+ it 'does not include rollout jobs besides production' do
+ expect(build_names).to include('production')
+ expect(build_names).not_to include('production_manual')
+ expect(build_names).not_to include('staging')
expect(build_names).not_to include('canary')
expect(build_names).not_to include('review')
expect(build_names).not_to include(a_string_matching(/rollout \d+%/))
end
+
+ context 'when STAGING_ENABLED=1' do
+ before do
+ create(:ci_variable, project: project, key: 'STAGING_ENABLED', value: '1')
+ end
+
+ it 'includes a staging job and a production_manual job' do
+ expect(build_names).not_to include('production')
+ expect(build_names).to include('production_manual')
+ expect(build_names).to include('staging')
+ expect(build_names).not_to include('canary')
+ expect(build_names).not_to include('review')
+ expect(build_names).not_to include(a_string_matching(/rollout \d+%/))
+ end
+ end
+
+ context 'when CANARY_ENABLED=1' do
+ before do
+ create(:ci_variable, project: project, key: 'CANARY_ENABLED', value: '1')
+ end
+
+ it 'includes a canary job and a production_manual job' do
+ expect(build_names).not_to include('production')
+ expect(build_names).to include('production_manual')
+ expect(build_names).not_to include('staging')
+ expect(build_names).to include('canary')
+ expect(build_names).not_to include('review')
+ expect(build_names).not_to include(a_string_matching(/rollout \d+%/))
+ end
+ end
end
- context 'when CANARY_ENABLED=1' do
+ context 'outside of default branch' do
+ let(:pipeline_branch) { 'patch-1' }
+
before do
- create(:ci_variable, project: project, key: 'CANARY_ENABLED', value: '1')
+ project.repository.create_branch(pipeline_branch, default_branch)
end
- it 'includes a canary job and a production_manual job' do
+ it 'does not include rollout jobs besides review' do
expect(build_names).not_to include('production')
- expect(build_names).to include('production_manual')
+ expect(build_names).not_to include('production_manual')
expect(build_names).not_to include('staging')
- expect(build_names).to include('canary')
- expect(build_names).not_to include('review')
+ expect(build_names).not_to include('canary')
+ expect(build_names).to include('review')
expect(build_names).not_to include(a_string_matching(/rollout \d+%/))
end
end
end
-
- context 'outside of default branch' do
- let(:pipeline_branch) { 'patch-1' }
-
- before do
- project.repository.create_branch(pipeline_branch)
- end
-
- it 'does not include rollout jobs besides review' do
- expect(build_names).not_to include('production')
- expect(build_names).not_to include('production_manual')
- expect(build_names).not_to include('staging')
- expect(build_names).not_to include('canary')
- expect(build_names).to include('review')
- expect(build_names).not_to include(a_string_matching(/rollout \d+%/))
- end
- end
end
end
- end
- describe 'build-pack detection' do
- using RSpec::Parameterized::TableSyntax
-
- where(:case_name, :files, :variables, :include_build_names, :not_include_build_names) do
- 'No match' | { 'README.md' => '' } | {} | %w() | %w(build test)
- 'Buildpack' | { 'README.md' => '' } | { 'BUILDPACK_URL' => 'http://example.com' } | %w(build test) | %w()
- 'Explicit set' | { 'README.md' => '' } | { 'AUTO_DEVOPS_EXPLICITLY_ENABLED' => '1' } | %w(build test) | %w()
- 'Explicit unset' | { 'README.md' => '' } | { 'AUTO_DEVOPS_EXPLICITLY_ENABLED' => '0' } | %w() | %w(build test)
- 'DOCKERFILE_PATH' | { 'README.md' => '' } | { 'DOCKERFILE_PATH' => 'Docker.file' } | %w(build test) | %w()
- 'Dockerfile' | { 'Dockerfile' => '' } | {} | %w(build test) | %w()
- 'Clojure' | { 'project.clj' => '' } | {} | %w(build test) | %w()
- 'Go modules' | { 'go.mod' => '' } | {} | %w(build test) | %w()
- 'Go gb' | { 'src/gitlab.com/gopackage.go' => '' } | {} | %w(build test) | %w()
- 'Gradle' | { 'gradlew' => '' } | {} | %w(build test) | %w()
- 'Java' | { 'pom.xml' => '' } | {} | %w(build test) | %w()
- 'Multi-buildpack' | { '.buildpacks' => '' } | {} | %w(build test) | %w()
- 'NodeJS' | { 'package.json' => '' } | {} | %w(build test) | %w()
- 'PHP' | { 'composer.json' => '' } | {} | %w(build test) | %w()
- 'Play' | { 'conf/application.conf' => '' } | {} | %w(build test) | %w()
- 'Python' | { 'Pipfile' => '' } | {} | %w(build test) | %w()
- 'Ruby' | { 'Gemfile' => '' } | {} | %w(build test) | %w()
- 'Scala' | { 'build.sbt' => '' } | {} | %w(build test) | %w()
- 'Static' | { '.static' => '' } | {} | %w(build test) | %w()
- end
+ describe 'build-pack detection' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:case_name, :files, :variables, :include_build_names, :not_include_build_names) do
+ 'No match' | { 'README.md' => '' } | {} | %w() | %w(build test)
+ 'Buildpack' | { 'README.md' => '' } | { 'BUILDPACK_URL' => 'http://example.com' } | %w(build test) | %w()
+ 'Explicit set' | { 'README.md' => '' } | { 'AUTO_DEVOPS_EXPLICITLY_ENABLED' => '1' } | %w(build test) | %w()
+ 'Explicit unset' | { 'README.md' => '' } | { 'AUTO_DEVOPS_EXPLICITLY_ENABLED' => '0' } | %w() | %w(build test)
+ 'DOCKERFILE_PATH' | { 'README.md' => '' } | { 'DOCKERFILE_PATH' => 'Docker.file' } | %w(build test) | %w()
+ 'Dockerfile' | { 'Dockerfile' => '' } | {} | %w(build test) | %w()
+ 'Clojure' | { 'project.clj' => '' } | {} | %w(build test) | %w()
+ 'Go modules' | { 'go.mod' => '' } | {} | %w(build test) | %w()
+ 'Go gb' | { 'src/gitlab.com/gopackage.go' => '' } | {} | %w(build test) | %w()
+ 'Gradle' | { 'gradlew' => '' } | {} | %w(build test) | %w()
+ 'Java' | { 'pom.xml' => '' } | {} | %w(build test) | %w()
+ 'Multi-buildpack' | { '.buildpacks' => '' } | {} | %w(build test) | %w()
+ 'NodeJS' | { 'package.json' => '' } | {} | %w(build test) | %w()
+ 'PHP' | { 'composer.json' => '' } | {} | %w(build test) | %w()
+ 'Play' | { 'conf/application.conf' => '' } | {} | %w(build test) | %w()
+ 'Python' | { 'Pipfile' => '' } | {} | %w(build test) | %w()
+ 'Ruby' | { 'Gemfile' => '' } | {} | %w(build test) | %w()
+ 'Scala' | { 'build.sbt' => '' } | {} | %w(build test) | %w()
+ 'Static' | { '.static' => '' } | {} | %w(build test) | %w()
+ end
- with_them do
- let(:project) { create(:project, :custom_repo, files: files) }
- let(:user) { project.owner }
- let(:service) { Ci::CreatePipelineService.new(project, user, ref: 'master' ) }
- let(:pipeline) { service.execute(:push) }
- let(:build_names) { pipeline.builds.pluck(:name) }
+ with_them do
+ let(:project) { create(:project, :custom_repo, files: files) }
+ let(:user) { project.owner }
+ let(:service) { Ci::CreatePipelineService.new(project, user, ref: default_branch) }
+ let(:pipeline) { service.execute(:push) }
+ let(:build_names) { pipeline.builds.pluck(:name) }
- before do
- stub_ci_pipeline_yaml_file(template.content)
- allow_any_instance_of(Ci::BuildScheduleWorker).to receive(:perform).and_return(true)
- variables.each do |(key, value)|
- create(:ci_variable, project: project, key: key, value: value)
+ before do
+ stub_application_setting(default_branch_name: default_branch)
+ stub_ci_pipeline_yaml_file(template.content)
+ allow_any_instance_of(Ci::BuildScheduleWorker).to receive(:perform).and_return(true)
+ variables.each do |(key, value)|
+ create(:ci_variable, project: project, key: key, value: value)
+ end
end
- end
- it 'creates a pipeline with the expected jobs' do
- expect(build_names).to include(*include_build_names)
- expect(build_names).not_to include(*not_include_build_names)
+ it 'creates a pipeline with the expected jobs' do
+ expect(build_names).to include(*include_build_names)
+ expect(build_names).not_to include(*not_include_build_names)
+ end
end
end
end
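
The Auto-DevOps template spec above now wraps its whole example tree in where(:default_branch) / with_them from the rspec-parameterized gem, so every example runs once for 'master' and once for 'main'. A dependency-free approximation of that run-per-branch idea, using a hypothetical helper instead of GitLab's pipeline service:

# frozen_string_literal: true

require 'rspec/autorun'

# Hypothetical helper: should a job restricted to the default branch run
# for the given ref?
def default_branch_job_runs?(ref:, default_branch:)
  ref == default_branch
end

RSpec.describe 'jobs restricted to the default branch' do
  %w[master main].each do |default_branch|
    context "when the default branch is #{default_branch}" do
      it 'runs for pipelines on that branch' do
        expect(default_branch_job_runs?(ref: default_branch, default_branch: default_branch)).to be(true)
      end

      it 'does not run for feature branches' do
        expect(default_branch_job_runs?(ref: 'patch-1', default_branch: default_branch)).to be(false)
      end
    end
  end
end
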
diff --git a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb
index 92bf2519588..597e4ca9b03 100644
--- a/spec/lib/gitlab/ci/trace_spec.rb
+++ b/spec/lib/gitlab/ci/trace_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Trace, :clean_gitlab_redis_shared_state, factory_default: :keep do
- let_it_be(:project) { create_default(:project) }
+ let_it_be(:project) { create_default(:project).freeze }
let_it_be_with_reload(:build) { create(:ci_build) }
let(:trace) { described_class.new(build) }
diff --git a/spec/lib/gitlab/ci/variables/collection/item_spec.rb b/spec/lib/gitlab/ci/variables/collection/item_spec.rb
index 2e43f22830a..ca9dc95711d 100644
--- a/spec/lib/gitlab/ci/variables/collection/item_spec.rb
+++ b/spec/lib/gitlab/ci/variables/collection/item_spec.rb
@@ -32,6 +32,7 @@ RSpec.describe Gitlab::Ci::Variables::Collection::Item do
it 'saves given value' do
expect(subject[:key]).to eq variable_key
expect(subject[:value]).to eq expected_value
+ expect(subject.value).to eq expected_value
end
end
@@ -69,6 +70,47 @@ RSpec.describe Gitlab::Ci::Variables::Collection::Item do
end
end
+ describe '#depends_on' do
+ let(:item) { Gitlab::Ci::Variables::Collection::Item.new(**variable) }
+
+ subject { item.depends_on }
+
+ context 'table tests' do
+ using RSpec::Parameterized::TableSyntax
+
+ where do
+ {
+ "no variable references": {
+ variable: { key: 'VAR', value: 'something' },
+ expected_depends_on: nil
+ },
+ "simple variable reference": {
+ variable: { key: 'VAR', value: 'something_$VAR2' },
+ expected_depends_on: %w(VAR2)
+ },
+ "complex expansion": {
+ variable: { key: 'VAR', value: 'something_${VAR2}_$VAR3' },
+ expected_depends_on: %w(VAR2 VAR3)
+ },
+ "complex expansion in raw variable": {
+ variable: { key: 'VAR', value: 'something_${VAR2}_$VAR3', raw: true },
+ expected_depends_on: nil
+ },
+ "complex expansions for Windows": {
+ variable: { key: 'variable3', value: 'key%variable%%variable2%' },
+ expected_depends_on: %w(variable variable2)
+ }
+ }
+ end
+
+ with_them do
+ it 'contains referenced variable names' do
+ is_expected.to eq(expected_depends_on)
+ end
+ end
+ end
+ end
+
describe '.fabricate' do
it 'supports using a hash' do
resource = described_class.fabricate(variable)
@@ -118,6 +160,26 @@ RSpec.describe Gitlab::Ci::Variables::Collection::Item do
end
end
+ describe '#raw' do
+ it 'returns false when :raw is not specified' do
+ item = described_class.new(**variable)
+
+ expect(item.raw).to eq false
+ end
+
+ context 'when :raw is specified as true' do
+ let(:variable) do
+ { key: variable_key, value: variable_value, public: true, masked: false, raw: true }
+ end
+
+ it 'returns true' do
+ item = described_class.new(**variable)
+
+ expect(item.raw).to eq true
+ end
+ end
+ end
+
describe '#to_runner_variable' do
context 'when variable is not a file-related' do
it 'returns a runner-compatible hash representation' do
@@ -139,5 +201,47 @@ RSpec.describe Gitlab::Ci::Variables::Collection::Item do
.to eq(key: 'VAR', value: 'value', public: true, file: true, masked: false)
end
end
+
+ context 'when variable is raw' do
+ it 'does not export raw value when it is false' do
+ runner_variable = described_class
+ .new(key: 'VAR', value: 'value', raw: false)
+ .to_runner_variable
+
+ expect(runner_variable)
+ .to eq(key: 'VAR', value: 'value', public: true, masked: false)
+ end
+
+ it 'exports raw value when it is true' do
+ runner_variable = described_class
+ .new(key: 'VAR', value: 'value', raw: true)
+ .to_runner_variable
+
+ expect(runner_variable)
+ .to eq(key: 'VAR', value: 'value', public: true, raw: true, masked: false)
+ end
+ end
+
+ context 'when referencing a variable' do
+ it '#depends_on contains names of dependencies' do
+ runner_variable = described_class.new(key: 'CI_VAR', value: '${CI_VAR_2}-123-$CI_VAR_3')
+
+ expect(runner_variable.depends_on).to eq(%w(CI_VAR_2 CI_VAR_3))
+ end
+ end
+
+ context 'when assigned the raw attribute' do
+ it 'retains a true raw attribute' do
+ runner_variable = described_class.new(key: 'CI_VAR', value: '123', raw: true)
+
+ expect(runner_variable).to eq(key: 'CI_VAR', value: '123', public: true, masked: false, raw: true)
+ end
+
+ it 'does not retain a false raw attribute' do
+ runner_variable = described_class.new(key: 'CI_VAR', value: '123', raw: false)
+
+ expect(runner_variable).to eq(key: 'CI_VAR', value: '123', public: true, masked: false)
+ end
+ end
end
end
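
The #depends_on examples above describe reference extraction: $VAR, ${VAR} and Windows-style %VAR% forms contribute names, raw variables contribute nothing, and a value with no references yields nil. A rough regex-based sketch of that extraction; the pattern is illustrative, not GitLab's actual one:

# Matches ${VAR}, $VAR and %VAR% style references.
REFERENCE_PATTERN = /
  \$\{(?<curly>\w+)\}    # ${VAR}
  | \$(?<plain>\w+)      # $VAR
  | %(?<percent>\w+)%    # %VAR% (Windows)
/x.freeze

def depends_on(value, raw: false)
  return if raw # raw values are never expanded, so they depend on nothing

  names = value.scan(REFERENCE_PATTERN).flatten.compact
  names.empty? ? nil : names.uniq
end

depends_on('something')                           # => nil
depends_on('something_${VAR2}_$VAR3')             # => ["VAR2", "VAR3"]
depends_on('key%variable%%variable2%')            # => ["variable", "variable2"]
depends_on('something_${VAR2}_$VAR3', raw: true)  # => nil
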
diff --git a/spec/lib/gitlab/ci/variables/collection/sort_spec.rb b/spec/lib/gitlab/ci/variables/collection/sort_spec.rb
new file mode 100644
index 00000000000..73cf0e19d00
--- /dev/null
+++ b/spec/lib/gitlab/ci/variables/collection/sort_spec.rb
@@ -0,0 +1,185 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Variables::Collection::Sort do
+ describe '#initialize with non-Collection value' do
+ context 'when FF :variable_inside_variable is disabled' do
+ subject { Gitlab::Ci::Variables::Collection::Sort.new([]) }
+
+ it 'raises ArgumentError' do
+ expect { subject }.to raise_error(ArgumentError, /Collection object was expected/)
+ end
+ end
+
+ context 'when FF :variable_inside_variable is enabled' do
+ subject { Gitlab::Ci::Variables::Collection::Sort.new([]) }
+
+ it 'raises ArgumentError' do
+ expect { subject }.to raise_error(ArgumentError, /Collection object was expected/)
+ end
+ end
+ end
+
+ describe '#errors' do
+ context 'table tests' do
+ using RSpec::Parameterized::TableSyntax
+
+ where do
+ {
+ "empty array": {
+ variables: [],
+ expected_errors: nil
+ },
+ "simple expansions": {
+ variables: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable2', value: 'result' },
+ { key: 'variable3', value: 'key$variable$variable2' }
+ ],
+ expected_errors: nil
+ },
+ "cyclic dependency": {
+ variables: [
+ { key: 'variable', value: '$variable2' },
+ { key: 'variable2', value: '$variable3' },
+ { key: 'variable3', value: 'key$variable$variable2' }
+ ],
+ expected_errors: 'circular variable reference detected: ["variable", "variable2", "variable3"]'
+ },
+ "array with raw variable": {
+ variables: [
+ { key: 'variable', value: '$variable2' },
+ { key: 'variable2', value: '$variable3' },
+ { key: 'variable3', value: 'key$variable$variable2', raw: true }
+ ],
+ expected_errors: nil
+ },
+ "variable containing escaped variable reference": {
+ variables: [
+ { key: 'variable_a', value: 'value' },
+ { key: 'variable_b', value: '$$variable_a' },
+ { key: 'variable_c', value: '$variable_b' }
+ ],
+ expected_errors: nil
+ }
+ }
+ end
+
+ with_them do
+ let(:collection) { Gitlab::Ci::Variables::Collection.new(variables) }
+
+ subject { Gitlab::Ci::Variables::Collection::Sort.new(collection) }
+
+ it '#errors matches the expected errors' do
+ expect(subject.errors).to eq(expected_errors)
+ end
+
+ it '#valid? is true only when no errors are expected' do
+ expect(subject.valid?).to eq(expected_errors.nil?)
+ end
+
+ it 'does not raise' do
+ expect { subject }.not_to raise_error
+ end
+ end
+ end
+ end
+
+ describe '#tsort' do
+ context 'table tests' do
+ using RSpec::Parameterized::TableSyntax
+
+ where do
+ {
+ "empty array": {
+ variables: [],
+ result: []
+ },
+ "simple expansions, no reordering needed": {
+ variables: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable2', value: 'result' },
+ { key: 'variable3', value: 'key$variable$variable2' }
+ ],
+ result: %w[variable variable2 variable3]
+ },
+ "complex expansion, reordering needed": {
+ variables: [
+ { key: 'variable2', value: 'key${variable}' },
+ { key: 'variable', value: 'value' }
+ ],
+ result: %w[variable variable2]
+ },
+ "unused variables": {
+ variables: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable4', value: 'key$variable$variable3' },
+ { key: 'variable2', value: 'result2' },
+ { key: 'variable3', value: 'result3' }
+ ],
+ result: %w[variable variable3 variable4 variable2]
+ },
+ "missing variable": {
+ variables: [
+ { key: 'variable2', value: 'key$variable' }
+ ],
+ result: %w[variable2]
+ },
+ "complex expansions with missing variable": {
+ variables: [
+ { key: 'variable4', value: 'key${variable}${variable2}${variable3}' },
+ { key: 'variable', value: 'value' },
+ { key: 'variable3', value: 'value3' }
+ ],
+ result: %w[variable variable3 variable4]
+ },
+ "raw variable does not get resolved": {
+ variables: [
+ { key: 'variable', value: '$variable2' },
+ { key: 'variable2', value: '$variable3' },
+ { key: 'variable3', value: 'key$variable$variable2', raw: true }
+ ],
+ result: %w[variable3 variable2 variable]
+ },
+ "variable containing escaped variable reference": {
+ variables: [
+ { key: 'variable_c', value: '$variable_b' },
+ { key: 'variable_b', value: '$$variable_a' },
+ { key: 'variable_a', value: 'value' }
+ ],
+ result: %w[variable_a variable_b variable_c]
+ }
+ }
+ end
+
+ with_them do
+ let(:collection) { Gitlab::Ci::Variables::Collection.new(variables) }
+
+ subject { Gitlab::Ci::Variables::Collection::Sort.new(collection).tsort }
+
+ it 'returns correctly sorted variables' do
+ expect(subject.pluck(:key)).to eq(result)
+ end
+ end
+ end
+
+ context 'cyclic dependency' do
+ let(:variables) do
+ [
+ { key: 'variable2', value: '$variable3' },
+ { key: 'variable3', value: 'key$variable$variable2' },
+ { key: 'variable', value: '$variable2' }
+ ]
+ end
+
+ let(:collection) { Gitlab::Ci::Variables::Collection.new(variables) }
+
+ subject { Gitlab::Ci::Variables::Collection::Sort.new(collection).tsort }
+
+ it 'raises TSort::Cyclic' do
+ expect { subject }.to raise_error(TSort::Cyclic)
+ end
+ end
+ end
+end
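
The new Sort spec above treats variables as a dependency graph: each $reference is an edge, missing and raw variables contribute no edges, and a cycle surfaces either as TSort::Cyclic or as the "circular variable reference detected" error. A compact sketch of that idea on top of Ruby's stdlib TSort; the class name, regex and error text are illustrative only:

require 'tsort'

class VariableSorterSketch
  include TSort

  # variables: array of { key:, value:, raw: (optional) } hashes
  def initialize(variables)
    @variables = variables.to_h { |var| [var[:key], var] }
  end

  def tsort_each_node(&block)
    @variables.each_key(&block)
  end

  def tsort_each_child(key, &block)
    var = @variables[key]
    return if var.nil? || var[:raw] # raw and unknown variables add no edges

    references = var[:value].scan(/\$\{?(\w+)\}?/).flatten
    references.select { |name| @variables.key?(name) }.each(&block)
  end

  def errors
    tsort
    nil
  rescue TSort::Cyclic
    'circular variable reference detected'
  end
end

sorter = VariableSorterSketch.new(
  [
    { key: 'variable2', value: 'key${variable}' },
    { key: 'variable', value: 'value' }
  ]
)
sorter.tsort  # => ["variable", "variable2"] (dependencies first)

cyclic = VariableSorterSketch.new(
  [
    { key: 'A', value: '$B' },
    { key: 'B', value: '$A' }
  ]
)
cyclic.errors # => "circular variable reference detected"
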
diff --git a/spec/lib/gitlab/ci/variables/collection/sorted_spec.rb b/spec/lib/gitlab/ci/variables/collection/sorted_spec.rb
deleted file mode 100644
index 954273fd41e..00000000000
--- a/spec/lib/gitlab/ci/variables/collection/sorted_spec.rb
+++ /dev/null
@@ -1,259 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Ci::Variables::Collection::Sorted do
- describe '#errors' do
- context 'when FF :variable_inside_variable is disabled' do
- let_it_be(:project_with_flag_disabled) { create(:project) }
- let_it_be(:project_with_flag_enabled) { create(:project) }
-
- before do
- stub_feature_flags(variable_inside_variable: [project_with_flag_enabled])
- end
-
- context 'table tests' do
- using RSpec::Parameterized::TableSyntax
-
- where do
- {
- "empty array": {
- variables: []
- },
- "simple expansions": {
- variables: [
- { key: 'variable', value: 'value' },
- { key: 'variable2', value: 'result' },
- { key: 'variable3', value: 'key$variable$variable2' }
- ]
- },
- "complex expansion": {
- variables: [
- { key: 'variable', value: 'value' },
- { key: 'variable2', value: 'key${variable}' }
- ]
- },
- "complex expansions with missing variable for Windows": {
- variables: [
- { key: 'variable', value: 'value' },
- { key: 'variable3', value: 'key%variable%%variable2%' }
- ]
- },
- "out-of-order variable reference": {
- variables: [
- { key: 'variable2', value: 'key${variable}' },
- { key: 'variable', value: 'value' }
- ]
- },
- "array with cyclic dependency": {
- variables: [
- { key: 'variable', value: '$variable2' },
- { key: 'variable2', value: '$variable3' },
- { key: 'variable3', value: 'key$variable$variable2' }
- ]
- }
- }
- end
-
- with_them do
- subject { Gitlab::Ci::Variables::Collection::Sorted.new(variables, project_with_flag_disabled) }
-
- it 'does not report error' do
- expect(subject.errors).to eq(nil)
- end
-
- it 'valid? reports true' do
- expect(subject.valid?).to eq(true)
- end
- end
- end
- end
-
- context 'when FF :variable_inside_variable is enabled' do
- let_it_be(:project_with_flag_disabled) { create(:project) }
- let_it_be(:project_with_flag_enabled) { create(:project) }
-
- before do
- stub_feature_flags(variable_inside_variable: [project_with_flag_enabled])
- end
-
- context 'table tests' do
- using RSpec::Parameterized::TableSyntax
-
- where do
- {
- "empty array": {
- variables: [],
- validation_result: nil
- },
- "simple expansions": {
- variables: [
- { key: 'variable', value: 'value' },
- { key: 'variable2', value: 'result' },
- { key: 'variable3', value: 'key$variable$variable2' }
- ],
- validation_result: nil
- },
- "cyclic dependency": {
- variables: [
- { key: 'variable', value: '$variable2' },
- { key: 'variable2', value: '$variable3' },
- { key: 'variable3', value: 'key$variable$variable2' }
- ],
- validation_result: 'circular variable reference detected: ["variable", "variable2", "variable3"]'
- }
- }
- end
-
- with_them do
- subject { Gitlab::Ci::Variables::Collection::Sorted.new(variables, project_with_flag_enabled) }
-
- it 'errors matches expected validation result' do
- expect(subject.errors).to eq(validation_result)
- end
-
- it 'valid? matches expected validation result' do
- expect(subject.valid?).to eq(validation_result.nil?)
- end
- end
- end
- end
- end
-
- describe '#sort' do
- context 'when FF :variable_inside_variable is disabled' do
- before do
- stub_feature_flags(variable_inside_variable: false)
- end
-
- context 'table tests' do
- using RSpec::Parameterized::TableSyntax
-
- where do
- {
- "empty array": {
- variables: []
- },
- "simple expansions": {
- variables: [
- { key: 'variable', value: 'value' },
- { key: 'variable2', value: 'result' },
- { key: 'variable3', value: 'key$variable$variable2' }
- ]
- },
- "complex expansion": {
- variables: [
- { key: 'variable', value: 'value' },
- { key: 'variable2', value: 'key${variable}' }
- ]
- },
- "complex expansions with missing variable for Windows": {
- variables: [
- { key: 'variable', value: 'value' },
- { key: 'variable3', value: 'key%variable%%variable2%' }
- ]
- },
- "out-of-order variable reference": {
- variables: [
- { key: 'variable2', value: 'key${variable}' },
- { key: 'variable', value: 'value' }
- ]
- },
- "array with cyclic dependency": {
- variables: [
- { key: 'variable', value: '$variable2' },
- { key: 'variable2', value: '$variable3' },
- { key: 'variable3', value: 'key$variable$variable2' }
- ]
- }
- }
- end
-
- with_them do
- let_it_be(:project) { create(:project) }
- subject { Gitlab::Ci::Variables::Collection::Sorted.new(variables, project) }
-
- it 'does not expand variables' do
- expect(subject.sort).to eq(variables)
- end
- end
- end
- end
-
- context 'when FF :variable_inside_variable is enabled' do
- before do
- stub_licensed_features(group_saml_group_sync: true)
- stub_feature_flags(saml_group_links: true)
- stub_feature_flags(variable_inside_variable: true)
- end
-
- context 'table tests' do
- using RSpec::Parameterized::TableSyntax
-
- where do
- {
- "empty array": {
- variables: [],
- result: []
- },
- "simple expansions, no reordering needed": {
- variables: [
- { key: 'variable', value: 'value' },
- { key: 'variable2', value: 'result' },
- { key: 'variable3', value: 'key$variable$variable2' }
- ],
- result: %w[variable variable2 variable3]
- },
- "complex expansion, reordering needed": {
- variables: [
- { key: 'variable2', value: 'key${variable}' },
- { key: 'variable', value: 'value' }
- ],
- result: %w[variable variable2]
- },
- "unused variables": {
- variables: [
- { key: 'variable', value: 'value' },
- { key: 'variable4', value: 'key$variable$variable3' },
- { key: 'variable2', value: 'result2' },
- { key: 'variable3', value: 'result3' }
- ],
- result: %w[variable variable3 variable4 variable2]
- },
- "missing variable": {
- variables: [
- { key: 'variable2', value: 'key$variable' }
- ],
- result: %w[variable2]
- },
- "complex expansions with missing variable": {
- variables: [
- { key: 'variable4', value: 'key${variable}${variable2}${variable3}' },
- { key: 'variable', value: 'value' },
- { key: 'variable3', value: 'value3' }
- ],
- result: %w[variable variable3 variable4]
- },
- "cyclic dependency causes original array to be returned": {
- variables: [
- { key: 'variable2', value: '$variable3' },
- { key: 'variable3', value: 'key$variable$variable2' },
- { key: 'variable', value: '$variable2' }
- ],
- result: %w[variable2 variable3 variable]
- }
- }
- end
-
- with_them do
- let_it_be(:project) { create(:project) }
- subject { Gitlab::Ci::Variables::Collection::Sorted.new(variables, project) }
-
- it 'sort returns correctly sorted variables' do
- expect(subject.sort.map { |var| var[:key] }).to eq(result)
- end
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/ci/variables/collection_spec.rb b/spec/lib/gitlab/ci/variables/collection_spec.rb
index ac84313ad9f..7b77754190a 100644
--- a/spec/lib/gitlab/ci/variables/collection_spec.rb
+++ b/spec/lib/gitlab/ci/variables/collection_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Gitlab::Ci::Variables::Collection do
end
it 'can be initialized without an argument' do
- expect(subject).to be_none
+ is_expected.to be_none
end
end
@@ -21,13 +21,13 @@ RSpec.describe Gitlab::Ci::Variables::Collection do
it 'appends a hash' do
subject.append(key: 'VARIABLE', value: 'something')
- expect(subject).to be_one
+ is_expected.to be_one
end
it 'appends a Ci::Variable' do
subject.append(build(:ci_variable))
- expect(subject).to be_one
+ is_expected.to be_one
end
it 'appends an internal resource' do
@@ -35,7 +35,7 @@ RSpec.describe Gitlab::Ci::Variables::Collection do
subject.append(collection.first)
- expect(subject).to be_one
+ is_expected.to be_one
end
it 'returns self' do
@@ -98,6 +98,50 @@ RSpec.describe Gitlab::Ci::Variables::Collection do
end
end
+ describe '#[]' do
+ variable = { key: 'VAR', value: 'value', public: true, masked: false }
+
+ collection = described_class.new([variable])
+
+ it 'returns nil for a non-existent variable name' do
+ expect(collection['UNKNOWN_VAR']).to be_nil
+ end
+
+ it 'returns Item for an existent variable name' do
+ expect(collection['VAR']).to be_an_instance_of(Gitlab::Ci::Variables::Collection::Item)
+ expect(collection['VAR'].to_runner_variable).to eq(variable)
+ end
+ end
+
+ describe '#size' do
+ it 'returns zero for empty collection' do
+ collection = described_class.new([])
+
+ expect(collection.size).to eq(0)
+ end
+
+ it 'returns 2 for collection with 2 variables' do
+ collection = described_class.new(
+ [
+ { key: 'VAR1', value: 'value', public: true, masked: false },
+ { key: 'VAR2', value: 'value', public: true, masked: false }
+ ])
+
+ expect(collection.size).to eq(2)
+ end
+
+ it 'returns 3 for collection with 2 duplicate variables' do
+ collection = described_class.new(
+ [
+ { key: 'VAR1', value: 'value', public: true, masked: false },
+ { key: 'VAR2', value: 'value', public: true, masked: false },
+ { key: 'VAR1', value: 'value', public: true, masked: false }
+ ])
+
+ expect(collection.size).to eq(3)
+ end
+ end
+
describe '#to_runner_variables' do
it 'creates an array of hashes in a runner-compatible format' do
collection = described_class.new([{ key: 'TEST', value: '1' }])
@@ -121,4 +165,338 @@ RSpec.describe Gitlab::Ci::Variables::Collection do
expect(collection.to_hash).not_to include(TEST1: 'test-1')
end
end
+
+ describe '#reject' do
+ let(:collection) do
+ described_class.new
+ .append(key: 'CI_JOB_NAME', value: 'test-1')
+ .append(key: 'CI_BUILD_ID', value: '1')
+ .append(key: 'TEST1', value: 'test-3')
+ end
+
+ subject { collection.reject { |var| var[:key] =~ /\ACI_(JOB|BUILD)/ } }
+
+ it 'returns a Collection instance' do
+ is_expected.to be_an_instance_of(described_class)
+ end
+
+ it 'returns correctly filtered Collection' do
+ comp = collection.to_runner_variables.reject { |var| var[:key] =~ /\ACI_(JOB|BUILD)/ }
+ expect(subject.to_runner_variables).to eq(comp)
+ end
+ end
+
+ describe '#expand_value' do
+ let(:collection) do
+ Gitlab::Ci::Variables::Collection.new
+ .append(key: 'CI_JOB_NAME', value: 'test-1')
+ .append(key: 'CI_BUILD_ID', value: '1')
+ .append(key: 'RAW_VAR', value: '$TEST1', raw: true)
+ .append(key: 'TEST1', value: 'test-3')
+ end
+
+ context 'table tests' do
+ using RSpec::Parameterized::TableSyntax
+
+ where do
+ {
+ "empty value": {
+ value: '',
+ result: '',
+ keep_undefined: false
+ },
+ "simple expansions": {
+ value: 'key$TEST1-$CI_BUILD_ID',
+ result: 'keytest-3-1',
+ keep_undefined: false
+ },
+ "complex expansion": {
+ value: 'key${TEST1}-${CI_JOB_NAME}',
+ result: 'keytest-3-test-1',
+ keep_undefined: false
+ },
+ "complex expansions with raw variable": {
+ value: 'key${RAW_VAR}-${CI_JOB_NAME}',
+ result: 'key$TEST1-test-1',
+ keep_undefined: false
+ },
+ "missing variable not keeping original": {
+ value: 'key${MISSING_VAR}-${CI_JOB_NAME}',
+ result: 'key-test-1',
+ keep_undefined: false
+ },
+ "missing variable keeping original": {
+ value: 'key${MISSING_VAR}-${CI_JOB_NAME}',
+ result: 'key${MISSING_VAR}-test-1',
+ keep_undefined: true
+ }
+ }
+ end
+
+ with_them do
+ subject { collection.expand_value(value, keep_undefined: keep_undefined) }
+
+ it 'matches expected expansion' do
+ is_expected.to eq(result)
+ end
+ end
+ end
+ end
+
+ describe '#sort_and_expand_all' do
+ context 'when FF :variable_inside_variable is disabled' do
+ let_it_be(:project_with_flag_disabled) { create(:project) }
+ let_it_be(:project_with_flag_enabled) { create(:project) }
+
+ before do
+ stub_feature_flags(variable_inside_variable: [project_with_flag_enabled])
+ end
+
+ context 'table tests' do
+ using RSpec::Parameterized::TableSyntax
+
+ where do
+ {
+ "empty array": {
+ variables: [],
+ keep_undefined: false
+ },
+ "simple expansions": {
+ variables: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable2', value: 'result' },
+ { key: 'variable3', value: 'key$variable$variable2' }
+ ],
+ keep_undefined: false
+ },
+ "complex expansion": {
+ variables: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable2', value: 'key${variable}' }
+ ],
+ keep_undefined: false
+ },
+ "out-of-order variable reference": {
+ variables: [
+ { key: 'variable2', value: 'key${variable}' },
+ { key: 'variable', value: 'value' }
+ ],
+ keep_undefined: false
+ },
+ "complex expansions with raw variable": {
+ variables: [
+ { key: 'variable3', value: 'key_${variable}_${variable2}' },
+ { key: 'variable', value: '$variable2', raw: true },
+ { key: 'variable2', value: 'value2' }
+ ],
+ keep_undefined: false
+ },
+ "array with cyclic dependency": {
+ variables: [
+ { key: 'variable', value: '$variable2' },
+ { key: 'variable2', value: '$variable3' },
+ { key: 'variable3', value: 'key$variable$variable2' }
+ ],
+ keep_undefined: true
+ }
+ }
+ end
+
+ with_them do
+ let(:collection) { Gitlab::Ci::Variables::Collection.new(variables, keep_undefined: keep_undefined) }
+
+ subject { collection.sort_and_expand_all(project_with_flag_disabled) }
+
+ it 'returns Collection' do
+ is_expected.to be_an_instance_of(Gitlab::Ci::Variables::Collection)
+ end
+
+ it 'does not expand variables' do
+ var_hash = variables.pluck(:key, :value).to_h
+ expect(subject.to_hash).to eq(var_hash)
+ end
+ end
+ end
+ end
+
+ context 'when FF :variable_inside_variable is enabled' do
+ let_it_be(:project_with_flag_disabled) { create(:project) }
+ let_it_be(:project_with_flag_enabled) { create(:project) }
+
+ before do
+ stub_feature_flags(variable_inside_variable: [project_with_flag_enabled])
+ end
+
+ context 'table tests' do
+ using RSpec::Parameterized::TableSyntax
+
+ where do
+ {
+ "empty array": {
+ variables: [],
+ keep_undefined: false,
+ result: []
+ },
+ "simple expansions": {
+ variables: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable2', value: 'result' },
+ { key: 'variable3', value: 'key$variable$variable2' },
+ { key: 'variable4', value: 'key$variable$variable3' }
+ ],
+ keep_undefined: false,
+ result: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable2', value: 'result' },
+ { key: 'variable3', value: 'keyvalueresult' },
+ { key: 'variable4', value: 'keyvaluekeyvalueresult' }
+ ]
+ },
+ "complex expansion": {
+ variables: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable2', value: 'key${variable}' }
+ ],
+ keep_undefined: false,
+ result: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable2', value: 'keyvalue' }
+ ]
+ },
+ "unused variables": {
+ variables: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable2', value: 'result2' },
+ { key: 'variable3', value: 'result3' },
+ { key: 'variable4', value: 'key$variable$variable3' }
+ ],
+ keep_undefined: false,
+ result: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable2', value: 'result2' },
+ { key: 'variable3', value: 'result3' },
+ { key: 'variable4', value: 'keyvalueresult3' }
+ ]
+ },
+ "complex expansions": {
+ variables: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable2', value: 'result' },
+ { key: 'variable3', value: 'key${variable}${variable2}' }
+ ],
+ keep_undefined: false,
+ result: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable2', value: 'result' },
+ { key: 'variable3', value: 'keyvalueresult' }
+ ]
+ },
+ "out-of-order expansion": {
+ variables: [
+ { key: 'variable3', value: 'key$variable2$variable' },
+ { key: 'variable', value: 'value' },
+ { key: 'variable2', value: 'result' }
+ ],
+ keep_undefined: false,
+ result: [
+ { key: 'variable2', value: 'result' },
+ { key: 'variable', value: 'value' },
+ { key: 'variable3', value: 'keyresultvalue' }
+ ]
+ },
+ "out-of-order complex expansion": {
+ variables: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable2', value: 'result' },
+ { key: 'variable3', value: 'key${variable2}${variable}' }
+ ],
+ keep_undefined: false,
+ result: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable2', value: 'result' },
+ { key: 'variable3', value: 'keyresultvalue' }
+ ]
+ },
+ "missing variable": {
+ variables: [
+ { key: 'variable2', value: 'key$variable' }
+ ],
+ keep_undefined: false,
+ result: [
+ { key: 'variable2', value: 'key' }
+ ]
+ },
+ "missing variable keeping original": {
+ variables: [
+ { key: 'variable2', value: 'key$variable' }
+ ],
+ keep_undefined: true,
+ result: [
+ { key: 'variable2', value: 'key$variable' }
+ ]
+ },
+ "complex expansions with missing variable keeping original": {
+ variables: [
+ { key: 'variable4', value: 'key${variable}${variable2}${variable3}' },
+ { key: 'variable', value: 'value' },
+ { key: 'variable3', value: 'value3' }
+ ],
+ keep_undefined: true,
+ result: [
+ { key: 'variable', value: 'value' },
+ { key: 'variable3', value: 'value3' },
+ { key: 'variable4', value: 'keyvalue${variable2}value3' }
+ ]
+ },
+ "complex expansions with raw variable": {
+ variables: [
+ { key: 'variable3', value: 'key_${variable}_${variable2}' },
+ { key: 'variable', value: '$variable2', raw: true },
+ { key: 'variable2', value: 'value2' }
+ ],
+ keep_undefined: false,
+ result: [
+ { key: 'variable', value: '$variable2', raw: true },
+ { key: 'variable2', value: 'value2' },
+ { key: 'variable3', value: 'key_$variable2_value2' }
+ ]
+ },
+ "cyclic dependency causes original array to be returned": {
+ variables: [
+ { key: 'variable', value: '$variable2' },
+ { key: 'variable2', value: '$variable3' },
+ { key: 'variable3', value: 'key$variable$variable2' }
+ ],
+ keep_undefined: false,
+ result: [
+ { key: 'variable', value: '$variable2' },
+ { key: 'variable2', value: '$variable3' },
+ { key: 'variable3', value: 'key$variable$variable2' }
+ ]
+ }
+ }
+ end
+
+ with_them do
+ let(:collection) { Gitlab::Ci::Variables::Collection.new(variables) }
+
+ subject { collection.sort_and_expand_all(project_with_flag_enabled, keep_undefined: keep_undefined) }
+
+ it 'returns Collection' do
+ is_expected.to be_an_instance_of(Gitlab::Ci::Variables::Collection)
+ end
+
+ it 'expands variables' do
+ var_hash = result.to_h { |env| [env.fetch(:key), env.fetch(:value)] }
+ .with_indifferent_access
+ expect(subject.to_hash).to eq(var_hash)
+ end
+
+ it 'preserves raw attribute' do
+ expect(subject.pluck(:key, :raw).to_h).to eq(collection.pluck(:key, :raw).to_h)
+ end
+ end
+ end
+ end
+ end
end
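
The #expand_value table above boils down to a single-pass interpolation: known references are substituted with their stored values in one pass (so a raw value such as $TEST1 is inserted verbatim and never expanded again), and unknown references are either dropped or kept depending on keep_undefined. A small gsub-based sketch of that expansion step, not the Collection implementation itself:

def expand_value(value, variables, keep_undefined: false)
  value.gsub(/\$\{(\w+)\}|\$(\w+)/) do
    name = Regexp.last_match(1) || Regexp.last_match(2)

    if variables.key?(name)
      variables[name]
    elsif keep_undefined
      Regexp.last_match(0) # keep the reference exactly as written
    else
      ''                   # drop references to unknown variables
    end
  end
end

vars = { 'TEST1' => 'test-3', 'CI_JOB_NAME' => 'test-1', 'RAW_VAR' => '$TEST1' }

expand_value('key${TEST1}-${CI_JOB_NAME}', vars)       # => "keytest-3-test-1"
expand_value('key${RAW_VAR}-${CI_JOB_NAME}', vars)     # => "key$TEST1-test-1"
expand_value('key${MISSING_VAR}-${CI_JOB_NAME}', vars) # => "key-test-1"
expand_value('key${MISSING_VAR}-${CI_JOB_NAME}', vars, keep_undefined: true)
# => "key${MISSING_VAR}-test-1"

Judging by the table for #sort_and_expand_all, a similar substitution runs after the topological ordering sketched earlier, which is why raw values stay verbatim and cyclic collections come back unchanged.
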
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index 9498453852a..5462a587d16 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -1368,6 +1368,155 @@ module Gitlab
end
end
+ context 'with multiple_cache_per_job FF disabled' do
+ before do
+ stub_feature_flags(multiple_cache_per_job: false)
+ end
+
+ describe 'cache' do
+ context 'when cache definition has unknown keys' do
+ let(:config) do
+ YAML.dump(
+ { cache: { untracked: true, invalid: 'key' },
+ rspec: { script: 'rspec' } })
+ end
+
+ it_behaves_like 'returns errors', 'cache config contains unknown keys: invalid'
+ end
+
+ it "returns cache when defined globally" do
+ config = YAML.dump({
+ cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'key' },
+ rspec: {
+ script: "rspec"
+ }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
+
+ expect(config_processor.stage_builds_attributes("test").size).to eq(1)
+ expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
+ paths: ["logs/", "binaries/"],
+ untracked: true,
+ key: 'key',
+ policy: 'pull-push',
+ when: 'on_success'
+ )
+ end
+
+ it "returns cache when defined in default context" do
+ config = YAML.dump(
+ {
+ default: {
+ cache: { paths: ["logs/", "binaries/"], untracked: true, key: { files: ['file'] } }
+ },
+ rspec: {
+ script: "rspec"
+ }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
+
+ expect(config_processor.stage_builds_attributes("test").size).to eq(1)
+ expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
+ paths: ["logs/", "binaries/"],
+ untracked: true,
+ key: { files: ['file'] },
+ policy: 'pull-push',
+ when: 'on_success'
+ )
+ end
+
+ it 'returns cache key when defined in a job' do
+ config = YAML.dump({
+ rspec: {
+ cache: { paths: ['logs/', 'binaries/'], untracked: true, key: 'key' },
+ script: 'rspec'
+ }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
+
+ expect(config_processor.stage_builds_attributes('test').size).to eq(1)
+ expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
+ paths: ['logs/', 'binaries/'],
+ untracked: true,
+ key: 'key',
+ policy: 'pull-push',
+ when: 'on_success'
+ )
+ end
+
+ it 'returns cache files' do
+ config = YAML.dump(
+ rspec: {
+ cache: {
+ paths: ['logs/', 'binaries/'],
+ untracked: true,
+ key: { files: ['file'] }
+ },
+ script: 'rspec'
+ }
+ )
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
+
+ expect(config_processor.stage_builds_attributes('test').size).to eq(1)
+ expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
+ paths: ['logs/', 'binaries/'],
+ untracked: true,
+ key: { files: ['file'] },
+ policy: 'pull-push',
+ when: 'on_success'
+ )
+ end
+
+ it 'returns cache files with prefix' do
+ config = YAML.dump(
+ rspec: {
+ cache: {
+ paths: ['logs/', 'binaries/'],
+ untracked: true,
+ key: { files: ['file'], prefix: 'prefix' }
+ },
+ script: 'rspec'
+ }
+ )
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
+
+ expect(config_processor.stage_builds_attributes('test').size).to eq(1)
+ expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
+ paths: ['logs/', 'binaries/'],
+ untracked: true,
+ key: { files: ['file'], prefix: 'prefix' },
+ policy: 'pull-push',
+ when: 'on_success'
+ )
+ end
+
+ it "overwrite cache when defined for a job and globally" do
+ config = YAML.dump({
+ cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'global' },
+ rspec: {
+ script: "rspec",
+ cache: { paths: ["test/"], untracked: false, key: 'local' }
+ }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
+
+ expect(config_processor.stage_builds_attributes("test").size).to eq(1)
+ expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
+ paths: ["test/"],
+ untracked: false,
+ key: 'local',
+ policy: 'pull-push',
+ when: 'on_success'
+ )
+ end
+ end
+ end
+
describe 'cache' do
context 'when cache definition has unknown keys' do
let(:config) do
@@ -1381,22 +1530,22 @@ module Gitlab
it "returns cache when defined globally" do
config = YAML.dump({
- cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'key' },
- rspec: {
- script: "rspec"
- }
- })
+ cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'key' },
+ rspec: {
+ script: "rspec"
+ }
+ })
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
+ expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq([
paths: ["logs/", "binaries/"],
untracked: true,
key: 'key',
policy: 'pull-push',
when: 'on_success'
- )
+ ])
end
it "returns cache when defined in default context" do
@@ -1413,32 +1562,46 @@ module Gitlab
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
+ expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq([
paths: ["logs/", "binaries/"],
untracked: true,
key: { files: ['file'] },
policy: 'pull-push',
when: 'on_success'
- )
+ ])
end
- it 'returns cache key when defined in a job' do
+ it 'returns cache keys when defined in a job' do
config = YAML.dump({
- rspec: {
- cache: { paths: ['logs/', 'binaries/'], untracked: true, key: 'key' },
- script: 'rspec'
- }
- })
+ rspec: {
+ cache: [
+ { paths: ['binaries/'], untracked: true, key: 'keya' },
+ { paths: ['logs/', 'binaries/'], untracked: true, key: 'key' }
+ ],
+ script: 'rspec'
+ }
+ })
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes('test').size).to eq(1)
expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
- paths: ['logs/', 'binaries/'],
- untracked: true,
- key: 'key',
- policy: 'pull-push',
- when: 'on_success'
+ [
+ {
+ paths: ['binaries/'],
+ untracked: true,
+ key: 'keya',
+ policy: 'pull-push',
+ when: 'on_success'
+ },
+ {
+ paths: ['logs/', 'binaries/'],
+ untracked: true,
+ key: 'key',
+ policy: 'pull-push',
+ when: 'on_success'
+ }
+ ]
)
end
@@ -1446,10 +1609,10 @@ module Gitlab
config = YAML.dump(
rspec: {
cache: {
- paths: ['logs/', 'binaries/'],
- untracked: true,
- key: { files: ['file'] }
- },
+ paths: ['binaries/'],
+ untracked: true,
+ key: { files: ['file'] }
+ },
script: 'rspec'
}
)
@@ -1457,13 +1620,13 @@ module Gitlab
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes('test').size).to eq(1)
- expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
- paths: ['logs/', 'binaries/'],
+ expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq([
+ paths: ['binaries/'],
untracked: true,
key: { files: ['file'] },
policy: 'pull-push',
when: 'on_success'
- )
+ ])
end
it 'returns cache files with prefix' do
@@ -1481,34 +1644,34 @@ module Gitlab
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes('test').size).to eq(1)
- expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
+ expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq([
paths: ['logs/', 'binaries/'],
untracked: true,
key: { files: ['file'], prefix: 'prefix' },
policy: 'pull-push',
when: 'on_success'
- )
+ ])
end
it "overwrite cache when defined for a job and globally" do
config = YAML.dump({
- cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'global' },
- rspec: {
- script: "rspec",
- cache: { paths: ["test/"], untracked: false, key: 'local' }
- }
- })
+ cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'global' },
+ rspec: {
+ script: "rspec",
+ cache: { paths: ["test/"], untracked: false, key: 'local' }
+ }
+ })
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
+ expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq([
paths: ["test/"],
untracked: false,
key: 'local',
policy: 'pull-push',
when: 'on_success'
- )
+ ])
end
end
@@ -1926,8 +2089,8 @@ module Gitlab
only: { refs: %w[branches tags] },
options: { script: ["test"] },
needs_attributes: [
- { name: "build1", artifacts: true },
- { name: "build2", artifacts: true }
+ { name: "build1", artifacts: true, optional: false },
+ { name: "build2", artifacts: true, optional: false }
],
when: "on_success",
allow_failure: false,
@@ -1941,7 +2104,7 @@ module Gitlab
let(:needs) do
[
{ job: 'parallel', artifacts: false },
- { job: 'build1', artifacts: true },
+ { job: 'build1', artifacts: true, optional: true },
'build2'
]
end
@@ -1968,10 +2131,10 @@ module Gitlab
only: { refs: %w[branches tags] },
options: { script: ["test"] },
needs_attributes: [
- { name: "parallel 1/2", artifacts: false },
- { name: "parallel 2/2", artifacts: false },
- { name: "build1", artifacts: true },
- { name: "build2", artifacts: true }
+ { name: "parallel 1/2", artifacts: false, optional: false },
+ { name: "parallel 2/2", artifacts: false, optional: false },
+ { name: "build1", artifacts: true, optional: true },
+ { name: "build2", artifacts: true, optional: false }
],
when: "on_success",
allow_failure: false,
@@ -1993,8 +2156,8 @@ module Gitlab
only: { refs: %w[branches tags] },
options: { script: ["test"] },
needs_attributes: [
- { name: "parallel 1/2", artifacts: true },
- { name: "parallel 2/2", artifacts: true }
+ { name: "parallel 1/2", artifacts: true, optional: false },
+ { name: "parallel 2/2", artifacts: true, optional: false }
],
when: "on_success",
allow_failure: false,
@@ -2022,10 +2185,10 @@ module Gitlab
only: { refs: %w[branches tags] },
options: { script: ["test"] },
needs_attributes: [
- { name: "build1", artifacts: true },
- { name: "build2", artifacts: true },
- { name: "parallel 1/2", artifacts: true },
- { name: "parallel 2/2", artifacts: true }
+ { name: "build1", artifacts: true, optional: false },
+ { name: "build2", artifacts: true, optional: false },
+ { name: "parallel 1/2", artifacts: true, optional: false },
+ { name: "parallel 2/2", artifacts: true, optional: false }
],
when: "on_success",
allow_failure: false,
diff --git a/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb b/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
index 76578340f7b..2cdf95ea101 100644
--- a/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
@@ -230,34 +230,13 @@ RSpec.describe Gitlab::CycleAnalytics::StageSummary do
end
context 'when `from` and `to` are within a day' do
- context 'when query_deploymenys_via_finished_at_in_vsa feature flag is off' do
- before do
- stub_feature_flags(query_deploymenys_via_finished_at_in_vsa: false)
- end
-
- it 'returns the number of deployments made on that day' do
- freeze_time do
- create(:deployment, :success, project: project)
- options[:from] = options[:to] = Time.zone.now
-
- expect(subject).to eq('1')
- end
- end
- end
-
- context 'when query_deploymenys_via_finished_at_in_vsa feature flag is off' do
- before do
- stub_feature_flags(query_deploymenys_via_finished_at_in_vsa: true)
- end
-
- it 'returns the number of deployments made on that day' do
- freeze_time do
- create(:deployment, :success, project: project, finished_at: Time.zone.now)
- options[:from] = Time.zone.now.at_beginning_of_day
- options[:to] = Time.zone.now.at_end_of_day
+ it 'returns the number of deployments made on that day' do
+ freeze_time do
+ create(:deployment, :success, project: project, finished_at: Time.zone.now)
+ options[:from] = Time.zone.now.at_beginning_of_day
+ options[:to] = Time.zone.now.at_end_of_day
- expect(subject).to eq('1')
- end
+ expect(subject).to eq('1')
end
end
end
diff --git a/spec/lib/gitlab/data_builder/build_spec.rb b/spec/lib/gitlab/data_builder/build_spec.rb
index 4242469b3db..ab1728414bb 100644
--- a/spec/lib/gitlab/data_builder/build_spec.rb
+++ b/spec/lib/gitlab/data_builder/build_spec.rb
@@ -38,6 +38,7 @@ RSpec.describe Gitlab::DataBuilder::Build do
it { expect(data[:runner][:id]).to eq(build.runner.id) }
it { expect(data[:runner][:tags]).to match_array(tag_names) }
it { expect(data[:runner][:description]).to eq(build.runner.description) }
+ it { expect(data[:environment]).to be_nil }
context 'commit author_url' do
context 'when no commit present' do
@@ -63,6 +64,13 @@ RSpec.describe Gitlab::DataBuilder::Build do
expect(data[:commit][:author_url]).to eq(Gitlab::Routing.url_helpers.user_url(username: build.commit.author.username))
end
end
+
+ context 'with environment' do
+ let(:build) { create(:ci_build, :teardown_environment) }
+
+ it { expect(data[:environment][:name]).to eq(build.expanded_environment_name) }
+ it { expect(data[:environment][:action]).to eq(build.environment_action) }
+ end
end
end
end
diff --git a/spec/lib/gitlab/data_builder/pipeline_spec.rb b/spec/lib/gitlab/data_builder/pipeline_spec.rb
index fd7cadeb89e..cf04f560ceb 100644
--- a/spec/lib/gitlab/data_builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/data_builder/pipeline_spec.rb
@@ -37,6 +37,7 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
expect(build_data[:id]).to eq(build.id)
expect(build_data[:status]).to eq(build.status)
expect(build_data[:allow_failure]).to eq(build.allow_failure)
+ expect(build_data[:environment]).to be_nil
expect(runner_data).to eq(nil)
expect(project_data).to eq(project.hook_attrs(backward: false))
expect(data[:merge_request]).to be_nil
@@ -115,5 +116,12 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
expect(build_data[:id]).to eq(build.id)
end
end
+
+ context 'build with environment' do
+ let!(:build) { create(:ci_build, :teardown_environment, pipeline: pipeline) }
+
+ it { expect(build_data[:environment][:name]).to eq(build.expanded_environment_name) }
+ it { expect(build_data[:environment][:action]).to eq(build.environment_action) }
+ end
end
end
diff --git a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
new file mode 100644
index 00000000000..1020aafcf08
--- /dev/null
+++ b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model do
+ it_behaves_like 'having unique enum values'
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:batched_migration).with_foreign_key(:batched_background_migration_id) }
+ end
+
+ describe 'delegated batched_migration attributes' do
+ let(:batched_job) { build(:batched_background_migration_job) }
+ let(:batched_migration) { batched_job.batched_migration }
+
+ describe '#migration_aborted?' do
+ before do
+ batched_migration.status = :aborted
+ end
+
+ it 'returns the migration aborted?' do
+ expect(batched_job.migration_aborted?).to eq(batched_migration.aborted?)
+ end
+ end
+
+ describe '#migration_job_class' do
+ it 'returns the migration job_class' do
+ expect(batched_job.migration_job_class).to eq(batched_migration.job_class)
+ end
+ end
+
+ describe '#migration_table_name' do
+ it 'returns the migration table_name' do
+ expect(batched_job.migration_table_name).to eq(batched_migration.table_name)
+ end
+ end
+
+ describe '#migration_column_name' do
+ it 'returns the migration column_name' do
+ expect(batched_job.migration_column_name).to eq(batched_migration.column_name)
+ end
+ end
+
+ describe '#migration_job_arguments' do
+ it 'returns the migration job_arguments' do
+ expect(batched_job.migration_job_arguments).to eq(batched_migration.job_arguments)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
new file mode 100644
index 00000000000..f4a939e7c1f
--- /dev/null
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
@@ -0,0 +1,160 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :model do
+ it_behaves_like 'having unique enum values'
+
+ describe 'associations' do
+ it { is_expected.to have_many(:batched_jobs).with_foreign_key(:batched_background_migration_id) }
+
+ describe '#last_job' do
+ let!(:batched_migration) { create(:batched_background_migration) }
+ let!(:batched_job1) { create(:batched_background_migration_job, batched_migration: batched_migration) }
+ let!(:batched_job2) { create(:batched_background_migration_job, batched_migration: batched_migration) }
+
+ it 'returns the most recent (in order of id) batched job' do
+ expect(batched_migration.last_job).to eq(batched_job2)
+ end
+ end
+ end
+
+ describe '.queue_order' do
+ let!(:migration1) { create(:batched_background_migration) }
+ let!(:migration2) { create(:batched_background_migration) }
+ let!(:migration3) { create(:batched_background_migration) }
+
+ it 'returns batched migrations ordered by their id' do
+ expect(described_class.queue_order.all).to eq([migration1, migration2, migration3])
+ end
+ end
+
+ describe '#interval_elapsed?' do
+ context 'when the migration has no last_job' do
+ let(:batched_migration) { build(:batched_background_migration) }
+
+ it 'returns true' do
+ expect(batched_migration.interval_elapsed?).to eq(true)
+ end
+ end
+
+ context 'when the migration has a last_job' do
+ let(:interval) { 2.minutes }
+ let(:batched_migration) { create(:batched_background_migration, interval: interval) }
+
+ context 'when the last_job is less than an interval old' do
+ it 'returns false' do
+ freeze_time do
+ create(:batched_background_migration_job,
+ batched_migration: batched_migration,
+ created_at: Time.current - 1.minute)
+
+ expect(batched_migration.interval_elapsed?).to eq(false)
+ end
+ end
+ end
+
+ context 'when the last_job is exactly an interval old' do
+ it 'returns true' do
+ freeze_time do
+ create(:batched_background_migration_job,
+ batched_migration: batched_migration,
+ created_at: Time.current - 2.minutes)
+
+ expect(batched_migration.interval_elapsed?).to eq(true)
+ end
+ end
+ end
+
+ context 'when the last_job is more than an interval old' do
+ it 'returns true' do
+ freeze_time do
+ create(:batched_background_migration_job,
+ batched_migration: batched_migration,
+ created_at: Time.current - 3.minutes)
+
+ expect(batched_migration.interval_elapsed?).to eq(true)
+ end
+ end
+ end
+ end
+ end
+
+ describe '#create_batched_job!' do
+ let(:batched_migration) { create(:batched_background_migration) }
+
+ it 'creates a batched_job with the correct batch configuration' do
+ batched_job = batched_migration.create_batched_job!(1, 5)
+
+ expect(batched_job).to have_attributes(
+ min_value: 1,
+ max_value: 5,
+ batch_size: batched_migration.batch_size,
+ sub_batch_size: batched_migration.sub_batch_size)
+ end
+ end
+
+ describe '#next_min_value' do
+ let!(:batched_migration) { create(:batched_background_migration) }
+
+ context 'when a previous job exists' do
+ let!(:batched_job) { create(:batched_background_migration_job, batched_migration: batched_migration) }
+
+ it 'returns the next value after the previous maximum' do
+ expect(batched_migration.next_min_value).to eq(batched_job.max_value + 1)
+ end
+ end
+
+ context 'when a previous job does not exist' do
+ it 'returns the migration minimum value' do
+ expect(batched_migration.next_min_value).to eq(batched_migration.min_value)
+ end
+ end
+ end
+
+ describe '#job_class' do
+ let(:job_class) { Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJob }
+ let(:batched_migration) { build(:batched_background_migration) }
+
+ it 'returns the class of the job for the migration' do
+ expect(batched_migration.job_class).to eq(job_class)
+ end
+ end
+
+ describe '#batch_class' do
+ let(:batch_class) { Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy }
+ let(:batched_migration) { build(:batched_background_migration) }
+
+ it 'returns the class of the batch strategy for the migration' do
+ expect(batched_migration.batch_class).to eq(batch_class)
+ end
+ end
+
+ shared_examples_for 'an attr_writer that demodulizes assigned class names' do |attribute_name|
+ let(:batched_migration) { build(:batched_background_migration) }
+
+ context 'when a module name exists' do
+ it 'removes the module name' do
+ batched_migration.public_send(:"#{attribute_name}=", '::Foo::Bar')
+
+ expect(batched_migration[attribute_name]).to eq('Bar')
+ end
+ end
+
+ context 'when a module name does not exist' do
+ it 'does not change the given class name' do
+ batched_migration.public_send(:"#{attribute_name}=", 'Bar')
+
+ expect(batched_migration[attribute_name]).to eq('Bar')
+ end
+ end
+ end
+
+ describe '#job_class_name=' do
+ it_behaves_like 'an attr_writer that demodulizes assigned class names', :job_class_name
+ end
+
+ describe '#batch_class_name=' do
+ it_behaves_like 'an attr_writer that demodulizes assigned class names', :batch_class_name
+ end
+end
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
new file mode 100644
index 00000000000..17cceb35ff7
--- /dev/null
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '#perform' do
+ let(:migration_wrapper) { described_class.new }
+ let(:job_class) { Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJob }
+
+ let_it_be(:active_migration) { create(:batched_background_migration, :active, job_arguments: [:id, :other_id]) }
+
+ let!(:job_record) { create(:batched_background_migration_job, batched_migration: active_migration) }
+
+ it 'runs the migration job' do
+ expect_next_instance_of(job_class) do |job_instance|
+ expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, 'id', 'other_id')
+ end
+
+ migration_wrapper.perform(job_record)
+ end
+
+ it 'updates the tracking record in the database' do
+ expect(job_record).to receive(:update!).with(hash_including(attempts: 1, status: :running)).and_call_original
+
+ freeze_time do
+ migration_wrapper.perform(job_record)
+
+ reloaded_job_record = job_record.reload
+
+ expect(reloaded_job_record).not_to be_pending
+ expect(reloaded_job_record.attempts).to eq(1)
+ expect(reloaded_job_record.started_at).to eq(Time.current)
+ end
+ end
+
+ context 'when the migration job does not raise an error' do
+ it 'marks the tracking record as succeeded' do
+ expect_next_instance_of(job_class) do |job_instance|
+ expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, 'id', 'other_id')
+ end
+
+ freeze_time do
+ migration_wrapper.perform(job_record)
+
+ reloaded_job_record = job_record.reload
+
+ expect(reloaded_job_record).to be_succeeded
+ expect(reloaded_job_record.finished_at).to eq(Time.current)
+ end
+ end
+ end
+
+ context 'when the migration job raises an error' do
+ it 'marks the tracking record as failed before raising the error' do
+ expect_next_instance_of(job_class) do |job_instance|
+ expect(job_instance).to receive(:perform)
+ .with(1, 10, 'events', 'id', 1, 'id', 'other_id')
+ .and_raise(RuntimeError, 'Something broke!')
+ end
+
+ freeze_time do
+ expect { migration_wrapper.perform(job_record) }.to raise_error(RuntimeError, 'Something broke!')
+
+ reloaded_job_record = job_record.reload
+
+ expect(reloaded_job_record).to be_failed
+ expect(reloaded_job_record.finished_at).to eq(Time.current)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/background_migration/scheduler_spec.rb b/spec/lib/gitlab/database/background_migration/scheduler_spec.rb
new file mode 100644
index 00000000000..ba745acdf8a
--- /dev/null
+++ b/spec/lib/gitlab/database/background_migration/scheduler_spec.rb
@@ -0,0 +1,182 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::BackgroundMigration::Scheduler, '#perform' do
+ let(:scheduler) { described_class.new }
+
+ shared_examples_for 'it has no jobs to run' do
+ it 'does not create and run a migration job' do
+ test_wrapper = double('test wrapper')
+
+ expect(test_wrapper).not_to receive(:perform)
+
+ expect do
+ scheduler.perform(migration_wrapper: test_wrapper)
+ end.not_to change { Gitlab::Database::BackgroundMigration::BatchedJob.count }
+ end
+ end
+
+ context 'when there are no active migrations' do
+ let!(:migration) { create(:batched_background_migration, :finished) }
+
+ it_behaves_like 'it has no jobs to run'
+ end
+
+ shared_examples_for 'it has completed the migration' do
+ it 'marks the migration as finished' do
+ relation = Gitlab::Database::BackgroundMigration::BatchedMigration.finished.where(id: first_migration.id)
+
+ expect { scheduler.perform }.to change { relation.count }.by(1)
+ end
+ end
+
+ context 'when there are active migrations' do
+ let!(:first_migration) { create(:batched_background_migration, :active, batch_size: 2) }
+ let!(:last_migration) { create(:batched_background_migration, :active) }
+
+ let(:job_relation) do
+ Gitlab::Database::BackgroundMigration::BatchedJob.where(batched_background_migration_id: first_migration.id)
+ end
+
+ context 'when the migration interval has not elapsed' do
+ before do
+ expect_next_found_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigration) do |migration|
+ expect(migration).to receive(:interval_elapsed?).and_return(false)
+ end
+ end
+
+ it_behaves_like 'it has no jobs to run'
+ end
+
+ context 'when the interval has elapsed' do
+ before do
+ expect_next_found_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigration) do |migration|
+ expect(migration).to receive(:interval_elapsed?).and_return(true)
+ end
+ end
+
+ context 'when the first migration has no previous jobs' do
+ context 'when the migration has batches to process' do
+ let!(:event1) { create(:event) }
+ let!(:event2) { create(:event) }
+ let!(:event3) { create(:event) }
+
+ it 'runs the job for the first batch' do
+ first_migration.update!(min_value: event1.id, max_value: event3.id)
+
+ expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper) do |wrapper|
+ expect(wrapper).to receive(:perform).and_wrap_original do |_, job_record|
+ expect(job_record).to eq(job_relation.first)
+ end
+ end
+
+ expect { scheduler.perform }.to change { job_relation.count }.by(1)
+
+ expect(job_relation.first).to have_attributes(
+ min_value: event1.id,
+ max_value: event2.id,
+ batch_size: first_migration.batch_size,
+ sub_batch_size: first_migration.sub_batch_size)
+ end
+ end
+
+ context 'when the migration has no batches to process' do
+ it_behaves_like 'it has no jobs to run'
+ it_behaves_like 'it has completed the migration'
+ end
+ end
+
+ context 'when the first migration has previous jobs' do
+ let!(:event1) { create(:event) }
+ let!(:event2) { create(:event) }
+ let!(:event3) { create(:event) }
+
+ let!(:previous_job) do
+ create(:batched_background_migration_job,
+ batched_migration: first_migration,
+ min_value: event1.id,
+ max_value: event2.id,
+ batch_size: 2,
+ sub_batch_size: 1)
+ end
+
+ context 'when the migration is ready to process another job' do
+ it 'runs the migration job for the next batch' do
+ first_migration.update!(min_value: event1.id, max_value: event3.id)
+
+ expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper) do |wrapper|
+ expect(wrapper).to receive(:perform).and_wrap_original do |_, job_record|
+ expect(job_record).to eq(job_relation.last)
+ end
+ end
+
+ expect { scheduler.perform }.to change { job_relation.count }.by(1)
+
+ expect(job_relation.last).to have_attributes(
+ min_value: event3.id,
+ max_value: event3.id,
+ batch_size: first_migration.batch_size,
+ sub_batch_size: first_migration.sub_batch_size)
+ end
+ end
+
+ context 'when the migration has no batches remaining' do
+ let!(:final_job) do
+ create(:batched_background_migration_job,
+ batched_migration: first_migration,
+ min_value: event3.id,
+ max_value: event3.id,
+ batch_size: 2,
+ sub_batch_size: 1)
+ end
+
+ it_behaves_like 'it has no jobs to run'
+ it_behaves_like 'it has completed the migration'
+ end
+ end
+
+ context 'when the bounds of the next batch exceed the migration maximum value' do
+ let!(:events) { create_list(:event, 3) }
+ let(:event1) { events[0] }
+ let(:event2) { events[1] }
+
+ context 'when the batch maximum exceeds the migration maximum' do
+ it 'clamps the batch maximum to the migration maximum' do
+ first_migration.update!(batch_size: 5, min_value: event1.id, max_value: event2.id)
+
+ expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper) do |wrapper|
+ expect(wrapper).to receive(:perform)
+ end
+
+ expect { scheduler.perform }.to change { job_relation.count }.by(1)
+
+ expect(job_relation.first).to have_attributes(
+ min_value: event1.id,
+ max_value: event2.id,
+ batch_size: first_migration.batch_size,
+ sub_batch_size: first_migration.sub_batch_size)
+ end
+ end
+
+ context 'when the batch minimum exceeds the migration maximum' do
+ let!(:previous_job) do
+ create(:batched_background_migration_job,
+ batched_migration: first_migration,
+ min_value: event1.id,
+ max_value: event2.id,
+ batch_size: 5,
+ sub_batch_size: 1)
+ end
+
+ before do
+ first_migration.update!(batch_size: 5, min_value: 1, max_value: event2.id)
+ end
+
+ it_behaves_like 'it has no jobs to run'
+ it_behaves_like 'it has completed the migration'
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/bulk_update_spec.rb b/spec/lib/gitlab/database/bulk_update_spec.rb
index f2a7d6e69d8..dbafada26ca 100644
--- a/spec/lib/gitlab/database/bulk_update_spec.rb
+++ b/spec/lib/gitlab/database/bulk_update_spec.rb
@@ -13,8 +13,8 @@ RSpec.describe Gitlab::Database::BulkUpdate do
i_a, i_b = create_list(:issue, 2)
{
- i_a => { title: 'Issue a' },
- i_b => { title: 'Issue b' }
+ i_a => { title: 'Issue a' },
+ i_b => { title: 'Issue b' }
}
end
@@ -51,7 +51,7 @@ RSpec.describe Gitlab::Database::BulkUpdate do
it 'is possible to update all objects in a single query' do
users = create_list(:user, 3)
- mapping = users.zip(%w(foo bar baz)).to_h do |u, name|
+ mapping = users.zip(%w[foo bar baz]).to_h do |u, name|
[u, { username: name, admin: true }]
end
@@ -61,13 +61,13 @@ RSpec.describe Gitlab::Database::BulkUpdate do
# We have optimistically updated the values
expect(users).to all(be_admin)
- expect(users.map(&:username)).to eq(%w(foo bar baz))
+ expect(users.map(&:username)).to eq(%w[foo bar baz])
users.each(&:reset)
# The values are correct on reset
expect(users).to all(be_admin)
- expect(users.map(&:username)).to eq(%w(foo bar baz))
+ expect(users.map(&:username)).to eq(%w[foo bar baz])
end
it 'is possible to update heterogeneous sets' do
@@ -79,8 +79,8 @@ RSpec.describe Gitlab::Database::BulkUpdate do
mapping = {
mr_a => { title: 'MR a' },
- i_a => { title: 'Issue a' },
- i_b => { title: 'Issue b' }
+ i_a => { title: 'Issue a' },
+ i_b => { title: 'Issue b' }
}
expect do
@@ -99,8 +99,8 @@ RSpec.describe Gitlab::Database::BulkUpdate do
i_a, i_b = create_list(:issue, 2)
mapping = {
- i_a => { title: 'Issue a' },
- i_b => { title: 'Issue b' }
+ i_a => { title: 'Issue a' },
+ i_b => { title: 'Issue b' }
}
described_class.execute(%i[title], mapping)
@@ -113,23 +113,19 @@ RSpec.describe Gitlab::Database::BulkUpdate do
include_examples 'basic functionality'
context 'when prepared statements are configured differently to the normal test environment' do
- # rubocop: disable RSpec/LeakyConstantDeclaration
- # This cop is disabled because you cannot call establish_connection on
- # an anonymous class.
- class ActiveRecordBasePreparedStatementsInverted < ActiveRecord::Base
- def self.abstract_class?
- true # So it gets its own connection
+ before do
+ klass = Class.new(ActiveRecord::Base) do
+ def self.abstract_class?
+ true # So it gets its own connection
+ end
end
- end
- # rubocop: enable RSpec/LeakyConstantDeclaration
- before_all do
+ stub_const('ActiveRecordBasePreparedStatementsInverted', klass)
+
c = ActiveRecord::Base.connection.instance_variable_get(:@config)
inverted = c.merge(prepared_statements: !ActiveRecord::Base.connection.prepared_statements)
ActiveRecordBasePreparedStatementsInverted.establish_connection(inverted)
- end
- before do
allow(ActiveRecord::Base).to receive(:connection_specification_name)
.and_return(ActiveRecordBasePreparedStatementsInverted.connection_specification_name)
end
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 6de7fc3a50e..9178707a3d0 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -180,6 +180,32 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
+ context 'when with_lock_retries re-runs the block' do
+ it 'only creates constraint for unique definitions' do
+ expected_sql = <<~SQL
+ ALTER TABLE "#{table_name}"\nADD CONSTRAINT "check_cda6f69506" CHECK (char_length("name") <= 255)
+ SQL
+
+ expect(model).to receive(:create_table).twice.and_call_original
+
+ expect(model).to receive(:execute).with(expected_sql).and_raise(ActiveRecord::LockWaitTimeout)
+ expect(model).to receive(:execute).with(expected_sql).and_call_original
+
+ model.create_table_with_constraints table_name do |t|
+ t.timestamps_with_timezone
+ t.integer :some_id, null: false
+ t.boolean :active, null: false, default: true
+ t.text :name
+
+ t.text_limit :name, 255
+ end
+
+ expect_table_columns_to_match(column_attributes, table_name)
+
+ expect_check_constraint(table_name, 'check_cda6f69506', 'char_length(name) <= 255')
+ end
+ end
+
context 'when constraints are given invalid names' do
let(:expected_max_length) { described_class::MAX_IDENTIFIER_NAME_LENGTH }
let(:expected_error_message) { "The maximum allowed constraint name is #{expected_max_length} characters" }
@@ -1720,7 +1746,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
.with(
2.minutes,
'CopyColumnUsingBackgroundMigrationJob',
- [event.id, event.id, :events, :id, :id, 'id_convert_to_bigint', 100]
+ [event.id, event.id, :events, :id, 100, :id, 'id_convert_to_bigint']
)
expect(Gitlab::BackgroundMigration)
diff --git a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
index 3e8563376ce..e25e4af2e86 100644
--- a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
context 'with enough rows to bulk queue jobs more than once' do
before do
- stub_const('Gitlab::Database::Migrations::BackgroundMigrationHelpers::BACKGROUND_MIGRATION_JOB_BUFFER_SIZE', 1)
+ stub_const('Gitlab::Database::Migrations::BackgroundMigrationHelpers::JOB_BUFFER_SIZE', 1)
end
it 'queues jobs correctly' do
@@ -262,6 +262,120 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
end
end
+ describe '#queue_batched_background_migration' do
+ it 'creates the database record for the migration' do
+ expect do
+ model.queue_batched_background_migration(
+ 'MyJobClass',
+ :projects,
+ :id,
+ job_interval: 5.minutes,
+ batch_min_value: 5,
+ batch_max_value: 1000,
+ batch_class_name: 'MyBatchClass',
+ batch_size: 100,
+ sub_batch_size: 10)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
+
+ expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to have_attributes(
+ job_class_name: 'MyJobClass',
+ table_name: 'projects',
+ column_name: 'id',
+ interval: 300,
+ min_value: 5,
+ max_value: 1000,
+ batch_class_name: 'MyBatchClass',
+ batch_size: 100,
+ sub_batch_size: 10,
+ job_arguments: %w[],
+ status: 'active')
+ end
+
+ context 'when the job interval is lower than the minimum' do
+ let(:minimum_delay) { described_class::BATCH_MIN_DELAY }
+
+ it 'sets the job interval to the minimum value' do
+ expect do
+ model.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: minimum_delay - 1.minute)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
+
+ created_migration = Gitlab::Database::BackgroundMigration::BatchedMigration.last
+
+ expect(created_migration.interval).to eq(minimum_delay)
+ end
+ end
+
+ context 'when additional arguments are passed to the method' do
+ it 'saves the arguments on the database record' do
+ expect do
+ model.queue_batched_background_migration(
+ 'MyJobClass',
+ :projects,
+ :id,
+ 'my',
+ 'arguments',
+ job_interval: 5.minutes,
+ batch_max_value: 1000)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
+
+ expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to have_attributes(
+ job_class_name: 'MyJobClass',
+ table_name: 'projects',
+ column_name: 'id',
+ interval: 300,
+ min_value: 1,
+ max_value: 1000,
+ job_arguments: %w[my arguments])
+ end
+ end
+
+ context 'when the max_value is not given' do
+ context 'when records exist in the database' do
+ let!(:event1) { create(:event) }
+ let!(:event2) { create(:event) }
+ let!(:event3) { create(:event) }
+
+ it 'creates the record with the current max value' do
+ expect do
+ model.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
+
+ created_migration = Gitlab::Database::BackgroundMigration::BatchedMigration.last
+
+ expect(created_migration.max_value).to eq(event3.id)
+ end
+
+ it 'creates the record with an active status' do
+ expect do
+ model.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
+
+ expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to be_active
+ end
+ end
+
+ context 'when the database is empty' do
+ it 'sets the max value to the min value' do
+ expect do
+ model.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
+
+ created_migration = Gitlab::Database::BackgroundMigration::BatchedMigration.last
+
+ expect(created_migration.max_value).to eq(created_migration.min_value)
+ end
+
+ it 'creates the record with a finished status' do
+ expect do
+ model.queue_batched_background_migration('MyJobClass', :projects, :id, job_interval: 5.minutes)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
+
+ expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to be_finished
+ end
+ end
+ end
+ end
+
describe '#migrate_async' do
it 'calls BackgroundMigrationWorker.perform_async' do
expect(BackgroundMigrationWorker).to receive(:perform_async).with("Class", "hello", "world")
diff --git a/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb b/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb
new file mode 100644
index 00000000000..a3b03050b33
--- /dev/null
+++ b/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Migrations::Observers::QueryStatistics do
+ subject { described_class.new }
+
+ let(:connection) { ActiveRecord::Base.connection }
+
+ def mock_pgss(enabled: true)
+ if enabled
+ allow(subject).to receive(:function_exists?).with(:pg_stat_statements_reset).and_return(true)
+ allow(connection).to receive(:view_exists?).with(:pg_stat_statements).and_return(true)
+ else
+ allow(subject).to receive(:function_exists?).with(:pg_stat_statements_reset).and_return(false)
+ allow(connection).to receive(:view_exists?).with(:pg_stat_statements).and_return(false)
+ end
+ end
+
+ describe '#before' do
+ context 'with pgss available' do
+ it 'resets pg_stat_statements' do
+ mock_pgss(enabled: true)
+ expect(connection).to receive(:execute).with('select pg_stat_statements_reset()').once
+
+ subject.before
+ end
+ end
+
+ context 'without pgss available' do
+ it 'executes nothing' do
+ mock_pgss(enabled: false)
+ expect(connection).not_to receive(:execute)
+
+ subject.before
+ end
+ end
+ end
+
+ describe '#record' do
+ let(:observation) { Gitlab::Database::Migrations::Observation.new }
+ let(:result) { double }
+ let(:pgss_query) do
+ <<~SQL
+ SELECT query, calls, total_time, max_time, mean_time, rows
+ FROM pg_stat_statements
+ ORDER BY total_time DESC
+ SQL
+ end
+
+ context 'with pgss available' do
+ it 'fetches data from pg_stat_statements and stores it on the observation' do
+ mock_pgss(enabled: true)
+ expect(connection).to receive(:execute).with(pgss_query).once.and_return(result)
+
+ expect { subject.record(observation) }.to change { observation.query_statistics }.from(nil).to(result)
+ end
+ end
+
+ context 'without pgss available' do
+ it 'executes nothing' do
+ mock_pgss(enabled: false)
+ expect(connection).not_to receive(:execute)
+
+ expect { subject.record(observation) }.not_to change { observation.query_statistics }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
index 76b1be1e497..757da2d9092 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
@@ -81,7 +81,7 @@ RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :
end
describe '#rename_path_for_routable' do
- context 'for namespaces' do
+ context 'for personal namespaces' do
let(:namespace) { create(:namespace, path: 'the-path') }
it "renames namespaces called the-path" do
@@ -119,13 +119,16 @@ RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :
expect(project.route.reload.path).to eq('the-path-but-not-really/the-project')
end
+ end
- context "the-path namespace -> subgroup -> the-path0 project" do
+ context 'for groups' do
+ context "the-path group -> subgroup -> the-path0 project" do
it "updates the route of the project correctly" do
- subgroup = create(:group, path: "subgroup", parent: namespace)
+ group = create(:group, path: 'the-path')
+ subgroup = create(:group, path: "subgroup", parent: group)
project = create(:project, :repository, path: "the-path0", namespace: subgroup)
- subject.rename_path_for_routable(migration_namespace(namespace))
+ subject.rename_path_for_routable(migration_namespace(group))
expect(project.route.reload.path).to eq("the-path0/subgroup/the-path0")
end
@@ -158,23 +161,27 @@ RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :
end
describe '#perform_rename' do
- describe 'for namespaces' do
- let(:namespace) { create(:namespace, path: 'the-path') }
-
+ context 'for personal namespaces' do
it 'renames the path' do
+ namespace = create(:namespace, path: 'the-path')
+
subject.perform_rename(migration_namespace(namespace), 'the-path', 'renamed')
expect(namespace.reload.path).to eq('renamed')
+ expect(namespace.reload.route.path).to eq('renamed')
end
+ end
- it 'renames all the routes for the namespace' do
- child = create(:group, path: 'child', parent: namespace)
+ context 'for groups' do
+ it 'renames all the routes for the group' do
+ group = create(:group, path: 'the-path')
+ child = create(:group, path: 'child', parent: group)
project = create(:project, :repository, namespace: child, path: 'the-project')
- other_one = create(:namespace, path: 'the-path-is-similar')
+ other_one = create(:group, path: 'the-path-is-similar')
- subject.perform_rename(migration_namespace(namespace), 'the-path', 'renamed')
+ subject.perform_rename(migration_namespace(group), 'the-path', 'renamed')
- expect(namespace.reload.route.path).to eq('renamed')
+ expect(group.reload.route.path).to eq('renamed')
expect(child.reload.route.path).to eq('renamed/child')
expect(project.reload.route.path).to eq('renamed/child/the-project')
expect(other_one.reload.route.path).to eq('the-path-is-similar')
diff --git a/spec/lib/gitlab/database/similarity_score_spec.rb b/spec/lib/gitlab/database/similarity_score_spec.rb
index cf75e5a72d9..b7b66494390 100644
--- a/spec/lib/gitlab/database/similarity_score_spec.rb
+++ b/spec/lib/gitlab/database/similarity_score_spec.rb
@@ -71,7 +71,7 @@ RSpec.describe Gitlab::Database::SimilarityScore do
let(:search) { 'xyz' }
it 'results have 0 similarity score' do
- expect(query_result.map { |row| row['similarity'] }).to all(eq(0))
+ expect(query_result.map { |row| row['similarity'].to_f }).to all(eq(0))
end
end
end
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index 3175040167b..1553a989dba 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -441,4 +441,112 @@ RSpec.describe Gitlab::Database do
end
end
end
+
+ describe 'ActiveRecordBaseTransactionMetrics' do
+ def subscribe_events
+ events = []
+
+ begin
+ subscriber = ActiveSupport::Notifications.subscribe('transaction.active_record') do |e|
+ events << e
+ end
+
+ yield
+ ensure
+ ActiveSupport::Notifications.unsubscribe(subscriber) if subscriber
+ end
+
+ events
+ end
+
+ context 'without a transaction block' do
+ it 'does not publish a transaction event' do
+ events = subscribe_events do
+ User.first
+ end
+
+ expect(events).to be_empty
+ end
+ end
+
+ context 'within a transaction block' do
+ it 'publishes a transaction event' do
+ events = subscribe_events do
+ ActiveRecord::Base.transaction do
+ User.first
+ end
+ end
+
+ expect(events.length).to be(1)
+
+ event = events.first
+ expect(event).not_to be_nil
+ expect(event.duration).to be > 0.0
+ expect(event.payload).to a_hash_including(
+ connection: be_a(ActiveRecord::ConnectionAdapters::AbstractAdapter)
+ )
+ end
+ end
+
+ context 'within an empty transaction block' do
+ it 'publishes a transaction event' do
+ events = subscribe_events do
+ ActiveRecord::Base.transaction {}
+ end
+
+ expect(events.length).to be(1)
+
+ event = events.first
+ expect(event).not_to be_nil
+ expect(event.duration).to be > 0.0
+ expect(event.payload).to a_hash_including(
+ connection: be_a(ActiveRecord::ConnectionAdapters::AbstractAdapter)
+ )
+ end
+ end
+
+ context 'within a nested transaction block' do
+ it 'publishes multiple transaction events' do
+ events = subscribe_events do
+ ActiveRecord::Base.transaction do
+ ActiveRecord::Base.transaction do
+ ActiveRecord::Base.transaction do
+ User.first
+ end
+ end
+ end
+ end
+
+ expect(events.length).to be(3)
+
+ events.each do |event|
+ expect(event).not_to be_nil
+ expect(event.duration).to be > 0.0
+ expect(event.payload).to a_hash_including(
+ connection: be_a(ActiveRecord::ConnectionAdapters::AbstractAdapter)
+ )
+ end
+ end
+ end
+
+ context 'within a cancelled transaction block' do
+ it 'publishes multiple transaction events' do
+ events = subscribe_events do
+ ActiveRecord::Base.transaction do
+ User.first
+ raise ActiveRecord::Rollback
+ end
+ end
+
+ expect(events.length).to be(1)
+
+ event = events.first
+ expect(event).not_to be_nil
+ expect(event.duration).to be > 0.0
+ expect(event.payload).to a_hash_including(
+ connection: be_a(ActiveRecord::ConnectionAdapters::AbstractAdapter)
+ )
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/diff/highlight_cache_spec.rb b/spec/lib/gitlab/diff/highlight_cache_spec.rb
index 94717152488..d26bc5fc9a8 100644
--- a/spec/lib/gitlab/diff/highlight_cache_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_cache_spec.rb
@@ -237,17 +237,17 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
describe '#key' do
subject { cache.key }
- it 'returns the next version of the cache' do
- is_expected.to start_with("highlighted-diff-files:#{cache.diffable.cache_key}:2")
+ it 'returns the cache key' do
+ is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{cache.diff_options}:true")
end
context 'when feature flag is disabled' do
before do
- stub_feature_flags(improved_merge_diff_highlighting: false)
+ stub_feature_flags(introduce_marker_ranges: false)
end
it 'returns the original version of the cache' do
- is_expected.to start_with("highlighted-diff-files:#{cache.diffable.cache_key}:1")
+ is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{cache.diff_options}:false")
end
end
end
diff --git a/spec/lib/gitlab/diff/highlight_spec.rb b/spec/lib/gitlab/diff/highlight_spec.rb
index 283437e7fbd..e613674af3a 100644
--- a/spec/lib/gitlab/diff/highlight_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_spec.rb
@@ -50,11 +50,23 @@ RSpec.describe Gitlab::Diff::Highlight do
end
it 'highlights and marks added lines' do
- code = %Q{+<span id="LC9" class="line" lang="ruby"> <span class="k">raise</span> <span class="no"><span class="idiff left">RuntimeError</span></span><span class="p"><span class="idiff">,</span></span><span class="idiff right"> </span><span class="s2">"System commands must be given as an array of strings"</span></span>\n}
+ code = %Q{+<span id="LC9" class="line" lang="ruby"> <span class="k">raise</span> <span class="no"><span class="idiff left addition">RuntimeError</span></span><span class="p"><span class="idiff addition">,</span></span><span class="idiff right addition"> </span><span class="s2">"System commands must be given as an array of strings"</span></span>\n}
expect(subject[5].rich_text).to eq(code)
end
+ context 'when introduce_marker_ranges is false' do
+ before do
+ stub_feature_flags(introduce_marker_ranges: false)
+ end
+
+ it 'keeps the old behavior (without mode classes)' do
+ code = %Q{+<span id="LC9" class="line" lang="ruby"> <span class="k">raise</span> <span class="no"><span class="idiff left">RuntimeError</span></span><span class="p"><span class="idiff">,</span></span><span class="idiff right"> </span><span class="s2">"System commands must be given as an array of strings"</span></span>\n}
+
+ expect(subject[5].rich_text).to eq(code)
+ end
+ end
+
context 'when no diff_refs' do
before do
allow(diff_file).to receive(:diff_refs).and_return(nil)
@@ -93,7 +105,7 @@ RSpec.describe Gitlab::Diff::Highlight do
end
it 'marks added lines' do
- code = %q{+ raise <span class="idiff left right">RuntimeError, </span>&quot;System commands must be given as an array of strings&quot;}
+ code = %q{+ raise <span class="idiff left right addition">RuntimeError, </span>&quot;System commands must be given as an array of strings&quot;}
expect(subject[5].rich_text).to eq(code)
expect(subject[5].rich_text).to be_html_safe
diff --git a/spec/lib/gitlab/diff/inline_diff_markdown_marker_spec.rb b/spec/lib/gitlab/diff/inline_diff_markdown_marker_spec.rb
index 60f7f3a103f..3670074cc21 100644
--- a/spec/lib/gitlab/diff/inline_diff_markdown_marker_spec.rb
+++ b/spec/lib/gitlab/diff/inline_diff_markdown_marker_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
RSpec.describe Gitlab::Diff::InlineDiffMarkdownMarker do
describe '#mark' do
let(:raw) { "abc 'def'" }
- let(:inline_diffs) { [2..5] }
- let(:subject) { described_class.new(raw).mark(inline_diffs, mode: :deletion) }
+ let(:inline_diffs) { [Gitlab::MarkerRange.new(2, 5, mode: Gitlab::MarkerRange::DELETION)] }
+ let(:subject) { described_class.new(raw).mark(inline_diffs) }
it 'does not escape html entities and marks the range' do
expect(subject).to eq("ab{-c 'd-}ef'")
diff --git a/spec/lib/gitlab/diff/inline_diff_spec.rb b/spec/lib/gitlab/diff/inline_diff_spec.rb
index dce655d5690..714b5d813c4 100644
--- a/spec/lib/gitlab/diff/inline_diff_spec.rb
+++ b/spec/lib/gitlab/diff/inline_diff_spec.rb
@@ -52,17 +52,6 @@ RSpec.describe Gitlab::Diff::InlineDiff do
expect(subject[0]).to eq([3..6])
expect(subject[1]).to eq([3..3, 17..22])
end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(improved_merge_diff_highlighting: false)
- end
-
- it 'finds all inline diffs' do
- expect(subject[0]).to eq([3..19])
- expect(subject[1]).to eq([3..22])
- end
- end
end
end
diff --git a/spec/lib/gitlab/diff/pair_selector_spec.rb b/spec/lib/gitlab/diff/pair_selector_spec.rb
new file mode 100644
index 00000000000..da5707bc377
--- /dev/null
+++ b/spec/lib/gitlab/diff/pair_selector_spec.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Diff::PairSelector do
+ subject(:selector) { described_class.new(lines) }
+
+ describe '#to_a' do
+ subject { selector.to_a }
+
+ let(:lines) { diff.lines }
+
+ let(:diff) do
+ <<-EOF.strip_heredoc
+ class Test # 0
+ - def initialize(test = true) # 1
+ + def initialize(test = false) # 2
+ @test = test # 3
+ - if true # 4
+ - @foo = "bar" # 5
+ + unless false # 6
+ + @foo = "baz" # 7
+ end
+ end
+ end
+ EOF
+ end
+
+ it 'finds all pairs' do
+ is_expected.to match_array([[1, 2], [4, 6], [5, 7]])
+ end
+
+ context 'when there are empty lines' do
+ let(:lines) { ['- bar', '+ baz', ''] }
+
+ it { expect { subject }.not_to raise_error }
+ end
+
+ context 'when there are only removals' do
+ let(:diff) do
+ <<-EOF.strip_heredoc
+ - class Test
+ - def initialize(test = true)
+ - end
+ - end
+ EOF
+ end
+
+ it 'returns empty collection' do
+ is_expected.to eq([])
+ end
+ end
+
+ context 'when there are only additions' do
+ let(:diff) do
+ <<-EOF.strip_heredoc
+ + class Test
+ + def initialize(test = true)
+ + end
+ + end
+ EOF
+ end
+
+ it 'returns empty collection' do
+ is_expected.to eq([])
+ end
+ end
+
+ context 'when there are no changes' do
+ let(:diff) do
+ <<-EOF.strip_heredoc
+ class Test
+ def initialize(test = true)
+ end
+ end
+ EOF
+ end
+
+ it 'returns empty collection' do
+ is_expected.to eq([])
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
index eb11c051adc..7436765e8ee 100644
--- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
expect(new_issue.author).to eql(User.support_bot)
expect(new_issue.confidential?).to be true
expect(new_issue.all_references.all).to be_empty
- expect(new_issue.title).to eq("Service Desk (from jake@adventuretime.ooo): The message subject! @all")
+ expect(new_issue.title).to eq("The message subject! @all")
expect(new_issue.description).to eq(expected_description.strip)
end
diff --git a/spec/lib/gitlab/error_tracking/context_payload_generator_spec.rb b/spec/lib/gitlab/error_tracking/context_payload_generator_spec.rb
new file mode 100644
index 00000000000..0e72dd7ec5e
--- /dev/null
+++ b/spec/lib/gitlab/error_tracking/context_payload_generator_spec.rb
@@ -0,0 +1,176 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rspec-parameterized'
+
+RSpec.describe Gitlab::ErrorTracking::ContextPayloadGenerator do
+ subject(:generator) { described_class.new }
+
+ let(:extra) do
+ {
+ some_other_info: 'info',
+ issue_url: 'http://gitlab.com/gitlab-org/gitlab-foss/-/issues/1'
+ }
+ end
+
+ let(:exception) { StandardError.new("Dummy exception") }
+
+ before do
+ allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('cid')
+ allow(I18n).to receive(:locale).and_return('en')
+ end
+
+ context 'user metadata' do
+ let(:user) { create(:user) }
+
+ it 'appends user metadata to the payload' do
+ payload = {}
+
+ Gitlab::ApplicationContext.with_context(user: user) do
+ payload = generator.generate(exception, extra)
+ end
+
+ expect(payload[:user]).to eql(
+ username: user.username
+ )
+ end
+ end
+
+ context 'tags metadata' do
+ context 'when the GITLAB_SENTRY_EXTRA_TAGS env is not set' do
+ before do
+ stub_env('GITLAB_SENTRY_EXTRA_TAGS', nil)
+ end
+
+ it 'does not log into AppLogger' do
+ expect(Gitlab::AppLogger).not_to receive(:debug)
+
+ generator.generate(exception, extra)
+ end
+
+ it 'does not send any extra tags' do
+ payload = {}
+
+ Gitlab::ApplicationContext.with_context(feature_category: 'feature_a') do
+ payload = generator.generate(exception, extra)
+ end
+
+ expect(payload[:tags]).to eql(
+ correlation_id: 'cid',
+ locale: 'en',
+ program: 'test',
+ feature_category: 'feature_a'
+ )
+ end
+ end
+
+ context 'when the GITLAB_SENTRY_EXTRA_TAGS env is a JSON hash' do
+ it 'includes those tags in all events' do
+ stub_env('GITLAB_SENTRY_EXTRA_TAGS', { foo: 'bar', baz: 'quux' }.to_json)
+ payload = {}
+
+ Gitlab::ApplicationContext.with_context(feature_category: 'feature_a') do
+ payload = generator.generate(exception, extra)
+ end
+
+ expect(payload[:tags]).to eql(
+ correlation_id: 'cid',
+ locale: 'en',
+ program: 'test',
+ feature_category: 'feature_a',
+ 'foo' => 'bar',
+ 'baz' => 'quux'
+ )
+ end
+
+ it 'does not log into AppLogger' do
+ expect(Gitlab::AppLogger).not_to receive(:debug)
+
+ generator.generate(exception, extra)
+ end
+ end
+
+ context 'when the GITLAB_SENTRY_EXTRA_TAGS env is not a JSON hash' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:env_var, :error) do
+ { foo: 'bar', baz: 'quux' }.inspect | 'JSON::ParserError'
+ [].to_json | 'NoMethodError'
+ [%w[foo bar]].to_json | 'NoMethodError'
+ %w[foo bar].to_json | 'NoMethodError'
+ '"string"' | 'NoMethodError'
+ end
+
+ with_them do
+ before do
+ stub_env('GITLAB_SENTRY_EXTRA_TAGS', env_var)
+ end
+
+ it 'logs into AppLogger' do
+ expect(Gitlab::AppLogger).to receive(:debug).with(a_string_matching(error))
+
+ generator.generate({})
+ end
+
+ it 'does not include any extra tags' do
+ payload = {}
+
+ Gitlab::ApplicationContext.with_context(feature_category: 'feature_a') do
+ payload = generator.generate(exception, extra)
+ end
+
+ expect(payload[:tags]).to eql(
+ correlation_id: 'cid',
+ locale: 'en',
+ program: 'test',
+ feature_category: 'feature_a'
+ )
+ end
+ end
+ end
+ end
+
+ context 'extra metadata' do
+ it 'appends extra metadata to the payload' do
+ payload = generator.generate(exception, extra)
+
+ expect(payload[:extra]).to eql(
+ some_other_info: 'info',
+ issue_url: 'http://gitlab.com/gitlab-org/gitlab-foss/-/issues/1'
+ )
+ end
+
+ it 'appends exception embedded extra metadata to the payload' do
+ allow(exception).to receive(:sentry_extra_data).and_return(
+ some_other_info: 'another_info',
+ mr_url: 'https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/1'
+ )
+
+ payload = generator.generate(exception, extra)
+
+ expect(payload[:extra]).to eql(
+ some_other_info: 'another_info',
+ issue_url: 'http://gitlab.com/gitlab-org/gitlab-foss/-/issues/1',
+ mr_url: 'https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/1'
+ )
+ end
+
+ it 'filters sensitive extra info' do
+ extra[:my_token] = '456'
+ allow(exception).to receive(:sentry_extra_data).and_return(
+ mr_url: 'https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/1',
+ another_token: '1234'
+ )
+
+ payload = generator.generate(exception, extra)
+
+ expect(payload[:extra]).to eql(
+ some_other_info: 'info',
+ issue_url: 'http://gitlab.com/gitlab-org/gitlab-foss/-/issues/1',
+ mr_url: 'https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/1',
+ my_token: '[FILTERED]',
+ another_token: '[FILTERED]'
+ )
+ end
+ end
+end
diff --git a/spec/lib/gitlab/error_tracking/log_formatter_spec.rb b/spec/lib/gitlab/error_tracking/log_formatter_spec.rb
new file mode 100644
index 00000000000..188ccd000a1
--- /dev/null
+++ b/spec/lib/gitlab/error_tracking/log_formatter_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::ErrorTracking::LogFormatter do
+ let(:exception) { StandardError.new('boom') }
+ let(:context_payload) do
+ {
+ server: 'local-hostname-of-the-server',
+ user: {
+ ip_address: '127.0.0.1',
+ username: 'root'
+ },
+ tags: {
+ locale: 'en',
+ feature_category: 'category_a'
+ },
+ extra: {
+ some_other_info: 'other_info',
+ sidekiq: {
+ 'class' => 'HelloWorker',
+ 'args' => ['sensitive string', 1, 2],
+ 'another_field' => 'field'
+ }
+ }
+ }
+ end
+
+ before do
+ Raven.context.user[:user_flag] = 'flag'
+ Raven.context.tags[:shard] = 'catchall'
+ Raven.context.extra[:some_info] = 'info'
+
+ allow(exception).to receive(:backtrace).and_return(
+ [
+ 'lib/gitlab/file_a.rb:1',
+ 'lib/gitlab/file_b.rb:2'
+ ]
+ )
+ end
+
+ after do
+ ::Raven::Context.clear!
+ end
+
+ it 'appends error-related log fields and filters sensitive Sidekiq arguments' do
+ payload = described_class.new.generate_log(exception, context_payload)
+
+ expect(payload).to eql(
+ 'exception.class' => 'StandardError',
+ 'exception.message' => 'boom',
+ 'exception.backtrace' => [
+ 'lib/gitlab/file_a.rb:1',
+ 'lib/gitlab/file_b.rb:2'
+ ],
+ 'user.ip_address' => '127.0.0.1',
+ 'user.username' => 'root',
+ 'user.user_flag' => 'flag',
+ 'tags.locale' => 'en',
+ 'tags.feature_category' => 'category_a',
+ 'tags.shard' => 'catchall',
+ 'extra.some_other_info' => 'other_info',
+ 'extra.some_info' => 'info',
+ "extra.sidekiq" => {
+ "another_field" => "field",
+ "args" => ["[FILTERED]", "1", "2"],
+ "class" => "HelloWorker"
+ }
+ )
+ end
+end
diff --git a/spec/lib/gitlab/error_tracking/processor/context_payload_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/context_payload_processor_spec.rb
new file mode 100644
index 00000000000..0db40eca989
--- /dev/null
+++ b/spec/lib/gitlab/error_tracking/processor/context_payload_processor_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::ErrorTracking::Processor::ContextPayloadProcessor do
+ subject(:processor) { described_class.new }
+
+ before do
+ allow_next_instance_of(Gitlab::ErrorTracking::ContextPayloadGenerator) do |generator|
+ allow(generator).to receive(:generate).and_return(
+ user: { username: 'root' },
+ tags: { locale: 'en', program: 'test', feature_category: 'feature_a', correlation_id: 'cid' },
+ extra: { some_info: 'info' }
+ )
+ end
+ end
+
+ it 'merges the context payload into event payload' do
+ payload = {
+ user: { ip_address: '127.0.0.1' },
+ tags: { priority: 'high' },
+ extra: { sidekiq: { class: 'SomeWorker', args: ['[FILTERED]', 1, 2] } }
+ }
+
+ processor.process(payload)
+
+ expect(payload).to eql(
+ user: {
+ ip_address: '127.0.0.1',
+ username: 'root'
+ },
+ tags: {
+ priority: 'high',
+ locale: 'en',
+ program: 'test',
+ feature_category: 'feature_a',
+ correlation_id: 'cid'
+ },
+ extra: {
+ some_info: 'info',
+ sidekiq: { class: 'SomeWorker', args: ['[FILTERED]', 1, 2] }
+ }
+ )
+ end
+end
diff --git a/spec/lib/gitlab/error_tracking_spec.rb b/spec/lib/gitlab/error_tracking_spec.rb
index 764478ad1d7..a905b9f8d40 100644
--- a/spec/lib/gitlab/error_tracking_spec.rb
+++ b/spec/lib/gitlab/error_tracking_spec.rb
@@ -8,116 +8,55 @@ RSpec.describe Gitlab::ErrorTracking do
let(:exception) { RuntimeError.new('boom') }
let(:issue_url) { 'http://gitlab.com/gitlab-org/gitlab-foss/issues/1' }
- let(:expected_payload_includes) do
- [
- { 'exception.class' => 'RuntimeError' },
- { 'exception.message' => 'boom' },
- { 'tags.correlation_id' => 'cid' },
- { 'extra.some_other_info' => 'info' },
- { 'extra.issue_url' => 'http://gitlab.com/gitlab-org/gitlab-foss/issues/1' }
- ]
+ let(:user) { create(:user) }
+
+ let(:sentry_payload) do
+ {
+ tags: {
+ program: 'test',
+ locale: 'en',
+ feature_category: 'feature_a',
+ correlation_id: 'cid'
+ },
+ user: {
+ username: user.username
+ },
+ extra: {
+ some_other_info: 'info',
+ issue_url: 'http://gitlab.com/gitlab-org/gitlab-foss/issues/1'
+ }
+ }
end
- let(:sentry_event) { Gitlab::Json.parse(Raven.client.transport.events.last[1]) }
+ let(:logger_payload) do
+ {
+ 'exception.class' => 'RuntimeError',
+ 'exception.message' => 'boom',
+ 'tags.program' => 'test',
+ 'tags.locale' => 'en',
+ 'tags.feature_category' => 'feature_a',
+ 'tags.correlation_id' => 'cid',
+ 'user.username' => user.username,
+ 'extra.some_other_info' => 'info',
+ 'extra.issue_url' => 'http://gitlab.com/gitlab-org/gitlab-foss/issues/1'
+ }
+ end
before do
stub_sentry_settings
allow(described_class).to receive(:sentry_dsn).and_return(Gitlab.config.sentry.dsn)
allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('cid')
+ allow(I18n).to receive(:locale).and_return('en')
described_class.configure do |config|
config.encoding = 'json'
end
end
- describe '.configure' do
- context 'default tags from GITLAB_SENTRY_EXTRA_TAGS' do
- context 'when the value is a JSON hash' do
- it 'includes those tags in all events' do
- stub_env('GITLAB_SENTRY_EXTRA_TAGS', { foo: 'bar', baz: 'quux' }.to_json)
-
- described_class.configure do |config|
- config.encoding = 'json'
- end
-
- described_class.track_exception(StandardError.new)
-
- expect(sentry_event['tags'].except('correlation_id', 'locale', 'program'))
- .to eq('foo' => 'bar', 'baz' => 'quux')
- end
- end
-
- context 'when the value is not set' do
- before do
- stub_env('GITLAB_SENTRY_EXTRA_TAGS', nil)
- end
-
- it 'does not log an error' do
- expect(Gitlab::AppLogger).not_to receive(:debug)
-
- described_class.configure do |config|
- config.encoding = 'json'
- end
- end
-
- it 'does not send any extra tags' do
- described_class.configure do |config|
- config.encoding = 'json'
- end
-
- described_class.track_exception(StandardError.new)
-
- expect(sentry_event['tags'].keys).to contain_exactly('correlation_id', 'locale', 'program')
- end
- end
-
- context 'when the value is not a JSON hash' do
- using RSpec::Parameterized::TableSyntax
-
- where(:env_var, :error) do
- { foo: 'bar', baz: 'quux' }.inspect | 'JSON::ParserError'
- [].to_json | 'NoMethodError'
- [%w[foo bar]].to_json | 'NoMethodError'
- %w[foo bar].to_json | 'NoMethodError'
- '"string"' | 'NoMethodError'
- end
-
- with_them do
- before do
- stub_env('GITLAB_SENTRY_EXTRA_TAGS', env_var)
- end
-
- it 'does not include any extra tags' do
- described_class.configure do |config|
- config.encoding = 'json'
- end
-
- described_class.track_exception(StandardError.new)
-
- expect(sentry_event['tags'].except('correlation_id', 'locale', 'program'))
- .to be_empty
- end
-
- it 'logs the error class' do
- expect(Gitlab::AppLogger).to receive(:debug).with(a_string_matching(error))
-
- described_class.configure do |config|
- config.encoding = 'json'
- end
- end
- end
- end
- end
- end
-
- describe '.with_context' do
- it 'adds the expected tags' do
- described_class.with_context {}
-
- expect(Raven.tags_context[:locale].to_s).to eq(I18n.locale.to_s)
- expect(Raven.tags_context[Labkit::Correlation::CorrelationId::LOG_KEY.to_sym].to_s)
- .to eq('cid')
+ around do |example|
+ Gitlab::ApplicationContext.with_context(user: user, feature_category: 'feature_a') do
+ example.run
end
end
@@ -128,10 +67,15 @@ RSpec.describe Gitlab::ErrorTracking do
end
it 'raises the exception' do
- expect(Raven).to receive(:capture_exception)
-
- expect { described_class.track_and_raise_for_dev_exception(exception) }
- .to raise_error(RuntimeError)
+ expect(Raven).to receive(:capture_exception).with(exception, sentry_payload)
+
+ expect do
+ described_class.track_and_raise_for_dev_exception(
+ exception,
+ issue_url: issue_url,
+ some_other_info: 'info'
+ )
+ end.to raise_error(RuntimeError, /boom/)
end
end
@@ -141,19 +85,7 @@ RSpec.describe Gitlab::ErrorTracking do
end
it 'logs the exception with all attributes passed' do
- expected_extras = {
- some_other_info: 'info',
- issue_url: 'http://gitlab.com/gitlab-org/gitlab-foss/issues/1'
- }
-
- expected_tags = {
- correlation_id: 'cid'
- }
-
- expect(Raven).to receive(:capture_exception)
- .with(exception,
- tags: a_hash_including(expected_tags),
- extra: a_hash_including(expected_extras))
+ expect(Raven).to receive(:capture_exception).with(exception, sentry_payload)
described_class.track_and_raise_for_dev_exception(
exception,
@@ -163,8 +95,7 @@ RSpec.describe Gitlab::ErrorTracking do
end
it 'calls Gitlab::ErrorTracking::Logger.error with formatted payload' do
- expect(Gitlab::ErrorTracking::Logger).to receive(:error)
- .with(a_hash_including(*expected_payload_includes))
+ expect(Gitlab::ErrorTracking::Logger).to receive(:error).with(logger_payload)
described_class.track_and_raise_for_dev_exception(
exception,
@@ -177,15 +108,19 @@ RSpec.describe Gitlab::ErrorTracking do
describe '.track_and_raise_exception' do
it 'always raises the exception' do
- expect(Raven).to receive(:capture_exception)
+ expect(Raven).to receive(:capture_exception).with(exception, sentry_payload)
- expect { described_class.track_and_raise_exception(exception) }
- .to raise_error(RuntimeError)
+ expect do
+ described_class.track_and_raise_exception(
+ exception,
+ issue_url: issue_url,
+ some_other_info: 'info'
+ )
+ end.to raise_error(RuntimeError, /boom/)
end
it 'calls Gitlab::ErrorTracking::Logger.error with formatted payload' do
- expect(Gitlab::ErrorTracking::Logger).to receive(:error)
- .with(a_hash_including(*expected_payload_includes))
+ expect(Gitlab::ErrorTracking::Logger).to receive(:error).with(logger_payload)
expect do
described_class.track_and_raise_exception(
@@ -210,17 +145,16 @@ RSpec.describe Gitlab::ErrorTracking do
it 'calls Raven.capture_exception' do
track_exception
- expect(Raven).to have_received(:capture_exception)
- .with(exception,
- tags: a_hash_including(correlation_id: 'cid'),
- extra: a_hash_including(some_other_info: 'info', issue_url: issue_url))
+ expect(Raven).to have_received(:capture_exception).with(
+ exception,
+ sentry_payload
+ )
end
it 'calls Gitlab::ErrorTracking::Logger.error with formatted payload' do
track_exception
- expect(Gitlab::ErrorTracking::Logger).to have_received(:error)
- .with(a_hash_including(*expected_payload_includes))
+ expect(Gitlab::ErrorTracking::Logger).to have_received(:error).with(logger_payload)
end
context 'with filterable parameters' do
@@ -229,8 +163,9 @@ RSpec.describe Gitlab::ErrorTracking do
it 'filters parameters' do
track_exception
- expect(Gitlab::ErrorTracking::Logger).to have_received(:error)
- .with(hash_including({ 'extra.test' => 1, 'extra.my_token' => '[FILTERED]' }))
+ expect(Gitlab::ErrorTracking::Logger).to have_received(:error).with(
+ hash_including({ 'extra.test' => 1, 'extra.my_token' => '[FILTERED]' })
+ )
end
end
@@ -241,8 +176,9 @@ RSpec.describe Gitlab::ErrorTracking do
it 'includes the extra data from the exception in the tracking information' do
track_exception
- expect(Raven).to have_received(:capture_exception)
- .with(exception, a_hash_including(extra: a_hash_including(extra_info)))
+ expect(Raven).to have_received(:capture_exception).with(
+ exception, a_hash_including(extra: a_hash_including(extra_info))
+ )
end
end
@@ -253,8 +189,9 @@ RSpec.describe Gitlab::ErrorTracking do
it 'just includes the other extra info' do
track_exception
- expect(Raven).to have_received(:capture_exception)
- .with(exception, a_hash_including(extra: a_hash_including(extra)))
+ expect(Raven).to have_received(:capture_exception).with(
+ exception, a_hash_including(extra: a_hash_including(extra))
+ )
end
end
@@ -266,7 +203,13 @@ RSpec.describe Gitlab::ErrorTracking do
track_exception
expect(Gitlab::ErrorTracking::Logger).to have_received(:error).with(
- hash_including({ 'extra.sidekiq' => { 'class' => 'PostReceive', 'args' => ['1', '{"id"=>2, "name"=>"hello"}', 'some-value', 'another-value'] } }))
+ hash_including(
+ 'extra.sidekiq' => {
+ 'class' => 'PostReceive',
+ 'args' => ['1', '{"id"=>2, "name"=>"hello"}', 'some-value', 'another-value']
+ }
+ )
+ )
end
end
@@ -276,9 +219,17 @@ RSpec.describe Gitlab::ErrorTracking do
it 'filters sensitive arguments before sending' do
track_exception
+ sentry_event = Gitlab::Json.parse(Raven.client.transport.events.last[1])
+
expect(sentry_event.dig('extra', 'sidekiq', 'args')).to eq(['[FILTERED]', 1, 2])
expect(Gitlab::ErrorTracking::Logger).to have_received(:error).with(
- hash_including('extra.sidekiq' => { 'class' => 'UnknownWorker', 'args' => ['[FILTERED]', '1', '2'] }))
+ hash_including(
+ 'extra.sidekiq' => {
+ 'class' => 'UnknownWorker',
+ 'args' => ['[FILTERED]', '1', '2']
+ }
+ )
+ )
end
end
end
diff --git a/spec/lib/gitlab/etag_caching/router/graphql_spec.rb b/spec/lib/gitlab/etag_caching/router/graphql_spec.rb
new file mode 100644
index 00000000000..d151dcba413
--- /dev/null
+++ b/spec/lib/gitlab/etag_caching/router/graphql_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::EtagCaching::Router::Graphql do
+ it 'matches pipelines endpoint' do
+ result = match_route('/api/graphql', 'pipelines/id/1')
+
+ expect(result).to be_present
+ expect(result.name).to eq 'pipelines_graph'
+ end
+
+ it 'has a valid feature category for every route', :aggregate_failures do
+ feature_categories = YAML.load_file(Rails.root.join('config', 'feature_categories.yml')).to_set
+
+ described_class::ROUTES.each do |route|
+ expect(feature_categories).to include(route.feature_category), "#{route.name} has a category of #{route.feature_category}, which is not valid"
+ end
+ end
+
+ def match_route(path, header)
+ described_class.match(
+ double(path_info: path,
+ headers: { 'X-GITLAB-GRAPHQL-RESOURCE-ETAG' => header }))
+ end
+
+ describe '.cache_key' do
+ let(:path) { '/api/graphql' }
+ let(:header_value) { 'pipelines/id/1' }
+ let(:headers) do
+ { 'X-GITLAB-GRAPHQL-RESOURCE-ETAG' => header_value }.compact
+ end
+
+ subject do
+ described_class.cache_key(double(path: path, headers: headers))
+ end
+
+ it 'uses request path and headers as cache key' do
+ is_expected.to eq '/api/graphql:pipelines/id/1'
+ end
+
+ context 'when the header is missing' do
+ let(:header_value) {}
+
+ it 'does not raise errors' do
+ is_expected.to eq '/api/graphql'
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/etag_caching/router/restful_spec.rb b/spec/lib/gitlab/etag_caching/router/restful_spec.rb
new file mode 100644
index 00000000000..877789b320f
--- /dev/null
+++ b/spec/lib/gitlab/etag_caching/router/restful_spec.rb
@@ -0,0 +1,124 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::EtagCaching::Router::Restful do
+ it 'matches issue notes endpoint' do
+ result = match_route('/my-group/and-subgroup/here-comes-the-project/noteable/issue/1/notes')
+
+ expect(result).to be_present
+ expect(result.name).to eq 'issue_notes'
+ end
+
+ it 'matches MR notes endpoint' do
+ result = match_route('/my-group/and-subgroup/here-comes-the-project/noteable/merge_request/1/notes')
+
+ expect(result).to be_present
+ expect(result.name).to eq 'merge_request_notes'
+ end
+
+ it 'matches issue title endpoint' do
+ result = match_route('/my-group/my-project/-/issues/123/realtime_changes')
+
+ expect(result).to be_present
+ expect(result.name).to eq 'issue_title'
+ end
+
+ it 'matches with a project name that includes a suffix of create' do
+ result = match_route('/group/test-create/-/issues/123/realtime_changes')
+
+ expect(result).to be_present
+ expect(result.name).to eq 'issue_title'
+ end
+
+ it 'matches with a project name that includes a prefix of create' do
+ result = match_route('/group/create-test/-/issues/123/realtime_changes')
+
+ expect(result).to be_present
+ expect(result.name).to eq 'issue_title'
+ end
+
+ it 'matches project pipelines endpoint' do
+ result = match_route('/my-group/my-project/-/pipelines.json')
+
+ expect(result).to be_present
+ expect(result.name).to eq 'project_pipelines'
+ end
+
+ it 'matches commit pipelines endpoint' do
+ result = match_route('/my-group/my-project/-/commit/aa8260d253a53f73f6c26c734c72fdd600f6e6d4/pipelines.json')
+
+ expect(result).to be_present
+ expect(result.name).to eq 'commit_pipelines'
+ end
+
+ it 'matches new merge request pipelines endpoint' do
+ result = match_route('/my-group/my-project/-/merge_requests/new.json')
+
+ expect(result).to be_present
+ expect(result.name).to eq 'new_merge_request_pipelines'
+ end
+
+ it 'matches merge request pipelines endpoint' do
+ result = match_route('/my-group/my-project/-/merge_requests/234/pipelines.json')
+
+ expect(result).to be_present
+ expect(result.name).to eq 'merge_request_pipelines'
+ end
+
+ it 'matches build endpoint' do
+ result = match_route('/my-group/my-project/builds/234.json')
+
+ expect(result).to be_present
+ expect(result.name).to eq 'project_build'
+ end
+
+ it 'does not match blob with confusing name' do
+ result = match_route('/my-group/my-project/-/blob/master/pipelines.json')
+
+ expect(result).to be_blank
+ end
+
+ it 'matches the cluster environments path' do
+ result = match_route('/my-group/my-project/-/clusters/47/environments')
+
+ expect(result).to be_present
+ expect(result.name).to eq 'cluster_environments'
+ end
+
+ it 'matches the environments path' do
+ result = match_route('/my-group/my-project/environments.json')
+
+ expect(result).to be_present
+ expect(result.name).to eq 'environments'
+ end
+
+ it 'matches pipeline#show endpoint' do
+ result = match_route('/my-group/my-project/-/pipelines/2.json')
+
+ expect(result).to be_present
+ expect(result.name).to eq 'project_pipeline'
+ end
+
+ it 'has a valid feature category for every route', :aggregate_failures do
+ feature_categories = YAML.load_file(Rails.root.join('config', 'feature_categories.yml')).to_set
+
+ described_class::ROUTES.each do |route|
+ expect(feature_categories).to include(route.feature_category), "#{route.name} has a category of #{route.feature_category}, which is not valid"
+ end
+ end
+
+ def match_route(path)
+ described_class.match(double(path_info: path))
+ end
+
+ describe '.cache_key' do
+ subject do
+ described_class.cache_key(double(path: '/my-group/my-project/builds/234.json'))
+ end
+
+ it 'uses request path as cache key' do
+ is_expected.to eq '/my-group/my-project/builds/234.json'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/etag_caching/router_spec.rb b/spec/lib/gitlab/etag_caching/router_spec.rb
index dbd9cc230f1..c748ee00721 100644
--- a/spec/lib/gitlab/etag_caching/router_spec.rb
+++ b/spec/lib/gitlab/etag_caching/router_spec.rb
@@ -3,136 +3,33 @@
require 'spec_helper'
RSpec.describe Gitlab::EtagCaching::Router do
- it 'matches issue notes endpoint' do
- result = described_class.match(
- '/my-group/and-subgroup/here-comes-the-project/noteable/issue/1/notes'
- )
-
- expect(result).to be_present
- expect(result.name).to eq 'issue_notes'
- end
-
- it 'matches MR notes endpoint' do
- result = described_class.match(
- '/my-group/and-subgroup/here-comes-the-project/noteable/merge_request/1/notes'
- )
-
- expect(result).to be_present
- expect(result.name).to eq 'merge_request_notes'
- end
-
- it 'matches issue title endpoint' do
- result = described_class.match(
- '/my-group/my-project/-/issues/123/realtime_changes'
- )
-
- expect(result).to be_present
- expect(result.name).to eq 'issue_title'
- end
-
- it 'matches with a project name that includes a suffix of create' do
- result = described_class.match(
- '/group/test-create/-/issues/123/realtime_changes'
- )
-
- expect(result).to be_present
- expect(result.name).to eq 'issue_title'
- end
-
- it 'matches with a project name that includes a prefix of create' do
- result = described_class.match(
- '/group/create-test/-/issues/123/realtime_changes'
- )
-
- expect(result).to be_present
- expect(result.name).to eq 'issue_title'
- end
-
- it 'matches project pipelines endpoint' do
- result = described_class.match(
- '/my-group/my-project/-/pipelines.json'
- )
-
- expect(result).to be_present
- expect(result.name).to eq 'project_pipelines'
- end
-
- it 'matches commit pipelines endpoint' do
- result = described_class.match(
- '/my-group/my-project/-/commit/aa8260d253a53f73f6c26c734c72fdd600f6e6d4/pipelines.json'
- )
-
- expect(result).to be_present
- expect(result.name).to eq 'commit_pipelines'
- end
-
- it 'matches new merge request pipelines endpoint' do
- result = described_class.match(
- '/my-group/my-project/-/merge_requests/new.json'
- )
-
- expect(result).to be_present
- expect(result.name).to eq 'new_merge_request_pipelines'
- end
-
- it 'matches merge request pipelines endpoint' do
- result = described_class.match(
- '/my-group/my-project/-/merge_requests/234/pipelines.json'
- )
-
- expect(result).to be_present
- expect(result.name).to eq 'merge_request_pipelines'
- end
-
- it 'matches build endpoint' do
- result = described_class.match(
- '/my-group/my-project/builds/234.json'
- )
-
- expect(result).to be_present
- expect(result.name).to eq 'project_build'
- end
-
- it 'does not match blob with confusing name' do
- result = described_class.match(
- '/my-group/my-project/-/blob/master/pipelines.json'
- )
-
- expect(result).to be_blank
- end
+ describe '.match', :aggregate_failures do
+ context 'with RESTful routes' do
+ it 'matches project pipelines endpoint' do
+ result = match_route('/my-group/my-project/-/pipelines.json')
+
+ expect(result).to be_present
+ expect(result.name).to eq 'project_pipelines'
+ expect(result.router).to eq Gitlab::EtagCaching::Router::Restful
+ end
+ end
- it 'matches the cluster environments path' do
- result = described_class.match(
- '/my-group/my-project/-/clusters/47/environments'
- )
+ context 'with GraphQL routes' do
+ it 'matches pipelines endpoint' do
+ result = match_route('/api/graphql', 'pipelines/id/12')
- expect(result).to be_present
- expect(result.name).to eq 'cluster_environments'
+ expect(result).to be_present
+ expect(result.name).to eq 'pipelines_graph'
+ expect(result.router).to eq Gitlab::EtagCaching::Router::Graphql
+ end
+ end
end
- it 'matches the environments path' do
- result = described_class.match(
- '/my-group/my-project/environments.json'
- )
+ def match_route(path, header = nil)
+ headers = { 'X-GITLAB-GRAPHQL-RESOURCE-ETAG' => header }.compact
- expect(result).to be_present
- expect(result.name).to eq 'environments'
- end
-
- it 'matches pipeline#show endpoint' do
- result = described_class.match(
- '/my-group/my-project/-/pipelines/2.json'
+ described_class.match(
+ double(path_info: path, headers: headers)
)
-
- expect(result).to be_present
- expect(result.name).to eq 'project_pipeline'
- end
-
- it 'has a valid feature category for every route', :aggregate_failures do
- feature_categories = YAML.load_file(Rails.root.join('config', 'feature_categories.yml')).to_set
-
- described_class::ROUTES.each do |route|
- expect(feature_categories).to include(route.feature_category), "#{route.name} has a category of #{route.feature_category}, which is not valid"
- end
end
end
diff --git a/spec/lib/gitlab/etag_caching/store_spec.rb b/spec/lib/gitlab/etag_caching/store_spec.rb
new file mode 100644
index 00000000000..46195e64715
--- /dev/null
+++ b/spec/lib/gitlab/etag_caching/store_spec.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::EtagCaching::Store, :clean_gitlab_redis_shared_state do
+ let(:store) { described_class.new }
+
+ describe '#get' do
+ subject { store.get(key) }
+
+ context 'with invalid keys' do
+ let(:key) { 'a' }
+
+ it 'raises errors' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).and_call_original
+
+ expect { subject }.to raise_error Gitlab::EtagCaching::Store::InvalidKeyError
+ end
+
+ it 'does not raise errors in production' do
+ expect(store).to receive(:skip_validation?).and_return true
+ expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
+
+ subject
+ end
+ end
+
+ context 'with GraphQL keys' do
+ let(:key) { '/api/graphql:pipelines/id/5' }
+
+ it 'returns a stored value' do
+ etag = store.touch(key)
+
+ is_expected.to eq(etag)
+ end
+ end
+
+ context 'with RESTful keys' do
+ let(:key) { '/my-group/my-project/builds/234.json' }
+
+ it 'returns a stored value' do
+ etag = store.touch(key)
+
+ is_expected.to eq(etag)
+ end
+ end
+ end
+
+ describe '#touch' do
+ subject { store.touch(key) }
+
+ context 'with invalid keys' do
+ let(:key) { 'a' }
+
+ it 'raises errors' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).and_call_original
+
+ expect { subject }.to raise_error Gitlab::EtagCaching::Store::InvalidKeyError
+ end
+ end
+
+ context 'with GraphQL keys' do
+ let(:key) { '/api/graphql:pipelines/id/5' }
+
+ it 'stores and returns a value' do
+ etag = store.touch(key)
+
+ expect(etag).to be_present
+ expect(store.get(key)).to eq(etag)
+ end
+ end
+
+ context 'with RESTful keys' do
+ let(:key) { '/my-group/my-project/builds/234.json' }
+
+ it 'stores and returns a value' do
+ etag = store.touch(key)
+
+ expect(etag).to be_present
+ expect(store.get(key)).to eq(etag)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/experimentation/controller_concern_spec.rb b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
index 1cebe37bea5..3678aeb18b0 100644
--- a/spec/lib/gitlab/experimentation/controller_concern_spec.rb
+++ b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
@@ -520,6 +520,78 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
end
end
+ describe '#record_experiment_group' do
+ let(:group) { 'a group object' }
+ let(:experiment_key) { :some_experiment_key }
+ let(:dnt_enabled) { false }
+ let(:experiment_active) { true }
+ let(:rollout_strategy) { :whatever }
+ let(:variant) { 'variant' }
+
+ before do
+ allow(controller).to receive(:dnt_enabled?).and_return(dnt_enabled)
+ allow(::Gitlab::Experimentation).to receive(:active?).and_return(experiment_active)
+ allow(::Gitlab::Experimentation).to receive(:rollout_strategy).and_return(rollout_strategy)
+ allow(controller).to receive(:tracking_group).and_return(variant)
+ allow(::Experiment).to receive(:add_group)
+ end
+
+ subject(:record_experiment_group) { controller.record_experiment_group(experiment_key, group) }
+
+ shared_examples 'exits early without recording' do
+ it 'returns early without recording the group as an ExperimentSubject' do
+ expect(::Experiment).not_to receive(:add_group)
+ record_experiment_group
+ end
+ end
+
+ shared_examples 'calls tracking_group' do |using_cookie_rollout|
+ it "calls tracking_group with #{using_cookie_rollout ? 'a nil' : 'the group as the'} subject" do
+ expect(controller).to receive(:tracking_group).with(experiment_key, nil, subject: using_cookie_rollout ? nil : group).and_return(variant)
+ record_experiment_group
+ end
+ end
+
+ shared_examples 'records the group' do
+ it 'records the group' do
+ expect(::Experiment).to receive(:add_group).with(experiment_key, group: group, variant: variant)
+ record_experiment_group
+ end
+ end
+
+ context 'when DNT is enabled' do
+ let(:dnt_enabled) { true }
+
+ include_examples 'exits early without recording'
+ end
+
+ context 'when the experiment is not active' do
+ let(:experiment_active) { false }
+
+ include_examples 'exits early without recording'
+ end
+
+ context 'when a nil group is given' do
+ let(:group) { nil }
+
+ include_examples 'exits early without recording'
+ end
+
+ context 'when the experiment uses a cookie-based rollout strategy' do
+ let(:rollout_strategy) { :cookie }
+
+ include_examples 'calls tracking_group', true
+ include_examples 'records the group'
+ end
+
+ context 'when the experiment uses a non-cookie-based rollout strategy' do
+ let(:rollout_strategy) { :group }
+
+ include_examples 'calls tracking_group', false
+ include_examples 'records the group'
+ end
+ end
+
describe '#record_experiment_conversion_event' do
let(:user) { build(:user) }
@@ -534,7 +606,7 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
end
it 'records the conversion event for the experiment & user' do
- expect(::Experiment).to receive(:record_conversion_event).with(:test_experiment, user)
+ expect(::Experiment).to receive(:record_conversion_event).with(:test_experiment, user, {})
record_conversion_event
end
diff --git a/spec/lib/gitlab/experimentation_spec.rb b/spec/lib/gitlab/experimentation_spec.rb
index 7eeae3f3f33..83c6b556fc6 100644
--- a/spec/lib/gitlab/experimentation_spec.rb
+++ b/spec/lib/gitlab/experimentation_spec.rb
@@ -7,14 +7,10 @@ require 'spec_helper'
RSpec.describe Gitlab::Experimentation::EXPERIMENTS do
it 'temporarily ensures we know what experiments exist for backwards compatibility' do
expected_experiment_keys = [
- :ci_notification_dot,
:upgrade_link_in_user_menu_a,
- :invite_members_version_a,
:invite_members_version_b,
:invite_members_empty_group_version_a,
- :contact_sales_btn_in_app,
- :customize_homepage,
- :group_only_trials
+ :contact_sales_btn_in_app
]
backwards_compatible_experiment_keys = described_class.filter { |_, v| v[:use_backwards_compatible_subject_index] }.keys
diff --git a/spec/lib/gitlab/git/push_spec.rb b/spec/lib/gitlab/git/push_spec.rb
index 8ba43b2967c..68cef558f6f 100644
--- a/spec/lib/gitlab/git/push_spec.rb
+++ b/spec/lib/gitlab/git/push_spec.rb
@@ -87,7 +87,7 @@ RSpec.describe Gitlab::Git::Push do
it { is_expected.to be_force_push }
end
- context 'when called muiltiple times' do
+ context 'when called multiple times' do
it 'does not make multiple calls to the force push check' do
expect(Gitlab::Checks::ForcePush).to receive(:force_push?).once
diff --git a/spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb
index 2999dc5bb41..e42b6d89c30 100644
--- a/spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb
@@ -5,37 +5,46 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::PullRequestMergedByImporter, :clean_gitlab_redis_cache do
let_it_be(:merge_request) { create(:merged_merge_request) }
let(:project) { merge_request.project }
- let(:created_at) { Time.new(2017, 1, 1, 12, 00).utc }
+ let(:merged_at) { Time.new(2017, 1, 1, 12, 00).utc }
let(:client_double) { double(user: double(id: 999, login: 'merger', email: 'merger@email.com')) }
let(:pull_request) do
instance_double(
Gitlab::GithubImport::Representation::PullRequest,
iid: merge_request.iid,
- created_at: created_at,
+ merged_at: merged_at,
merged_by: double(id: 999, login: 'merger')
)
end
subject { described_class.new(pull_request, project, client_double) }
- it 'assigns the merged by user when mapped' do
- merge_user = create(:user, email: 'merger@email.com')
+ context 'when the merger user can be mapped' do
+ it 'assigns the merged by user when mapped' do
+ merge_user = create(:user, email: 'merger@email.com')
- subject.execute
+ subject.execute
- expect(merge_request.metrics.reload.merged_by).to eq(merge_user)
+ metrics = merge_request.metrics.reload
+ expect(metrics.merged_by).to eq(merge_user)
+ expect(metrics.merged_at).to eq(merged_at)
+ end
end
- it 'adds a note referencing the merger user when the user cannot be mapped' do
- expect { subject.execute }
- .to change(Note, :count).by(1)
- .and not_change(merge_request, :updated_at)
-
- last_note = merge_request.notes.last
-
- expect(last_note.note).to eq("*Merged by: merger*")
- expect(last_note.created_at).to eq(created_at)
- expect(last_note.author).to eq(project.creator)
+ context 'when the merger user cannot be mapped to a gitlab user' do
+ it 'adds a note referencing the merger user' do
+ expect { subject.execute }
+ .to change(Note, :count).by(1)
+ .and not_change(merge_request, :updated_at)
+
+ metrics = merge_request.metrics.reload
+ expect(metrics.merged_by).to be_nil
+ expect(metrics.merged_at).to eq(merged_at)
+
+ last_note = merge_request.notes.last
+ expect(last_note.note).to eq("*Merged by: merger at 2017-01-01 12:00:00 UTC*")
+ expect(last_note.created_at).to eq(merged_at)
+ expect(last_note.author).to eq(project.creator)
+ end
end
end
diff --git a/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
index b2f993ac47c..290f3f51202 100644
--- a/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
@@ -19,8 +19,10 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
context 'when the review is "APPROVED"' do
let(:review) { create_review(type: 'APPROVED', note: '') }
- it 'creates a note for the review' do
- expect { subject.execute }.to change(Note, :count)
+ it 'creates a note for the review and approves the Merge Request' do
+ expect { subject.execute }
+ .to change(Note, :count).by(1)
+ .and change(Approval, :count).by(1)
last_note = merge_request.notes.last
expect(last_note.note).to eq('approved this merge request')
@@ -31,6 +33,14 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
expect(merge_request.approved_by_users.reload).to include(author)
expect(merge_request.approvals.last.created_at).to eq(submitted_at)
end
+
+ it 'does nothing if the user already approved the merge request' do
+ create(:approval, merge_request: merge_request, user: author)
+
+ expect { subject.execute }
+ .to change(Note, :count).by(0)
+ .and change(Approval, :count).by(0)
+ end
end
context 'when the review is "COMMENTED"' do
diff --git a/spec/lib/gitlab/graphql/calls_gitaly/field_extension_spec.rb b/spec/lib/gitlab/graphql/calls_gitaly/field_extension_spec.rb
new file mode 100644
index 00000000000..1d8849f7e38
--- /dev/null
+++ b/spec/lib/gitlab/graphql/calls_gitaly/field_extension_spec.rb
@@ -0,0 +1,87 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Gitlab::Graphql::CallsGitaly::FieldExtension, :request_store do
+ include GraphqlHelpers
+
+ let(:field_args) { {} }
+ let(:owner) { fresh_object_type }
+ let(:field) do
+ ::Types::BaseField.new(name: 'value', type: GraphQL::STRING_TYPE, null: true, owner: owner, **field_args)
+ end
+
+ def resolve_value
+ resolve_field(field, { value: 'foo' }, object_type: owner)
+ end
+
+ context 'when the field calls gitaly' do
+ before do
+ owner.define_method :value do
+ Gitlab::SafeRequestStore['gitaly_call_actual'] = 1
+ 'fresh-from-the-gitaly-mines!'
+ end
+ end
+
+ context 'when the field has a constant complexity' do
+ let(:field_args) { { complexity: 100 } }
+
+ it 'allows the call' do
+ expect { resolve_value }.not_to raise_error
+ end
+ end
+
+ context 'when the field declares that it calls gitaly' do
+ let(:field_args) { { calls_gitaly: true } }
+
+ it 'allows the call' do
+ expect { resolve_value }.not_to raise_error
+ end
+ end
+
+ context 'when the field does not have these arguments' do
+ let(:field_args) { {} }
+
+ it 'notices, and raises, mentioning the field' do
+ expect { resolve_value }.to raise_error(include('Object.value'))
+ end
+ end
+ end
+
+ context 'when it does not call gitaly' do
+ let(:field_args) { {} }
+
+ it 'does not raise' do
+ value = resolve_value
+
+ expect(value).to eq 'foo'
+ end
+ end
+
+ context 'when some field calls gitaly while we were waiting' do
+ let(:extension) { described_class.new(field: field, options: {}) }
+
+ it 'is acceptable if all are accounted for' do
+ object = :anything
+ arguments = :any_args
+
+ ::Gitlab::SafeRequestStore['gitaly_call_actual'] = 3
+ ::Gitlab::SafeRequestStore['graphql_gitaly_accounted_for'] = 0
+
+ expect do |b|
+ extension.resolve(object: object, arguments: arguments, &b)
+ end.to yield_with_args(object, arguments, [3, 0])
+
+ ::Gitlab::SafeRequestStore['gitaly_call_actual'] = 13
+ ::Gitlab::SafeRequestStore['graphql_gitaly_accounted_for'] = 10
+
+ expect { extension.after_resolve(value: 'foo', memo: [3, 0]) }.not_to raise_error
+ end
+
+ it 'is unacceptable if some of the calls are unaccounted for' do
+ ::Gitlab::SafeRequestStore['gitaly_call_actual'] = 10
+ ::Gitlab::SafeRequestStore['graphql_gitaly_accounted_for'] = 9
+
+ expect { extension.after_resolve(value: 'foo', memo: [0, 0]) }.to raise_error(include('Object.value'))
+ end
+ end
+end
diff --git a/spec/lib/gitlab/graphql/calls_gitaly/instrumentation_spec.rb b/spec/lib/gitlab/graphql/calls_gitaly/instrumentation_spec.rb
deleted file mode 100644
index f16767f7d14..00000000000
--- a/spec/lib/gitlab/graphql/calls_gitaly/instrumentation_spec.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Gitlab::Graphql::CallsGitaly::Instrumentation do
- subject { described_class.new }
-
- describe '#calls_gitaly_check' do
- let(:gitaly_field) { Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE, null: true, calls_gitaly: true) }
- let(:no_gitaly_field) { Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE, null: true, calls_gitaly: false) }
-
- context 'if there are no Gitaly calls' do
- it 'does not raise an error if calls_gitaly is false' do
- expect { subject.send(:calls_gitaly_check, no_gitaly_field, 0) }.not_to raise_error
- end
- end
-
- context 'if there is at least 1 Gitaly call' do
- it 'raises an error if calls_gitaly: is false or not defined' do
- expect { subject.send(:calls_gitaly_check, no_gitaly_field, 1) }.to raise_error(/specify a constant complexity or add `calls_gitaly: true`/)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/graphql/docs/renderer_spec.rb b/spec/lib/gitlab/graphql/docs/renderer_spec.rb
index 064e0c6828b..5afed8c3390 100644
--- a/spec/lib/gitlab/graphql/docs/renderer_spec.rb
+++ b/spec/lib/gitlab/graphql/docs/renderer_spec.rb
@@ -5,27 +5,50 @@ require 'spec_helper'
RSpec.describe Gitlab::Graphql::Docs::Renderer do
describe '#contents' do
# Returns a Schema that uses the given `type`
- def mock_schema(type)
+ def mock_schema(type, field_description)
query_type = Class.new(Types::BaseObject) do
- graphql_name 'QueryType'
+ graphql_name 'Query'
- field :foo, type, null: true
+ field :foo, type, null: true do
+ description field_description
+ argument :id, GraphQL::ID_TYPE, required: false, description: 'ID of the object.'
+ end
end
- GraphQL::Schema.define(query: query_type)
+ GraphQL::Schema.define(
+ query: query_type,
+ resolve_type: ->(obj, ctx) { raise 'Not a real schema' }
+ )
end
- let_it_be(:template) { Rails.root.join('lib/gitlab/graphql/docs/templates/', 'default.md.haml') }
+ let_it_be(:template) { Rails.root.join('lib/gitlab/graphql/docs/templates/default.md.haml') }
+ let(:field_description) { 'List of objects.' }
subject(:contents) do
described_class.new(
- mock_schema(type).graphql_definition,
+ mock_schema(type, field_description).graphql_definition,
output_dir: nil,
template: template
).contents
end
- context 'A type with a field with a [Array] return type' do
+ describe 'headings' do
+ let(:type) { ::GraphQL::INT_TYPE }
+
+ it 'contains the expected sections' do
+ expect(contents.lines.map(&:chomp)).to include(
+ '## `Query` type',
+ '## Object types',
+ '## Enumeration types',
+ '## Scalar types',
+ '## Abstract types',
+ '### Unions',
+ '### Interfaces'
+ )
+ end
+ end
+
+ context 'when a field has a list type' do
let(:type) do
Class.new(Types::BaseObject) do
graphql_name 'ArrayTest'
@@ -35,19 +58,51 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
end
specify do
+ type_name = '[String!]!'
+ inner_type = 'string'
expectation = <<~DOC
- ### ArrayTest
+ ### `ArrayTest`
| Field | Type | Description |
| ----- | ---- | ----------- |
- | `foo` | String! => Array | A description. |
+ | `foo` | [`#{type_name}`](##{inner_type}) | A description. |
DOC
is_expected.to include(expectation)
end
+
+ describe 'a top level query field' do
+ let(:expectation) do
+ <<~DOC
+ ### `foo`
+
+ List of objects.
+
+ Returns [`ArrayTest`](#arraytest).
+
+ #### Arguments
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | `id` | [`ID`](#id) | ID of the object. |
+ DOC
+ end
+
+ it 'generates the query with arguments' do
+ expect(subject).to include(expectation)
+ end
+
+ context 'when description does not end with `.`' do
+ let(:field_description) { 'List of objects' }
+
+ it 'adds the `.` to the end' do
+ expect(subject).to include(expectation)
+ end
+ end
+ end
end
- context 'A type with fields defined in reverse alphabetical order' do
+ describe 'when fields are not defined in alphabetical order' do
let(:type) do
Class.new(Types::BaseObject) do
graphql_name 'OrderingTest'
@@ -57,49 +112,56 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
end
end
- specify do
+ it 'lists the fields in alphabetical order' do
expectation = <<~DOC
- ### OrderingTest
+ ### `OrderingTest`
| Field | Type | Description |
| ----- | ---- | ----------- |
- | `bar` | String! | A description of bar field. |
- | `foo` | String! | A description of foo field. |
+ | `bar` | [`String!`](#string) | A description of bar field. |
+ | `foo` | [`String!`](#string) | A description of foo field. |
DOC
is_expected.to include(expectation)
end
end
- context 'A type with a deprecated field' do
+ context 'when a field is deprecated' do
let(:type) do
Class.new(Types::BaseObject) do
graphql_name 'DeprecatedTest'
- field :foo, GraphQL::STRING_TYPE, null: false, deprecated: { reason: 'This is deprecated', milestone: '1.10' }, description: 'A description.'
+ field :foo,
+ type: GraphQL::STRING_TYPE,
+ null: false,
+ deprecated: { reason: 'This is deprecated', milestone: '1.10' },
+ description: 'A description.'
end
end
- specify do
+ it 'includes the deprecation' do
expectation = <<~DOC
- ### DeprecatedTest
+ ### `DeprecatedTest`
| Field | Type | Description |
| ----- | ---- | ----------- |
- | `foo` **{warning-solid}** | String! | **Deprecated:** This is deprecated. Deprecated in 1.10. |
+ | `foo` **{warning-solid}** | [`String!`](#string) | **Deprecated:** This is deprecated. Deprecated in 1.10. |
DOC
is_expected.to include(expectation)
end
end
- context 'A type with an emum field' do
+ context 'when a field has an Enumeration type' do
let(:type) do
enum_type = Class.new(Types::BaseEnum) do
graphql_name 'MyEnum'
- value 'BAZ', description: 'A description of BAZ.'
- value 'BAR', description: 'A description of BAR.', deprecated: { reason: 'This is deprecated', milestone: '1.10' }
+ value 'BAZ',
+ description: 'A description of BAZ.'
+ value 'BAR',
+ description: 'A description of BAR.',
+ deprecated: { reason: 'This is deprecated', milestone: '1.10' }
end
Class.new(Types::BaseObject) do
@@ -109,9 +171,9 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
end
end
- specify do
+ it 'includes the description of the Enumeration' do
expectation = <<~DOC
- ### MyEnum
+ ### `MyEnum`
| Value | Description |
| ----- | ----------- |
@@ -122,5 +184,129 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
is_expected.to include(expectation)
end
end
+
+ context 'when a field has a global ID type' do
+ let(:type) do
+ Class.new(Types::BaseObject) do
+ graphql_name 'IDTest'
+ description 'A test for rendering IDs.'
+
+ field :foo, ::Types::GlobalIDType[::User], null: true, description: 'A user foo.'
+ end
+ end
+
+ it 'includes the field and the description of the ID, so we can link to it' do
+ type_section = <<~DOC
+ ### `IDTest`
+
+ A test for rendering IDs.
+
+ | Field | Type | Description |
+ | ----- | ---- | ----------- |
+ | `foo` | [`UserID`](#userid) | A user foo. |
+ DOC
+
+ id_section = <<~DOC
+ ### `UserID`
+
+ A `UserID` is a global ID. It is encoded as a string.
+
+ An example `UserID` is: `"gid://gitlab/User/1"`.
+ DOC
+
+ is_expected.to include(type_section, id_section)
+ end
+ end
+
+ context 'when there is an interface and a union' do
+ let(:type) do
+ user = Class.new(::Types::BaseObject)
+ user.graphql_name 'User'
+ user.field :user_field, ::GraphQL::STRING_TYPE, null: true
+ group = Class.new(::Types::BaseObject)
+ group.graphql_name 'Group'
+ group.field :group_field, ::GraphQL::STRING_TYPE, null: true
+
+ union = Class.new(::Types::BaseUnion)
+ union.graphql_name 'UserOrGroup'
+ union.description 'Either a user or a group.'
+ union.possible_types user, group
+
+ interface = Module.new
+ interface.include(::Types::BaseInterface)
+ interface.graphql_name 'Flying'
+ interface.description 'Something that can fly.'
+ interface.field :flight_speed, GraphQL::INT_TYPE, null: true, description: 'Speed in mph.'
+
+ african_swallow = Class.new(::Types::BaseObject)
+ african_swallow.graphql_name 'AfricanSwallow'
+ african_swallow.description 'A swallow from Africa.'
+ african_swallow.implements interface
+ interface.orphan_types african_swallow
+
+ Class.new(::Types::BaseObject) do
+ graphql_name 'AbstractTypeTest'
+ description 'A test for abstract types.'
+
+ field :foo, union, null: true, description: 'The foo.'
+ field :flying, interface, null: true, description: 'A flying thing.'
+ end
+ end
+
+ it 'lists the fields correctly, and includes descriptions of all the types' do
+ type_section = <<~DOC
+ ### `AbstractTypeTest`
+
+ A test for abstract types.
+
+ | Field | Type | Description |
+ | ----- | ---- | ----------- |
+ | `flying` | [`Flying`](#flying) | A flying thing. |
+ | `foo` | [`UserOrGroup`](#userorgroup) | The foo. |
+ DOC
+
+ union_section = <<~DOC
+ #### `UserOrGroup`
+
+ Either a user or a group.
+
+ One of:
+
+ - [`Group`](#group)
+ - [`User`](#user)
+ DOC
+
+ interface_section = <<~DOC
+ #### `Flying`
+
+ Something that can fly.
+
+ Implementations:
+
+ - [`AfricanSwallow`](#africanswallow)
+
+ | Field | Type | Description |
+ | ----- | ---- | ----------- |
+ | `flightSpeed` | [`Int`](#int) | Speed in mph. |
+ DOC
+
+ implementation_section = <<~DOC
+ ### `AfricanSwallow`
+
+ A swallow from Africa.
+
+ | Field | Type | Description |
+ | ----- | ---- | ----------- |
+ | `flightSpeed` | [`Int`](#int) | Speed in mph. |
+ DOC
+
+ is_expected.to include(
+ type_section,
+ union_section,
+ interface_section,
+ implementation_section
+ )
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/last_items_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/last_items_spec.rb
index b45bb8b79d9..ec2ec4bf50d 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/last_items_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/last_items_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Graphql::Pagination::Keyset::LastItems do
let_it_be(:merge_request) { create(:merge_request) }
- let(:scope) { MergeRequest.order_merged_at_asc.with_order_id_desc }
+ let(:scope) { MergeRequest.order_merged_at_asc }
subject { described_class.take_items(*args) }
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb
index eb28e6c8c0a..40ee47ece49 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb
@@ -52,18 +52,6 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::OrderInfo do
end
end
- context 'when ordering by SIMILARITY' do
- let(:relation) { Project.sorted_by_similarity_desc('test', include_in_select: true) }
-
- it 'assigns the right attribute name, named function, and direction' do
- expect(order_list.count).to eq 2
- expect(order_list.first.attribute_name).to eq 'similarity'
- expect(order_list.first.named_function).to be_kind_of(Arel::Nodes::Addition)
- expect(order_list.first.named_function.to_sql).to include 'SIMILARITY('
- expect(order_list.first.sort_direction).to eq :desc
- end
- end
-
context 'when ordering by CASE', :aggregate_failures do
let(:relation) { Project.order(Arel::Nodes::Case.new(Project.arel_table[:pending_delete]).when(true).then(100).else(1000).asc) }
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb
index fa631aa5666..31c02fd43e8 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb
@@ -131,43 +131,5 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::QueryBuilder do
end
end
end
-
- context 'when sorting using SIMILARITY' do
- let(:relation) { Project.sorted_by_similarity_desc('test', include_in_select: true) }
- let(:arel_table) { Project.arel_table }
- let(:decoded_cursor) { { 'similarity' => 0.5, 'id' => 100 } }
- let(:similarity_function_call) { Gitlab::Database::SimilarityScore::SIMILARITY_FUNCTION_CALL_WITH_ANNOTATION }
- let(:similarity_sql) do
- [
- "(#{similarity_function_call}(COALESCE(\"projects\".\"path\", ''), 'test') * CAST('1' AS numeric))",
- "(#{similarity_function_call}(COALESCE(\"projects\".\"name\", ''), 'test') * CAST('0.7' AS numeric))",
- "(#{similarity_function_call}(COALESCE(\"projects\".\"description\", ''), 'test') * CAST('0.2' AS numeric))"
- ].join(' + ')
- end
-
- context 'when no values are nil' do
- context 'when :after' do
- it 'generates the correct condition' do
- conditions = builder.conditions.gsub(/\s+/, ' ')
-
- expect(conditions).to include "(#{similarity_sql} < 0.5)"
- expect(conditions).to include '"projects"."id" < 100'
- expect(conditions).to include "OR (#{similarity_sql} IS NULL)"
- end
- end
-
- context 'when :before' do
- let(:before_or_after) { :before }
-
- it 'generates the correct condition' do
- conditions = builder.conditions.gsub(/\s+/, ' ')
-
- expect(conditions).to include "(#{similarity_sql} > 0.5)"
- expect(conditions).to include '"projects"."id" > 100'
- expect(conditions).to include "OR ( #{similarity_sql} = 0.5"
- end
- end
- end
- end
end
end
diff --git a/spec/lib/gitlab/graphql/present/field_extension_spec.rb b/spec/lib/gitlab/graphql/present/field_extension_spec.rb
new file mode 100644
index 00000000000..5e66e16d655
--- /dev/null
+++ b/spec/lib/gitlab/graphql/present/field_extension_spec.rb
@@ -0,0 +1,143 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Gitlab::Graphql::Present::FieldExtension do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+
+ let(:object) { double(value: 'foo') }
+ let(:owner) { fresh_object_type }
+ let(:field_name) { 'value' }
+ let(:field) do
+ ::Types::BaseField.new(name: field_name, type: GraphQL::STRING_TYPE, null: true, owner: owner)
+ end
+
+ let(:base_presenter) do
+ Class.new(SimpleDelegator) do
+ def initialize(object, **options)
+ super(object)
+ @object = object
+ @options = options
+ end
+ end
+ end
+
+ def resolve_value
+ resolve_field(field, object, current_user: user, object_type: owner)
+ end
+
+ context 'when the object does not declare a presenter' do
+ it 'does not affect normal resolution' do
+ expect(resolve_value).to eq 'foo'
+ end
+ end
+
+ describe 'interactions with inheritance' do
+ def parent
+ type = fresh_object_type('Parent')
+ type.present_using(provide_foo)
+ type.field :foo, ::GraphQL::INT_TYPE, null: true
+ type.field :value, ::GraphQL::STRING_TYPE, null: true
+ type
+ end
+
+ def child
+ type = Class.new(parent)
+ type.graphql_name 'Child'
+ type.present_using(provide_bar)
+ type.field :bar, ::GraphQL::INT_TYPE, null: true
+ type
+ end
+
+ def provide_foo
+ Class.new(base_presenter) do
+ def foo
+ 100
+ end
+ end
+ end
+
+ def provide_bar
+ Class.new(base_presenter) do
+ def bar
+ 101
+ end
+ end
+ end
+
+ it 'can resolve value, foo and bar' do
+ type = child
+ value = resolve_field(:value, object, object_type: type)
+ foo = resolve_field(:foo, object, object_type: type)
+ bar = resolve_field(:bar, object, object_type: type)
+
+ expect([value, foo, bar]).to eq ['foo', 100, 101]
+ end
+ end
+
+ shared_examples 'calling the presenter method' do
+ it 'calls the presenter method' do
+ expect(resolve_value).to eq presenter.new(object, current_user: user).send(field_name)
+ end
+ end
+
+ context 'when the object declares a presenter' do
+ before do
+ owner.present_using(presenter)
+ end
+
+ context 'when the presenter overrides the original method' do
+ def twice
+ Class.new(base_presenter) do
+ def value
+ @object.value * 2
+ end
+ end
+ end
+
+ let(:presenter) { twice }
+
+ it_behaves_like 'calling the presenter method'
+ end
+
+ # This is exercised here using an explicit `resolve:` proc, but
+ # @resolver_proc values are used in field instrumentation as well.
+ context 'when the field uses a resolve proc' do
+ let(:presenter) { base_presenter }
+ let(:field) do
+ ::Types::BaseField.new(
+ name: field_name,
+ type: GraphQL::STRING_TYPE,
+ null: true,
+ owner: owner,
+ resolve: ->(obj, args, ctx) { 'Hello from a proc' }
+ )
+ end
+
+ specify { expect(resolve_value).to eq 'Hello from a proc' }
+ end
+
+ context 'when the presenter provides a new method' do
+ def presenter
+ Class.new(base_presenter) do
+ def current_username
+ "Hello #{@options[:current_user]&.username} from the presenter!"
+ end
+ end
+ end
+
+ context 'when we select the original field' do
+ it 'is unaffected' do
+ expect(resolve_value).to eq 'foo'
+ end
+ end
+
+ context 'when we select the new field' do
+ let(:field_name) { 'current_username' }
+
+ it_behaves_like 'calling the presenter method'
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb b/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb
index 138765afd8a..8450396284a 100644
--- a/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb
+++ b/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb
@@ -5,42 +5,6 @@ require 'spec_helper'
RSpec.describe Gitlab::Graphql::QueryAnalyzers::LoggerAnalyzer do
subject { described_class.new }
- describe '#analyze?' do
- context 'feature flag disabled' do
- before do
- stub_feature_flags(graphql_logging: false)
- end
-
- it 'disables the analyzer' do
- expect(subject.analyze?(anything)).to be_falsey
- end
- end
-
- context 'feature flag enabled by default' do
- let(:monotonic_time_before) { 42 }
- let(:monotonic_time_after) { 500 }
- let(:monotonic_time_duration) { monotonic_time_after - monotonic_time_before }
-
- it 'enables the analyzer' do
- expect(subject.analyze?(anything)).to be_truthy
- end
-
- it 'returns a duration in seconds' do
- allow(GraphQL::Analysis).to receive(:analyze_query).and_return([4, 2, [[], []]])
- allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(monotonic_time_before, monotonic_time_after)
- allow(Gitlab::GraphqlLogger).to receive(:info)
-
- expected_duration = monotonic_time_duration
- memo = subject.initial_value(spy('query'))
-
- subject.final_value(memo)
-
- expect(memo).to have_key(:duration_s)
- expect(memo[:duration_s]).to eq(expected_duration)
- end
- end
- end
-
describe '#initial_value' do
it 'filters out sensitive variables' do
doc = GraphQL.parse <<-GRAPHQL
@@ -58,4 +22,24 @@ RSpec.describe Gitlab::Graphql::QueryAnalyzers::LoggerAnalyzer do
expect(subject.initial_value(query)[:variables]).to eq('{:body=>"[FILTERED]"}')
end
end
+
+ describe '#final_value' do
+ let(:monotonic_time_before) { 42 }
+ let(:monotonic_time_after) { 500 }
+ let(:monotonic_time_duration) { monotonic_time_after - monotonic_time_before }
+
+ it 'returns a duration in seconds' do
+ allow(GraphQL::Analysis).to receive(:analyze_query).and_return([4, 2, [[], []]])
+ allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(monotonic_time_before, monotonic_time_after)
+ allow(Gitlab::GraphqlLogger).to receive(:info)
+
+ expected_duration = monotonic_time_duration
+ memo = subject.initial_value(spy('query'))
+
+ subject.final_value(memo)
+
+ expect(memo).to have_key(:duration_s)
+ expect(memo[:duration_s]).to eq(expected_duration)
+ end
+ end
end
diff --git a/spec/lib/gitlab/hook_data/project_member_builder_spec.rb b/spec/lib/gitlab/hook_data/project_member_builder_spec.rb
new file mode 100644
index 00000000000..3fb84223581
--- /dev/null
+++ b/spec/lib/gitlab/hook_data/project_member_builder_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::HookData::ProjectMemberBuilder do
+ let_it_be(:project) { create(:project, :internal, name: 'gitlab') }
+ let_it_be(:user) { create(:user, name: 'John Doe', username: 'johndoe', email: 'john@example.com') }
+ let_it_be(:project_member) { create(:project_member, :developer, user: user, project: project) }
+
+ describe '#build' do
+ let(:data) { described_class.new(project_member).build(event) }
+ let(:event_name) { data[:event_name] }
+ let(:attributes) do
+ [
+ :event_name, :created_at, :updated_at, :project_name, :project_path, :project_path_with_namespace, :project_id, :user_username, :user_name, :user_email, :user_id, :access_level, :project_visibility
+ ]
+ end
+
+ context 'data' do
+ shared_examples_for 'includes the required attributes' do
+ it 'includes the required attributes' do
+ expect(data).to include(*attributes)
+ expect(data[:project_name]).to eq('gitlab')
+ expect(data[:project_path]).to eq(project.path)
+ expect(data[:project_path_with_namespace]).to eq(project.full_path)
+ expect(data[:project_id]).to eq(project.id)
+ expect(data[:user_username]).to eq('johndoe')
+ expect(data[:user_name]).to eq('John Doe')
+ expect(data[:user_id]).to eq(user.id)
+ expect(data[:user_email]).to eq('john@example.com')
+ expect(data[:access_level]).to eq('Developer')
+ expect(data[:project_visibility]).to eq('internal')
+ end
+ end
+
+ context 'on create' do
+ let(:event) { :create }
+
+ it { expect(event_name).to eq('user_add_to_team') }
+ it_behaves_like 'includes the required attributes'
+ end
+
+ context 'on update' do
+ let(:event) { :update }
+
+ it { expect(event_name).to eq('user_update_for_team') }
+ it_behaves_like 'includes the required attributes'
+ end
+
+ context 'on destroy' do
+ let(:event) { :destroy }
+
+ it { expect(event_name).to eq('user_remove_from_team') }
+ it_behaves_like 'includes the required attributes'
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/http_connection_adapter_spec.rb b/spec/lib/gitlab/http_connection_adapter_spec.rb
index 389bc1a85f4..96e6e485841 100644
--- a/spec/lib/gitlab/http_connection_adapter_spec.rb
+++ b/spec/lib/gitlab/http_connection_adapter_spec.rb
@@ -5,17 +5,32 @@ require 'spec_helper'
RSpec.describe Gitlab::HTTPConnectionAdapter do
include StubRequests
+ let(:uri) { URI('https://example.org') }
+ let(:options) { {} }
+
+ subject(:connection) { described_class.new(uri, options).connection }
+
describe '#connection' do
before do
stub_all_dns('https://example.org', ip_address: '93.184.216.34')
end
- context 'when local requests are not allowed' do
+ context 'when local requests are allowed' do
+ let(:options) { { allow_local_requests: true } }
+
it 'sets up the connection' do
- uri = URI('https://example.org')
+ expect(connection).to be_a(Net::HTTP)
+ expect(connection.address).to eq('93.184.216.34')
+ expect(connection.hostname_override).to eq('example.org')
+ expect(connection.addr_port).to eq('example.org')
+ expect(connection.port).to eq(443)
+ end
+ end
- connection = described_class.new(uri).connection
+ context 'when local requests are not allowed' do
+ let(:options) { { allow_local_requests: false } }
+ it 'sets up the connection' do
expect(connection).to be_a(Net::HTTP)
expect(connection.address).to eq('93.184.216.34')
expect(connection.hostname_override).to eq('example.org')
@@ -23,28 +38,57 @@ RSpec.describe Gitlab::HTTPConnectionAdapter do
expect(connection.port).to eq(443)
end
- it 'raises error when it is a request to local address' do
- uri = URI('http://172.16.0.0/12')
+ context 'when it is a request to local network' do
+ let(:uri) { URI('http://172.16.0.0/12') }
+
+ it 'raises error' do
+ expect { subject }.to raise_error(
+ Gitlab::HTTP::BlockedUrlError,
+ "URL 'http://172.16.0.0/12' is blocked: Requests to the local network are not allowed"
+ )
+ end
+
+      context 'when local requests are allowed' do
+ let(:options) { { allow_local_requests: true } }
- expect { described_class.new(uri).connection }
- .to raise_error(Gitlab::HTTP::BlockedUrlError,
- "URL 'http://172.16.0.0/12' is blocked: Requests to the local network are not allowed")
+ it 'sets up the connection' do
+ expect(connection).to be_a(Net::HTTP)
+ expect(connection.address).to eq('172.16.0.0')
+ expect(connection.hostname_override).to be(nil)
+ expect(connection.addr_port).to eq('172.16.0.0')
+ expect(connection.port).to eq(80)
+ end
+ end
end
- it 'raises error when it is a request to localhost address' do
- uri = URI('http://127.0.0.1')
+ context 'when it is a request to local address' do
+ let(:uri) { URI('http://127.0.0.1') }
+
+ it 'raises error' do
+ expect { subject }.to raise_error(
+ Gitlab::HTTP::BlockedUrlError,
+ "URL 'http://127.0.0.1' is blocked: Requests to localhost are not allowed"
+ )
+ end
- expect { described_class.new(uri).connection }
- .to raise_error(Gitlab::HTTP::BlockedUrlError,
- "URL 'http://127.0.0.1' is blocked: Requests to localhost are not allowed")
+      context 'when local requests are allowed' do
+ let(:options) { { allow_local_requests: true } }
+
+ it 'sets up the connection' do
+ expect(connection).to be_a(Net::HTTP)
+ expect(connection.address).to eq('127.0.0.1')
+ expect(connection.hostname_override).to be(nil)
+ expect(connection.addr_port).to eq('127.0.0.1')
+ expect(connection.port).to eq(80)
+ end
+ end
end
context 'when port different from URL scheme is used' do
- it 'sets up the addr_port accordingly' do
- uri = URI('https://example.org:8080')
-
- connection = described_class.new(uri).connection
+ let(:uri) { URI('https://example.org:8080') }
+ it 'sets up the addr_port accordingly' do
+ expect(connection).to be_a(Net::HTTP)
expect(connection.address).to eq('93.184.216.34')
expect(connection.hostname_override).to eq('example.org')
expect(connection.addr_port).to eq('example.org:8080')
@@ -54,13 +98,11 @@ RSpec.describe Gitlab::HTTPConnectionAdapter do
end
context 'when DNS rebinding protection is disabled' do
- it 'sets up the connection' do
+ before do
stub_application_setting(dns_rebinding_protection_enabled: false)
+ end
- uri = URI('https://example.org')
-
- connection = described_class.new(uri).connection
-
+ it 'sets up the connection' do
expect(connection).to be_a(Net::HTTP)
expect(connection.address).to eq('example.org')
expect(connection.hostname_override).to eq(nil)
@@ -70,13 +112,11 @@ RSpec.describe Gitlab::HTTPConnectionAdapter do
end
context 'when http(s) environment variable is set' do
- it 'sets up the connection' do
+ before do
stub_env('https_proxy' => 'https://my.proxy')
+ end
- uri = URI('https://example.org')
-
- connection = described_class.new(uri).connection
-
+ it 'sets up the connection' do
expect(connection).to be_a(Net::HTTP)
expect(connection.address).to eq('example.org')
expect(connection.hostname_override).to eq(nil)
@@ -85,41 +125,128 @@ RSpec.describe Gitlab::HTTPConnectionAdapter do
end
end
- context 'when local requests are allowed' do
- it 'sets up the connection' do
- uri = URI('https://example.org')
+ context 'when proxy settings are configured' do
+ let(:options) do
+ {
+ http_proxyaddr: 'https://proxy.org',
+ http_proxyport: 1557,
+ http_proxyuser: 'user',
+ http_proxypass: 'pass'
+ }
+ end
- connection = described_class.new(uri, allow_local_requests: true).connection
+ before do
+ stub_all_dns('https://proxy.org', ip_address: '166.84.12.54')
+ end
- expect(connection).to be_a(Net::HTTP)
- expect(connection.address).to eq('93.184.216.34')
- expect(connection.hostname_override).to eq('example.org')
- expect(connection.addr_port).to eq('example.org')
- expect(connection.port).to eq(443)
+ it 'sets up the proxy settings' do
+ expect(connection.proxy_address).to eq('https://166.84.12.54')
+ expect(connection.proxy_port).to eq(1557)
+ expect(connection.proxy_user).to eq('user')
+ expect(connection.proxy_pass).to eq('pass')
end
- it 'sets up the connection when it is a local network' do
- uri = URI('http://172.16.0.0/12')
+      context 'when the address has a path' do
+ before do
+ options[:http_proxyaddr] = 'https://proxy.org/path'
+ end
- connection = described_class.new(uri, allow_local_requests: true).connection
+ it 'sets up the proxy settings' do
+ expect(connection.proxy_address).to eq('https://166.84.12.54/path')
+ expect(connection.proxy_port).to eq(1557)
+ end
+ end
- expect(connection).to be_a(Net::HTTP)
- expect(connection.address).to eq('172.16.0.0')
- expect(connection.hostname_override).to be(nil)
- expect(connection.addr_port).to eq('172.16.0.0')
- expect(connection.port).to eq(80)
+      context 'when the port is set in both the address and the port option' do
+ before do
+ options[:http_proxyaddr] = 'https://proxy.org:1422'
+ end
+
+ it 'sets up the proxy settings' do
+ expect(connection.proxy_address).to eq('https://166.84.12.54')
+ expect(connection.proxy_port).to eq(1557)
+ end
+
+ context 'when the port is only in the address' do
+ before do
+ options[:http_proxyport] = nil
+ end
+
+ it 'sets up the proxy settings' do
+ expect(connection.proxy_address).to eq('https://166.84.12.54')
+ expect(connection.proxy_port).to eq(1422)
+ end
+ end
end
- it 'sets up the connection when it is localhost' do
- uri = URI('http://127.0.0.1')
+ context 'when it is a request to local network' do
+ before do
+ options[:http_proxyaddr] = 'http://172.16.0.0/12'
+ end
+
+ it 'raises error' do
+ expect { subject }.to raise_error(
+ Gitlab::HTTP::BlockedUrlError,
+ "URL 'http://172.16.0.0:1557/12' is blocked: Requests to the local network are not allowed"
+ )
+ end
- connection = described_class.new(uri, allow_local_requests: true).connection
+        context 'when local requests are allowed' do
+ before do
+ options[:allow_local_requests] = true
+ end
- expect(connection).to be_a(Net::HTTP)
- expect(connection.address).to eq('127.0.0.1')
- expect(connection.hostname_override).to be(nil)
- expect(connection.addr_port).to eq('127.0.0.1')
- expect(connection.port).to eq(80)
+ it 'sets up the connection' do
+ expect(connection.proxy_address).to eq('http://172.16.0.0/12')
+ expect(connection.proxy_port).to eq(1557)
+ end
+ end
+ end
+
+ context 'when it is a request to local address' do
+ before do
+ options[:http_proxyaddr] = 'http://127.0.0.1'
+ end
+
+ it 'raises error' do
+ expect { subject }.to raise_error(
+ Gitlab::HTTP::BlockedUrlError,
+ "URL 'http://127.0.0.1:1557' is blocked: Requests to localhost are not allowed"
+ )
+ end
+
+        context 'when local requests are allowed' do
+ before do
+ options[:allow_local_requests] = true
+ end
+
+ it 'sets up the connection' do
+ expect(connection.proxy_address).to eq('http://127.0.0.1')
+ expect(connection.proxy_port).to eq(1557)
+ end
+ end
+ end
+
+ context 'when http(s) environment variable is set' do
+ before do
+ stub_env('https_proxy' => 'https://my.proxy')
+ end
+
+ it 'sets up the connection' do
+ expect(connection.proxy_address).to eq('https://proxy.org')
+ expect(connection.proxy_port).to eq(1557)
+ end
+ end
+
+ context 'when DNS rebinding protection is disabled' do
+ before do
+ stub_application_setting(dns_rebinding_protection_enabled: false)
+ end
+
+ it 'sets up the connection' do
+ expect(connection.proxy_address).to eq('https://proxy.org')
+ expect(connection.proxy_port).to eq(1557)
+ end
end
end
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index d0282e14d5f..37b43066a62 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -335,6 +335,7 @@ container_repositories:
- project
- name
project:
+- external_approval_rules
- taggings
- base_tags
- tag_taggings
diff --git a/spec/lib/gitlab/import_export/import_export_spec.rb b/spec/lib/gitlab/import_export/import_export_spec.rb
index 62b4717fc96..87757b07572 100644
--- a/spec/lib/gitlab/import_export/import_export_spec.rb
+++ b/spec/lib/gitlab/import_export/import_export_spec.rb
@@ -4,8 +4,8 @@ require 'spec_helper'
RSpec.describe Gitlab::ImportExport do
describe 'export filename' do
- let(:group) { create(:group, :nested) }
- let(:project) { create(:project, :public, path: 'project-path', namespace: group) }
+ let(:group) { build(:group, path: 'child', parent: build(:group, path: 'parent')) }
+ let(:project) { build(:project, :public, path: 'project-path', namespace: group) }
it 'contains the project path' do
expect(described_class.export_filename(exportable: project)).to include(project.path)
diff --git a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
index ece261e0882..50494433c5d 100644
--- a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
@@ -349,14 +349,22 @@ RSpec.describe Gitlab::ImportExport::Project::TreeSaver do
project_tree_saver.save
end
- it 'exports group members as admin' do
- expect(member_emails).to include('group@member.com')
- end
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'exports group members as admin' do
+ expect(member_emails).to include('group@member.com')
+ end
- it 'exports group members as project members' do
- member_types = subject.map { |pm| pm['source_type'] }
+ it 'exports group members as project members' do
+ member_types = subject.map { |pm| pm['source_type'] }
+
+ expect(member_types).to all(eq('Project'))
+ end
+ end
- expect(member_types).to all(eq('Project'))
+ context 'when admin mode is disabled' do
+ it 'does not export group members' do
+ expect(member_emails).not_to include('group@member.com')
+ end
end
end
end
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index e301be47d68..b159d0cfc76 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -84,6 +84,7 @@ Note:
- discussion_id
- original_discussion_id
- confidential
+- last_edited_at
LabelLink:
- id
- target_type
@@ -500,6 +501,7 @@ ProtectedBranch:
- name
- created_at
- updated_at
+- allow_force_push
- code_owner_approval_required
ProtectedTag:
- id
@@ -584,6 +586,7 @@ ProjectFeature:
- analytics_access_level
- operations_access_level
- security_and_compliance_access_level
+- container_registry_access_level
- created_at
- updated_at
ProtectedBranch::MergeAccessLevel:
diff --git a/spec/lib/gitlab/marker_range_spec.rb b/spec/lib/gitlab/marker_range_spec.rb
new file mode 100644
index 00000000000..5f73d2a5048
--- /dev/null
+++ b/spec/lib/gitlab/marker_range_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::MarkerRange do
+ subject(:marker_range) { described_class.new(first, last, mode: mode) }
+
+ let(:first) { 1 }
+ let(:last) { 10 }
+ let(:mode) { nil }
+
+ it { is_expected.to eq(first..last) }
+
+ it 'behaves like a Range' do
+ is_expected.to be_kind_of(Range)
+ end
+
+ describe '#mode' do
+ subject { marker_range.mode }
+
+ it { is_expected.to be_nil }
+
+ context 'when mode is provided' do
+ let(:mode) { :deletion }
+
+ it { is_expected.to eq(mode) }
+ end
+ end
+
+ describe '#to_range' do
+ subject { marker_range.to_range }
+
+ it { is_expected.to eq(first..last) }
+
+ context 'when mode is provided' do
+ let(:mode) { :deletion }
+
+ it 'is omitted during transformation' do
+ is_expected.not_to respond_to(:mode)
+ end
+ end
+ end
+
+ describe '.from_range' do
+ subject { described_class.from_range(range) }
+
+ let(:range) { 1..3 }
+
+ it 'converts Range to MarkerRange object' do
+ is_expected.to be_a(described_class)
+ end
+
+ it 'keeps correct range' do
+ is_expected.to eq(range)
+ end
+
+ context 'when range excludes end' do
+ let(:range) { 1...3 }
+
+ it 'keeps correct range' do
+ is_expected.to eq(range)
+ end
+ end
+
+ context 'when range is already a MarkerRange' do
+ let(:range) { marker_range }
+
+ it { is_expected.to be(marker_range) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/background_transaction_spec.rb b/spec/lib/gitlab/metrics/background_transaction_spec.rb
new file mode 100644
index 00000000000..b31a2f7549a
--- /dev/null
+++ b/spec/lib/gitlab/metrics/background_transaction_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Metrics::BackgroundTransaction do
+ let(:transaction) { described_class.new }
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Metric, base_labels: {}) }
+
+ before do
+ allow(described_class).to receive(:prometheus_metric).and_return(prometheus_metric)
+ end
+
+ describe '#run' do
+ it 'yields the supplied block' do
+ expect { |b| transaction.run(&b) }.to yield_control
+ end
+
+ it 'stores the transaction in the current thread' do
+ transaction.run do
+ expect(Thread.current[described_class::BACKGROUND_THREAD_KEY]).to eq(transaction)
+ end
+ end
+
+ it 'removes the transaction from the current thread upon completion' do
+ transaction.run { }
+
+ expect(Thread.current[described_class::BACKGROUND_THREAD_KEY]).to be_nil
+ end
+ end
+
+ describe '#labels' do
+ it 'provides labels with endpoint_id and feature_category' do
+ Labkit::Context.with_context(feature_category: 'projects', caller_id: 'TestWorker') do
+ expect(transaction.labels).to eq({ endpoint_id: 'TestWorker', feature_category: 'projects' })
+ end
+ end
+ end
+
+ RSpec.shared_examples 'metric with labels' do |metric_method|
+ it 'measures with correct labels and value' do
+ value = 1
+ expect(prometheus_metric).to receive(metric_method).with({ endpoint_id: 'TestWorker', feature_category: 'projects' }, value)
+
+ Labkit::Context.with_context(feature_category: 'projects', caller_id: 'TestWorker') do
+ transaction.send(metric_method, :test_metric, value)
+ end
+ end
+ end
+
+ describe '#increment' do
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Counter, :increment, base_labels: {}) }
+
+ it_behaves_like 'metric with labels', :increment
+ end
+
+ describe '#set' do
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Gauge, :set, base_labels: {}) }
+
+ it_behaves_like 'metric with labels', :set
+ end
+
+ describe '#observe' do
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Histogram, :observe, base_labels: {}) }
+
+ it_behaves_like 'metric with labels', :observe
+ end
+end
diff --git a/spec/lib/gitlab/metrics/subscribers/action_cable_spec.rb b/spec/lib/gitlab/metrics/subscribers/action_cable_spec.rb
new file mode 100644
index 00000000000..153cf43be0a
--- /dev/null
+++ b/spec/lib/gitlab/metrics/subscribers/action_cable_spec.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Metrics::Subscribers::ActionCable, :request_store do
+ let(:subscriber) { described_class.new }
+ let(:counter) { double(:counter) }
+ let(:data) { { data: { event: 'updated' } } }
+ let(:channel_class) { 'IssuesChannel' }
+ let(:event) do
+ double(
+ :event,
+ name: name,
+ payload: payload
+ )
+ end
+
+ describe '#transmit' do
+ let(:name) { 'transmit.action_cable' }
+ let(:via) { 'streamed from issues:Z2lkOi8vZs2l0bGFiL0lzc3VlLzQ0Ng' }
+ let(:payload) do
+ {
+ channel_class: channel_class,
+ via: via,
+ data: data
+ }
+ end
+
+ it 'tracks the transmit event' do
+ allow(::Gitlab::Metrics).to receive(:counter).with(
+ :action_cable_single_client_transmissions_total, /transmit/
+ ).and_return(counter)
+
+ expect(counter).to receive(:increment)
+
+ subscriber.transmit(event)
+ end
+ end
+
+ describe '#broadcast' do
+ let(:name) { 'broadcast.action_cable' }
+ let(:coder) { ActiveSupport::JSON }
+ let(:message) do
+ { event: :updated }
+ end
+
+ let(:broadcasting) { 'issues:Z2lkOi8vZ2l0bGFiL0lzc3VlLzQ0Ng' }
+ let(:payload) do
+ {
+ broadcasting: broadcasting,
+ message: message,
+ coder: coder
+ }
+ end
+
+ it 'tracks the broadcast event' do
+ allow(::Gitlab::Metrics).to receive(:counter).with(
+ :action_cable_broadcasts_total, /broadcast/
+ ).and_return(counter)
+
+ expect(counter).to receive(:increment)
+
+ subscriber.broadcast(event)
+ end
+ end
+
+ describe '#transmit_subscription_confirmation' do
+ let(:name) { 'transmit_subscription_confirmation.action_cable' }
+ let(:channel_class) { 'IssuesChannel' }
+ let(:payload) do
+ {
+ channel_class: channel_class
+ }
+ end
+
+ it 'tracks the subscription confirmation event' do
+ allow(::Gitlab::Metrics).to receive(:counter).with(
+ :action_cable_subscription_confirmations_total, /confirm/
+ ).and_return(counter)
+
+ expect(counter).to receive(:increment)
+
+ subscriber.transmit_subscription_confirmation(event)
+ end
+ end
+
+ describe '#transmit_subscription_rejection' do
+ let(:name) { 'transmit_subscription_rejection.action_cable' }
+ let(:channel_class) { 'IssuesChannel' }
+ let(:payload) do
+ {
+ channel_class: channel_class
+ }
+ end
+
+ it 'tracks the subscription rejection event' do
+ allow(::Gitlab::Metrics).to receive(:counter).with(
+ :action_cable_subscription_rejections_total, /reject/
+ ).and_return(counter)
+
+ expect(counter).to receive(:increment)
+
+ subscriber.transmit_subscription_rejection(event)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
index edcd5b31941..dffd37eeb9d 100644
--- a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
@@ -3,10 +3,12 @@
require 'spec_helper'
RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
+ using RSpec::Parameterized::TableSyntax
+
let(:env) { {} }
- let(:transaction) { Gitlab::Metrics::WebTransaction.new(env) }
- let(:subscriber) { described_class.new }
- let(:payload) { { sql: 'SELECT * FROM users WHERE id = 10' } }
+ let(:subscriber) { described_class.new }
+ let(:connection) { double(:connection) }
+ let(:payload) { { sql: 'SELECT * FROM users WHERE id = 10', connection: connection } }
let(:event) do
double(
@@ -17,82 +19,32 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
)
end
- describe '#sql' do
- shared_examples 'track query in metrics' do
- before do
- allow(subscriber).to receive(:current_transaction)
- .at_least(:once)
- .and_return(transaction)
- end
-
- it 'increments only db count value' do
- described_class::DB_COUNTERS.each do |counter|
- prometheus_counter = "gitlab_transaction_#{counter}_total".to_sym
- if expected_counters[counter] > 0
- expect(transaction).to receive(:increment).with(prometheus_counter, 1)
- else
- expect(transaction).not_to receive(:increment).with(prometheus_counter, 1)
- end
- end
-
- subscriber.sql(event)
- end
+  # Emulate Marginalia prepending comments
+ def sql(query, comments: true)
+ if comments && !%w[BEGIN COMMIT].include?(query)
+ "/*application:web,controller:badges,action:pipeline,correlation_id:01EYN39K9VMJC56Z7808N7RSRH*/ #{query}"
+ else
+ query
end
+ end
- shared_examples 'track query in RequestStore' do
- context 'when RequestStore is enabled' do
- it 'caches db count value', :request_store, :aggregate_failures do
- subscriber.sql(event)
-
- described_class::DB_COUNTERS.each do |counter|
- expect(Gitlab::SafeRequestStore[counter].to_i).to eq expected_counters[counter]
- end
- end
-
- it 'prevents db counters from leaking to the next transaction' do
- 2.times do
- Gitlab::WithRequestStore.with_request_store do
- subscriber.sql(event)
-
- described_class::DB_COUNTERS.each do |counter|
- expect(Gitlab::SafeRequestStore[counter].to_i).to eq expected_counters[counter]
- end
- end
- end
- end
- end
- end
-
- describe 'without a current transaction' do
- it 'does not track any metrics' do
- expect_any_instance_of(Gitlab::Metrics::Transaction)
- .not_to receive(:increment)
-
- subscriber.sql(event)
- end
-
- context 'with read query' do
- let(:expected_counters) do
- {
- db_count: 1,
- db_write_count: 0,
- db_cached_count: 0
- }
- end
-
- it_behaves_like 'track query in RequestStore'
- end
+ shared_examples 'track generic sql events' do
+ where(:name, :sql_query, :record_query, :record_write_query, :record_cached_query) do
+ 'SQL' | 'SELECT * FROM users WHERE id = 10' | true | false | false
+ 'SQL' | 'WITH active_milestones AS (SELECT COUNT(*), state FROM milestones GROUP BY state) SELECT * FROM active_milestones' | true | false | false
+ 'SQL' | 'SELECT * FROM users WHERE id = 10 FOR UPDATE' | true | true | false
+ 'SQL' | 'WITH archived_rows AS (SELECT * FROM users WHERE archived = true) INSERT INTO products_log SELECT * FROM archived_rows' | true | true | false
+ 'SQL' | 'DELETE FROM users where id = 10' | true | true | false
+ 'SQL' | 'INSERT INTO project_ci_cd_settings (project_id) SELECT id FROM projects' | true | true | false
+ 'SQL' | 'UPDATE users SET admin = true WHERE id = 10' | true | true | false
+ 'CACHE' | 'SELECT * FROM users WHERE id = 10' | true | false | true
+ 'SCHEMA' | "SELECT attr.attname FROM pg_attribute attr INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = any(cons.conkey) WHERE cons.contype = 'p' AND cons.conrelid = '\"projects\"'::regclass" | false | false | false
+ nil | 'BEGIN' | false | false | false
+ nil | 'COMMIT' | false | false | false
end
- describe 'with a current transaction' do
- it 'observes sql_duration metric' do
- expect(subscriber).to receive(:current_transaction)
- .at_least(:once)
- .and_return(transaction)
- expect(transaction).to receive(:observe).with(:gitlab_sql_duration_seconds, 0.002)
-
- subscriber.sql(event)
- end
+ with_them do
+ let(:payload) { { name: name, sql: sql(sql_query, comments: comments), connection: connection } }
it 'marks the current thread as using the database' do
# since it would already have been toggled by other specs
@@ -101,215 +53,20 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
expect { subscriber.sql(event) }.to change { Thread.current[:uses_db_connection] }.from(nil).to(true)
end
- context 'with read query' do
- let(:expected_counters) do
- {
- db_count: 1,
- db_write_count: 0,
- db_cached_count: 0
- }
- end
-
- it_behaves_like 'track query in metrics'
- it_behaves_like 'track query in RequestStore'
-
- context 'with only select' do
- let(:payload) { { sql: 'WITH active_milestones AS (SELECT COUNT(*), state FROM milestones GROUP BY state) SELECT * FROM active_milestones' } }
-
- it_behaves_like 'track query in metrics'
- it_behaves_like 'track query in RequestStore'
- end
- end
-
- context 'write query' do
- let(:expected_counters) do
- {
- db_count: 1,
- db_write_count: 1,
- db_cached_count: 0
- }
- end
-
- context 'with select for update sql event' do
- let(:payload) { { sql: 'SELECT * FROM users WHERE id = 10 FOR UPDATE' } }
-
- it_behaves_like 'track query in metrics'
- it_behaves_like 'track query in RequestStore'
- end
-
- context 'with common table expression' do
- context 'with insert' do
- let(:payload) { { sql: 'WITH archived_rows AS (SELECT * FROM users WHERE archived = true) INSERT INTO products_log SELECT * FROM archived_rows' } }
-
- it_behaves_like 'track query in metrics'
- it_behaves_like 'track query in RequestStore'
- end
- end
-
- context 'with delete sql event' do
- let(:payload) { { sql: 'DELETE FROM users where id = 10' } }
-
- it_behaves_like 'track query in metrics'
- it_behaves_like 'track query in RequestStore'
- end
-
- context 'with insert sql event' do
- let(:payload) { { sql: 'INSERT INTO project_ci_cd_settings (project_id) SELECT id FROM projects' } }
-
- it_behaves_like 'track query in metrics'
- it_behaves_like 'track query in RequestStore'
- end
-
- context 'with update sql event' do
- let(:payload) { { sql: 'UPDATE users SET admin = true WHERE id = 10' } }
-
- it_behaves_like 'track query in metrics'
- it_behaves_like 'track query in RequestStore'
- end
- end
-
- context 'with cached query' do
- let(:expected_counters) do
- {
- db_count: 1,
- db_write_count: 0,
- db_cached_count: 1
- }
- end
-
- context 'with cached payload ' do
- let(:payload) do
- {
- sql: 'SELECT * FROM users WHERE id = 10',
- cached: true
- }
- end
-
- it_behaves_like 'track query in metrics'
- it_behaves_like 'track query in RequestStore'
- end
-
- context 'with cached payload name' do
- let(:payload) do
- {
- sql: 'SELECT * FROM users WHERE id = 10',
- name: 'CACHE'
- }
- end
-
- it_behaves_like 'track query in metrics'
- it_behaves_like 'track query in RequestStore'
- end
- end
-
- context 'events are internal to Rails or irrelevant' do
- let(:schema_event) do
- double(
- :event,
- name: 'sql.active_record',
- payload: {
- sql: "SELECT attr.attname FROM pg_attribute attr INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = any(cons.conkey) WHERE cons.contype = 'p' AND cons.conrelid = '\"projects\"'::regclass",
- name: 'SCHEMA',
- connection_id: 135,
- statement_name: nil,
- binds: []
- },
- duration: 0.7
- )
- end
-
- let(:begin_event) do
- double(
- :event,
- name: 'sql.active_record',
- payload: {
- sql: "BEGIN",
- name: nil,
- connection_id: 231,
- statement_name: nil,
- binds: []
- },
- duration: 1.1
- )
- end
-
- let(:commit_event) do
- double(
- :event,
- name: 'sql.active_record',
- payload: {
- sql: "COMMIT",
- name: nil,
- connection_id: 212,
- statement_name: nil,
- binds: []
- },
- duration: 1.6
- )
- end
-
- it 'skips schema/begin/commit sql commands' do
- allow(subscriber).to receive(:current_transaction)
- .at_least(:once)
- .and_return(transaction)
-
- expect(transaction).not_to receive(:increment)
-
- subscriber.sql(schema_event)
- subscriber.sql(begin_event)
- subscriber.sql(commit_event)
- end
- end
+ it_behaves_like 'record ActiveRecord metrics'
+ it_behaves_like 'store ActiveRecord info in RequestStore'
end
end
- describe 'self.db_counter_payload' do
- before do
- allow(subscriber).to receive(:current_transaction)
- .at_least(:once)
- .and_return(transaction)
- end
-
- context 'when RequestStore is enabled', :request_store do
- context 'when query is executed' do
- let(:expected_payload) do
- {
- db_count: 1,
- db_cached_count: 0,
- db_write_count: 0
- }
- end
-
- it 'returns correct payload' do
- subscriber.sql(event)
-
- expect(described_class.db_counter_payload).to eq(expected_payload)
- end
- end
+ context 'without Marginalia comments' do
+ let(:comments) { false }
- context 'when query is not executed' do
- let(:expected_payload) do
- {
- db_count: 0,
- db_cached_count: 0,
- db_write_count: 0
- }
- end
-
- it 'returns correct payload' do
- expect(described_class.db_counter_payload).to eq(expected_payload)
- end
- end
- end
-
- context 'when RequestStore is disabled' do
- let(:expected_payload) { {} }
+ it_behaves_like 'track generic sql events'
+ end
- it 'returns empty payload' do
- subscriber.sql(event)
+ context 'with Marginalia comments' do
+ let(:comments) { true }
- expect(described_class.db_counter_payload).to eq(expected_payload)
- end
- end
+ it_behaves_like 'track generic sql events'
end
end
diff --git a/spec/lib/gitlab/object_hierarchy_spec.rb b/spec/lib/gitlab/object_hierarchy_spec.rb
index ef2d4fa0cbf..08e1a5ee0a3 100644
--- a/spec/lib/gitlab/object_hierarchy_spec.rb
+++ b/spec/lib/gitlab/object_hierarchy_spec.rb
@@ -7,178 +7,206 @@ RSpec.describe Gitlab::ObjectHierarchy do
let!(:child1) { create(:group, parent: parent) }
let!(:child2) { create(:group, parent: child1) }
- describe '#base_and_ancestors' do
- let(:relation) do
- described_class.new(Group.where(id: child2.id)).base_and_ancestors
- end
-
- it 'includes the base rows' do
- expect(relation).to include(child2)
- end
+ shared_context 'Gitlab::ObjectHierarchy test cases' do
+ describe '#base_and_ancestors' do
+ let(:relation) do
+ described_class.new(Group.where(id: child2.id)).base_and_ancestors
+ end
- it 'includes all of the ancestors' do
- expect(relation).to include(parent, child1)
- end
+ it 'includes the base rows' do
+ expect(relation).to include(child2)
+ end
- it 'can find ancestors upto a certain level' do
- relation = described_class.new(Group.where(id: child2)).base_and_ancestors(upto: child1)
+ it 'includes all of the ancestors' do
+ expect(relation).to include(parent, child1)
+ end
- expect(relation).to contain_exactly(child2)
- end
+ it 'can find ancestors upto a certain level' do
+ relation = described_class.new(Group.where(id: child2)).base_and_ancestors(upto: child1)
- it 'uses ancestors_base #initialize argument' do
- relation = described_class.new(Group.where(id: child2.id), Group.none).base_and_ancestors
+ expect(relation).to contain_exactly(child2)
+ end
- expect(relation).to include(parent, child1, child2)
- end
+ it 'uses ancestors_base #initialize argument' do
+ relation = described_class.new(Group.where(id: child2.id), Group.none).base_and_ancestors
- it 'does not allow the use of #update_all' do
- expect { relation.update_all(share_with_group_lock: false) }
- .to raise_error(ActiveRecord::ReadOnlyRecord)
- end
+ expect(relation).to include(parent, child1, child2)
+ end
- describe 'hierarchy_order option' do
- let(:relation) do
- described_class.new(Group.where(id: child2.id)).base_and_ancestors(hierarchy_order: hierarchy_order)
+ it 'does not allow the use of #update_all' do
+ expect { relation.update_all(share_with_group_lock: false) }
+ .to raise_error(ActiveRecord::ReadOnlyRecord)
end
- context ':asc' do
- let(:hierarchy_order) { :asc }
+ describe 'hierarchy_order option' do
+ let(:relation) do
+ described_class.new(Group.where(id: child2.id)).base_and_ancestors(hierarchy_order: hierarchy_order)
+ end
+
+ context ':asc' do
+ let(:hierarchy_order) { :asc }
- it 'orders by child to parent' do
- expect(relation).to eq([child2, child1, parent])
+ it 'orders by child to parent' do
+ expect(relation).to eq([child2, child1, parent])
+ end
end
- end
- context ':desc' do
- let(:hierarchy_order) { :desc }
+ context ':desc' do
+ let(:hierarchy_order) { :desc }
- it 'orders by parent to child' do
- expect(relation).to eq([parent, child1, child2])
+ it 'orders by parent to child' do
+ expect(relation).to eq([parent, child1, child2])
+ end
end
end
end
- end
-
- describe '#base_and_descendants' do
- let(:relation) do
- described_class.new(Group.where(id: parent.id)).base_and_descendants
- end
- it 'includes the base rows' do
- expect(relation).to include(parent)
- end
+ describe '#base_and_descendants' do
+ let(:relation) do
+ described_class.new(Group.where(id: parent.id)).base_and_descendants
+ end
- it 'includes all the descendants' do
- expect(relation).to include(child1, child2)
- end
+ it 'includes the base rows' do
+ expect(relation).to include(parent)
+ end
- it 'uses descendants_base #initialize argument' do
- relation = described_class.new(Group.none, Group.where(id: parent.id)).base_and_descendants
+ it 'includes all the descendants' do
+ expect(relation).to include(child1, child2)
+ end
- expect(relation).to include(parent, child1, child2)
- end
+ it 'uses descendants_base #initialize argument' do
+ relation = described_class.new(Group.none, Group.where(id: parent.id)).base_and_descendants
- it 'does not allow the use of #update_all' do
- expect { relation.update_all(share_with_group_lock: false) }
- .to raise_error(ActiveRecord::ReadOnlyRecord)
- end
+ expect(relation).to include(parent, child1, child2)
+ end
- context 'when with_depth is true' do
- let(:relation) do
- described_class.new(Group.where(id: parent.id)).base_and_descendants(with_depth: true)
+ it 'does not allow the use of #update_all' do
+ expect { relation.update_all(share_with_group_lock: false) }
+ .to raise_error(ActiveRecord::ReadOnlyRecord)
end
- it 'includes depth in the results' do
- object_depths = {
- parent.id => 1,
- child1.id => 2,
- child2.id => 3
- }
+ context 'when with_depth is true' do
+ let(:relation) do
+ described_class.new(Group.where(id: parent.id)).base_and_descendants(with_depth: true)
+ end
+
+ it 'includes depth in the results' do
+ object_depths = {
+ parent.id => 1,
+ child1.id => 2,
+ child2.id => 3
+ }
- relation.each do |object|
- expect(object.depth).to eq(object_depths[object.id])
+ relation.each do |object|
+ expect(object.depth).to eq(object_depths[object.id])
+ end
end
end
end
- end
- describe '#descendants' do
- it 'includes only the descendants' do
- relation = described_class.new(Group.where(id: parent)).descendants
+ describe '#descendants' do
+ it 'includes only the descendants' do
+ relation = described_class.new(Group.where(id: parent)).descendants
- expect(relation).to contain_exactly(child1, child2)
+ expect(relation).to contain_exactly(child1, child2)
+ end
end
- end
- describe '#max_descendants_depth' do
- subject { described_class.new(base_relation).max_descendants_depth }
+ describe '#max_descendants_depth' do
+ subject { described_class.new(base_relation).max_descendants_depth }
- context 'when base relation is empty' do
- let(:base_relation) { Group.where(id: nil) }
+ context 'when base relation is empty' do
+ let(:base_relation) { Group.where(id: nil) }
- it { expect(subject).to be_nil }
- end
+ it { expect(subject).to be_nil }
+ end
- context 'when base has no children' do
- let(:base_relation) { Group.where(id: child2) }
+ context 'when base has no children' do
+ let(:base_relation) { Group.where(id: child2) }
- it { expect(subject).to eq(1) }
- end
+ it { expect(subject).to eq(1) }
+ end
- context 'when base has grandchildren' do
- let(:base_relation) { Group.where(id: parent) }
+ context 'when base has grandchildren' do
+ let(:base_relation) { Group.where(id: parent) }
- it { expect(subject).to eq(3) }
+ it { expect(subject).to eq(3) }
+ end
end
- end
- describe '#ancestors' do
- it 'includes only the ancestors' do
- relation = described_class.new(Group.where(id: child2)).ancestors
+ describe '#ancestors' do
+ it 'includes only the ancestors' do
+ relation = described_class.new(Group.where(id: child2)).ancestors
- expect(relation).to contain_exactly(child1, parent)
- end
+ expect(relation).to contain_exactly(child1, parent)
+ end
- it 'can find ancestors upto a certain level' do
- relation = described_class.new(Group.where(id: child2)).ancestors(upto: child1)
+ it 'can find ancestors upto a certain level' do
+ relation = described_class.new(Group.where(id: child2)).ancestors(upto: child1)
- expect(relation).to be_empty
+ expect(relation).to be_empty
+ end
end
- end
- describe '#all_objects' do
- let(:relation) do
- described_class.new(Group.where(id: child1.id)).all_objects
- end
+ describe '#all_objects' do
+ let(:relation) do
+ described_class.new(Group.where(id: child1.id)).all_objects
+ end
- it 'includes the base rows' do
- expect(relation).to include(child1)
- end
+ it 'includes the base rows' do
+ expect(relation).to include(child1)
+ end
+
+ it 'includes the ancestors' do
+ expect(relation).to include(parent)
+ end
+
+ it 'includes the descendants' do
+ expect(relation).to include(child2)
+ end
+
+ it 'uses ancestors_base #initialize argument for ancestors' do
+ relation = described_class.new(Group.where(id: child1.id), Group.where(id: non_existing_record_id)).all_objects
+
+ expect(relation).to include(parent)
+ end
- it 'includes the ancestors' do
- expect(relation).to include(parent)
+ it 'uses descendants_base #initialize argument for descendants' do
+ relation = described_class.new(Group.where(id: non_existing_record_id), Group.where(id: child1.id)).all_objects
+
+ expect(relation).to include(child2)
+ end
+
+ it 'does not allow the use of #update_all' do
+ expect { relation.update_all(share_with_group_lock: false) }
+ .to raise_error(ActiveRecord::ReadOnlyRecord)
+ end
end
+ end
- it 'includes the descendants' do
- expect(relation).to include(child2)
+ context 'when the use_distinct_in_object_hierarchy feature flag is enabled' do
+ before do
+ stub_feature_flags(use_distinct_in_object_hierarchy: true)
end
- it 'uses ancestors_base #initialize argument for ancestors' do
- relation = described_class.new(Group.where(id: child1.id), Group.where(id: non_existing_record_id)).all_objects
+ it_behaves_like 'Gitlab::ObjectHierarchy test cases'
- expect(relation).to include(parent)
+ it 'calls DISTINCT' do
+ expect(parent.self_and_descendants.to_sql).to include("DISTINCT")
+ expect(child2.self_and_ancestors.to_sql).to include("DISTINCT")
end
+ end
- it 'uses descendants_base #initialize argument for descendants' do
- relation = described_class.new(Group.where(id: non_existing_record_id), Group.where(id: child1.id)).all_objects
-
- expect(relation).to include(child2)
+ context 'when the use_distinct_in_object_hierarchy feature flag is disabled' do
+ before do
+ stub_feature_flags(use_distinct_in_object_hierarchy: false)
end
- it 'does not allow the use of #update_all' do
- expect { relation.update_all(share_with_group_lock: false) }
- .to raise_error(ActiveRecord::ReadOnlyRecord)
+ it_behaves_like 'Gitlab::ObjectHierarchy test cases'
+
+ it 'does not call DISTINCT' do
+ expect(parent.self_and_descendants.to_sql).not_to include("DISTINCT")
+ expect(child2.self_and_ancestors.to_sql).not_to include("DISTINCT")
end
end
end
diff --git a/spec/lib/gitlab/optimistic_locking_spec.rb b/spec/lib/gitlab/optimistic_locking_spec.rb
index 0862a9c880e..1d669573b74 100644
--- a/spec/lib/gitlab/optimistic_locking_spec.rb
+++ b/spec/lib/gitlab/optimistic_locking_spec.rb
@@ -5,37 +5,108 @@ require 'spec_helper'
RSpec.describe Gitlab::OptimisticLocking do
let!(:pipeline) { create(:ci_pipeline) }
let!(:pipeline2) { Ci::Pipeline.find(pipeline.id) }
+ let(:histogram) { spy('prometheus metric') }
+
+ before do
+ allow(described_class)
+ .to receive(:retry_lock_histogram)
+ .and_return(histogram)
+ end
describe '#retry_lock' do
- it 'does not reload object if state changes' do
- expect(pipeline).not_to receive(:reset)
- expect(pipeline).to receive(:succeed).and_call_original
+ let(:name) { 'optimistic_locking_spec' }
- described_class.retry_lock(pipeline) do |subject|
- subject.succeed
+ context 'when state changed successfully without retries' do
+ subject do
+ described_class.retry_lock(pipeline, name: name) do |lock_subject|
+ lock_subject.succeed
+ end
end
- end
- it 'retries action if exception is raised' do
- pipeline.succeed
+ it 'does not reload object' do
+ expect(pipeline).not_to receive(:reset)
+ expect(pipeline).to receive(:succeed).and_call_original
+
+ subject
+ end
+
+ it 'does not create log record' do
+ expect(described_class.retry_lock_logger).not_to receive(:info)
+
+ subject
+ end
- expect(pipeline2).to receive(:reset).and_call_original
- expect(pipeline2).to receive(:drop).twice.and_call_original
+ it 'adds number of retries to histogram' do
+ subject
- described_class.retry_lock(pipeline2) do |subject|
- subject.drop
+ expect(histogram).to have_received(:observe).with({}, 0)
end
end
- it 'raises exception when too many retries' do
- expect(pipeline).to receive(:drop).twice.and_call_original
+ context 'when at least one retry happened, the change succeeded' do
+ subject do
+ described_class.retry_lock(pipeline2, name: 'optimistic_locking_spec') do |lock_subject|
+ lock_subject.drop
+ end
+ end
+
+ before do
+ pipeline.succeed
+ end
+
+ it 'completes the action' do
+ expect(pipeline2).to receive(:reset).and_call_original
+ expect(pipeline2).to receive(:drop).twice.and_call_original
+
+ subject
+ end
+
+ it 'creates a single log record' do
+ expect(described_class.retry_lock_logger)
+ .to receive(:info)
+ .once
+ .with(hash_including(:time_s, name: name, retries: 1))
- expect do
- described_class.retry_lock(pipeline, 1) do |subject|
- subject.lock_version = 100
- subject.drop
+ subject
+ end
+
+ it 'adds number of retries to histogram' do
+ subject
+
+ expect(histogram).to have_received(:observe).with({}, 1)
+ end
+ end
+
+ context 'when MAX_RETRIES attempts exceeded' do
+ subject do
+ described_class.retry_lock(pipeline, max_retries, name: name) do |lock_subject|
+ lock_subject.lock_version = 100
+ lock_subject.drop
end
- end.to raise_error(ActiveRecord::StaleObjectError)
+ end
+
+ let(:max_retries) { 2 }
+
+ it 'raises an exception' do
+ expect(pipeline).to receive(:drop).exactly(max_retries + 1).times.and_call_original
+
+ expect { subject }.to raise_error(ActiveRecord::StaleObjectError)
+ end
+
+ it 'creates a single log record' do
+ expect(described_class.retry_lock_logger)
+ .to receive(:info)
+ .once
+ .with(hash_including(:time_s, name: name, retries: max_retries))
+
+ expect { subject }.to raise_error(ActiveRecord::StaleObjectError)
+ end
+
+ it 'adds number of retries to histogram' do
+ expect { subject }.to raise_error(ActiveRecord::StaleObjectError)
+
+ expect(histogram).to have_received(:observe).with({}, max_retries)
+ end
end
end
diff --git a/spec/lib/gitlab/pagination/keyset/column_order_definition_spec.rb b/spec/lib/gitlab/pagination/keyset/column_order_definition_spec.rb
new file mode 100644
index 00000000000..6e9e987f90c
--- /dev/null
+++ b/spec/lib/gitlab/pagination/keyset/column_order_definition_spec.rb
@@ -0,0 +1,188 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pagination::Keyset::ColumnOrderDefinition do
+ let_it_be(:project_name_column) do
+ described_class.new(
+ attribute_name: :name,
+ order_expression: Project.arel_table[:name].asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ end
+
+ let_it_be(:project_name_lower_column) do
+ described_class.new(
+ attribute_name: :name,
+ order_expression: Project.arel_table[:name].lower.desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ end
+
+ let_it_be(:project_calculated_column_expression) do
+ # COALESCE("projects"."description", 'No Description')
+ Arel::Nodes::NamedFunction.new('COALESCE', [
+ Project.arel_table[:description],
+ Arel.sql("'No Description'")
+ ])
+ end
+
+ let_it_be(:project_calculated_column) do
+ described_class.new(
+ attribute_name: :name,
+ column_expression: project_calculated_column_expression,
+ order_expression: project_calculated_column_expression.asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ end
+
+ describe '#order_direction' do
+ context 'inferring order_direction from order_expression' do
+ it { expect(project_name_column).to be_ascending_order }
+ it { expect(project_name_column).not_to be_descending_order }
+
+ it { expect(project_name_lower_column).to be_descending_order }
+ it { expect(project_name_lower_column).not_to be_ascending_order }
+
+ it { expect(project_calculated_column).to be_ascending_order }
+ it { expect(project_calculated_column).not_to be_descending_order }
+
+      it 'raises error when order direction cannot be inferred' do
+ expect do
+ described_class.new(
+ attribute_name: :name,
+ column_expression: Project.arel_table[:name],
+ order_expression: 'name asc',
+ reversed_order_expression: 'name desc',
+ nullable: :not_nullable,
+ distinct: true
+ )
+ end.to raise_error(RuntimeError, /Invalid or missing `order_direction`/)
+ end
+
+ it 'does not raise error when order direction is explicitly given' do
+ column_order_definition = described_class.new(
+ attribute_name: :name,
+ column_expression: Project.arel_table[:name],
+ order_expression: 'name asc',
+ reversed_order_expression: 'name desc',
+ order_direction: :asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+
+ expect(column_order_definition).to be_ascending_order
+ end
+ end
+ end
+
+ describe '#column_expression' do
+ context 'inferring column_expression from order_expression' do
+ it 'infers the correct column expression' do
+ column_order_definition = described_class.new(attribute_name: :name, order_expression: Project.arel_table[:name].asc)
+
+ expect(column_order_definition.column_expression).to eq(Project.arel_table[:name])
+ end
+
+ it 'raises error when raw string is given as order expression' do
+ expect do
+ described_class.new(attribute_name: :name, order_expression: 'name DESC')
+ end.to raise_error(RuntimeError, /Couldn't calculate the column expression. Please pass an ARel node/)
+ end
+ end
+ end
+
+ describe '#reversed_order_expression' do
+ it 'raises error when order cannot be reversed automatically' do
+ expect do
+ described_class.new(
+ attribute_name: :name,
+ column_expression: Project.arel_table[:name],
+ order_expression: 'name asc',
+ order_direction: :asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ end.to raise_error(RuntimeError, /Couldn't determine reversed order/)
+ end
+ end
+
+ describe '#reverse' do
+ it { expect(project_name_column.reverse.order_expression).to eq(Project.arel_table[:name].desc) }
+ it { expect(project_name_column.reverse).to be_descending_order }
+
+ it { expect(project_calculated_column.reverse.order_expression).to eq(project_calculated_column_expression.desc) }
+ it { expect(project_calculated_column.reverse).to be_descending_order }
+
+ context 'when reversed_order_expression is given' do
+ it 'uses the given expression' do
+ column_order_definition = described_class.new(
+ attribute_name: :name,
+ column_expression: Project.arel_table[:name],
+ order_expression: 'name asc',
+ reversed_order_expression: 'name desc',
+ order_direction: :asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+
+ expect(column_order_definition.reverse.order_expression).to eq('name desc')
+ end
+ end
+ end
+
+ describe '#nullable' do
+ context 'when the column is nullable' do
+ let(:nulls_last_order) do
+ described_class.new(
+ attribute_name: :name,
+ column_expression: Project.arel_table[:name],
+ order_expression: Gitlab::Database.nulls_last_order('merge_request_metrics.merged_at', :desc),
+ reversed_order_expression: Gitlab::Database.nulls_first_order('merge_request_metrics.merged_at', :asc),
+ order_direction: :desc,
+ nullable: :nulls_last, # null values are always last
+ distinct: false
+ )
+ end
+
+ it 'requires the position of the null values in the result' do
+ expect(nulls_last_order).to be_nulls_last
+ end
+
+ it 'reverses nullable correctly' do
+ expect(nulls_last_order.reverse).to be_nulls_first
+ end
+
+ it 'raises error when invalid nullable value is given' do
+ expect do
+ described_class.new(
+ attribute_name: :name,
+ column_expression: Project.arel_table[:name],
+ order_expression: Gitlab::Database.nulls_last_order('merge_request_metrics.merged_at', :desc),
+ reversed_order_expression: Gitlab::Database.nulls_first_order('merge_request_metrics.merged_at', :asc),
+ order_direction: :desc,
+ nullable: true,
+ distinct: false
+ )
+ end.to raise_error(RuntimeError, /Invalid `nullable` is given/)
+ end
+
+ it 'raises error when the column is nullable and distinct' do
+ expect do
+ described_class.new(
+ attribute_name: :name,
+ column_expression: Project.arel_table[:name],
+ order_expression: Gitlab::Database.nulls_last_order('merge_request_metrics.merged_at', :desc),
+ reversed_order_expression: Gitlab::Database.nulls_first_order('merge_request_metrics.merged_at', :asc),
+ order_direction: :desc,
+ nullable: :nulls_last,
+ distinct: true
+ )
+ end.to raise_error(RuntimeError, /Invalid column definition/)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/pagination/keyset/order_spec.rb b/spec/lib/gitlab/pagination/keyset/order_spec.rb
new file mode 100644
index 00000000000..665f790ee47
--- /dev/null
+++ b/spec/lib/gitlab/pagination/keyset/order_spec.rb
@@ -0,0 +1,420 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pagination::Keyset::Order do
+ let(:table) { Arel::Table.new(:my_table) }
+ let(:order) { nil }
+
+ def run_query(query)
+ ActiveRecord::Base.connection.execute(query).to_a
+ end
+
+ def build_query(order:, where_conditions: nil, limit: nil)
+ <<-SQL
+ SELECT id, year, month
+ FROM (#{table_data}) my_table (id, year, month)
+ WHERE #{where_conditions || '1=1'}
+ ORDER BY #{order}
+ LIMIT #{limit || 999};
+ SQL
+ end
+
+ def iterate_and_collect(order:, page_size:, where_conditions: nil)
+ all_items = []
+
+ loop do
+ paginated_items = run_query(build_query(order: order, where_conditions: where_conditions, limit: page_size))
+ break if paginated_items.empty?
+
+ all_items.concat(paginated_items)
+ last_item = paginated_items.last
+ cursor_attributes = order.cursor_attributes_for_node(last_item)
+ where_conditions = order.build_where_values(cursor_attributes).to_sql
+ end
+
+ all_items
+ end
+
+ subject do
+ run_query(build_query(order: order))
+ end
+
+ shared_examples 'order examples' do
+ it { expect(subject).to eq(expected) }
+
+ context 'when paginating forwards' do
+ subject { iterate_and_collect(order: order, page_size: 2) }
+
+ it { expect(subject).to eq(expected) }
+
+ context 'with different page size' do
+ subject { iterate_and_collect(order: order, page_size: 5) }
+
+ it { expect(subject).to eq(expected) }
+ end
+ end
+
+ context 'when paginating backwards' do
+ subject do
+ last_item = expected.last
+ cursor_attributes = order.cursor_attributes_for_node(last_item)
+ where_conditions = order.reversed_order.build_where_values(cursor_attributes)
+
+ iterate_and_collect(order: order.reversed_order, page_size: 2, where_conditions: where_conditions.to_sql)
+ end
+
+ it do
+ expect(subject).to eq(expected.reverse[1..-1]) # removing one item because we used it to calculate cursor data for the "last" page in subject
+ end
+ end
+ end
+
+ context 'when ordering by a distinct column' do
+ let(:table_data) do
+ <<-SQL
+ VALUES (1, 0, 0),
+ (2, 0, 0),
+ (3, 0, 0),
+ (4, 0, 0),
+ (5, 0, 0),
+ (6, 0, 0),
+ (7, 0, 0),
+ (8, 0, 0),
+ (9, 0, 0)
+ SQL
+ end
+
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
+
+ let(:expected) do
+ [
+ { "id" => 9, "year" => 0, "month" => 0 },
+ { "id" => 8, "year" => 0, "month" => 0 },
+ { "id" => 7, "year" => 0, "month" => 0 },
+ { "id" => 6, "year" => 0, "month" => 0 },
+ { "id" => 5, "year" => 0, "month" => 0 },
+ { "id" => 4, "year" => 0, "month" => 0 },
+ { "id" => 3, "year" => 0, "month" => 0 },
+ { "id" => 2, "year" => 0, "month" => 0 },
+ { "id" => 1, "year" => 0, "month" => 0 }
+ ]
+ end
+
+ it_behaves_like 'order examples'
+ end
+
+ context 'when ordering by two non-nullable columns and a distinct column' do
+ let(:table_data) do
+ <<-SQL
+ VALUES (1, 2010, 2),
+ (2, 2011, 1),
+ (3, 2009, 2),
+ (4, 2011, 1),
+ (5, 2011, 1),
+ (6, 2009, 2),
+ (7, 2010, 3),
+ (8, 2012, 4),
+ (9, 2013, 5)
+ SQL
+ end
+
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: table['year'].asc,
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'month',
+ column_expression: table['month'],
+ order_expression: table['month'].asc,
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
+
+ let(:expected) do
+ [
+ { 'year' => 2009, 'month' => 2, 'id' => 3 },
+ { 'year' => 2009, 'month' => 2, 'id' => 6 },
+ { 'year' => 2010, 'month' => 2, 'id' => 1 },
+ { 'year' => 2010, 'month' => 3, 'id' => 7 },
+ { 'year' => 2011, 'month' => 1, 'id' => 2 },
+ { 'year' => 2011, 'month' => 1, 'id' => 4 },
+ { 'year' => 2011, 'month' => 1, 'id' => 5 },
+ { 'year' => 2012, 'month' => 4, 'id' => 8 },
+ { 'year' => 2013, 'month' => 5, 'id' => 9 }
+ ]
+ end
+
+ it_behaves_like 'order examples'
+ end
+
+ context 'when ordering by nullable columns and a distinct column' do
+ let(:table_data) do
+ <<-SQL
+ VALUES (1, 2010, null),
+ (2, 2011, 2),
+ (3, null, null),
+ (4, null, 5),
+ (5, 2010, null),
+ (6, 2011, 2),
+ (7, 2010, 2),
+ (8, 2012, 2),
+ (9, null, 2),
+ (10, null, null),
+ (11, 2010, 2)
+ SQL
+ end
+
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: Gitlab::Database.nulls_last_order('year', :asc),
+ reversed_order_expression: Gitlab::Database.nulls_first_order('year', :desc),
+ order_direction: :asc,
+ nullable: :nulls_last,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'month',
+ column_expression: table['month'],
+ order_expression: Gitlab::Database.nulls_last_order('month', :asc),
+ reversed_order_expression: Gitlab::Database.nulls_first_order('month', :desc),
+ order_direction: :asc,
+ nullable: :nulls_last,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
+
+ let(:expected) do
+ [
+ { "id" => 7, "year" => 2010, "month" => 2 },
+ { "id" => 11, "year" => 2010, "month" => 2 },
+ { "id" => 1, "year" => 2010, "month" => nil },
+ { "id" => 5, "year" => 2010, "month" => nil },
+ { "id" => 2, "year" => 2011, "month" => 2 },
+ { "id" => 6, "year" => 2011, "month" => 2 },
+ { "id" => 8, "year" => 2012, "month" => 2 },
+ { "id" => 9, "year" => nil, "month" => 2 },
+ { "id" => 4, "year" => nil, "month" => 5 },
+ { "id" => 3, "year" => nil, "month" => nil },
+ { "id" => 10, "year" => nil, "month" => nil }
+ ]
+ end
+
+ it_behaves_like 'order examples'
+ end
+
+ context 'when ordering by nullable columns with nulls first ordering and a distinct column' do
+ let(:table_data) do
+ <<-SQL
+ VALUES (1, 2010, null),
+ (2, 2011, 2),
+ (3, null, null),
+ (4, null, 5),
+ (5, 2010, null),
+ (6, 2011, 2),
+ (7, 2010, 2),
+ (8, 2012, 2),
+ (9, null, 2),
+ (10, null, null),
+ (11, 2010, 2)
+ SQL
+ end
+
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: Gitlab::Database.nulls_first_order('year', :asc),
+ reversed_order_expression: Gitlab::Database.nulls_last_order('year', :desc),
+ order_direction: :asc,
+ nullable: :nulls_first,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'month',
+ column_expression: table['month'],
+ order_expression: Gitlab::Database.nulls_first_order('month', :asc),
+ order_direction: :asc,
+ reversed_order_expression: Gitlab::Database.nulls_last_order('month', :desc),
+ nullable: :nulls_first,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
+
+ let(:expected) do
+ [
+ { "id" => 3, "year" => nil, "month" => nil },
+ { "id" => 10, "year" => nil, "month" => nil },
+ { "id" => 9, "year" => nil, "month" => 2 },
+ { "id" => 4, "year" => nil, "month" => 5 },
+ { "id" => 1, "year" => 2010, "month" => nil },
+ { "id" => 5, "year" => 2010, "month" => nil },
+ { "id" => 7, "year" => 2010, "month" => 2 },
+ { "id" => 11, "year" => 2010, "month" => 2 },
+ { "id" => 2, "year" => 2011, "month" => 2 },
+ { "id" => 6, "year" => 2011, "month" => 2 },
+ { "id" => 8, "year" => 2012, "month" => 2 }
+ ]
+ end
+
+ it_behaves_like 'order examples'
+ end
+
+ context 'when ordering by non-nullable columns with mixed directions and a distinct column' do
+ let(:table_data) do
+ <<-SQL
+ VALUES (1, 2010, 0),
+ (2, 2011, 0),
+ (3, 2010, 0),
+ (4, 2010, 0),
+ (5, 2012, 0),
+ (6, 2012, 0),
+ (7, 2010, 0),
+ (8, 2011, 0),
+ (9, 2013, 0),
+ (10, 2014, 0),
+ (11, 2013, 0)
+ SQL
+ end
+
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: table['year'].asc,
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
+
+ let(:expected) do
+ [
+ { "id" => 7, "year" => 2010, "month" => 0 },
+ { "id" => 4, "year" => 2010, "month" => 0 },
+ { "id" => 3, "year" => 2010, "month" => 0 },
+ { "id" => 1, "year" => 2010, "month" => 0 },
+ { "id" => 8, "year" => 2011, "month" => 0 },
+ { "id" => 2, "year" => 2011, "month" => 0 },
+ { "id" => 6, "year" => 2012, "month" => 0 },
+ { "id" => 5, "year" => 2012, "month" => 0 },
+ { "id" => 11, "year" => 2013, "month" => 0 },
+ { "id" => 9, "year" => 2013, "month" => 0 },
+ { "id" => 10, "year" => 2014, "month" => 0 }
+ ]
+ end
+
+ it 'takes out a slice between two cursors' do
+ after_cursor = { "id" => 8, "year" => 2011 }
+ before_cursor = { "id" => 5, "year" => 2012 }
+
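+ # Conditions built from the forward order select rows strictly after the
+ # "after" cursor, while conditions from the reversed order select rows
+ # strictly before the "before" cursor; ANDing both keeps only the rows
+ # between the two cursors.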
+ after_conditions = order.build_where_values(after_cursor)
+ reversed = order.reversed_order
+ before_conditions = reversed.build_where_values(before_cursor)
+
+ query = build_query(order: order, where_conditions: "(#{after_conditions.to_sql}) AND (#{before_conditions.to_sql})", limit: 100)
+
+ expect(run_query(query)).to eq([
+ { "id" => 2, "year" => 2011, "month" => 0 },
+ { "id" => 6, "year" => 2012, "month" => 0 }
+ ])
+ end
+ end
+
+ context 'when the passed cursor values do not match with the order definition' do
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: table['year'].asc,
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
+
+ context 'when values are missing' do
+ it 'raises error' do
+ expect { order.build_where_values(id: 1) }.to raise_error(/Missing items: year/)
+ end
+ end
+
+ context 'when extra values are present' do
+ it 'raises error' do
+ expect { order.build_where_values(id: 1, year: 2, foo: 3) }.to raise_error(/Extra items: foo/)
+ end
+ end
+
+ context 'when values are missing and extra values are present' do
+ it 'raises error' do
+ expect { order.build_where_values(year: 2, foo: 3) }.to raise_error(/Extra items: foo\. Missing items: id/)
+ end
+ end
+
+ context 'when no values are passed' do
+ it 'returns nil' do
+ expect(order.build_where_values({})).to eq(nil)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/query_limiting/active_support_subscriber_spec.rb b/spec/lib/gitlab/query_limiting/active_support_subscriber_spec.rb
index a8dd482c7b8..1ab8e22d6d1 100644
--- a/spec/lib/gitlab/query_limiting/active_support_subscriber_spec.rb
+++ b/spec/lib/gitlab/query_limiting/active_support_subscriber_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::QueryLimiting::ActiveSupportSubscriber do
- let(:transaction) { instance_double(Gitlab::QueryLimiting::Transaction, increment: true) }
+ let(:transaction) { instance_double(Gitlab::QueryLimiting::Transaction, executed_sql: true, increment: true) }
before do
allow(Gitlab::QueryLimiting::Transaction)
@@ -18,6 +18,11 @@ RSpec.describe Gitlab::QueryLimiting::ActiveSupportSubscriber do
expect(transaction)
.to have_received(:increment)
.once
+
+ expect(transaction)
+ .to have_received(:executed_sql)
+ .once
+ .with(String)
end
context 'when the query is actually a rails cache hit' do
@@ -30,6 +35,11 @@ RSpec.describe Gitlab::QueryLimiting::ActiveSupportSubscriber do
expect(transaction)
.to have_received(:increment)
.once
+
+ expect(transaction)
+ .to have_received(:executed_sql)
+ .once
+ .with(String)
end
end
end
diff --git a/spec/lib/gitlab/query_limiting/transaction_spec.rb b/spec/lib/gitlab/query_limiting/transaction_spec.rb
index 331c3c1d8b0..40804736b86 100644
--- a/spec/lib/gitlab/query_limiting/transaction_spec.rb
+++ b/spec/lib/gitlab/query_limiting/transaction_spec.rb
@@ -118,6 +118,30 @@ RSpec.describe Gitlab::QueryLimiting::Transaction do
)
end
+ it 'includes a list of executed queries' do
+ transaction = described_class.new
+ transaction.count = max = described_class::THRESHOLD
+ %w[foo bar baz].each { |sql| transaction.executed_sql(sql) }
+
+ message = transaction.error_message
+
+ expect(message).to start_with(
+ "Too many SQL queries were executed: a maximum of #{max} " \
+ "is allowed but #{max} SQL queries were executed"
+ )
+
+ expect(message).to include("0: foo", "1: bar", "2: baz")
+ end
+
+ it 'indicates if the log is truncated' do
+ transaction = described_class.new
+ transaction.count = described_class::THRESHOLD * 2
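+ # No queries were recorded via executed_sql, yet the count exceeds the
+ # threshold, so the message marks the log as truncated with an ellipsis.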
+
+ message = transaction.error_message
+
+ expect(message).to end_with('...')
+ end
+
it 'includes the action name in the error message when present' do
transaction = described_class.new
transaction.count = max = described_class::THRESHOLD
diff --git a/spec/lib/gitlab/query_limiting_spec.rb b/spec/lib/gitlab/query_limiting_spec.rb
index 0fcd865567d..4f70c65adca 100644
--- a/spec/lib/gitlab/query_limiting_spec.rb
+++ b/spec/lib/gitlab/query_limiting_spec.rb
@@ -63,6 +63,20 @@ RSpec.describe Gitlab::QueryLimiting do
expect(transaction.count).to eq(before)
end
+
+ it 'whitelists when enabled' do
+ described_class.whitelist('https://example.com')
+
+ expect(transaction.whitelisted).to eq(true)
+ end
+
+ it 'does not whitelist when disabled' do
+ allow(described_class).to receive(:enable?).and_return(false)
+
+ described_class.whitelist('https://example.com')
+
+ expect(transaction.whitelisted).to eq(false)
+ end
end
end
end
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index 776ca81a338..1aca3dae41b 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -367,6 +367,35 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('%2e%2e%2f1.2.3') }
end
+ describe '.npm_package_name_regex' do
+ subject { described_class.npm_package_name_regex }
+
+ it { is_expected.to match('@scope/package') }
+ it { is_expected.to match('unscoped-package') }
+ it { is_expected.not_to match('@first-scope@second-scope/package') }
+ it { is_expected.not_to match('scope-without-at-symbol/package') }
+ it { is_expected.not_to match('@not-a-scoped-package') }
+ it { is_expected.not_to match('@scope/sub/package') }
+ it { is_expected.not_to match('@scope/../../package') }
+ it { is_expected.not_to match('@scope%2e%2e%2fpackage') }
+ it { is_expected.not_to match('@%2e%2e%2f/package') }
+
+ context 'capturing group' do
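+ # The regex's single capture group holds the scope (without the leading @)
+ # for valid scoped packages and is nil otherwise.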
+ [
+ ['@scope/package', 'scope'],
+ ['unscoped-package', nil],
+ ['@not-a-scoped-package', nil],
+ ['@scope/sub/package', nil],
+ ['@inv@lid-scope/package', nil]
+ ].each do |package_name, extracted_scope_name|
+ it "extracts the scope name for #{package_name}" do
+ match = package_name.match(described_class.npm_package_name_regex)
+ expect(match&.captures&.first).to eq(extracted_scope_name)
+ end
+ end
+ end
+ end
+
describe '.nuget_version_regex' do
subject { described_class.nuget_version_regex }
diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index e58e41d3e4f..71f4f2a3b64 100644
--- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -2,6 +2,7 @@
require 'spec_helper'
+# rubocop: disable RSpec/MultipleMemoizedHelpers
RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
context "with worker attribution" do
subject { described_class.new }
@@ -112,6 +113,14 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
expect { |b| subject.call(worker, job, :test, &b) }.to yield_control.once
end
+ it 'calls BackgroundTransaction' do
+ expect_next_instance_of(Gitlab::Metrics::BackgroundTransaction) do |instance|
+ expect(instance).to receive(:run)
+ end
+
+ subject.call(worker, job, :test) {}
+ end
+
it 'sets queue specific metrics' do
expect(running_jobs_metric).to receive(:increment).with(labels, -1)
expect(running_jobs_metric).to receive(:increment).with(labels, 1)
@@ -287,3 +296,4 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
end
end
end
+# rubocop: enable RSpec/MultipleMemoizedHelpers
diff --git a/spec/lib/gitlab/sidekiq_middleware/size_limiter/client_spec.rb b/spec/lib/gitlab/sidekiq_middleware/size_limiter/client_spec.rb
new file mode 100644
index 00000000000..df8e47d60f0
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/size_limiter/client_spec.rb
@@ -0,0 +1,99 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Client, :clean_gitlab_redis_queues do
+ let(:worker_class) do
+ Class.new do
+ def self.name
+ "TestSizeLimiterWorker"
+ end
+
+ include ApplicationWorker
+
+ def perform(*args); end
+ end
+ end
+
+ before do
+ stub_const("TestSizeLimiterWorker", worker_class)
+ end
+
+ describe '#call' do
+ context 'when the validator rejects the job' do
+ before do
+ allow(Gitlab::SidekiqMiddleware::SizeLimiter::Validator).to receive(:validate!).and_raise(
+ Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError.new(
+ TestSizeLimiterWorker, 500, 300
+ )
+ )
+ end
+
+ it 'raises an exception when scheduling job with #perform_at' do
+ expect do
+ TestSizeLimiterWorker.perform_at(30.seconds.from_now, 1, 2, 3)
+ end.to raise_error Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError
+ end
+
+ it 'raises an exception when scheduling job with #perform_async' do
+ expect do
+ TestSizeLimiterWorker.perform_async(1, 2, 3)
+ end.to raise_error Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError
+ end
+
+ it 'raises an exception when scheduling job with #perform_in' do
+ expect do
+ TestSizeLimiterWorker.perform_in(3.seconds, 1, 2, 3)
+ end.to raise_error Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError
+ end
+ end
+
+ context 'when the validator validates the job successfully' do
+ before do
+ # Stub validation so the jobs below are scheduled without size checks
+ allow(Gitlab::SidekiqMiddleware::SizeLimiter::Client).to receive(:validate!)
+ end
+
+ it 'schedules the job with #perform_at without raising an exception' do
+ expect do
+ TestSizeLimiterWorker.perform_at(30.seconds.from_now, 1, 2, 3)
+ end.not_to raise_error
+
+ expect(TestSizeLimiterWorker.jobs).to contain_exactly(
+ a_hash_including(
+ "class" => "TestSizeLimiterWorker",
+ "args" => [1, 2, 3],
+ "at" => be_a(Float)
+ )
+ )
+ end
+
+ it 'schedules the job with #perform_async without raising an exception' do
+ expect do
+ TestSizeLimiterWorker.perform_async(1, 2, 3)
+ end.not_to raise_error
+
+ expect(TestSizeLimiterWorker.jobs).to contain_exactly(
+ a_hash_including(
+ "class" => "TestSizeLimiterWorker",
+ "args" => [1, 2, 3]
+ )
+ )
+ end
+
+ it 'schedules the job with #perform_in without raising an exception' do
+ expect do
+ TestSizeLimiterWorker.perform_in(3.seconds, 1, 2, 3)
+ end.not_to raise_error
+
+ expect(TestSizeLimiterWorker.jobs).to contain_exactly(
+ a_hash_including(
+ "class" => "TestSizeLimiterWorker",
+ "args" => [1, 2, 3],
+ "at" => be_a(Float)
+ )
+ )
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/size_limiter/exceed_limit_error_spec.rb b/spec/lib/gitlab/sidekiq_middleware/size_limiter/exceed_limit_error_spec.rb
new file mode 100644
index 00000000000..75b1d9fd87e
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/size_limiter/exceed_limit_error_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError do
+ let(:worker_class) do
+ Class.new do
+ def self.name
+ "TestSizeLimiterWorker"
+ end
+
+ include ApplicationWorker
+
+ def perform(*args); end
+ end
+ end
+
+ before do
+ stub_const("TestSizeLimiterWorker", worker_class)
+ end
+
+ it 'encapsulates worker info' do
+ exception = described_class.new(TestSizeLimiterWorker, 500, 300)
+
+ expect(exception.message).to eql("TestSizeLimiterWorker job exceeds payload size limit (500/300)")
+ expect(exception.worker_class).to eql(TestSizeLimiterWorker)
+ expect(exception.size).to be(500)
+ expect(exception.size_limit).to be(300)
+ expect(exception.sentry_extra_data).to eql(
+ worker_class: 'TestSizeLimiterWorker',
+ size: 500,
+ size_limit: 300
+ )
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb b/spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb
new file mode 100644
index 00000000000..3140686c908
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb
@@ -0,0 +1,253 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do
+ let(:worker_class) do
+ Class.new do
+ def self.name
+ "TestSizeLimiterWorker"
+ end
+
+ include ApplicationWorker
+
+ def perform(*args); end
+ end
+ end
+
+ before do
+ stub_const("TestSizeLimiterWorker", worker_class)
+ end
+
+ describe '#initialize' do
+ context 'when the input mode is valid' do
+ it 'does not log a warning message' do
+ expect(::Sidekiq.logger).not_to receive(:warn)
+
+ described_class.new(TestSizeLimiterWorker, {}, mode: 'track')
+ described_class.new(TestSizeLimiterWorker, {}, mode: 'raise')
+ end
+ end
+
+ context 'when the input mode is invalid' do
+ it 'defaults to track mode and logs a warning message' do
+ expect(::Sidekiq.logger).to receive(:warn).with('Invalid Sidekiq size limiter mode: invalid. Fallback to track mode.')
+
+ validator = described_class.new(TestSizeLimiterWorker, {}, mode: 'invalid')
+
+ expect(validator.mode).to eql('track')
+ end
+ end
+
+ context 'when the input mode is empty' do
+ it 'defaults to track mode' do
+ expect(::Sidekiq.logger).not_to receive(:warn)
+
+ validator = described_class.new(TestSizeLimiterWorker, {})
+
+ expect(validator.mode).to eql('track')
+ end
+ end
+
+ context 'when the size input is valid' do
+ it 'does not log a warning message' do
+ expect(::Sidekiq.logger).not_to receive(:warn)
+
+ described_class.new(TestSizeLimiterWorker, {}, size_limit: 300)
+ described_class.new(TestSizeLimiterWorker, {}, size_limit: 0)
+ end
+ end
+
+ context 'when the size input is invalid' do
+ it 'defaults to 0 and logs a warning message' do
+ expect(::Sidekiq.logger).to receive(:warn).with('Invalid Sidekiq size limiter limit: -1')
+
+ described_class.new(TestSizeLimiterWorker, {}, size_limit: -1)
+ end
+ end
+
+ context 'when the size input is empty' do
+ it 'defaults to 0' do
+ expect(::Sidekiq.logger).not_to receive(:warn)
+
+ validator = described_class.new(TestSizeLimiterWorker, {})
+
+ expect(validator.size_limit).to be(0)
+ end
+ end
+ end
+
+ shared_examples 'validate limit job payload size' do
+ context 'in track mode' do
+ let(:mode) { 'track' }
+
+ context 'when size limit is negative' do
+ let(:size_limit) { -1 }
+
+ it 'does not track jobs' do
+ expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
+
+ validate.call(TestSizeLimiterWorker, { a: 'a' * 300 })
+ end
+
+ it 'does not raise exception' do
+ expect { validate.call(TestSizeLimiterWorker, { a: 'a' * 300 }) }.not_to raise_error
+ end
+ end
+
+ context 'when size limit is 0' do
+ let(:size_limit) { 0 }
+
+ it 'does not track jobs' do
+ expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
+
+ validate.call(TestSizeLimiterWorker, { a: 'a' * 300 })
+ end
+
+ it 'does not raise exception' do
+ expect { validate.call(TestSizeLimiterWorker, { a: 'a' * 300 }) }.not_to raise_error
+ end
+ end
+
+ context 'when job size is bigger than size limit' do
+ let(:size_limit) { 50 }
+
+ it 'tracks job' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ be_a(Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError)
+ )
+
+ validate.call(TestSizeLimiterWorker, { a: 'a' * 100 })
+ end
+
+ it 'does not raise an exception' do
+ expect { validate.call(TestSizeLimiterWorker, { a: 'a' * 300 }) }.not_to raise_error
+ end
+
+ context 'when the worker has big_payload attribute' do
+ before do
+ worker_class.big_payload!
+ end
+
+ it 'does not track jobs' do
+ expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
+
+ validate.call(TestSizeLimiterWorker, { a: 'a' * 300 })
+ validate.call('TestSizeLimiterWorker', { a: 'a' * 300 })
+ end
+
+ it 'does not raise an exception' do
+ expect { validate.call(TestSizeLimiterWorker, { a: 'a' * 300 }) }.not_to raise_error
+ expect { validate.call('TestSizeLimiterWorker', { a: 'a' * 300 }) }.not_to raise_error
+ end
+ end
+ end
+
+ context 'when job size is less than size limit' do
+ let(:size_limit) { 50 }
+
+ it 'does not track job' do
+ expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
+
+ validate.call(TestSizeLimiterWorker, { a: 'a' })
+ end
+
+ it 'does not raise an exception' do
+ expect { validate.call(TestSizeLimiterWorker, { a: 'a' }) }.not_to raise_error
+ end
+ end
+ end
+
+ context 'in raise mode' do
+ let(:mode) { 'raise' }
+
+ context 'when size limit is negative' do
+ let(:size_limit) { -1 }
+
+ it 'does not raise exception' do
+ expect { validate.call(TestSizeLimiterWorker, { a: 'a' * 300 }) }.not_to raise_error
+ end
+ end
+
+ context 'when size limit is 0' do
+ let(:size_limit) { 0 }
+
+ it 'does not raise exception' do
+ expect { validate.call(TestSizeLimiterWorker, { a: 'a' * 300 }) }.not_to raise_error
+ end
+ end
+
+ context 'when job size is bigger than size limit' do
+ let(:size_limit) { 50 }
+
+ it 'raises an exception' do
+ expect do
+ validate.call(TestSizeLimiterWorker, { a: 'a' * 300 })
+ end.to raise_error(
+ Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError,
+ /TestSizeLimiterWorker job exceeds payload size limit/i
+ )
+ end
+
+ context 'when the worker has big_payload attribute' do
+ before do
+ worker_class.big_payload!
+ end
+
+ it 'does not raise an exception' do
+ expect { validate.call(TestSizeLimiterWorker, { a: 'a' * 300 }) }.not_to raise_error
+ expect { validate.call('TestSizeLimiterWorker', { a: 'a' * 300 }) }.not_to raise_error
+ end
+ end
+ end
+
+ context 'when job size is less than size limit' do
+ let(:size_limit) { 50 }
+
+ it 'does not raise an exception' do
+ expect { validate.call(TestSizeLimiterWorker, { a: 'a' }) }.not_to raise_error
+ end
+ end
+ end
+ end
+
+ describe '#validate!' do
+ context 'when calling SizeLimiter.validate!' do
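+ # The class-level validate! relies on the GITLAB_SIDEKIQ_SIZE_LIMITER_*
+ # environment variables stubbed below for its mode and size limit.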
+ let(:validate) { ->(worker_class, job) { described_class.validate!(worker_class, job) } }
+
+ before do
+ stub_env('GITLAB_SIDEKIQ_SIZE_LIMITER_MODE', mode)
+ stub_env('GITLAB_SIDEKIQ_SIZE_LIMITER_LIMIT_BYTES', size_limit)
+ end
+
+ it_behaves_like 'validate limit job payload size'
+ end
+
+ context 'when creating an instance with the related ENV variables' do
+ let(:validate) do
+ ->(worker_class, job) do
+ validator = described_class.new(worker_class, job)
+ validator.validate!
+ end
+ end
+
+ before do
+ stub_env('GITLAB_SIDEKIQ_SIZE_LIMITER_MODE', mode)
+ stub_env('GITLAB_SIDEKIQ_SIZE_LIMITER_LIMIT_BYTES', size_limit)
+ end
+
+ it_behaves_like 'validate limit job payload size'
+ end
+
+ context 'when creating an instance with mode and size limit' do
+ let(:validate) do
+ ->(worker_class, job) do
+ validator = described_class.new(worker_class, job, mode: mode, size_limit: size_limit)
+ validator.validate!
+ end
+ end
+
+ it_behaves_like 'validate limit job payload size'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware_spec.rb b/spec/lib/gitlab/sidekiq_middleware_spec.rb
index b632fc8bad2..755f6004e52 100644
--- a/spec/lib/gitlab/sidekiq_middleware_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware_spec.rb
@@ -177,6 +177,7 @@ RSpec.describe Gitlab::SidekiqMiddleware do
::Gitlab::SidekiqMiddleware::DuplicateJobs::Client,
::Gitlab::SidekiqStatus::ClientMiddleware,
::Gitlab::SidekiqMiddleware::AdminMode::Client,
+ ::Gitlab::SidekiqMiddleware::SizeLimiter::Client,
::Gitlab::SidekiqMiddleware::ClientMetrics
]
end
diff --git a/spec/lib/gitlab/string_range_marker_spec.rb b/spec/lib/gitlab/string_range_marker_spec.rb
index 52fab6e3109..6f63c8e2df4 100644
--- a/spec/lib/gitlab/string_range_marker_spec.rb
+++ b/spec/lib/gitlab/string_range_marker_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::StringRangeMarker do
raw = 'abc <def>'
inline_diffs = [2..5]
- described_class.new(raw, rich).mark(inline_diffs) do |text, left:, right:|
+ described_class.new(raw, rich).mark(inline_diffs) do |text, left:, right:, mode:|
"LEFT#{text}RIGHT".html_safe
end
end
diff --git a/spec/lib/gitlab/string_regex_marker_spec.rb b/spec/lib/gitlab/string_regex_marker_spec.rb
index 2dadd222820..a02be83558c 100644
--- a/spec/lib/gitlab/string_regex_marker_spec.rb
+++ b/spec/lib/gitlab/string_regex_marker_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Gitlab::StringRegexMarker do
let(:rich) { %{<span class="key">"name"</span><span class="punctuation">: </span><span class="value">"AFNetworking"</span>}.html_safe }
subject do
- described_class.new(raw, rich).mark(/"[^"]+":\s*"(?<name>[^"]+)"/, group: :name) do |text, left:, right:|
+ described_class.new(raw, rich).mark(/"[^"]+":\s*"(?<name>[^"]+)"/, group: :name) do |text, left:, right:, mode:|
%{<a href="#">#{text}</a>}.html_safe
end
end
@@ -25,7 +25,7 @@ RSpec.describe Gitlab::StringRegexMarker do
let(:rich) { %{a &lt;b&gt; &lt;c&gt; d}.html_safe }
subject do
- described_class.new(raw, rich).mark(/<[a-z]>/) do |text, left:, right:|
+ described_class.new(raw, rich).mark(/<[a-z]>/) do |text, left:, right:, mode:|
%{<strong>#{text}</strong>}.html_safe
end
end
diff --git a/spec/lib/gitlab/tracking/standard_context_spec.rb b/spec/lib/gitlab/tracking/standard_context_spec.rb
index 7a0a4f0cc46..561edbd38f8 100644
--- a/spec/lib/gitlab/tracking/standard_context_spec.rb
+++ b/spec/lib/gitlab/tracking/standard_context_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Gitlab::Tracking::StandardContext do
context 'staging' do
before do
- allow(Gitlab).to receive(:staging?).and_return(true)
+ stub_config_setting(url: 'https://staging.gitlab.com')
end
include_examples 'contains environment', 'staging'
@@ -30,11 +30,27 @@ RSpec.describe Gitlab::Tracking::StandardContext do
context 'production' do
before do
- allow(Gitlab).to receive(:com_and_canary?).and_return(true)
+ stub_config_setting(url: 'https://gitlab.com')
end
include_examples 'contains environment', 'production'
end
+
+ context 'org' do
+ before do
+ stub_config_setting(url: 'https://dev.gitlab.org')
+ end
+
+ include_examples 'contains environment', 'org'
+ end
+
+ context 'other self-managed instance' do
+ before do
+ stub_rails_env('production')
+ end
+
+ include_examples 'contains environment', 'self-managed'
+ end
end
it 'contains source' do
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index 80740c8112e..ac052bd7a80 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -61,8 +61,8 @@ RSpec.describe Gitlab::Tracking do
expect(args[:property]).to eq('property')
expect(args[:value]).to eq(1.5)
expect(args[:context].length).to eq(2)
- expect(args[:context].first).to eq(other_context)
- expect(args[:context].last.to_json[:schema]).to eq(Gitlab::Tracking::StandardContext::GITLAB_STANDARD_SCHEMA_URL)
+ expect(args[:context].first.to_json[:schema]).to eq(Gitlab::Tracking::StandardContext::GITLAB_STANDARD_SCHEMA_URL)
+ expect(args[:context].last).to eq(other_context)
end
described_class.event('category', 'action', label: 'label', property: 'property', value: 1.5,
diff --git a/spec/lib/gitlab/tree_summary_spec.rb b/spec/lib/gitlab/tree_summary_spec.rb
index d2c5844b0fa..661ef507a82 100644
--- a/spec/lib/gitlab/tree_summary_spec.rb
+++ b/spec/lib/gitlab/tree_summary_spec.rb
@@ -57,14 +57,12 @@ RSpec.describe Gitlab::TreeSummary do
context 'with caching', :use_clean_rails_memory_store_caching do
subject { Rails.cache.fetch(key) }
- before do
- summarized
- end
-
context 'Repository tree cache' do
let(:key) { ['projects', project.id, 'content', commit.id, path] }
it 'creates a cache for repository content' do
+ summarized
+
is_expected.to eq([{ file_name: 'a.txt', type: :blob }])
end
end
@@ -72,11 +70,34 @@ RSpec.describe Gitlab::TreeSummary do
context 'Commits list cache' do
let(:offset) { 0 }
let(:limit) { 25 }
- let(:key) { ['projects', project.id, 'last_commits_list', commit.id, path, offset, limit] }
+ let(:key) { ['projects', project.id, 'last_commits', commit.id, path, offset, limit] }
it 'creates a cache for commits list' do
+ summarized
+
is_expected.to eq('a.txt' => commit.to_hash)
end
+
+ context 'when commit has a very long message' do
+ before do
+ repo.create_file(
+ project.creator,
+ 'long.txt',
+ '',
+ message: message,
+ branch_name: project.default_branch_or_master
+ )
+ end
+
+ let(:message) { 'a' * 1025 }
+ let(:expected_message) { message[0...1021] + '...' }
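+ # 1021 characters plus the three-character ellipsis add up to the
+ # 1 kilobyte (1024 byte) limit.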
+
+ it 'truncates commit message to 1 kilobyte' do
+ summarized
+
+ is_expected.to include('long.txt' => a_hash_including(message: expected_message))
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/url_blocker_spec.rb b/spec/lib/gitlab/url_blocker_spec.rb
index fa01d4e48df..e076815c4f6 100644
--- a/spec/lib/gitlab/url_blocker_spec.rb
+++ b/spec/lib/gitlab/url_blocker_spec.rb
@@ -166,7 +166,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
let(:ports) { Project::VALID_IMPORT_PORTS }
it 'allows imports from configured web host and port' do
- import_url = "http://#{Gitlab.config.gitlab.host}:#{Gitlab.config.gitlab.port}/t.git"
+ import_url = "http://#{Gitlab.host_with_port}/t.git"
expect(described_class.blocked_url?(import_url)).to be false
end
@@ -190,7 +190,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
end
it 'returns true for bad protocol on configured web/SSH host and ports' do
- web_url = "javascript://#{Gitlab.config.gitlab.host}:#{Gitlab.config.gitlab.port}/t.git%0aalert(1)"
+ web_url = "javascript://#{Gitlab.host_with_port}/t.git%0aalert(1)"
expect(described_class.blocked_url?(web_url)).to be true
ssh_url = "javascript://#{Gitlab.config.gitlab_shell.ssh_host}:#{Gitlab.config.gitlab_shell.ssh_port}/t.git%0aalert(1)"
diff --git a/spec/lib/gitlab/usage/docs/renderer_spec.rb b/spec/lib/gitlab/usage/docs/renderer_spec.rb
index 0677aa2d9d7..f3b83a4a4b3 100644
--- a/spec/lib/gitlab/usage/docs/renderer_spec.rb
+++ b/spec/lib/gitlab/usage/docs/renderer_spec.rb
@@ -2,19 +2,21 @@
require 'spec_helper'
+CODE_REGEX = %r{<code>(.*)</code>}.freeze
+
RSpec.describe Gitlab::Usage::Docs::Renderer do
describe 'contents' do
let(:dictionary_path) { Gitlab::Usage::Docs::Renderer::DICTIONARY_PATH }
- let(:items) { Gitlab::Usage::MetricDefinition.definitions }
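+ # Only the first ten metric definitions are rendered, which keeps the
+ # generated dictionary small.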
+ let(:items) { Gitlab::Usage::MetricDefinition.definitions.first(10).to_h }
it 'generates dictionary for given items' do
generated_dictionary = described_class.new(items).contents
+
generated_dictionary_keys = RDoc::Markdown
.parse(generated_dictionary)
.table_of_contents
- .select { |metric_doc| metric_doc.level == 2 && !metric_doc.text.start_with?('info:') }
- .map(&:text)
- .map { |text| text.sub('<code>', '').sub('</code>', '') }
+ .select { |metric_doc| metric_doc.level == 3 }
+ .map { |item| item.text.match(CODE_REGEX)&.captures&.first }
expect(generated_dictionary_keys).to match_array(items.keys)
end
diff --git a/spec/lib/gitlab/usage/docs/value_formatter_spec.rb b/spec/lib/gitlab/usage/docs/value_formatter_spec.rb
index 7002c76a7cf..f21656df894 100644
--- a/spec/lib/gitlab/usage/docs/value_formatter_spec.rb
+++ b/spec/lib/gitlab/usage/docs/value_formatter_spec.rb
@@ -10,11 +10,11 @@ RSpec.describe Gitlab::Usage::Docs::ValueFormatter do
:data_source | 'redis' | 'Redis'
:data_source | 'ruby' | 'Ruby'
:introduced_by_url | 'http://test.com' | '[Introduced by](http://test.com)'
- :tier | %w(gold premium) | 'gold, premium'
- :distribution | %w(ce ee) | 'ce, ee'
+ :tier | %w(gold premium) | ' `gold`, `premium`'
+ :distribution | %w(ce ee) | ' `ce`, `ee`'
:key_path | 'key.path' | '**`key.path`**'
:milestone | '13.4' | '13.4'
- :status | 'data_available' | 'data_available'
+ :status | 'data_available' | '`data_available`'
end
with_them do
diff --git a/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb b/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb
index 5469ded18f9..7d8e3056384 100644
--- a/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb
@@ -9,10 +9,50 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi
let(:entity4) { '8b9a2671-2abf-4bec-a682-22f6a8f7bf31' }
let(:end_date) { Date.current }
let(:sources) { Gitlab::Usage::Metrics::Aggregates::Sources }
+ let(:namespace) { described_class.to_s.deconstantize.constantize }
let_it_be(:recorded_at) { Time.current.to_i }
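+ # Test helper building an aggregated metric definition hash; source defaults
+ # to "redis", operator to "OR", and nil entries (such as an absent
+ # feature_flag) are compacted away.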
+ def aggregated_metric(name:, time_frame:, source: "redis", events: %w[event1 event2 event3], operator: "OR", feature_flag: nil)
+ {
+ name: name,
+ source: source,
+ events: events,
+ operator: operator,
+ time_frame: time_frame,
+ feature_flag: feature_flag
+ }.compact.with_indifferent_access
+ end
+
context 'aggregated_metrics_data' do
+ shared_examples 'db sourced aggregated metrics without database_sourced_aggregated_metrics feature' do
+ before do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:aggregated_metrics).and_return(aggregated_metrics)
+ end
+ end
+
+ context 'with disabled database_sourced_aggregated_metrics feature flag' do
+ before do
+ stub_feature_flags(database_sourced_aggregated_metrics: false)
+ end
+
+ let(:aggregated_metrics) do
+ [
+ aggregated_metric(name: "gmau_2", source: "database", time_frame: time_frame)
+ ]
+ end
+
+ it 'skips database sourced metrics', :aggregate_failures do
+ results = {}
+ params = { start_date: start_date, end_date: end_date, recorded_at: recorded_at }
+
+ expect(sources::PostgresHll).not_to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2 event3]))
+ expect(aggregated_metrics_data).to eq(results)
+ end
+ end
+ end
+
shared_examples 'aggregated_metrics_data' do
context 'no aggregated metric is defined' do
it 'returns empty hash' do
@@ -31,37 +71,13 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi
end
end
- context 'with disabled database_sourced_aggregated_metrics feature flag' do
- before do
- stub_feature_flags(database_sourced_aggregated_metrics: false)
- end
-
- let(:aggregated_metrics) do
- [
- { name: 'gmau_1', source: 'redis', events: %w[event3 event5], operator: "OR" },
- { name: 'gmau_2', source: 'database', events: %w[event1 event2 event3], operator: "OR" }
- ].map(&:with_indifferent_access)
- end
-
- it 'skips database sourced metrics', :aggregate_failures do
- results = {
- 'gmau_1' => 5
- }
-
- params = { start_date: start_date, end_date: end_date, recorded_at: recorded_at }
-
- expect(sources::RedisHll).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event3 event5])).and_return(5)
- expect(sources::PostgresHll).not_to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2 event3]))
- expect(aggregated_metrics_data).to eq(results)
- end
- end
-
context 'with AND operator' do
let(:aggregated_metrics) do
+ params = { source: datasource, operator: "AND", time_frame: time_frame }
[
- { name: 'gmau_1', source: 'redis', events: %w[event3 event5], operator: "AND" },
- { name: 'gmau_2', source: 'database', events: %w[event1 event2 event3], operator: "AND" }
- ].map(&:with_indifferent_access)
+ aggregated_metric(**params.merge(name: "gmau_1", events: %w[event3 event5])),
+ aggregated_metric(**params.merge(name: "gmau_2"))
+ ]
end
it 'returns the number of unique events recorded for every metric in aggregate', :aggregate_failures do
@@ -73,30 +89,30 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi
# gmau_1 data is as follow
# |A| => 4
- expect(sources::RedisHll).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event3')).and_return(4)
+ expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event3')).and_return(4)
# |B| => 6
- expect(sources::RedisHll).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event5')).and_return(6)
+ expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event5')).and_return(6)
# |A + B| => 8
- expect(sources::RedisHll).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event3 event5])).and_return(8)
+ expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event3 event5])).and_return(8)
# Exclusion inclusion principle formula to calculate intersection of 2 sets
# |A & B| = (|A| + |B|) - |A + B| => (4 + 6) - 8 => 2
# gmau_2 data is as follow:
# |A| => 2
- expect(sources::PostgresHll).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event1')).and_return(2)
+ expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event1')).and_return(2)
# |B| => 3
- expect(sources::PostgresHll).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event2')).and_return(3)
+ expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event2')).and_return(3)
# |C| => 5
- expect(sources::PostgresHll).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event3')).and_return(5)
+ expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event3')).and_return(5)
# |A + B| => 4 therefore |A & B| = (|A| + |B|) - |A + B| => 2 + 3 - 4 => 1
- expect(sources::PostgresHll).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2])).and_return(4)
+ expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2])).and_return(4)
# |A + C| => 6 therefore |A & C| = (|A| + |C|) - |A + C| => 2 + 5 - 6 => 1
- expect(sources::PostgresHll).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event3])).and_return(6)
+ expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event3])).and_return(6)
# |B + C| => 7 therefore |B & C| = (|B| + |C|) - |B + C| => 3 + 5 - 7 => 1
- expect(sources::PostgresHll).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event2 event3])).and_return(7)
+ expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event2 event3])).and_return(7)
# |A + B + C| => 8
- expect(sources::PostgresHll).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2 event3])).and_return(8)
+ expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2 event3])).and_return(8)
# Exclusion inclusion principle formula to calculate intersection of 3 sets
# |A & B & C| = (|A & B| + |A & C| + |B & C|) - (|A| + |B| + |C|) + |A + B + C|
# (1 + 1 + 1) - (2 + 3 + 5) + 8 => 1
@@ -108,20 +124,17 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi
context 'with OR operator' do
let(:aggregated_metrics) do
[
- { name: 'gmau_1', source: 'redis', events: %w[event3 event5], operator: "OR" },
- { name: 'gmau_2', source: 'database', events: %w[event1 event2 event3], operator: "OR" }
- ].map(&:with_indifferent_access)
+ aggregated_metric(name: "gmau_1", source: datasource, time_frame: time_frame, operator: "OR")
+ ]
end
it 'returns the number of unique events occurred for any metric in aggregate', :aggregate_failures do
results = {
- 'gmau_1' => 5,
- 'gmau_2' => 3
+ 'gmau_1' => 5
}
params = { start_date: start_date, end_date: end_date, recorded_at: recorded_at }
- expect(sources::RedisHll).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event3 event5])).and_return(5)
- expect(sources::PostgresHll).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2 event3])).and_return(3)
+ expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2 event3])).and_return(5)
expect(aggregated_metrics_data).to eq(results)
end
end
@@ -130,21 +143,22 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi
let(:enabled_feature_flag) { 'test_ff_enabled' }
let(:disabled_feature_flag) { 'test_ff_disabled' }
let(:aggregated_metrics) do
+ params = { source: datasource, time_frame: time_frame }
[
# represents stable aggregated metrics that has been fully released
- { name: 'gmau_without_ff', source: 'redis', events: %w[event3_slot event5_slot], operator: "OR" },
+ aggregated_metric(**params.merge(name: "gmau_without_ff")),
# represents new aggregated metric that is under performance testing on gitlab.com
- { name: 'gmau_enabled', source: 'redis', events: %w[event4], operator: "OR", feature_flag: enabled_feature_flag },
+ aggregated_metric(**params.merge(name: "gmau_enabled", feature_flag: enabled_feature_flag)),
# represents aggregated metric that is under development and shouldn't be yet collected even on gitlab.com
- { name: 'gmau_disabled', source: 'redis', events: %w[event4], operator: "OR", feature_flag: disabled_feature_flag }
- ].map(&:with_indifferent_access)
+ aggregated_metric(**params.merge(name: "gmau_disabled", feature_flag: disabled_feature_flag))
+ ]
end
it 'does not calculate data for aggregates with ff turned off' do
skip_feature_flags_yaml_validation
skip_default_enabled_yaml_check
stub_feature_flags(enabled_feature_flag => true, disabled_feature_flag => false)
- allow(sources::RedisHll).to receive(:calculate_metrics_union).and_return(6)
+ allow(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).and_return(6)
expect(aggregated_metrics_data).to eq('gmau_without_ff' => 6, 'gmau_enabled' => 6)
end
@@ -156,31 +170,29 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi
it 'raises error when unknown aggregation operator is used' do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:aggregated_metrics)
- .and_return([{ name: 'gmau_1', source: 'redis', events: %w[event1_slot], operator: "SUM" }])
+ .and_return([aggregated_metric(name: 'gmau_1', source: datasource, operator: "SUM", time_frame: time_frame)])
end
- expect { aggregated_metrics_data }.to raise_error Gitlab::Usage::Metrics::Aggregates::UnknownAggregationOperator
+ expect { aggregated_metrics_data }.to raise_error namespace::UnknownAggregationOperator
end
it 'raises error when unknown aggregation source is used' do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:aggregated_metrics)
- .and_return([{ name: 'gmau_1', source: 'whoami', events: %w[event1_slot], operator: "AND" }])
+ .and_return([aggregated_metric(name: 'gmau_1', source: 'whoami', time_frame: time_frame)])
end
- expect { aggregated_metrics_data }.to raise_error Gitlab::Usage::Metrics::Aggregates::UnknownAggregationSource
+ expect { aggregated_metrics_data }.to raise_error namespace::UnknownAggregationSource
end
- it 're raises Gitlab::UsageDataCounters::HLLRedisCounter::EventError' do
- error = Gitlab::UsageDataCounters::HLLRedisCounter::EventError
- allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:calculate_events_union).and_raise(error)
-
+ it 'raises error when union is missing' do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:aggregated_metrics)
- .and_return([{ name: 'gmau_1', source: 'redis', events: %w[event1_slot], operator: "OR" }])
+ .and_return([aggregated_metric(name: 'gmau_1', source: datasource, time_frame: time_frame)])
end
+ allow(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).and_raise(sources::UnionNotAvailable)
- expect { aggregated_metrics_data }.to raise_error error
+ expect { aggregated_metrics_data }.to raise_error sources::UnionNotAvailable
end
end
@@ -192,7 +204,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi
it 'rescues unknown aggregation operator error' do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:aggregated_metrics)
- .and_return([{ name: 'gmau_1', source: 'redis', events: %w[event1_slot], operator: "SUM" }])
+ .and_return([aggregated_metric(name: 'gmau_1', source: datasource, operator: "SUM", time_frame: time_frame)])
end
expect(aggregated_metrics_data).to eq('gmau_1' => -1)
@@ -201,20 +213,91 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi
it 'rescues unknown aggregation source error' do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:aggregated_metrics)
- .and_return([{ name: 'gmau_1', source: 'whoami', events: %w[event1_slot], operator: "AND" }])
+ .and_return([aggregated_metric(name: 'gmau_1', source: 'whoami', time_frame: time_frame)])
end
expect(aggregated_metrics_data).to eq('gmau_1' => -1)
end
- it 'rescues Gitlab::UsageDataCounters::HLLRedisCounter::EventError' do
- error = Gitlab::UsageDataCounters::HLLRedisCounter::EventError
- allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:calculate_events_union).and_raise(error)
-
+ it 'rescues error when union is missing' do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:aggregated_metrics)
- .and_return([{ name: 'gmau_1', source: 'redis', events: %w[event1_slot], operator: "OR" }])
+ .and_return([aggregated_metric(name: 'gmau_1', source: datasource, time_frame: time_frame)])
end
+ allow(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).and_raise(sources::UnionNotAvailable)
+
+ expect(aggregated_metrics_data).to eq('gmau_1' => -1)
+ end
+ end
+ end
+ end
+
+ shared_examples 'database_sourced_aggregated_metrics' do
+ let(:datasource) { namespace::DATABASE_SOURCE }
+
+ it_behaves_like 'aggregated_metrics_data'
+ end
+
+ shared_examples 'redis_sourced_aggregated_metrics' do
+ let(:datasource) { namespace::REDIS_SOURCE }
+
+ it_behaves_like 'aggregated_metrics_data' do
+ context 'error handling' do
+ let(:aggregated_metrics) { [aggregated_metric(name: 'gmau_1', source: datasource, time_frame: time_frame)] }
+ let(:error) { Gitlab::UsageDataCounters::HLLRedisCounter::EventError }
+
+ before do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:aggregated_metrics).and_return(aggregated_metrics)
+ end
+ allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:calculate_events_union).and_raise(error)
+ end
+
+ context 'development and test environment' do
+ it 're-raises Gitlab::UsageDataCounters::HLLRedisCounter::EventError' do
+ expect { aggregated_metrics_data }.to raise_error error
+ end
+ end
+
+ context 'production' do
+ it 'rescues Gitlab::UsageDataCounters::HLLRedisCounter::EventError' do
+ stub_rails_env('production')
+
+ expect(aggregated_metrics_data).to eq('gmau_1' => -1)
+ end
+ end
+ end
+ end
+ end
+
+ describe '.aggregated_metrics_all_time_data' do
+ subject(:aggregated_metrics_data) { described_class.new(recorded_at).all_time_data }
+
+ let(:start_date) { nil }
+ let(:end_date) { nil }
+ let(:time_frame) { ['all'] }
+
+ it_behaves_like 'database_sourced_aggregated_metrics'
+ it_behaves_like 'db sourced aggregated metrics without database_sourced_aggregated_metrics feature'
+
+ context 'redis sourced aggregated metrics' do
+ let(:aggregated_metrics) { [aggregated_metric(name: 'gmau_1', time_frame: time_frame)] }
+
+ before do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:aggregated_metrics).and_return(aggregated_metrics)
+ end
+ end
+
+ context 'development and test environment' do
+ it 'raises Gitlab::Usage::Metrics::Aggregates::DisallowedAggregationTimeFrame' do
+ expect { aggregated_metrics_data }.to raise_error namespace::DisallowedAggregationTimeFrame
+ end
+ end
+
+ context 'production env' do
+ it 'returns fallback value for unsupported time frame' do
+ stub_rails_env('production')
expect(aggregated_metrics_data).to eq('gmau_1' => -1)
end
@@ -223,7 +306,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi
end
it 'allows for YAML aliases in aggregated metrics configs' do
- expect(YAML).to receive(:safe_load).with(kind_of(String), aliases: true)
+ expect(YAML).to receive(:safe_load).with(kind_of(String), aliases: true).at_least(:once)
described_class.new(recorded_at)
end
@@ -232,32 +315,34 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi
subject(:aggregated_metrics_data) { described_class.new(recorded_at).weekly_data }
let(:start_date) { 7.days.ago.to_date }
+ let(:time_frame) { ['7d'] }
- it_behaves_like 'aggregated_metrics_data'
+ it_behaves_like 'database_sourced_aggregated_metrics'
+ it_behaves_like 'redis_sourced_aggregated_metrics'
+ it_behaves_like 'db sourced aggregated metrics without database_sourced_aggregated_metrics feature'
end
describe '.aggregated_metrics_monthly_data' do
subject(:aggregated_metrics_data) { described_class.new(recorded_at).monthly_data }
let(:start_date) { 4.weeks.ago.to_date }
+ let(:time_frame) { ['28d'] }
- it_behaves_like 'aggregated_metrics_data'
+ it_behaves_like 'database_sourced_aggregated_metrics'
+ it_behaves_like 'redis_sourced_aggregated_metrics'
+ it_behaves_like 'db sourced aggregated metrics without database_sourced_aggregated_metrics feature'
context 'metrics union calls' do
- let(:aggregated_metrics) do
- [
- { name: 'gmau_3', source: 'redis', events: %w[event1_slot event2_slot event3_slot event5_slot], operator: "AND" }
- ].map(&:with_indifferent_access)
- end
-
it 'caches intermediate operations', :aggregate_failures do
+ events = %w[event1 event2 event3 event5]
allow_next_instance_of(described_class) do |instance|
- allow(instance).to receive(:aggregated_metrics).and_return(aggregated_metrics)
+ allow(instance).to receive(:aggregated_metrics)
+ .and_return([aggregated_metric(name: 'gmau_1', events: events, operator: "AND", time_frame: time_frame)])
end
params = { start_date: start_date, end_date: end_date, recorded_at: recorded_at }
- aggregated_metrics[0][:events].each do |event|
+ events.each do |event|
expect(sources::RedisHll).to receive(:calculate_metrics_union)
.with(params.merge(metric_names: event))
.once
@@ -265,7 +350,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi
end
2.upto(4) do |subset_size|
- aggregated_metrics[0][:events].combination(subset_size).each do |events|
+ events.combination(subset_size).each do |events|
expect(sources::RedisHll).to receive(:calculate_metrics_union)
.with(params.merge(metric_names: events))
.once
diff --git a/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb b/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb
index 7b8be8e8bc6..a2a40f17269 100644
--- a/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb
@@ -69,7 +69,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_
it 'persists serialized data in Redis' do
Gitlab::Redis::SharedState.with do |redis|
- expect(redis).to receive(:set).with("#{metric_1}_weekly-#{recorded_at.to_i}", '{"141":1,"56":1}', ex: 120.hours)
+ expect(redis).to receive(:set).with("#{metric_1}_7d-#{recorded_at.to_i}", '{"141":1,"56":1}', ex: 120.hours)
end
save_aggregated_metrics
@@ -81,7 +81,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_
it 'persists serialized data in Redis' do
Gitlab::Redis::SharedState.with do |redis|
- expect(redis).to receive(:set).with("#{metric_1}_monthly-#{recorded_at.to_i}", '{"141":1,"56":1}', ex: 120.hours)
+ expect(redis).to receive(:set).with("#{metric_1}_28d-#{recorded_at.to_i}", '{"141":1,"56":1}', ex: 120.hours)
end
save_aggregated_metrics
@@ -93,7 +93,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_
it 'persists serialized data in Redis' do
Gitlab::Redis::SharedState.with do |redis|
- expect(redis).to receive(:set).with("#{metric_1}_all_time-#{recorded_at.to_i}", '{"141":1,"56":1}', ex: 120.hours)
+ expect(redis).to receive(:set).with("#{metric_1}_all-#{recorded_at.to_i}", '{"141":1,"56":1}', ex: 120.hours)
end
save_aggregated_metrics
diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
new file mode 100644
index 00000000000..cd0413feab4
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator do
+ include UsageDataHelpers
+
+ before do
+ stub_usage_data_connections
+ end
+
+ describe '#generate' do
+ shared_examples 'name suggestion' do
+ it 'returns the correct name' do
+ expect(described_class.generate(key_path)).to eq name_suggestion
+ end
+ end
+
+ context 'for count with default column metrics' do
+ it_behaves_like 'name suggestion' do
+ # corresponding metric is collected with count(Board)
+ let(:key_path) { 'counts.boards' }
+ let(:name_suggestion) { 'count_boards' }
+ end
+ end
+
+ context 'for count distinct with column defined metrics' do
+ it_behaves_like 'name suggestion' do
+ # corresponding metric is collected with distinct_count(ZoomMeeting, :issue_id)
+ let(:key_path) { 'counts.issues_using_zoom_quick_actions' }
+ let(:name_suggestion) { 'count_distinct_issue_id_from_zoom_meetings' }
+ end
+ end
+
+ context 'for sum metrics' do
+ it_behaves_like 'name suggestion' do
+ # corresponding metric is collected with sum(JiraImportState.finished, :imported_issues_count)
+ let(:key_path) { 'counts.jira_imports_total_imported_issues_count' }
+ let(:name_suggestion) { "sum_imported_issues_count_from_<adjective describing: '(jira_imports.status = 4)'>_jira_imports" }
+ end
+ end
+
+ context 'for add metrics' do
+ it_behaves_like 'name suggestion' do
+ # corresponding metric is collected with add(data[:personal_snippets], data[:project_snippets])
+ let(:key_path) { 'counts.snippets' }
+ let(:name_suggestion) { "add_count_<adjective describing: '(snippets.type = 'PersonalSnippet')'>_snippets_and_count_<adjective describing: '(snippets.type = 'ProjectSnippet')'>_snippets" }
+ end
+ end
+
+ context 'for redis metrics' do
+ it_behaves_like 'name suggestion' do
+ # corresponding metric is collected with redis_usage_data { unique_visit_service.unique_visits_for(targets: :analytics) }
+ let(:key_path) { 'analytics_unique_visits.analytics_unique_visits_for_any_target' }
+ let(:name_suggestion) { '<please fill metric name>' }
+ end
+ end
+
+ context 'for alt_usage_data metrics' do
+ it_behaves_like 'name suggestion' do
+ # corresponding metric is collected with alt_usage_data(fallback: nil) { operating_system }
+ let(:key_path) { 'settings.operating_system' }
+ let(:name_suggestion) { '<please fill metric name>' }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/constraints_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/constraints_spec.rb
new file mode 100644
index 00000000000..68016e760e4
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/constraints_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::RelationParsers::Constraints do
+ describe '#accept' do
+ let(:collector) { Arel::Collectors::SubstituteBinds.new(ActiveRecord::Base.connection, Arel::Collectors::SQLString.new) }
+
+ it 'builds correct constraints description' do
+ table = Arel::Table.new('records')
+ arel = table.from.project(table['id'].count).where(table[:attribute].eq(true).and(table[:some_value].gt(5)))
+ described_class.new(ApplicationRecord.connection).accept(arel, collector)
+
+ expect(collector.value).to eql '(records.attribute = true AND records.some_value > 5)'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_counters/aggregated_metrics_spec.rb b/spec/lib/gitlab/usage_data_counters/aggregated_metrics_spec.rb
index 58f974fbe12..9aba86cdaf2 100644
--- a/spec/lib/gitlab/usage_data_counters/aggregated_metrics_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/aggregated_metrics_spec.rb
@@ -23,6 +23,22 @@ RSpec.describe 'aggregated metrics' do
end
end
+ RSpec::Matchers.define :have_known_time_frame do
+ allowed_time_frames = [
+ Gitlab::Utils::UsageData::ALL_TIME_TIME_FRAME_NAME,
+ Gitlab::Utils::UsageData::TWENTY_EIGHT_DAYS_TIME_FRAME_NAME,
+ Gitlab::Utils::UsageData::SEVEN_DAYS_TIME_FRAME_NAME
+ ]
+
+ match do |aggregate|
+ (aggregate[:time_frame] - allowed_time_frames).empty?
+ end
+
+ failure_message do |aggregate|
+ "Aggregate with name: `#{aggregate[:name]}` uses not allowed time_frame`#{aggregate[:time_frame] - allowed_time_frames}`"
+ end
+ end
+
let_it_be(:known_events) do
Gitlab::UsageDataCounters::HLLRedisCounter.known_events
end
@@ -38,10 +54,18 @@ RSpec.describe 'aggregated metrics' do
expect(aggregated_metrics).to all has_known_source
end
+  it 'all aggregated metrics have a known time frame' do
+ expect(aggregated_metrics).to all have_known_time_frame
+ end
+
aggregated_metrics&.select { |agg| agg[:source] == Gitlab::Usage::Metrics::Aggregates::REDIS_SOURCE }&.each do |aggregate|
context "for #{aggregate[:name]} aggregate of #{aggregate[:events].join(' ')}" do
let_it_be(:events_records) { known_events.select { |event| aggregate[:events].include?(event[:name]) } }
+ it "does not include 'all' time frame for Redis sourced aggregate" do
+ expect(aggregate[:time_frame]).not_to include(Gitlab::Utils::UsageData::ALL_TIME_TIME_FRAME_NAME)
+ end
+
it "only refers to known events" do
expect(aggregate[:events]).to all be_known_event
end
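
The have_known_time_frame matcher above checks the time_frame array of every aggregate definition against the three allowed frame names, and the Redis-sourced example below it additionally forbids the all-time frame. As a hedged illustration, an aggregate entry with the keys this spec reads might look like the following hash; the values are made up and the frame names are assumed to resolve to 'all', '28d' and '7d'.

  # Hypothetical aggregate definition with the keys the spec reads.
  aggregate = {
    name: 'example_redis_sourced_aggregate',
    source: 'redis',
    events: %w[g_analytics_contribution g_analytics_insights],
    time_frame: %w[7d 28d]
  }

  allowed_time_frames = %w[all 28d 7d]
  (aggregate[:time_frame] - allowed_time_frames).empty? # => true, so have_known_time_frame passes
  aggregate[:time_frame].include?('all')                # => false, as required for Redis-sourced aggregates
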
diff --git a/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb b/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb
new file mode 100644
index 00000000000..664e7938a7e
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# If this spec fails, we need to add the new code review event to the correct aggregated metric
+RSpec.describe 'Code review events' do
+ it 'the aggregated metrics contain all the code review metrics' do
+ path = Rails.root.join('lib/gitlab/usage_data_counters/aggregated_metrics/code_review.yml')
+ aggregated_events = YAML.safe_load(File.read(path), aliases: true)&.map(&:with_indifferent_access)
+
+ code_review_aggregated_events = aggregated_events
+ .map { |event| event['events'] }
+ .flatten
+ .uniq
+
+ code_review_events = Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category("code_review")
+
+ exceptions = %w[i_code_review_mr_diffs i_code_review_mr_single_file_diffs]
+ code_review_aggregated_events += exceptions
+
+ expect(code_review_events - code_review_aggregated_events).to be_empty
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index b4894ec049f..d12dcdae955 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -42,7 +42,8 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
'terraform',
'ci_templates',
'quickactions',
- 'pipeline_authoring'
+ 'pipeline_authoring',
+ 'epics_usage'
)
end
end
@@ -150,10 +151,17 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
expect { described_class.track_event(different_aggregation, values: entity1, time: Date.current) }.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownAggregation)
end
- it 'raise error if metrics of unknown aggregation' do
+  it 'raises an error for an unknown event' do
expect { described_class.track_event('unknown', values: entity1, time: Date.current) }.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
end
+  it 'reports an error if Feature.enabled? raises an error' do
+ expect(Feature).to receive(:enabled?).and_raise(StandardError.new)
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+
+ described_class.track_event(:g_analytics_contribution, values: entity1, time: Date.current)
+ end
+
context 'for weekly events' do
it 'sets the keys in Redis to expire automatically after the given expiry time' do
described_class.track_event("g_analytics_contribution", values: entity1)
diff --git a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
index bf43f7552e6..f8f6494b92e 100644
--- a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
let(:time) { Time.zone.now }
context 'for Issue title edit actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_TITLE_CHANGED }
def track_action(params)
@@ -19,7 +19,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue description edit actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_DESCRIPTION_CHANGED }
def track_action(params)
@@ -29,7 +29,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue assignee edit actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_ASSIGNEE_CHANGED }
def track_action(params)
@@ -39,7 +39,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue make confidential actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_MADE_CONFIDENTIAL }
def track_action(params)
@@ -49,7 +49,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue make visible actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_MADE_VISIBLE }
def track_action(params)
@@ -59,7 +59,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue created actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_CREATED }
def track_action(params)
@@ -69,7 +69,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue closed actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_CLOSED }
def track_action(params)
@@ -79,7 +79,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue reopened actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_REOPENED }
def track_action(params)
@@ -89,7 +89,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue label changed actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_LABEL_CHANGED }
def track_action(params)
@@ -99,7 +99,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue cross-referenced actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_CROSS_REFERENCED }
def track_action(params)
@@ -109,7 +109,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue moved actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_MOVED }
def track_action(params)
@@ -119,7 +119,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue cloned actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_CLONED }
def track_action(params)
@@ -129,7 +129,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue relate actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_RELATED }
def track_action(params)
@@ -139,7 +139,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue unrelate actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_UNRELATED }
def track_action(params)
@@ -149,7 +149,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue marked as duplicate actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_MARKED_AS_DUPLICATE }
def track_action(params)
@@ -159,7 +159,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue locked actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_LOCKED }
def track_action(params)
@@ -169,7 +169,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue unlocked actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_UNLOCKED }
def track_action(params)
@@ -179,7 +179,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue designs added actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_DESIGNS_ADDED }
def track_action(params)
@@ -189,7 +189,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue designs modified actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_DESIGNS_MODIFIED }
def track_action(params)
@@ -199,7 +199,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue designs removed actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_DESIGNS_REMOVED }
def track_action(params)
@@ -209,7 +209,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue due date changed actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_DUE_DATE_CHANGED }
def track_action(params)
@@ -219,7 +219,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue time estimate changed actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_TIME_ESTIMATE_CHANGED }
def track_action(params)
@@ -229,7 +229,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue time spent changed actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_TIME_SPENT_CHANGED }
def track_action(params)
@@ -239,7 +239,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue comment added actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_COMMENT_ADDED }
def track_action(params)
@@ -249,7 +249,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue comment edited actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_COMMENT_EDITED }
def track_action(params)
@@ -259,7 +259,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue comment removed actions' do
- it_behaves_like 'a tracked issue edit event' do
+ it_behaves_like 'a daily tracked issuable event' do
let(:action) { described_class::ISSUE_COMMENT_REMOVED }
def track_action(params)
diff --git a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
index a604de4a61f..6486a5a22ba 100644
--- a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
@@ -21,6 +21,14 @@ RSpec.describe Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter, :cl
end
end
+ shared_examples_for 'not tracked merge request unique event' do
+ specify do
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
+
+ subject
+ end
+ end
+
describe '.track_mr_diffs_action' do
subject { described_class.track_mr_diffs_action(merge_request: merge_request) }
@@ -284,4 +292,98 @@ RSpec.describe Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter, :cl
let(:action) { described_class::MR_CREATE_FROM_ISSUE_ACTION }
end
end
+
+ describe '.track_discussion_locked_action' do
+ subject { described_class.track_discussion_locked_action(user: user) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_DISCUSSION_LOCKED_ACTION }
+ end
+ end
+
+ describe '.track_discussion_unlocked_action' do
+ subject { described_class.track_discussion_unlocked_action(user: user) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_DISCUSSION_UNLOCKED_ACTION }
+ end
+ end
+
+ describe '.track_time_estimate_changed_action' do
+ subject { described_class.track_time_estimate_changed_action(user: user) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_TIME_ESTIMATE_CHANGED_ACTION }
+ end
+ end
+
+ describe '.track_time_spent_changed_action' do
+ subject { described_class.track_time_spent_changed_action(user: user) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_TIME_SPENT_CHANGED_ACTION }
+ end
+ end
+
+ describe '.track_assignees_changed_action' do
+ subject { described_class.track_assignees_changed_action(user: user) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_ASSIGNEES_CHANGED_ACTION }
+ end
+ end
+
+ describe '.track_reviewers_changed_action' do
+ subject { described_class.track_reviewers_changed_action(user: user) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_REVIEWERS_CHANGED_ACTION }
+ end
+ end
+
+ describe '.track_mr_including_ci_config' do
+ subject { described_class.track_mr_including_ci_config(user: user, merge_request: merge_request) }
+
+ context 'when merge request includes a ci config change' do
+ before do
+ allow(merge_request).to receive(:diff_stats).and_return([double(path: 'abc.txt'), double(path: '.gitlab-ci.yml')])
+ end
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_INCLUDING_CI_CONFIG_ACTION }
+ end
+
+ context 'when FF usage_data_o_pipeline_authoring_unique_users_pushing_mr_ciconfigfile is disabled' do
+ before do
+ stub_feature_flags(usage_data_o_pipeline_authoring_unique_users_pushing_mr_ciconfigfile: false)
+ end
+
+ it_behaves_like 'not tracked merge request unique event'
+ end
+ end
+
+ context 'when merge request does not include any ci config change' do
+ before do
+ allow(merge_request).to receive(:diff_stats).and_return([double(path: 'abc.txt'), double(path: 'abc.xyz')])
+ end
+
+ it_behaves_like 'not tracked merge request unique event'
+ end
+ end
+
+ describe '.track_milestone_changed_action' do
+ subject { described_class.track_milestone_changed_action(user: user) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_MILESTONE_CHANGED_ACTION }
+ end
+ end
+
+ describe '.track_labels_changed_action' do
+ subject { described_class.track_labels_changed_action(user: user) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_LABELS_CHANGED_ACTION }
+ end
+ end
end
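
The .track_mr_including_ci_config examples above only count the event when the merge request diff touches the CI configuration file and the usage_data_o_pipeline_authoring_unique_users_pushing_mr_ciconfigfile flag is enabled. A minimal sketch of that check, using the doubles' #path interface from the spec; the constant value and the tracking helper are assumptions for illustration.

  CI_CONFIG_FILE = '.gitlab-ci.yml' # assumed default CI config path

  def track_mr_including_ci_config(user:, merge_request:)
    return unless Feature.enabled?(:usage_data_o_pipeline_authoring_unique_users_pushing_mr_ciconfigfile)
    return unless merge_request.diff_stats&.any? { |stat| stat.path == CI_CONFIG_FILE }

    # Hypothetical helper that forwards to HLLRedisCounter.track_event.
    track_unique_action_by_user(MR_INCLUDING_CI_CONFIG_ACTION, user)
  end
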
diff --git a/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb
index 7b5efb11034..1be2a83f98f 100644
--- a/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::UsageDataCounters::PackageEventCounter, :clean_gitlab_red
end
it 'includes the right events' do
- expect(described_class::KNOWN_EVENTS.size).to eq 45
+ expect(described_class::KNOWN_EVENTS.size).to eq 48
end
described_class::KNOWN_EVENTS.each do |event|
diff --git a/spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb
index d4c423f57fe..2df0f331f73 100644
--- a/spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb
@@ -160,4 +160,24 @@ RSpec.describe Gitlab::UsageDataCounters::QuickActionActivityUniqueCounter, :cle
end
end
end
+
+ context 'tracking invite_email' do
+ let(:quickaction_name) { 'invite_email' }
+
+ context 'single email' do
+ let(:args) { 'someone@gitlab.com' }
+
+ it_behaves_like 'a tracked quick action unique event' do
+ let(:action) { 'i_quickactions_invite_email_single' }
+ end
+ end
+
+ context 'multiple emails' do
+ let(:args) { 'someone@gitlab.com another@gitlab.com' }
+
+ it_behaves_like 'a tracked quick action unique event' do
+ let(:action) { 'i_quickactions_invite_email_multiple' }
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/usage_data_queries_spec.rb b/spec/lib/gitlab/usage_data_queries_spec.rb
index 7fc77593265..12eac643383 100644
--- a/spec/lib/gitlab/usage_data_queries_spec.rb
+++ b/spec/lib/gitlab/usage_data_queries_spec.rb
@@ -38,4 +38,12 @@ RSpec.describe Gitlab::UsageDataQueries do
expect(described_class.sum(Issue, :weight)).to eq('SELECT SUM("issues"."weight") FROM "issues"')
end
end
+
+ describe '.add' do
+ it 'returns the combined raw SQL with an inner query' do
+ expect(described_class.add('SELECT COUNT("users"."id") FROM "users"',
+ 'SELECT COUNT("issues"."id") FROM "issues"'))
+ .to eq('SELECT (SELECT COUNT("users"."id") FROM "users") + (SELECT COUNT("issues"."id") FROM "issues")')
+ end
+ end
end
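
The .add example above shows the queries variant returning combined raw SQL rather than executing anything. A self-contained sketch of that string composition (a simplification of whatever the class actually does):

  # Wrap each SELECT in parentheses and join them with '+'.
  def add_sql(*queries)
    return '' if queries.empty?

    queries.map { |q| "(#{q})" }.join(' + ').prepend('SELECT ')
  end

  add_sql('SELECT COUNT("users"."id") FROM "users"',
          'SELECT COUNT("issues"."id") FROM "issues"')
  # => 'SELECT (SELECT COUNT("users"."id") FROM "users") + (SELECT COUNT("issues"."id") FROM "issues")'
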
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 602f6640d72..b1581bf02a6 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -382,14 +382,15 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
describe 'usage_activity_by_stage_monitor' do
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
- user = create(:user, dashboard: 'operations')
+ user = create(:user, dashboard: 'operations')
cluster = create(:cluster, user: user)
- create(:project, creator: user)
+ project = create(:project, creator: user)
create(:clusters_applications_prometheus, :installed, cluster: cluster)
create(:project_tracing_setting)
create(:project_error_tracking_setting)
create(:incident)
create(:incident, alert_management_alert: create(:alert_management_alert))
+ create(:alert_management_http_integration, :active, project: project)
end
expect(described_class.usage_activity_by_stage_monitor({})).to include(
@@ -399,10 +400,12 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
projects_with_tracing_enabled: 2,
projects_with_error_tracking_enabled: 2,
projects_with_incidents: 4,
- projects_with_alert_incidents: 2
+ projects_with_alert_incidents: 2,
+ projects_with_enabled_alert_integrations_histogram: { '1' => 2 }
)
- expect(described_class.usage_activity_by_stage_monitor(described_class.last_28_days_time_period)).to include(
+ data_28_days = described_class.usage_activity_by_stage_monitor(described_class.last_28_days_time_period)
+ expect(data_28_days).to include(
clusters: 1,
clusters_applications_prometheus: 1,
operations_dashboard_default_dashboard: 1,
@@ -411,6 +414,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
projects_with_incidents: 2,
projects_with_alert_incidents: 1
)
+
+ expect(data_28_days).not_to include(:projects_with_enabled_alert_integrations_histogram)
end
end
@@ -528,14 +533,14 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(subject.keys).to include(*UsageDataHelpers::USAGE_DATA_KEYS)
end
- it 'gathers usage counts' do
+ it 'gathers usage counts', :aggregate_failures do
count_data = subject[:counts]
expect(count_data[:boards]).to eq(1)
expect(count_data[:projects]).to eq(4)
- expect(count_data.values_at(*UsageDataHelpers::SMAU_KEYS)).to all(be_an(Integer))
expect(count_data.keys).to include(*UsageDataHelpers::COUNTS_KEYS)
expect(UsageDataHelpers::COUNTS_KEYS - count_data.keys).to be_empty
+ expect(count_data.values).to all(be_a_kind_of(Integer))
end
it 'gathers usage counts correctly' do
@@ -1129,12 +1134,40 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
+ describe ".operating_system" do
+ let(:ohai_data) { { "platform" => "ubuntu", "platform_version" => "20.04" } }
+
+ before do
+ allow_next_instance_of(Ohai::System) do |ohai|
+ allow(ohai).to receive(:data).and_return(ohai_data)
+ end
+ end
+
+ subject { described_class.operating_system }
+
+ it { is_expected.to eq("ubuntu-20.04") }
+
+ context 'when on Debian with armv architecture' do
+ let(:ohai_data) { { "platform" => "debian", "platform_version" => "10", 'kernel' => { 'machine' => 'armv' } } }
+
+ it { is_expected.to eq("raspbian-10") }
+ end
+ end
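
The .operating_system examples above build a "platform-version" string from Ohai data and report Debian on an armv kernel as Raspbian. A hedged, self-contained sketch of that mapping, inferred only from the two examples in the spec:

  # Build an OS identifier from Ohai-style data (sketch, not the real method).
  def operating_system_name(ohai_data)
    platform = ohai_data['platform']
    # Debian on an armv machine is reported as Raspbian, per the second example.
    platform = 'raspbian' if platform == 'debian' && ohai_data.dig('kernel', 'machine')&.include?('armv')

    [platform, ohai_data['platform_version']].join('-')
  end

  operating_system_name('platform' => 'ubuntu', 'platform_version' => '20.04')
  # => "ubuntu-20.04"
  operating_system_name('platform' => 'debian', 'platform_version' => '10', 'kernel' => { 'machine' => 'armv' })
  # => "raspbian-10"
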
+
describe ".system_usage_data_settings" do
+ before do
+ allow(described_class).to receive(:operating_system).and_return('ubuntu-20.04')
+ end
+
subject { described_class.system_usage_data_settings }
it 'gathers settings usage data', :aggregate_failures do
expect(subject[:settings][:ldap_encrypted_secrets_enabled]).to eq(Gitlab::Auth::Ldap::Config.encrypted_secrets.active?)
end
+
+ it 'populates operating system information' do
+ expect(subject[:settings][:operating_system]).to eq('ubuntu-20.04')
+ end
end
end
@@ -1325,7 +1358,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
let(:categories) { ::Gitlab::UsageDataCounters::HLLRedisCounter.categories }
let(:ineligible_total_categories) do
- %w[source_code ci_secrets_management incident_management_alerts snippets terraform pipeline_authoring]
+ %w[source_code ci_secrets_management incident_management_alerts snippets terraform epics_usage]
end
it 'has all known_events' do
@@ -1347,25 +1380,20 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
- describe '.aggregated_metrics_weekly' do
- subject(:aggregated_metrics_payload) { described_class.aggregated_metrics_weekly }
+ describe '.aggregated_metrics_data' do
+ it 'uses ::Gitlab::Usage::Metrics::Aggregates::Aggregate methods', :aggregate_failures do
+ expected_payload = {
+ counts_weekly: { aggregated_metrics: { global_search_gmau: 123 } },
+ counts_monthly: { aggregated_metrics: { global_search_gmau: 456 } },
+ counts: { aggregate_global_search_gmau: 789 }
+ }
- it 'uses ::Gitlab::Usage::Metrics::Aggregates::Aggregate#weekly_data', :aggregate_failures do
expect_next_instance_of(::Gitlab::Usage::Metrics::Aggregates::Aggregate) do |instance|
expect(instance).to receive(:weekly_data).and_return(global_search_gmau: 123)
+ expect(instance).to receive(:monthly_data).and_return(global_search_gmau: 456)
+ expect(instance).to receive(:all_time_data).and_return(global_search_gmau: 789)
end
- expect(aggregated_metrics_payload).to eq(aggregated_metrics: { global_search_gmau: 123 })
- end
- end
-
- describe '.aggregated_metrics_monthly' do
- subject(:aggregated_metrics_payload) { described_class.aggregated_metrics_monthly }
-
- it 'uses ::Gitlab::Usage::Metrics::Aggregates::Aggregate#monthly_data', :aggregate_failures do
- expect_next_instance_of(::Gitlab::Usage::Metrics::Aggregates::Aggregate) do |instance|
- expect(instance).to receive(:monthly_data).and_return(global_search_gmau: 123)
- end
- expect(aggregated_metrics_payload).to eq(aggregated_metrics: { global_search_gmau: 123 })
+ expect(described_class.aggregated_metrics_data).to eq(expected_payload)
end
end
diff --git a/spec/lib/gitlab/utils/usage_data_spec.rb b/spec/lib/gitlab/utils/usage_data_spec.rb
index e964e695828..6e1904c43e1 100644
--- a/spec/lib/gitlab/utils/usage_data_spec.rb
+++ b/spec/lib/gitlab/utils/usage_data_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Utils::UsageData do
+ include Database::DatabaseHelpers
+
describe '#count' do
let(:relation) { double(:relation) }
@@ -183,6 +185,120 @@ RSpec.describe Gitlab::Utils::UsageData do
end
end
+ describe '#histogram' do
+ let_it_be(:projects) { create_list(:project, 3) }
+ let(:project1) { projects.first }
+ let(:project2) { projects.second }
+ let(:project3) { projects.third }
+
+ let(:fallback) { described_class::HISTOGRAM_FALLBACK }
+ let(:relation) { AlertManagement::HttpIntegration.active }
+ let(:column) { :project_id }
+
+ def expect_error(exception, message, &block)
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_and_raise_for_dev_exception)
+ .with(instance_of(exception))
+ .and_call_original
+
+ expect(&block).to raise_error(
+ an_instance_of(exception).and(
+ having_attributes(message: message, backtrace: be_kind_of(Array)))
+ )
+ end
+
+    it 'checks that bucket bounds are not equal' do
+ expect_error(ArgumentError, 'Lower bucket bound cannot equal to upper bucket bound') do
+ described_class.histogram(relation, column, buckets: 1..1)
+ end
+ end
+
+    it 'checks that bucket_size is non-zero' do
+ expect_error(ArgumentError, 'Bucket size cannot be zero') do
+ described_class.histogram(relation, column, buckets: 1..2, bucket_size: 0)
+ end
+ end
+
+    it 'limits the number of buckets when no bucket_size argument is provided' do
+ expect_error(ArgumentError, 'Bucket size 101 exceeds the limit of 100') do
+ described_class.histogram(relation, column, buckets: 1..101)
+ end
+ end
+
+    it 'limits the number of buckets when a bucket_size argument is provided' do
+ expect_error(ArgumentError, 'Bucket size 101 exceeds the limit of 100') do
+ described_class.histogram(relation, column, buckets: 1..2, bucket_size: 101)
+ end
+ end
+
+    it 'returns an empty histogram when there is no data' do
+ histogram = described_class.histogram(relation, column, buckets: 1..100)
+
+ expect(histogram).to eq({})
+ end
+
+ it 'aggregates properly within bounds' do
+ create(:alert_management_http_integration, :active, project: project1)
+ create(:alert_management_http_integration, :inactive, project: project1)
+
+ create(:alert_management_http_integration, :active, project: project2)
+ create(:alert_management_http_integration, :active, project: project2)
+ create(:alert_management_http_integration, :inactive, project: project2)
+
+ create(:alert_management_http_integration, :active, project: project3)
+ create(:alert_management_http_integration, :inactive, project: project3)
+
+ histogram = described_class.histogram(relation, column, buckets: 1..100)
+
+ expect(histogram).to eq('1' => 2, '2' => 1)
+ end
+
+ it 'aggregates properly out of bounds' do
+ create_list(:alert_management_http_integration, 3, :active, project: project1)
+ histogram = described_class.histogram(relation, column, buckets: 1..2)
+
+ expect(histogram).to eq('2' => 1)
+ end
+
+ it 'returns fallback and logs canceled queries' do
+ create(:alert_management_http_integration, :active, project: project1)
+
+ expect(Gitlab::AppJsonLogger).to receive(:error).with(
+ event: 'histogram',
+ relation: relation.table_name,
+ operation: 'histogram',
+ operation_args: [column, 1, 100, 99],
+ query: kind_of(String),
+ message: /PG::QueryCanceled/
+ )
+
+ with_statement_timeout(0.001) do
+ relation = AlertManagement::HttpIntegration.select('pg_sleep(0.002)')
+ histogram = described_class.histogram(relation, column, buckets: 1..100)
+
+ expect(histogram).to eq(fallback)
+ end
+ end
+ end
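+
+  # The #histogram examples above bucket projects by how many active integrations
+  # each one has, so two projects with a single active integration and one project
+  # with two yield '1' => 2, '2' => 1, while out-of-range counts are clamped to the
+  # bucket bounds. A self-contained sketch of that aggregation over in-memory rows
+  # (the real method runs in SQL under a statement timeout and returns the fallback
+  # on PG::QueryCanceled, per the last example):
+  #
+  #   def histogram(rows, column, buckets:)
+  #     counts_per_value = rows.group_by { |row| row[column] }.transform_values(&:count)
+  #
+  #     counts_per_value.values
+  #       .map { |count| count.clamp(buckets.first, buckets.last) }
+  #       .tally
+  #       .transform_keys(&:to_s)
+  #   end
+  #
+  #   rows = [{ project_id: 1 }, { project_id: 2 }, { project_id: 2 }, { project_id: 3 }]
+  #   histogram(rows, :project_id, buckets: 1..100)                  # => { "1" => 2, "2" => 1 }
+  #   histogram([{ project_id: 1 }] * 3, :project_id, buckets: 1..2) # => { "2" => 1 }, clamped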
+
+ describe '#add' do
+ it 'adds given values' do
+ expect(described_class.add(1, 3)).to eq(4)
+ end
+
+    it 'returns 0 when no arguments are given' do
+ expect(described_class.add).to eq(0)
+ end
+
+ it 'returns the fallback value when adding fails' do
+ expect(described_class.add(nil, 3)).to eq(-1)
+ end
+
+    it 'returns the fallback value when one of the arguments is negative' do
+ expect(described_class.add(-1, 1)).to eq(-1)
+ end
+ end
+
describe '#alt_usage_data' do
it 'returns the fallback when it gets an error' do
expect(described_class.alt_usage_data { raise StandardError } ).to eq(-1)
@@ -203,6 +319,12 @@ RSpec.describe Gitlab::Utils::UsageData do
expect(described_class.redis_usage_data { raise ::Redis::CommandError } ).to eq(-1)
end
+ it 'returns the fallback when Redis HLL raises any error' do
+ stub_const("Gitlab::Utils::UsageData::FALLBACK", 15)
+
+ expect(described_class.redis_usage_data { raise Gitlab::UsageDataCounters::HLLRedisCounter::CategoryMismatch } ).to eq(15)
+ end
+
it 'returns the evaluated block when given' do
expect(described_class.redis_usage_data { 1 }).to eq(1)
end
@@ -222,6 +344,13 @@ RSpec.describe Gitlab::Utils::UsageData do
end
describe '#with_prometheus_client' do
+    it 'returns the fallback for an exception in the yield block' do
+ allow(described_class).to receive(:prometheus_client).and_return(Gitlab::PrometheusClient.new('http://localhost:9090'))
+ result = described_class.with_prometheus_client(fallback: -42) { |client| raise StandardError }
+
+ expect(result).to be(-42)
+ end
+
shared_examples 'query data from Prometheus' do
it 'yields a client instance and returns the block result' do
result = described_class.with_prometheus_client { |client| client }
@@ -231,10 +360,10 @@ RSpec.describe Gitlab::Utils::UsageData do
end
shared_examples 'does not query data from Prometheus' do
- it 'returns nil by default' do
+ it 'returns {} by default' do
result = described_class.with_prometheus_client { |client| client }
- expect(result).to be_nil
+ expect(result).to eq({})
end
it 'returns fallback if provided' do
@@ -338,38 +467,15 @@ RSpec.describe Gitlab::Utils::UsageData do
let(:value) { '9f302fea-f828-4ca9-aef4-e10bd723c0b3' }
let(:event_name) { 'incident_management_alert_status_changed' }
let(:unknown_event) { 'unknown' }
- let(:feature) { "usage_data_#{event_name}" }
-
- before do
- skip_feature_flags_yaml_validation
- end
- context 'with feature enabled' do
- before do
- stub_feature_flags(feature => true)
- end
+ it 'tracks redis hll event' do
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(event_name, values: value)
- it 'tracks redis hll event' do
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(event_name, values: value)
-
- described_class.track_usage_event(event_name, value)
- end
-
- it 'raise an error for unknown event' do
- expect { described_class.track_usage_event(unknown_event, value) }.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
- end
+ described_class.track_usage_event(event_name, value)
end
- context 'with feature disabled' do
- before do
- stub_feature_flags(feature => false)
- end
-
- it 'does not track event' do
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
-
- described_class.track_usage_event(event_name, value)
- end
+ it 'raise an error for unknown event' do
+ expect { described_class.track_usage_event(unknown_event, value) }.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
end
end
end
diff --git a/spec/lib/gitlab/visibility_level_spec.rb b/spec/lib/gitlab/visibility_level_spec.rb
index 63c31c82d59..0d34d22cbbe 100644
--- a/spec/lib/gitlab/visibility_level_spec.rb
+++ b/spec/lib/gitlab/visibility_level_spec.rb
@@ -131,4 +131,29 @@ RSpec.describe Gitlab::VisibilityLevel do
end
end
end
+
+ describe '.options' do
+ context 'keys' do
+ it 'returns the allowed visibility levels' do
+ expect(described_class.options.keys).to contain_exactly('Private', 'Internal', 'Public')
+ end
+ end
+ end
+
+ describe '.level_name' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:level_value, :level_name) do
+ described_class::PRIVATE | 'Private'
+ described_class::INTERNAL | 'Internal'
+ described_class::PUBLIC | 'Public'
+ non_existing_record_access_level | 'Unknown'
+ end
+
+ with_them do
+ it 'returns the name of the visibility level' do
+ expect(described_class.level_name(level_value)).to eq(level_name)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/word_diff/chunk_collection_spec.rb b/spec/lib/gitlab/word_diff/chunk_collection_spec.rb
new file mode 100644
index 00000000000..aa837f760c1
--- /dev/null
+++ b/spec/lib/gitlab/word_diff/chunk_collection_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::WordDiff::ChunkCollection do
+ subject(:collection) { described_class.new }
+
+ describe '#add' do
+ it 'adds elements to the chunk collection' do
+ collection.add('Hello')
+ collection.add(' World')
+
+ expect(collection.content).to eq('Hello World')
+ end
+ end
+
+ describe '#content' do
+ subject { collection.content }
+
+ context 'when no elements in the collection' do
+ it { is_expected.to eq('') }
+ end
+
+ context 'when elements exist' do
+ before do
+ collection.add('Hi')
+ collection.add(' GitLab!')
+ end
+
+ it { is_expected.to eq('Hi GitLab!') }
+ end
+ end
+
+ describe '#reset' do
+ it 'clears the collection' do
+ collection.add('1')
+ collection.add('2')
+
+ collection.reset
+
+ expect(collection.content).to eq('')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/word_diff/line_processor_spec.rb b/spec/lib/gitlab/word_diff/line_processor_spec.rb
new file mode 100644
index 00000000000..f448f5b5eb6
--- /dev/null
+++ b/spec/lib/gitlab/word_diff/line_processor_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::WordDiff::LineProcessor do
+ subject(:line_processor) { described_class.new(line) }
+
+ describe '#extract' do
+ subject(:segment) { line_processor.extract }
+
+ context 'when line is a diff hunk' do
+ let(:line) { "@@ -1,14 +1,13 @@\n" }
+
+ it 'returns DiffHunk segment' do
+ expect(segment).to be_a(Gitlab::WordDiff::Segments::DiffHunk)
+ expect(segment.to_s).to eq('@@ -1,14 +1,13 @@')
+ end
+ end
+
+ context 'when line has a newline delimiter' do
+ let(:line) { "~\n" }
+
+ it 'returns Newline segment' do
+ expect(segment).to be_a(Gitlab::WordDiff::Segments::Newline)
+ expect(segment.to_s).to eq('')
+ end
+ end
+
+ context 'when line has only space' do
+ let(:line) { " \n" }
+
+ it 'returns nil' do
+ is_expected.to be_nil
+ end
+ end
+
+ context 'when line has content' do
+ let(:line) { "+New addition\n" }
+
+ it 'returns Chunk segment' do
+ expect(segment).to be_a(Gitlab::WordDiff::Segments::Chunk)
+ expect(segment.to_s).to eq('New addition')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/word_diff/parser_spec.rb b/spec/lib/gitlab/word_diff/parser_spec.rb
new file mode 100644
index 00000000000..3aeefb57a02
--- /dev/null
+++ b/spec/lib/gitlab/word_diff/parser_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::WordDiff::Parser do
+ subject(:parser) { described_class.new }
+
+ describe '#parse' do
+ subject { parser.parse(diff.lines).to_a }
+
+ let(:diff) do
+ <<~EOF
+ @@ -1,14 +1,13 @@
+ ~
+ Unchanged line
+ ~
+ ~
+ -Old change
+ +New addition
+ unchanged content
+ ~
+ @@ -50,14 +50,13 @@
+ +First change
+ same same same_
+ -removed_
+ +added_
+ end of the line
+ ~
+ ~
+ EOF
+ end
+
+ it 'returns a collection of lines' do
+ diff_lines = subject
+
+ aggregate_failures do
+ expect(diff_lines.count).to eq(7)
+
+ expect(diff_lines.map(&:to_hash)).to match_array(
+ [
+ a_hash_including(index: 0, old_pos: 1, new_pos: 1, text: '', type: nil),
+ a_hash_including(index: 1, old_pos: 2, new_pos: 2, text: 'Unchanged line', type: nil),
+ a_hash_including(index: 2, old_pos: 3, new_pos: 3, text: '', type: nil),
+ a_hash_including(index: 3, old_pos: 4, new_pos: 4, text: 'Old changeNew addition unchanged content', type: nil),
+ a_hash_including(index: 4, old_pos: 50, new_pos: 50, text: '@@ -50,14 +50,13 @@', type: 'match'),
+ a_hash_including(index: 5, old_pos: 50, new_pos: 50, text: 'First change same same same_removed_added_end of the line', type: nil),
+ a_hash_including(index: 6, old_pos: 51, new_pos: 51, text: '', type: nil)
+ ]
+ )
+ end
+ end
+
+ it 'restarts object index after several calls to Enumerator' do
+ enumerator = parser.parse(diff.lines)
+
+ 2.times do
+ expect(enumerator.first.index).to eq(0)
+ end
+ end
+
+ context 'when diff is empty' do
+ let(:diff) { '' }
+
+ it { is_expected.to eq([]) }
+ end
+ end
+end
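
The WordDiff specs above consume `git diff --word-diff=porcelain` output, where `@@` headers carry the old and new positions, a lone `~` marks the end of a display line, and `-`, `+` and space prefixes mark removed, added and unchanged chunks whose text is concatenated into one line. A rough, self-contained sketch of that accumulation (position bookkeeping and segment types, covered by the other specs, are left out):

  # Collapse word-diff porcelain lines into display lines (sketch only).
  def collapse_word_diff(lines)
    result = []
    buffer = +''

    lines.each do |raw|
      line = raw.chomp
      case line
      when /\A@@ /  # hunk header: line numbers handled elsewhere
        next
      when '~'      # end of one display line
        result << buffer
        buffer = +''
      else          # '-', '+' or ' ' chunk: strip the modifier, keep the text
        buffer << line[1..].to_s
      end
    end

    result
  end

  collapse_word_diff(["@@ -1,2 +1,2 @@\n", "-old \n", "+new \n", " words\n", "~\n"])
  # => ["old new words"]
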
diff --git a/spec/lib/gitlab/word_diff/positions_counter_spec.rb b/spec/lib/gitlab/word_diff/positions_counter_spec.rb
new file mode 100644
index 00000000000..e2c246f6801
--- /dev/null
+++ b/spec/lib/gitlab/word_diff/positions_counter_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::WordDiff::PositionsCounter do
+ subject(:counter) { described_class.new }
+
+ describe 'Initial state' do
+ it 'starts with predefined values' do
+ expect(counter.pos_old).to eq(1)
+ expect(counter.pos_new).to eq(1)
+ expect(counter.line_obj_index).to eq(0)
+ end
+ end
+
+ describe '#increase_pos_num' do
+ it 'increases old and new positions' do
+ expect { counter.increase_pos_num }.to change { counter.pos_old }.from(1).to(2)
+ .and change { counter.pos_new }.from(1).to(2)
+ end
+ end
+
+ describe '#increase_obj_index' do
+ it 'increases object index' do
+ expect { counter.increase_obj_index }.to change { counter.line_obj_index }.from(0).to(1)
+ end
+ end
+
+ describe '#set_pos_num' do
+ it 'sets old and new positions' do
+ expect { counter.set_pos_num(old: 10, new: 12) }.to change { counter.pos_old }.from(1).to(10)
+ .and change { counter.pos_new }.from(1).to(12)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/word_diff/segments/chunk_spec.rb b/spec/lib/gitlab/word_diff/segments/chunk_spec.rb
new file mode 100644
index 00000000000..797cc42a03c
--- /dev/null
+++ b/spec/lib/gitlab/word_diff/segments/chunk_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::WordDiff::Segments::Chunk do
+ subject(:chunk) { described_class.new(line) }
+
+ let(:line) { ' Hello' }
+
+ describe '#removed?' do
+ subject { chunk.removed? }
+
+ it { is_expected.to be_falsey }
+
+ context 'when line starts with "-"' do
+ let(:line) { '-Removed' }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ describe '#added?' do
+ subject { chunk.added? }
+
+ it { is_expected.to be_falsey }
+
+ context 'when line starts with "+"' do
+ let(:line) { '+Added' }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ describe '#to_s' do
+ subject { chunk.to_s }
+
+    it 'removes the leading string modifier' do
+ is_expected.to eq('Hello')
+ end
+
+ context 'when chunk is empty' do
+ let(:line) { '' }
+
+ it { is_expected.to eq('') }
+ end
+ end
+
+ describe '#length' do
+ subject { chunk.length }
+
+ it { is_expected.to eq('Hello'.length) }
+ end
+end
diff --git a/spec/lib/gitlab/word_diff/segments/diff_hunk_spec.rb b/spec/lib/gitlab/word_diff/segments/diff_hunk_spec.rb
new file mode 100644
index 00000000000..5250e6d73c2
--- /dev/null
+++ b/spec/lib/gitlab/word_diff/segments/diff_hunk_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::WordDiff::Segments::DiffHunk do
+ subject(:diff_hunk) { described_class.new(line) }
+
+ let(:line) { '@@ -3,14 +4,13 @@' }
+
+ describe '#pos_old' do
+ subject { diff_hunk.pos_old }
+
+ it { is_expected.to eq 3 }
+
+ context 'when diff hunk is broken' do
+ let(:line) { '@@ ??? @@' }
+
+ it { is_expected.to eq 0 }
+ end
+ end
+
+ describe '#pos_new' do
+ subject { diff_hunk.pos_new }
+
+ it { is_expected.to eq 4 }
+
+ context 'when diff hunk is broken' do
+ let(:line) { '@@ ??? @@' }
+
+ it { is_expected.to eq 0 }
+ end
+ end
+
+ describe '#first_line?' do
+ subject { diff_hunk.first_line? }
+
+ it { is_expected.to be_falsey }
+
+    context 'when the diff hunk is located on the first line' do
+ let(:line) { '@@ -1,14 +1,13 @@' }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ describe '#to_s' do
+ subject { diff_hunk.to_s }
+
+ it { is_expected.to eq(line) }
+ end
+end
diff --git a/spec/lib/gitlab/word_diff/segments/newline_spec.rb b/spec/lib/gitlab/word_diff/segments/newline_spec.rb
new file mode 100644
index 00000000000..ed5054844f1
--- /dev/null
+++ b/spec/lib/gitlab/word_diff/segments/newline_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::WordDiff::Segments::Newline do
+ subject(:newline) { described_class.new }
+
+ describe '#to_s' do
+ subject { newline.to_s }
+
+ it { is_expected.to eq '' }
+ end
+end
diff --git a/spec/lib/gitlab/x509/signature_spec.rb b/spec/lib/gitlab/x509/signature_spec.rb
index ac6f7e49fe0..2ac9c1f3a3b 100644
--- a/spec/lib/gitlab/x509/signature_spec.rb
+++ b/spec/lib/gitlab/x509/signature_spec.rb
@@ -11,6 +11,65 @@ RSpec.describe Gitlab::X509::Signature do
}
end
+ shared_examples "a verified signature" do
+ it 'returns a verified signature if email does match' do
+ signature = described_class.new(
+ X509Helpers::User1.signed_commit_signature,
+ X509Helpers::User1.signed_commit_base_data,
+ X509Helpers::User1.certificate_email,
+ X509Helpers::User1.signed_commit_time
+ )
+
+ expect(signature.x509_certificate).to have_attributes(certificate_attributes)
+ expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes)
+ expect(signature.verified_signature).to be_truthy
+ expect(signature.verification_status).to eq(:verified)
+ end
+
+ it 'returns an unverified signature if email does not match' do
+ signature = described_class.new(
+ X509Helpers::User1.signed_commit_signature,
+ X509Helpers::User1.signed_commit_base_data,
+ "gitlab@example.com",
+ X509Helpers::User1.signed_commit_time
+ )
+
+ expect(signature.x509_certificate).to have_attributes(certificate_attributes)
+ expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes)
+ expect(signature.verified_signature).to be_truthy
+ expect(signature.verification_status).to eq(:unverified)
+ end
+
+ it 'returns an unverified signature if email does match and time is wrong' do
+ signature = described_class.new(
+ X509Helpers::User1.signed_commit_signature,
+ X509Helpers::User1.signed_commit_base_data,
+ X509Helpers::User1.certificate_email,
+ Time.new(2020, 2, 22)
+ )
+
+ expect(signature.x509_certificate).to have_attributes(certificate_attributes)
+ expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes)
+ expect(signature.verified_signature).to be_falsey
+ expect(signature.verification_status).to eq(:unverified)
+ end
+
+ it 'returns an unverified signature if certificate is revoked' do
+ signature = described_class.new(
+ X509Helpers::User1.signed_commit_signature,
+ X509Helpers::User1.signed_commit_base_data,
+ X509Helpers::User1.certificate_email,
+ X509Helpers::User1.signed_commit_time
+ )
+
+ expect(signature.verification_status).to eq(:verified)
+
+ signature.x509_certificate.revoked!
+
+ expect(signature.verification_status).to eq(:unverified)
+ end
+ end
+
context 'commit signature' do
let(:certificate_attributes) do
{
@@ -30,62 +89,25 @@ RSpec.describe Gitlab::X509::Signature do
allow(OpenSSL::X509::Store).to receive(:new).and_return(store)
end
- it 'returns a verified signature if email does match' do
- signature = described_class.new(
- X509Helpers::User1.signed_commit_signature,
- X509Helpers::User1.signed_commit_base_data,
- X509Helpers::User1.certificate_email,
- X509Helpers::User1.signed_commit_time
- )
-
- expect(signature.x509_certificate).to have_attributes(certificate_attributes)
- expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes)
- expect(signature.verified_signature).to be_truthy
- expect(signature.verification_status).to eq(:verified)
- end
+ it_behaves_like "a verified signature"
+ end
- it 'returns an unverified signature if email does not match' do
- signature = described_class.new(
- X509Helpers::User1.signed_commit_signature,
- X509Helpers::User1.signed_commit_base_data,
- "gitlab@example.com",
- X509Helpers::User1.signed_commit_time
- )
+ context 'with the certificate defined by OpenSSL::X509::DEFAULT_CERT_FILE' do
+ before do
+ store = OpenSSL::X509::Store.new
+ certificate = OpenSSL::X509::Certificate.new(X509Helpers::User1.trust_cert)
+ file_path = Rails.root.join("tmp/cert.pem").to_s
- expect(signature.x509_certificate).to have_attributes(certificate_attributes)
- expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes)
- expect(signature.verified_signature).to be_truthy
- expect(signature.verification_status).to eq(:unverified)
- end
+ File.open(file_path, "wb") do |f|
+ f.print certificate.to_pem
+ end
- it 'returns an unverified signature if email does match and time is wrong' do
- signature = described_class.new(
- X509Helpers::User1.signed_commit_signature,
- X509Helpers::User1.signed_commit_base_data,
- X509Helpers::User1.certificate_email,
- Time.new(2020, 2, 22)
- )
+ stub_const("OpenSSL::X509::DEFAULT_CERT_FILE", file_path)
- expect(signature.x509_certificate).to have_attributes(certificate_attributes)
- expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes)
- expect(signature.verified_signature).to be_falsey
- expect(signature.verification_status).to eq(:unverified)
+ allow(OpenSSL::X509::Store).to receive(:new).and_return(store)
end
- it 'returns an unverified signature if certificate is revoked' do
- signature = described_class.new(
- X509Helpers::User1.signed_commit_signature,
- X509Helpers::User1.signed_commit_base_data,
- X509Helpers::User1.certificate_email,
- X509Helpers::User1.signed_commit_time
- )
-
- expect(signature.verification_status).to eq(:verified)
-
- signature.x509_certificate.revoked!
-
- expect(signature.verification_status).to eq(:unverified)
- end
+ it_behaves_like "a verified signature"
end
context 'without trusted certificate within store' do
diff --git a/spec/lib/marginalia_spec.rb b/spec/lib/marginalia_spec.rb
index fa0cd214c7e..2ee27fbe20c 100644
--- a/spec/lib/marginalia_spec.rb
+++ b/spec/lib/marginalia_spec.rb
@@ -37,26 +37,9 @@ RSpec.describe 'Marginalia spec' do
}
end
- context 'when the feature is enabled' do
- before do
- stub_feature(true)
- end
-
- it 'generates a query that includes the component and value' do
- component_map.each do |component, value|
- expect(recorded.log.last).to include("#{component}:#{value}")
- end
- end
- end
-
- context 'when the feature is disabled' do
- before do
- stub_feature(false)
- end
-
- it 'excludes annotations in generated queries' do
- expect(recorded.log.last).not_to include("/*")
- expect(recorded.log.last).not_to include("*/")
+ it 'generates a query that includes the component and value' do
+ component_map.each do |component, value|
+ expect(recorded.log.last).to include("#{component}:#{value}")
end
end
end
@@ -90,59 +73,37 @@ RSpec.describe 'Marginalia spec' do
}
end
- context 'when the feature is enabled' do
- before do
- stub_feature(true)
+ it 'generates a query that includes the component and value' do
+ component_map.each do |component, value|
+ expect(recorded.log.last).to include("#{component}:#{value}")
end
+ end
- it 'generates a query that includes the component and value' do
- component_map.each do |component, value|
- expect(recorded.log.last).to include("#{component}:#{value}")
- end
- end
-
- describe 'for ActionMailer delivery jobs' do
- let(:delivery_job) { MarginaliaTestMailer.first_user.deliver_later }
-
- let(:recorded) do
- ActiveRecord::QueryRecorder.new do
- delivery_job.perform_now
- end
- end
-
- let(:component_map) do
- {
- "application" => "sidekiq",
- "jid" => delivery_job.job_id,
- "job_class" => delivery_job.arguments.first
- }
- end
+ describe 'for ActionMailer delivery jobs' do
+ let(:delivery_job) { MarginaliaTestMailer.first_user.deliver_later }
- it 'generates a query that includes the component and value' do
- component_map.each do |component, value|
- expect(recorded.log.last).to include("#{component}:#{value}")
- end
+ let(:recorded) do
+ ActiveRecord::QueryRecorder.new do
+ delivery_job.perform_now
end
end
- end
- context 'when the feature is disabled' do
- before do
- stub_feature(false)
+ let(:component_map) do
+ {
+ "application" => "sidekiq",
+ "jid" => delivery_job.job_id,
+ "job_class" => delivery_job.arguments.first
+ }
end
- it 'excludes annotations in generated queries' do
- expect(recorded.log.last).not_to include("/*")
- expect(recorded.log.last).not_to include("*/")
+ it 'generates a query that includes the component and value' do
+ component_map.each do |component, value|
+ expect(recorded.log.last).to include("#{component}:#{value}")
+ end
end
end
end
- def stub_feature(value)
- stub_feature_flags(marginalia: value)
- Gitlab::Marginalia.set_enabled_from_feature_flag
- end
-
def make_request(correlation_id)
request_env = Rack::MockRequest.env_for('/')
diff --git a/spec/lib/object_storage/direct_upload_spec.rb b/spec/lib/object_storage/direct_upload_spec.rb
index 547bba5117a..12c6cbe03b3 100644
--- a/spec/lib/object_storage/direct_upload_spec.rb
+++ b/spec/lib/object_storage/direct_upload_spec.rb
@@ -224,6 +224,17 @@ RSpec.describe ObjectStorage::DirectUpload do
expect(subject[:CustomPutHeaders]).to be_truthy
expect(subject[:PutHeaders]).to eq({})
end
+
+ context 'with an object with UTF-8 characters' do
+ let(:object_name) { 'tmp/uploads/テスト' }
+
+ it 'returns an escaped path' do
+ expect(subject[:GetURL]).to start_with(storage_url)
+
+ uri = Addressable::URI.parse(subject[:GetURL])
+ expect(uri.path).to include("tmp/uploads/#{CGI.escape("テスト")}")
+ end
+ end
end
shared_examples 'a valid upload with multipart data' do
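
The new DirectUpload example above expects non-ASCII object names to appear percent-encoded in the pre-signed GetURL. A tiny, self-contained illustration of that escaping, applying CGI.escape per path segment and checking the result the same way the spec does; the storage URL is a made-up placeholder.

  require 'cgi'
  require 'addressable/uri' # gem the spec's assertion relies on

  object_name  = 'tmp/uploads/テスト'
  escaped_path = object_name.split('/').map { |segment| CGI.escape(segment) }.join('/')
  # => "tmp/uploads/%E3%83%86%E3%82%B9%E3%83%88"

  url = "https://objects.example.com/bucket/#{escaped_path}" # hypothetical storage endpoint
  Addressable::URI.parse(url).path.include?("tmp/uploads/#{CGI.escape('テスト')}") # => true
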
diff --git a/spec/lib/pager_duty/webhook_payload_parser_spec.rb b/spec/lib/pager_duty/webhook_payload_parser_spec.rb
index 54c61b9121c..647f19e3d3a 100644
--- a/spec/lib/pager_duty/webhook_payload_parser_spec.rb
+++ b/spec/lib/pager_duty/webhook_payload_parser_spec.rb
@@ -1,7 +1,6 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
-require 'json_schemer'
+require 'spec_helper'
RSpec.describe PagerDuty::WebhookPayloadParser do
describe '.call' do
diff --git a/spec/lib/peek/views/active_record_spec.rb b/spec/lib/peek/views/active_record_spec.rb
new file mode 100644
index 00000000000..dad5a2bf461
--- /dev/null
+++ b/spec/lib/peek/views/active_record_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Peek::Views::ActiveRecord, :request_store do
+ subject { Peek.views.find { |v| v.instance_of?(Peek::Views::ActiveRecord) } }
+
+ let(:connection) { double(:connection) }
+
+ let(:event_1) do
+ {
+ name: 'SQL',
+ sql: 'SELECT * FROM users WHERE id = 10',
+ cached: false,
+ connection: connection
+ }
+ end
+
+ let(:event_2) do
+ {
+ name: 'SQL',
+ sql: 'SELECT * FROM users WHERE id = 10',
+ cached: true,
+ connection: connection
+ }
+ end
+
+ let(:event_3) do
+ {
+ name: 'SQL',
+ sql: 'UPDATE users SET admin = true WHERE id = 10',
+ cached: false,
+ connection: connection
+ }
+ end
+
+ before do
+ allow(Gitlab::PerformanceBar).to receive(:enabled_for_request?).and_return(true)
+ end
+
+  it 'subscribes and stores data into peek views' do
+ Timecop.freeze(2021, 2, 23, 10, 0) do
+ ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 1.second, '1', event_1)
+ ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 2.seconds, '2', event_2)
+ ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 3.seconds, '3', event_3)
+ end
+
+ expect(subject.results).to match(
+ calls: '3 (1 cached)',
+ duration: '6000.00ms',
+ warnings: ["active-record duration: 6000.0 over 3000"],
+ details: contain_exactly(
+ a_hash_including(
+ cached: '',
+ duration: 1000.0,
+ sql: 'SELECT * FROM users WHERE id = 10'
+ ),
+ a_hash_including(
+ cached: 'cached',
+ duration: 2000.0,
+ sql: 'SELECT * FROM users WHERE id = 10'
+ ),
+ a_hash_including(
+ cached: '',
+ duration: 3000.0,
+ sql: 'UPDATE users SET admin = true WHERE id = 10'
+ )
+ )
+ )
+ end
+end
diff --git a/spec/lib/quality/test_level_spec.rb b/spec/lib/quality/test_level_spec.rb
index 2232d47234f..32960cd571b 100644
--- a/spec/lib/quality/test_level_spec.rb
+++ b/spec/lib/quality/test_level_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Quality::TestLevel do
context 'when level is unit' do
it 'returns a pattern' do
expect(subject.pattern(:unit))
- .to eq("spec/{bin,channels,config,db,dependencies,elastic,elastic_integration,experiments,factories,finders,frontend,graphql,haml_lint,helpers,initializers,javascripts,lib,models,policies,presenters,rack_servers,replicators,routing,rubocop,serializers,services,sidekiq,support_specs,tasks,uploaders,validators,views,workers,tooling}{,/**/}*_spec.rb")
+ .to eq("spec/{bin,channels,config,db,dependencies,elastic,elastic_integration,experiments,factories,finders,frontend,graphql,haml_lint,helpers,initializers,javascripts,lib,models,policies,presenters,rack_servers,replicators,routing,rubocop,serializers,services,sidekiq,spam,support_specs,tasks,uploaders,validators,views,workers,tooling}{,/**/}*_spec.rb")
end
end
@@ -103,7 +103,7 @@ RSpec.describe Quality::TestLevel do
context 'when level is unit' do
it 'returns a regexp' do
expect(subject.regexp(:unit))
- .to eq(%r{spec/(bin|channels|config|db|dependencies|elastic|elastic_integration|experiments|factories|finders|frontend|graphql|haml_lint|helpers|initializers|javascripts|lib|models|policies|presenters|rack_servers|replicators|routing|rubocop|serializers|services|sidekiq|support_specs|tasks|uploaders|validators|views|workers|tooling)})
+ .to eq(%r{spec/(bin|channels|config|db|dependencies|elastic|elastic_integration|experiments|factories|finders|frontend|graphql|haml_lint|helpers|initializers|javascripts|lib|models|policies|presenters|rack_servers|replicators|routing|rubocop|serializers|services|sidekiq|spam|support_specs|tasks|uploaders|validators|views|workers|tooling)})
end
end
diff --git a/spec/lib/release_highlights/validator/entry_spec.rb b/spec/lib/release_highlights/validator/entry_spec.rb
index da44938f165..5f7ccbf4310 100644
--- a/spec/lib/release_highlights/validator/entry_spec.rb
+++ b/spec/lib/release_highlights/validator/entry_spec.rb
@@ -40,8 +40,8 @@ RSpec.describe ReleaseHighlights::Validator::Entry do
end
it 'validates boolean value of "self-managed" and "gitlab-com"' do
- allow(entry).to receive(:value_for).with('self-managed').and_return('nope')
- allow(entry).to receive(:value_for).with('gitlab-com').and_return('yerp')
+ allow(entry).to receive(:value_for).with(:'self-managed').and_return('nope')
+ allow(entry).to receive(:value_for).with(:'gitlab-com').and_return('yerp')
subject.valid?
@@ -50,17 +50,18 @@ RSpec.describe ReleaseHighlights::Validator::Entry do
end
it 'validates URI of "url" and "image_url"' do
- allow(entry).to receive(:value_for).with('image_url').and_return('imgur/gitlab_feature.gif')
- allow(entry).to receive(:value_for).with('url').and_return('gitlab/newest_release.html')
+ stub_env('RSPEC_ALLOW_INVALID_URLS', 'false')
+ allow(entry).to receive(:value_for).with(:image_url).and_return('https://foobar.x/images/ci/gitlab-ci-cd-logo_2x.png')
+ allow(entry).to receive(:value_for).with(:url).and_return('')
subject.valid?
- expect(subject.errors[:url]).to include(/must be a URL/)
- expect(subject.errors[:image_url]).to include(/must be a URL/)
+ expect(subject.errors[:url]).to include(/must be a valid URL/)
+ expect(subject.errors[:image_url]).to include(/is blocked: Host cannot be resolved or invalid/)
end
it 'validates release is numerical' do
- allow(entry).to receive(:value_for).with('release').and_return('one')
+ allow(entry).to receive(:value_for).with(:release).and_return('one')
subject.valid?
@@ -68,7 +69,7 @@ RSpec.describe ReleaseHighlights::Validator::Entry do
end
it 'validates published_at is a date' do
- allow(entry).to receive(:value_for).with('published_at').and_return('christmas day')
+ allow(entry).to receive(:value_for).with(:published_at).and_return('christmas day')
subject.valid?
@@ -76,7 +77,7 @@ RSpec.describe ReleaseHighlights::Validator::Entry do
end
it 'validates packages are included in list' do
- allow(entry).to receive(:value_for).with('packages').and_return(['ALL'])
+ allow(entry).to receive(:value_for).with(:packages).and_return(['ALL'])
subject.valid?
diff --git a/spec/lib/release_highlights/validator_spec.rb b/spec/lib/release_highlights/validator_spec.rb
index a423e8cc5f6..f30754b4167 100644
--- a/spec/lib/release_highlights/validator_spec.rb
+++ b/spec/lib/release_highlights/validator_spec.rb
@@ -78,7 +78,10 @@ RSpec.describe ReleaseHighlights::Validator do
end
describe 'when validating all files' do
- it 'they should have no errors' do
+ # Permit DNS requests to validate all URLs in the YAML files
+ it 'they should have no errors', :permit_dns do
+ stub_env('RSPEC_ALLOW_INVALID_URLS', 'false')
+
expect(described_class.validate_all!).to be_truthy, described_class.error_message
end
end
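
The new :permit_dns tag is plain RSpec metadata, and the support code that honours it is not part of this change. A hedged sketch of the general wiring, with a no-op placeholder module standing in for whatever the suite actually does:

# spec/support/permit_dns.rb (hypothetical file name and helper names)
require 'rspec'

module DnsControlSketch
  module_function

  def permit_dns!
    # Would relax the suite's default "no external DNS/HTTP" stubbing here.
  end

  def restore_dns_blocking!
    # Would reinstate the default stubbing afterwards.
  end
end

RSpec.configure do |config|
  # Only examples tagged :permit_dns, like the one above, get real DNS resolution.
  config.around(:each, :permit_dns) do |example|
    DnsControlSketch.permit_dns!
    begin
      example.run
    ensure
      DnsControlSketch.restore_dns_blocking!
    end
  end
end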
diff --git a/spec/lib/rspec_flaky/config_spec.rb b/spec/lib/rspec_flaky/config_spec.rb
deleted file mode 100644
index 6b148599b67..00000000000
--- a/spec/lib/rspec_flaky/config_spec.rb
+++ /dev/null
@@ -1,106 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe RspecFlaky::Config, :aggregate_failures do
- before do
- # Stub these env variables otherwise specs don't behave the same on the CI
- stub_env('FLAKY_RSPEC_GENERATE_REPORT', nil)
- stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', nil)
- stub_env('FLAKY_RSPEC_REPORT_PATH', nil)
- stub_env('NEW_FLAKY_RSPEC_REPORT_PATH', nil)
- end
-
- describe '.generate_report?' do
- context "when ENV['FLAKY_RSPEC_GENERATE_REPORT'] is not set" do
- it 'returns false' do
- expect(described_class).not_to be_generate_report
- end
- end
-
- context "when ENV['FLAKY_RSPEC_GENERATE_REPORT'] is set" do
- using RSpec::Parameterized::TableSyntax
-
- where(:env_value, :result) do
- '1' | true
- 'true' | true
- 'foo' | false
- '0' | false
- 'false' | false
- end
-
- with_them do
- before do
- stub_env('FLAKY_RSPEC_GENERATE_REPORT', env_value)
- end
-
- it 'returns false' do
- expect(described_class.generate_report?).to be(result)
- end
- end
- end
- end
-
- describe '.suite_flaky_examples_report_path' do
- context "when ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'] is not set" do
- it 'returns the default path' do
- expect(Rails.root).to receive(:join).with('rspec_flaky/suite-report.json')
- .and_return('root/rspec_flaky/suite-report.json')
-
- expect(described_class.suite_flaky_examples_report_path).to eq('root/rspec_flaky/suite-report.json')
- end
- end
-
- context "when ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'] is set" do
- before do
- stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', 'foo/suite-report.json')
- end
-
- it 'returns the value of the env variable' do
- expect(described_class.suite_flaky_examples_report_path).to eq('foo/suite-report.json')
- end
- end
- end
-
- describe '.flaky_examples_report_path' do
- context "when ENV['FLAKY_RSPEC_REPORT_PATH'] is not set" do
- it 'returns the default path' do
- expect(Rails.root).to receive(:join).with('rspec_flaky/report.json')
- .and_return('root/rspec_flaky/report.json')
-
- expect(described_class.flaky_examples_report_path).to eq('root/rspec_flaky/report.json')
- end
- end
-
- context "when ENV['FLAKY_RSPEC_REPORT_PATH'] is set" do
- before do
- stub_env('FLAKY_RSPEC_REPORT_PATH', 'foo/report.json')
- end
-
- it 'returns the value of the env variable' do
- expect(described_class.flaky_examples_report_path).to eq('foo/report.json')
- end
- end
- end
-
- describe '.new_flaky_examples_report_path' do
- context "when ENV['NEW_FLAKY_RSPEC_REPORT_PATH'] is not set" do
- it 'returns the default path' do
- expect(Rails.root).to receive(:join).with('rspec_flaky/new-report.json')
- .and_return('root/rspec_flaky/new-report.json')
-
- expect(described_class.new_flaky_examples_report_path).to eq('root/rspec_flaky/new-report.json')
- end
- end
-
- context "when ENV['NEW_FLAKY_RSPEC_REPORT_PATH'] is set" do
- before do
- stub_env('NEW_FLAKY_RSPEC_REPORT_PATH', 'foo/new-report.json')
- end
-
- it 'returns the value of the env variable' do
- expect(described_class.new_flaky_examples_report_path).to eq('foo/new-report.json')
- end
- end
- end
-end
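
The deleted config_spec relied on rspec-parameterized's table syntax to cover several ENV values in one block. A self-contained sketch of that pattern (the truthiness predicate here is illustrative, not RspecFlaky::Config's real logic):

require 'rspec'
require 'rspec-parameterized'

RSpec.describe 'truthy ENV values' do
  using RSpec::Parameterized::TableSyntax

  where(:env_value, :result) do
    '1'     | true
    'true'  | true
    'foo'   | false
  end

  with_them do
    it 'maps the string to the expected boolean' do
      expect(%w[1 true].include?(env_value)).to be(result)
    end
  end
end

Each row of the where table becomes its own generated example, which is how the deleted spec could keep a single expectation for five different FLAKY_RSPEC_GENERATE_REPORT values.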
diff --git a/spec/lib/rspec_flaky/example_spec.rb b/spec/lib/rspec_flaky/example_spec.rb
deleted file mode 100644
index 4b45a15c463..00000000000
--- a/spec/lib/rspec_flaky/example_spec.rb
+++ /dev/null
@@ -1,92 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe RspecFlaky::Example do
- let(:example_attrs) do
- {
- id: 'spec/foo/bar_spec.rb:2',
- metadata: {
- file_path: 'spec/foo/bar_spec.rb',
- line_number: 2,
- full_description: 'hello world'
- },
- execution_result: double(status: 'passed', exception: 'BOOM!'),
- attempts: 1
- }
- end
-
- let(:rspec_example) { double(example_attrs) }
-
- describe '#initialize' do
- shared_examples 'a valid Example instance' do
- it 'returns valid attributes' do
- example = described_class.new(args)
-
- expect(example.example_id).to eq(example_attrs[:id])
- end
- end
-
- context 'when given an Rspec::Core::Example that responds to #example' do
- let(:args) { double(example: rspec_example) }
-
- it_behaves_like 'a valid Example instance'
- end
-
- context 'when given an Rspec::Core::Example that does not respond to #example' do
- let(:args) { rspec_example }
-
- it_behaves_like 'a valid Example instance'
- end
- end
-
- subject { described_class.new(rspec_example) }
-
- describe '#uid' do
- it 'returns a hash of the full description' do
- expect(subject.uid).to eq(Digest::MD5.hexdigest("#{subject.description}-#{subject.file}"))
- end
- end
-
- describe '#example_id' do
- it 'returns the ID of the RSpec::Core::Example' do
- expect(subject.example_id).to eq(rspec_example.id)
- end
- end
-
- describe '#attempts' do
- it 'returns the attempts of the RSpec::Core::Example' do
- expect(subject.attempts).to eq(rspec_example.attempts)
- end
- end
-
- describe '#file' do
- it 'returns the metadata[:file_path] of the RSpec::Core::Example' do
- expect(subject.file).to eq(rspec_example.metadata[:file_path])
- end
- end
-
- describe '#line' do
- it 'returns the metadata[:line_number] of the RSpec::Core::Example' do
- expect(subject.line).to eq(rspec_example.metadata[:line_number])
- end
- end
-
- describe '#description' do
- it 'returns the metadata[:full_description] of the RSpec::Core::Example' do
- expect(subject.description).to eq(rspec_example.metadata[:full_description])
- end
- end
-
- describe '#status' do
- it 'returns the execution_result.status of the RSpec::Core::Example' do
- expect(subject.status).to eq(rspec_example.execution_result.status)
- end
- end
-
- describe '#exception' do
- it 'returns the execution_result.exception of the RSpec::Core::Example' do
- expect(subject.exception).to eq(rspec_example.execution_result.exception)
- end
- end
-end
diff --git a/spec/lib/rspec_flaky/flaky_example_spec.rb b/spec/lib/rspec_flaky/flaky_example_spec.rb
deleted file mode 100644
index b1647d5830a..00000000000
--- a/spec/lib/rspec_flaky/flaky_example_spec.rb
+++ /dev/null
@@ -1,165 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe RspecFlaky::FlakyExample, :aggregate_failures do
- let(:flaky_example_attrs) do
- {
- example_id: 'spec/foo/bar_spec.rb:2',
- file: 'spec/foo/bar_spec.rb',
- line: 2,
- description: 'hello world',
- first_flaky_at: 1234,
- last_flaky_at: 2345,
- last_flaky_job: 'https://gitlab.com/gitlab-org/gitlab-foss/-/jobs/12',
- last_attempts_count: 2,
- flaky_reports: 1
- }
- end
-
- let(:example_attrs) do
- {
- uid: 'abc123',
- example_id: flaky_example_attrs[:example_id],
- file: flaky_example_attrs[:file],
- line: flaky_example_attrs[:line],
- description: flaky_example_attrs[:description],
- status: 'passed',
- exception: 'BOOM!',
- attempts: flaky_example_attrs[:last_attempts_count]
- }
- end
-
- let(:example) { double(example_attrs) }
-
- before do
- # Stub these env variables otherwise specs don't behave the same on the CI
- stub_env('CI_PROJECT_URL', nil)
- stub_env('CI_JOB_ID', nil)
- end
-
- describe '#initialize' do
- shared_examples 'a valid FlakyExample instance' do
- let(:flaky_example) { described_class.new(args) }
-
- it 'returns valid attributes' do
- expect(flaky_example.uid).to eq(flaky_example_attrs[:uid])
- expect(flaky_example.file).to eq(flaky_example_attrs[:file])
- expect(flaky_example.line).to eq(flaky_example_attrs[:line])
- expect(flaky_example.description).to eq(flaky_example_attrs[:description])
- expect(flaky_example.first_flaky_at).to eq(expected_first_flaky_at)
- expect(flaky_example.last_flaky_at).to eq(expected_last_flaky_at)
- expect(flaky_example.last_attempts_count).to eq(flaky_example_attrs[:last_attempts_count])
- expect(flaky_example.flaky_reports).to eq(expected_flaky_reports)
- end
- end
-
- context 'when given an Rspec::Example' do
- it_behaves_like 'a valid FlakyExample instance' do
- let(:args) { example }
- let(:expected_first_flaky_at) { nil }
- let(:expected_last_flaky_at) { nil }
- let(:expected_flaky_reports) { 0 }
- end
- end
-
- context 'when given a hash' do
- it_behaves_like 'a valid FlakyExample instance' do
- let(:args) { flaky_example_attrs }
- let(:expected_flaky_reports) { flaky_example_attrs[:flaky_reports] }
- let(:expected_first_flaky_at) { flaky_example_attrs[:first_flaky_at] }
- let(:expected_last_flaky_at) { flaky_example_attrs[:last_flaky_at] }
- end
- end
- end
-
- describe '#update_flakiness!' do
- shared_examples 'an up-to-date FlakyExample instance' do
- let(:flaky_example) { described_class.new(args) }
-
- it 'updates the first_flaky_at' do
- now = Time.now
- expected_first_flaky_at = flaky_example.first_flaky_at || now
- Timecop.freeze(now) { flaky_example.update_flakiness! }
-
- expect(flaky_example.first_flaky_at).to eq(expected_first_flaky_at)
- end
-
- it 'updates the last_flaky_at' do
- now = Time.now
- Timecop.freeze(now) { flaky_example.update_flakiness! }
-
- expect(flaky_example.last_flaky_at).to eq(now)
- end
-
- it 'updates the flaky_reports' do
- expected_flaky_reports = flaky_example.first_flaky_at ? flaky_example.flaky_reports + 1 : 1
-
- expect { flaky_example.update_flakiness! }.to change { flaky_example.flaky_reports }.by(1)
- expect(flaky_example.flaky_reports).to eq(expected_flaky_reports)
- end
-
- context 'when passed a :last_attempts_count' do
- it 'updates the last_attempts_count' do
- flaky_example.update_flakiness!(last_attempts_count: 42)
-
- expect(flaky_example.last_attempts_count).to eq(42)
- end
- end
-
- context 'when run on the CI' do
- before do
- stub_env('CI_PROJECT_URL', 'https://gitlab.com/gitlab-org/gitlab-foss')
- stub_env('CI_JOB_ID', 42)
- end
-
- it 'updates the last_flaky_job' do
- flaky_example.update_flakiness!
-
- expect(flaky_example.last_flaky_job).to eq('https://gitlab.com/gitlab-org/gitlab-foss/-/jobs/42')
- end
- end
- end
-
- context 'when given an Rspec::Example' do
- it_behaves_like 'an up-to-date FlakyExample instance' do
- let(:args) { example }
- end
- end
-
- context 'when given a hash' do
- it_behaves_like 'an up-to-date FlakyExample instance' do
- let(:args) { flaky_example_attrs }
- end
- end
- end
-
- describe '#to_h' do
- shared_examples 'a valid FlakyExample hash' do
- let(:additional_attrs) { {} }
-
- it 'returns a valid hash' do
- flaky_example = described_class.new(args)
- final_hash = flaky_example_attrs.merge(additional_attrs)
-
- expect(flaky_example.to_h).to eq(final_hash)
- end
- end
-
- context 'when given an Rspec::Example' do
- let(:args) { example }
-
- it_behaves_like 'a valid FlakyExample hash' do
- let(:additional_attrs) do
- { first_flaky_at: nil, last_flaky_at: nil, last_flaky_job: nil, flaky_reports: 0 }
- end
- end
- end
-
- context 'when given a hash' do
- let(:args) { flaky_example_attrs }
-
- it_behaves_like 'a valid FlakyExample hash'
- end
- end
-end
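
The update_flakiness! examples above rely on Timecop to pin Time.now while the timestamp is written. A minimal sketch of that pattern outside the deleted spec (TouchTracker is an illustrative class, not part of rspec_flaky):

require 'timecop'

class TouchTracker
  attr_reader :last_touched_at

  # Records the current time, the same shape of call the spec freezes around.
  def touch!
    @last_touched_at = Time.now
  end
end

now = Time.now
tracker = TouchTracker.new

Timecop.freeze(now) { tracker.touch! }

puts(tracker.last_touched_at == now) # => true, because Time.now was frozen to `now`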
diff --git a/spec/lib/rspec_flaky/flaky_examples_collection_spec.rb b/spec/lib/rspec_flaky/flaky_examples_collection_spec.rb
deleted file mode 100644
index b2fd1d3733a..00000000000
--- a/spec/lib/rspec_flaky/flaky_examples_collection_spec.rb
+++ /dev/null
@@ -1,74 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe RspecFlaky::FlakyExamplesCollection, :aggregate_failures do
- let(:collection_hash) do
- {
- a: { example_id: 'spec/foo/bar_spec.rb:2' },
- b: { example_id: 'spec/foo/baz_spec.rb:3' }
- }
- end
-
- let(:collection_report) do
- {
- a: {
- example_id: 'spec/foo/bar_spec.rb:2',
- first_flaky_at: nil,
- last_flaky_at: nil,
- last_flaky_job: nil
- },
- b: {
- example_id: 'spec/foo/baz_spec.rb:3',
- first_flaky_at: nil,
- last_flaky_at: nil,
- last_flaky_job: nil
- }
- }
- end
-
- describe '#initialize' do
- it 'accepts no argument' do
- expect { described_class.new }.not_to raise_error
- end
-
- it 'accepts a hash' do
- expect { described_class.new(collection_hash) }.not_to raise_error
- end
-
- it 'does not accept anything else' do
- expect { described_class.new([1, 2, 3]) }.to raise_error(ArgumentError, "`collection` must be a Hash, Array given!")
- end
- end
-
- describe '#to_h' do
- it 'calls #to_h on the values' do
- collection = described_class.new(collection_hash)
-
- expect(collection.to_h).to eq(collection_report)
- end
- end
-
- describe '#-' do
- it 'returns only examples that are not present in the given collection' do
- collection1 = described_class.new(collection_hash)
- collection2 = described_class.new(
- a: { example_id: 'spec/foo/bar_spec.rb:2' },
- c: { example_id: 'spec/bar/baz_spec.rb:4' })
-
- expect((collection2 - collection1).to_h).to eq(
- c: {
- example_id: 'spec/bar/baz_spec.rb:4',
- first_flaky_at: nil,
- last_flaky_at: nil,
- last_flaky_job: nil
- })
- end
-
- it 'fails if the given collection does not respond to `#key?`' do
- collection = described_class.new(collection_hash)
-
- expect { collection - [1, 2, 3] }.to raise_error(ArgumentError, "`other` must respond to `#key?`, Array does not!")
- end
- end
-end
diff --git a/spec/lib/rspec_flaky/listener_spec.rb b/spec/lib/rspec_flaky/listener_spec.rb
deleted file mode 100644
index 10ed724d4de..00000000000
--- a/spec/lib/rspec_flaky/listener_spec.rb
+++ /dev/null
@@ -1,219 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe RspecFlaky::Listener, :aggregate_failures do
- let(:already_flaky_example_uid) { '6e869794f4cfd2badd93eb68719371d1' }
- let(:suite_flaky_example_report) do
- {
- "#{already_flaky_example_uid}": {
- example_id: 'spec/foo/bar_spec.rb:2',
- file: 'spec/foo/bar_spec.rb',
- line: 2,
- description: 'hello world',
- first_flaky_at: 1234,
- last_flaky_at: 4321,
- last_attempts_count: 3,
- flaky_reports: 1,
- last_flaky_job: nil
- }
- }
- end
-
- let(:already_flaky_example_attrs) do
- {
- id: 'spec/foo/bar_spec.rb:2',
- metadata: {
- file_path: 'spec/foo/bar_spec.rb',
- line_number: 2,
- full_description: 'hello world'
- },
- execution_result: double(status: 'passed', exception: nil)
- }
- end
-
- let(:already_flaky_example) { RspecFlaky::FlakyExample.new(suite_flaky_example_report[already_flaky_example_uid]) }
- let(:new_example_attrs) do
- {
- id: 'spec/foo/baz_spec.rb:3',
- metadata: {
- file_path: 'spec/foo/baz_spec.rb',
- line_number: 3,
- full_description: 'hello GitLab'
- },
- execution_result: double(status: 'passed', exception: nil)
- }
- end
-
- before do
- # Stub these env variables otherwise specs don't behave the same on the CI
- stub_env('CI_PROJECT_URL', nil)
- stub_env('CI_JOB_ID', nil)
- stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', nil)
- end
-
- describe '#initialize' do
- shared_examples 'a valid Listener instance' do
- let(:expected_suite_flaky_examples) { {} }
-
- it 'returns a valid Listener instance' do
- listener = described_class.new
-
- expect(listener.suite_flaky_examples.to_h).to eq(expected_suite_flaky_examples)
- expect(listener.flaky_examples).to eq({})
- end
- end
-
- context 'when no report file exists' do
- it_behaves_like 'a valid Listener instance'
- end
-
- context 'when SUITE_FLAKY_RSPEC_REPORT_PATH is set' do
- let(:report_file_path) { 'foo/report.json' }
-
- before do
- stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', report_file_path)
- end
-
- context 'and report file exists' do
- before do
- expect(File).to receive(:exist?).with(report_file_path).and_return(true)
- end
-
- it 'delegates the load to RspecFlaky::Report' do
- report = RspecFlaky::Report.new(RspecFlaky::FlakyExamplesCollection.new(suite_flaky_example_report))
-
- expect(RspecFlaky::Report).to receive(:load).with(report_file_path).and_return(report)
- expect(described_class.new.suite_flaky_examples.to_h).to eq(report.flaky_examples.to_h)
- end
- end
-
- context 'and report file does not exist' do
- before do
- expect(File).to receive(:exist?).with(report_file_path).and_return(false)
- end
-
- it 'return an empty hash' do
- expect(RspecFlaky::Report).not_to receive(:load)
- expect(described_class.new.suite_flaky_examples.to_h).to eq({})
- end
- end
- end
- end
-
- describe '#example_passed' do
- let(:rspec_example) { double(new_example_attrs) }
- let(:notification) { double(example: rspec_example) }
- let(:listener) { described_class.new(suite_flaky_example_report.to_json) }
-
- shared_examples 'a non-flaky example' do
- it 'does not change the flaky examples hash' do
- expect { listener.example_passed(notification) }
- .not_to change { listener.flaky_examples }
- end
- end
-
- shared_examples 'an existing flaky example' do
- let(:expected_flaky_example) do
- {
- example_id: 'spec/foo/bar_spec.rb:2',
- file: 'spec/foo/bar_spec.rb',
- line: 2,
- description: 'hello world',
- first_flaky_at: 1234,
- last_attempts_count: 2,
- flaky_reports: 2,
- last_flaky_job: nil
- }
- end
-
- it 'changes the flaky examples hash' do
- new_example = RspecFlaky::Example.new(rspec_example)
-
- now = Time.now
- Timecop.freeze(now) do
- expect { listener.example_passed(notification) }
- .to change { listener.flaky_examples[new_example.uid].to_h }
- end
-
- expect(listener.flaky_examples[new_example.uid].to_h)
- .to eq(expected_flaky_example.merge(last_flaky_at: now))
- end
- end
-
- shared_examples 'a new flaky example' do
- let(:expected_flaky_example) do
- {
- example_id: 'spec/foo/baz_spec.rb:3',
- file: 'spec/foo/baz_spec.rb',
- line: 3,
- description: 'hello GitLab',
- last_attempts_count: 2,
- flaky_reports: 1,
- last_flaky_job: nil
- }
- end
-
- it 'changes the all flaky examples hash' do
- new_example = RspecFlaky::Example.new(rspec_example)
-
- now = Time.now
- Timecop.freeze(now) do
- expect { listener.example_passed(notification) }
- .to change { listener.flaky_examples[new_example.uid].to_h }
- end
-
- expect(listener.flaky_examples[new_example.uid].to_h)
- .to eq(expected_flaky_example.merge(first_flaky_at: now, last_flaky_at: now))
- end
- end
-
- describe 'when the RSpec example does not respond to attempts' do
- it_behaves_like 'a non-flaky example'
- end
-
- describe 'when the RSpec example has 1 attempt' do
- let(:rspec_example) { double(new_example_attrs.merge(attempts: 1)) }
-
- it_behaves_like 'a non-flaky example'
- end
-
- describe 'when the RSpec example has 2 attempts' do
- let(:rspec_example) { double(new_example_attrs.merge(attempts: 2)) }
-
- it_behaves_like 'a new flaky example'
-
- context 'with an existing flaky example' do
- let(:rspec_example) { double(already_flaky_example_attrs.merge(attempts: 2)) }
-
- it_behaves_like 'an existing flaky example'
- end
- end
- end
-
- describe '#dump_summary' do
- let(:listener) { described_class.new(suite_flaky_example_report.to_json) }
- let(:new_flaky_rspec_example) { double(new_example_attrs.merge(attempts: 2)) }
- let(:already_flaky_rspec_example) { double(already_flaky_example_attrs.merge(attempts: 2)) }
- let(:notification_new_flaky_rspec_example) { double(example: new_flaky_rspec_example) }
- let(:notification_already_flaky_rspec_example) { double(example: already_flaky_rspec_example) }
-
- context 'when a report file path is set by FLAKY_RSPEC_REPORT_PATH' do
- it 'delegates the writes to RspecFlaky::Report' do
- listener.example_passed(notification_new_flaky_rspec_example)
- listener.example_passed(notification_already_flaky_rspec_example)
-
- report1 = double
- report2 = double
-
- expect(RspecFlaky::Report).to receive(:new).with(listener.flaky_examples).and_return(report1)
- expect(report1).to receive(:write).with(RspecFlaky::Config.flaky_examples_report_path)
-
- expect(RspecFlaky::Report).to receive(:new).with(listener.flaky_examples - listener.suite_flaky_examples).and_return(report2)
- expect(report2).to receive(:write).with(RspecFlaky::Config.new_flaky_examples_report_path)
-
- listener.dump_summary(nil)
- end
- end
- end
-end
diff --git a/spec/lib/rspec_flaky/report_spec.rb b/spec/lib/rspec_flaky/report_spec.rb
deleted file mode 100644
index 5cacfdb82fb..00000000000
--- a/spec/lib/rspec_flaky/report_spec.rb
+++ /dev/null
@@ -1,129 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe RspecFlaky::Report, :aggregate_failures do
- let(:thirty_one_days) { 3600 * 24 * 31 }
- let(:collection_hash) do
- {
- a: { example_id: 'spec/foo/bar_spec.rb:2' },
- b: { example_id: 'spec/foo/baz_spec.rb:3', first_flaky_at: (Time.now - thirty_one_days).to_s, last_flaky_at: (Time.now - thirty_one_days).to_s }
- }
- end
-
- let(:suite_flaky_example_report) do
- {
- '6e869794f4cfd2badd93eb68719371d1': {
- example_id: 'spec/foo/bar_spec.rb:2',
- file: 'spec/foo/bar_spec.rb',
- line: 2,
- description: 'hello world',
- first_flaky_at: 1234,
- last_flaky_at: 4321,
- last_attempts_count: 3,
- flaky_reports: 1,
- last_flaky_job: nil
- }
- }
- end
-
- let(:flaky_examples) { RspecFlaky::FlakyExamplesCollection.new(collection_hash) }
- let(:report) { described_class.new(flaky_examples) }
-
- describe '.load' do
- let!(:report_file) do
- Tempfile.new(%w[rspec_flaky_report .json]).tap do |f|
- f.write(Gitlab::Json.pretty_generate(suite_flaky_example_report))
- f.rewind
- end
- end
-
- after do
- report_file.close
- report_file.unlink
- end
-
- it 'loads the report file' do
- expect(described_class.load(report_file.path).flaky_examples.to_h).to eq(suite_flaky_example_report)
- end
- end
-
- describe '.load_json' do
- let(:report_json) do
- Gitlab::Json.pretty_generate(suite_flaky_example_report)
- end
-
- it 'loads the report file' do
- expect(described_class.load_json(report_json).flaky_examples.to_h).to eq(suite_flaky_example_report)
- end
- end
-
- describe '#initialize' do
- it 'accepts a RspecFlaky::FlakyExamplesCollection' do
- expect { report }.not_to raise_error
- end
-
- it 'does not accept anything else' do
- expect { described_class.new([1, 2, 3]) }.to raise_error(ArgumentError, "`flaky_examples` must be a RspecFlaky::FlakyExamplesCollection, Array given!")
- end
- end
-
- it 'delegates to #flaky_examples using SimpleDelegator' do
- expect(report.__getobj__).to eq(flaky_examples)
- end
-
- describe '#write' do
- let(:report_file_path) { Rails.root.join('tmp', 'rspec_flaky_report.json') }
-
- before do
- FileUtils.rm(report_file_path) if File.exist?(report_file_path)
- end
-
- after do
- FileUtils.rm(report_file_path) if File.exist?(report_file_path)
- end
-
- context 'when RspecFlaky::Config.generate_report? is false' do
- before do
- allow(RspecFlaky::Config).to receive(:generate_report?).and_return(false)
- end
-
- it 'does not write any report file' do
- report.write(report_file_path)
-
- expect(File.exist?(report_file_path)).to be(false)
- end
- end
-
- context 'when RspecFlaky::Config.generate_report? is true' do
- before do
- allow(RspecFlaky::Config).to receive(:generate_report?).and_return(true)
- end
-
- it 'delegates the writes to RspecFlaky::Report' do
- report.write(report_file_path)
-
- expect(File.exist?(report_file_path)).to be(true)
- expect(File.read(report_file_path))
- .to eq(Gitlab::Json.pretty_generate(report.flaky_examples.to_h))
- end
- end
- end
-
- describe '#prune_outdated' do
- it 'returns a new collection without the examples older than 30 days by default' do
- new_report = flaky_examples.to_h.dup.tap { |r| r.delete(:b) }
- new_flaky_examples = report.prune_outdated
-
- expect(new_flaky_examples).to be_a(described_class)
- expect(new_flaky_examples.to_h).to eq(new_report)
- expect(flaky_examples).to have_key(:b)
- end
-
- it 'accepts a given number of days' do
- new_flaky_examples = report.prune_outdated(days: 32)
-
- expect(new_flaky_examples.to_h).to eq(report.to_h)
- end
- end
-end
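
The delegation assertion above (#__getobj__) reflects that the report wraps its collection with SimpleDelegator. A small self-contained sketch of that shape (ReportSketch and its #write are illustrative stand-ins, not RspecFlaky::Report):

require 'delegate'
require 'json'
require 'tempfile'

class ReportSketch < SimpleDelegator
  # Calls not defined here (#to_h, #keys, ...) are forwarded to the wrapped collection.
  def write(path)
    File.write(path, JSON.pretty_generate(to_h))
  end
end

collection = { a: { example_id: 'spec/foo/bar_spec.rb:2' } }
report = ReportSketch.new(collection)

puts report.__getobj__.equal?(collection) # => true; same idea as the #__getobj__ check above
puts report.keys.inspect                  # => [:a], forwarded to the underlying Hash

Tempfile.create(%w[rspec_flaky_report .json]) do |file|
  report.write(file.path)
  puts File.read(file.path) # pretty-printed JSON of the wrapped collection
end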
diff --git a/spec/lib/system_check/sidekiq_check_spec.rb b/spec/lib/system_check/sidekiq_check_spec.rb
new file mode 100644
index 00000000000..c2f61e0e4b7
--- /dev/null
+++ b/spec/lib/system_check/sidekiq_check_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe SystemCheck::SidekiqCheck do
+ describe '#multi_check' do
+ def stub_ps_output(output)
+ allow(Gitlab::Popen).to receive(:popen).with(%w(ps uxww)).and_return([output, nil])
+ end
+
+ def expect_check_output(matcher)
+ expect { subject.multi_check }.to output(matcher).to_stdout
+ end
+
+ it 'fails when no worker processes are running' do
+ stub_ps_output <<~PS
+ root 2193947 0.9 0.1 146564 18104 ? Ssl 17:34 0:00 ruby bin/sidekiq-cluster * -P ...
+ PS
+
+ expect_check_output include(
+ 'Running? ... no',
+ 'Please fix the error above and rerun the checks.'
+ )
+ end
+
+ it 'fails when more than one cluster process is running' do
+ stub_ps_output <<~PS
+ root 2193947 0.9 0.1 146564 18104 ? Ssl 17:34 0:00 ruby bin/sidekiq-cluster * -P ...
+ root 2193948 0.9 0.1 146564 18104 ? Ssl 17:34 0:00 ruby bin/sidekiq-cluster * -P ...
+ root 2193955 92.2 3.1 4675972 515516 ? Sl 17:34 0:13 sidekiq 5.2.9 ...
+ PS
+
+ expect_check_output include(
+ 'Running? ... yes',
+ 'Number of Sidekiq processes (cluster/worker) ... 2/1',
+ 'Please fix the error above and rerun the checks.'
+ )
+ end
+
+ it 'succeeds when one cluster process and one or more worker processes are running' do
+ stub_ps_output <<~PS
+ root 2193947 0.9 0.1 146564 18104 ? Ssl 17:34 0:00 ruby bin/sidekiq-cluster * -P ...
+ root 2193955 92.2 3.1 4675972 515516 ? Sl 17:34 0:13 sidekiq 5.2.9 ...
+ root 2193956 92.2 3.1 4675972 515516 ? Sl 17:34 0:13 sidekiq 5.2.9 ...
+ PS
+
+ expect_check_output <<~OUTPUT
+ Running? ... yes
+ Number of Sidekiq processes (cluster/worker) ... 1/2
+ OUTPUT
+ end
+
+ # TODO: Running without a cluster is deprecated and will be removed in GitLab 14.0
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/323225
+ context 'when running without a cluster' do
+ it 'fails when more than one worker process is running' do
+ stub_ps_output <<~PS
+ root 2193955 92.2 3.1 4675972 515516 ? Sl 17:34 0:13 sidekiq 5.2.9 ...
+ root 2193956 92.2 3.1 4675972 515516 ? Sl 17:34 0:13 sidekiq 5.2.9 ...
+ PS
+
+ expect_check_output include(
+ 'Running? ... yes',
+ 'Number of Sidekiq processes (cluster/worker) ... 0/2',
+ 'Please fix the error above and rerun the checks.'
+ )
+ end
+
+ it 'succeeds when one worker process is running' do
+ stub_ps_output <<~PS
+ root 2193955 92.2 3.1 4675972 515516 ? Sl 17:34 0:13 sidekiq 5.2.9 ...
+ PS
+
+ expect_check_output <<~OUTPUT
+ Running? ... yes
+ Number of Sidekiq processes (cluster/worker) ... 0/1
+ OUTPUT
+ end
+ end
+ end
+end
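
The expected 'cluster/worker' counts in this new spec come from parsing ps output. Since the SystemCheck::SidekiqCheck implementation itself is not part of this change, the matching rules below are assumptions; this is only a sketch of the kind of counting the examples describe:

# Illustrative counting logic; the real check's rules may differ.
ps_output = <<~PS
  root 2193947 0.9 0.1 146564 18104 ? Ssl 17:34 0:00 ruby bin/sidekiq-cluster * -P ...
  root 2193955 92.2 3.1 4675972 515516 ? Sl 17:34 0:13 sidekiq 5.2.9 ...
  root 2193956 92.2 3.1 4675972 515516 ? Sl 17:34 0:13 sidekiq 5.2.9 ...
PS

lines = ps_output.lines
cluster_count = lines.count { |line| line.include?('sidekiq-cluster') }
worker_count  = lines.count { |line| line.match?(/sidekiq \d+\.\d+\.\d+/) }

puts "Number of Sidekiq processes (cluster/worker) ... #{cluster_count}/#{worker_count}"
# => Number of Sidekiq processes (cluster/worker) ... 1/2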