From 9f46488805e86b1bc341ea1620b866016c2ce5ed Mon Sep 17 00:00:00 2001 From: GitLab Bot Date: Wed, 20 May 2020 14:34:42 +0000 Subject: Add latest changes from gitlab-org/gitlab@13-0-stable-ee --- .../gitlab/alert_management/alert_params_spec.rb | 94 +++++ .../alert_management/alert_status_counts_spec.rb | 55 +++ spec/lib/gitlab/alerting/alert_spec.rb | 24 ++ .../alerting/notification_payload_parser_spec.rb | 29 +- .../analytics/cycle_analytics/median_spec.rb | 42 +++ spec/lib/gitlab/app_json_logger_spec.rb | 4 +- spec/lib/gitlab/application_context_spec.rb | 14 +- spec/lib/gitlab/auth/auth_finders_spec.rb | 160 ++++++--- spec/lib/gitlab/auth/o_auth/provider_spec.rb | 12 +- spec/lib/gitlab/auth_spec.rb | 8 + ...nvironment_id_deployment_merge_requests_spec.rb | 46 +++ .../backfill_snippet_repositories_spec.rb | 187 +++++++++- .../migrate_issue_trackers_sensitive_data_spec.rb | 73 ++-- spec/lib/gitlab/chat/responder/mattermost_spec.rb | 117 ++++++ .../gitlab/checks/push_file_count_check_spec.rb | 4 +- spec/lib/gitlab/ci/config/entry/artifacts_spec.rb | 48 ++- spec/lib/gitlab/ci/config/entry/reports_spec.rb | 2 + spec/lib/gitlab/ci/config/entry/trigger_spec.rb | 13 - spec/lib/gitlab/ci/cron_parser_spec.rb | 314 ++++++++++------- .../gitlab/ci/parsers/accessibility/pa11y_spec.rb | 118 +++++++ .../lib/gitlab/ci/parsers/terraform/tfplan_spec.rb | 51 +++ spec/lib/gitlab/ci/parsers/test/junit_spec.rb | 60 +++- spec/lib/gitlab/ci/parsers_spec.rb | 16 + spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb | 10 + .../reports/accessibility_reports_comparer_spec.rb | 270 ++++++++++++++ .../ci/reports/accessibility_reports_spec.rb | 232 ++++++++++++ .../gitlab/ci/reports/terraform_reports_spec.rb | 34 ++ spec/lib/gitlab/ci/reports/test_case_spec.rb | 2 +- spec/lib/gitlab/ci/reports/test_reports_spec.rb | 25 +- spec/lib/gitlab/ci/reports/test_suite_spec.rb | 27 +- ...wser_performance_testing_gitlab_ci_yaml_spec.rb | 85 +++++ .../ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb | 58 +++ .../Jobs/code_quality_gitlab_ci_yaml_spec.rb | 86 +++++ .../templates/Jobs/deploy_gitlab_ci_yaml_spec.rb | 222 ++++++++++++ .../ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb | 86 +++++ .../templates/auto_devops_gitlab_ci_yaml_spec.rb | 100 +++++- spec/lib/gitlab/ci/yaml_processor_spec.rb | 22 +- spec/lib/gitlab/code_navigation_path_spec.rb | 31 +- .../external_database_checker_spec.rb | 56 +++ .../cycle_analytics/group_stage_summary_spec.rb | 176 ---------- .../gitlab/cycle_analytics/summary/value_spec.rb | 33 ++ spec/lib/gitlab/danger/changelog_spec.rb | 8 - spec/lib/gitlab/danger/helper_spec.rb | 65 ++++ spec/lib/gitlab/danger/teammate_spec.rb | 13 +- spec/lib/gitlab/database/batch_count_spec.rb | 8 + spec/lib/gitlab/database/migration_helpers_spec.rb | 391 +++++++++++++-------- .../partitioned_foreign_key_spec.rb | 48 +++ .../partitioning_migration_helpers_spec.rb | 230 ++++++++++++ .../v1/rename_base_spec.rb | 4 +- spec/lib/gitlab/database/with_lock_retries_spec.rb | 21 +- spec/lib/gitlab/diff/file_spec.rb | 55 +++ .../gitlab/diff/formatters/text_formatter_spec.rb | 18 + spec/lib/gitlab/diff/position_spec.rb | 4 +- spec/lib/gitlab/elasticsearch/logs/lines_spec.rb | 24 +- spec/lib/gitlab/elasticsearch/logs/pods_spec.rb | 4 +- spec/lib/gitlab/email/handler_spec.rb | 12 + .../email/hook/smime_signature_interceptor_spec.rb | 23 +- spec/lib/gitlab/email/smime/certificate_spec.rb | 55 ++- spec/lib/gitlab/email/smime/signer_spec.rb | 35 +- .../exclusive_lease_helpers/sleeping_lock_spec.rb | 102 ++++++ 
spec/lib/gitlab/exclusive_lease_helpers_spec.rb | 31 +- spec/lib/gitlab/exclusive_lease_spec.rb | 82 ++++- spec/lib/gitlab/experimentation_spec.rb | 53 +-- spec/lib/gitlab/gfm/reference_rewriter_spec.rb | 12 + spec/lib/gitlab/git/attributes_parser_spec.rb | 8 + spec/lib/gitlab/git/blob_spec.rb | 12 + spec/lib/gitlab/git/commit_spec.rb | 20 ++ spec/lib/gitlab/git/tag_spec.rb | 30 ++ spec/lib/gitlab/git_access_design_spec.rb | 45 +++ spec/lib/gitlab/git_access_snippet_spec.rb | 153 ++++++-- spec/lib/gitlab/git_access_wiki_spec.rb | 10 +- spec/lib/gitlab/gl_repository/repo_type_spec.rb | 37 ++ spec/lib/gitlab/gl_repository_spec.rb | 15 + spec/lib/gitlab/google_code_import/client_spec.rb | 2 +- .../lib/gitlab/google_code_import/importer_spec.rb | 2 +- .../formatters/lograge_with_timestamp_spec.rb | 2 +- .../loggers/cloudflare_logger_spec.rb | 31 ++ .../grape_logging/loggers/exception_logger_spec.rb | 69 +++- .../authorize/authorize_field_service_spec.rb | 10 + .../graphql/pagination/keyset/connection_spec.rb | 15 +- spec/lib/gitlab/graphql_logger_spec.rb | 6 +- spec/lib/gitlab/health_checks/master_check_spec.rb | 5 +- spec/lib/gitlab/hook_data/issuable_builder_spec.rb | 1 + spec/lib/gitlab/import_export/all_models.yml | 22 +- .../import_export/attribute_configuration_spec.rb | 3 - .../import_export/design_repo_restorer_spec.rb | 42 +++ .../gitlab/import_export/design_repo_saver_spec.rb | 37 ++ .../import_export/fast_hash_serializer_spec.rb | 2 +- .../group/legacy_tree_restorer_spec.rb | 2 +- .../import_export/group/tree_restorer_spec.rb | 184 ++++++++++ .../gitlab/import_export/group/tree_saver_spec.rb | 140 ++++++++ .../import_export_equivalence_spec.rb | 4 +- .../import_export/import_test_coverage_spec.rb | 13 +- spec/lib/gitlab/import_export/importer_spec.rb | 79 +++-- .../import_export/json/legacy_reader/file_spec.rb | 2 +- .../import_export/json/legacy_reader/hash_spec.rb | 4 +- .../import_export/json/ndjson_reader_spec.rb | 14 +- spec/lib/gitlab/import_export/lfs_saver_spec.rb | 2 +- .../import_export/project/export_task_spec.rb | 43 ++- .../import_export/project/import_task_spec.rb | 49 +-- .../import_export/project/tree_restorer_spec.rb | 108 +++--- .../import_export/project/tree_saver_spec.rb | 25 ++ .../import_export/relation_tree_restorer_spec.rb | 14 +- .../gitlab/import_export/safe_model_attributes.yml | 2 + spec/lib/gitlab/instrumentation_helper_spec.rb | 6 +- spec/lib/gitlab/jira_import/base_importer_spec.rb | 20 +- .../jira_import/handle_labels_service_spec.rb | 53 +++ .../gitlab/jira_import/issue_serializer_spec.rb | 150 +++++--- .../lib/gitlab/jira_import/issues_importer_spec.rb | 36 +- .../lib/gitlab/jira_import/labels_importer_spec.rb | 83 ++++- .../gitlab/jira_import/metadata_collector_spec.rb | 178 ++++++++++ spec/lib/gitlab/jira_import/user_mapper_spec.rb | 80 +++++ spec/lib/gitlab/json_logger_spec.rb | 4 +- spec/lib/gitlab/json_spec.rb | 152 ++++++-- spec/lib/gitlab/kubernetes/helm/api_spec.rb | 25 +- .../gitlab/kubernetes/helm/base_command_spec.rb | 80 +---- .../gitlab/kubernetes/helm/delete_command_spec.rb | 41 +-- .../gitlab/kubernetes/helm/init_command_spec.rb | 73 +--- .../gitlab/kubernetes/helm/install_command_spec.rb | 84 +---- .../gitlab/kubernetes/helm/parsers/list_v2_spec.rb | 100 ++++++ .../gitlab/kubernetes/helm/patch_command_spec.rb | 68 +--- spec/lib/gitlab/kubernetes/helm/pod_spec.rb | 2 +- .../gitlab/kubernetes/helm/reset_command_spec.rb | 33 +- spec/lib/gitlab/kubernetes/kube_client_spec.rb | 108 +++++- spec/lib/gitlab/kubernetes/network_policy_spec.rb 
| 224 ++++++++++++ .../gitlab/legacy_github_import/importer_spec.rb | 1 + spec/lib/gitlab/logging/cloudflare_helper_spec.rb | 52 +++ spec/lib/gitlab/lograge/custom_options_spec.rb | 33 +- spec/lib/gitlab/mail_room/mail_room_spec.rb | 3 +- .../gitlab/metrics/background_transaction_spec.rb | 6 - .../dashboard/stages/grafana_formatter_spec.rb | 6 +- spec/lib/gitlab/metrics/dashboard/url_spec.rb | 32 +- .../metrics/exporter/sidekiq_exporter_spec.rb | 2 +- spec/lib/gitlab/metrics/method_call_spec.rb | 19 - spec/lib/gitlab/metrics/metric_spec.rb | 71 ---- spec/lib/gitlab/metrics/rack_middleware_spec.rb | 25 -- .../metrics/samplers/database_sampler_spec.rb | 49 +++ .../gitlab/metrics/samplers/influx_sampler_spec.rb | 105 ------ .../gitlab/metrics/samplers/ruby_sampler_spec.rb | 32 +- spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb | 7 - .../gitlab/metrics/subscribers/action_view_spec.rb | 6 - spec/lib/gitlab/metrics/system_spec.rb | 117 +++++- spec/lib/gitlab/metrics/transaction_spec.rb | 165 +-------- spec/lib/gitlab/metrics/web_transaction_spec.rb | 144 +------- spec/lib/gitlab/metrics_spec.rb | 78 ---- spec/lib/gitlab/middleware/multipart_spec.rb | 11 + spec/lib/gitlab/omniauth_initializer_spec.rb | 16 + spec/lib/gitlab/pagination/keyset_spec.rb | 12 + spec/lib/gitlab/path_regex_spec.rb | 15 + spec/lib/gitlab/performance_bar_spec.rb | 59 ++-- .../phabricator_import/conduit/response_spec.rb | 4 +- .../conduit/tasks_response_spec.rb | 2 +- .../conduit/users_response_spec.rb | 2 +- .../phabricator_import/issues/importer_spec.rb | 2 +- spec/lib/gitlab/project_search_results_spec.rb | 113 ++++-- spec/lib/gitlab/prometheus_client_spec.rb | 4 +- spec/lib/gitlab/regex_spec.rb | 33 ++ spec/lib/gitlab/repository_url_builder_spec.rb | 2 +- spec/lib/gitlab/request_context_spec.rb | 4 + spec/lib/gitlab/runtime_spec.rb | 13 + spec/lib/gitlab/search_results_spec.rb | 10 +- spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb | 8 - .../gitlab/sidekiq_logging/json_formatter_spec.rb | 27 +- .../sidekiq_logging/structured_logger_spec.rb | 25 +- .../duplicate_jobs/duplicate_job_spec.rb | 19 +- .../extra_done_log_metadata_spec.rb | 35 ++ spec/lib/gitlab/sidekiq_middleware_spec.rb | 24 +- spec/lib/gitlab/snippet_search_results_spec.rb | 29 +- spec/lib/gitlab/static_site_editor/config_spec.rb | 28 +- spec/lib/gitlab/throttle_spec.rb | 78 +--- spec/lib/gitlab/tracking_spec.rb | 8 +- spec/lib/gitlab/tree_summary_spec.rb | 16 +- spec/lib/gitlab/url_builder_spec.rb | 13 +- .../usage_data_counters/designs_counter_spec.rb | 14 + .../usage_data_counters/web_ide_counter_spec.rb | 47 +-- spec/lib/gitlab/usage_data_spec.rb | 344 +++++++++++++++--- spec/lib/gitlab/user_access_snippet_spec.rb | 52 +++ spec/lib/gitlab/utils/measuring_spec.rb | 40 +++ spec/lib/gitlab/utils_spec.rb | 34 +- spec/lib/gitlab/view/presenter/factory_spec.rb | 6 +- .../gitlab/wiki_pages/front_matter_parser_spec.rb | 6 +- spec/lib/gitlab/with_request_store_spec.rb | 30 ++ spec/lib/gitlab/workhorse_spec.rb | 88 ++--- spec/lib/gitlab/x509/signature_spec.rb | 160 +++++++++ spec/lib/gitlab/x509/tag_spec.rb | 42 +++ 185 files changed, 7239 insertions(+), 2397 deletions(-) create mode 100644 spec/lib/gitlab/alert_management/alert_params_spec.rb create mode 100644 spec/lib/gitlab/alert_management/alert_status_counts_spec.rb create mode 100644 spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb create mode 100644 spec/lib/gitlab/background_migration/backfill_environment_id_deployment_merge_requests_spec.rb create mode 100644 
spec/lib/gitlab/chat/responder/mattermost_spec.rb create mode 100644 spec/lib/gitlab/ci/parsers/accessibility/pa11y_spec.rb create mode 100644 spec/lib/gitlab/ci/parsers/terraform/tfplan_spec.rb create mode 100644 spec/lib/gitlab/ci/reports/accessibility_reports_comparer_spec.rb create mode 100644 spec/lib/gitlab/ci/reports/accessibility_reports_spec.rb create mode 100644 spec/lib/gitlab/ci/reports/terraform_reports_spec.rb create mode 100644 spec/lib/gitlab/ci/templates/Jobs/browser_performance_testing_gitlab_ci_yaml_spec.rb create mode 100644 spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb create mode 100644 spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb create mode 100644 spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb create mode 100644 spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb create mode 100644 spec/lib/gitlab/config_checker/external_database_checker_spec.rb delete mode 100644 spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb create mode 100644 spec/lib/gitlab/cycle_analytics/summary/value_spec.rb create mode 100644 spec/lib/gitlab/database/partitioning_migration_helpers/partitioned_foreign_key_spec.rb create mode 100644 spec/lib/gitlab/database/partitioning_migration_helpers_spec.rb create mode 100644 spec/lib/gitlab/exclusive_lease_helpers/sleeping_lock_spec.rb create mode 100644 spec/lib/gitlab/git_access_design_spec.rb create mode 100644 spec/lib/gitlab/grape_logging/loggers/cloudflare_logger_spec.rb create mode 100644 spec/lib/gitlab/import_export/design_repo_restorer_spec.rb create mode 100644 spec/lib/gitlab/import_export/design_repo_saver_spec.rb create mode 100644 spec/lib/gitlab/import_export/group/tree_restorer_spec.rb create mode 100644 spec/lib/gitlab/import_export/group/tree_saver_spec.rb create mode 100644 spec/lib/gitlab/jira_import/handle_labels_service_spec.rb create mode 100644 spec/lib/gitlab/jira_import/metadata_collector_spec.rb create mode 100644 spec/lib/gitlab/jira_import/user_mapper_spec.rb create mode 100644 spec/lib/gitlab/kubernetes/helm/parsers/list_v2_spec.rb create mode 100644 spec/lib/gitlab/kubernetes/network_policy_spec.rb create mode 100644 spec/lib/gitlab/logging/cloudflare_helper_spec.rb delete mode 100644 spec/lib/gitlab/metrics/metric_spec.rb create mode 100644 spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb delete mode 100644 spec/lib/gitlab/metrics/samplers/influx_sampler_spec.rb create mode 100644 spec/lib/gitlab/sidekiq_middleware/extra_done_log_metadata_spec.rb create mode 100644 spec/lib/gitlab/usage_data_counters/designs_counter_spec.rb create mode 100644 spec/lib/gitlab/utils/measuring_spec.rb create mode 100644 spec/lib/gitlab/with_request_store_spec.rb create mode 100644 spec/lib/gitlab/x509/tag_spec.rb (limited to 'spec/lib/gitlab') diff --git a/spec/lib/gitlab/alert_management/alert_params_spec.rb b/spec/lib/gitlab/alert_management/alert_params_spec.rb new file mode 100644 index 00000000000..5cf34038f68 --- /dev/null +++ b/spec/lib/gitlab/alert_management/alert_params_spec.rb @@ -0,0 +1,94 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::AlertManagement::AlertParams do + let_it_be(:project) { create(:project, :repository, :private) } + + describe '.from_generic_alert' do + let(:started_at) { Time.current.change(usec: 0).rfc3339 } + let(:default_payload) do + { + 'title' => 'Alert title', + 'description' => 'Description', + 'monitoring_tool' => 'Monitoring tool name', + 'service' => 'Service', + 'hosts' => 
['gitlab.com'], + 'start_time' => started_at, + 'some' => { 'extra' => { 'payload' => 'here' } } + } + end + let(:payload) { default_payload } + + subject { described_class.from_generic_alert(project: project, payload: payload) } + + it 'returns Alert compatible parameters' do + is_expected.to eq( + project_id: project.id, + title: 'Alert title', + description: 'Description', + monitoring_tool: 'Monitoring tool name', + service: 'Service', + severity: 'critical', + hosts: ['gitlab.com'], + payload: payload, + started_at: started_at + ) + end + + context 'when severity given' do + let(:payload) { default_payload.merge(severity: 'low') } + + it 'returns Alert compatible parameters' do + expect(subject[:severity]).to eq('low') + end + end + + context 'when there are no hosts in the payload' do + let(:payload) { {} } + + it 'hosts param is an empty array' do + expect(subject[:hosts]).to be_empty + end + end + end + + describe '.from_prometheus_alert' do + let(:payload) do + { + 'status' => 'firing', + 'labels' => { + 'alertname' => 'GitalyFileServerDown', + 'channel' => 'gitaly', + 'pager' => 'pagerduty', + 'severity' => 's1' + }, + 'annotations' => { + 'description' => 'Alert description', + 'runbook' => 'troubleshooting/gitaly-down.md', + 'title' => 'Alert title' + }, + 'startsAt' => '2020-04-27T10:10:22.265949279Z', + 'endsAt' => '0001-01-01T00:00:00Z', + 'generatorURL' => 'http://8d467bd4607a:9090/graph?g0.expr=vector%281%29&g0.tab=1', + 'fingerprint' => 'b6ac4d42057c43c1' + } + end + let(:parsed_alert) { Gitlab::Alerting::Alert.new(project: project, payload: payload) } + + subject { described_class.from_prometheus_alert(project: project, parsed_alert: parsed_alert) } + + it 'returns Alert-compatible params' do + is_expected.to eq( + project_id: project.id, + title: 'Alert title', + description: 'Alert description', + monitoring_tool: 'Prometheus', + payload: payload, + started_at: parsed_alert.starts_at, + ended_at: parsed_alert.ends_at, + fingerprint: parsed_alert.gitlab_fingerprint + ) + end + end +end diff --git a/spec/lib/gitlab/alert_management/alert_status_counts_spec.rb b/spec/lib/gitlab/alert_management/alert_status_counts_spec.rb new file mode 100644 index 00000000000..816ed918fe8 --- /dev/null +++ b/spec/lib/gitlab/alert_management/alert_status_counts_spec.rb @@ -0,0 +1,55 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::AlertManagement::AlertStatusCounts do + let_it_be(:current_user) { create(:user) } + let_it_be(:project) { create(:project) } + let_it_be(:alert_1) { create(:alert_management_alert, :resolved, project: project) } + let_it_be(:alert_2) { create(:alert_management_alert, :ignored, project: project) } + let_it_be(:alert_3) { create(:alert_management_alert) } + let(:params) { {} } + + describe '#execute' do + subject(:counts) { described_class.new(current_user, project, params) } + + context 'for an unauthorized user' do + it 'returns zero for all statuses' do + expect(counts.open).to eq(0) + expect(counts.all).to eq(0) + + AlertManagement::Alert::STATUSES.each_key do |status| + expect(counts.send(status)).to eq(0) + end + end + end + + context 'for an authorized user' do + before do + project.add_developer(current_user) + end + + it 'returns the correct counts for each status' do + expect(counts.open).to eq(0) + expect(counts.all).to eq(2) + expect(counts.resolved).to eq(1) + expect(counts.ignored).to eq(1) + expect(counts.triggered).to eq(0) + expect(counts.acknowledged).to eq(0) + end + + context 'when filtering params are included' 
do + let(:params) { { status: AlertManagement::Alert::STATUSES[:resolved] } } + + it 'returns the correct counts for each status' do + expect(counts.open).to eq(0) + expect(counts.all).to eq(1) + expect(counts.resolved).to eq(1) + expect(counts.ignored).to eq(0) + expect(counts.triggered).to eq(0) + expect(counts.acknowledged).to eq(0) + end + end + end + end +end diff --git a/spec/lib/gitlab/alerting/alert_spec.rb b/spec/lib/gitlab/alerting/alert_spec.rb index 6d97f08af91..a0582515f3d 100644 --- a/spec/lib/gitlab/alerting/alert_spec.rb +++ b/spec/lib/gitlab/alerting/alert_spec.rb @@ -246,6 +246,30 @@ describe Gitlab::Alerting::Alert do it_behaves_like 'parse payload', 'annotations/gitlab_incident_markdown' end + describe '#gitlab_fingerprint' do + subject { alert.gitlab_fingerprint } + + context 'when the alert is a GitLab managed alert' do + include_context 'gitlab alert' + + it 'returns a fingerprint' do + plain_fingerprint = [alert.metric_id, alert.starts_at].join('/') + + is_expected.to eq(Digest::SHA1.hexdigest(plain_fingerprint)) + end + end + + context 'when the alert is from self managed Prometheus' do + include_context 'full query' + + it 'returns a fingerprint' do + plain_fingerprint = [alert.starts_at, alert.title, alert.full_query].join('/') + + is_expected.to eq(Digest::SHA1.hexdigest(plain_fingerprint)) + end + end + end + describe '#valid?' do before do payload.update( diff --git a/spec/lib/gitlab/alerting/notification_payload_parser_spec.rb b/spec/lib/gitlab/alerting/notification_payload_parser_spec.rb index a38aea7b972..f32095b3c86 100644 --- a/spec/lib/gitlab/alerting/notification_payload_parser_spec.rb +++ b/spec/lib/gitlab/alerting/notification_payload_parser_spec.rb @@ -12,7 +12,8 @@ describe Gitlab::Alerting::NotificationPayloadParser do 'description' => 'Description', 'monitoring_tool' => 'Monitoring tool name', 'service' => 'Service', - 'hosts' => ['gitlab.com'] + 'hosts' => ['gitlab.com'], + 'severity' => 'low' } end @@ -26,7 +27,8 @@ describe Gitlab::Alerting::NotificationPayloadParser do 'description' => 'Description', 'monitoring_tool' => 'Monitoring tool name', 'service' => 'Service', - 'hosts' => ['gitlab.com'] + 'hosts' => ['gitlab.com'], + 'severity' => 'low' }, 'startsAt' => starts_at.rfc3339 } @@ -67,11 +69,24 @@ describe Gitlab::Alerting::NotificationPayloadParser do let(:payload) { {} } it 'returns default parameters' do - is_expected.to eq( - 'annotations' => { 'title' => 'New: Incident' }, + is_expected.to match( + 'annotations' => { + 'title' => described_class::DEFAULT_TITLE, + 'severity' => described_class::DEFAULT_SEVERITY + }, 'startsAt' => starts_at.rfc3339 ) end + + context 'when severity is blank' do + before do + payload[:severity] = '' + end + + it 'sets severity to the default ' do + expect(subject.dig('annotations', 'severity')).to eq(described_class::DEFAULT_SEVERITY) + end + end end context 'when payload attributes have blank lines' do @@ -88,7 +103,10 @@ describe Gitlab::Alerting::NotificationPayloadParser do it 'returns default parameters' do is_expected.to eq( - 'annotations' => { 'title' => 'New: Incident' }, + 'annotations' => { + 'title' => 'New: Incident', + 'severity' => described_class::DEFAULT_SEVERITY + }, 'startsAt' => starts_at.rfc3339 ) end @@ -112,6 +130,7 @@ describe Gitlab::Alerting::NotificationPayloadParser do is_expected.to eq( 'annotations' => { 'title' => 'New: Incident', + 'severity' => described_class::DEFAULT_SEVERITY, 'description' => 'Description', 'additional.params.1' => 'Some value 1', 'additional.params.2' 
=> 'Some value 2' diff --git a/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb new file mode 100644 index 00000000000..92ecec350ae --- /dev/null +++ b/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::Analytics::CycleAnalytics::Median do + let_it_be(:project) { create(:project, :repository) } + let(:query) { Project.joins(merge_requests: :metrics) } + + let(:stage) do + build( + :cycle_analytics_project_stage, + start_event_identifier: Gitlab::Analytics::CycleAnalytics::StageEvents::MergeRequestCreated.identifier, + end_event_identifier: Gitlab::Analytics::CycleAnalytics::StageEvents::MergeRequestMerged.identifier, + project: project + ) + end + + subject { described_class.new(stage: stage, query: query).seconds } + + around do |example| + Timecop.freeze { example.run } + end + + it 'retruns nil when no results' do + expect(subject).to eq(nil) + end + + it 'returns median duration seconds as float' do + merge_request1 = create(:merge_request, source_branch: '1', target_project: project, source_project: project) + merge_request2 = create(:merge_request, source_branch: '2', target_project: project, source_project: project) + + Timecop.travel(5.minutes.from_now) do + merge_request1.metrics.update!(merged_at: Time.zone.now) + end + + Timecop.travel(10.minutes.from_now) do + merge_request2.metrics.update!(merged_at: Time.zone.now) + end + + expect(subject).to be_within(0.5).of(7.5.minutes.seconds) + end +end diff --git a/spec/lib/gitlab/app_json_logger_spec.rb b/spec/lib/gitlab/app_json_logger_spec.rb index 22a398f8bca..d11456236cc 100644 --- a/spec/lib/gitlab/app_json_logger_spec.rb +++ b/spec/lib/gitlab/app_json_logger_spec.rb @@ -9,10 +9,10 @@ describe Gitlab::AppJsonLogger do let(:string_message) { 'Information' } it 'logs a hash as a JSON' do - expect(JSON.parse(subject.format_message('INFO', Time.now, nil, hash_message))).to include(hash_message) + expect(Gitlab::Json.parse(subject.format_message('INFO', Time.now, nil, hash_message))).to include(hash_message) end it 'logs a string as a JSON' do - expect(JSON.parse(subject.format_message('INFO', Time.now, nil, string_message))).to include('message' => string_message) + expect(Gitlab::Json.parse(subject.format_message('INFO', Time.now, nil, string_message))).to include('message' => string_message) end end diff --git a/spec/lib/gitlab/application_context_spec.rb b/spec/lib/gitlab/application_context_spec.rb index 6674ea059a0..3be967ac8a4 100644 --- a/spec/lib/gitlab/application_context_spec.rb +++ b/spec/lib/gitlab/application_context_spec.rb @@ -55,10 +55,10 @@ describe Gitlab::ApplicationContext do end describe '#to_lazy_hash' do - let(:user) { build(:user) } - let(:project) { build(:project) } - let(:namespace) { create(:group) } - let(:subgroup) { create(:group, parent: namespace) } + let_it_be(:user) { create(:user) } + let_it_be(:project) { create(:project) } + let_it_be(:namespace) { create(:group) } + let_it_be(:subgroup) { create(:group, parent: namespace) } def result(context) context.to_lazy_hash.transform_values { |v| v.respond_to?(:call) ? 
v.call : v } @@ -106,5 +106,11 @@ describe Gitlab::ApplicationContext do context.use {} end + + it 'does not cause queries' do + context = described_class.new(project: create(:project), namespace: create(:group, :nested), user: create(:user)) + + expect { context.use { Labkit::Context.current.to_h } }.not_to exceed_query_limit(0) + end end end diff --git a/spec/lib/gitlab/auth/auth_finders_spec.rb b/spec/lib/gitlab/auth/auth_finders_spec.rb index 0b6fda31d7b..774a87752b9 100644 --- a/spec/lib/gitlab/auth/auth_finders_spec.rb +++ b/spec/lib/gitlab/auth/auth_finders_spec.rb @@ -17,6 +17,17 @@ describe Gitlab::Auth::AuthFinders do request.update_param(key, value) end + def set_header(key, value) + env[key] = value + end + + def set_basic_auth_header(username, password) + set_header( + 'HTTP_AUTHORIZATION', + ActionController::HttpAuthentication::Basic.encode_credentials(username, password) + ) + end + describe '#find_user_from_warden' do context 'with CSRF token' do before do @@ -31,7 +42,7 @@ describe Gitlab::Auth::AuthFinders do context 'with valid credentials' do it 'returns the user' do - env['warden'] = double("warden", authenticate: user) + set_header('warden', double("warden", authenticate: user)) expect(find_user_from_warden).to eq user end @@ -41,7 +52,7 @@ describe Gitlab::Auth::AuthFinders do context 'without CSRF token' do it 'returns nil' do allow(Gitlab::RequestForgeryProtection).to receive(:verified?).and_return(false) - env['warden'] = double("warden", authenticate: user) + set_header('warden', double("warden", authenticate: user)) expect(find_user_from_warden).to be_nil end @@ -51,8 +62,8 @@ describe Gitlab::Auth::AuthFinders do describe '#find_user_from_feed_token' do context 'when the request format is atom' do before do - env['SCRIPT_NAME'] = 'url.atom' - env['HTTP_ACCEPT'] = 'application/atom+xml' + set_header('SCRIPT_NAME', 'url.atom') + set_header('HTTP_ACCEPT', 'application/atom+xml') end context 'when feed_token param is provided' do @@ -94,7 +105,7 @@ describe Gitlab::Auth::AuthFinders do context 'when the request format is not atom' do it 'returns nil' do - env['SCRIPT_NAME'] = 'json' + set_header('SCRIPT_NAME', 'json') set_param(:feed_token, user.feed_token) @@ -104,7 +115,7 @@ describe Gitlab::Auth::AuthFinders do context 'when the request format is empty' do it 'the method call does not modify the original value' do - env['SCRIPT_NAME'] = 'url.atom' + set_header('SCRIPT_NAME', 'url.atom') env.delete('action_dispatch.request.formats') @@ -118,7 +129,7 @@ describe Gitlab::Auth::AuthFinders do describe '#find_user_from_static_object_token' do shared_examples 'static object request' do before do - env['SCRIPT_NAME'] = path + set_header('SCRIPT_NAME', path) end context 'when token header param is present' do @@ -174,7 +185,7 @@ describe Gitlab::Auth::AuthFinders do context 'when request format is not archive nor blob' do before do - env['script_name'] = 'url' + set_header('script_name', 'url') end it 'returns nil' do @@ -183,11 +194,82 @@ describe Gitlab::Auth::AuthFinders do end end + describe '#deploy_token_from_request' do + let_it_be(:deploy_token) { create(:deploy_token) } + let_it_be(:route_authentication_setting) { { deploy_token_allowed: true } } + + subject { deploy_token_from_request } + + it { is_expected.to be_nil } + + shared_examples 'an unauthenticated route' do + context 'when route is not allowed to use deploy_tokens' do + let(:route_authentication_setting) { { deploy_token_allowed: false } } + + it { is_expected.to be_nil } + end + end + + context 
'with deploy token headers' do + before do + set_header(described_class::DEPLOY_TOKEN_HEADER, deploy_token.token) + end + + it { is_expected.to eq deploy_token } + + it_behaves_like 'an unauthenticated route' + + context 'with incorrect token' do + before do + set_header(described_class::DEPLOY_TOKEN_HEADER, 'invalid_token') + end + + it { is_expected.to be_nil } + end + end + + context 'with oauth headers' do + before do + set_header('HTTP_AUTHORIZATION', "Bearer #{deploy_token.token}") + end + + it { is_expected.to eq deploy_token } + + it_behaves_like 'an unauthenticated route' + + context 'with invalid token' do + before do + set_header('HTTP_AUTHORIZATION', "Bearer invalid_token") + end + + it { is_expected.to be_nil } + end + end + + context 'with basic auth headers' do + before do + set_basic_auth_header(deploy_token.username, deploy_token.token) + end + + it { is_expected.to eq deploy_token } + + it_behaves_like 'an unauthenticated route' + + context 'with incorrect token' do + before do + set_basic_auth_header(deploy_token.username, 'invalid') + end + + it { is_expected.to be_nil } + end + end + end + describe '#find_user_from_access_token' do let(:personal_access_token) { create(:personal_access_token, user: user) } before do - env['SCRIPT_NAME'] = 'url.atom' + set_header('SCRIPT_NAME', 'url.atom') end it 'returns nil if no access_token present' do @@ -196,13 +278,13 @@ describe Gitlab::Auth::AuthFinders do context 'when validate_access_token! returns valid' do it 'returns user' do - env[described_class::PRIVATE_TOKEN_HEADER] = personal_access_token.token + set_header(described_class::PRIVATE_TOKEN_HEADER, personal_access_token.token) expect(find_user_from_access_token).to eq user end it 'returns exception if token has no user' do - env[described_class::PRIVATE_TOKEN_HEADER] = personal_access_token.token + set_header(described_class::PRIVATE_TOKEN_HEADER, personal_access_token.token) allow_any_instance_of(PersonalAccessToken).to receive(:user).and_return(nil) expect { find_user_from_access_token }.to raise_error(Gitlab::Auth::UnauthorizedError) @@ -211,7 +293,7 @@ describe Gitlab::Auth::AuthFinders do context 'with OAuth headers' do it 'returns user' do - env['HTTP_AUTHORIZATION'] = "Bearer #{personal_access_token.token}" + set_header('HTTP_AUTHORIZATION', "Bearer #{personal_access_token.token}") expect(find_user_from_access_token).to eq user end @@ -228,7 +310,7 @@ describe Gitlab::Auth::AuthFinders do let(:personal_access_token) { create(:personal_access_token, user: user) } before do - env[described_class::PRIVATE_TOKEN_HEADER] = personal_access_token.token + set_header(described_class::PRIVATE_TOKEN_HEADER, personal_access_token.token) end it 'returns exception if token has no user' do @@ -252,19 +334,19 @@ describe Gitlab::Auth::AuthFinders do end it 'returns the user for RSS requests' do - env['SCRIPT_NAME'] = 'url.atom' + set_header('SCRIPT_NAME', 'url.atom') expect(find_user_from_web_access_token(:rss)).to eq(user) end it 'returns the user for ICS requests' do - env['SCRIPT_NAME'] = 'url.ics' + set_header('SCRIPT_NAME', 'url.ics') expect(find_user_from_web_access_token(:ics)).to eq(user) end it 'returns the user for API requests' do - env['SCRIPT_NAME'] = '/api/endpoint' + set_header('SCRIPT_NAME', '/api/endpoint') expect(find_user_from_web_access_token(:api)).to eq(user) end @@ -274,12 +356,12 @@ describe Gitlab::Auth::AuthFinders do let(:personal_access_token) { create(:personal_access_token, user: user) } before do - env['SCRIPT_NAME'] = 'url.atom' + 
set_header('SCRIPT_NAME', 'url.atom') end context 'passed as header' do it 'returns token if valid personal_access_token' do - env[described_class::PRIVATE_TOKEN_HEADER] = personal_access_token.token + set_header(described_class::PRIVATE_TOKEN_HEADER, personal_access_token.token) expect(find_personal_access_token).to eq personal_access_token end @@ -298,7 +380,7 @@ describe Gitlab::Auth::AuthFinders do end it 'returns exception if invalid personal_access_token' do - env[described_class::PRIVATE_TOKEN_HEADER] = 'invalid_token' + set_header(described_class::PRIVATE_TOKEN_HEADER, 'invalid_token') expect { find_personal_access_token }.to raise_error(Gitlab::Auth::UnauthorizedError) end @@ -310,7 +392,7 @@ describe Gitlab::Auth::AuthFinders do context 'passed as header' do it 'returns token if valid oauth_access_token' do - env['HTTP_AUTHORIZATION'] = "Bearer #{token.token}" + set_header('HTTP_AUTHORIZATION', "Bearer #{token.token}") expect(find_oauth_access_token.token).to eq token.token end @@ -329,7 +411,7 @@ describe Gitlab::Auth::AuthFinders do end it 'returns exception if invalid oauth_access_token' do - env['HTTP_AUTHORIZATION'] = "Bearer invalid_token" + set_header('HTTP_AUTHORIZATION', "Bearer invalid_token") expect { find_oauth_access_token }.to raise_error(Gitlab::Auth::UnauthorizedError) end @@ -337,7 +419,7 @@ describe Gitlab::Auth::AuthFinders do describe '#find_personal_access_token_from_http_basic_auth' do def auth_header_with(token) - env['HTTP_AUTHORIZATION'] = ActionController::HttpAuthentication::Basic.encode_credentials('username', token) + set_basic_auth_header('username', token) end context 'access token is valid' do @@ -384,14 +466,6 @@ describe Gitlab::Auth::AuthFinders do end describe '#find_user_from_basic_auth_job' do - def basic_http_auth(username, password) - ActionController::HttpAuthentication::Basic.encode_credentials(username, password) - end - - def set_auth(username, password) - env['HTTP_AUTHORIZATION'] = basic_http_auth(username, password) - end - subject { find_user_from_basic_auth_job } context 'when the request does not have AUTHORIZATION header' do @@ -400,25 +474,25 @@ describe Gitlab::Auth::AuthFinders do context 'with wrong credentials' do it 'returns nil without user and password' do - set_auth(nil, nil) + set_basic_auth_header(nil, nil) is_expected.to be_nil end it 'returns nil without password' do - set_auth('some-user', nil) + set_basic_auth_header('some-user', nil) is_expected.to be_nil end it 'returns nil without user' do - set_auth(nil, 'password') + set_basic_auth_header(nil, 'password') is_expected.to be_nil end it 'returns nil without CI username' do - set_auth('user', 'password') + set_basic_auth_header('user', 'password') is_expected.to be_nil end @@ -430,19 +504,19 @@ describe Gitlab::Auth::AuthFinders do let(:build) { create(:ci_build, user: user) } it 'returns nil without password' do - set_auth(username, nil) + set_basic_auth_header(username, nil) is_expected.to be_nil end it 'returns user with valid token' do - set_auth(username, build.token) + set_basic_auth_header(username, build.token) is_expected.to eq user end it 'raises error with invalid token' do - set_auth(username, 'token') + set_basic_auth_header(username, 'token') expect { subject }.to raise_error(Gitlab::Auth::UnauthorizedError) end @@ -502,20 +576,20 @@ describe Gitlab::Auth::AuthFinders do context 'when the job token is in the headers' do it 'returns the user if valid job token' do - env[described_class::JOB_TOKEN_HEADER] = job.token + 
set_header(described_class::JOB_TOKEN_HEADER, job.token) is_expected.to eq(user) expect(@current_authenticated_job).to eq(job) end it 'returns nil without job token' do - env[described_class::JOB_TOKEN_HEADER] = '' + set_header(described_class::JOB_TOKEN_HEADER, '') is_expected.to be_nil end it 'returns exception if invalid job token' do - env[described_class::JOB_TOKEN_HEADER] = 'invalid token' + set_header(described_class::JOB_TOKEN_HEADER, 'invalid token') expect { subject }.to raise_error(Gitlab::Auth::UnauthorizedError) end @@ -524,7 +598,7 @@ describe Gitlab::Auth::AuthFinders do let(:route_authentication_setting) { { job_token_allowed: false } } it 'sets current_user to nil' do - env[described_class::JOB_TOKEN_HEADER] = job.token + set_header(described_class::JOB_TOKEN_HEADER, job.token) allow_any_instance_of(Gitlab::UserAccess).to receive(:allowed?).and_return(true) @@ -586,7 +660,7 @@ describe Gitlab::Auth::AuthFinders do context 'with API requests' do before do - env['SCRIPT_NAME'] = '/api/endpoint' + set_header('SCRIPT_NAME', '/api/endpoint') end it 'returns the runner if token is valid' do @@ -614,7 +688,7 @@ describe Gitlab::Auth::AuthFinders do context 'without API requests' do before do - env['SCRIPT_NAME'] = 'url.ics' + set_header('SCRIPT_NAME', 'url.ics') end it 'returns nil if token is valid' do diff --git a/spec/lib/gitlab/auth/o_auth/provider_spec.rb b/spec/lib/gitlab/auth/o_auth/provider_spec.rb index f46f9d76a1e..8b0d4d786cd 100644 --- a/spec/lib/gitlab/auth/o_auth/provider_spec.rb +++ b/spec/lib/gitlab/auth/o_auth/provider_spec.rb @@ -63,7 +63,7 @@ describe Gitlab::Auth::OAuth::Provider do context 'for an OmniAuth provider' do before do provider = OpenStruct.new( - name: 'google', + name: 'google_oauth2', app_id: 'asd123', app_secret: 'asd123' ) @@ -71,8 +71,16 @@ describe Gitlab::Auth::OAuth::Provider do end context 'when the provider exists' do + subject { described_class.config_for('google_oauth2') } + it 'returns the config' do - expect(described_class.config_for('google')).to be_a(OpenStruct) + expect(subject).to be_a(OpenStruct) + end + + it 'merges defaults with the given configuration' do + defaults = Gitlab::OmniauthInitializer.default_arguments_for('google_oauth2').deep_stringify_keys + + expect(subject['args']).to include(defaults) end end diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb index a0a8767637e..870f02b6933 100644 --- a/spec/lib/gitlab/auth_spec.rb +++ b/spec/lib/gitlab/auth_spec.rb @@ -715,6 +715,14 @@ describe Gitlab::Auth, :use_clean_rails_memory_store_caching do end end + describe ".resource_bot_scopes" do + subject { described_class.resource_bot_scopes } + + it { is_expected.to include(*described_class::API_SCOPES - [:read_user]) } + it { is_expected.to include(*described_class::REPOSITORY_SCOPES) } + it { is_expected.to include(*described_class.registry_scopes) } + end + private def expect_results_with_abilities(personal_access_token, abilities, success = true) diff --git a/spec/lib/gitlab/background_migration/backfill_environment_id_deployment_merge_requests_spec.rb b/spec/lib/gitlab/background_migration/backfill_environment_id_deployment_merge_requests_spec.rb new file mode 100644 index 00000000000..34ac70071bb --- /dev/null +++ b/spec/lib/gitlab/background_migration/backfill_environment_id_deployment_merge_requests_spec.rb @@ -0,0 +1,46 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::BackgroundMigration::BackfillEnvironmentIdDeploymentMergeRequests, schema: 20200312134637 do + 
let(:environments) { table(:environments) } + let(:merge_requests) { table(:merge_requests) } + let(:deployments) { table(:deployments) } + let(:deployment_merge_requests) { table(:deployment_merge_requests) } + let(:namespaces) { table(:namespaces) } + let(:projects) { table(:projects) } + + subject(:migration) { described_class.new } + + it 'correctly backfills environment_id column' do + namespace = namespaces.create!(name: 'foo', path: 'foo') + project = projects.create!(namespace_id: namespace.id) + + production = environments.create!(project_id: project.id, name: 'production', slug: 'production') + staging = environments.create!(project_id: project.id, name: 'staging', slug: 'staging') + + mr = merge_requests.create!(source_branch: 'x', target_branch: 'master', target_project_id: project.id) + + deployment1 = deployments.create!(environment_id: staging.id, iid: 1, project_id: project.id, ref: 'master', tag: false, sha: '123abcdef', status: 1) + deployment2 = deployments.create!(environment_id: production.id, iid: 2, project_id: project.id, ref: 'master', tag: false, sha: '123abcdef', status: 1) + deployment3 = deployments.create!(environment_id: production.id, iid: 3, project_id: project.id, ref: 'master', tag: false, sha: '123abcdef', status: 1) + + # mr is tracked twice in production through deployment2 and deployment3 + deployment_merge_requests.create!(deployment_id: deployment1.id, merge_request_id: mr.id) + deployment_merge_requests.create!(deployment_id: deployment2.id, merge_request_id: mr.id) + deployment_merge_requests.create!(deployment_id: deployment3.id, merge_request_id: mr.id) + + expect(deployment_merge_requests.where(environment_id: nil).count).to eq(3) + + migration.backfill_range(1, mr.id) + + expect(deployment_merge_requests.where(environment_id: nil).count).to be_zero + expect(deployment_merge_requests.count).to eq(2) + + production_deployments = deployment_merge_requests.where(environment_id: production.id) + expect(production_deployments.count).to eq(1) + expect(production_deployments.first.deployment_id).to eq(deployment2.id) + + expect(deployment_merge_requests.where(environment_id: staging.id).count).to eq(1) + end +end diff --git a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb index 08d3b7bec6a..27ae60eb278 100644 --- a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb +++ b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb @@ -2,13 +2,31 @@ require 'spec_helper' -describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migration, schema: 2020_02_26_162723 do +describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migration, schema: 2020_04_20_094444 do let(:gitlab_shell) { Gitlab::Shell.new } let(:users) { table(:users) } let(:snippets) { table(:snippets) } let(:snippet_repositories) { table(:snippet_repositories) } - let(:user) { users.create(id: 1, email: 'user@example.com', projects_limit: 10, username: 'test', name: 'Test') } + let(:user_state) { 'active' } + let(:ghost) { false } + let(:user_type) { nil } + let(:user_name) { 'Test' } + + let!(:user) do + users.create(id: 1, + email: 'user@example.com', + projects_limit: 10, + username: 'test', + name: user_name, + state: user_state, + ghost: ghost, + last_activity_on: 1.minute.ago, + user_type: user_type, + confirmed_at: 1.day.ago) + end + + let(:migration_bot) { User.migration_bot } let!(:snippet_with_repo) { 
snippets.create(id: 1, type: 'PersonalSnippet', author_id: user.id, file_name: file_name, content: content) } let!(:snippet_with_empty_repo) { snippets.create(id: 2, type: 'PersonalSnippet', author_id: user.id, file_name: file_name, content: content) } let!(:snippet_without_repo) { snippets.create(id: 3, type: 'PersonalSnippet', author_id: user.id, file_name: file_name, content: content) } @@ -53,15 +71,52 @@ describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migration, s end end - shared_examples 'commits the file to the repository' do + shared_examples 'migration_bot user commits files' do it do subject - blob = blob_at(snippet, file_name) + last_commit = raw_repository(snippet).commit - aggregate_failures do - expect(blob).to be - expect(blob.data).to eq content + expect(last_commit.author_name).to eq migration_bot.name + expect(last_commit.author_email).to eq migration_bot.email + end + end + + shared_examples 'commits the file to the repository' do + context 'when author can update snippet and use git' do + it 'creates the repository and commit the file' do + subject + + blob = blob_at(snippet, file_name) + last_commit = raw_repository(snippet).commit + + aggregate_failures do + expect(blob).to be + expect(blob.data).to eq content + expect(last_commit.author_name).to eq user.name + expect(last_commit.author_email).to eq user.email + end + end + end + + context 'when author cannot update snippet or use git' do + context 'when user is blocked' do + let(:user_state) { 'blocked' } + + it_behaves_like 'migration_bot user commits files' + end + + context 'when user is deactivated' do + let(:user_state) { 'deactivated' } + + it_behaves_like 'migration_bot user commits files' + end + + context 'when user is a ghost' do + let(:ghost) { true } + let(:user_type) { 'ghost' } + + it_behaves_like 'migration_bot user commits files' end end end @@ -123,6 +178,124 @@ describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migration, s end end end + + context 'with invalid file names' do + using RSpec::Parameterized::TableSyntax + + where(:invalid_file_name, :converted_file_name) do + 'filename.js // with comment' | 'filename-js-with-comment' + '.git/hooks/pre-commit' | 'git-hooks-pre-commit' + 'https://gitlab.com' | 'https-gitlab-com' + 'html://web.title%mp4/mpg/mpeg.net' | 'html-web-title-mp4-mpg-mpeg-net' + '../../etc/passwd' | 'etc-passwd' + '.' 
| 'snippetfile1.txt' + end + + with_them do + let!(:snippet_with_invalid_path) { snippets.create(id: 4, type: 'PersonalSnippet', author_id: user.id, file_name: invalid_file_name, content: content) } + let!(:snippet_with_valid_path) { snippets.create(id: 5, type: 'PersonalSnippet', author_id: user.id, file_name: file_name, content: content) } + let(:ids) { [4, 5] } + + after do + raw_repository(snippet_with_invalid_path).remove + raw_repository(snippet_with_valid_path).remove + end + + it 'checks for file path errors when errors are raised' do + expect(service).to receive(:set_file_path_error).once.and_call_original + + subject + end + + it 'converts invalid filenames' do + subject + + expect(blob_at(snippet_with_invalid_path, converted_file_name)).to be + end + + it 'does not convert valid filenames on subsequent migrations' do + subject + + expect(blob_at(snippet_with_valid_path, file_name)).to be + end + end + end + + context 'when snippet content size is higher than the existing limit' do + let(:limit) { 15 } + let(:content) { 'a' * (limit + 1) } + let(:snippet) { snippet_without_repo } + let(:ids) { [snippet.id, snippet.id] } + + before do + allow(Gitlab::CurrentSettings).to receive(:snippet_size_limit).and_return(limit) + end + + it_behaves_like 'migration_bot user commits files' + end + + context 'when user name is invalid' do + let(:user_name) { '.' } + let!(:snippet) { snippets.create(id: 4, type: 'PersonalSnippet', author_id: user.id, file_name: file_name, content: content) } + let(:ids) { [4, 4] } + + after do + raw_repository(snippet).remove + end + + it_behaves_like 'migration_bot user commits files' + end + + context 'when both user name and snippet file_name are invalid' do + let(:user_name) { '.' } + let!(:other_user) do + users.create(id: 2, + email: 'user2@example.com', + projects_limit: 10, + username: 'test2', + name: 'Test2', + state: user_state, + ghost: ghost, + last_activity_on: 1.minute.ago, + user_type: user_type, + confirmed_at: 1.day.ago) + end + let!(:invalid_snippet) { snippets.create(id: 4, type: 'PersonalSnippet', author_id: user.id, file_name: '.', content: content) } + let!(:snippet) { snippets.create(id: 5, type: 'PersonalSnippet', author_id: other_user.id, file_name: file_name, content: content) } + let(:ids) { [4, 5] } + + after do + raw_repository(snippet).remove + raw_repository(invalid_snippet).remove + end + + it 'updates the file_name only when it is invalid' do + subject + + expect(blob_at(invalid_snippet, 'snippetfile1.txt')).to be + expect(blob_at(snippet, file_name)).to be + end + + it_behaves_like 'migration_bot user commits files' do + let(:snippet) { invalid_snippet } + end + + it 'does not alter the commit author in subsequent migrations' do + subject + + last_commit = raw_repository(snippet).commit + + expect(last_commit.author_name).to eq other_user.name + expect(last_commit.author_email).to eq other_user.email + end + + it "increases the number of retries temporarily from #{described_class::MAX_RETRIES} to #{described_class::MAX_RETRIES + 1}" do + expect(service).to receive(:create_commit).with(Snippet.find(invalid_snippet.id)).exactly(described_class::MAX_RETRIES + 1).times.and_call_original + expect(service).to receive(:create_commit).with(Snippet.find(snippet.id)).once.and_call_original + + subject + end + end end def blob_at(snippet, path) diff --git a/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb b/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb index 
7dae28f72a5..4411dca3fd9 100644 --- a/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb +++ b/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb @@ -4,40 +4,45 @@ require 'spec_helper' describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, schema: 20200130145430 do let(:services) { table(:services) } - # we need to define the classes due to encryption - class IssueTrackerData < ApplicationRecord - self.table_name = 'issue_tracker_data' - - def self.encryption_options - { - key: Settings.attr_encrypted_db_key_base_32, - encode: true, - mode: :per_attribute_iv, - algorithm: 'aes-256-gcm' - } + before do + # we need to define the classes due to encryption + issue_tracker_data = Class.new(ApplicationRecord) do + self.table_name = 'issue_tracker_data' + + def self.encryption_options + { + key: Settings.attr_encrypted_db_key_base_32, + encode: true, + mode: :per_attribute_iv, + algorithm: 'aes-256-gcm' + } + end + + attr_encrypted :project_url, encryption_options + attr_encrypted :issues_url, encryption_options + attr_encrypted :new_issue_url, encryption_options end - attr_encrypted :project_url, encryption_options - attr_encrypted :issues_url, encryption_options - attr_encrypted :new_issue_url, encryption_options - end + jira_tracker_data = Class.new(ApplicationRecord) do + self.table_name = 'jira_tracker_data' - class JiraTrackerData < ApplicationRecord - self.table_name = 'jira_tracker_data' + def self.encryption_options + { + key: Settings.attr_encrypted_db_key_base_32, + encode: true, + mode: :per_attribute_iv, + algorithm: 'aes-256-gcm' + } + end - def self.encryption_options - { - key: Settings.attr_encrypted_db_key_base_32, - encode: true, - mode: :per_attribute_iv, - algorithm: 'aes-256-gcm' - } + attr_encrypted :url, encryption_options + attr_encrypted :api_url, encryption_options + attr_encrypted :username, encryption_options + attr_encrypted :password, encryption_options end - attr_encrypted :url, encryption_options - attr_encrypted :api_url, encryption_options - attr_encrypted :username, encryption_options - attr_encrypted :password, encryption_options + stub_const('IssueTrackerData', issue_tracker_data) + stub_const('JiraTrackerData', jira_tracker_data) end let(:url) { 'http://base-url.tracker.com' } @@ -90,7 +95,7 @@ describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, schema: end end - context 'with jira service' do + context 'with Jira service' do let!(:service) do services.create(id: 10, type: 'JiraService', title: nil, properties: jira_properties.to_json, category: 'issue_tracker') end @@ -202,7 +207,7 @@ describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, schema: end end - context 'with jira service which has data fields record inconsistent with properties field' do + context 'with Jira service which has data fields record inconsistent with properties field' do let!(:service) do services.create(id: 16, type: 'CustomIssueTrackerService', description: 'Existing description', properties: jira_properties.to_json, category: 'issue_tracker').tap do |service| JiraTrackerData.create!(service_id: service.id, url: 'http://other_jira_url') @@ -241,7 +246,7 @@ describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, schema: end end - context 'jira service with empty properties' do + context 'Jira service with empty properties' do let!(:service) do services.create(id: 18, type: 'JiraService', properties: '', category: 'issue_tracker') end @@ -253,7 
+258,7 @@ describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, schema: end end - context 'jira service with nil properties' do + context 'Jira service with nil properties' do let!(:service) do services.create(id: 18, type: 'JiraService', properties: nil, category: 'issue_tracker') end @@ -265,7 +270,7 @@ describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, schema: end end - context 'jira service with invalid properties' do + context 'Jira service with invalid properties' do let!(:service) do services.create(id: 18, type: 'JiraService', properties: 'invalid data', category: 'issue_tracker') end @@ -277,7 +282,7 @@ describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, schema: end end - context 'with jira service with invalid properties, valid jira service and valid bugzilla service' do + context 'with Jira service with invalid properties, valid Jira service and valid bugzilla service' do let!(:jira_service_invalid) do services.create(id: 19, title: 'invalid - title', description: 'invalid - description', type: 'JiraService', properties: 'invalid data', category: 'issue_tracker') end diff --git a/spec/lib/gitlab/chat/responder/mattermost_spec.rb b/spec/lib/gitlab/chat/responder/mattermost_spec.rb new file mode 100644 index 00000000000..f3480dfef06 --- /dev/null +++ b/spec/lib/gitlab/chat/responder/mattermost_spec.rb @@ -0,0 +1,117 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::Chat::Responder::Mattermost do + let(:chat_name) { create(:chat_name, chat_id: 'U123') } + + let(:pipeline) do + pipeline = create(:ci_pipeline) + + pipeline.create_chat_data!( + response_url: 'http://example.com', + chat_name_id: chat_name.id + ) + + pipeline + end + + let(:build) { create(:ci_build, pipeline: pipeline) } + let(:responder) { described_class.new(build) } + + describe '#send_response' do + it 'sends a response back to Slack' do + expect(Gitlab::HTTP).to receive(:post).with( + 'http://example.com', + { headers: { 'Content-Type': 'application/json' }, body: 'hello'.to_json } + ) + + responder.send_response('hello') + end + end + + describe '#success' do + it 'returns the output for a successful build' do + expect(responder) + .to receive(:send_response) + .with( + hash_including( + response_type: :in_channel, + attachments: array_including( + a_hash_including( + text: /#{pipeline.chat_data.chat_name.user.name}.*completed successfully/, + fields: array_including( + a_hash_including(value: /##{build.id}/), + a_hash_including(value: build.name), + a_hash_including(value: "```shell\nscript output\n```") + ) + ) + ) + ) + ) + + responder.success('script output') + end + + it 'limits the output to a fixed size' do + expect(responder) + .to receive(:send_response) + .with( + hash_including( + response_type: :in_channel, + attachments: array_including( + a_hash_including( + fields: array_including( + a_hash_including(value: /The output is too large/) + ) + ) + ) + ) + ) + + responder.success('a' * 4000) + end + + it 'does not send a response if the output is empty' do + expect(responder).not_to receive(:send_response) + + responder.success('') + end + end + + describe '#failure' do + it 'returns the output for a failed build' do + expect(responder) + .to receive(:send_response) + .with( + hash_including( + response_type: :in_channel, + attachments: array_including( + a_hash_including( + text: /#{pipeline.chat_data.chat_name.user.name}.*failed/, + fields: array_including( + a_hash_including(value: /##{build.id}/), + 
a_hash_including(value: build.name) + ) + ) + ) + ) + ) + + responder.failure + end + end + + describe '#scheduled_output' do + it 'returns the output for a scheduled build' do + output = responder.scheduled_output + + expect(output).to match( + hash_including( + response_type: :ephemeral, + text: /##{build.id}/ + ) + ) + end + end +end diff --git a/spec/lib/gitlab/checks/push_file_count_check_spec.rb b/spec/lib/gitlab/checks/push_file_count_check_spec.rb index 58ba7d579a3..e05102a9ce8 100644 --- a/spec/lib/gitlab/checks/push_file_count_check_spec.rb +++ b/spec/lib/gitlab/checks/push_file_count_check_spec.rb @@ -8,7 +8,7 @@ describe Gitlab::Checks::PushFileCountCheck do let(:timeout) { Gitlab::GitAccess::INTERNAL_TIMEOUT } let(:logger) { Gitlab::Checks::TimedLogger.new(timeout: timeout) } - subject { described_class.new(changes, repository: snippet.repository, limit: 1, logger: logger) } + subject { described_class.new(changes, repository: snippet.repository, limit: 2, logger: logger) } describe '#validate!' do using RSpec::Parameterized::TableSyntax @@ -31,7 +31,7 @@ describe Gitlab::Checks::PushFileCountCheck do where(:old, :new, :valid, :message) do 'single-file' | 'edit-file' | true | nil - 'single-file' | 'multiple-files' | false | 'The repository can contain at most 1 file(s).' + 'single-file' | 'multiple-files' | false | 'The repository can contain at most 2 file(s).' 'single-file' | 'no-files' | false | 'The repository must contain at least 1 file.' 'edit-file' | 'rename-and-edit-file' | true | nil end diff --git a/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb b/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb index 513a9b8f2b4..8cfd07df777 100644 --- a/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb @@ -123,25 +123,53 @@ describe Gitlab::Ci::Config::Entry::Artifacts do end end end + end - context 'when feature flag :ci_expose_arbitrary_artifacts_in_mr is disabled' do + describe 'excluded artifacts' do + context 'when configuration is valid and the feature is enabled' do before do - stub_feature_flags(ci_expose_arbitrary_artifacts_in_mr: false) + stub_feature_flags(ci_artifacts_exclude: true) end - context 'when syntax is correct' do - let(:config) { { expose_as: 'Test results', paths: ['test.txt'] } } + context 'when configuration is valid' do + let(:config) { { untracked: true, exclude: ['some/directory/'] } } - it 'is valid' do - expect(entry.errors).to be_empty + it 'correctly parses the configuration' do + expect(entry).to be_valid + expect(entry.value).to eq config end end - context 'when syntax for :expose_as is incorrect' do - let(:config) { { paths: %w[results.txt], expose_as: '' } } + context 'when configuration is not valid' do + let(:config) { { untracked: true, exclude: 1234 } } + + it 'returns an error' do + expect(entry).not_to be_valid + expect(entry.errors) + .to include 'artifacts exclude should be an array of strings' + end + end + end + + context 'when artifacts/exclude feature is disabled' do + before do + stub_feature_flags(ci_artifacts_exclude: false) + end + + context 'when configuration has been provided' do + let(:config) { { untracked: true, exclude: ['some/directory/'] } } + + it 'returns an error' do + expect(entry).not_to be_valid + expect(entry.errors).to include 'artifacts exclude feature is disabled' + end + end + + context 'when configuration is not present' do + let(:config) { { untracked: true } } - it 'is valid' do - expect(entry.errors).to be_empty + it 'is a valid configuration' do 
+ expect(entry).to be_valid end end end diff --git a/spec/lib/gitlab/ci/config/entry/reports_spec.rb b/spec/lib/gitlab/ci/config/entry/reports_spec.rb index 9bba3eb2b77..8c6c91d919e 100644 --- a/spec/lib/gitlab/ci/config/entry/reports_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/reports_spec.rb @@ -47,6 +47,8 @@ describe Gitlab::Ci::Config::Entry::Reports do :dotenv | 'build.dotenv' :cobertura | 'cobertura-coverage.xml' :terraform | 'tfplan.json' + :accessibility | 'gl-accessibility.json' + :cluster_applications | 'gl-cluster-applications.json' end with_them do diff --git a/spec/lib/gitlab/ci/config/entry/trigger_spec.rb b/spec/lib/gitlab/ci/config/entry/trigger_spec.rb index 752c3f59a95..dfd9807583c 100644 --- a/spec/lib/gitlab/ci/config/entry/trigger_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/trigger_spec.rb @@ -114,19 +114,6 @@ describe Gitlab::Ci::Config::Entry::Trigger do .to match /config contains unknown keys: branch/ end end - - context 'when feature flag is off' do - before do - stub_feature_flags(ci_parent_child_pipeline: false) - end - - let(:config) { { include: 'path/to/config.yml' } } - - it 'is returns an error if include is used' do - expect(subject.errors.first) - .to match /config must specify project/ - end - end end context 'when config contains unknown keys' do diff --git a/spec/lib/gitlab/ci/cron_parser_spec.rb b/spec/lib/gitlab/ci/cron_parser_spec.rb index 385df72fa41..8f9f3d7fa37 100644 --- a/spec/lib/gitlab/ci/cron_parser_spec.rb +++ b/spec/lib/gitlab/ci/cron_parser_spec.rb @@ -7,198 +7,240 @@ describe Gitlab::Ci::CronParser do it { is_expected.to be > Time.now } end - describe '#next_time_from' do - subject { described_class.new(cron, cron_timezone).next_time_from(Time.now) } + shared_examples_for "returns time in the past" do + it { is_expected.to be < Time.now } + end - context 'when cron and cron_timezone are valid' do - context 'when specific time' do - let(:cron) { '3 4 5 6 *' } - let(:cron_timezone) { 'UTC' } + shared_examples_for 'when cron and cron_timezone are valid' do |returns_time_for_epoch| + context 'when specific time' do + let(:cron) { '3 4 5 6 *' } + let(:cron_timezone) { 'UTC' } - it_behaves_like "returns time in the future" + it_behaves_like returns_time_for_epoch - it 'returns exact time' do - expect(subject.min).to eq(3) - expect(subject.hour).to eq(4) - expect(subject.day).to eq(5) - expect(subject.month).to eq(6) - end + it 'returns exact time' do + expect(subject.min).to eq(3) + expect(subject.hour).to eq(4) + expect(subject.day).to eq(5) + expect(subject.month).to eq(6) end + end - context 'when specific day of week' do - let(:cron) { '* * * * 0' } - let(:cron_timezone) { 'UTC' } + context 'when specific day of week' do + let(:cron) { '* * * * 0' } + let(:cron_timezone) { 'UTC' } - it_behaves_like "returns time in the future" + it_behaves_like returns_time_for_epoch - it 'returns exact day of week' do - expect(subject.wday).to eq(0) - end + it 'returns exact day of week' do + expect(subject.wday).to eq(0) end + end - context 'when slash used' do - let(:cron) { '*/10 */6 */10 */10 *' } - let(:cron_timezone) { 'UTC' } + context 'when slash used' do + let(:cron) { '*/10 */6 */10 */10 *' } + let(:cron_timezone) { 'UTC' } - it_behaves_like "returns time in the future" + it_behaves_like returns_time_for_epoch - it 'returns specific time' do - expect(subject.min).to be_in([0, 10, 20, 30, 40, 50]) - expect(subject.hour).to be_in([0, 6, 12, 18]) - expect(subject.day).to be_in([1, 11, 21, 31]) - expect(subject.month).to be_in([1, 11]) - end + it 
'returns specific time' do + expect(subject.min).to be_in([0, 10, 20, 30, 40, 50]) + expect(subject.hour).to be_in([0, 6, 12, 18]) + expect(subject.day).to be_in([1, 11, 21, 31]) + expect(subject.month).to be_in([1, 11]) end + end - context 'when range used' do - let(:cron) { '0,20,40 * 1-5 * *' } - let(:cron_timezone) { 'UTC' } + context 'when range used' do + let(:cron) { '0,20,40 * 1-5 * *' } + let(:cron_timezone) { 'UTC' } - it_behaves_like "returns time in the future" + it_behaves_like returns_time_for_epoch - it 'returns specific time' do - expect(subject.min).to be_in([0, 20, 40]) - expect(subject.day).to be_in((1..5).to_a) - end + it 'returns specific time' do + expect(subject.min).to be_in([0, 20, 40]) + expect(subject.day).to be_in((1..5).to_a) end + end - context 'when cron_timezone is TZInfo format' do - before do - allow(Time).to receive(:zone) - .and_return(ActiveSupport::TimeZone['UTC']) - end + context 'when cron_timezone is TZInfo format' do + before do + allow(Time).to receive(:zone) + .and_return(ActiveSupport::TimeZone['UTC']) + end - let(:hour_in_utc) do - ActiveSupport::TimeZone[cron_timezone] - .now.change(hour: 0).in_time_zone('UTC').hour - end + let(:hour_in_utc) do + ActiveSupport::TimeZone[cron_timezone] + .now.change(hour: 0).in_time_zone('UTC').hour + end - context 'when cron_timezone is US/Pacific' do - let(:cron) { '* 0 * * *' } - let(:cron_timezone) { 'US/Pacific' } + context 'when cron_timezone is US/Pacific' do + let(:cron) { '* 0 * * *' } + let(:cron_timezone) { 'US/Pacific' } - it_behaves_like "returns time in the future" + it_behaves_like returns_time_for_epoch - context 'when PST (Pacific Standard Time)' do - it 'converts time in server time zone' do - Timecop.freeze(Time.utc(2017, 1, 1)) do - expect(subject.hour).to eq(hour_in_utc) - end + context 'when PST (Pacific Standard Time)' do + it 'converts time in server time zone' do + Timecop.freeze(Time.utc(2017, 1, 1)) do + expect(subject.hour).to eq(hour_in_utc) end end + end - context 'when PDT (Pacific Daylight Time)' do - it 'converts time in server time zone' do - Timecop.freeze(Time.utc(2017, 6, 1)) do - expect(subject.hour).to eq(hour_in_utc) - end + context 'when PDT (Pacific Daylight Time)' do + it 'converts time in server time zone' do + Timecop.freeze(Time.utc(2017, 6, 1)) do + expect(subject.hour).to eq(hour_in_utc) end end end end + end - context 'when cron_timezone is ActiveSupport::TimeZone format' do - before do - allow(Time).to receive(:zone) - .and_return(ActiveSupport::TimeZone['UTC']) - end + context 'when cron_timezone is ActiveSupport::TimeZone format' do + before do + allow(Time).to receive(:zone) + .and_return(ActiveSupport::TimeZone['UTC']) + end - let(:hour_in_utc) do - ActiveSupport::TimeZone[cron_timezone] - .now.change(hour: 0).in_time_zone('UTC').hour - end + let(:hour_in_utc) do + ActiveSupport::TimeZone[cron_timezone] + .now.change(hour: 0).in_time_zone('UTC').hour + end - context 'when cron_timezone is Berlin' do - let(:cron) { '* 0 * * *' } - let(:cron_timezone) { 'Berlin' } + context 'when cron_timezone is Berlin' do + let(:cron) { '* 0 * * *' } + let(:cron_timezone) { 'Berlin' } - it_behaves_like "returns time in the future" + it_behaves_like returns_time_for_epoch - context 'when CET (Central European Time)' do - it 'converts time in server time zone' do - Timecop.freeze(Time.utc(2017, 1, 1)) do - expect(subject.hour).to eq(hour_in_utc) - end + context 'when CET (Central European Time)' do + it 'converts time in server time zone' do + Timecop.freeze(Time.utc(2017, 1, 
1)) do + expect(subject.hour).to eq(hour_in_utc) end end + end - context 'when CEST (Central European Summer Time)' do - it 'converts time in server time zone' do - Timecop.freeze(Time.utc(2017, 6, 1)) do - expect(subject.hour).to eq(hour_in_utc) - end + context 'when CEST (Central European Summer Time)' do + it 'converts time in server time zone' do + Timecop.freeze(Time.utc(2017, 6, 1)) do + expect(subject.hour).to eq(hour_in_utc) end end end + end + end + end - context 'when cron_timezone is Eastern Time (US & Canada)' do - let(:cron) { '* 0 * * *' } - let(:cron_timezone) { 'Eastern Time (US & Canada)' } + shared_examples_for 'when cron_timezone is Eastern Time (US & Canada)' do |returns_time_for_epoch, year| + let(:cron) { '* 0 * * *' } + let(:cron_timezone) { 'Eastern Time (US & Canada)' } - it_behaves_like "returns time in the future" + before do + allow(Time).to receive(:zone) + .and_return(ActiveSupport::TimeZone['UTC']) + end - context 'when EST (Eastern Standard Time)' do - it 'converts time in server time zone' do - Timecop.freeze(Time.utc(2017, 1, 1)) do - expect(subject.hour).to eq(hour_in_utc) - end - end - end + let(:hour_in_utc) do + ActiveSupport::TimeZone[cron_timezone] + .now.change(hour: 0).in_time_zone('UTC').hour + end - context 'when EDT (Eastern Daylight Time)' do - it 'converts time in server time zone' do - Timecop.freeze(Time.utc(2017, 6, 1)) do - expect(subject.hour).to eq(hour_in_utc) - end - end - end + it_behaves_like returns_time_for_epoch - context 'when time crosses a Daylight Savings boundary' do - let(:cron) { '* 0 1 12 *'} - - # Note this previously only failed if the time zone is set - # to a zone that observes Daylight Savings - # (e.g. America/Chicago) at the start of the test. Stubbing - # TZ doesn't appear to be enough. - it 'generates day without TZInfo::AmbiguousTime error' do - Timecop.freeze(Time.utc(2020, 1, 1)) do - expect(subject.year).to eq(2020) - expect(subject.month).to eq(12) - expect(subject.day).to eq(1) - end - end - end + context 'when EST (Eastern Standard Time)' do + it 'converts time in server time zone' do + Timecop.freeze(Time.utc(2017, 1, 1)) do + expect(subject.hour).to eq(hour_in_utc) end end end - context 'when cron and cron_timezone are invalid' do - let(:cron) { 'invalid_cron' } - let(:cron_timezone) { 'invalid_cron_timezone' } + context 'when EDT (Eastern Daylight Time)' do + it 'converts time in server time zone' do + Timecop.freeze(Time.utc(2017, 6, 1)) do + expect(subject.hour).to eq(hour_in_utc) + end + end + end - it { is_expected.to be_nil } + context 'when time crosses a Daylight Savings boundary' do + let(:cron) { '* 0 1 12 *'} + + # Note this previously only failed if the time zone is set + # to a zone that observes Daylight Savings + # (e.g. America/Chicago) at the start of the test. Stubbing + # TZ doesn't appear to be enough. 
+ it 'generates day without TZInfo::AmbiguousTime error' do + Timecop.freeze(Time.utc(2020, 1, 1)) do + expect(subject.year).to eq(year) + expect(subject.month).to eq(12) + expect(subject.day).to eq(1) + end + end end + end - context 'when cron syntax is quoted' do - let(:cron) { "'0 * * * *'" } - let(:cron_timezone) { 'UTC' } + shared_examples_for 'when cron and cron_timezone are invalid' do + let(:cron) { 'invalid_cron' } + let(:cron_timezone) { 'invalid_cron_timezone' } - it { expect(subject).to be_nil } - end + it { is_expected.to be_nil } + end - context 'when cron syntax is rufus-scheduler syntax' do - let(:cron) { 'every 3h' } - let(:cron_timezone) { 'UTC' } + shared_examples_for 'when cron syntax is quoted' do + let(:cron) { "'0 * * * *'" } + let(:cron_timezone) { 'UTC' } - it { expect(subject).to be_nil } - end + it { expect(subject).to be_nil } + end - context 'when cron is scheduled to a non existent day' do - let(:cron) { '0 12 31 2 *' } - let(:cron_timezone) { 'UTC' } + shared_examples_for 'when cron syntax is rufus-scheduler syntax' do + let(:cron) { 'every 3h' } + let(:cron_timezone) { 'UTC' } - it { expect(subject).to be_nil } - end + it { expect(subject).to be_nil } + end + + shared_examples_for 'when cron is scheduled to a non existent day' do + let(:cron) { '0 12 31 2 *' } + let(:cron_timezone) { 'UTC' } + + it { expect(subject).to be_nil } + end + + describe '#next_time_from' do + subject { described_class.new(cron, cron_timezone).next_time_from(Time.now) } + + it_behaves_like 'when cron and cron_timezone are valid', 'returns time in the future' + + it_behaves_like 'when cron_timezone is Eastern Time (US & Canada)', 'returns time in the future', 2020 + + it_behaves_like 'when cron and cron_timezone are invalid' + + it_behaves_like 'when cron syntax is quoted' + + it_behaves_like 'when cron syntax is rufus-scheduler syntax' + + it_behaves_like 'when cron is scheduled to a non existent day' + end + + describe '#previous_time_from' do + subject { described_class.new(cron, cron_timezone).previous_time_from(Time.now) } + + it_behaves_like 'when cron and cron_timezone are valid', 'returns time in the past' + + it_behaves_like 'when cron_timezone is Eastern Time (US & Canada)', 'returns time in the past', 2019 + + it_behaves_like 'when cron and cron_timezone are invalid' + + it_behaves_like 'when cron syntax is quoted' + + it_behaves_like 'when cron syntax is rufus-scheduler syntax' + + it_behaves_like 'when cron is scheduled to a non existent day' end describe '#cron_valid?' do diff --git a/spec/lib/gitlab/ci/parsers/accessibility/pa11y_spec.rb b/spec/lib/gitlab/ci/parsers/accessibility/pa11y_spec.rb new file mode 100644 index 00000000000..4d87e3b201a --- /dev/null +++ b/spec/lib/gitlab/ci/parsers/accessibility/pa11y_spec.rb @@ -0,0 +1,118 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' + +describe Gitlab::Ci::Parsers::Accessibility::Pa11y do + describe '#parse!' 
do + subject { described_class.new.parse!(pa11y, accessibility_report) } + + let(:accessibility_report) { Gitlab::Ci::Reports::AccessibilityReports.new } + + context "when data is pa11y style JSON" do + context "when there are no URLs provided" do + let(:pa11y) do + { + "total": 1, + "passes": 0, + "errors": 0, + "results": { + "": [ + { + "message": "Protocol error (Page.navigate): Cannot navigate to invalid URL" + } + ] + } + }.to_json + end + + it "returns an accessibility report" do + expect { subject }.not_to raise_error + + expect(accessibility_report.errors_count).to eq(0) + expect(accessibility_report.passes_count).to eq(0) + expect(accessibility_report.scans_count).to eq(0) + expect(accessibility_report.urls).to be_empty + expect(accessibility_report.error_message).to eq("Empty URL detected in gl-accessibility.json") + end + end + + context "when there are no errors" do + let(:pa11y) do + { + "total": 1, + "passes": 1, + "errors": 0, + "results": { + "http://pa11y.org/": [] + } + }.to_json + end + + it "returns an accessibility report" do + expect { subject }.not_to raise_error + + expect(accessibility_report.urls['http://pa11y.org/']).to be_empty + expect(accessibility_report.errors_count).to eq(0) + expect(accessibility_report.passes_count).to eq(1) + expect(accessibility_report.scans_count).to eq(1) + end + end + + context "when there are errors" do + let(:pa11y) do + { + "total": 1, + "passes": 0, + "errors": 1, + "results": { + "https://about.gitlab.com/": [ + { + "code": "WCAG2AA.Principle4.Guideline4_1.4_1_2.H91.A.NoContent", + "type": "error", + "typeCode": 1, + "message": "Anchor element found with a valid href attribute, but no link content has been supplied.", + "context": "", + "selector": "#main-nav > div:nth-child(1) > a", + "runner": "htmlcs", + "runnerExtras": {} + } + ] + } + }.to_json + end + + it "returns an accessibility report" do + expect { subject }.not_to raise_error + + expect(accessibility_report.errors_count).to eq(1) + expect(accessibility_report.passes_count).to eq(0) + expect(accessibility_report.scans_count).to eq(1) + expect(accessibility_report.urls['https://about.gitlab.com/']).to be_present + expect(accessibility_report.urls['https://about.gitlab.com/'].first[:code]).to be_present + end + end + end + + context "when data is not a valid JSON string" do + let(:pa11y) do + { + "total": 1, + "passes": 1, + "errors": 0, + "results": { + "http://pa11y.org/": [] + } + } + end + + it "sets error_message" do + expect { subject }.not_to raise_error + + expect(accessibility_report.error_message).to include('Pa11y parsing failed') + expect(accessibility_report.errors_count).to eq(0) + expect(accessibility_report.passes_count).to eq(0) + expect(accessibility_report.scans_count).to eq(0) + end + end + end +end diff --git a/spec/lib/gitlab/ci/parsers/terraform/tfplan_spec.rb b/spec/lib/gitlab/ci/parsers/terraform/tfplan_spec.rb new file mode 100644 index 00000000000..19cd75e586c --- /dev/null +++ b/spec/lib/gitlab/ci/parsers/terraform/tfplan_spec.rb @@ -0,0 +1,51 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::Ci::Parsers::Terraform::Tfplan do + describe '#parse!' 
do + let_it_be(:artifact) { create(:ci_job_artifact, :terraform) } + + let(:reports) { Gitlab::Ci::Reports::TerraformReports.new } + + context 'when data is tfplan.json' do + context 'when there is no data' do + it 'raises an error' do + plan = '{}' + + expect { subject.parse!(plan, reports, artifact: artifact) }.to raise_error( + described_class::TfplanParserError + ) + end + end + + context 'when there is data' do + it 'parses JSON and returns a report' do + plan = '{ "create": 0, "update": 1, "delete": 0 }' + + expect { subject.parse!(plan, reports, artifact: artifact) }.not_to raise_error + + expect(reports.plans).to match( + a_hash_including( + 'tfplan.json' => a_hash_including( + 'create' => 0, + 'update' => 1, + 'delete' => 0 + ) + ) + ) + end + end + end + + context 'when data is not tfplan.json' do + it 'raises an error' do + plan = { 'create' => 0, 'update' => 1, 'delete' => 0 }.to_s + + expect { subject.parse!(plan, reports, artifact: artifact) }.to raise_error( + described_class::TfplanParserError + ) + end + end + end +end diff --git a/spec/lib/gitlab/ci/parsers/test/junit_spec.rb b/spec/lib/gitlab/ci/parsers/test/junit_spec.rb index b4be5a41cd7..7b7ace02bba 100644 --- a/spec/lib/gitlab/ci/parsers/test/junit_spec.rb +++ b/spec/lib/gitlab/ci/parsers/test/junit_spec.rb @@ -215,8 +215,64 @@ describe Gitlab::Ci::Parsers::Test::Junit do context 'when data is not JUnit style XML' do let(:junit) { { testsuite: 'abc' }.to_json } - it 'raises an error' do - expect { subject }.to raise_error(described_class::JunitParserError) + it 'attaches an error to the TestSuite object' do + expect { subject }.not_to raise_error + expect(test_cases).to be_empty + end + end + + context 'when data is malformed JUnit XML' do + let(:junit) do + <<-EOF.strip_heredoc + + + + EOF + end + + it 'attaches an error to the TestSuite object' do + expect { subject }.not_to raise_error + expect(test_suite.suite_error).to eq("JUnit XML parsing failed: 4:1: FATAL: expected '>'") + end + + it 'returns 0 tests cases' do + subject + + expect(test_cases).to be_empty + expect(test_suite.total_count).to eq(0) + expect(test_suite.success_count).to eq(0) + expect(test_suite.error_count).to eq(0) + end + + it 'returns a failure status' do + subject + + expect(test_suite.total_status).to eq(Gitlab::Ci::Reports::TestCase::STATUS_ERROR) + end + end + + context 'when data is not XML' do + let(:junit) { double(:random_trash) } + + it 'attaches an error to the TestSuite object' do + expect { subject }.not_to raise_error + expect(test_suite.suite_error).to eq('JUnit data parsing failed: no implicit conversion of RSpec::Mocks::Double into String') + end + + it 'returns 0 tests cases' do + subject + + expect(test_cases).to be_empty + expect(test_suite.total_count).to eq(0) + expect(test_suite.success_count).to eq(0) + expect(test_suite.error_count).to eq(0) + end + + it 'returns a failure status' do + subject + + expect(test_suite.total_status).to eq(Gitlab::Ci::Reports::TestCase::STATUS_ERROR) end end diff --git a/spec/lib/gitlab/ci/parsers_spec.rb b/spec/lib/gitlab/ci/parsers_spec.rb index 9d6896b3cb4..0a266e7a206 100644 --- a/spec/lib/gitlab/ci/parsers_spec.rb +++ b/spec/lib/gitlab/ci/parsers_spec.rb @@ -22,6 +22,22 @@ describe Gitlab::Ci::Parsers do end end + context 'when file_type is accessibility' do + let(:file_type) { 'accessibility' } + + it 'fabricates the class' do + is_expected.to be_a(described_class::Accessibility::Pa11y) + end + end + + context 'when file_type is terraform' do + let(:file_type) { 'terraform' } + + it 
'fabricates the class' do + is_expected.to be_a(described_class::Terraform::Tfplan) + end + end + context 'when file_type does not exist' do let(:file_type) { 'undefined' } diff --git a/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb index 9033b71b19f..f82e49f9323 100644 --- a/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb @@ -5,11 +5,13 @@ describe Gitlab::Ci::Pipeline::Chain::Sequence do let_it_be(:project) { create(:project) } let_it_be(:user) { create(:user) } + let(:pipeline) { build_stubbed(:ci_pipeline) } let(:command) { Gitlab::Ci::Pipeline::Chain::Command.new } let(:first_step) { spy('first step') } let(:second_step) { spy('second step') } let(:sequence) { [first_step, second_step] } + let(:histogram) { spy('prometheus metric') } subject do described_class.new(pipeline, command, sequence) @@ -52,5 +54,13 @@ describe Gitlab::Ci::Pipeline::Chain::Sequence do it 'returns a pipeline object' do expect(subject.build!).to eq pipeline end + + it 'adds sequence duration to duration histogram' do + allow(command).to receive(:duration_histogram).and_return(histogram) + + subject.build! + + expect(histogram).to have_received(:observe) + end end end diff --git a/spec/lib/gitlab/ci/reports/accessibility_reports_comparer_spec.rb b/spec/lib/gitlab/ci/reports/accessibility_reports_comparer_spec.rb new file mode 100644 index 00000000000..31a330f46b1 --- /dev/null +++ b/spec/lib/gitlab/ci/reports/accessibility_reports_comparer_spec.rb @@ -0,0 +1,270 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::Ci::Reports::AccessibilityReportsComparer do + let(:comparer) { described_class.new(base_reports, head_reports) } + let(:base_reports) { Gitlab::Ci::Reports::AccessibilityReports.new } + let(:head_reports) { Gitlab::Ci::Reports::AccessibilityReports.new } + let(:url) { "https://gitlab.com" } + let(:single_error) do + [ + { + "code" => "WCAG2AA.Principle4.Guideline4_1.4_1_2.H91.A.NoContent", + "type" => "error", + "typeCode" => 1, + "message" => "Anchor element found with a valid href attribute, but no link content has been supplied.", + "context" => %{}, + "selector" => "#main-nav > div:nth-child(1) > a", + "runner" => "htmlcs", + "runnerExtras" => {} + } + ] + end + let(:different_error) do + [ + { + "code" => "WCAG2AA.Principle1.Guideline1_4.1_4_3.G18.Fail", + "type" => "error", + "typeCode" => 1, + "message" => "This element has insufficient contrast at this conformance level.", + "context" => %{Product}, + "selector" => "#main-nav > div:nth-child(2) > ul > li:nth-child(1) > a", + "runner" => "htmlcs", + "runnerExtras" => {} + } + ] + end + + describe '#status' do + subject { comparer.status } + + context 'when head report has an error' do + before do + head_reports.add_url(url, single_error) + end + + it 'returns status failed' do + expect(subject).to eq(described_class::STATUS_FAILED) + end + end + + context 'when head reports does not have errors' do + before do + head_reports.add_url(url, []) + end + + it 'returns status success' do + expect(subject).to eq(described_class::STATUS_SUCCESS) + end + end + end + + describe '#errors_count' do + subject { comparer.errors_count } + + context 'when head report has an error' do + before do + head_reports.add_url(url, single_error) + end + + it 'returns the number of new errors' do + expect(subject).to eq(1) + end + end + + context 'when head reports does not have an error' do + before do +
head_reports.add_url(url, []) + end + + it 'returns the number new errors' do + expect(subject).to eq(0) + end + end + end + + describe '#resolved_count' do + subject { comparer.resolved_count } + + context 'when base reports has an error and head has a different error' do + before do + base_reports.add_url(url, single_error) + head_reports.add_url(url, different_error) + end + + it 'returns the resolved count' do + expect(subject).to eq(1) + end + end + + context 'when base reports has errors head has no errors' do + before do + base_reports.add_url(url, single_error) + head_reports.add_url(url, []) + end + + it 'returns the resolved count' do + expect(subject).to eq(1) + end + end + + context 'when base reports has errors and head has the same error' do + before do + base_reports.add_url(url, single_error) + head_reports.add_url(url, single_error) + end + + it 'returns zero' do + expect(subject).to eq(0) + end + end + + context 'when base reports does not have errors and head has errors' do + before do + head_reports.add_url(url, single_error) + end + + it 'returns the number of resolved errors' do + expect(subject).to eq(0) + end + end + end + + describe '#total_count' do + subject { comparer.total_count } + + context 'when base reports has an error' do + before do + base_reports.add_url(url, single_error) + end + + it 'returns the error count' do + expect(subject).to eq(1) + end + end + + context 'when head report has an error' do + before do + head_reports.add_url(url, single_error) + end + + it 'returns the error count' do + expect(subject).to eq(1) + end + end + + context 'when base report has errors and head report has errors' do + before do + base_reports.add_url(url, single_error) + head_reports.add_url(url, different_error) + end + + it 'returns the error count' do + expect(subject).to eq(2) + end + end + end + + describe '#existing_errors' do + subject { comparer.existing_errors } + + context 'when base report has errors and head has a different error' do + before do + base_reports.add_url(url, single_error) + head_reports.add_url(url, different_error) + end + + it 'returns the existing errors' do + expect(subject.size).to eq(1) + expect(subject.first["code"]).to eq("WCAG2AA.Principle4.Guideline4_1.4_1_2.H91.A.NoContent") + end + end + + context 'when base report does not have errors and head has errors' do + before do + base_reports.add_url(url, []) + head_reports.add_url(url, single_error) + end + + it 'returns an empty array' do + expect(subject).to be_empty + end + end + end + + describe '#new_errors' do + subject { comparer.new_errors } + + context 'when base reports has errors and head has more errors' do + before do + base_reports.add_url(url, single_error) + head_reports.add_url(url, single_error + different_error) + end + + it 'returns new errors between base and head reports' do + expect(subject.size).to eq(1) + expect(subject.first["code"]).to eq("WCAG2AA.Principle1.Guideline1_4.1_4_3.G18.Fail") + end + end + + context 'when base reports has an error and head has no errors' do + before do + base_reports.add_url(url, single_error) + head_reports.add_url(url, []) + end + + it 'returns an empty array' do + expect(subject).to be_empty + end + end + + context 'when base reports does not have errors and head has errors' do + before do + head_reports.add_url(url, single_error) + end + + it 'returns the new error' do + expect(subject.size).to eq(1) + expect(subject.first["code"]).to eq("WCAG2AA.Principle4.Guideline4_1.4_1_2.H91.A.NoContent") + end + end + end + + describe 
'#resolved_errors' do + subject { comparer.resolved_errors } + + context 'when base report has errors and head has more errors' do + before do + base_reports.add_url(url, single_error) + head_reports.add_url(url, single_error + different_error) + end + + it 'returns an empty array' do + expect(subject).to be_empty + end + end + + context 'when base reports has errors and head has a different error' do + before do + base_reports.add_url(url, single_error) + head_reports.add_url(url, different_error) + end + + it 'returns the resolved errors' do + expect(subject.size).to eq(1) + expect(subject.first["code"]).to eq("WCAG2AA.Principle4.Guideline4_1.4_1_2.H91.A.NoContent") + end + end + + context 'when base reports does not have errors and head has errors' do + before do + head_reports.add_url(url, single_error) + end + + it 'returns an empty array' do + expect(subject).to be_empty + end + end + end +end diff --git a/spec/lib/gitlab/ci/reports/accessibility_reports_spec.rb b/spec/lib/gitlab/ci/reports/accessibility_reports_spec.rb new file mode 100644 index 00000000000..0dc13b464b1 --- /dev/null +++ b/spec/lib/gitlab/ci/reports/accessibility_reports_spec.rb @@ -0,0 +1,232 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::Ci::Reports::AccessibilityReports do + let(:accessibility_report) { described_class.new } + let(:url) { 'https://gitlab.com' } + let(:data) do + [ + { + "code": "WCAG2AA.Principle4.Guideline4_1.4_1_2.H91.A.NoContent", + "type": "error", + "typeCode": 1, + "message": "Anchor element found with a valid href attribute, but no link content has been supplied.", + "context": %{}, + "selector": "html > body > div:nth-child(9) > div:nth-child(2) > a:nth-child(17)", + "runner": "htmlcs", + "runnerExtras": {} + }, + { + "code": "WCAG2AA.Principle4.Guideline4_1.4_1_2.H91.A.NoContent", + "type": "error", + "typeCode": 1, + "message": "Anchor element found with a valid href attribute, but no link content has been supplied.", + "context": %{}, + "selector": "html > body > div:nth-child(9) > div:nth-child(2) > a:nth-child(18)", + "runner": "htmlcs", + "runnerExtras": {} + } + ] + end + + describe '#scans_count' do + subject { accessibility_report.scans_count } + + context 'when data has errors' do + let(:different_url) { 'https://about.gitlab.com' } + + before do + accessibility_report.add_url(url, data) + accessibility_report.add_url(different_url, data) + end + + it 'returns the scans_count' do + expect(subject).to eq(2) + end + end + + context 'when data has no errors' do + before do + accessibility_report.add_url(url, []) + end + + it 'returns the scans_count' do + expect(subject).to eq(1) + end + end + + context 'when data has no url' do + before do + accessibility_report.add_url("", []) + end + + it 'returns the scans_count' do + expect(subject).to eq(0) + end + end + end + + describe '#passes_count' do + subject { accessibility_report.passes_count } + + context 'when data has errors' do + before do + accessibility_report.add_url(url, data) + end + + it 'returns the passes_count' do + expect(subject).to eq(0) + end + end + + context 'when data has no errors' do + before do + accessibility_report.add_url(url, []) + end + + it 'returns the passes_count' do + expect(subject).to eq(1) + end + end + + context 'when data has no url' do + before do + accessibility_report.add_url("", []) + end + + it 'returns the scans_count' do + expect(subject).to eq(0) + end + end + end + + describe '#errors_count' do + subject { accessibility_report.errors_count } + + context 'when data has errors' do
+ let(:different_url) { 'https://about.gitlab.com' } + + before do + accessibility_report.add_url(url, data) + accessibility_report.add_url(different_url, data) + end + + it 'returns the errors_count' do + expect(subject).to eq(4) + end + end + + context 'when data has no errors' do + before do + accessibility_report.add_url(url, []) + end + + it 'returns the errors_count' do + expect(subject).to eq(0) + end + end + + context 'when data has no url' do + before do + accessibility_report.add_url("", []) + end + + it 'returns the errors_count' do + expect(subject).to eq(0) + end + end + end + + describe '#add_url' do + subject { accessibility_report.add_url(url, data) } + + context 'when data has errors' do + it 'adds urls and data to accessibility report' do + expect { subject }.not_to raise_error + + expect(accessibility_report.urls.keys).to eq([url]) + expect(accessibility_report.urls.values.flatten.size).to eq(2) + end + end + + context 'when data does not have errors' do + let(:data) { [] } + + it 'adds data to accessibility report' do + expect { subject }.not_to raise_error + + expect(accessibility_report.urls.keys).to eq([url]) + expect(accessibility_report.urls.values.flatten.size).to eq(0) + end + end + + context 'when url does not exist' do + let(:url) { '' } + let(:data) { [{ message: "Protocol error (Page.navigate): Cannot navigate to invalid URL" }] } + + it 'sets error_message and decreases total' do + expect { subject }.not_to raise_error + + expect(accessibility_report.scans_count).to eq(0) + expect(accessibility_report.error_message).to eq('Empty URL detected in gl-accessibility.json') + end + end + end + + describe '#set_error_message' do + let(:set_accessibility_error) { accessibility_report.set_error_message('error') } + + context 'when error is nil' do + it 'returns the error' do + expect(set_accessibility_error).to eq('error') + end + + it 'sets the error' do + set_accessibility_error + + expect(accessibility_report.error_message).to eq('error') + end + end + + context 'when a error has already been set' do + before do + accessibility_report.set_error_message('old error') + end + + it 'overwrites the existing message' do + expect { set_accessibility_error }.to change(accessibility_report, :error_message).from('old error').to('error') + end + end + end + + describe '#all_errors' do + subject { accessibility_report.all_errors } + + context 'when data has errors' do + before do + accessibility_report.add_url(url, data) + end + + it 'returns all errors' do + expect(subject.size).to eq(2) + end + end + + context 'when data has no errors' do + before do + accessibility_report.add_url(url, []) + end + + it 'returns an empty array' do + expect(subject).to eq([]) + end + end + + context 'when accessibility report has no data' do + it 'returns an empty array' do + expect(subject).to eq([]) + end + end + end +end diff --git a/spec/lib/gitlab/ci/reports/terraform_reports_spec.rb b/spec/lib/gitlab/ci/reports/terraform_reports_spec.rb new file mode 100644 index 00000000000..061029299ac --- /dev/null +++ b/spec/lib/gitlab/ci/reports/terraform_reports_spec.rb @@ -0,0 +1,34 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::Ci::Reports::TerraformReports do + it 'initializes plans with and empty hash' do + expect(subject.plans).to eq({}) + end + + describe '#add_plan' do + context 'when providing two unique plans' do + it 'returns two plans' do + subject.add_plan('a/tfplan.json', { 'create' => 0, 'update' => 1, 'delete' => 0 }) + 
subject.add_plan('b/tfplan.json', { 'create' => 0, 'update' => 1, 'delete' => 0 }) + + expect(subject.plans).to eq({ + 'a/tfplan.json' => { 'create' => 0, 'update' => 1, 'delete' => 0 }, + 'b/tfplan.json' => { 'create' => 0, 'update' => 1, 'delete' => 0 } + }) + end + end + + context 'when providing the same plan twice' do + it 'returns the last added plan' do + subject.add_plan('tfplan.json', { 'create' => 0, 'update' => 0, 'delete' => 0 }) + subject.add_plan('tfplan.json', { 'create' => 0, 'update' => 1, 'delete' => 0 }) + + expect(subject.plans).to eq({ + 'tfplan.json' => { 'create' => 0, 'update' => 1, 'delete' => 0 } + }) + end + end + end +end diff --git a/spec/lib/gitlab/ci/reports/test_case_spec.rb b/spec/lib/gitlab/ci/reports/test_case_spec.rb index c0652288cca..b5883867983 100644 --- a/spec/lib/gitlab/ci/reports/test_case_spec.rb +++ b/spec/lib/gitlab/ci/reports/test_case_spec.rb @@ -62,7 +62,7 @@ describe Gitlab::Ci::Reports::TestCase do end context 'when attachment is present' do - let(:attachment_test_case) { build(:test_case, :with_attachment) } + let(:attachment_test_case) { build(:test_case, :failed_with_attachment) } it "initializes the attachment if present" do expect(attachment_test_case.attachment).to eq("some/path.png") diff --git a/spec/lib/gitlab/ci/reports/test_reports_spec.rb b/spec/lib/gitlab/ci/reports/test_reports_spec.rb index 638acde69eb..e51728496e1 100644 --- a/spec/lib/gitlab/ci/reports/test_reports_spec.rb +++ b/spec/lib/gitlab/ci/reports/test_reports_spec.rb @@ -127,7 +127,7 @@ describe Gitlab::Ci::Reports::TestReports do context 'when test suites contain an attachment' do let(:test_case_succes) { build(:test_case) } - let(:test_case_with_attachment) { build(:test_case, :with_attachment) } + let(:test_case_with_attachment) { build(:test_case, :failed_with_attachment) } before do test_reports.get_suite('rspec').add_test_case(test_case_succes) @@ -141,6 +141,29 @@ describe Gitlab::Ci::Reports::TestReports do end end + describe '#suite_errors' do + subject { test_reports.suite_errors } + + context 'when a suite has normal spec errors or failures' do + before do + test_reports.get_suite('junit').add_test_case(create_test_case_java_success) + test_reports.get_suite('junit').add_test_case(create_test_case_java_failed) + test_reports.get_suite('junit').add_test_case(create_test_case_java_error) + end + + it { is_expected.to be_empty } + end + + context 'when there is an error test case' do + before do + test_reports.get_suite('rspec').add_test_case(create_test_case_rspec_success) + test_reports.get_suite('junit').set_suite_error('Existential parsing error') + end + + it { is_expected.to eq({ 'junit' => 'Existential parsing error' }) } + end + end + Gitlab::Ci::Reports::TestCase::STATUS_TYPES.each do |status_type| describe "##{status_type}_count" do subject { test_reports.public_send("#{status_type}_count") } diff --git a/spec/lib/gitlab/ci/reports/test_suite_spec.rb b/spec/lib/gitlab/ci/reports/test_suite_spec.rb index 9d9774afc82..e0b2593353a 100644 --- a/spec/lib/gitlab/ci/reports/test_suite_spec.rb +++ b/spec/lib/gitlab/ci/reports/test_suite_spec.rb @@ -101,7 +101,7 @@ describe Gitlab::Ci::Reports::TestSuite do end context 'when test cases contain an attachment' do - let(:test_case_with_attachment) { build(:test_case, :with_attachment)} + let(:test_case_with_attachment) { build(:test_case, :failed_with_attachment)} before do test_suite.add_test_case(test_case_with_attachment) @@ -114,6 +114,31 @@ describe Gitlab::Ci::Reports::TestSuite do end end + describe 
'#set_suite_error' do + let(:set_suite_error) { test_suite.set_suite_error('message') } + + context 'when @suite_error is nil' do + it 'returns message' do + expect(set_suite_error).to eq('message') + end + + it 'sets the new message' do + set_suite_error + expect(test_suite.suite_error).to eq('message') + end + end + + context 'when a suite_error has already been set' do + before do + test_suite.set_suite_error('old message') + end + + it 'overwrites the existing message' do + expect { set_suite_error }.to change(test_suite, :suite_error).from('old message').to('message') + end + end + end + Gitlab::Ci::Reports::TestCase::STATUS_TYPES.each do |status_type| describe "##{status_type}" do subject { test_suite.public_send("#{status_type}") } diff --git a/spec/lib/gitlab/ci/templates/Jobs/browser_performance_testing_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/browser_performance_testing_gitlab_ci_yaml_spec.rb new file mode 100644 index 00000000000..54c3500b0a0 --- /dev/null +++ b/spec/lib/gitlab/ci/templates/Jobs/browser_performance_testing_gitlab_ci_yaml_spec.rb @@ -0,0 +1,85 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe 'Jobs/Browser-Performance-Testing.gitlab-ci.yml' do + subject(:template) do + <<~YAML + stages: + - test + - performance + + include: + - template: 'Jobs/Browser-Performance-Testing.gitlab-ci.yml' + + placeholder: + script: + - keep pipeline validator happy by having a job when stages are intentionally empty + YAML + end + + describe 'the created pipeline' do + let(:user) { create(:admin) } + let(:project) do + create(:project, :repository, variables: [ + build(:ci_variable, key: 'CI_KUBERNETES_ACTIVE', value: 'true') + ]) + end + + let(:default_branch) { 'master' } + let(:pipeline_ref) { default_branch } + let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) } + let(:pipeline) { service.execute!(:push) } + let(:build_names) { pipeline.builds.pluck(:name) } + + before do + stub_ci_pipeline_yaml_file(template) + + allow_any_instance_of(Ci::BuildScheduleWorker).to receive(:perform).and_return(true) + allow(project).to receive(:default_branch).and_return(default_branch) + end + + it 'has no errors' do + expect(pipeline.errors).to be_empty + end + + shared_examples_for 'performance job on tag or branch' do + it 'by default' do + expect(build_names).to include('performance') + end + + it 'when PERFORMANCE_DISABLED' do + create(:ci_variable, project: project, key: 'PERFORMANCE_DISABLED', value: '1') + + expect(build_names).not_to include('performance') + end + end + + context 'on master' do + it_behaves_like 'performance job on tag or branch' + end + + context 'on another branch' do + let(:pipeline_ref) { 'feature' } + + it_behaves_like 'performance job on tag or branch' + end + + context 'on tag' do + let(:pipeline_ref) { 'v1.0.0' } + + it_behaves_like 'performance job on tag or branch' + end + + context 'on merge request' do + let(:service) { MergeRequests::CreatePipelineService.new(project, user) } + let(:merge_request) { create(:merge_request, :simple, source_project: project) } + let(:pipeline) { service.execute(merge_request) } + + it 'has no jobs' do + expect(pipeline).to be_merge_request_event + expect(build_names).to be_empty + end + end + end +end diff --git a/spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb new file mode 100644 index 00000000000..b2a9e3f5cf4 --- /dev/null +++ 
b/spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb @@ -0,0 +1,58 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe 'Jobs/Build.gitlab-ci.yml' do + subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Jobs/Build') } + + describe 'the created pipeline' do + let_it_be(:user) { create(:admin) } + let_it_be(:project) { create(:project, :repository) } + + let(:default_branch) { 'master' } + let(:pipeline_ref) { default_branch } + let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) } + let(:pipeline) { service.execute!(:push) } + let(:build_names) { pipeline.builds.pluck(:name) } + + before do + stub_ci_pipeline_yaml_file(template.content) + allow_any_instance_of(Ci::BuildScheduleWorker).to receive(:perform).and_return(true) + allow(project).to receive(:default_branch).and_return(default_branch) + end + + context 'on master' do + it 'creates the build job' do + expect(build_names).to contain_exactly('build') + end + end + + context 'on another branch' do + let(:pipeline_ref) { 'feature' } + + it 'creates the build job' do + expect(build_names).to contain_exactly('build') + end + end + + context 'on tag' do + let(:pipeline_ref) { 'v1.0.0' } + + it 'creates the build job' do + expect(pipeline).to be_tag + expect(build_names).to contain_exactly('build') + end + end + + context 'on merge request' do + let(:service) { MergeRequests::CreatePipelineService.new(project, user) } + let(:merge_request) { create(:merge_request, :simple, source_project: project) } + let(:pipeline) { service.execute(merge_request) } + + it 'has no jobs' do + expect(pipeline).to be_merge_request_event + expect(build_names).to be_empty + end + end + end +end diff --git a/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb new file mode 100644 index 00000000000..9c5b2fd5099 --- /dev/null +++ b/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb @@ -0,0 +1,86 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe 'Jobs/Code-Quality.gitlab-ci.yml' do + subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Jobs/Code-Quality') } + + describe 'the created pipeline' do + let_it_be(:user) { create(:admin) } + let_it_be(:project) { create(:project, :repository) } + + let(:default_branch) { 'master' } + let(:pipeline_ref) { default_branch } + let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) } + let(:pipeline) { service.execute!(:push) } + let(:build_names) { pipeline.builds.pluck(:name) } + + before do + stub_ci_pipeline_yaml_file(template.content) + allow_any_instance_of(Ci::BuildScheduleWorker).to receive(:perform).and_return(true) + allow(project).to receive(:default_branch).and_return(default_branch) + end + + context 'on master' do + it 'creates the code_quality job' do + expect(build_names).to contain_exactly('code_quality') + end + end + + context 'on another branch' do + let(:pipeline_ref) { 'feature' } + + it 'creates the code_quality job' do + expect(build_names).to contain_exactly('code_quality') + end + end + + context 'on tag' do + let(:pipeline_ref) { 'v1.0.0' } + + it 'creates the code_quality job' do + expect(pipeline).to be_tag + expect(build_names).to contain_exactly('code_quality') + end + end + + context 'on merge request' do + let(:service) { MergeRequests::CreatePipelineService.new(project, user) } + let(:merge_request) { create(:merge_request, :simple, source_project: project) } 
+ let(:pipeline) { service.execute(merge_request) } + + it 'has no jobs' do + expect(pipeline).to be_merge_request_event + expect(build_names).to be_empty + end + end + + context 'CODE_QUALITY_DISABLED is set' do + before do + create(:ci_variable, key: 'CODE_QUALITY_DISABLED', value: 'true', project: project) + end + + context 'on master' do + it 'has no jobs' do + expect { pipeline }.to raise_error(Ci::CreatePipelineService::CreateError) + end + end + + context 'on another branch' do + let(:pipeline_ref) { 'feature' } + + it 'has no jobs' do + expect { pipeline }.to raise_error(Ci::CreatePipelineService::CreateError) + end + end + + context 'on tag' do + let(:pipeline_ref) { 'v1.0.0' } + + it 'has no jobs' do + expect { pipeline }.to raise_error(Ci::CreatePipelineService::CreateError) + end + end + end + end +end diff --git a/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb new file mode 100644 index 00000000000..a6ae23c85d3 --- /dev/null +++ b/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb @@ -0,0 +1,222 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe 'Jobs/Deploy.gitlab-ci.yml' do + subject(:template) do + <<~YAML + stages: + - test + - review + - staging + - canary + - production + - incremental rollout 10% + - incremental rollout 25% + - incremental rollout 50% + - incremental rollout 100% + - cleanup + + include: + - template: Jobs/Deploy.gitlab-ci.yml + + placeholder: + script: + - echo "Ensure at least one job to keep pipeline validator happy" + YAML + end + + describe 'the created pipeline' do + let(:user) { create(:admin) } + let(:project) { create(:project, :repository) } + + let(:default_branch) { 'master' } + let(:pipeline_ref) { default_branch } + let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) } + let(:pipeline) { service.execute!(:push) } + let(:build_names) { pipeline.builds.pluck(:name) } + + before do + stub_ci_pipeline_yaml_file(template) + + allow_any_instance_of(Ci::BuildScheduleWorker).to receive(:perform).and_return(true) + allow(project).to receive(:default_branch).and_return(default_branch) + end + + context 'with no cluster' do + it 'does not create any kubernetes deployment jobs' do + expect(build_names).to eq %w(placeholder) + end + end + + context 'with only a disabled cluster' do + let!(:cluster) { create(:cluster, :project, :provided_by_gcp, enabled: false, projects: [project]) } + + it 'does not create any kubernetes deployment jobs' do + expect(build_names).to eq %w(placeholder) + end + end + + context 'with an active cluster' do + let!(:cluster) { create(:cluster, :project, :provided_by_gcp, projects: [project]) } + + context 'on master' do + it 'by default' do + expect(build_names).to include('production') + expect(build_names).not_to include('review') + end + + it 'when CANARY_ENABLED' do + create(:ci_variable, project: project, key: 'CANARY_ENABLED', value: 'true') + + expect(build_names).to include('production_manual') + expect(build_names).to include('canary') + expect(build_names).not_to include('production') + end + + it 'when STAGING_ENABLED' do + create(:ci_variable, project: project, key: 'STAGING_ENABLED', value: 'true') + + expect(build_names).to include('production_manual') + expect(build_names).to include('staging') + expect(build_names).not_to include('production') + end + + it 'when INCREMENTAL_ROLLOUT_MODE == timed' do + create(:ci_variable, project: project, key: 
'INCREMENTAL_ROLLOUT_ENABLED', value: 'true') + create(:ci_variable, project: project, key: 'INCREMENTAL_ROLLOUT_MODE', value: 'timed') + + expect(build_names).not_to include('production_manual') + expect(build_names).not_to include('production') + expect(build_names).not_to include( + 'rollout 10%', + 'rollout 25%', + 'rollout 50%', + 'rollout 100%' + ) + expect(build_names).to include( + 'timed rollout 10%', + 'timed rollout 25%', + 'timed rollout 50%', + 'timed rollout 100%' + ) + end + + it 'when INCREMENTAL_ROLLOUT_ENABLED' do + create(:ci_variable, project: project, key: 'INCREMENTAL_ROLLOUT_ENABLED', value: 'true') + + expect(build_names).not_to include('production_manual') + expect(build_names).not_to include('production') + expect(build_names).not_to include( + 'timed rollout 10%', + 'timed rollout 25%', + 'timed rollout 50%', + 'timed rollout 100%' + ) + expect(build_names).to include( + 'rollout 10%', + 'rollout 25%', + 'rollout 50%', + 'rollout 100%' + ) + end + + it 'when INCREMENTAL_ROLLOUT_MODE == manual' do + create(:ci_variable, project: project, key: 'INCREMENTAL_ROLLOUT_MODE', value: 'manual') + + expect(build_names).not_to include('production_manual') + expect(build_names).not_to include('production') + expect(build_names).not_to include( + 'timed rollout 10%', + 'timed rollout 25%', + 'timed rollout 50%', + 'timed rollout 100%' + ) + expect(build_names).to include( + 'rollout 10%', + 'rollout 25%', + 'rollout 50%', + 'rollout 100%' + ) + end + end + + shared_examples_for 'review app deployment' do + it 'creates the review and stop_review jobs but no production jobs' do + expect(build_names).to include('review') + expect(build_names).to include('stop_review') + expect(build_names).not_to include('production') + expect(build_names).not_to include('production_manual') + expect(build_names).not_to include('staging') + expect(build_names).not_to include('canary') + expect(build_names).not_to include('timed rollout 10%') + expect(build_names).not_to include('timed rollout 25%') + expect(build_names).not_to include('timed rollout 50%') + expect(build_names).not_to include('timed rollout 100%') + expect(build_names).not_to include('rollout 10%') + expect(build_names).not_to include('rollout 25%') + expect(build_names).not_to include('rollout 50%') + expect(build_names).not_to include('rollout 100%') + end + + it 'does not include review when REVIEW_DISABLED' do + create(:ci_variable, project: project, key: 'REVIEW_DISABLED', value: 'true') + + expect(build_names).not_to include('review') + expect(build_names).not_to include('stop_review') + end + end + + context 'on branch' do + let(:pipeline_ref) { 'feature' } + + before do + allow_any_instance_of(Gitlab::Ci::Pipeline::Chain::Validate::Repository).to receive(:perform!).and_return(true) + end + + it_behaves_like 'review app deployment' + + context 'when INCREMENTAL_ROLLOUT_ENABLED' do + before do + create(:ci_variable, project: project, key: 'INCREMENTAL_ROLLOUT_ENABLED', value: 'true') + end + + it_behaves_like 'review app deployment' + end + + context 'when INCREMENTAL_ROLLOUT_MODE == "timed"' do + before do + create(:ci_variable, project: project, key: 'INCREMENTAL_ROLLOUT_MODE', value: 'timed') + end + + it_behaves_like 'review app deployment' + end + + context 'when INCREMENTAL_ROLLOUT_MODE == "manual"' do + before do + create(:ci_variable, project: project, key: 'INCREMENTAL_ROLLOUT_MODE', value: 'manual') + end + + it_behaves_like 'review app deployment' + end + end + + context 'on tag' do + let(:pipeline_ref) { 
'v1.0.0' } + + it_behaves_like 'review app deployment' + end + + context 'on merge request' do + let(:service) { MergeRequests::CreatePipelineService.new(project, user) } + let(:merge_request) { create(:merge_request, :simple, source_project: project) } + let(:pipeline) { service.execute(merge_request) } + + it 'has no jobs' do + expect(pipeline).to be_merge_request_event + expect(build_names).to be_empty + end + end + end + end +end diff --git a/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb new file mode 100644 index 00000000000..2186bf038eb --- /dev/null +++ b/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb @@ -0,0 +1,86 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe 'Jobs/Test.gitlab-ci.yml' do + subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Jobs/Test') } + + describe 'the created pipeline' do + let_it_be(:user) { create(:admin) } + let_it_be(:project) { create(:project, :repository) } + + let(:default_branch) { 'master' } + let(:pipeline_ref) { default_branch } + let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) } + let(:pipeline) { service.execute!(:push) } + let(:build_names) { pipeline.builds.pluck(:name) } + + before do + stub_ci_pipeline_yaml_file(template.content) + allow_any_instance_of(Ci::BuildScheduleWorker).to receive(:perform).and_return(true) + allow(project).to receive(:default_branch).and_return(default_branch) + end + + context 'on master' do + it 'creates the test job' do + expect(build_names).to contain_exactly('test') + end + end + + context 'on another branch' do + let(:pipeline_ref) { 'feature' } + + it 'creates the test job' do + expect(build_names).to contain_exactly('test') + end + end + + context 'on tag' do + let(:pipeline_ref) { 'v1.0.0' } + + it 'creates the test job' do + expect(pipeline).to be_tag + expect(build_names).to contain_exactly('test') + end + end + + context 'on merge request' do + let(:service) { MergeRequests::CreatePipelineService.new(project, user) } + let(:merge_request) { create(:merge_request, :simple, source_project: project) } + let(:pipeline) { service.execute(merge_request) } + + it 'has no jobs' do + expect(pipeline).to be_merge_request_event + expect(build_names).to be_empty + end + end + + context 'TEST_DISABLED is set' do + before do + create(:ci_variable, key: 'TEST_DISABLED', value: 'true', project: project) + end + + context 'on master' do + it 'has no jobs' do + expect { pipeline }.to raise_error(Ci::CreatePipelineService::CreateError) + end + end + + context 'on another branch' do + let(:pipeline_ref) { 'feature' } + + it 'has no jobs' do + expect { pipeline }.to raise_error(Ci::CreatePipelineService::CreateError) + end + end + + context 'on tag' do + let(:pipeline_ref) { 'v1.0.0' } + + it 'has no jobs' do + expect { pipeline }.to raise_error(Ci::CreatePipelineService::CreateError) + end + end + end + end +end diff --git a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb index 0c5d172f17c..af6ec25b9d6 100644 --- a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb +++ b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb @@ -20,16 +20,8 @@ describe 'Auto-DevOps.gitlab-ci.yml' do allow(project).to receive(:default_branch).and_return(default_branch) end - it 'creates a build and a test job' do - expect(build_names).to include('build', 'test') - end - - context 
'when the project has no active cluster' do - it 'only creates a build and a test stage' do - expect(pipeline.stages_names).to eq(%w(build test)) - end - - it 'does not create any deployment-related builds' do + shared_examples 'no Kubernetes deployment job' do + it 'does not create any Kubernetes deployment-related builds' do expect(build_names).not_to include('production') expect(build_names).not_to include('production_manual') expect(build_names).not_to include('staging') @@ -39,13 +31,95 @@ describe 'Auto-DevOps.gitlab-ci.yml' do end end - context 'when the project has an active cluster' do - let(:cluster) { create(:cluster, :project, :provided_by_gcp, projects: [project]) } + it 'creates a build and a test job' do + expect(build_names).to include('build', 'test') + end + + context 'when the project is set for deployment to AWS' do + let(:platform_value) { 'ECS' } before do - allow(cluster).to receive(:active?).and_return(true) + create(:ci_variable, project: project, key: 'AUTO_DEVOPS_PLATFORM_TARGET', value: platform_value) + end + + shared_examples 'no ECS job when AUTO_DEVOPS_PLATFORM_TARGET is not present' do |job_name| + context 'when AUTO_DEVOPS_PLATFORM_TARGET is nil' do + let(:platform_value) { nil } + + it 'does not trigger the job' do + expect(build_names).not_to include(job_name) + end + end + + context 'when AUTO_DEVOPS_PLATFORM_TARGET is empty' do + let(:platform_value) { '' } + + it 'does not trigger the job' do + expect(build_names).not_to include(job_name) + end + end + end + + it_behaves_like 'no Kubernetes deployment job' + + it_behaves_like 'no ECS job when AUTO_DEVOPS_PLATFORM_TARGET is not present' do + let(:job_name) { 'production_ecs' } + end + + it 'creates an ECS deployment job for production only' do + expect(build_names).not_to include('review_ecs') + expect(build_names).to include('production_ecs') end + context 'and we are not on the default branch' do + let(:platform_value) { 'ECS' } + let(:pipeline_branch) { 'patch-1' } + + before do + project.repository.create_branch(pipeline_branch) + end + + it_behaves_like 'no ECS job when AUTO_DEVOPS_PLATFORM_TARGET is not present' do + let(:job_name) { 'review_ecs' } + end + + it 'creates an ECS deployment job for review only' do + expect(build_names).to include('review_ecs') + expect(build_names).not_to include('production_ecs') + expect(build_names).not_to include('review') + expect(build_names).not_to include('production') + end + end + + context 'and when the project has an active cluster' do + let(:cluster) { create(:cluster, :project, :provided_by_gcp, projects: [project]) } + + before do + allow(cluster).to receive(:active?).and_return(true) + end + + context 'on default branch' do + it 'triggers the deployment to Kubernetes, not to ECS' do + expect(build_names).not_to include('review') + expect(build_names).to include('production') + expect(build_names).not_to include('production_ecs') + expect(build_names).not_to include('review_ecs') + end + end + end + end + + context 'when the project has no active cluster' do + it 'only creates a build and a test stage' do + expect(pipeline.stages_names).to eq(%w(build test)) + end + + it_behaves_like 'no Kubernetes deployment job' + end + + context 'when the project has an active cluster' do + let!(:cluster) { create(:cluster, :project, :provided_by_gcp, projects: [project]) } + describe 'deployment-related builds' do context 'on default branch' do it 'does not include rollout jobs besides production' do diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb 
b/spec/lib/gitlab/ci/yaml_processor_spec.rb index 70c3c5ab339..c93bb901981 100644 --- a/spec/lib/gitlab/ci/yaml_processor_spec.rb +++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb @@ -1364,6 +1364,24 @@ module Gitlab expect { described_class.new(config) }.to raise_error(described_class::ValidationError) end + + it 'populates build options with complete artifacts configuration' do + stub_feature_flags(ci_artifacts_exclude: true) + + config = <<~YAML + test: + script: echo "Hello World" + artifacts: + paths: + - my/test + exclude: + - my/test/something + YAML + + attributes = Gitlab::Ci::YamlProcessor.new(config).build_attributes('test') + + expect(attributes.dig(*%i[options artifacts exclude])).to eq(%w[my/test/something]) + end end describe "release" do @@ -2264,14 +2282,14 @@ module Gitlab config = YAML.dump({ rspec: { script: "test", type: "acceptance" } }) expect do Gitlab::Ci::YamlProcessor.new(config) - end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "rspec job: stage parameter should be .pre, build, test, deploy, .post") + end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "rspec job: chosen stage does not exist; available stages are .pre, build, test, deploy, .post") end it "returns errors if job stage is not a defined stage" do config = YAML.dump({ types: %w(build test), rspec: { script: "test", type: "acceptance" } }) expect do Gitlab::Ci::YamlProcessor.new(config) - end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "rspec job: stage parameter should be .pre, build, test, .post") + end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "rspec job: chosen stage does not exist; available stages are .pre, build, test, .post") end it "returns errors if stages is not an array" do diff --git a/spec/lib/gitlab/code_navigation_path_spec.rb b/spec/lib/gitlab/code_navigation_path_spec.rb index cafe362c8c7..938a2f821fd 100644 --- a/spec/lib/gitlab/code_navigation_path_spec.rb +++ b/spec/lib/gitlab/code_navigation_path_spec.rb @@ -4,18 +4,29 @@ require 'spec_helper' describe Gitlab::CodeNavigationPath do context 'when there is an artifact with code navigation data' do - let(:project) { create(:project, :repository) } - let(:sha) { project.commit.id } - let(:build_name) { Gitlab::CodeNavigationPath::CODE_NAVIGATION_JOB_NAME } + let_it_be(:project) { create(:project, :repository) } + let_it_be(:sha) { project.repository.commits('master', limit: 5).last.id } + let_it_be(:pipeline) { create(:ci_pipeline, project: project, sha: sha) } + let_it_be(:job) { create(:ci_build, pipeline: pipeline) } + let_it_be(:artifact) { create(:ci_job_artifact, :lsif, job: job) } + + let(:commit_sha) { sha } let(:path) { 'lib/app.rb' } - let!(:pipeline) { create(:ci_pipeline, project: project, sha: sha) } - let!(:job) { create(:ci_build, pipeline: pipeline, name: build_name) } - let!(:artifact) { create(:ci_job_artifact, :lsif, job: job) } - subject { described_class.new(project, sha).full_json_path_for(path) } + subject { described_class.new(project, commit_sha).full_json_path_for(path) } + + context 'when a pipeline exists for a sha' do + it 'returns path to a file in the artifact' do + expect(subject).to eq("/#{project.full_path}/-/jobs/#{job.id}/artifacts/raw/lsif/#{path}.json?file_type=lsif") + end + end + + context 'when a pipeline exists for the latest commits' do + let(:commit_sha) { project.commit.id } - it 'assigns code_navigation_build variable' do - expect(subject).to eq("/#{project.full_path}/-/jobs/#{job.id}/artifacts/raw/lsif/#{path}.json") + it 'returns
path to a file in the artifact' do + expect(subject).to eq("/#{project.full_path}/-/jobs/#{job.id}/artifacts/raw/lsif/#{path}.json?file_type=lsif") + end end context 'when code_navigation feature is disabled' do @@ -23,7 +34,7 @@ describe Gitlab::CodeNavigationPath do stub_feature_flags(code_navigation: false) end - it 'does not assign code_navigation_build variable' do + it 'returns nil' do expect(subject).to be_nil end end diff --git a/spec/lib/gitlab/config_checker/external_database_checker_spec.rb b/spec/lib/gitlab/config_checker/external_database_checker_spec.rb new file mode 100644 index 00000000000..d86d132c237 --- /dev/null +++ b/spec/lib/gitlab/config_checker/external_database_checker_spec.rb @@ -0,0 +1,56 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::ConfigChecker::ExternalDatabaseChecker do + describe '#check' do + subject { described_class.check } + + context 'database version is not deprecated' do + before do + allow(described_class).to receive(:db_version_deprecated?).and_return(false) + end + + it { is_expected.to be_empty } + end + + context 'database version is deprecated' do + before do + allow(described_class).to receive(:db_version_deprecated?).and_return(true) + end + + let(:notice_deprecated_database) do + { + type: 'warning', + message: _('Note that PostgreSQL 11 will become the minimum required PostgreSQL version in GitLab 13.0 (May 2020). '\ + 'PostgreSQL 9.6 and PostgreSQL 10 will no longer be supported in GitLab 13.0. '\ + 'Please consider upgrading your PostgreSQL version (%{db_version}) soon.') % { db_version: Gitlab::Database.version.to_s } + } + end + + it 'reports deprecated database notices' do + is_expected.to contain_exactly(notice_deprecated_database) + end + end + end + + describe '#db_version_deprecated' do + subject { described_class.db_version_deprecated? 
} + + context 'database version is not deprecated' do + before do + allow(Gitlab::Database).to receive(:version).and_return(11) + end + + it { is_expected.to be false } + end + + context 'database version is deprecated' do + before do + allow(Gitlab::Database).to receive(:version).and_return(10) + end + + it { is_expected.to be true } + end + end +end diff --git a/spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb b/spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb deleted file mode 100644 index 2242895f8ea..00000000000 --- a/spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb +++ /dev/null @@ -1,176 +0,0 @@ -# frozen_string_literal: true -require 'spec_helper' - -describe Gitlab::CycleAnalytics::GroupStageSummary do - let(:group) { create(:group) } - let(:project) { create(:project, :repository, namespace: group) } - let(:project_2) { create(:project, :repository, namespace: group) } - let(:from) { 1.day.ago } - let(:user) { create(:user, :admin) } - - subject { described_class.new(group, options: { from: Time.now, current_user: user }).data } - - describe "#new_issues" do - context 'with from date' do - before do - Timecop.freeze(5.days.ago) { create(:issue, project: project) } - Timecop.freeze(5.days.ago) { create(:issue, project: project_2) } - Timecop.freeze(5.days.from_now) { create(:issue, project: project) } - Timecop.freeze(5.days.from_now) { create(:issue, project: project_2) } - end - - it "finds the number of issues created after it" do - expect(subject.first[:value]).to eq('2') - end - - context 'with subgroups' do - before do - Timecop.freeze(5.days.from_now) { create(:issue, project: create(:project, namespace: create(:group, parent: group))) } - end - - it "finds issues from them" do - expect(subject.first[:value]).to eq('3') - end - end - - context 'with projects specified in options' do - before do - Timecop.freeze(5.days.from_now) { create(:issue, project: create(:project, namespace: group)) } - end - - subject { described_class.new(group, options: { from: Time.now, current_user: user, projects: [project.id, project_2.id] }).data } - - it 'finds issues from those projects' do - expect(subject.first[:value]).to eq('2') - end - end - - context 'when `from` and `to` parameters are provided' do - subject { described_class.new(group, options: { from: 10.days.ago, to: Time.now, current_user: user }).data } - - it 'finds issues from 5 days ago' do - expect(subject.first[:value]).to eq('2') - end - end - end - - context 'with other projects' do - before do - Timecop.freeze(5.days.from_now) { create(:issue, project: create(:project, namespace: create(:group))) } - Timecop.freeze(5.days.from_now) { create(:issue, project: project) } - Timecop.freeze(5.days.from_now) { create(:issue, project: project_2) } - end - - it "doesn't find issues from them" do - expect(subject.first[:value]).to eq('2') - end - end - end - - describe "#deploys" do - context 'with from date' do - before do - Timecop.freeze(5.days.ago) { create(:deployment, :success, project: project) } - Timecop.freeze(5.days.from_now) { create(:deployment, :success, project: project) } - Timecop.freeze(5.days.ago) { create(:deployment, :success, project: project_2) } - Timecop.freeze(5.days.from_now) { create(:deployment, :success, project: project_2) } - end - - it "finds the number of deploys made created after it" do - expect(subject.second[:value]).to eq('2') - end - - context 'with subgroups' do - before do - Timecop.freeze(5.days.from_now) do - create(:deployment, :success, project: 
create(:project, :repository, namespace: create(:group, parent: group))) - end - end - - it "finds deploys from them" do - expect(subject.second[:value]).to eq('3') - end - end - - context 'with projects specified in options' do - before do - Timecop.freeze(5.days.from_now) do - create(:deployment, :success, project: create(:project, :repository, namespace: group, name: 'not_applicable')) - end - end - - subject { described_class.new(group, options: { from: Time.now, current_user: user, projects: [project.id, project_2.id] }).data } - - it 'shows deploys from those projects' do - expect(subject.second[:value]).to eq('2') - end - end - - context 'when `from` and `to` parameters are provided' do - subject { described_class.new(group, options: { from: 10.days.ago, to: Time.now, current_user: user }).data } - - it 'finds deployments from 5 days ago' do - expect(subject.second[:value]).to eq('2') - end - end - end - - context 'with other projects' do - before do - Timecop.freeze(5.days.from_now) do - create(:deployment, :success, project: create(:project, :repository, namespace: create(:group))) - end - end - - it "doesn't find deploys from them" do - expect(subject.second[:value]).to eq('-') - end - end - end - - describe '#deployment_frequency' do - let(:from) { 6.days.ago } - let(:to) { nil } - - subject do - described_class.new(group, options: { - from: from, - to: to, - current_user: user - }).data.third - end - - it 'includes the unit: `per day`' do - expect(subject[:unit]).to eq(_('per day')) - end - - before do - Timecop.freeze(5.days.ago) do - create(:deployment, :success, project: project) - end - end - - context 'when `to` is nil' do - it 'includes range until now' do - # 1 deployment over 7 days - expect(subject[:value]).to eq('0.1') - end - end - - context 'when `to` is given' do - let(:from) { 10.days.ago } - let(:to) { 10.days.from_now } - - before do - Timecop.freeze(5.days.from_now) do - create(:deployment, :success, project: project) - end - end - - it 'returns deployment frequency within `from` and `to` range' do - # 2 deployments over 20 days - expect(subject[:value]).to eq('0.1') - end - end - end -end diff --git a/spec/lib/gitlab/cycle_analytics/summary/value_spec.rb b/spec/lib/gitlab/cycle_analytics/summary/value_spec.rb new file mode 100644 index 00000000000..d9bdfa92a04 --- /dev/null +++ b/spec/lib/gitlab/cycle_analytics/summary/value_spec.rb @@ -0,0 +1,33 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::CycleAnalytics::Summary::Value do + describe Gitlab::CycleAnalytics::Summary::Value::None do + it 'returns `-`' do + expect(described_class.new.to_s).to eq('-') + end + end + + describe Gitlab::CycleAnalytics::Summary::Value::Numeric do + it 'returns the string representation of the number' do + expect(described_class.new(3.2).to_s).to eq('3.2') + end + end + + describe Gitlab::CycleAnalytics::Summary::Value::PrettyNumeric do + describe '#to_s' do + it 'returns `-` when the number is 0' do + expect(described_class.new(0).to_s).to eq('-') + end + + it 'returns `-` when the number is nil' do + expect(described_class.new(nil).to_s).to eq('-') + end + + it 'returns the string representation of the number' do + expect(described_class.new(100).to_s).to eq('100') + end + end + end +end diff --git a/spec/lib/gitlab/danger/changelog_spec.rb b/spec/lib/gitlab/danger/changelog_spec.rb index ba23c3828de..8929374fb87 100644 --- a/spec/lib/gitlab/danger/changelog_spec.rb +++ b/spec/lib/gitlab/danger/changelog_spec.rb @@ -86,14 +86,6 @@ describe 
Gitlab::Danger::Changelog do end end - describe '#presented_no_changelog_labels' do - subject { changelog.presented_no_changelog_labels } - - it 'returns the labels formatted' do - is_expected.to eq('~backstage, ~ci-build, ~meta') - end - end - describe '#ee_changelog?' do subject { changelog.ee_changelog? } diff --git a/spec/lib/gitlab/danger/helper_spec.rb b/spec/lib/gitlab/danger/helper_spec.rb index d5d582d7d6c..c2c881fd589 100644 --- a/spec/lib/gitlab/danger/helper_spec.rb +++ b/spec/lib/gitlab/danger/helper_spec.rb @@ -363,4 +363,69 @@ describe Gitlab::Danger::Helper do expect(helper).to be_security_mr end end + + describe '#mr_has_label?' do + it 'returns false when `gitlab_helper` is unavailable' do + expect(helper).to receive(:gitlab_helper).and_return(nil) + + expect(helper.mr_has_labels?('telemetry')).to be_falsey + end + + context 'when mr has labels' do + before do + mr_labels = ['telemetry', 'telemetry::reviewed'] + expect(fake_gitlab).to receive(:mr_labels).and_return(mr_labels) + end + + it 'returns true with a matched label' do + expect(helper.mr_has_labels?('telemetry')).to be_truthy + end + + it 'returns false with unmatched label' do + expect(helper.mr_has_labels?('database')).to be_falsey + end + + it 'returns true with an array of labels' do + expect(helper.mr_has_labels?(['telemetry', 'telemetry::reviewed'])).to be_truthy + end + + it 'returns true with multi arguments with matched labels' do + expect(helper.mr_has_labels?('telemetry', 'telemetry::reviewed')).to be_truthy + end + + it 'returns false with multi arguments with unmatched labels' do + expect(helper.mr_has_labels?('telemetry', 'telemetry::non existing')).to be_falsey + end + end + end + + describe '#labels_list' do + let(:labels) { ['telemetry', 'telemetry::reviewed'] } + + it 'composes the labels string' do + expect(helper.labels_list(labels)).to eq('~"telemetry", ~"telemetry::reviewed"') + end + + context 'when passing a separator' do + it 'composes the labels string with the given separator' do + expect(helper.labels_list(labels, sep: ' ')).to eq('~"telemetry" ~"telemetry::reviewed"') + end + end + + it 'returns empty string for empty array' do + expect(helper.labels_list([])).to eq('') + end + end + + describe '#prepare_labels_for_mr' do + it 'composes the labels string' do + mr_labels = ['telemetry', 'telemetry::reviewed'] + + expect(helper.prepare_labels_for_mr(mr_labels)).to eq('/label ~"telemetry" ~"telemetry::reviewed"') + end + + it 'returns empty string for empty array' do + expect(helper.prepare_labels_for_mr([])).to eq('') + end + end end diff --git a/spec/lib/gitlab/danger/teammate_spec.rb b/spec/lib/gitlab/danger/teammate_spec.rb index 570f4bd27cc..ea5aecbc597 100644 --- a/spec/lib/gitlab/danger/teammate_spec.rb +++ b/spec/lib/gitlab/danger/teammate_spec.rb @@ -163,6 +163,13 @@ describe Gitlab::Danger::Teammate do { message: 'OOO: massage' } | false { message: 'love it SOOO much' } | false { emoji: 'red_circle' } | false + { emoji: 'palm_tree' } | false + { emoji: 'beach' } | false + { emoji: 'beach_umbrella' } | false + { emoji: 'beach_with_umbrella' } | false + { emoji: nil } | true + { emoji: '' } | true + { emoji: 'dancer' } | true end with_them do @@ -175,9 +182,9 @@ describe Gitlab::Danger::Teammate do end it 'returns true if request fails' do - expect(Gitlab::Danger::RequestHelper).to receive(:http_get_json) - .twice - .and_raise(Gitlab::Danger::RequestHelper::HTTPError.new) + expect(Gitlab::Danger::RequestHelper) + .to receive(:http_get_json) + 
.and_raise(Gitlab::Danger::RequestHelper::HTTPError.new) expect(subject.available?).to be true end diff --git a/spec/lib/gitlab/database/batch_count_spec.rb b/spec/lib/gitlab/database/batch_count_spec.rb index 7be84b8f980..e7cb53f2dbd 100644 --- a/spec/lib/gitlab/database/batch_count_spec.rb +++ b/spec/lib/gitlab/database/batch_count_spec.rb @@ -35,6 +35,10 @@ describe Gitlab::Database::BatchCount do expect(described_class.batch_count(model, "#{model.table_name}.id")).to eq(5) end + it 'counts with Arel column' do + expect(described_class.batch_count(model, model.arel_table[:id])).to eq(5) + end + it 'counts table with batch_size 50K' do expect(described_class.batch_count(model, batch_size: 50_000)).to eq(5) end @@ -98,6 +102,10 @@ describe Gitlab::Database::BatchCount do expect(described_class.batch_distinct_count(model, "#{model.table_name}.#{column}")).to eq(2) end + it 'counts with Arel column' do + expect(described_class.batch_distinct_count(model, model.arel_table[column])).to eq(2) + end + it 'counts with :column field with batch_size of 50K' do expect(described_class.batch_distinct_count(model, column, batch_size: 50_000)).to eq(2) end diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb index 3a0148615b9..203d39be22b 100644 --- a/spec/lib/gitlab/database/migration_helpers_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers_spec.rb @@ -217,9 +217,10 @@ describe Gitlab::Database::MigrationHelpers do it 'appends ON DELETE SET NULL statement' do expect(model).to receive(:with_lock_retries).and_call_original expect(model).to receive(:disable_statement_timeout).and_call_original + expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/statement_timeout/) expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/) - expect(model).to receive(:execute).with(/RESET ALL/) + expect(model).to receive(:execute).ordered.with(/RESET ALL/) expect(model).to receive(:execute).with(/ON DELETE SET NULL/) @@ -233,9 +234,10 @@ describe Gitlab::Database::MigrationHelpers do it 'appends ON DELETE CASCADE statement' do expect(model).to receive(:with_lock_retries).and_call_original expect(model).to receive(:disable_statement_timeout).and_call_original + expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/statement_timeout/) expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/) - expect(model).to receive(:execute).with(/RESET ALL/) + expect(model).to receive(:execute).ordered.with(/RESET ALL/) expect(model).to receive(:execute).with(/ON DELETE CASCADE/) @@ -249,9 +251,10 @@ describe Gitlab::Database::MigrationHelpers do it 'appends no ON DELETE statement' do expect(model).to receive(:with_lock_retries).and_call_original expect(model).to receive(:disable_statement_timeout).and_call_original + expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/statement_timeout/) expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/) - expect(model).to receive(:execute).with(/RESET ALL/) + expect(model).to receive(:execute).ordered.with(/RESET ALL/) expect(model).not_to receive(:execute).with(/ON DELETE/) @@ -266,10 +269,11 @@ describe Gitlab::Database::MigrationHelpers do it 'creates a concurrent foreign key and validates it' do expect(model).to receive(:with_lock_retries).and_call_original expect(model).to 
receive(:disable_statement_timeout).and_call_original + expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/statement_timeout/) expect(model).to receive(:execute).ordered.with(/NOT VALID/) expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/) - expect(model).to receive(:execute).with(/RESET ALL/) + expect(model).to receive(:execute).ordered.with(/RESET ALL/) model.add_concurrent_foreign_key(:projects, :users, column: :user_id) end @@ -293,10 +297,11 @@ describe Gitlab::Database::MigrationHelpers do it 'creates a new foreign key' do expect(model).to receive(:with_lock_retries).and_call_original expect(model).to receive(:disable_statement_timeout).and_call_original + expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/statement_timeout/) expect(model).to receive(:execute).ordered.with(/NOT VALID/) expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT.+foo/) - expect(model).to receive(:execute).with(/RESET ALL/) + expect(model).to receive(:execute).ordered.with(/RESET ALL/) model.add_concurrent_foreign_key(:projects, :users, column: :user_id, name: :foo) end @@ -321,10 +326,11 @@ describe Gitlab::Database::MigrationHelpers do it 'creates a new foreign key' do expect(model).to receive(:with_lock_retries).and_call_original expect(model).to receive(:disable_statement_timeout).and_call_original + expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/statement_timeout/) expect(model).to receive(:execute).ordered.with(/NOT VALID/) expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT.+bar/) - expect(model).to receive(:execute).with(/RESET ALL/) + expect(model).to receive(:execute).ordered.with(/RESET ALL/) model.add_concurrent_foreign_key(:projects, :users, column: :user_id, name: :bar) end @@ -361,6 +367,7 @@ describe Gitlab::Database::MigrationHelpers do aggregate_failures do expect(model).not_to receive(:concurrent_foreign_key_name) expect(model).to receive(:disable_statement_timeout).and_call_original + expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/statement_timeout/) expect(model).to receive(:execute).ordered.with(/ALTER TABLE projects VALIDATE CONSTRAINT/) expect(model).to receive(:execute).ordered.with(/RESET ALL/) @@ -377,6 +384,7 @@ describe Gitlab::Database::MigrationHelpers do aggregate_failures do expect(model).to receive(:concurrent_foreign_key_name) expect(model).to receive(:disable_statement_timeout).and_call_original + expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/statement_timeout/) expect(model).to receive(:execute).ordered.with(/ALTER TABLE projects VALIDATE CONSTRAINT/) expect(model).to receive(:execute).ordered.with(/RESET ALL/) @@ -527,6 +535,26 @@ describe Gitlab::Database::MigrationHelpers do end end end + + # This spec runs without an enclosing transaction (:delete truncation method for db_cleaner) + context 'when the statement_timeout is already disabled', :delete do + before do + ActiveRecord::Base.connection.execute('SET statement_timeout TO 0') + end + + after do + # Use ActiveRecord::Base.connection instead of model.execute + # so that this call is not counted below + ActiveRecord::Base.connection.execute('RESET ALL') + end + + it 'yields control without disabling the timeout or resetting' do + 
expect(model).not_to receive(:execute).with('SET statement_timeout TO 0') + expect(model).not_to receive(:execute).with('RESET ALL') + + expect { |block| model.disable_statement_timeout(&block) }.to yield_control + end + end end describe '#true_value' do @@ -596,140 +624,12 @@ describe Gitlab::Database::MigrationHelpers do describe '#add_column_with_default' do let(:column) { Project.columns.find { |c| c.name == "id" } } - context 'outside of a transaction' do - context 'when a column limit is not set' do - before do - expect(model).to receive(:transaction_open?) - .and_return(false) - .at_least(:once) - - expect(model).to receive(:transaction).and_yield - - expect(model).to receive(:add_column) - .with(:projects, :foo, :integer, default: nil) - - expect(model).to receive(:change_column_default) - .with(:projects, :foo, 10) - - expect(model).to receive(:column_for) - .with(:projects, :foo).and_return(column) - end - - it 'adds the column while allowing NULL values' do - expect(model).to receive(:update_column_in_batches) - .with(:projects, :foo, 10) - - expect(model).not_to receive(:change_column_null) - - model.add_column_with_default(:projects, :foo, :integer, - default: 10, - allow_null: true) - end - - it 'adds the column while not allowing NULL values' do - expect(model).to receive(:update_column_in_batches) - .with(:projects, :foo, 10) - - expect(model).to receive(:change_column_null) - .with(:projects, :foo, false) - - model.add_column_with_default(:projects, :foo, :integer, default: 10) - end - - it 'removes the added column whenever updating the rows fails' do - expect(model).to receive(:update_column_in_batches) - .with(:projects, :foo, 10) - .and_raise(RuntimeError) - - expect(model).to receive(:remove_column) - .with(:projects, :foo) - - expect do - model.add_column_with_default(:projects, :foo, :integer, default: 10) - end.to raise_error(RuntimeError) - end - - it 'removes the added column whenever changing a column NULL constraint fails' do - expect(model).to receive(:change_column_null) - .with(:projects, :foo, false) - .and_raise(RuntimeError) - - expect(model).to receive(:remove_column) - .with(:projects, :foo) - - expect do - model.add_column_with_default(:projects, :foo, :integer, default: 10) - end.to raise_error(RuntimeError) - end - end - - context 'when `update_column_in_batches_args` is given' do - let(:column) { UserDetail.columns.find { |c| c.name == "user_id" } } - - it 'uses `user_id` for `update_column_in_batches`' do - allow(model).to receive(:transaction_open?).and_return(false) - allow(model).to receive(:transaction).and_yield - allow(model).to receive(:column_for).with(:user_details, :foo).and_return(column) - allow(model).to receive(:update_column_in_batches).with(:user_details, :foo, 10, batch_column_name: :user_id) - allow(model).to receive(:change_column_null).with(:user_details, :foo, false) - allow(model).to receive(:change_column_default).with(:user_details, :foo, 10) + it 'delegates to #add_column' do + expect(model).to receive(:add_column).with(:projects, :foo, :integer, default: 10, limit: nil, null: true) - expect(model).to receive(:add_column) - .with(:user_details, :foo, :integer, default: nil) - - model.add_column_with_default( - :user_details, - :foo, - :integer, - default: 10, - update_column_in_batches_args: { batch_column_name: :user_id } - ) - end - end - - context 'when a column limit is set' do - it 'adds the column with a limit' do - allow(model).to receive(:transaction_open?).and_return(false) - allow(model).to 
receive(:transaction).and_yield - allow(model).to receive(:column_for).with(:projects, :foo).and_return(column) - allow(model).to receive(:update_column_in_batches).with(:projects, :foo, 10) - allow(model).to receive(:change_column_null).with(:projects, :foo, false) - allow(model).to receive(:change_column_default).with(:projects, :foo, 10) - - expect(model).to receive(:add_column) - .with(:projects, :foo, :integer, default: nil, limit: 8) - - model.add_column_with_default(:projects, :foo, :integer, default: 10, limit: 8) - end - end - - it 'adds a column with an array default value for a jsonb type' do - create(:project) - allow(model).to receive(:transaction_open?).and_return(false) - allow(model).to receive(:transaction).and_yield - expect(model).to receive(:update_column_in_batches).with(:projects, :foo, '[{"foo":"json"}]').and_call_original - - model.add_column_with_default(:projects, :foo, :jsonb, default: [{ foo: "json" }]) - end - - it 'adds a column with an object default value for a jsonb type' do - create(:project) - allow(model).to receive(:transaction_open?).and_return(false) - allow(model).to receive(:transaction).and_yield - expect(model).to receive(:update_column_in_batches).with(:projects, :foo, '{"foo":"json"}').and_call_original - - model.add_column_with_default(:projects, :foo, :jsonb, default: { foo: "json" }) - end - end - - context 'inside a transaction' do - it 'raises RuntimeError' do - expect(model).to receive(:transaction_open?).and_return(true) - - expect do - model.add_column_with_default(:projects, :foo, :integer, default: 10) - end.to raise_error(RuntimeError) - end + model.add_column_with_default(:projects, :foo, :integer, + default: 10, + allow_null: true) end end @@ -782,7 +682,7 @@ describe Gitlab::Database::MigrationHelpers do expect(model).to receive(:update_column_in_batches) - expect(model).to receive(:change_column_null).with(:users, :new, false) + expect(model).to receive(:add_not_null_constraint).with(:users, :new) expect(model).to receive(:copy_indexes).with(:users, :old, :new) expect(model).to receive(:copy_foreign_keys).with(:users, :old, :new) @@ -790,6 +690,25 @@ describe Gitlab::Database::MigrationHelpers do model.rename_column_concurrently(:users, :old, :new) end + it 'passes the batch_column_name' do + expect(model).to receive(:column_exists?).with(:users, :other_batch_column).and_return(true) + expect(model).to receive(:check_trigger_permissions!).and_return(true) + + expect(model).to receive(:create_column_from).with( + :users, :old, :new, type: nil, batch_column_name: :other_batch_column + ).and_return(true) + + expect(model).to receive(:install_rename_triggers).and_return(true) + + model.rename_column_concurrently(:users, :old, :new, batch_column_name: :other_batch_column) + end + + it 'raises an error with invalid batch_column_name' do + expect do + model.rename_column_concurrently(:users, :old, :new, batch_column_name: :invalid) + end.to raise_error(RuntimeError, /Column invalid does not exist on users/) + end + context 'when default is false' do let(:old_column) do double(:column, @@ -896,7 +815,7 @@ describe Gitlab::Database::MigrationHelpers do expect(model).to receive(:update_column_in_batches) - expect(model).to receive(:change_column_null).with(:users, :old, false) + expect(model).to receive(:add_not_null_constraint).with(:users, :old) expect(model).to receive(:copy_indexes).with(:users, :new, :old) expect(model).to receive(:copy_foreign_keys).with(:users, :new, :old) @@ -904,6 +823,25 @@ describe 
Gitlab::Database::MigrationHelpers do model.undo_cleanup_concurrent_column_rename(:users, :old, :new) end + it 'passes the batch_column_name' do + expect(model).to receive(:column_exists?).with(:users, :other_batch_column).and_return(true) + expect(model).to receive(:check_trigger_permissions!).and_return(true) + + expect(model).to receive(:create_column_from).with( + :users, :new, :old, type: nil, batch_column_name: :other_batch_column + ).and_return(true) + + expect(model).to receive(:install_rename_triggers).and_return(true) + + model.undo_cleanup_concurrent_column_rename(:users, :old, :new, batch_column_name: :other_batch_column) + end + + it 'raises an error with invalid batch_column_name' do + expect do + model.undo_cleanup_concurrent_column_rename(:users, :old, :new, batch_column_name: :invalid) + end.to raise_error(RuntimeError, /Column invalid does not exist on users/) + end + context 'when default is false' do let(:new_column) do double(:column, @@ -1365,6 +1303,22 @@ describe Gitlab::Database::MigrationHelpers do end end + it 'returns the final expected delay' do + Sidekiq::Testing.fake! do + final_delay = model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.minutes, batch_size: 2) + + expect(final_delay.to_f).to eq(20.minutes.to_f) + end + end + + it 'returns zero when nothing gets queued' do + Sidekiq::Testing.fake! do + final_delay = model.queue_background_migration_jobs_by_range_at_intervals(User.none, 'FooJob', 10.minutes) + + expect(final_delay).to eq(0) + end + end + context 'with batch_size option' do it 'queues jobs correctly' do Sidekiq::Testing.fake! do @@ -1389,12 +1343,25 @@ describe Gitlab::Database::MigrationHelpers do end end - context 'with other_arguments option' do + context 'with other_job_arguments option' do + it 'queues jobs correctly' do + Sidekiq::Testing.fake! do + model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.minutes, other_job_arguments: [1, 2]) + + expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id3, 1, 2]]) + expect(BackgroundMigrationWorker.jobs[0]['at']).to eq(10.minutes.from_now.to_f) + end + end + end + + context 'with initial_delay option' do it 'queues jobs correctly' do - model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.minutes, other_arguments: [1, 2]) + Sidekiq::Testing.fake! 
do + model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.minutes, other_job_arguments: [1, 2], initial_delay: 10.minutes) - expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id3, 1, 2]]) - expect(BackgroundMigrationWorker.jobs[0]['at']).to eq(10.minutes.from_now.to_f) + expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id3, 1, 2]]) + expect(BackgroundMigrationWorker.jobs[0]['at']).to eq(20.minutes.from_now.to_f) + end end end end @@ -2158,6 +2125,7 @@ describe Gitlab::Database::MigrationHelpers do .and_return(false).exactly(1) expect(model).to receive(:disable_statement_timeout).and_call_original + expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/statement_timeout/) expect(model).to receive(:with_lock_retries).and_call_original expect(model).to receive(:execute).with(/ADD CONSTRAINT check_name_not_null/) @@ -2201,6 +2169,7 @@ describe Gitlab::Database::MigrationHelpers do .and_return(false).exactly(1) expect(model).to receive(:disable_statement_timeout).and_call_original + expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/statement_timeout/) expect(model).to receive(:with_lock_retries).and_call_original expect(model).to receive(:execute).with(/ADD CONSTRAINT check_name_not_null/) @@ -2242,6 +2211,7 @@ describe Gitlab::Database::MigrationHelpers do expect(model).to receive(:check_constraint_exists?).and_return(true) expect(model).to receive(:disable_statement_timeout).and_call_original + expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/statement_timeout/) expect(model).to receive(:execute).ordered.with(validate_sql) expect(model).to receive(:execute).ordered.with(/RESET ALL/) @@ -2381,4 +2351,135 @@ describe Gitlab::Database::MigrationHelpers do end end end + + describe '#add_not_null_constraint' do + context 'when it is called with the default options' do + it 'calls add_check_constraint with an inferred constraint name and validate: true' do + constraint_name = model.check_constraint_name(:test_table, + :name, + 'not_null') + check = "name IS NOT NULL" + + expect(model).to receive(:column_is_nullable?).and_return(true) + expect(model).to receive(:check_constraint_name).and_call_original + expect(model).to receive(:add_check_constraint) + .with(:test_table, check, constraint_name, validate: true) + + model.add_not_null_constraint(:test_table, :name) + end + end + + context 'when all parameters are provided' do + it 'calls add_check_constraint with the correct parameters' do + constraint_name = 'check_name_not_null' + check = "name IS NOT NULL" + + expect(model).to receive(:column_is_nullable?).and_return(true) + expect(model).not_to receive(:check_constraint_name) + expect(model).to receive(:add_check_constraint) + .with(:test_table, check, constraint_name, validate: false) + + model.add_not_null_constraint( + :test_table, + :name, + constraint_name: constraint_name, + validate: false + ) + end + end + + context 'when the column is defined as NOT NULL' do + it 'does not add a check constraint' do + expect(model).to receive(:column_is_nullable?).and_return(false) + expect(model).not_to receive(:check_constraint_name) + expect(model).not_to receive(:add_check_constraint) + + model.add_not_null_constraint(:test_table, :name) + end + end + end + + describe '#validate_not_null_constraint' do + context 'when constraint_name is not
provided' do + it 'calls validate_check_constraint with an inferred constraint name' do + constraint_name = model.check_constraint_name(:test_table, + :name, + 'not_null') + + expect(model).to receive(:check_constraint_name).and_call_original + expect(model).to receive(:validate_check_constraint) + .with(:test_table, constraint_name) + + model.validate_not_null_constraint(:test_table, :name) + end + end + + context 'when constraint_name is provided' do + it 'calls validate_check_constraint with the correct parameters' do + constraint_name = 'check_name_not_null' + + expect(model).not_to receive(:check_constraint_name) + expect(model).to receive(:validate_check_constraint) + .with(:test_table, constraint_name) + + model.validate_not_null_constraint(:test_table, :name, constraint_name: constraint_name) + end + end + end + + describe '#remove_not_null_constraint' do + context 'when constraint_name is not provided' do + it 'calls remove_check_constraint with an inferred constraint name' do + constraint_name = model.check_constraint_name(:test_table, + :name, + 'not_null') + + expect(model).to receive(:check_constraint_name).and_call_original + expect(model).to receive(:remove_check_constraint) + .with(:test_table, constraint_name) + + model.remove_not_null_constraint(:test_table, :name) + end + end + + context 'when constraint_name is provided' do + it 'calls remove_check_constraint with the correct parameters' do + constraint_name = 'check_name_not_null' + + expect(model).not_to receive(:check_constraint_name) + expect(model).to receive(:remove_check_constraint) + .with(:test_table, constraint_name) + + model.remove_not_null_constraint(:test_table, :name, constraint_name: constraint_name) + end + end + end + + describe '#check_not_null_constraint_exists?' do + context 'when constraint_name is not provided' do + it 'calls check_constraint_exists? with an inferred constraint name' do + constraint_name = model.check_constraint_name(:test_table, + :name, + 'not_null') + + expect(model).to receive(:check_constraint_name).and_call_original + expect(model).to receive(:check_constraint_exists?) + .with(:test_table, constraint_name) + + model.check_not_null_constraint_exists?(:test_table, :name) + end + end + + context 'when constraint_name is provided' do + it 'calls check_constraint_exists? with the correct parameters' do + constraint_name = 'check_name_not_null' + + expect(model).not_to receive(:check_constraint_name) + expect(model).to receive(:check_constraint_exists?)
+ .with(:test_table, constraint_name) + + model.check_not_null_constraint_exists?(:test_table, :name, constraint_name: constraint_name) + end + end + end end diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/partitioned_foreign_key_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/partitioned_foreign_key_spec.rb new file mode 100644 index 00000000000..77f71676252 --- /dev/null +++ b/spec/lib/gitlab/database/partitioning_migration_helpers/partitioned_foreign_key_spec.rb @@ -0,0 +1,48 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::Database::PartitioningMigrationHelpers::PartitionedForeignKey do + let(:foreign_key) do + described_class.new( + to_table: 'issues', + from_table: 'issue_assignees', + from_column: 'issue_id', + to_column: 'id', + cascade_delete: true) + end + + describe 'validations' do + it 'allows keys that reference valid tables and columns' do + expect(foreign_key).to be_valid + end + + it 'does not allow keys without a valid to_table' do + foreign_key.to_table = 'this_is_not_a_real_table' + + expect(foreign_key).not_to be_valid + expect(foreign_key.errors[:to_table].first).to eq('must be a valid table') + end + + it 'does not allow keys without a valid from_table' do + foreign_key.from_table = 'this_is_not_a_real_table' + + expect(foreign_key).not_to be_valid + expect(foreign_key.errors[:from_table].first).to eq('must be a valid table') + end + + it 'does not allow keys without a valid to_column' do + foreign_key.to_column = 'this_is_not_a_real_fk' + + expect(foreign_key).not_to be_valid + expect(foreign_key.errors[:to_column].first).to eq('must be a valid column') + end + + it 'does not allow keys without a valid from_column' do + foreign_key.from_column = 'this_is_not_a_real_pk' + + expect(foreign_key).not_to be_valid + expect(foreign_key.errors[:from_column].first).to eq('must be a valid column') + end + end +end diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers_spec.rb new file mode 100644 index 00000000000..0e2fb047469 --- /dev/null +++ b/spec/lib/gitlab/database/partitioning_migration_helpers_spec.rb @@ -0,0 +1,230 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::Database::PartitioningMigrationHelpers do + let(:model) do + ActiveRecord::Migration.new.extend(described_class) + end + let_it_be(:connection) { ActiveRecord::Base.connection } + let(:referenced_table) { :issues } + let(:function_name) { model.fk_function_name(referenced_table) } + let(:trigger_name) { model.fk_trigger_name(referenced_table) } + + before do + allow(model).to receive(:puts) + end + + describe 'adding a foreign key' do + before do + allow(model).to receive(:transaction_open?).and_return(false) + end + + context 'when the table has no foreign keys' do + it 'creates a trigger function to handle the single cascade' do + model.add_partitioned_foreign_key :issue_assignees, referenced_table + + expect_function_to_contain(function_name, 'delete from issue_assignees where issue_id = old.id') + expect_valid_function_trigger(trigger_name, function_name) + end + end + + context 'when the table already has foreign keys' do + context 'when the foreign key is from a different table' do + before do + model.add_partitioned_foreign_key :issue_assignees, referenced_table + end + + it 'creates a trigger function to handle the multiple cascades' do + model.add_partitioned_foreign_key :epic_issues, referenced_table + + 
expect_function_to_contain(function_name, + 'delete from issue_assignees where issue_id = old.id', + 'delete from epic_issues where issue_id = old.id') + expect_valid_function_trigger(trigger_name, function_name) + end + end + + context 'when the foreign key is from the same table' do + before do + model.add_partitioned_foreign_key :issues, referenced_table, column: :moved_to_id + end + + context 'when the foreign key is from a different column' do + it 'creates a trigger function to handle the multiple cascades' do + model.add_partitioned_foreign_key :issues, referenced_table, column: :duplicated_to_id + + expect_function_to_contain(function_name, + 'delete from issues where moved_to_id = old.id', + 'delete from issues where duplicated_to_id = old.id') + expect_valid_function_trigger(trigger_name, function_name) + end + end + + context 'when the foreign key is from the same column' do + it 'ignores the duplicate and properly recreates the trigger function' do + model.add_partitioned_foreign_key :issues, referenced_table, column: :moved_to_id + + expect_function_to_contain(function_name, 'delete from issues where moved_to_id = old.id') + expect_valid_function_trigger(trigger_name, function_name) + end + end + end + end + + context 'when the foreign key is set to nullify' do + it 'creates a trigger function that nullifies the foreign key' do + model.add_partitioned_foreign_key :issue_assignees, referenced_table, on_delete: :nullify + + expect_function_to_contain(function_name, 'update issue_assignees set issue_id = null where issue_id = old.id') + expect_valid_function_trigger(trigger_name, function_name) + end + end + + context 'when the referencing column is a custom value' do + it 'creates a trigger function with the correct column name' do + model.add_partitioned_foreign_key :issues, referenced_table, column: :duplicated_to_id + + expect_function_to_contain(function_name, 'delete from issues where duplicated_to_id = old.id') + expect_valid_function_trigger(trigger_name, function_name) + end + end + + context 'when the referenced column is a custom value' do + let(:referenced_table) { :user_details } + + it 'creates a trigger function with the correct column name' do + model.add_partitioned_foreign_key :user_preferences, referenced_table, column: :user_id, primary_key: :user_id + + expect_function_to_contain(function_name, 'delete from user_preferences where user_id = old.user_id') + expect_valid_function_trigger(trigger_name, function_name) + end + end + + context 'when the given key definition is invalid' do + it 'raises an error with the appropriate message' do + expect do + model.add_partitioned_foreign_key :issue_assignees, referenced_table, column: :not_a_real_issue_id + end.to raise_error(/From column must be a valid column/) + end + end + + context 'when run inside a transaction' do + it 'raises an error' do + expect(model).to receive(:transaction_open?).and_return(true) + + expect do + model.add_partitioned_foreign_key :issue_assignees, referenced_table + end.to raise_error(/can not be run inside a transaction/) + end + end + end + + context 'removing a foreign key' do + before do + allow(model).to receive(:transaction_open?).and_return(false) + end + + context 'when the table has multiple foreign keys' do + before do + model.add_partitioned_foreign_key :issue_assignees, referenced_table + model.add_partitioned_foreign_key :epic_issues, referenced_table + end + + it 'creates a trigger function without the removed cascade' do + expect_function_to_contain(function_name, + 'delete 
from issue_assignees where issue_id = old.id', + 'delete from epic_issues where issue_id = old.id') + expect_valid_function_trigger(trigger_name, function_name) + + model.remove_partitioned_foreign_key :issue_assignees, referenced_table + + expect_function_to_contain(function_name, 'delete from epic_issues where issue_id = old.id') + expect_valid_function_trigger(trigger_name, function_name) + end + end + + context 'when the table has only one remaining foreign key' do + before do + model.add_partitioned_foreign_key :issue_assignees, referenced_table + end + + it 'removes the trigger function altogether' do + expect_function_to_contain(function_name, 'delete from issue_assignees where issue_id = old.id') + expect_valid_function_trigger(trigger_name, function_name) + + model.remove_partitioned_foreign_key :issue_assignees, referenced_table + + expect(find_function_def(function_name)).to be_nil + expect(find_trigger_def(trigger_name)).to be_nil + end + end + + context 'when the foreign key does not exist' do + before do + model.add_partitioned_foreign_key :issue_assignees, referenced_table + end + + it 'ignores the invalid key and properly recreates the trigger function' do + expect_function_to_contain(function_name, 'delete from issue_assignees where issue_id = old.id') + expect_valid_function_trigger(trigger_name, function_name) + + model.remove_partitioned_foreign_key :issues, referenced_table, column: :moved_to_id + + expect_function_to_contain(function_name, 'delete from issue_assignees where issue_id = old.id') + expect_valid_function_trigger(trigger_name, function_name) + end + end + + context 'when run inside a transaction' do + it 'raises an error' do + expect(model).to receive(:transaction_open?).and_return(true) + + expect do + model.remove_partitioned_foreign_key :issue_assignees, referenced_table + end.to raise_error(/can not be run inside a transaction/) + end + end + end + + def expect_function_to_contain(name, *statements) + return_stmt, *body_stmts = parsed_function_statements(name).reverse + + expect(return_stmt).to eq('return old') + expect(body_stmts).to contain_exactly(*statements) + end + + def expect_valid_function_trigger(name, fn_name) + event, activation, definition = cleaned_trigger_def(name) + + expect(event).to eq('delete') + expect(activation).to eq('after') + expect(definition).to eq("execute procedure #{fn_name}()") + end + + def parsed_function_statements(name) + cleaned_definition = find_function_def(name)['fn_body'].downcase.gsub(/\s+/, ' ') + statements = cleaned_definition.sub(/\A\s*begin\s*(.*)\s*end\s*\Z/, "\\1") + statements.split(';').map! { |stmt| stmt.strip.presence }.compact!
+ end + + def find_function_def(name) + connection.execute("select prosrc as fn_body from pg_proc where proname = '#{name}';").first + end + + def cleaned_trigger_def(name) + find_trigger_def(name).values_at('event', 'activation', 'definition').map!(&:downcase) + end + + def find_trigger_def(name) + connection.execute(<<~SQL).first + select + string_agg(event_manipulation, ',') as event, + action_timing as activation, + action_statement as definition + from information_schema.triggers + where trigger_name = '#{name}' + group by 2, 3 + SQL + end +end diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb index 7b8437e4874..fae57996fb6 100644 --- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb +++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb @@ -242,7 +242,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :delete old_path, new_path = [nil, nil] Gitlab::Redis::SharedState.with do |redis| rename_info = redis.lpop(key) - old_path, new_path = JSON.parse(rename_info) + old_path, new_path = Gitlab::Json.parse(rename_info) end expect(old_path).to eq('path/to/namespace') @@ -278,7 +278,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :delete end expect(rename_count).to eq(1) - expect(JSON.parse(stored_renames.first)).to eq(%w(old_path new_path)) + expect(Gitlab::Json.parse(stored_renames.first)).to eq(%w(old_path new_path)) end end end diff --git a/spec/lib/gitlab/database/with_lock_retries_spec.rb b/spec/lib/gitlab/database/with_lock_retries_spec.rb index b6321f2eab1..9c8c9749125 100644 --- a/spec/lib/gitlab/database/with_lock_retries_spec.rb +++ b/spec/lib/gitlab/database/with_lock_retries_spec.rb @@ -84,7 +84,7 @@ describe Gitlab::Database::WithLockRetries do subject.run do lock_attempts += 1 - if lock_attempts == retry_count # we reached the last retry iteration, if we kill the thread, the last try (no lock_timeout) will succeed) + if lock_attempts == retry_count # we reached the last retry iteration, if we kill the thread, the last try (no lock_timeout) will succeed lock_fiber.resume end @@ -106,9 +106,13 @@ describe Gitlab::Database::WithLockRetries do end context 'after the retries, without setting lock_timeout' do - let(:retry_count) { timing_configuration.size } + let(:retry_count) { timing_configuration.size + 1 } - it_behaves_like 'retriable exclusive lock on `projects`' + it_behaves_like 'retriable exclusive lock on `projects`' do + before do + expect(subject).to receive(:run_block_without_lock_timeout).and_call_original + end + end end context 'when statement timeout is reached' do @@ -129,11 +133,22 @@ describe Gitlab::Database::WithLockRetries do end end + context 'restore local database variables' do + it do + expect { subject.run {} }.not_to change { ActiveRecord::Base.connection.execute("SHOW lock_timeout").to_a } + end + + it do + expect { subject.run {} }.not_to change { ActiveRecord::Base.connection.execute("SHOW idle_in_transaction_session_timeout").to_a } + end + end + context 'casting durations correctly' do let(:timing_configuration) { [[0.015.seconds, 0.025.seconds], [0.015.seconds, 0.025.seconds]] } # 15ms, 25ms it 'executes `SET LOCAL lock_timeout` using the configured timeout value in milliseconds' do expect(ActiveRecord::Base.connection).to receive(:execute).with("SAVEPOINT active_record_1").and_call_original + 
expect(ActiveRecord::Base.connection).to receive(:execute).with('RESET idle_in_transaction_session_timeout; RESET lock_timeout').and_call_original expect(ActiveRecord::Base.connection).to receive(:execute).with("SET LOCAL lock_timeout TO '15ms'").and_call_original expect(ActiveRecord::Base.connection).to receive(:execute).with("RELEASE SAVEPOINT active_record_1").and_call_original diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb index 61d7400b95e..d1592e60d3d 100644 --- a/spec/lib/gitlab/diff/file_spec.rb +++ b/spec/lib/gitlab/diff/file_spec.rb @@ -567,6 +567,61 @@ describe Gitlab::Diff::File do end end + describe '#alternate_viewer' do + subject { diff_file.alternate_viewer } + + where(:viewer_class) do + [ + DiffViewer::Image, + DiffViewer::Collapsed, + DiffViewer::NotDiffable, + DiffViewer::Text, + DiffViewer::NoPreview, + DiffViewer::Added, + DiffViewer::Deleted, + DiffViewer::ModeChanged, + DiffViewer::ModeChanged, + DiffViewer::NoPreview + ] + end + + with_them do + let(:viewer) { viewer_class.new(diff_file) } + + before do + allow(diff_file).to receive(:viewer).and_return(viewer) + end + + it { is_expected.to be_nil } + end + + context 'when viewer is DiffViewer::Renamed' do + let(:viewer) { DiffViewer::Renamed.new(diff_file) } + + before do + allow(diff_file).to receive(:viewer).and_return(viewer) + end + + context 'when it can be rendered as text' do + it { is_expected.to be_a(DiffViewer::Text) } + end + + context 'when it can be rendered as image' do + let(:commit) { project.commit('2f63565e7aac07bcdadb654e253078b727143ec4') } + let(:diff_file) { commit.diffs.diff_file_with_new_path('files/images/6049019_460s.jpg') } + + it { is_expected.to be_a(DiffViewer::Image) } + end + + context 'when it is something else' do + let(:commit) { project.commit('ae73cb07c9eeaf35924a10f713b364d32b2dd34f') } + let(:diff_file) { commit.diffs.diff_file_with_new_path('Gemfile.zip') } + + it { is_expected.to be_nil } + end + end + end + describe '#rendered_as_text?' 
do context 'when the simple viewer is text-based' do let(:commit) { project.commit('570e7b2abdd848b95f2f578043fc23bd6f6fd24d') } diff --git a/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb b/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb index e275ebef2c9..fa129a20e58 100644 --- a/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb +++ b/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb @@ -26,6 +26,7 @@ describe Gitlab::Diff::Formatters::TextFormatter do # Specific text formatter examples let!(:formatter) { described_class.new(attrs) } + let(:attrs) { base } describe '#line_age' do subject { formatter.line_age } @@ -42,4 +43,21 @@ describe Gitlab::Diff::Formatters::TextFormatter do it { is_expected.to eq('old') } end end + + describe "#==" do + it "is false when the line_range changes" do + formatter_1 = described_class.new(base.merge(line_range: { start_line_code: "foo", end_line_code: "bar" })) + formatter_2 = described_class.new(base.merge(line_range: { start_line_code: "foo", end_line_code: "baz" })) + + expect(formatter_1).not_to eq(formatter_2) + end + + it "is true when the line_range doesn't change" do + attrs = base.merge({ line_range: { start_line_code: "foo", end_line_code: "baz" } }) + formatter_1 = described_class.new(attrs) + formatter_2 = described_class.new(attrs) + + expect(formatter_1).to eq(formatter_2) + end + end end diff --git a/spec/lib/gitlab/diff/position_spec.rb b/spec/lib/gitlab/diff/position_spec.rb index a83c0f35d92..10749ec024d 100644 --- a/spec/lib/gitlab/diff/position_spec.rb +++ b/spec/lib/gitlab/diff/position_spec.rb @@ -639,11 +639,11 @@ describe Gitlab::Diff::Position do let(:diff_position) { described_class.new(args) } it "returns the position as JSON" do - expect(JSON.parse(diff_position.to_json)).to eq(args.stringify_keys) + expect(Gitlab::Json.parse(diff_position.to_json)).to eq(args.stringify_keys) end it "works when nested under another hash" do - expect(JSON.parse(JSON.generate(pos: diff_position))).to eq('pos' => args.stringify_keys) + expect(Gitlab::Json.parse(Gitlab::Json.generate(pos: diff_position))).to eq('pos' => args.stringify_keys) end end diff --git a/spec/lib/gitlab/elasticsearch/logs/lines_spec.rb b/spec/lib/gitlab/elasticsearch/logs/lines_spec.rb index 8b6a19fa2c5..45a262c0e77 100644 --- a/spec/lib/gitlab/elasticsearch/logs/lines_spec.rb +++ b/spec/lib/gitlab/elasticsearch/logs/lines_spec.rb @@ -10,7 +10,7 @@ describe Gitlab::Elasticsearch::Logs::Lines do let(:es_message_3) { { timestamp: "2019-12-13T14:35:36.034Z", pod: "production-6866bc8974-m4sk4", message: "10.8.2.1 - - [04/Nov/2019:23:09:24 UTC] \"GET / HTTP/1.1\" 200 13" } } let(:es_message_4) { { timestamp: "2019-12-13T14:35:37.034Z", pod: "production-6866bc8974-m4sk4", message: "- -\u003e /" } } - let(:es_response) { JSON.parse(fixture_file('lib/elasticsearch/logs_response.json')) } + let(:es_response) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/logs_response.json')) } subject { described_class.new(client) } @@ -22,13 +22,14 @@ describe Gitlab::Elasticsearch::Logs::Lines do let(:end_time) { "2019-12-13T14:35:34.034Z" } let(:cursor) { "9999934,1572449784442" } - let(:body) { JSON.parse(fixture_file('lib/elasticsearch/query.json')) } - let(:body_with_container) { JSON.parse(fixture_file('lib/elasticsearch/query_with_container.json')) } - let(:body_with_search) { JSON.parse(fixture_file('lib/elasticsearch/query_with_search.json')) } - let(:body_with_times) { JSON.parse(fixture_file('lib/elasticsearch/query_with_times.json')) } - 
let(:body_with_start_time) { JSON.parse(fixture_file('lib/elasticsearch/query_with_start_time.json')) } - let(:body_with_end_time) { JSON.parse(fixture_file('lib/elasticsearch/query_with_end_time.json')) } - let(:body_with_cursor) { JSON.parse(fixture_file('lib/elasticsearch/query_with_cursor.json')) } + let(:body) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query.json')) } + let(:body_with_container) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query_with_container.json')) } + let(:body_with_search) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query_with_search.json')) } + let(:body_with_times) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query_with_times.json')) } + let(:body_with_start_time) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query_with_start_time.json')) } + let(:body_with_end_time) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query_with_end_time.json')) } + let(:body_with_cursor) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query_with_cursor.json')) } + let(:body_with_filebeat_6) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query_with_filebeat_6.json')) } RSpec::Matchers.define :a_hash_equal_to_json do |expected| match do |actual| @@ -85,5 +86,12 @@ describe Gitlab::Elasticsearch::Logs::Lines do result = subject.pod_logs(namespace, pod_name: pod_name, cursor: cursor) expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor) end + + it 'can search on filebeat 6' do + expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_filebeat_6)).and_return(es_response) + + result = subject.pod_logs(namespace, pod_name: pod_name, chart_above_v2: false) + expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor) + end end end diff --git a/spec/lib/gitlab/elasticsearch/logs/pods_spec.rb b/spec/lib/gitlab/elasticsearch/logs/pods_spec.rb index 0a4ab0780c5..c2c3074e965 100644 --- a/spec/lib/gitlab/elasticsearch/logs/pods_spec.rb +++ b/spec/lib/gitlab/elasticsearch/logs/pods_spec.rb @@ -5,8 +5,8 @@ require 'spec_helper' describe Gitlab::Elasticsearch::Logs::Pods do let(:client) { Elasticsearch::Transport::Client } - let(:es_query) { JSON.parse(fixture_file('lib/elasticsearch/pods_query.json'), symbolize_names: true) } - let(:es_response) { JSON.parse(fixture_file('lib/elasticsearch/pods_response.json')) } + let(:es_query) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/pods_query.json'), symbolize_names: true) } + let(:es_response) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/pods_response.json')) } let(:namespace) { "autodevops-deploy-9-production" } subject { described_class.new(client) } diff --git a/spec/lib/gitlab/email/handler_spec.rb b/spec/lib/gitlab/email/handler_spec.rb index 5014e4c22ce..6dbf069f07c 100644 --- a/spec/lib/gitlab/email/handler_spec.rb +++ b/spec/lib/gitlab/email/handler_spec.rb @@ -6,6 +6,18 @@ describe Gitlab::Email::Handler do let(:email) { Mail.new { body 'email' } } describe '.for' do + context 'key matches the reply_key of a notification' do + it 'picks note handler' do + expect(described_class.for(email, '1234567890abcdef1234567890abcdef')).to be_an_instance_of(Gitlab::Email::Handler::CreateNoteHandler) + end + end + + context 'key matches the reply_key of a notification, along with an unsubscribe suffix' do + it 'picks unsubscribe handler' do + expect(described_class.for(email, '1234567890abcdef1234567890abcdef-unsubscribe')).to 
be_an_instance_of(Gitlab::Email::Handler::UnsubscribeHandler) + end + end + it 'picks issue handler if there is no merge request prefix' do expect(described_class.for(email, 'project+key')).to be_an_instance_of(Gitlab::Email::Handler::CreateIssueHandler) end diff --git a/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb b/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb index 36954252b6b..31ba48e9df1 100644 --- a/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb +++ b/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb @@ -5,19 +5,24 @@ require 'spec_helper' describe Gitlab::Email::Hook::SmimeSignatureInterceptor do include SmimeHelper - # cert generation is an expensive operation and they are used read-only, + # certs generation is an expensive operation and they are used read-only, # so we share them as instance variables in all tests before :context do @root_ca = generate_root - @cert = generate_cert(root_ca: @root_ca) + @intermediate_ca = generate_intermediate(signer_ca: @root_ca) + @cert = generate_cert(signer_ca: @intermediate_ca) end let(:root_certificate) do Gitlab::Email::Smime::Certificate.new(@root_ca[:key], @root_ca[:cert]) end + let(:intermediate_certificate) do + Gitlab::Email::Smime::Certificate.new(@intermediate_ca[:key], @intermediate_ca[:cert]) + end + let(:certificate) do - Gitlab::Email::Smime::Certificate.new(@cert[:key], @cert[:cert]) + Gitlab::Email::Smime::Certificate.new(@cert[:key], @cert[:cert], [intermediate_certificate.cert]) end let(:mail_body) { "signed hello with Unicode €áø and\r\n newlines\r\n" } @@ -48,17 +53,19 @@ describe Gitlab::Email::Hook::SmimeSignatureInterceptor do # verify signature and obtain pkcs7 encoded content p7enc = Gitlab::Email::Smime::Signer.verify_signature( - cert: certificate.cert, - ca_cert: root_certificate.cert, + ca_certs: root_certificate.cert, signed_data: mail.encoded) + expect(p7enc).not_to be_nil + # re-verify signature from a new Mail object content # See https://gitlab.com/gitlab-org/gitlab/issues/197386 - Gitlab::Email::Smime::Signer.verify_signature( - cert: certificate.cert, - ca_cert: root_certificate.cert, + p7_re_enc = Gitlab::Email::Smime::Signer.verify_signature( + ca_certs: root_certificate.cert, signed_data: Mail.new(mail).encoded) + expect(p7_re_enc).not_to be_nil + # envelope in a Mail object and obtain the body decoded_mail = Mail.new(p7enc.data) diff --git a/spec/lib/gitlab/email/smime/certificate_spec.rb b/spec/lib/gitlab/email/smime/certificate_spec.rb index 90b27602413..07b8c1e4de1 100644 --- a/spec/lib/gitlab/email/smime/certificate_spec.rb +++ b/spec/lib/gitlab/email/smime/certificate_spec.rb @@ -9,7 +9,8 @@ describe Gitlab::Email::Smime::Certificate do # so we share them as instance variables in all tests before :context do @root_ca = generate_root - @cert = generate_cert(root_ca: @root_ca) + @intermediate_ca = generate_intermediate(signer_ca: @root_ca) + @cert = generate_cert(signer_ca: @intermediate_ca) end describe 'testing environment setup' do @@ -21,11 +22,23 @@ describe Gitlab::Email::Smime::Certificate do end end + describe 'generate_intermediate' do + subject { @intermediate_ca } + + it 'generates an intermediate CA that expires a long way in the future' do + expect(subject[:cert].not_after).to be > 999.years.from_now + end + + it 'generates an intermediate CA properly signed by the root CA' do + expect(subject[:cert].issuer).to eq(@root_ca[:cert].subject) + end + end + describe 'generate_cert' do subject { @cert } - it 'generates a cert 
properly signed by the root CA' do - expect(subject[:cert].issuer).to eq(@root_ca[:cert].subject) + it 'generates a cert properly signed by the intermediate CA' do + expect(subject[:cert].issuer).to eq(@intermediate_ca[:cert].subject) end it 'generates a cert that expires soon' do @@ -37,7 +50,7 @@ describe Gitlab::Email::Smime::Certificate do end context 'passing in INFINITE_EXPIRY' do - subject { generate_cert(root_ca: @root_ca, expires_in: SmimeHelper::INFINITE_EXPIRY) } + subject { generate_cert(signer_ca: @intermediate_ca, expires_in: SmimeHelper::INFINITE_EXPIRY) } it 'generates a cert that expires a long way in the future' do expect(subject[:cert].not_after).to be > 999.years.from_now @@ -50,7 +63,7 @@ describe Gitlab::Email::Smime::Certificate do it 'parses correctly a certificate and key' do parsed_cert = described_class.from_strings(@cert[:key].to_s, @cert[:cert].to_pem) - common_cert_tests(parsed_cert, @cert, @root_ca) + common_cert_tests(parsed_cert, @cert, @intermediate_ca) end end @@ -61,17 +74,43 @@ describe Gitlab::Email::Smime::Certificate do parsed_cert = described_class.from_files('a_key', 'a_cert') - common_cert_tests(parsed_cert, @cert, @root_ca) + common_cert_tests(parsed_cert, @cert, @intermediate_ca) + end + + context 'with optional ca_certs' do + it 'parses correctly certificate, key and ca_certs' do + allow(File).to receive(:read).with('a_key').and_return(@cert[:key].to_s) + allow(File).to receive(:read).with('a_cert').and_return(@cert[:cert].to_pem) + allow(File).to receive(:read).with('a_ca_cert').and_return(@intermediate_ca[:cert].to_pem) + + parsed_cert = described_class.from_files('a_key', 'a_cert', 'a_ca_cert') + + common_cert_tests(parsed_cert, @cert, @intermediate_ca, with_ca_certs: [@intermediate_ca[:cert]]) + end + end + end + + context 'with no intermediate CA' do + it 'parses correctly a certificate and key' do + cert = generate_cert(signer_ca: @root_ca) + + allow(File).to receive(:read).with('a_key').and_return(cert[:key].to_s) + allow(File).to receive(:read).with('a_cert').and_return(cert[:cert].to_pem) + + parsed_cert = described_class.from_files('a_key', 'a_cert') + + common_cert_tests(parsed_cert, cert, @root_ca) end end - def common_cert_tests(parsed_cert, cert, root_ca) + def common_cert_tests(parsed_cert, cert, signer_ca, with_ca_certs: nil) expect(parsed_cert.cert).to be_a(OpenSSL::X509::Certificate) expect(parsed_cert.cert.subject).to eq(cert[:cert].subject) - expect(parsed_cert.cert.issuer).to eq(root_ca[:cert].subject) + expect(parsed_cert.cert.issuer).to eq(signer_ca[:cert].subject) expect(parsed_cert.cert.not_before).to eq(cert[:cert].not_before) expect(parsed_cert.cert.not_after).to eq(cert[:cert].not_after) expect(parsed_cert.cert.extensions).to include(an_object_having_attributes(oid: 'extendedKeyUsage', value: match('E-mail Protection'))) expect(parsed_cert.key).to be_a(OpenSSL::PKey::RSA) + expect(parsed_cert.ca_certs).to match_array(Array.wrap(with_ca_certs)) if with_ca_certs end end diff --git a/spec/lib/gitlab/email/smime/signer_spec.rb b/spec/lib/gitlab/email/smime/signer_spec.rb index 56048b7148c..d891b86da08 100644 --- a/spec/lib/gitlab/email/smime/signer_spec.rb +++ b/spec/lib/gitlab/email/smime/signer_spec.rb @@ -5,22 +5,39 @@ require 'spec_helper' describe Gitlab::Email::Smime::Signer do include SmimeHelper - it 'signs data appropriately with SMIME' do - root_certificate = generate_root - certificate = generate_cert(root_ca: root_certificate) + let_it_be(:root_ca) { generate_root } + let_it_be(:intermediate_ca) { 
generate_intermediate(signer_ca: root_ca) } + context 'when using an intermediate CA' do + it 'signs data appropriately with SMIME' do + cert = generate_cert(signer_ca: intermediate_ca) + + sign_and_verify('signed content', cert[:cert], cert[:key], root_ca[:cert], ca_certs: intermediate_ca[:cert]) + end + end + + context 'when not using an intermediate CA' do + it 'signs data appropriately with SMIME' do + cert = generate_cert(signer_ca: root_ca) + + sign_and_verify('signed content', cert[:cert], cert[:key], root_ca[:cert]) + end + end + + def sign_and_verify(data, cert, key, root_ca_cert, ca_certs: nil) signed_content = described_class.sign( - cert: certificate[:cert], - key: certificate[:key], - data: 'signed content') + cert: cert, + key: key, + ca_certs: ca_certs, + data: data) + expect(signed_content).not_to be_nil p7enc = described_class.verify_signature( - cert: certificate[:cert], - ca_cert: root_certificate[:cert], + ca_certs: root_ca_cert, signed_data: signed_content) expect(p7enc).not_to be_nil - expect(p7enc.data).to eq('signed content') + expect(p7enc.data).to eq(data) end end diff --git a/spec/lib/gitlab/exclusive_lease_helpers/sleeping_lock_spec.rb b/spec/lib/gitlab/exclusive_lease_helpers/sleeping_lock_spec.rb new file mode 100644 index 00000000000..8917eeec56f --- /dev/null +++ b/spec/lib/gitlab/exclusive_lease_helpers/sleeping_lock_spec.rb @@ -0,0 +1,102 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::ExclusiveLeaseHelpers::SleepingLock, :clean_gitlab_redis_shared_state do + include ::ExclusiveLeaseHelpers + + let(:timeout) { 1.second } + let(:delay) { 0.1.seconds } + let(:key) { SecureRandom.hex(10) } + + subject { described_class.new(key, timeout: timeout, delay: delay) } + + describe '#retried?' do + before do + stub_exclusive_lease(key, 'uuid') + end + + context 'we have not made any attempts' do + it { is_expected.not_to be_retried } + end + + context 'we just made a single (initial) attempt' do + it 'is not considered a retry' do + subject.send(:try_obtain) + + is_expected.not_to be_retried + end + end + + context 'made multiple attempts' do + it 'is considered a retry' do + 2.times { subject.send(:try_obtain) } + + is_expected.to be_retried + end + end + end + + describe '#obtain' do + context 'when the lease is not held' do + before do + stub_exclusive_lease(key, 'uuid') + end + + it 'obtains the lease on the first attempt, without sleeping' do + expect(subject).not_to receive(:sleep) + + subject.obtain(10) + + expect(subject).not_to be_retried + end + end + + context 'when the lease is held elsewhere' do + let!(:lease) { stub_exclusive_lease_taken(key) } + let(:max_attempts) { 7 } + + it 'retries to obtain a lease and raises an error' do + expect(subject).to receive(:sleep).with(delay).exactly(max_attempts - 1).times + expect(lease).to receive(:try_obtain).exactly(max_attempts).times + + expect { subject.obtain(max_attempts) }.to raise_error('Failed to obtain a lock') + end + + context 'when the delay is computed from the attempt number' do + let(:delay) { ->(n) { 3 * n } } + + it 'uses the computation to determine the sleep length' do + expect(subject).to receive(:sleep).with(3).once + expect(subject).to receive(:sleep).with(6).once + expect(subject).to receive(:sleep).with(9).once + expect(lease).to receive(:try_obtain).exactly(4).times + + expect { subject.obtain(4) }.to raise_error('Failed to obtain a lock') + end + end + + context 'when lease is granted after retry' do + it 'knows that it retried' do + expect(subject).to 
receive(:sleep).with(delay).exactly(3).times + expect(lease).to receive(:try_obtain).exactly(3).times { nil } + expect(lease).to receive(:try_obtain).once { 'obtained' } + + subject.obtain(max_attempts) + + expect(subject).to be_retried + end + end + end + + describe 'cancel' do + let!(:lease) { stub_exclusive_lease(key, 'uuid') } + + it 'cancels the lease' do + expect(lease).to receive(:cancel) + + subject.cancel + end + end + end +end diff --git a/spec/lib/gitlab/exclusive_lease_helpers_spec.rb b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb index 747fe369c78..9914518cda5 100644 --- a/spec/lib/gitlab/exclusive_lease_helpers_spec.rb +++ b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb @@ -22,9 +22,7 @@ describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state do end context 'when the lease is not obtained yet' do - before do - stub_exclusive_lease(unique_key, 'uuid') - end + let!(:lease) { stub_exclusive_lease(unique_key, 'uuid') } it 'calls the given block' do expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_with_args(false) @@ -37,7 +35,7 @@ describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state do end it 'cancels the exclusive lease after the block' do - expect_to_cancel_exclusive_lease(unique_key, 'uuid') + expect(lease).to receive(:cancel).once subject end @@ -81,11 +79,32 @@ describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state do end end + context 'when we specify no retries' do + let(:options) { { retries: 0 } } + + it 'never sleeps' do + expect(class_instance).not_to receive(:sleep) + + expect { subject }.to raise_error('Failed to obtain a lock') + end + end + context 'when sleep second is specified' do - let(:options) { { retries: 0, sleep_sec: 0.05.seconds } } + let(:options) { { retries: 1, sleep_sec: 0.05.seconds } } + + it 'receives the specified argument' do + expect_any_instance_of(Object).to receive(:sleep).with(0.05.seconds).once + + expect { subject }.to raise_error('Failed to obtain a lock') + end + end + + context 'when sleep second is specified as a lambda' do + let(:options) { { retries: 2, sleep_sec: ->(num) { 0.1 + num } } } it 'receives the specified argument' do - expect(class_instance).to receive(:sleep).with(0.05.seconds).once + expect_any_instance_of(Object).to receive(:sleep).with(1.1.seconds).once + expect_any_instance_of(Object).to receive(:sleep).with(2.1.seconds).once expect { subject }.to raise_error('Failed to obtain a lock') end diff --git a/spec/lib/gitlab/exclusive_lease_spec.rb b/spec/lib/gitlab/exclusive_lease_spec.rb index 0739f622af5..2c0bb23a0b6 100644 --- a/spec/lib/gitlab/exclusive_lease_spec.rb +++ b/spec/lib/gitlab/exclusive_lease_spec.rb @@ -21,6 +21,27 @@ describe Gitlab::ExclusiveLease, :clean_gitlab_redis_shared_state do end end + describe '.redis_shared_state_key' do + it 'provides a namespaced key' do + expect(described_class.redis_shared_state_key(unique_key)) + .to start_with(described_class::PREFIX) + .and include(unique_key) + end + end + + describe '.ensure_prefixed_key' do + it 'does not double prefix a key' do + prefixed = described_class.redis_shared_state_key(unique_key) + + expect(described_class.ensure_prefixed_key(unique_key)) + .to eq(described_class.ensure_prefixed_key(prefixed)) + end + + it 'raises errors when there is no key' do + expect { described_class.ensure_prefixed_key(nil) }.to raise_error(described_class::NoKey) + end + end + describe '#renew' do it 'returns true when we have the existing lease' do lease = described_class.new(unique_key, 
timeout: 3600) @@ -61,18 +82,61 @@ describe Gitlab::ExclusiveLease, :clean_gitlab_redis_shared_state do end end - describe '.cancel' do - it 'can cancel a lease' do - uuid = new_lease(unique_key) - expect(uuid).to be_present - expect(new_lease(unique_key)).to eq(false) + describe 'cancellation' do + def new_lease(key) + described_class.new(key, timeout: 3600) + end - described_class.cancel(unique_key, uuid) - expect(new_lease(unique_key)).to be_present + shared_examples 'cancelling a lease' do + let(:lease) { new_lease(unique_key) } + + it 'releases the held lease' do + uuid = lease.try_obtain + expect(uuid).to be_present + expect(new_lease(unique_key).try_obtain).to eq(false) + + cancel_lease(uuid) + + expect(new_lease(unique_key).try_obtain).to be_present + end end - def new_lease(key) - described_class.new(key, timeout: 3600).try_obtain + describe '.cancel' do + def cancel_lease(uuid) + described_class.cancel(release_key, uuid) + end + + context 'when called with the unprefixed key' do + it_behaves_like 'cancelling a lease' do + let(:release_key) { unique_key } + end + end + + context 'when called with the prefixed key' do + it_behaves_like 'cancelling a lease' do + let(:release_key) { described_class.redis_shared_state_key(unique_key) } + end + end + + it 'does not raise errors when given a nil key' do + expect { described_class.cancel(nil, nil) }.not_to raise_error + end + end + + describe '#cancel' do + def cancel_lease(_uuid) + lease.cancel + end + + it_behaves_like 'cancelling a lease' + + it 'is safe to call even if the lease was never obtained' do + lease = new_lease(unique_key) + + lease.cancel + + expect(new_lease(unique_key).try_obtain).to be_present + end end end diff --git a/spec/lib/gitlab/experimentation_spec.rb b/spec/lib/gitlab/experimentation_spec.rb index a39c50ab038..99442cb0ca6 100644 --- a/spec/lib/gitlab/experimentation_spec.rb +++ b/spec/lib/gitlab/experimentation_spec.rb @@ -6,19 +6,16 @@ describe Gitlab::Experimentation do before do stub_const('Gitlab::Experimentation::EXPERIMENTS', { test_experiment: { - feature_toggle: feature_toggle, environment: environment, - enabled_ratio: enabled_ratio, tracking_category: 'Team' } }) - stub_feature_flags(feature_toggle => true) + allow(Feature).to receive(:get).with(:test_experiment_experiment_percentage).and_return double(percentage_of_time_value: enabled_percentage) end - let(:feature_toggle) { :test_experiment_toggle } let(:environment) { Rails.env.test? 
} - let(:enabled_ratio) { 0.1 } + let(:enabled_percentage) { 10 } describe Gitlab::Experimentation::ControllerConcern, type: :controller do controller(ApplicationController) do @@ -251,44 +248,16 @@ describe Gitlab::Experimentation do end end - describe 'feature toggle' do - context 'feature toggle is not set' do - let(:feature_toggle) { nil } + describe 'experiment is disabled' do + let(:enabled_percentage) { 0 } - it { is_expected.to be_truthy } - end - - context 'feature toggle is not set, but a feature with the experiment key as name does exist' do - before do - stub_feature_flags(test_experiment: false) - end - - let(:feature_toggle) { nil } - - it { is_expected.to be_falsey } - end - - context 'feature toggle is disabled' do - before do - stub_feature_flags(feature_toggle => false) - end - - it { is_expected.to be_falsey } - end + it { is_expected.to be_falsey } end - describe 'environment' do - context 'environment is not set' do - let(:environment) { nil } - - it { is_expected.to be_truthy } - end - - context 'we are on the wrong environment' do - let(:environment) { ::Gitlab.com? } + describe 'we are on the wrong environment' do + let(:environment) { ::Gitlab.com? } - it { is_expected.to be_falsey } - end + it { is_expected.to be_falsey } end end @@ -312,12 +281,6 @@ describe Gitlab::Experimentation do it { is_expected.to be_truthy } - context 'enabled ratio is not set' do - let(:enabled_ratio) { nil } - - it { is_expected.to be_falsey } - end - describe 'experimentation_subject_index' do context 'experimentation_subject_index is not set' do let(:experimentation_subject_index) { nil } diff --git a/spec/lib/gitlab/gfm/reference_rewriter_spec.rb b/spec/lib/gitlab/gfm/reference_rewriter_spec.rb index 084dde1f93f..335135696ef 100644 --- a/spec/lib/gitlab/gfm/reference_rewriter_spec.rb +++ b/spec/lib/gitlab/gfm/reference_rewriter_spec.rb @@ -147,6 +147,18 @@ describe Gitlab::Gfm::ReferenceRewriter do it { is_expected.to eq text } end + context 'when referring to a group' do + let(:text) { "group @#{group.full_path}" } + + it { is_expected.to eq text } + end + + context 'when referring to a user' do + let(:text) { "user @#{user.full_path}" } + + it { is_expected.to eq text } + end + context 'when referable has a nil reference' do before do create(:milestone, title: '9.0', project: old_project) diff --git a/spec/lib/gitlab/git/attributes_parser_spec.rb b/spec/lib/gitlab/git/attributes_parser_spec.rb index 94b7a086e59..45db4acd3ac 100644 --- a/spec/lib/gitlab/git/attributes_parser_spec.rb +++ b/spec/lib/gitlab/git/attributes_parser_spec.rb @@ -75,6 +75,14 @@ describe Gitlab::Git::AttributesParser, :seed_helper do expect(subject.attributes('test.foo')).to eq({}) end end + + context 'when attributes data has binary data' do + let(:data) { "\xFF\xFE*\u0000.\u0000c\u0000s".b } + + it 'returns an empty Hash' do + expect(subject.attributes('test.foo')).to eq({}) + end + end end describe '#patterns' do diff --git a/spec/lib/gitlab/git/blob_spec.rb b/spec/lib/gitlab/git/blob_spec.rb index 06f9767d58b..46d9b78c14b 100644 --- a/spec/lib/gitlab/git/blob_spec.rb +++ b/spec/lib/gitlab/git/blob_spec.rb @@ -652,4 +652,16 @@ describe Gitlab::Git::Blob, :seed_helper do expect(described_class).to respond_to(:gitlab_blob_size) end end + + describe '#lines' do + context 'when the encoding cannot be detected' do + it 'successfully splits the data' do + data = "test\nblob" + blob = Gitlab::Git::Blob.new(name: 'test', size: data.bytesize, data: data) + expect(blob).to receive(:ruby_encoding) { nil } + + 
expect(blob.lines).to eq(data.split("\n")) + end + end + end end diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb index c2fc228d34a..edd367673fb 100644 --- a/spec/lib/gitlab/git/commit_spec.rb +++ b/spec/lib/gitlab/git/commit_spec.rb @@ -161,6 +161,26 @@ describe Gitlab::Git::Commit, :seed_helper do expect(described_class.find(repository, "+123_4532530XYZ")).to be_nil end + it "returns nil for id started with dash" do + expect(described_class.find(repository, "-HEAD")).to be_nil + end + + it "returns nil for id containing colon" do + expect(described_class.find(repository, "HEAD:")).to be_nil + end + + it "returns nil for id containing space" do + expect(described_class.find(repository, "HE AD")).to be_nil + end + + it "returns nil for id containing tab" do + expect(described_class.find(repository, "HE\tAD")).to be_nil + end + + it "returns nil for id containing NULL" do + expect(described_class.find(repository, "HE\x00AD")).to be_nil + end + context 'with broken repo' do let(:repository) { Gitlab::Git::Repository.new('default', TEST_BROKEN_REPO_PATH, '', 'group/project') } diff --git a/spec/lib/gitlab/git/tag_spec.rb b/spec/lib/gitlab/git/tag_spec.rb index 87db3f588ad..6d3b239c38f 100644 --- a/spec/lib/gitlab/git/tag_spec.rb +++ b/spec/lib/gitlab/git/tag_spec.rb @@ -13,6 +13,13 @@ describe Gitlab::Git::Tag, :seed_helper do it { expect(tag.target).to eq("f4e6814c3e4e7a0de82a9e7cd20c626cc963a2f8") } it { expect(tag.dereferenced_target.sha).to eq("6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9") } it { expect(tag.message).to eq("Release") } + it { expect(tag.has_signature?).to be_falsey } + it { expect(tag.signature_type).to eq(:NONE) } + it { expect(tag.signature).to be_nil } + it { expect(tag.tagger.name).to eq("Dmitriy Zaporozhets") } + it { expect(tag.tagger.email).to eq("dmitriy.zaporozhets@gmail.com") } + it { expect(tag.tagger.date).to eq(Google::Protobuf::Timestamp.new(seconds: 1393491299)) } + it { expect(tag.tagger.timezone).to eq("+0200") } end describe 'last tag' do @@ -22,6 +29,29 @@ describe Gitlab::Git::Tag, :seed_helper do it { expect(tag.target).to eq("2ac1f24e253e08135507d0830508febaaccf02ee") } it { expect(tag.dereferenced_target.sha).to eq("fa1b1e6c004a68b7d8763b86455da9e6b23e36d6") } it { expect(tag.message).to eq("Version 1.2.1") } + it { expect(tag.has_signature?).to be_falsey } + it { expect(tag.signature_type).to eq(:NONE) } + it { expect(tag.signature).to be_nil } + it { expect(tag.tagger.name).to eq("Douwe Maan") } + it { expect(tag.tagger.email).to eq("douwe@selenight.nl") } + it { expect(tag.tagger.date).to eq(Google::Protobuf::Timestamp.new(seconds: 1427789449)) } + it { expect(tag.tagger.timezone).to eq("+0200") } + end + + describe 'signed tag' do + let(:project) { create(:project, :repository) } + let(:tag) { project.repository.find_tag('v1.1.1') } + + it { expect(tag.target).to eq("8f03acbcd11c53d9c9468078f32a2622005a4841") } + it { expect(tag.dereferenced_target.sha).to eq("189a6c924013fc3fe40d6f1ec1dc20214183bc97") } + it { expect(tag.message).to eq("x509 signed tag" + "\n" + X509Helpers::User1.signed_tag_signature.chomp) } + it { expect(tag.has_signature?).to be_truthy } + it { expect(tag.signature_type).to eq(:X509) } + it { expect(tag.signature).not_to be_nil } + it { expect(tag.tagger.name).to eq("Roger Meier") } + it { expect(tag.tagger.email).to eq("r.meier@siemens.com") } + it { expect(tag.tagger.date).to eq(Google::Protobuf::Timestamp.new(seconds: 1574261780)) } + it { expect(tag.tagger.timezone).to eq("+0100") } end 
it { expect(repository.tags.size).to eq(SeedRepo::Repo::TAGS.size) } diff --git a/spec/lib/gitlab/git_access_design_spec.rb b/spec/lib/gitlab/git_access_design_spec.rb new file mode 100644 index 00000000000..d816608f7e5 --- /dev/null +++ b/spec/lib/gitlab/git_access_design_spec.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true +require 'spec_helper' + +describe Gitlab::GitAccessDesign do + include DesignManagementTestHelpers + + let_it_be(:project) { create(:project) } + let_it_be(:user) { project.owner } + let(:protocol) { 'web' } + let(:actor) { user } + + subject(:access) do + described_class.new(actor, project, protocol, authentication_abilities: [:read_project, :download_code, :push_code]) + end + + describe '#check' do + subject { access.check('git-receive-pack', ::Gitlab::GitAccess::ANY) } + + before do + enable_design_management + end + + context 'when the user is allowed to manage designs' do + it do + is_expected.to be_a(::Gitlab::GitAccessResult::Success) + end + end + + context 'when the user is not allowed to manage designs' do + let_it_be(:user) { create(:user) } + + it 'raises an error' do + expect { subject }.to raise_error(::Gitlab::GitAccess::ForbiddenError) + end + end + + context 'when the protocol is not web' do + let(:protocol) { 'https' } + + it 'raises an error' do + expect { subject }.to raise_error(::Gitlab::GitAccess::ForbiddenError) + end + end + end +end diff --git a/spec/lib/gitlab/git_access_snippet_spec.rb b/spec/lib/gitlab/git_access_snippet_spec.rb index bbc3808df12..48b425a8ec5 100644 --- a/spec/lib/gitlab/git_access_snippet_spec.rb +++ b/spec/lib/gitlab/git_access_snippet_spec.rb @@ -11,8 +11,9 @@ describe Gitlab::GitAccessSnippet do let_it_be(:user) { create(:user) } let_it_be(:project) { create(:project, :public) } let_it_be(:snippet) { create(:project_snippet, :public, :repository, project: project) } - let(:repository) { snippet.repository } + let_it_be(:migration_bot) { User.migration_bot } + let(:repository) { snippet.repository } let(:actor) { user } let(:protocol) { 'ssh' } let(:changes) { Gitlab::GitAccess::ANY } @@ -27,20 +28,19 @@ describe Gitlab::GitAccessSnippet do let(:actor) { build(:deploy_key) } it 'does not allow push and pull access' do + expect { push_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:authentication_mechanism]) expect { pull_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:authentication_mechanism]) end end - describe 'when feature flag :version_snippets is disabled' do - let(:user) { snippet.author } - - before do - stub_feature_flags(version_snippets: false) - end + shared_examples 'actor is migration bot' do + context 'when user is the migration bot' do + let(:user) { migration_bot } - it 'allows push and pull access' do - expect { pull_access_check }.not_to raise_error - expect { push_access_check }.not_to raise_error + it 'can perform git operations' do + expect { push_access_check }.not_to raise_error + expect { pull_access_check }.not_to raise_error + end end end @@ -90,6 +90,12 @@ describe Gitlab::GitAccessSnippet do expect { push_access_check }.not_to raise_error expect { pull_access_check }.not_to raise_error end + + it_behaves_like 'actor is migration bot' do + before do + expect(migration_bot.required_terms_not_accepted?).to be_truthy + end + end end context 'project snippet accessibility', :aggregate_failures do @@ -120,6 +126,7 @@ describe Gitlab::GitAccessSnippet do context 'when project is public' do it_behaves_like 'checks accessibility' + it_behaves_like 'actor is 
migration bot' end context 'when project is public but snippet feature is private' do @@ -130,6 +137,7 @@ describe Gitlab::GitAccessSnippet do end it_behaves_like 'checks accessibility' + it_behaves_like 'actor is migration bot' end context 'when project is not accessible' do @@ -140,11 +148,58 @@ describe Gitlab::GitAccessSnippet do let(:membership) { membership } it 'respects accessibility' do - expect { push_access_check }.to raise_error(described_class::NotFoundError) - expect { pull_access_check }.to raise_error(described_class::NotFoundError) + expect { push_access_check }.to raise_snippet_not_found + expect { pull_access_check }.to raise_snippet_not_found + end + end + end + + it_behaves_like 'actor is migration bot' + end + + context 'when project is archived' do + let(:project) { create(:project, :public, :archived) } + + [:anonymous, :non_member].each do |membership| + context membership.to_s do + let(:membership) { membership } + + it 'cannot perform git operations' do + expect { push_access_check }.to raise_error(described_class::ForbiddenError) + expect { pull_access_check }.to raise_error(described_class::ForbiddenError) + end + end + end + + [:guest, :reporter, :maintainer, :author, :admin].each do |membership| + context membership.to_s do + let(:membership) { membership } + + it 'cannot perform git pushes' do + expect { push_access_check }.to raise_error(described_class::ForbiddenError) + expect { pull_access_check }.not_to raise_error + end + end + end + + it_behaves_like 'actor is migration bot' + end + + context 'when snippet feature is disabled' do + let(:project) { create(:project, :public, :snippets_disabled) } + + [:anonymous, :non_member, :author, :admin].each do |membership| + context membership.to_s do + let(:membership) { membership } + + it 'cannot perform git operations' do + expect { push_access_check }.to raise_error(described_class::ForbiddenError) + expect { pull_access_check }.to raise_error(described_class::ForbiddenError) end end end + + it_behaves_like 'actor is migration bot' end end @@ -172,6 +227,8 @@ describe Gitlab::GitAccessSnippet do expect { pull_access_check }.to raise_error(error_class) end end + + it_behaves_like 'actor is migration bot' end end @@ -179,36 +236,66 @@ describe Gitlab::GitAccessSnippet do let(:user) { snippet.author } let!(:primary_node) { FactoryBot.create(:geo_node, :primary) } - # Without override, push access would return Gitlab::GitAccessResult::CustomAction - it 'skips geo for snippet' do + before do allow(::Gitlab::Database).to receive(:read_only?).and_return(true) allow(::Gitlab::Geo).to receive(:secondary_with_primary?).and_return(true) + end + # Without override, push access would return Gitlab::GitAccessResult::CustomAction + it 'skips geo for snippet' do expect { push_access_check }.to raise_forbidden(/You can't push code to a read-only GitLab instance/) end + + context 'when user is migration bot' do + let(:user) { migration_bot } + + it 'skips geo for snippet' do + expect { push_access_check }.to raise_forbidden(/You can't push code to a read-only GitLab instance/) + end + end end context 'when changes are specific' do let(:changes) { "2d1db523e11e777e49377cfb22d368deec3f0793 ddd0f15ae83993f5cb66a927a28673882e99100b master" } let(:user) { snippet.author } - it 'does not raise error if SnippetCheck does not raise error' do - expect_next_instance_of(Gitlab::Checks::SnippetCheck) do |check| - expect(check).to receive(:validate!).and_call_original + shared_examples 'snippet checks' do + it 'does not raise error if 
SnippetCheck does not raise error' do + expect_next_instance_of(Gitlab::Checks::SnippetCheck) do |check| + expect(check).to receive(:validate!).and_call_original + end + expect_next_instance_of(Gitlab::Checks::PushFileCountCheck) do |check| + expect(check).to receive(:validate!) + end + + expect { push_access_check }.not_to raise_error end - expect_next_instance_of(Gitlab::Checks::PushFileCountCheck) do |check| - expect(check).to receive(:validate!) + + it 'raises error if SnippetCheck raises error' do + expect_next_instance_of(Gitlab::Checks::SnippetCheck) do |check| + allow(check).to receive(:validate!).and_raise(Gitlab::GitAccess::ForbiddenError, 'foo') + end + + expect { push_access_check }.to raise_forbidden('foo') end - expect { push_access_check }.not_to raise_error - end + it 'sets the file count limit from Snippet class' do + service = double - it 'raises error if SnippetCheck raises error' do - expect_next_instance_of(Gitlab::Checks::SnippetCheck) do |check| - allow(check).to receive(:validate!).and_raise(Gitlab::GitAccess::ForbiddenError, 'foo') + expect(service).to receive(:validate!).and_return(nil) + expect(Snippet).to receive(:max_file_limit).with(user).and_return(5) + expect(Gitlab::Checks::PushFileCountCheck).to receive(:new).with(anything, hash_including(limit: 5)).and_return(service) + + push_access_check end + end + + it_behaves_like 'snippet checks' - expect { push_access_check }.to raise_forbidden('foo') + context 'when user is migration bot' do + let(:user) { migration_bot } + + it_behaves_like 'snippet checks' end end @@ -221,6 +308,16 @@ describe Gitlab::GitAccessSnippet do let(:ref) { "refs/heads/snippet/edit-file" } let(:changes) { "#{oldrev} #{newrev} #{ref}" } + shared_examples 'migration bot does not err' do + let(:actor) { migration_bot } + + it 'does not err' do + expect(snippet.repository_size_checker).not_to receive(:above_size_limit?) 
+ + expect { push_access_check }.not_to raise_error + end + end + shared_examples_for 'a push to repository already over the limit' do it 'errs' do expect(snippet.repository_size_checker).to receive(:above_size_limit?).and_return(true) @@ -229,6 +326,8 @@ describe Gitlab::GitAccessSnippet do push_access_check end.to raise_error(described_class::ForbiddenError, /Your push has been rejected/) end + + it_behaves_like 'migration bot does not err' end shared_examples_for 'a push to repository below the limit' do @@ -241,6 +340,8 @@ describe Gitlab::GitAccessSnippet do expect { push_access_check }.not_to raise_error end + + it_behaves_like 'migration bot does not err' end shared_examples_for 'a push to repository to make it over the limit' do @@ -255,6 +356,8 @@ describe Gitlab::GitAccessSnippet do push_access_check end.to raise_error(described_class::ForbiddenError, /Your push to this repository would cause it to exceed the size limit/) end + + it_behaves_like 'migration bot does not err' end context 'when GIT_OBJECT_DIRECTORY_RELATIVE env var is set' do diff --git a/spec/lib/gitlab/git_access_wiki_spec.rb b/spec/lib/gitlab/git_access_wiki_spec.rb index b5e673c9e79..e42570804a8 100644 --- a/spec/lib/gitlab/git_access_wiki_spec.rb +++ b/spec/lib/gitlab/git_access_wiki_spec.rb @@ -52,14 +52,10 @@ describe Gitlab::GitAccessWiki do end context 'when the wiki repository does not exist' do - it 'returns not found' do - wiki_repo = project.wiki.repository - Gitlab::GitalyClient::StorageSettings.allow_disk_access do - FileUtils.rm_rf(wiki_repo.path) - end + let(:project) { create(:project) } - # Sanity check for rm_rf - expect(wiki_repo.exists?).to eq(false) + it 'returns not found' do + expect(project.wiki_repository_exists?).to eq(false) expect { subject }.to raise_error(Gitlab::GitAccess::NotFoundError, 'A repository for this project does not exist yet.') end diff --git a/spec/lib/gitlab/gl_repository/repo_type_spec.rb b/spec/lib/gitlab/gl_repository/repo_type_spec.rb index 6185b068d4c..bf6df55b71e 100644 --- a/spec/lib/gitlab/gl_repository/repo_type_spec.rb +++ b/spec/lib/gitlab/gl_repository/repo_type_spec.rb @@ -7,6 +7,7 @@ describe Gitlab::GlRepository::RepoType do let_it_be(:project_snippet) { create(:project_snippet, project: project, author: project.owner) } let(:project_path) { project.repository.full_path } let(:wiki_path) { project.wiki.repository.full_path } + let(:design_path) { project.design_repository.full_path } let(:personal_snippet_path) { "snippets/#{personal_snippet.id}" } let(:project_snippet_path) { "#{project.full_path}/snippets/#{project_snippet.id}" } @@ -24,6 +25,7 @@ describe Gitlab::GlRepository::RepoType do expect(described_class).not_to be_wiki expect(described_class).to be_project expect(described_class).not_to be_snippet + expect(described_class).not_to be_design end end @@ -33,6 +35,7 @@ describe Gitlab::GlRepository::RepoType do expect(described_class.valid?(wiki_path)).to be_truthy expect(described_class.valid?(personal_snippet_path)).to be_truthy expect(described_class.valid?(project_snippet_path)).to be_truthy + expect(described_class.valid?(design_path)).to be_truthy end end end @@ -51,6 +54,7 @@ describe Gitlab::GlRepository::RepoType do expect(described_class).to be_wiki expect(described_class).not_to be_project expect(described_class).not_to be_snippet + expect(described_class).not_to be_design end end @@ -60,6 +64,7 @@ describe Gitlab::GlRepository::RepoType do expect(described_class.valid?(wiki_path)).to be_truthy 
expect(described_class.valid?(personal_snippet_path)).to be_falsey expect(described_class.valid?(project_snippet_path)).to be_falsey + expect(described_class.valid?(design_path)).to be_falsey end end end @@ -79,6 +84,7 @@ describe Gitlab::GlRepository::RepoType do expect(described_class).to be_snippet expect(described_class).not_to be_wiki expect(described_class).not_to be_project + expect(described_class).not_to be_design end end @@ -88,6 +94,7 @@ describe Gitlab::GlRepository::RepoType do expect(described_class.valid?(wiki_path)).to be_falsey expect(described_class.valid?(personal_snippet_path)).to be_truthy expect(described_class.valid?(project_snippet_path)).to be_truthy + expect(described_class.valid?(design_path)).to be_falsey end end end @@ -115,8 +122,38 @@ describe Gitlab::GlRepository::RepoType do expect(described_class.valid?(wiki_path)).to be_falsey expect(described_class.valid?(personal_snippet_path)).to be_truthy expect(described_class.valid?(project_snippet_path)).to be_truthy + expect(described_class.valid?(design_path)).to be_falsey end end end end + + describe Gitlab::GlRepository::DESIGN do + it_behaves_like 'a repo type' do + let(:expected_identifier) { "design-#{project.id}" } + let(:expected_id) { project.id.to_s } + let(:expected_suffix) { '.design' } + let(:expected_repository) { project.design_repository } + let(:expected_container) { project } + end + + it 'knows its type' do + aggregate_failures do + expect(described_class).to be_design + expect(described_class).not_to be_project + expect(described_class).not_to be_wiki + expect(described_class).not_to be_snippet + end + end + + it 'checks if repository path is valid' do + aggregate_failures do + expect(described_class.valid?(design_path)).to be_truthy + expect(described_class.valid?(project_path)).to be_falsey + expect(described_class.valid?(wiki_path)).to be_falsey + expect(described_class.valid?(personal_snippet_path)).to be_falsey + expect(described_class.valid?(project_snippet_path)).to be_falsey + end + end + end end diff --git a/spec/lib/gitlab/gl_repository_spec.rb b/spec/lib/gitlab/gl_repository_spec.rb index 858f436047e..5f5244b7116 100644 --- a/spec/lib/gitlab/gl_repository_spec.rb +++ b/spec/lib/gitlab/gl_repository_spec.rb @@ -19,6 +19,10 @@ describe ::Gitlab::GlRepository do expect(described_class.parse("snippet-#{snippet.id}")).to eq([snippet, nil, Gitlab::GlRepository::SNIPPET]) end + it 'parses a design gl_repository' do + expect(described_class.parse("design-#{project.id}")).to eq([project, project, Gitlab::GlRepository::DESIGN]) + end + it 'throws an argument error on an invalid gl_repository type' do expect { described_class.parse("badformat-#{project.id}") }.to raise_error(ArgumentError) end @@ -27,4 +31,15 @@ describe ::Gitlab::GlRepository do expect { described_class.parse("project-foo") }.to raise_error(ArgumentError) end end + + describe 'DESIGN' do + it 'uses the design access checker' do + expect(described_class::DESIGN.access_checker_class).to eq(::Gitlab::GitAccessDesign) + end + + it 'builds a design repository' do + expect(described_class::DESIGN.repository_resolver.call(create(:project))) + .to be_a(::DesignManagement::Repository) + end + end end diff --git a/spec/lib/gitlab/google_code_import/client_spec.rb b/spec/lib/gitlab/google_code_import/client_spec.rb index 2e929a62ebc..fb1c7085017 100644 --- a/spec/lib/gitlab/google_code_import/client_spec.rb +++ b/spec/lib/gitlab/google_code_import/client_spec.rb @@ -3,7 +3,7 @@ require "spec_helper" describe 
Gitlab::GoogleCodeImport::Client do - let(:raw_data) { JSON.parse(fixture_file("GoogleCodeProjectHosting.json")) } + let(:raw_data) { Gitlab::Json.parse(fixture_file("GoogleCodeProjectHosting.json")) } subject { described_class.new(raw_data) } diff --git a/spec/lib/gitlab/google_code_import/importer_spec.rb b/spec/lib/gitlab/google_code_import/importer_spec.rb index 7055df89c09..3118671bb5e 100644 --- a/spec/lib/gitlab/google_code_import/importer_spec.rb +++ b/spec/lib/gitlab/google_code_import/importer_spec.rb @@ -4,7 +4,7 @@ require "spec_helper" describe Gitlab::GoogleCodeImport::Importer do let(:mapped_user) { create(:user, username: "thilo123") } - let(:raw_data) { JSON.parse(fixture_file("GoogleCodeProjectHosting.json")) } + let(:raw_data) { Gitlab::Json.parse(fixture_file("GoogleCodeProjectHosting.json")) } let(:client) { Gitlab::GoogleCodeImport::Client.new(raw_data) } let(:import_data) do { diff --git a/spec/lib/gitlab/grape_logging/formatters/lograge_with_timestamp_spec.rb b/spec/lib/gitlab/grape_logging/formatters/lograge_with_timestamp_spec.rb index d3b108f60ff..84f23bb2ad9 100644 --- a/spec/lib/gitlab/grape_logging/formatters/lograge_with_timestamp_spec.rb +++ b/spec/lib/gitlab/grape_logging/formatters/lograge_with_timestamp_spec.rb @@ -30,7 +30,7 @@ describe Gitlab::GrapeLogging::Formatters::LogrageWithTimestamp do } end let(:time) { Time.now } - let(:result) { JSON.parse(subject) } + let(:result) { Gitlab::Json.parse(subject) } subject { described_class.new.call(:info, time, nil, log_entry) } diff --git a/spec/lib/gitlab/grape_logging/loggers/cloudflare_logger_spec.rb b/spec/lib/gitlab/grape_logging/loggers/cloudflare_logger_spec.rb new file mode 100644 index 00000000000..922a433d7ac --- /dev/null +++ b/spec/lib/gitlab/grape_logging/loggers/cloudflare_logger_spec.rb @@ -0,0 +1,31 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::GrapeLogging::Loggers::CloudflareLogger do + subject { described_class.new } + + describe "#parameters" do + let(:mock_request) { ActionDispatch::Request.new({}) } + let(:start_time) { Time.new(2018, 01, 01) } + + describe 'with no Cloudflare headers' do + it 'returns an empty hash' do + expect(subject.parameters(mock_request, nil)).to eq({}) + end + end + + describe 'with Cloudflare headers' do + before do + mock_request.headers['Cf-Ray'] = SecureRandom.hex + mock_request.headers['Cf-Request-Id'] = SecureRandom.hex + end + + it 'returns the correct duration in seconds' do + data = subject.parameters(mock_request, nil) + + expect(data.keys).to contain_exactly(:cf_ray, :cf_request_id) + end + end + end +end diff --git a/spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb b/spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb index c9021e2f436..cc9535d4d2c 100644 --- a/spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb +++ b/spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb @@ -3,14 +3,73 @@ require 'spec_helper' describe Gitlab::GrapeLogging::Loggers::ExceptionLogger do - subject { described_class.new } - let(:mock_request) { OpenStruct.new(env: {}) } + let(:response_body) { nil } describe ".parameters" do + subject { described_class.new.parameters(mock_request, response_body) } + describe 'when no exception is available' do it 'returns an empty hash' do - expect(subject.parameters(mock_request, nil)).to eq({}) + expect(subject).to eq({}) + end + end + + describe 'with a response' do + before do + mock_request.env[::API::Helpers::API_RESPONSE_STATUS_CODE] = code + end + + 
context 'with a String response' do + let(:response_body) { { message: "something went wrong" }.to_json } + let(:code) { 400 } + let(:expected) { { api_error: [response_body.to_s] } } + + it 'logs the response body' do + expect(subject).to eq(expected) + end + end + + context 'with an Array response' do + let(:response_body) { ["hello world", 1] } + let(:code) { 400 } + let(:expected) { { api_error: ["hello world", "1"] } } + + it 'casts all elements to strings' do + expect(subject).to eq(expected) + end + end + + # Rack v2.0.9 can return a BodyProxy. This was changed in later versions: + # https://github.com/rack/rack/blob/2.0.9/lib/rack/response.rb#L69 + context 'with a Rack BodyProxy response' do + let(:message) { { message: "something went wrong" }.to_json } + let(:response) { Rack::Response.new(message, code, {}) } + let(:response_body) { Rack::BodyProxy.new(response) } + let(:code) { 400 } + let(:expected) { { api_error: [message] } } + + it 'logs the response body' do + expect(subject).to eq(expected) + end + end + + context 'unauthorized error' do + let(:response_body) { 'unauthorized' } + let(:code) { 401 } + + it 'does not log an api_error field' do + expect(subject).not_to have_key(:api_error) + end + end + + context 'HTTP success' do + let(:response_body) { 'success' } + let(:code) { 200 } + + it 'does not log an api_error field' do + expect(subject).not_to have_key(:api_error) + end end end @@ -32,7 +91,7 @@ describe Gitlab::GrapeLogging::Loggers::ExceptionLogger do end it 'returns the correct fields' do - expect(subject.parameters(mock_request, nil)).to eq(expected) + expect(subject).to eq(expected) end context 'with backtrace' do @@ -43,7 +102,7 @@ describe Gitlab::GrapeLogging::Loggers::ExceptionLogger do end it 'includes the backtrace' do - expect(subject.parameters(mock_request, nil)).to eq(expected) + expect(subject).to eq(expected) end end end diff --git a/spec/lib/gitlab/graphql/authorize/authorize_field_service_spec.rb b/spec/lib/gitlab/graphql/authorize/authorize_field_service_spec.rb index 98659dbed57..c1dab5feb91 100644 --- a/spec/lib/gitlab/graphql/authorize/authorize_field_service_spec.rb +++ b/spec/lib/gitlab/graphql/authorize/authorize_field_service_spec.rb @@ -84,6 +84,16 @@ describe Gitlab::Graphql::Authorize::AuthorizeFieldService do end end + context 'when the field is a connection' do + context 'when it resolves to nil' do + let(:field) { type_with_field(Types::QueryType.connection_type, :read_field, nil).fields['testField'].to_graphql } + + it 'does not fail when authorizing' do + expect(resolved).to be_nil + end + end + end + context 'when the field is a specific type' do let(:custom_type) { type(:read_type) } let(:object_in_field) { double('presented in field') } diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb index fdacecbaca6..ba77bc95bb5 100644 --- a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb +++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb @@ -9,6 +9,14 @@ describe Gitlab::Graphql::Pagination::Keyset::Connection do let(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil)} let(:context) { GraphQL::Query::Context.new(query: OpenStruct.new(schema: schema), values: nil, object: nil) } + before do + stub_const('NoPrimaryKey', Class.new(ActiveRecord::Base)) + NoPrimaryKey.class_eval do + self.table_name = 'no_primary_key' + self.primary_key = nil + end + end + subject(:connection) do described_class.new(nodes, { 
context: context, max_page_size: 3 }.merge(arguments)) end @@ -18,7 +26,7 @@ describe Gitlab::Graphql::Pagination::Keyset::Connection do end def decoded_cursor(cursor) - JSON.parse(Base64Bp.urlsafe_decode64(cursor)) + Gitlab::Json.parse(Base64Bp.urlsafe_decode64(cursor)) end describe '#cursor_for' do @@ -303,9 +311,4 @@ describe Gitlab::Graphql::Pagination::Keyset::Connection do end end end - - class NoPrimaryKey < ActiveRecord::Base - self.table_name = 'no_primary_key' - self.primary_key = nil - end end diff --git a/spec/lib/gitlab/graphql_logger_spec.rb b/spec/lib/gitlab/graphql_logger_spec.rb index 4977f98b83e..12cb56c78c1 100644 --- a/spec/lib/gitlab/graphql_logger_spec.rb +++ b/spec/lib/gitlab/graphql_logger_spec.rb @@ -23,18 +23,18 @@ describe Gitlab::GraphqlLogger do variables: {}, complexity: 181, depth: 0, - duration: 7 + duration_s: 7 } output = subject.format_message('INFO', now, 'test', analyzer_memo) - data = JSON.parse(output) + data = Gitlab::Json.parse(output) expect(data['severity']).to eq('INFO') expect(data['time']).to eq(now.utc.iso8601(3)) expect(data['complexity']).to eq(181) expect(data['variables']).to eq({}) expect(data['depth']).to eq(0) - expect(data['duration']).to eq(7) + expect(data['duration_s']).to eq(7) end end end diff --git a/spec/lib/gitlab/health_checks/master_check_spec.rb b/spec/lib/gitlab/health_checks/master_check_spec.rb index cb20c1188af..dcfc733d5ad 100644 --- a/spec/lib/gitlab/health_checks/master_check_spec.rb +++ b/spec/lib/gitlab/health_checks/master_check_spec.rb @@ -6,10 +6,9 @@ require_relative './simple_check_shared' describe Gitlab::HealthChecks::MasterCheck do let(:result_class) { Gitlab::HealthChecks::Result } - SUCCESS_CODE = 100 - FAILURE_CODE = 101 - before do + stub_const('SUCCESS_CODE', 100) + stub_const('FAILURE_CODE', 101) described_class.register_master end diff --git a/spec/lib/gitlab/hook_data/issuable_builder_spec.rb b/spec/lib/gitlab/hook_data/issuable_builder_spec.rb index cff489e0f3b..afbc48e9ca2 100644 --- a/spec/lib/gitlab/hook_data/issuable_builder_spec.rb +++ b/spec/lib/gitlab/hook_data/issuable_builder_spec.rb @@ -12,6 +12,7 @@ describe Gitlab::HookData::IssuableBuilder do include_examples 'project hook data' do let(:project) { builder.issuable.project } end + include_examples 'deprecated repository hook data' context "with a #{kind}" do diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml index 5d5e2fe2a33..c78b4501310 100644 --- a/spec/lib/gitlab/import_export/all_models.yml +++ b/spec/lib/gitlab/import_export/all_models.yml @@ -6,10 +6,12 @@ issues: - assignees - updated_by - milestone +- iteration - notes - resource_label_events - resource_weight_events - resource_milestone_events +- resource_state_events - sent_notifications - sentry_issue - label_links @@ -18,6 +20,7 @@ issues: - todos - user_agent_detail - moved_to +- moved_from - duplicated_to - promoted_to_epic - events @@ -39,6 +42,8 @@ issues: - related_vulnerabilities - user_mentions - system_note_metadata +- alert_management_alert +- status_page_published_incident events: - author - project @@ -111,9 +116,11 @@ merge_requests: - assignee - updated_by - milestone +- iteration - notes - resource_label_events - resource_milestone_events +- resource_state_events - label_links - labels - last_edited_by @@ -212,7 +219,7 @@ ci_pipelines: - vulnerability_findings - pipeline_config - security_scans -- daily_report_results +- daily_build_group_report_results pipeline_variables: - pipeline stages: @@ -222,6 +229,7 
@@ stages: - processables - builds - bridges +- latest_statuses statuses: - project - pipeline @@ -343,6 +351,7 @@ project: - labels - events - milestones +- iterations - notes - snippets - hooks @@ -420,7 +429,6 @@ project: - mirror_user - push_rule - jenkins_service -- jenkins_deprecated_service - index_status - feature_usage - approval_rules @@ -443,6 +451,7 @@ project: - vulnerability_scanners - operations_feature_flags - operations_feature_flags_client +- operations_feature_flags_user_lists - prometheus_alerts - prometheus_alert_events - self_managed_prometheus_alert_events @@ -477,9 +486,14 @@ project: - status_page_setting - requirements - export_jobs -- daily_report_results +- daily_build_group_report_results - jira_imports - compliance_framework_setting +- metrics_users_starred_dashboards +- alert_management_alerts +- repository_storage_moves +- freeze_periods +- webex_teams_service award_emoji: - awardable - user @@ -631,3 +645,5 @@ epic_issue: system_note_metadata: - note - description_version +status_page_published_incident: +- issue diff --git a/spec/lib/gitlab/import_export/attribute_configuration_spec.rb b/spec/lib/gitlab/import_export/attribute_configuration_spec.rb index 58da25bbedb..f97dafc6bf9 100644 --- a/spec/lib/gitlab/import_export/attribute_configuration_spec.rb +++ b/spec/lib/gitlab/import_export/attribute_configuration_spec.rb @@ -43,7 +43,4 @@ describe 'Import/Export attribute configuration' do IMPORT_EXPORT_CONFIG: #{Gitlab::ImportExport.config_file} MSG end - - class Author < User - end end diff --git a/spec/lib/gitlab/import_export/design_repo_restorer_spec.rb b/spec/lib/gitlab/import_export/design_repo_restorer_spec.rb new file mode 100644 index 00000000000..5662b8af280 --- /dev/null +++ b/spec/lib/gitlab/import_export/design_repo_restorer_spec.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::ImportExport::DesignRepoRestorer do + include GitHelpers + + describe 'bundle a design Git repo' do + let(:user) { create(:user) } + let!(:project_with_design_repo) { create(:project, :design_repo) } + let!(:project) { create(:project) } + let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" } + let(:shared) { project.import_export_shared } + let(:bundler) { Gitlab::ImportExport::DesignRepoSaver.new(project: project_with_design_repo, shared: shared) } + let(:bundle_path) { File.join(shared.export_path, Gitlab::ImportExport.design_repo_bundle_filename) } + let(:restorer) do + described_class.new(path_to_bundle: bundle_path, + shared: shared, + project: project) + end + + before do + allow_next_instance_of(Gitlab::ImportExport) do |instance| + allow(instance).to receive(:storage_path).and_return(export_path) + end + + bundler.save + end + + after do + FileUtils.rm_rf(export_path) + Gitlab::GitalyClient::StorageSettings.allow_disk_access do + FileUtils.rm_rf(project_with_design_repo.design_repository.path_to_repo) + FileUtils.rm_rf(project.design_repository.path_to_repo) + end + end + + it 'restores the repo successfully' do + expect(restorer.restore).to eq(true) + end + end +end diff --git a/spec/lib/gitlab/import_export/design_repo_saver_spec.rb b/spec/lib/gitlab/import_export/design_repo_saver_spec.rb new file mode 100644 index 00000000000..bff48e8b52a --- /dev/null +++ b/spec/lib/gitlab/import_export/design_repo_saver_spec.rb @@ -0,0 +1,37 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::ImportExport::DesignRepoSaver do + describe 'bundle a design Git repo' do + let_it_be(:user) { 
create(:user) } + let_it_be(:design) { create(:design, :with_file, versions_count: 1) } + let!(:project) { create(:project, :design_repo) } + let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" } + let(:shared) { project.import_export_shared } + let(:design_bundler) { described_class.new(project: project, shared: shared) } + + before do + project.add_maintainer(user) + allow_next_instance_of(Gitlab::ImportExport) do |instance| + allow(instance).to receive(:storage_path).and_return(export_path) + end + end + + after do + FileUtils.rm_rf(export_path) + end + + it 'bundles the repo successfully' do + expect(design_bundler.save).to be true + end + + context 'when the repo is empty' do + let!(:project) { create(:project) } + + it 'bundles the repo successfully' do + expect(design_bundler.save).to be true + end + end + end +end diff --git a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb index 15058684229..916ed692a05 100644 --- a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb +++ b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb @@ -8,7 +8,7 @@ describe Gitlab::ImportExport::FastHashSerializer do # Wrapping the result into JSON generating/parsing is for making # the testing more convenient. Doing this, we can check that # all items are properly serialized while traversing the simple hash. - subject { JSON.parse(JSON.generate(described_class.new(project, tree).execute)) } + subject { Gitlab::Json.parse(Gitlab::Json.generate(described_class.new(project, tree).execute)) } let!(:project) { setup_project } let(:user) { create(:user) } diff --git a/spec/lib/gitlab/import_export/group/legacy_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/group/legacy_tree_restorer_spec.rb index 3030cdf4cf8..4c926da1436 100644 --- a/spec/lib/gitlab/import_export/group/legacy_tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/group/legacy_tree_restorer_spec.rb @@ -141,7 +141,7 @@ describe Gitlab::ImportExport::Group::LegacyTreeRestorer do let(:filepath) { "group_exports/visibility_levels/#{visibility_level}" } it "imports all subgroups as #{visibility_level}" do - expect(group.children.map(&:visibility_level)).to eq(expected_visibilities) + expect(group.children.map(&:visibility_level)).to match_array(expected_visibilities) end end end diff --git a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb new file mode 100644 index 00000000000..327f36c664e --- /dev/null +++ b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb @@ -0,0 +1,184 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::ImportExport::Group::TreeRestorer do + include ImportExport::CommonUtil + + describe 'restore group tree' do + before_all do + # Using an admin for import, so we can check assignment of existing members + user = create(:admin, email: 'root@gitlabexample.com') + create(:user, email: 'adriene.mcclure@gitlabexample.com') + create(:user, email: 'gwendolyn_robel@gitlabexample.com') + + RSpec::Mocks.with_temporary_scope do + @group = create(:group, name: 'group', path: 'group') + @shared = Gitlab::ImportExport::Shared.new(@group) + + setup_import_export_config('group_exports/complex') + + group_tree_restorer = described_class.new(user: user, shared: @shared, group: @group) + + expect(group_tree_restorer.restore).to be_truthy + end + end + + it 'has the group description' do + expect(Group.find_by_path('group').description).to 
eq('Group Description') + end + + it 'has group labels' do + expect(@group.labels.count).to eq(10) + end + + context 'issue boards' do + it 'has issue boards' do + expect(@group.boards.count).to eq(1) + end + + it 'has board label lists' do + lists = @group.boards.find_by(name: 'first board').lists + + expect(lists.count).to eq(3) + expect(lists.first.label.title).to eq('TSL') + expect(lists.second.label.title).to eq('Sosync') + end + end + + it 'has badges' do + expect(@group.badges.count).to eq(1) + end + + it 'has milestones' do + expect(@group.milestones.count).to eq(5) + end + + it 'has group children' do + expect(@group.children.count).to eq(2) + end + + it 'has group members' do + expect(@group.members.map(&:user).map(&:email)).to contain_exactly( + 'root@gitlabexample.com', + 'adriene.mcclure@gitlabexample.com', + 'gwendolyn_robel@gitlabexample.com' + ) + end + end + + context 'child with no parent' do + let(:user) { create(:user) } + let(:group) { create(:group) } + let(:shared) { Gitlab::ImportExport::Shared.new(group) } + let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) } + + before do + setup_import_export_config('group_exports/child_with_no_parent') + + expect(group_tree_restorer.restore).to be_falsey + end + + it 'fails when a child group does not have a valid parent_id' do + expect(shared.errors).to include('Parent group not found') + end + end + + context 'excluded attributes' do + let!(:source_user) { create(:user, id: 123) } + let!(:importer_user) { create(:user) } + let(:group) { create(:group, name: 'user-inputed-name', path: 'user-inputed-path') } + let(:shared) { Gitlab::ImportExport::Shared.new(group) } + let(:group_tree_restorer) { described_class.new(user: importer_user, shared: shared, group: group) } + let(:exported_file) { File.join(shared.export_path, 'tree/groups/4352.json') } + let(:group_json) { ActiveSupport::JSON.decode(IO.read(exported_file)) } + + shared_examples 'excluded attributes' do + excluded_attributes = %w[ + id + parent_id + owner_id + created_at + updated_at + runners_token + runners_token_encrypted + saml_discovery_token + ] + + before do + group.add_owner(importer_user) + + setup_import_export_config('group_exports/complex') + + expect(File.exist?(exported_file)).to be_truthy + + group_tree_restorer.restore + group.reload + end + + it 'does not import root group name' do + expect(group.name).to eq('user-inputed-name') + end + + it 'does not import root group path' do + expect(group.path).to eq('user-inputed-path') + end + + excluded_attributes.each do |excluded_attribute| + it 'does not allow override of excluded attributes' do + unless group.public_send(excluded_attribute).nil? 
+ expect(group_json[excluded_attribute]).not_to eq(group.public_send(excluded_attribute)) + end + end + end + end + + include_examples 'excluded attributes' + end + + context 'group.json file access check' do + let(:user) { create(:user) } + let!(:group) { create(:group, name: 'group2', path: 'group2') } + let(:shared) { Gitlab::ImportExport::Shared.new(group) } + let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) } + + it 'does not read a symlink' do + Dir.mktmpdir do |tmpdir| + FileUtils.mkdir_p(File.join(tmpdir, 'tree', 'groups')) + setup_symlink(tmpdir, 'tree/groups/_all.ndjson') + + allow(shared).to receive(:export_path).and_return(tmpdir) + + expect(group_tree_restorer.restore).to eq(false) + expect(shared.errors).to include('Incorrect JSON format') + end + end + end + + context 'group visibility levels' do + let(:user) { create(:user) } + let(:shared) { Gitlab::ImportExport::Shared.new(group) } + let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) } + + before do + setup_import_export_config(filepath) + + group_tree_restorer.restore + end + + shared_examples 'with visibility level' do |visibility_level, expected_visibilities| + context "when visibility level is #{visibility_level}" do + let(:group) { create(:group, visibility_level) } + let(:filepath) { "group_exports/visibility_levels/#{visibility_level}" } + + it "imports all subgroups as #{visibility_level}" do + expect(group.children.map(&:visibility_level)).to eq(expected_visibilities) + end + end + end + + include_examples 'with visibility level', :public, [20, 10, 0] + include_examples 'with visibility level', :private, [0, 0, 0] + include_examples 'with visibility level', :internal, [10, 10, 0] + end +end diff --git a/spec/lib/gitlab/import_export/group/tree_saver_spec.rb b/spec/lib/gitlab/import_export/group/tree_saver_spec.rb new file mode 100644 index 00000000000..06e8484a3cb --- /dev/null +++ b/spec/lib/gitlab/import_export/group/tree_saver_spec.rb @@ -0,0 +1,140 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::ImportExport::Group::TreeSaver do + describe 'saves the group tree into a json object' do + let_it_be(:user) { create(:user) } + let_it_be(:group) { setup_groups } + + let(:shared) { Gitlab::ImportExport::Shared.new(group) } + let(:export_path) { "#{Dir.tmpdir}/group_tree_saver_spec" } + + subject(:group_tree_saver) { described_class.new(group: group, current_user: user, shared: shared) } + + before_all do + group.add_maintainer(user) + end + + before do + allow_next_instance_of(Gitlab::ImportExport) do |import_export| + allow(import_export).to receive(:storage_path).and_return(export_path) + end + end + + after do + FileUtils.rm_rf(export_path) + end + + it 'saves the group successfully' do + expect(group_tree_saver.save).to be true + end + + it 'fails to export a group' do + allow_next_instance_of(Gitlab::ImportExport::JSON::NdjsonWriter) do |ndjson_writer| + allow(ndjson_writer).to receive(:write_relation_array).and_raise(RuntimeError, 'exception') + end + + expect(shared).to receive(:error).with(RuntimeError).and_call_original + + expect(group_tree_saver.save).to be false + end + + context 'exported files' do + before do + group_tree_saver.save + end + + it 'has one group per line' do + groups_catalog = + File.readlines(exported_path_for('_all.ndjson')) + .map { |line| Integer(line) } + + expect(groups_catalog.size).to eq(3) + expect(groups_catalog).to eq([ + group.id, + group.descendants.first.id, + 
group.descendants.first.descendants.first.id + ]) + end + + it 'has a file per group' do + group.self_and_descendants.pluck(:id).each do |id| + group_attributes_file = exported_path_for("#{id}.json") + + expect(File.exist?(group_attributes_file)).to be(true) + end + end + + context 'group attributes file' do + let(:group_attributes_file) { exported_path_for("#{group.id}.json") } + let(:group_attributes) { ::JSON.parse(File.read(group_attributes_file)) } + + it 'has a file for each group with its attributes' do + expect(group_attributes['description']).to eq(group.description) + expect(group_attributes['parent_id']).to eq(group.parent_id) + end + + shared_examples 'excluded attributes' do + excluded_attributes = %w[ + owner_id + created_at + updated_at + runners_token + runners_token_encrypted + saml_discovery_token + ] + + excluded_attributes.each do |excluded_attribute| + it 'does not contain excluded attribute' do + expect(group_attributes).not_to include(excluded_attribute => group.public_send(excluded_attribute)) + end + end + end + + include_examples 'excluded attributes' + end + + it 'has a file for each group association' do + group.self_and_descendants.each do |g| + %w[ + badges + boards + epics + labels + members + milestones + ].each do |association| + path = exported_path_for("#{g.id}", "#{association}.ndjson") + expect(File.exist?(path)).to eq(true), "#{path} does not exist" + end + end + end + end + end + + def exported_path_for(*file) + File.join(group_tree_saver.full_path, 'groups', *file) + end + + def setup_groups + root = setup_group + subgroup = setup_group(parent: root) + setup_group(parent: subgroup) + + root + end + + def setup_group(parent: nil) + group = create(:group, description: 'description', parent: parent) + create(:milestone, group: group) + create(:group_badge, group: group) + group_label = create(:group_label, group: group) + board = create(:board, group: group, milestone_id: Milestone::Upcoming.id) + create(:list, board: board, label: group_label) + create(:group_badge, group: group) + create(:label_priority, label: group_label, priority: 1) + + group + end +end diff --git a/spec/lib/gitlab/import_export/import_export_equivalence_spec.rb b/spec/lib/gitlab/import_export/import_export_equivalence_spec.rb index 707975f20b6..95df9cd0e6e 100644 --- a/spec/lib/gitlab/import_export/import_export_equivalence_spec.rb +++ b/spec/lib/gitlab/import_export/import_export_equivalence_spec.rb @@ -46,8 +46,8 @@ describe Gitlab::ImportExport do export_path: test_tmp_path) ).to be true - imported_json = JSON.parse(File.read("#{test_fixture_path}/project.json")) - exported_json = JSON.parse(File.read("#{test_tmp_path}/project.json")) + imported_json = Gitlab::Json.parse(File.read("#{test_fixture_path}/project.json")) + exported_json = Gitlab::Json.parse(File.read("#{test_tmp_path}/project.json")) assert_relations_match(imported_json, exported_json) end diff --git a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb index 335b0031147..038b95809b4 100644 --- a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb +++ b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb @@ -53,22 +53,15 @@ describe 'Test coverage of the Project Import' do ].freeze # A list of JSON fixture files we use to test Import. - # Note that we use separate fixture to test ee-only features. # Most of the relations are present in `complex/project.json` # which is our main fixture. - PROJECT_JSON_FIXTURES_EE = - if Gitlab.ee?
- ['ee/spec/fixtures/lib/gitlab/import_export/designs/project.json'].freeze - else - [] - end - PROJECT_JSON_FIXTURES = [ 'spec/fixtures/lib/gitlab/import_export/complex/project.json', 'spec/fixtures/lib/gitlab/import_export/group/project.json', 'spec/fixtures/lib/gitlab/import_export/light/project.json', - 'spec/fixtures/lib/gitlab/import_export/milestone-iid/project.json' - ].freeze + PROJECT_JSON_FIXTURES_EE + 'spec/fixtures/lib/gitlab/import_export/milestone-iid/project.json', + 'spec/fixtures/lib/gitlab/import_export/designs/project.json' + ].freeze it 'ensures that all imported/exported relations are present in test JSONs' do not_tested_relations = (relations_from_config - tested_relations) - MUTED_RELATIONS diff --git a/spec/lib/gitlab/import_export/importer_spec.rb b/spec/lib/gitlab/import_export/importer_spec.rb index e03c95525df..60179146416 100644 --- a/spec/lib/gitlab/import_export/importer_spec.rb +++ b/spec/lib/gitlab/import_export/importer_spec.rb @@ -51,7 +51,8 @@ describe Gitlab::ImportExport::Importer do Gitlab::ImportExport::UploadsRestorer, Gitlab::ImportExport::LfsRestorer, Gitlab::ImportExport::StatisticsRestorer, - Gitlab::ImportExport::SnippetsRepoRestorer + Gitlab::ImportExport::SnippetsRepoRestorer, + Gitlab::ImportExport::DesignRepoRestorer ].each do |restorer| it "calls the #{restorer}" do fake_restorer = double(restorer.to_s) @@ -89,36 +90,74 @@ describe Gitlab::ImportExport::Importer do end context 'when project successfully restored' do - let!(:existing_project) { create(:project, namespace: user.namespace) } - let(:project) { create(:project, namespace: user.namespace, name: 'whatever', path: 'whatever') } + context "with a project in a user's namespace" do + let!(:existing_project) { create(:project, namespace: user.namespace) } + let(:project) { create(:project, namespace: user.namespace, name: 'whatever', path: 'whatever') } - before do - restorers = double(:restorers, all?: true) + before do + restorers = double(:restorers, all?: true) - allow(subject).to receive(:import_file).and_return(true) - allow(subject).to receive(:check_version!).and_return(true) - allow(subject).to receive(:restorers).and_return(restorers) - allow(project).to receive(:import_data).and_return(double(data: { 'original_path' => existing_project.path })) + allow(subject).to receive(:import_file).and_return(true) + allow(subject).to receive(:check_version!).and_return(true) + allow(subject).to receive(:restorers).and_return(restorers) + allow(project).to receive(:import_data).and_return(double(data: { 'original_path' => existing_project.path })) + end + + context 'when import_data' do + context 'has original_path' do + it 'overwrites existing project' do + expect_next_instance_of(::Projects::OverwriteProjectService) do |service| + expect(service).to receive(:execute).with(existing_project) + end + + subject.execute + end + end + + context 'has not original_path' do + before do + allow(project).to receive(:import_data).and_return(double(data: {})) + end + + it 'does not call the overwrite service' do + expect(::Projects::OverwriteProjectService).not_to receive(:new) + + subject.execute + end + end + end end - context 'when import_data' do + context "with a project in a group namespace" do + let(:group) { create(:group) } + let!(:existing_project) { create(:project, group: group) } + let(:project) { create(:project, creator: user, group: group, name: 'whatever', path: 'whatever') } + + before do + restorers = double(:restorers, all?: true) + + allow(subject).to 
receive(:import_file).and_return(true) + allow(subject).to receive(:check_version!).and_return(true) + allow(subject).to receive(:restorers).and_return(restorers) + allow(project).to receive(:import_data).and_return(double(data: { 'original_path' => existing_project.path })) + end + context 'has original_path' do it 'overwrites existing project' do - expect_any_instance_of(::Projects::OverwriteProjectService).to receive(:execute).with(existing_project) + group.add_owner(user) - subject.execute - end - end + expect_next_instance_of(::Projects::OverwriteProjectService) do |service| + expect(service).to receive(:execute).with(existing_project) + end - context 'has not original_path' do - before do - allow(project).to receive(:import_data).and_return(double(data: {})) + subject.execute end - it 'does not call the overwrite service' do - expect_any_instance_of(::Projects::OverwriteProjectService).not_to receive(:execute).with(existing_project) + it 'does not allow user to overwrite existing project' do + expect(::Projects::OverwriteProjectService).not_to receive(:new) - subject.execute + expect { subject.execute }.to raise_error(Projects::ImportService::Error, + "User #{user.username} (#{user.id}) cannot overwrite a project in #{group.path}") end end end diff --git a/spec/lib/gitlab/import_export/json/legacy_reader/file_spec.rb b/spec/lib/gitlab/import_export/json/legacy_reader/file_spec.rb index 1021ce3cd50..99932404fd9 100644 --- a/spec/lib/gitlab/import_export/json/legacy_reader/file_spec.rb +++ b/spec/lib/gitlab/import_export/json/legacy_reader/file_spec.rb @@ -7,7 +7,7 @@ describe Gitlab::ImportExport::JSON::LegacyReader::File do it_behaves_like 'import/export json legacy reader' do let(:valid_path) { 'spec/fixtures/lib/gitlab/import_export/light/project.json' } let(:data) { valid_path } - let(:json_data) { JSON.parse(File.read(valid_path)) } + let(:json_data) { Gitlab::Json.parse(File.read(valid_path)) } end describe '#exist?' do diff --git a/spec/lib/gitlab/import_export/json/legacy_reader/hash_spec.rb b/spec/lib/gitlab/import_export/json/legacy_reader/hash_spec.rb index 8c4dfd2f356..e793dc7339d 100644 --- a/spec/lib/gitlab/import_export/json/legacy_reader/hash_spec.rb +++ b/spec/lib/gitlab/import_export/json/legacy_reader/hash_spec.rb @@ -9,8 +9,8 @@ describe Gitlab::ImportExport::JSON::LegacyReader::Hash do # the hash is modified by the `LegacyReader` # we need to deep-dup it - let(:json_data) { JSON.parse(File.read(path)) } - let(:data) { JSON.parse(File.read(path)) } + let(:json_data) { Gitlab::Json.parse(File.read(path)) } + let(:data) { Gitlab::Json.parse(File.read(path)) } end describe '#exist?' 
do diff --git a/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb b/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb index 40b784fdb87..34e8b1ddd59 100644 --- a/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb +++ b/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb @@ -6,18 +6,10 @@ describe Gitlab::ImportExport::JSON::NdjsonReader do include ImportExport::CommonUtil let(:fixture) { 'spec/fixtures/lib/gitlab/import_export/light/tree' } - let(:root_tree) { JSON.parse(File.read(File.join(fixture, 'project.json'))) } + let(:root_tree) { Gitlab::Json.parse(File.read(File.join(fixture, 'project.json'))) } let(:ndjson_reader) { described_class.new(dir_path) } let(:importable_path) { 'project' } - before :all do - extract_archive('spec/fixtures/lib/gitlab/import_export/light', 'tree.tar.gz') - end - - after :all do - cleanup_artifacts_from_extract_archive('light') - end - describe '#exist?' do subject { ndjson_reader.exist? } @@ -101,8 +93,8 @@ describe Gitlab::ImportExport::JSON::NdjsonReader do context 'relation file contains multiple lines' do let(:key) { 'custom_attributes' } - let(:attr_1) { JSON.parse('{"id":201,"project_id":5,"created_at":"2016-06-14T15:01:51.315Z","updated_at":"2016-06-14T15:01:51.315Z","key":"color","value":"red"}') } - let(:attr_2) { JSON.parse('{"id":202,"project_id":5,"created_at":"2016-06-14T15:01:51.315Z","updated_at":"2016-06-14T15:01:51.315Z","key":"size","value":"small"}') } + let(:attr_1) { Gitlab::Json.parse('{"id":201,"project_id":5,"created_at":"2016-06-14T15:01:51.315Z","updated_at":"2016-06-14T15:01:51.315Z","key":"color","value":"red"}') } + let(:attr_2) { Gitlab::Json.parse('{"id":202,"project_id":5,"created_at":"2016-06-14T15:01:51.315Z","updated_at":"2016-06-14T15:01:51.315Z","key":"size","value":"small"}') } it 'yields every relation value to the Enumerator' do expect(subject.to_a).to eq([[attr_1, 0], [attr_2, 1]]) diff --git a/spec/lib/gitlab/import_export/lfs_saver_spec.rb b/spec/lib/gitlab/import_export/lfs_saver_spec.rb index a8ff7867410..e9d06573e70 100644 --- a/spec/lib/gitlab/import_export/lfs_saver_spec.rb +++ b/spec/lib/gitlab/import_export/lfs_saver_spec.rb @@ -26,7 +26,7 @@ describe Gitlab::ImportExport::LfsSaver do let(:lfs_json_file) { File.join(shared.export_path, Gitlab::ImportExport.lfs_objects_filename) } def lfs_json - JSON.parse(IO.read(lfs_json_file)) + Gitlab::Json.parse(IO.read(lfs_json_file)) end before do diff --git a/spec/lib/gitlab/import_export/project/export_task_spec.rb b/spec/lib/gitlab/import_export/project/export_task_spec.rb index cf11a1df33c..dc8eb54dc14 100644 --- a/spec/lib/gitlab/import_export/project/export_task_spec.rb +++ b/spec/lib/gitlab/import_export/project/export_task_spec.rb @@ -3,13 +3,14 @@ require 'rake_helper' describe Gitlab::ImportExport::Project::ExportTask do - let(:username) { 'root' } + let_it_be(:username) { 'root' } let(:namespace_path) { username } - let!(:user) { create(:user, username: username) } + let_it_be(:user) { create(:user, username: username) } let(:measurement_enabled) { false } let(:file_path) { 'spec/fixtures/gitlab/import_export/test_project_export.tar.gz' } let(:project) { create(:project, creator: user, namespace: user.namespace) } let(:project_name) { project.name } + let(:rake_task) { described_class.new(task_params) } let(:task_params) do { @@ -21,7 +22,7 @@ describe Gitlab::ImportExport::Project::ExportTask do } end - subject { described_class.new(task_params).export } + subject { rake_task.export } context 'when project is found' do 
let(:project) { create(:project, creator: user, namespace: user.namespace) } @@ -29,9 +30,13 @@ describe Gitlab::ImportExport::Project::ExportTask do around do |example| example.run ensure - File.delete(file_path) + File.delete(file_path) if File.exist?(file_path) end + include_context 'rake task object storage shared context' + + it_behaves_like 'rake task with disabled object_storage', ::Projects::ImportExport::ExportService, :success + it 'performs project export successfully' do expect { subject }.to output(/Done!/).to_stdout @@ -39,8 +44,6 @@ describe Gitlab::ImportExport::Project::ExportTask do expect(File).to exist(file_path) end - - it_behaves_like 'measurable' end context 'when project is not found' do @@ -66,4 +69,32 @@ describe Gitlab::ImportExport::Project::ExportTask do expect(subject).to eq(false) end end + + context 'when after export strategy fails' do + before do + allow_next_instance_of(Gitlab::ImportExport::AfterExportStrategies::MoveFileStrategy) do |after_export_strategy| + allow(after_export_strategy).to receive(:strategy_execute).and_raise(Gitlab::ImportExport::AfterExportStrategies::BaseAfterExportStrategy::StrategyError) + end + end + + it 'error is logged' do + expect(rake_task).to receive(:error).and_call_original + + expect(subject).to eq(false) + end + end + + context 'when saving services fail' do + before do + allow_next_instance_of(::Projects::ImportExport::ExportService) do |service| + allow(service).to receive(:execute).and_raise(Gitlab::ImportExport::Error) + end + end + + it 'error is logged' do + expect(rake_task).to receive(:error).and_call_original + + expect(subject).to eq(false) + end + end end diff --git a/spec/lib/gitlab/import_export/project/import_task_spec.rb b/spec/lib/gitlab/import_export/project/import_task_spec.rb index 4f4fcd3ad8a..7c11161aaa7 100644 --- a/spec/lib/gitlab/import_export/project/import_task_spec.rb +++ b/spec/lib/gitlab/import_export/project/import_task_spec.rb @@ -8,7 +8,7 @@ describe Gitlab::ImportExport::Project::ImportTask, :request_store do let!(:user) { create(:user, username: username) } let(:measurement_enabled) { false } let(:project) { Project.find_by_full_path("#{namespace_path}/#{project_name}") } - let(:import_task) { described_class.new(task_params) } + let(:rake_task) { described_class.new(task_params) } let(:task_params) do { username: username, @@ -19,29 +19,16 @@ describe Gitlab::ImportExport::Project::ImportTask, :request_store do } end - before do - allow(Settings.uploads.object_store).to receive(:[]=).and_call_original - end - - around do |example| - old_direct_upload_setting = Settings.uploads.object_store['direct_upload'] - old_background_upload_setting = Settings.uploads.object_store['background_upload'] - - Settings.uploads.object_store['direct_upload'] = true - Settings.uploads.object_store['background_upload'] = true - - example.run - - Settings.uploads.object_store['direct_upload'] = old_direct_upload_setting - Settings.uploads.object_store['background_upload'] = old_background_upload_setting - end - - subject { import_task.import } + subject { rake_task.import } context 'when project import is valid' do let(:project_name) { 'import_rake_test_project' } let(:file_path) { 'spec/fixtures/gitlab/import_export/lightweight_project_export.tar.gz' } + include_context 'rake task object storage shared context' + + it_behaves_like 'rake task with disabled object_storage', ::Projects::GitlabProjectsImportService, :execute_sidekiq_job + it 'performs project import successfully' do expect { subject }.to 
output(/Done!/).to_stdout expect { subject }.not_to raise_error @@ -52,30 +39,6 @@ describe Gitlab::ImportExport::Project::ImportTask, :request_store do expect(project.milestones.count).to be > 0 expect(project.import_state.status).to eq('finished') end - - it 'disables direct & background upload only during project creation' do - expect_next_instance_of(Projects::GitlabProjectsImportService) do |service| - expect(service).to receive(:execute).and_wrap_original do |m| - expect(Settings.uploads.object_store['background_upload']).to eq(false) - expect(Settings.uploads.object_store['direct_upload']).to eq(false) - - m.call - end - end - - expect(import_task).to receive(:execute_sidekiq_job).and_wrap_original do |m| - expect(Settings.uploads.object_store['background_upload']).to eq(true) - expect(Settings.uploads.object_store['direct_upload']).to eq(true) - expect(Settings.uploads.object_store).not_to receive(:[]=).with('backgroud_upload', false) - expect(Settings.uploads.object_store).not_to receive(:[]=).with('direct_upload', false) - - m.call - end - - subject - end - - it_behaves_like 'measurable' end context 'when project import is invalid' do diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb index 04e8bd05666..58589a7bbbe 100644 --- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb @@ -8,6 +8,7 @@ end describe Gitlab::ImportExport::Project::TreeRestorer do include ImportExport::CommonUtil + using RSpec::Parameterized::TableSyntax let(:shared) { project.import_export_shared } @@ -44,10 +45,6 @@ describe Gitlab::ImportExport::Project::TreeRestorer do end end - after(:context) do - cleanup_artifacts_from_extract_archive('complex') - end - context 'JSON' do it 'restores models based on JSON' do expect(@restored_project_json).to be_truthy @@ -536,10 +533,6 @@ describe Gitlab::ImportExport::Project::TreeRestorer do expect(restored_project_json).to eq(true) end - after do - cleanup_artifacts_from_extract_archive('light') - end - it 'issue system note metadata restored successfully' do note_content = 'created merge request !1 to address this issue' note = project.issues.first.notes.select { |n| n.note.match(/#{note_content}/)}.first @@ -586,10 +579,6 @@ describe Gitlab::ImportExport::Project::TreeRestorer do expect(restored_project_json).to eq(true) end - after do - cleanup_artifacts_from_extract_archive('multi_pipeline_ref_one_external_pr') - end - it_behaves_like 'restores project successfully', issues: 0, labels: 0, @@ -620,10 +609,6 @@ describe Gitlab::ImportExport::Project::TreeRestorer do .and_raise(exception) end - after do - cleanup_artifacts_from_extract_archive('light') - end - it 'report post import error' do expect(restored_project_json).to eq(false) expect(shared.errors).to include('post_import_error') @@ -646,10 +631,6 @@ describe Gitlab::ImportExport::Project::TreeRestorer do expect(restored_project_json).to eq(true) end - after do - cleanup_artifacts_from_extract_archive('light') - end - it_behaves_like 'restores project successfully', issues: 1, labels: 2, @@ -678,10 +659,6 @@ describe Gitlab::ImportExport::Project::TreeRestorer do setup_reader(reader) end - after do - cleanup_artifacts_from_extract_archive('light') - end - it 'handles string versions of visibility_level' do # Project needs to be in a group for visibility level comparison # to happen @@ -747,10 +724,6 @@ describe 
Gitlab::ImportExport::Project::TreeRestorer do expect(restored_project_json).to eq(true) end - after do - cleanup_artifacts_from_extract_archive('group') - end - it_behaves_like 'restores project successfully', issues: 3, labels: 2, @@ -784,10 +757,6 @@ describe Gitlab::ImportExport::Project::TreeRestorer do setup_reader(reader) end - after do - cleanup_artifacts_from_extract_archive('light') - end - it 'does not import any templated services' do expect(restored_project_json).to eq(true) @@ -835,10 +804,6 @@ describe Gitlab::ImportExport::Project::TreeRestorer do setup_reader(reader) end - after do - cleanup_artifacts_from_extract_archive('milestone-iid') - end - it 'preserves the project milestone IID' do expect_any_instance_of(Gitlab::ImportExport::Shared).not_to receive(:error) @@ -855,10 +820,6 @@ describe Gitlab::ImportExport::Project::TreeRestorer do setup_reader(reader) end - after do - cleanup_artifacts_from_extract_archive('light') - end - it 'converts empty external classification authorization labels to nil' do project.create_import_data(data: { override_params: { external_authorization_classification_label: "" } }) @@ -1004,10 +965,6 @@ describe Gitlab::ImportExport::Project::TreeRestorer do subject end - after do - cleanup_artifacts_from_extract_archive('with_invalid_records') - end - context 'when failures occur because a relation fails to be processed' do it_behaves_like 'restores project successfully', issues: 0, @@ -1031,6 +988,69 @@ describe Gitlab::ImportExport::Project::TreeRestorer do end end end + + context 'JSON with design management data' do + let_it_be(:user) { create(:admin, email: 'user_1@gitlabexample.com') } + let_it_be(:second_user) { create(:user, email: 'user_2@gitlabexample.com') } + let_it_be(:project) do + create(:project, :builds_disabled, :issues_disabled, + { name: 'project', path: 'project' }) + end + let(:shared) { project.import_export_shared } + let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) } + + subject(:restored_project_json) { project_tree_restorer.restore } + + before do + setup_import_export_config('designs') + restored_project_json + end + + it_behaves_like 'restores project successfully', issues: 2 + + it 'restores project associations correctly' do + expect(project.designs.size).to eq(7) + end + + describe 'restores issue associations correctly' do + let(:issue) { project.issues.offset(index).first } + + where(:index, :design_filenames, :version_shas, :events, :author_emails) do + 0 | %w[chirrido3.jpg jonathan_richman.jpg mariavontrap.jpeg] | %w[27702d08f5ee021ae938737f84e8fe7c38599e85 9358d1bac8ff300d3d2597adaa2572a20f7f8703 e1a4a501bcb42f291f84e5d04c8f927821542fb6] | %w[creation creation creation modification modification deletion] | %w[user_1@gitlabexample.com user_1@gitlabexample.com user_2@gitlabexample.com] + 1 | ['1 (1).jpeg', '2099743.jpg', 'a screenshot (1).jpg', 'chirrido3.jpg'] | %w[73f871b4c8c1d65c62c460635e023179fb53abc4 8587e78ab6bda3bc820a9f014c3be4a21ad4fcc8 c9b5f067f3e892122a4b12b0a25a8089192f3ac8] | %w[creation creation creation creation modification] | %w[user_1@gitlabexample.com user_2@gitlabexample.com user_2@gitlabexample.com] + end + + with_them do + it do + expect(issue.designs.pluck(:filename)).to contain_exactly(*design_filenames) + expect(issue.design_versions.pluck(:sha)).to contain_exactly(*version_shas) + expect(issue.design_versions.flat_map(&:actions).map(&:event)).to contain_exactly(*events) + expect(issue.design_versions.map(&:author).map(&:email)).to 
contain_exactly(*author_emails) + end + end + end + + describe 'restores design version associations correctly' do + let(:project_designs) { project.designs.reorder(:filename, :issue_id) } + let(:design) { project_designs.offset(index).first } + + where(:index, :version_shas) do + 0 | %w[73f871b4c8c1d65c62c460635e023179fb53abc4 c9b5f067f3e892122a4b12b0a25a8089192f3ac8] + 1 | %w[73f871b4c8c1d65c62c460635e023179fb53abc4] + 2 | %w[c9b5f067f3e892122a4b12b0a25a8089192f3ac8] + 3 | %w[27702d08f5ee021ae938737f84e8fe7c38599e85 9358d1bac8ff300d3d2597adaa2572a20f7f8703 e1a4a501bcb42f291f84e5d04c8f927821542fb6] + 4 | %w[8587e78ab6bda3bc820a9f014c3be4a21ad4fcc8] + 5 | %w[27702d08f5ee021ae938737f84e8fe7c38599e85 e1a4a501bcb42f291f84e5d04c8f927821542fb6] + 6 | %w[27702d08f5ee021ae938737f84e8fe7c38599e85] + end + + with_them do + it do + expect(design.versions.pluck(:sha)).to contain_exactly(*version_shas) + end + end + end + end end context 'enable ndjson import' do diff --git a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb index 8adc360026d..b9bfe253f10 100644 --- a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb +++ b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb @@ -168,6 +168,28 @@ describe Gitlab::ImportExport::Project::TreeSaver do it 'has issue resource label events' do expect(subject.first['resource_label_events']).not_to be_empty end + + it 'saves the issue designs correctly' do + expect(subject.first['designs'].size).to eq(1) + end + + it 'saves the issue design notes correctly' do + expect(subject.first['designs'].first['notes']).not_to be_empty + end + + it 'saves the issue design versions correctly' do + issue_json = subject.first + actions = issue_json['design_versions'].flat_map { |v| v['actions'] } + + expect(issue_json['design_versions'].size).to eq(2) + issue_json['design_versions'].each do |version| + expect(version['author_id']).to be_kind_of(Integer) + end + expect(actions.size).to eq(2) + actions.each do |action| + expect(action['design']).to be_present + end + end end context 'with ci_pipelines' do @@ -442,6 +464,9 @@ describe Gitlab::ImportExport::Project::TreeSaver do board = create(:board, project: project, name: 'TestBoard') create(:list, board: board, position: 0, label: project_label) + design = create(:design, :with_file, versions_count: 2, issue: issue) + create(:diff_note_on_design, noteable: design, project: project, author: user) + project end end diff --git a/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb index 0b58a75220d..8fe419da450 100644 --- a/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb @@ -64,7 +64,7 @@ describe Gitlab::ImportExport::RelationTreeRestorer do shared_examples 'logging of relations creation' do context 'when log_import_export_relation_creation feature flag is enabled' do before do - stub_feature_flags(log_import_export_relation_creation: { enabled: true, thing: group }) + stub_feature_flags(log_import_export_relation_creation: group) end it 'logs top-level relation creation' do @@ -79,7 +79,7 @@ describe Gitlab::ImportExport::RelationTreeRestorer do context 'when log_import_export_relation_creation feature flag is disabled' do before do - stub_feature_flags(log_import_export_relation_creation: { enabled: false, thing: group }) + stub_feature_flags(log_import_export_relation_creation: false) end it 'does 
not log top-level relation creation' do @@ -126,14 +126,6 @@ describe Gitlab::ImportExport::RelationTreeRestorer do let(:path) { 'spec/fixtures/lib/gitlab/import_export/complex/tree' } let(:relation_reader) { Gitlab::ImportExport::JSON::NdjsonReader.new(path) } - before :all do - extract_archive('spec/fixtures/lib/gitlab/import_export/complex', 'tree.tar.gz') - end - - after :all do - cleanup_artifacts_from_extract_archive('complex') - end - it_behaves_like 'import project successfully' end end @@ -156,7 +148,7 @@ describe Gitlab::ImportExport::RelationTreeRestorer do let(:reader) do Gitlab::ImportExport::Reader.new( shared: shared, - config: Gitlab::ImportExport::Config.new(config: Gitlab::ImportExport.group_config_file).to_h + config: Gitlab::ImportExport::Config.new(config: Gitlab::ImportExport.legacy_group_config_file).to_h ) end diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml index 88d7fdaef36..c29a85ce624 100644 --- a/spec/lib/gitlab/import_export/safe_model_attributes.yml +++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml @@ -480,6 +480,7 @@ Service: - pipeline_events - job_events - comment_on_event_enabled +- comment_detail - category - default - wiki_page_events @@ -487,6 +488,7 @@ Service: - confidential_note_events - deployment_events - description +- inherit_from_id ProjectHook: - id - url diff --git a/spec/lib/gitlab/instrumentation_helper_spec.rb b/spec/lib/gitlab/instrumentation_helper_spec.rb index 858fa044a52..fdb842dac0f 100644 --- a/spec/lib/gitlab/instrumentation_helper_spec.rb +++ b/spec/lib/gitlab/instrumentation_helper_spec.rb @@ -49,12 +49,12 @@ describe Gitlab::InstrumentationHelper do describe '.queue_duration_for_job' do where(:enqueued_at, :created_at, :time_now, :expected_duration) do "2019-06-01T00:00:00.000+0000" | nil | "2019-06-01T02:00:00.000+0000" | 2.hours.to_f - "2019-06-01T02:00:00.000+0000" | nil | "2019-06-01T02:00:00.001+0000" | 0.0 + "2019-06-01T02:00:00.000+0000" | nil | "2019-06-01T02:00:00.001+0000" | 0.001 "2019-06-01T02:00:00.000+0000" | "2019-05-01T02:00:00.000+0000" | "2019-06-01T02:00:01.000+0000" | 1 - nil | "2019-06-01T02:00:00.000+0000" | "2019-06-01T02:00:00.001+0000" | 0.0 + nil | "2019-06-01T02:00:00.000+0000" | "2019-06-01T02:00:00.001+0000" | 0.001 nil | nil | "2019-06-01T02:00:00.001+0000" | nil "2019-06-01T02:00:00.000+0200" | nil | "2019-06-01T02:00:00.000-0200" | 4.hours.to_f - 1571825569.998168 | nil | "2019-10-23T12:13:16.000+0200" | 26.00 + 1571825569.998168 | nil | "2019-10-23T12:13:16.000+0200" | 26.001832 1571825569 | nil | "2019-10-23T12:13:16.000+0200" | 27 "invalid_date" | nil | "2019-10-23T12:13:16.000+0200" | nil "" | nil | "2019-10-23T12:13:16.000+0200" | nil diff --git a/spec/lib/gitlab/jira_import/base_importer_spec.rb b/spec/lib/gitlab/jira_import/base_importer_spec.rb index f22efcb8743..ecaf3def589 100644 --- a/spec/lib/gitlab/jira_import/base_importer_spec.rb +++ b/spec/lib/gitlab/jira_import/base_importer_spec.rb @@ -3,12 +3,17 @@ require 'spec_helper' describe Gitlab::JiraImport::BaseImporter do + include JiraServiceHelper + let(:project) { create(:project) } describe 'with any inheriting class' do - context 'when feature flag disabled' do + context 'when an error is returned from the project validation' do before do stub_feature_flags(jira_issue_import: false) + + allow(project).to receive(:validate_jira_import_settings!) 
+ .and_raise(Projects::ImportService::Error, 'Jira import feature is disabled.') end it 'raises exception' do @@ -16,20 +21,17 @@ describe Gitlab::JiraImport::BaseImporter do end end - context 'when feature flag enabled' do + context 'when project validation is ok' do + let!(:jira_service) { create(:jira_service, project: project) } + before do stub_feature_flags(jira_issue_import: true) - end + stub_jira_service_test - context 'when Jira service was not setup' do - it 'raises exception' do - expect { described_class.new(project) }.to raise_error(Projects::ImportService::Error, 'Jira integration not configured.') - end + allow(project).to receive(:validate_jira_import_settings!) end context 'when Jira service exists' do - let!(:jira_service) { create(:jira_service, project: project) } - context 'when Jira import data is not present' do it 'raises exception' do expect { described_class.new(project) }.to raise_error(Projects::ImportService::Error, 'Unable to find Jira project to import data from.') diff --git a/spec/lib/gitlab/jira_import/handle_labels_service_spec.rb b/spec/lib/gitlab/jira_import/handle_labels_service_spec.rb new file mode 100644 index 00000000000..0eeff180575 --- /dev/null +++ b/spec/lib/gitlab/jira_import/handle_labels_service_spec.rb @@ -0,0 +1,53 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::JiraImport::HandleLabelsService do + describe '#execute' do + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, group: group) } + + let_it_be(:project_label) { create(:label, project: project, title: 'bug') } + let_it_be(:other_project_label) { create(:label, title: 'feature') } + let_it_be(:group_label) { create(:group_label, group: group, title: 'dev') } + let(:jira_labels) { %w(bug feature dev group::new) } + + subject { described_class.new(project, jira_labels).execute } + + context 'when some provided jira labels are missing' do + def created_labels + project.labels.reorder(id: :desc).first(2) + end + + it 'creates the missing labels on the project level' do + expect { subject }.to change { Label.count }.from(3).to(5) + + expect(created_labels.map(&:title)).to match_array(%w(feature group::new)) + end + + it 'returns the id of all labels matching the title' do + expect(subject).to match_array([project_label.id, group_label.id] + created_labels.map(&:id)) + end + end + + context 'when no provided jira labels are missing' do + let(:jira_labels) { %w(bug dev) } + + it 'does not create any new labels' do + expect { subject }.not_to change { Label.count }.from(3) + end + + it 'returns the id of all labels matching the title' do + expect(subject).to match_array([project_label.id, group_label.id]) + end + end + + context 'when no labels are provided' do + let(:jira_labels) { [] } + + it 'does not create any new labels' do + expect { subject }.not_to change { Label.count }.from(3) + end + end + end +end diff --git a/spec/lib/gitlab/jira_import/issue_serializer_spec.rb b/spec/lib/gitlab/jira_import/issue_serializer_spec.rb index 808ed6ee2fa..ce38a1234cf 100644 --- a/spec/lib/gitlab/jira_import/issue_serializer_spec.rb +++ b/spec/lib/gitlab/jira_import/issue_serializer_spec.rb @@ -4,7 +4,12 @@ require 'spec_helper' describe Gitlab::JiraImport::IssueSerializer do describe '#execute' do - let_it_be(:project) { create(:project) } + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, group: group) } + let_it_be(:project_label) { create(:label, project: project, title: 'bug') } + 
let_it_be(:other_project_label) { create(:label, project: project, title: 'feature') } + let_it_be(:group_label) { create(:group_label, group: group, title: 'dev') } + let_it_be(:current_user) { create(:user) } let(:iid) { 5 } let(:key) { 'PROJECT-5' } @@ -12,28 +17,21 @@ describe Gitlab::JiraImport::IssueSerializer do let(:description) { 'basic description' } let(:created_at) { '2020-01-01 20:00:00' } let(:updated_at) { '2020-01-10 20:00:00' } - let(:assignee) { double(displayName: 'Solver') } + let(:assignee) { double(attrs: { 'displayName' => 'Solver', 'emailAddress' => 'assignee@example.com' }) } + let(:reporter) { double(attrs: { 'displayName' => 'Reporter', 'emailAddress' => 'reporter@example.com' }) } let(:jira_status) { 'new' } let(:parent_field) do { 'key' => 'FOO-2', 'id' => '1050', 'fields' => { 'summary' => 'parent issue FOO' } } end - let(:issue_type_field) { { 'name' => 'Task' } } - let(:fix_versions_field) { [{ 'name' => '1.0' }, { 'name' => '1.1' }] } let(:priority_field) { { 'name' => 'Medium' } } - let(:labels_field) { %w(bug backend) } - let(:environment_field) { 'staging' } - let(:duedate_field) { '2020-03-01' } + let(:labels_field) { %w(bug dev backend frontend) } let(:fields) do { 'parent' => parent_field, - 'issuetype' => issue_type_field, - 'fixVersions' => fix_versions_field, 'priority' => priority_field, - 'labels' => labels_field, - 'environment' => environment_field, - 'duedate' => duedate_field + 'labels' => labels_field } end @@ -46,7 +44,7 @@ describe Gitlab::JiraImport::IssueSerializer do created: created_at, updated: updated_at, assignee: assignee, - reporter: double(displayName: 'Reporter'), + reporter: reporter, status: double(statusCategory: { 'key' => jira_status }), fields: fields ) @@ -54,27 +52,18 @@ describe Gitlab::JiraImport::IssueSerializer do let(:params) { { iid: iid } } - subject { described_class.new(project, jira_issue, params).execute } + subject { described_class.new(project, jira_issue, current_user.id, params).execute } let(:expected_description) do <<~MD - *Created by: Reporter* - - *Assigned to: Solver* - basic description --- **Issue metadata** - - Issue type: Task - Priority: Medium - - Labels: bug, backend - - Environment: staging - - Due date: 2020-03-01 - Parent issue: [FOO-2] parent issue FOO - - Fix versions: 1.0, 1.1 MD end @@ -88,55 +77,102 @@ describe Gitlab::JiraImport::IssueSerializer do state_id: 1, updated_at: updated_at, created_at: created_at, - author_id: project.creator_id + author_id: current_user.id, + assignee_ids: nil, + label_ids: [project_label.id, group_label.id] + Label.reorder(id: :asc).last(2).pluck(:id) ) end - context 'when some metadata fields are missing' do - let(:assignee) { nil } - let(:parent_field) { nil } - let(:fix_versions_field) { [] } - let(:labels_field) { [] } - let(:environment_field) { nil } - let(:duedate_field) { '2020-03-01' } + it 'creates a hash for valid issue' do + expect(Issue.new(subject)).to be_valid + end + + context 'labels' do + it 'creates all missing labels (on project level)' do + expect { subject }.to change { Label.count }.from(3).to(5) + + expect(Label.find_by(title: 'frontend').project).to eq(project) + expect(Label.find_by(title: 'backend').project).to eq(project) + end + + context 'when there are no new labels' do + let(:labels_field) { %w(bug dev) } - it 'skips the missing fields' do - expected_description = <<~MD - *Created by: Reporter* + it 'assigns the labels to the Issue hash' do + expect(subject[:label_ids]).to match_array([project_label.id, group_label.id]) + 
end - basic description + it 'does not create new labels' do + expect { subject }.not_to change { Label.count }.from(3) + end + end + end - --- + context 'author' do + context 'when reporter maps to a valid GitLab user' do + let!(:user) { create(:user, email: 'reporter@example.com') } - **Issue metadata** + it 'sets the issue author to the mapped user' do + project.add_developer(user) - - Issue type: Task - - Priority: Medium - - Due date: 2020-03-01 - MD + expect(subject[:author_id]).to eq(user.id) + end + end - expect(subject[:description]).to eq(expected_description.strip) + context 'when reporter does not map to a valid GitLab user' do + it 'defaults the issue author to project creator' do + expect(subject[:author_id]).to eq(current_user.id) + end + end + + context 'when reporter field is empty' do + let(:reporter) { nil } + + it 'defaults the issue author to project creator' do + expect(subject[:author_id]).to eq(current_user.id) + end + end + + context 'when reporter field is missing email address' do + let(:reporter) { double(attrs: { 'displayName' => 'Reporter' }) } + + it 'defaults the issue author to project creator' do + expect(subject[:author_id]).to eq(current_user.id) + end end end - context 'when all metadata fields are missing' do - let(:assignee) { nil } - let(:parent_field) { nil } - let(:issue_type_field) { nil } - let(:fix_versions_field) { [] } - let(:priority_field) { nil } - let(:labels_field) { [] } - let(:environment_field) { nil } - let(:duedate_field) { nil } + context 'assignee' do + context 'when assignee maps to a valid GitLab user' do + let!(:user) { create(:user, email: 'assignee@example.com') } + + it 'sets the issue assignees to the mapped user' do + project.add_developer(user) - it 'skips the whole metadata secction' do - expected_description = <<~MD - *Created by: Reporter* + expect(subject[:assignee_ids]).to eq([user.id]) + end + end + + context 'when assignee does not map to a valid GitLab user' do + it 'leaves the assignee empty' do + expect(subject[:assignee_ids]).to be_nil + end + end + + context 'when assignee field is empty' do + let(:assignee) { nil } + + it 'leaves the assignee empty' do + expect(subject[:assignee_ids]).to be_nil + end + end - basic description - MD + context 'when assignee field is missing email address' do + let(:assignee) { double(attrs: { 'displayName' => 'Reporter' }) } - expect(subject[:description]).to eq(expected_description.strip) + it 'leaves the assignee empty' do + expect(subject[:assignee_ids]).to be_nil + end end end end diff --git a/spec/lib/gitlab/jira_import/issues_importer_spec.rb b/spec/lib/gitlab/jira_import/issues_importer_spec.rb index 8e16fd3e978..6cf06c20e19 100644 --- a/spec/lib/gitlab/jira_import/issues_importer_spec.rb +++ b/spec/lib/gitlab/jira_import/issues_importer_spec.rb @@ -3,15 +3,19 @@ require 'spec_helper' describe Gitlab::JiraImport::IssuesImporter do + include JiraServiceHelper + let_it_be(:user) { create(:user) } + let_it_be(:current_user) { create(:user) } let_it_be(:project) { create(:project) } - let_it_be(:jira_import) { create(:jira_import_state, project: project) } + let_it_be(:jira_import) { create(:jira_import_state, project: project, user: current_user) } let_it_be(:jira_service) { create(:jira_service, project: project) } subject { described_class.new(project) } before do stub_feature_flags(jira_issue_import: true) + stub_jira_service_test end describe '#imported_items_cache_key' do @@ -36,8 +40,16 @@ describe Gitlab::JiraImport::IssuesImporter do context 'with results returned'
do JiraIssue = Struct.new(:id) - let_it_be(:jira_issue1) { JiraIssue.new(1) } - let_it_be(:jira_issue2) { JiraIssue.new(2) } + let_it_be(:jira_issues) { [JiraIssue.new(1), JiraIssue.new(2)] } + + def mock_issue_serializer(count) + serializer = instance_double(Gitlab::JiraImport::IssueSerializer, execute: { key: 'data' }) + + count.times do |i| + expect(Gitlab::JiraImport::IssueSerializer).to receive(:new) + .with(project, jira_issues[i], current_user.id, { iid: i + 1 }).and_return(serializer) + end + end context 'when single page of results is returned' do before do @@ -45,13 +57,11 @@ describe Gitlab::JiraImport::IssuesImporter do end it 'schedules 2 import jobs' do - expect(subject).to receive(:fetch_issues).and_return([jira_issue1, jira_issue2]) + expect(subject).to receive(:fetch_issues).with(0).and_return([jira_issues[0], jira_issues[1]]) expect(Gitlab::JiraImport::ImportIssueWorker).to receive(:perform_async).twice expect(Gitlab::Cache::Import::Caching).to receive(:set_add).twice.and_call_original expect(Gitlab::Cache::Import::Caching).to receive(:set_includes?).twice.and_call_original - allow_next_instance_of(Gitlab::JiraImport::IssueSerializer) do |instance| - allow(instance).to receive(:execute).and_return({ key: 'data' }) - end + mock_issue_serializer(2) job_waiter = subject.execute @@ -66,13 +76,11 @@ describe Gitlab::JiraImport::IssuesImporter do end it 'schedules 3 import jobs' do - expect(subject).to receive(:fetch_issues).with(0).and_return([jira_issue1, jira_issue2]) + expect(subject).to receive(:fetch_issues).with(0).and_return([jira_issues[0], jira_issues[1]]) expect(Gitlab::JiraImport::ImportIssueWorker).to receive(:perform_async).twice.times expect(Gitlab::Cache::Import::Caching).to receive(:set_add).twice.times.and_call_original expect(Gitlab::Cache::Import::Caching).to receive(:set_includes?).twice.times.and_call_original - allow_next_instance_of(Gitlab::JiraImport::IssueSerializer) do |instance| - allow(instance).to receive(:execute).and_return({ key: 'data' }) - end + mock_issue_serializer(2) job_waiter = subject.execute @@ -87,13 +95,11 @@ describe Gitlab::JiraImport::IssuesImporter do end it 'schedules 2 import jobs' do - expect(subject).to receive(:fetch_issues).with(0).and_return([jira_issue1, jira_issue1]) + expect(subject).to receive(:fetch_issues).with(0).and_return([jira_issues[0], jira_issues[0]]) expect(Gitlab::JiraImport::ImportIssueWorker).to receive(:perform_async).once expect(Gitlab::Cache::Import::Caching).to receive(:set_add).once.and_call_original expect(Gitlab::Cache::Import::Caching).to receive(:set_includes?).twice.times.and_call_original - allow_next_instance_of(Gitlab::JiraImport::IssueSerializer) do |instance| - allow(instance).to receive(:execute).and_return({ key: 'data' }) - end + mock_issue_serializer(1) job_waiter = subject.execute diff --git a/spec/lib/gitlab/jira_import/labels_importer_spec.rb b/spec/lib/gitlab/jira_import/labels_importer_spec.rb index 3eb4666a74f..67eb541d376 100644 --- a/spec/lib/gitlab/jira_import/labels_importer_spec.rb +++ b/spec/lib/gitlab/jira_import/labels_importer_spec.rb @@ -3,35 +3,100 @@ require 'spec_helper' describe Gitlab::JiraImport::LabelsImporter do + include JiraServiceHelper + let_it_be(:user) { create(:user) } - let_it_be(:project) { create(:project) } + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, group: group) } let_it_be(:jira_service) { create(:jira_service, project: project) } - subject { described_class.new(project).execute } + let(:importer) { 
described_class.new(project) } + + subject { importer.execute } before do stub_feature_flags(jira_issue_import: true) + stub_const('Gitlab::JiraImport::LabelsImporter::MAX_LABELS', 2) end describe '#execute', :clean_gitlab_redis_cache do + before do + stub_jira_service_test + end + context 'when label is missing from jira import' do let_it_be(:no_label_jira_import) { create(:jira_import_state, label: nil, project: project) } it 'raises error' do - expect { subject }.to raise_error(Projects::ImportService::Error, 'Failed to find import label for jira import.') + expect { subject }.to raise_error(Projects::ImportService::Error, 'Failed to find import label for Jira import.') end end - context 'when label exists' do - let_it_be(:label) { create(:label) } + context 'when jira import label exists' do + let_it_be(:label) { create(:label) } let_it_be(:jira_import_with_label) { create(:jira_import_state, label: label, project: project) } + let_it_be(:issue_label) { create(:label, project: project, title: 'bug') } + + let(:jira_labels_1) { { "maxResults" => 2, "startAt" => 0, "total" => 3, "isLast" => false, "values" => %w(backend bug) } } + let(:jira_labels_2) { { "maxResults" => 2, "startAt" => 2, "total" => 3, "isLast" => true, "values" => %w(feature) } } + + context 'when labels are returned from jira' do + before do + client = double + expect(importer).to receive(:client).twice.and_return(client) + allow(client).to receive(:get).twice.and_return(jira_labels_1, jira_labels_2) + end + + it 'caches import label' do + expect(Gitlab::Cache::Import::Caching.read(Gitlab::JiraImport.import_label_cache_key(project.id))).to be nil + + subject + + expect(Gitlab::JiraImport.get_import_label_id(project.id).to_i).to eq(label.id) + end + + it 'calls Gitlab::JiraImport::HandleLabelsService' do + expect(Gitlab::JiraImport::HandleLabelsService).to receive(:new).with(project, %w(backend bug)).and_return(double(execute: [1, 2])) + expect(Gitlab::JiraImport::HandleLabelsService).to receive(:new).with(project, %w(feature)).and_return(double(execute: [3])) + + subject + end + end + + context 'when there are no labels to be handled' do + shared_examples 'no labels handling' do + it 'does not call Gitlab::JiraImport::HandleLabelsService' do + expect(Gitlab::JiraImport::HandleLabelsService).not_to receive(:new) + + subject + end + end + + let(:jira_labels) { { "maxResults" => 2, "startAt" => 0, "total" => 3, "values" => [] } } + + before do + client = double + expect(importer).to receive(:client).and_return(client) + allow(client).to receive(:get).and_return(jira_labels) + end + + context 'when the labels field is empty' do + let(:jira_labels) { { "maxResults" => 2, "startAt" => 0, "isLast" => true, "total" => 3, "values" => [] } } + + it_behaves_like 'no labels handling' + end + + context 'when the labels field is missing' do + let(:jira_labels) { { "maxResults" => 2, "startAt" => 0, "isLast" => true, "total" => 3 } } - it 'caches import label' do - expect(Gitlab::Cache::Import::Caching.read(Gitlab::JiraImport.import_label_cache_key(project.id))).to be nil + it_behaves_like 'no labels handling' + end - subject + context 'when the isLast argument is missing' do + let(:jira_labels) { { "maxResults" => 2, "startAt" => 0, "total" => 3, "values" => %w(bug dev) } } - expect(Gitlab::JiraImport.get_import_label_id(project.id).to_i).to eq(label.id) + it_behaves_like 'no labels handling' + end end end end diff --git a/spec/lib/gitlab/jira_import/metadata_collector_spec.rb b/spec/lib/gitlab/jira_import/metadata_collector_spec.rb 
new file mode 100644 index 00000000000..af479810df0 --- /dev/null +++ b/spec/lib/gitlab/jira_import/metadata_collector_spec.rb @@ -0,0 +1,178 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::JiraImport::MetadataCollector do + describe '#execute' do + let(:key) { 'PROJECT-5' } + let(:summary) { 'some title' } + let(:description) { 'basic description' } + let(:created_at) { '2020-01-01 20:00:00' } + let(:updated_at) { '2020-01-10 20:00:00' } + let(:jira_status) { 'new' } + + let(:parent_field) do + { 'key' => 'FOO-2', 'id' => '1050', 'fields' => { 'summary' => 'parent issue FOO' } } + end + let(:issue_type_field) { { 'name' => 'Task' } } + let(:fix_versions_field) { [{ 'name' => '1.0' }, { 'name' => '1.1' }] } + let(:priority_field) { { 'name' => 'Medium' } } + let(:environment_field) { 'staging' } + let(:duedate_field) { '2020-03-01' } + + let(:fields) do + { + 'parent' => parent_field, + 'issuetype' => issue_type_field, + 'fixVersions' => fix_versions_field, + 'priority' => priority_field, + 'environment' => environment_field, + 'duedate' => duedate_field + } + end + let(:jira_issue) do + double( + id: '1234', + key: key, + summary: summary, + description: description, + created: created_at, + updated: updated_at, + status: double(statusCategory: { 'key' => jira_status }), + fields: fields + ) + end + + subject { described_class.new(jira_issue).execute } + + context 'when all metadata fields are present' do + it 'writes all fields' do + expected_result = <<~MD + --- + + **Issue metadata** + + - Issue type: Task + - Priority: Medium + - Environment: staging + - Due date: 2020-03-01 + - Parent issue: [FOO-2] parent issue FOO + - Fix versions: 1.0, 1.1 + MD + + expect(subject.strip).to eq(expected_result.strip) + end + end + + context 'when some fields are in incorrect format' do + let(:parent_field) { nil } + let(:fix_versions_field) { [] } + let(:priority_field) { nil } + let(:environment_field) { nil } + let(:duedate_field) { nil } + + context 'when fixVersions field is not an array' do + let(:fix_versions_field) { { 'title' => '1.0', 'name' => '1.1' } } + + it 'skips these fields' do + expected_result = <<~MD + --- + + **Issue metadata** + + - Issue type: Task + MD + + expect(subject.strip).to eq(expected_result.strip) + end + end + + context 'when a fixVersions element is in incorrect format' do + let(:fix_versions_field) { [{ 'title' => '1.0' }, { 'name' => '1.1' }] } + + it 'skips the element' do + expected_result = <<~MD + --- + + **Issue metadata** + + - Issue type: Task + - Fix versions: 1.1 + MD + + expect(subject.strip).to eq(expected_result.strip) + end + end + + context 'when a parent field has incorrectly formatted summary' do + let(:parent_field) do + { 'key' => 'FOO-2', 'id' => '1050', 'other_field' => { 'summary' => 'parent issue FOO' } } + end + + it 'skips the summary' do + expected_result = <<~MD + --- + + **Issue metadata** + + - Issue type: Task + - Parent issue: [FOO-2] + MD + + expect(subject.strip).to eq(expected_result.strip) + end + end + + context 'when a parent field is missing the key' do + let(:parent_field) do + { 'not_key' => 'FOO-2', 'id' => '1050', 'other_field' => { 'summary' => 'parent issue FOO' } } + end + + it 'skips the field' do + expected_result = <<~MD + --- + + **Issue metadata** + + - Issue type: Task + MD + + expect(subject.strip).to eq(expected_result.strip) + end + end + end + + context 'when some metadata fields are missing' do + let(:parent_field) { nil } + let(:fix_versions_field) { [] } + 
let(:environment_field) { nil } + + it 'skips the missing fields' do + expected_result = <<~MD + --- + + **Issue metadata** + + - Issue type: Task + - Priority: Medium + - Due date: 2020-03-01 + MD + + expect(subject.strip).to eq(expected_result.strip) + end + end + + context 'when all metadata fields are missing' do + let(:parent_field) { nil } + let(:issue_type_field) { nil } + let(:fix_versions_field) { [] } + let(:priority_field) { nil } + let(:environment_field) { nil } + let(:duedate_field) { nil } + + it 'returns nil' do + expect(subject).to be_nil + end + end + end +end diff --git a/spec/lib/gitlab/jira_import/user_mapper_spec.rb b/spec/lib/gitlab/jira_import/user_mapper_spec.rb new file mode 100644 index 00000000000..c8c8bd3c5b0 --- /dev/null +++ b/spec/lib/gitlab/jira_import/user_mapper_spec.rb @@ -0,0 +1,80 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::JiraImport::UserMapper do + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, group: group) } + let_it_be(:user) { create(:user, email: 'user@example.com') } + let_it_be(:email) { create(:email, user: user, email: 'second_email@example.com', confirmed_at: nil) } + + let(:jira_user) { { 'acountId' => '1a2b', 'emailAddress' => 'user@example.com' } } + + describe '#execute' do + subject { described_class.new(project, jira_user).execute } + + context 'when jira_user is nil' do + let(:jira_user) { nil } + + it 'returns nil' do + expect(subject).to be_nil + end + end + + context 'when Gitlab user is not found by email' do + let(:jira_user) { { 'acountId' => '1a2b', 'emailAddress' => 'other@example.com' } } + + it 'returns nil' do + expect(subject).to be_nil + end + end + + context 'when jira_user emailAddress is nil' do + let(:jira_user) { { 'acountId' => '1a2b', 'emailAddress' => nil } } + + it 'returns nil' do + expect(subject).to be_nil + end + end + + context 'when jira_user emailAddress key is missing' do + let(:jira_user) { { 'acountId' => '1a2b' } } + + it 'returns nil' do + expect(subject).to be_nil + end + end + + context 'when found user is not a project member' do + it 'returns nil' do + expect(subject).to be_nil + end + end + + context 'when found user is a project member' do + it 'returns the found user' do + project.add_developer(user) + + expect(subject).to eq(user) + end + end + + context 'when user found by unconfirmed secondary address is a project member' do + let(:jira_user) { { 'acountId' => '1a2b', 'emailAddress' => 'second_email@example.com' } } + + it 'returns the found user' do + project.add_developer(user) + + expect(subject).to eq(user) + end + end + + context 'when user is a group member' do + it 'returns the found user' do + group.add_developer(user) + + expect(subject).to eq(user) + end + end + end +end diff --git a/spec/lib/gitlab/json_logger_spec.rb b/spec/lib/gitlab/json_logger_spec.rb index 5d544198c40..41dafc84ef2 100644 --- a/spec/lib/gitlab/json_logger_spec.rb +++ b/spec/lib/gitlab/json_logger_spec.rb @@ -14,7 +14,7 @@ describe Gitlab::JsonLogger do it 'formats strings' do output = subject.format_message('INFO', now, 'test', 'Hello world') - data = JSON.parse(output) + data = Gitlab::Json.parse(output) expect(data['severity']).to eq('INFO') expect(data['time']).to eq(now.utc.iso8601(3)) @@ -24,7 +24,7 @@ describe Gitlab::JsonLogger do it 'formats hashes' do output = subject.format_message('INFO', now, 'test', { hello: 1 }) - data = JSON.parse(output) + data = Gitlab::Json.parse(output) expect(data['severity']).to eq('INFO')
expect(data['time']).to eq(now.utc.iso8601(3)) diff --git a/spec/lib/gitlab/json_spec.rb b/spec/lib/gitlab/json_spec.rb index 5186ab041da..ee7c98a5a54 100644 --- a/spec/lib/gitlab/json_spec.rb +++ b/spec/lib/gitlab/json_spec.rb @@ -3,47 +3,151 @@ require "spec_helper" RSpec.describe Gitlab::Json do + before do + stub_feature_flags(json_wrapper_legacy_mode: true) + end + describe ".parse" do - it "parses an object" do - expect(subject.parse('{ "foo": "bar" }')).to eq({ "foo" => "bar" }) - end + context "legacy_mode is disabled by default" do + it "parses an object" do + expect(subject.parse('{ "foo": "bar" }')).to eq({ "foo" => "bar" }) + end - it "parses an array" do - expect(subject.parse('[{ "foo": "bar" }]')).to eq([{ "foo" => "bar" }]) - end + it "parses an array" do + expect(subject.parse('[{ "foo": "bar" }]')).to eq([{ "foo" => "bar" }]) + end - it "raises an error on a string" do - expect { subject.parse('"foo"') }.to raise_error(JSON::ParserError) + it "parses a string" do + expect(subject.parse('"foo"', legacy_mode: false)).to eq("foo") + end + + it "parses a true bool" do + expect(subject.parse("true", legacy_mode: false)).to be(true) + end + + it "parses a false bool" do + expect(subject.parse("false", legacy_mode: false)).to be(false) + end end - it "raises an error on a true bool" do - expect { subject.parse("true") }.to raise_error(JSON::ParserError) + context "legacy_mode is enabled" do + it "parses an object" do + expect(subject.parse('{ "foo": "bar" }', legacy_mode: true)).to eq({ "foo" => "bar" }) + end + + it "parses an array" do + expect(subject.parse('[{ "foo": "bar" }]', legacy_mode: true)).to eq([{ "foo" => "bar" }]) + end + + it "raises an error on a string" do + expect { subject.parse('"foo"', legacy_mode: true) }.to raise_error(JSON::ParserError) + end + + it "raises an error on a true bool" do + expect { subject.parse("true", legacy_mode: true) }.to raise_error(JSON::ParserError) + end + + it "raises an error on a false bool" do + expect { subject.parse("false", legacy_mode: true) }.to raise_error(JSON::ParserError) + end end - it "raises an error on a false bool" do - expect { subject.parse("false") }.to raise_error(JSON::ParserError) + context "feature flag is disabled" do + before do + stub_feature_flags(json_wrapper_legacy_mode: false) + end + + it "parses an object" do + expect(subject.parse('{ "foo": "bar" }', legacy_mode: true)).to eq({ "foo" => "bar" }) + end + + it "parses an array" do + expect(subject.parse('[{ "foo": "bar" }]', legacy_mode: true)).to eq([{ "foo" => "bar" }]) + end + + it "parses a string" do + expect(subject.parse('"foo"', legacy_mode: true)).to eq("foo") + end + + it "parses a true bool" do + expect(subject.parse("true", legacy_mode: true)).to be(true) + end + + it "parses a false bool" do + expect(subject.parse("false", legacy_mode: true)).to be(false) + end end end describe ".parse!" 
do - it "parses an object" do - expect(subject.parse!('{ "foo": "bar" }')).to eq({ "foo" => "bar" }) - end + context "legacy_mode is disabled by default" do + it "parses an object" do + expect(subject.parse!('{ "foo": "bar" }')).to eq({ "foo" => "bar" }) + end - it "parses an array" do - expect(subject.parse!('[{ "foo": "bar" }]')).to eq([{ "foo" => "bar" }]) - end + it "parses an array" do + expect(subject.parse!('[{ "foo": "bar" }]')).to eq([{ "foo" => "bar" }]) + end + + it "parses a string" do + expect(subject.parse!('"foo"', legacy_mode: false)).to eq("foo") + end - it "raises an error on a string" do - expect { subject.parse!('"foo"') }.to raise_error(JSON::ParserError) + it "parses a true bool" do + expect(subject.parse!("true", legacy_mode: false)).to be(true) + end + + it "parses a false bool" do + expect(subject.parse!("false", legacy_mode: false)).to be(false) + end end - it "raises an error on a true bool" do - expect { subject.parse!("true") }.to raise_error(JSON::ParserError) + context "legacy_mode is enabled" do + it "parses an object" do + expect(subject.parse!('{ "foo": "bar" }', legacy_mode: true)).to eq({ "foo" => "bar" }) + end + + it "parses an array" do + expect(subject.parse!('[{ "foo": "bar" }]', legacy_mode: true)).to eq([{ "foo" => "bar" }]) + end + + it "raises an error on a string" do + expect { subject.parse!('"foo"', legacy_mode: true) }.to raise_error(JSON::ParserError) + end + + it "raises an error on a true bool" do + expect { subject.parse!("true", legacy_mode: true) }.to raise_error(JSON::ParserError) + end + + it "raises an error on a false bool" do + expect { subject.parse!("false", legacy_mode: true) }.to raise_error(JSON::ParserError) + end end - it "raises an error on a false bool" do - expect { subject.parse!("false") }.to raise_error(JSON::ParserError) + context "feature flag is disabled" do + before do + stub_feature_flags(json_wrapper_legacy_mode: false) + end + + it "parses an object" do + expect(subject.parse!('{ "foo": "bar" }', legacy_mode: true)).to eq({ "foo" => "bar" }) + end + + it "parses an array" do + expect(subject.parse!('[{ "foo": "bar" }]', legacy_mode: true)).to eq([{ "foo" => "bar" }]) + end + + it "parses a string" do + expect(subject.parse!('"foo"', legacy_mode: true)).to eq("foo") + end + + it "parses a true bool" do + expect(subject.parse!("true", legacy_mode: true)).to be(true) + end + + it "parses a false bool" do + expect(subject.parse!("false", legacy_mode: true)).to be(false) + end end end diff --git a/spec/lib/gitlab/kubernetes/helm/api_spec.rb b/spec/lib/gitlab/kubernetes/helm/api_spec.rb index 8147990ecc3..1f925fd45af 100644 --- a/spec/lib/gitlab/kubernetes/helm/api_spec.rb +++ b/spec/lib/gitlab/kubernetes/helm/api_spec.rb @@ -92,7 +92,6 @@ describe Gitlab::Kubernetes::Helm::API do allow(client).to receive(:get_config_map).and_return(nil) allow(client).to receive(:create_config_map).and_return(nil) allow(client).to receive(:create_service_account).and_return(nil) - allow(client).to receive(:create_cluster_role_binding).and_return(nil) allow(client).to receive(:delete_pod).and_return(nil) allow(namespace).to receive(:ensure_exists!).once end @@ -136,7 +135,7 @@ describe Gitlab::Kubernetes::Helm::API do context 'without a service account' do it 'does not create a service account on kubeclient' do expect(client).not_to receive(:create_service_account) - expect(client).not_to receive(:create_cluster_role_binding) + expect(client).not_to receive(:update_cluster_role_binding) subject.install(command) end @@ -160,15 +159,14 
@@ describe Gitlab::Kubernetes::Helm::API do ) end - context 'service account and cluster role binding does not exist' do + context 'service account does not exist' do before do expect(client).to receive(:get_service_account).with('tiller', 'gitlab-managed-apps').and_raise(Kubeclient::ResourceNotFoundError.new(404, 'Not found', nil)) - expect(client).to receive(:get_cluster_role_binding).with('tiller-admin').and_raise(Kubeclient::ResourceNotFoundError.new(404, 'Not found', nil)) end it 'creates a service account, followed the cluster role binding on kubeclient' do expect(client).to receive(:create_service_account).with(service_account_resource).once.ordered - expect(client).to receive(:create_cluster_role_binding).with(cluster_role_binding_resource).once.ordered + expect(client).to receive(:update_cluster_role_binding).with(cluster_role_binding_resource).once.ordered subject.install(command) end @@ -177,21 +175,6 @@ describe Gitlab::Kubernetes::Helm::API do context 'service account already exists' do before do expect(client).to receive(:get_service_account).with('tiller', 'gitlab-managed-apps').and_return(service_account_resource) - expect(client).to receive(:get_cluster_role_binding).with('tiller-admin').and_raise(Kubeclient::ResourceNotFoundError.new(404, 'Not found', nil)) - end - - it 'updates the service account, followed by creating the cluster role binding' do - expect(client).to receive(:update_service_account).with(service_account_resource).once.ordered - expect(client).to receive(:create_cluster_role_binding).with(cluster_role_binding_resource).once.ordered - - subject.install(command) - end - end - - context 'service account and cluster role binding already exists' do - before do - expect(client).to receive(:get_service_account).with('tiller', 'gitlab-managed-apps').and_return(service_account_resource) - expect(client).to receive(:get_cluster_role_binding).with('tiller-admin').and_return(cluster_role_binding_resource) end it 'updates the service account, followed by creating the cluster role binding' do @@ -216,7 +199,7 @@ describe Gitlab::Kubernetes::Helm::API do context 'legacy abac cluster' do it 'does not create a service account on kubeclient' do expect(client).not_to receive(:create_service_account) - expect(client).not_to receive(:create_cluster_role_binding) + expect(client).not_to receive(:update_cluster_role_binding) subject.install(command) end diff --git a/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb index a11a9d08503..2a4a911cf38 100644 --- a/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb +++ b/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb @@ -3,6 +3,10 @@ require 'spec_helper' describe Gitlab::Kubernetes::Helm::BaseCommand do + subject(:base_command) do + test_class.new(rbac) + end + let(:application) { create(:clusters_applications_helm) } let(:rbac) { false } @@ -30,87 +34,17 @@ describe Gitlab::Kubernetes::Helm::BaseCommand do end end - let(:base_command) do - test_class.new(rbac) - end - - subject { base_command } - - it_behaves_like 'helm commands' do + it_behaves_like 'helm command generator' do let(:commands) { '' } end - describe '#pod_resource' do - subject { base_command.pod_resource } - - it 'returns a kubeclient resoure with pod content for application' do - is_expected.to be_an_instance_of ::Kubeclient::Resource - end - - context 'when rbac is true' do - let(:rbac) { true } - - it 'also returns a kubeclient resource' do - is_expected.to be_an_instance_of ::Kubeclient::Resource 
- end - end - end - describe '#pod_name' do subject { base_command.pod_name } it { is_expected.to eq('install-test-class-name') } end - describe '#service_account_resource' do - let(:resource) do - Kubeclient::Resource.new(metadata: { name: 'tiller', namespace: 'gitlab-managed-apps' }) - end - - subject { base_command.service_account_resource } - - context 'rbac is enabled' do - let(:rbac) { true } - - it 'generates a Kubeclient resource for the tiller ServiceAccount' do - is_expected.to eq(resource) - end - end - - context 'rbac is not enabled' do - let(:rbac) { false } - - it 'generates nothing' do - is_expected.to be_nil - end - end - end - - describe '#cluster_role_binding_resource' do - let(:resource) do - Kubeclient::Resource.new( - metadata: { name: 'tiller-admin' }, - roleRef: { apiGroup: 'rbac.authorization.k8s.io', kind: 'ClusterRole', name: 'cluster-admin' }, - subjects: [{ kind: 'ServiceAccount', name: 'tiller', namespace: 'gitlab-managed-apps' }] - ) - end - - subject { base_command.cluster_role_binding_resource } - - context 'rbac is enabled' do - let(:rbac) { true } - - it 'generates a Kubeclient resource for the ClusterRoleBinding for tiller' do - is_expected.to eq(resource) - end - end - - context 'rbac is not enabled' do - let(:rbac) { false } - - it 'generates nothing' do - is_expected.to be_nil - end - end + it_behaves_like 'helm command' do + let(:command) { base_command } end end diff --git a/spec/lib/gitlab/kubernetes/helm/delete_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/delete_command_spec.rb index 82e15864687..95d60c18d56 100644 --- a/spec/lib/gitlab/kubernetes/helm/delete_command_spec.rb +++ b/spec/lib/gitlab/kubernetes/helm/delete_command_spec.rb @@ -3,14 +3,13 @@ require 'spec_helper' describe Gitlab::Kubernetes::Helm::DeleteCommand do + subject(:delete_command) { described_class.new(name: app_name, rbac: rbac, files: files) } + let(:app_name) { 'app-name' } let(:rbac) { true } let(:files) { {} } - let(:delete_command) { described_class.new(name: app_name, rbac: rbac, files: files) } - - subject { delete_command } - it_behaves_like 'helm commands' do + it_behaves_like 'helm command generator' do let(:commands) do <<~EOS export HELM_HOST="localhost:44134" @@ -26,7 +25,7 @@ describe Gitlab::Kubernetes::Helm::DeleteCommand do stub_feature_flags(managed_apps_local_tiller: false) end - it_behaves_like 'helm commands' do + it_behaves_like 'helm command generator' do let(:commands) do <<~EOS helm init --upgrade @@ -48,7 +47,7 @@ describe Gitlab::Kubernetes::Helm::DeleteCommand do EOS end - it_behaves_like 'helm commands' do + it_behaves_like 'helm command generator' do let(:commands) do <<~EOS helm init --upgrade @@ -67,29 +66,19 @@ describe Gitlab::Kubernetes::Helm::DeleteCommand do end end - describe '#pod_resource' do - subject { delete_command.pod_resource } - - context 'rbac is enabled' do - let(:rbac) { true } - - it 'generates a pod that uses the tiller serviceAccountName' do - expect(subject.spec.serviceAccountName).to eq('tiller') - end - end - - context 'rbac is not enabled' do - let(:rbac) { false } - - it 'generates a pod that uses the default serviceAccountName' do - expect(subject.spec.serviceAcccountName).to be_nil - end - end - end - describe '#pod_name' do subject { delete_command.pod_name } it { is_expected.to eq('uninstall-app-name') } end + + it_behaves_like 'helm command' do + let(:command) { delete_command } + end + + describe '#delete_command' do + it 'deletes the release' do + expect(subject.delete_command).to eq('helm delete --purge 
app-name') + end + end end diff --git a/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb index 13021a08f9f..05d9b63d12b 100644 --- a/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb +++ b/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb @@ -3,25 +3,24 @@ require 'spec_helper' describe Gitlab::Kubernetes::Helm::InitCommand do + subject(:init_command) { described_class.new(name: application.name, files: files, rbac: rbac) } + let(:application) { create(:clusters_applications_helm) } let(:rbac) { false } let(:files) { {} } - let(:init_command) { described_class.new(name: application.name, files: files, rbac: rbac) } - let(:commands) do - <<~EOS - helm init --tiller-tls --tiller-tls-verify --tls-ca-cert /data/helm/helm/config/ca.pem --tiller-tls-cert /data/helm/helm/config/cert.pem --tiller-tls-key /data/helm/helm/config/key.pem - EOS + it_behaves_like 'helm command generator' do + let(:commands) do + <<~EOS + helm init --tiller-tls --tiller-tls-verify --tls-ca-cert /data/helm/helm/config/ca.pem --tiller-tls-cert /data/helm/helm/config/cert.pem --tiller-tls-key /data/helm/helm/config/key.pem + EOS + end end - subject { init_command } - - it_behaves_like 'helm commands' - context 'on a rbac-enabled cluster' do let(:rbac) { true } - it_behaves_like 'helm commands' do + it_behaves_like 'helm command generator' do let(:commands) do <<~EOS helm init --tiller-tls --tiller-tls-verify --tls-ca-cert /data/helm/helm/config/ca.pem --tiller-tls-cert /data/helm/helm/config/cert.pem --tiller-tls-key /data/helm/helm/config/key.pem --service-account tiller @@ -30,57 +29,7 @@ describe Gitlab::Kubernetes::Helm::InitCommand do end end - describe '#rbac?' do - subject { init_command.rbac? } - - context 'rbac is enabled' do - let(:rbac) { true } - - it { is_expected.to be_truthy } - end - - context 'rbac is not enabled' do - let(:rbac) { false } - - it { is_expected.to be_falsey } - end - end - - describe '#config_map_resource' do - let(:metadata) do - { - name: 'values-content-configuration-helm', - namespace: 'gitlab-managed-apps', - labels: { name: 'values-content-configuration-helm' } - } - end - - let(:resource) { ::Kubeclient::Resource.new(metadata: metadata, data: files) } - - subject { init_command.config_map_resource } - - it 'returns a KubeClient resource with config map content for the application' do - is_expected.to eq(resource) - end - end - - describe '#pod_resource' do - subject { init_command.pod_resource } - - context 'rbac is enabled' do - let(:rbac) { true } - - it 'generates a pod that uses the tiller serviceAccountName' do - expect(subject.spec.serviceAccountName).to eq('tiller') - end - end - - context 'rbac is not enabled' do - let(:rbac) { false } - - it 'generates a pod that uses the default serviceAccountName' do - expect(subject.spec.serviceAcccountName).to be_nil - end - end + it_behaves_like 'helm command' do + let(:command) { init_command } end end diff --git a/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb index a5ed8f57bf3..abd29e97505 100644 --- a/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb +++ b/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb @@ -3,14 +3,7 @@ require 'spec_helper' describe Gitlab::Kubernetes::Helm::InstallCommand do - let(:files) { { 'ca.pem': 'some file content' } } - let(:repository) { 'https://repository.example.com' } - let(:rbac) { false } - let(:version) { '1.2.3' } - let(:preinstall) { nil } - 
let(:postinstall) { nil } - - let(:install_command) do + subject(:install_command) do described_class.new( name: 'app-name', chart: 'chart-name', @@ -23,9 +16,14 @@ describe Gitlab::Kubernetes::Helm::InstallCommand do ) end - subject { install_command } + let(:files) { { 'ca.pem': 'some file content' } } + let(:repository) { 'https://repository.example.com' } + let(:rbac) { false } + let(:version) { '1.2.3' } + let(:preinstall) { nil } + let(:postinstall) { nil } - it_behaves_like 'helm commands' do + it_behaves_like 'helm command generator' do let(:commands) do <<~EOS export HELM_HOST="localhost:44134" @@ -66,7 +64,7 @@ describe Gitlab::Kubernetes::Helm::InstallCommand do EOS end - it_behaves_like 'helm commands' do + it_behaves_like 'helm command generator' do let(:commands) do <<~EOS helm init --upgrade @@ -97,7 +95,7 @@ describe Gitlab::Kubernetes::Helm::InstallCommand do context 'when rbac is true' do let(:rbac) { true } - it_behaves_like 'helm commands' do + it_behaves_like 'helm command generator' do let(:commands) do <<~EOS export HELM_HOST="localhost:44134" @@ -128,7 +126,7 @@ describe Gitlab::Kubernetes::Helm::InstallCommand do context 'when there is a pre-install script' do let(:preinstall) { ['/bin/date', '/bin/true'] } - it_behaves_like 'helm commands' do + it_behaves_like 'helm command generator' do let(:commands) do <<~EOS export HELM_HOST="localhost:44134" @@ -161,7 +159,7 @@ describe Gitlab::Kubernetes::Helm::InstallCommand do context 'when there is a post-install script' do let(:postinstall) { ['/bin/date', "/bin/false\n"] } - it_behaves_like 'helm commands' do + it_behaves_like 'helm command generator' do let(:commands) do <<~EOS export HELM_HOST="localhost:44134" @@ -194,7 +192,7 @@ describe Gitlab::Kubernetes::Helm::InstallCommand do context 'when there is no ca.pem file' do let(:files) { { 'file.txt': 'some content' } } - it_behaves_like 'helm commands' do + it_behaves_like 'helm command generator' do let(:commands) do <<~EOS export HELM_HOST="localhost:44134" @@ -225,7 +223,7 @@ describe Gitlab::Kubernetes::Helm::InstallCommand do context 'when there is no version' do let(:version) { nil } - it_behaves_like 'helm commands' do + it_behaves_like 'helm command generator' do let(:commands) do <<~EOS export HELM_HOST="localhost:44134" @@ -252,57 +250,7 @@ describe Gitlab::Kubernetes::Helm::InstallCommand do end end - describe '#rbac?' do - subject { install_command.rbac? 
} - - context 'rbac is enabled' do - let(:rbac) { true } - - it { is_expected.to be_truthy } - end - - context 'rbac is not enabled' do - let(:rbac) { false } - - it { is_expected.to be_falsey } - end - end - - describe '#pod_resource' do - subject { install_command.pod_resource } - - context 'rbac is enabled' do - let(:rbac) { true } - - it 'generates a pod that uses the tiller serviceAccountName' do - expect(subject.spec.serviceAccountName).to eq('tiller') - end - end - - context 'rbac is not enabled' do - let(:rbac) { false } - - it 'generates a pod that uses the default serviceAccountName' do - expect(subject.spec.serviceAcccountName).to be_nil - end - end - end - - describe '#config_map_resource' do - let(:metadata) do - { - name: "values-content-configuration-app-name", - namespace: 'gitlab-managed-apps', - labels: { name: "values-content-configuration-app-name" } - } - end - - let(:resource) { ::Kubeclient::Resource.new(metadata: metadata, data: files) } - - subject { install_command.config_map_resource } - - it 'returns a KubeClient resource with config map content for the application' do - is_expected.to eq(resource) - end + it_behaves_like 'helm command' do + let(:command) { install_command } end end diff --git a/spec/lib/gitlab/kubernetes/helm/parsers/list_v2_spec.rb b/spec/lib/gitlab/kubernetes/helm/parsers/list_v2_spec.rb new file mode 100644 index 00000000000..0ad5dc189c0 --- /dev/null +++ b/spec/lib/gitlab/kubernetes/helm/parsers/list_v2_spec.rb @@ -0,0 +1,100 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' + +describe Gitlab::Kubernetes::Helm::Parsers::ListV2 do + let(:valid_file_contents) do + <<~EOF + { + "Next": "", + "Releases": [ + { + "Name": "certmanager", + "Revision": 2, + "Updated": "Sun Mar 29 06:55:42 2020", + "Status": "DEPLOYED", + "Chart": "cert-manager-v0.10.1", + "AppVersion": "v0.10.1", + "Namespace": "gitlab-managed-apps" + }, + { + "Name": "certmanager-crds", + "Revision": 2, + "Updated": "Sun Mar 29 06:55:32 2020", + "Status": "DEPLOYED", + "Chart": "cert-manager-crds-v0.2.0", + "AppVersion": "release-0.10", + "Namespace": "gitlab-managed-apps" + }, + { + "Name": "certmanager-issuer", + "Revision": 1, + "Updated": "Tue Feb 18 10:04:04 2020", + "Status": "FAILED", + "Chart": "cert-manager-issuer-v0.1.0", + "AppVersion": "", + "Namespace": "gitlab-managed-apps" + }, + { + "Name": "runner", + "Revision": 2, + "Updated": "Sun Mar 29 07:01:01 2020", + "Status": "DEPLOYED", + "Chart": "gitlab-runner-0.14.0", + "AppVersion": "12.8.0", + "Namespace": "gitlab-managed-apps" + } + ] + } + EOF + end + + describe '#initialize' do + it 'initializes without error' do + expect do + described_class.new(valid_file_contents) + end.not_to raise_error + end + + it 'raises an error on invalid JSON' do + expect do + described_class.new('') + end.to raise_error(described_class::ParserError) + end + end + + describe '#releases' do + subject(:list_v2) { described_class.new(valid_file_contents) } + + it 'returns list of releases' do + expect(list_v2.releases).to match([ + a_hash_including('Name' => 'certmanager', 'Status' => 'DEPLOYED'), + a_hash_including('Name' => 'certmanager-crds', 'Status' => 'DEPLOYED'), + a_hash_including('Name' => 'certmanager-issuer', 'Status' => 'FAILED'), + a_hash_including('Name' => 'runner', 'Status' => 'DEPLOYED') + ]) + end + + context 'empty Releases' do + let(:valid_file_contents) { '{}' } + + it 'returns an empty array' do + expect(list_v2.releases).to eq([]) + end + end + + context 'invalid Releases' do + 
let(:invalid_file_contents) do + '{ "Releases" : ["a", "b"] }' + end + + subject(:list_v2) { described_class.new(invalid_file_contents) } + + it 'raises an error' do + expect do + list_v2.releases + end.to raise_error(described_class::ParserError, 'Invalid format for Releases') + end + end + end +end diff --git a/spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb index e69570f5371..eee842fa7d6 100644 --- a/spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb +++ b/spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb @@ -33,7 +33,7 @@ describe Gitlab::Kubernetes::Helm::PatchCommand do EOS end - it_behaves_like 'helm commands' do + it_behaves_like 'helm command generator' do let(:commands) do <<~EOS helm init --upgrade @@ -57,7 +57,7 @@ describe Gitlab::Kubernetes::Helm::PatchCommand do end end - it_behaves_like 'helm commands' do + it_behaves_like 'helm command generator' do let(:commands) do <<~EOS export HELM_HOST="localhost:44134" @@ -83,7 +83,7 @@ describe Gitlab::Kubernetes::Helm::PatchCommand do context 'when rbac is true' do let(:rbac) { true } - it_behaves_like 'helm commands' do + it_behaves_like 'helm command generator' do let(:commands) do <<~EOS export HELM_HOST="localhost:44134" @@ -110,7 +110,7 @@ describe Gitlab::Kubernetes::Helm::PatchCommand do context 'when there is no ca.pem file' do let(:files) { { 'file.txt': 'some content' } } - it_behaves_like 'helm commands' do + it_behaves_like 'helm command generator' do let(:commands) do <<~EOS export HELM_HOST="localhost:44134" @@ -134,69 +134,19 @@ describe Gitlab::Kubernetes::Helm::PatchCommand do end end - describe '#pod_name' do - subject { patch_command.pod_name } - - it { is_expected.to eq 'install-app-name' } - end - context 'when there is no version' do let(:version) { nil } it { expect { patch_command }.to raise_error(ArgumentError, 'version is required') } end - describe '#rbac?' do - subject { patch_command.rbac? 
} - - context 'rbac is enabled' do - let(:rbac) { true } - - it { is_expected.to be_truthy } - end - - context 'rbac is not enabled' do - let(:rbac) { false } - - it { is_expected.to be_falsey } - end - end - - describe '#pod_resource' do - subject { patch_command.pod_resource } - - context 'rbac is enabled' do - let(:rbac) { true } - - it 'generates a pod that uses the tiller serviceAccountName' do - expect(subject.spec.serviceAccountName).to eq('tiller') - end - end - - context 'rbac is not enabled' do - let(:rbac) { false } + describe '#pod_name' do + subject { patch_command.pod_name } - it 'generates a pod that uses the default serviceAccountName' do - expect(subject.spec.serviceAcccountName).to be_nil - end - end + it { is_expected.to eq 'install-app-name' } end - describe '#config_map_resource' do - let(:metadata) do - { - name: "values-content-configuration-app-name", - namespace: 'gitlab-managed-apps', - labels: { name: "values-content-configuration-app-name" } - } - end - - let(:resource) { ::Kubeclient::Resource.new(metadata: metadata, data: files) } - - subject { patch_command.config_map_resource } - - it 'returns a KubeClient resource with config map content for the application' do - is_expected.to eq(resource) - end + it_behaves_like 'helm command' do + let(:command) { patch_command } end end diff --git a/spec/lib/gitlab/kubernetes/helm/pod_spec.rb b/spec/lib/gitlab/kubernetes/helm/pod_spec.rb index 3c62219a9a5..ea32ac96213 100644 --- a/spec/lib/gitlab/kubernetes/helm/pod_spec.rb +++ b/spec/lib/gitlab/kubernetes/helm/pod_spec.rb @@ -32,7 +32,7 @@ describe Gitlab::Kubernetes::Helm::Pod do it 'generates the appropriate specifications for the container' do container = subject.generate.spec.containers.first expect(container.name).to eq('helm') - expect(container.image).to eq('registry.gitlab.com/gitlab-org/cluster-integration/helm-install-image/releases/2.16.3-kube-1.13.12') + expect(container.image).to eq('registry.gitlab.com/gitlab-org/cluster-integration/helm-install-image/releases/2.16.6-kube-1.13.12') expect(container.env.count).to eq(3) expect(container.env.map(&:name)).to match_array([:HELM_VERSION, :TILLER_NAMESPACE, :COMMAND_SCRIPT]) expect(container.command).to match_array(["/bin/sh"]) diff --git a/spec/lib/gitlab/kubernetes/helm/reset_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/reset_command_spec.rb index 2a89b04723d..981bb4e4abf 100644 --- a/spec/lib/gitlab/kubernetes/helm/reset_command_spec.rb +++ b/spec/lib/gitlab/kubernetes/helm/reset_command_spec.rb @@ -3,14 +3,13 @@ require 'spec_helper' describe Gitlab::Kubernetes::Helm::ResetCommand do + subject(:reset_command) { described_class.new(name: name, rbac: rbac, files: files) } + let(:rbac) { true } let(:name) { 'helm' } let(:files) { {} } - let(:reset_command) { described_class.new(name: name, rbac: rbac, files: files) } - - subject { reset_command } - it_behaves_like 'helm commands' do + it_behaves_like 'helm command generator' do let(:commands) do <<~EOS helm reset @@ -23,7 +22,7 @@ describe Gitlab::Kubernetes::Helm::ResetCommand do context 'when there is a ca.pem file' do let(:files) { { 'ca.pem': 'some file content' } } - it_behaves_like 'helm commands' do + it_behaves_like 'helm command generator' do let(:commands) do <<~EOS1.squish + "\n" + <<~EOS2 helm reset @@ -39,29 +38,13 @@ describe Gitlab::Kubernetes::Helm::ResetCommand do end end - describe '#pod_resource' do - subject { reset_command.pod_resource } - - context 'rbac is enabled' do - let(:rbac) { true } - - it 'generates a pod that uses the tiller 
serviceAccountName' do - expect(subject.spec.serviceAccountName).to eq('tiller') - end - end - - context 'rbac is not enabled' do - let(:rbac) { false } - - it 'generates a pod that uses the default serviceAccountName' do - expect(subject.spec.serviceAcccountName).to be_nil - end - end - end - describe '#pod_name' do subject { reset_command.pod_name } it { is_expected.to eq('uninstall-helm') } end + + it_behaves_like 'helm command' do + let(:command) { reset_command } + end end diff --git a/spec/lib/gitlab/kubernetes/kube_client_spec.rb b/spec/lib/gitlab/kubernetes/kube_client_spec.rb index 1959fbca33b..32597aa4f5a 100644 --- a/spec/lib/gitlab/kubernetes/kube_client_spec.rb +++ b/spec/lib/gitlab/kubernetes/kube_client_spec.rb @@ -64,6 +64,45 @@ describe Gitlab::Kubernetes::KubeClient do end end + describe '.graceful_request' do + context 'successful' do + before do + allow(client).to receive(:foo).and_return(true) + end + + it 'returns connected status and foo response' do + result = described_class.graceful_request(1) { client.foo } + + expect(result).to eq({ status: :connected, response: true }) + end + end + + context 'errored' do + using RSpec::Parameterized::TableSyntax + + where(:error, :error_status) do + SocketError | :unreachable + OpenSSL::X509::CertificateError | :authentication_failure + StandardError | :unknown_failure + Kubeclient::HttpError.new(408, "timed out", nil) | :unreachable + Kubeclient::HttpError.new(408, "timeout", nil) | :unreachable + Kubeclient::HttpError.new(408, "", nil) | :authentication_failure + end + + with_them do + before do + allow(client).to receive(:foo).and_raise(error) + end + + it 'returns error status' do + result = described_class.graceful_request(1) { client.foo } + + expect(result).to eq({ status: error_status }) + end + end + end + end + describe '#initialize' do shared_examples 'local address' do it 'blocks local addresses' do @@ -174,10 +213,39 @@ describe Gitlab::Kubernetes::KubeClient do end end + describe '#networking_client' do + subject { client.networking_client } + + it_behaves_like 'a Kubeclient' + + it 'has the networking API group endpoint' do + expect(subject.api_endpoint.to_s).to match(%r{\/apis\/networking.k8s.io\Z}) + end + + it 'has the api_version' do + expect(subject.instance_variable_get(:@api_version)).to eq('v1') + end + end + + describe '#metrics_client' do + subject { client.metrics_client } + + it_behaves_like 'a Kubeclient' + + it 'has the metrics API group endpoint' do + expect(subject.api_endpoint.to_s).to match(%r{\/apis\/metrics.k8s.io\Z}) + end + + it 'has the api_version' do + expect(subject.instance_variable_get(:@api_version)).to eq('v1beta1') + end + end + describe 'core API' do let(:core_client) { client.core_client } [ + :get_nodes, :get_pods, :get_secrets, :get_config_map, @@ -220,8 +288,6 @@ describe Gitlab::Kubernetes::KubeClient do :create_role, :get_role, :update_role, - :create_cluster_role_binding, - :get_cluster_role_binding, :update_cluster_role_binding ].each do |method| describe "##{method}" do @@ -290,6 +356,30 @@ describe Gitlab::Kubernetes::KubeClient do end end + describe 'networking API group' do + let(:networking_client) { client.networking_client } + + [ + :create_network_policy, + :get_network_policies, + :update_network_policy, + :delete_network_policy + ].each do |method| + describe "##{method}" do + include_examples 'redirection not allowed', method + include_examples 'dns rebinding not allowed', method + + it 'delegates to the networking client' do + expect(client).to 
delegate_method(method).to(:networking_client) + end + + it 'responds to the method' do + expect(client).to respond_to method + end + end + end + end + describe 'non-entity methods' do it 'does not proxy for non-entity methods' do expect(client).not_to respond_to :proxy_url @@ -316,6 +406,16 @@ describe Gitlab::Kubernetes::KubeClient do end end + shared_examples 'create_or_update method using put' do + let(:update_method) { "update_#{resource_type}" } + + it 'calls the update method' do + expect(client).to receive(update_method).with(resource) + + subject + end + end + shared_examples 'create_or_update method' do let(:get_method) { "get_#{resource_type}" } let(:update_method) { "update_#{resource_type}" } @@ -355,7 +455,7 @@ describe Gitlab::Kubernetes::KubeClient do subject { client.create_or_update_cluster_role_binding(resource) } - it_behaves_like 'create_or_update method' + it_behaves_like 'create_or_update method using put' end describe '#create_or_update_role_binding' do @@ -367,7 +467,7 @@ describe Gitlab::Kubernetes::KubeClient do subject { client.create_or_update_role_binding(resource) } - it_behaves_like 'create_or_update method' + it_behaves_like 'create_or_update method using put' end describe '#create_or_update_service_account' do diff --git a/spec/lib/gitlab/kubernetes/network_policy_spec.rb b/spec/lib/gitlab/kubernetes/network_policy_spec.rb new file mode 100644 index 00000000000..f23d215a9a1 --- /dev/null +++ b/spec/lib/gitlab/kubernetes/network_policy_spec.rb @@ -0,0 +1,224 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::Kubernetes::NetworkPolicy do + let(:policy) do + described_class.new( + name: name, + namespace: namespace, + creation_timestamp: '2020-04-14T00:08:30Z', + pod_selector: pod_selector, + policy_types: %w(Ingress Egress), + ingress: ingress, + egress: egress + ) + end + + let(:name) { 'example-name' } + let(:namespace) { 'example-namespace' } + let(:pod_selector) { { matchLabels: { role: 'db' } } } + + let(:ingress) do + [ + { + from: [ + { namespaceSelector: { matchLabels: { project: 'myproject' } } } + ] + } + ] + end + + let(:egress) do + [ + { + ports: [{ port: 5978 }] + } + ] + end + + describe '.from_yaml' do + let(:manifest) do + <<-POLICY +apiVersion: networking.k8s.io/v1 +kind: NetworkPolicy +metadata: + name: example-name + namespace: example-namespace +spec: + podSelector: + matchLabels: + role: db + policyTypes: + - Ingress + ingress: + - from: + - namespaceSelector: + matchLabels: + project: myproject + POLICY + end + let(:resource) do + ::Kubeclient::Resource.new( + metadata: { name: name, namespace: namespace }, + spec: { podSelector: pod_selector, policyTypes: %w(Ingress), ingress: ingress, egress: nil } + ) + end + + subject { Gitlab::Kubernetes::NetworkPolicy.from_yaml(manifest)&.generate } + + it { is_expected.to eq(resource) } + + context 'with nil manifest' do + let(:manifest) { nil } + + it { is_expected.to be_nil } + end + + context 'with invalid manifest' do + let(:manifest) { "\tfoo: bar" } + + it { is_expected.to be_nil } + end + + context 'with manifest without metadata' do + let(:manifest) do + <<-POLICY +apiVersion: networking.k8s.io/v1 +kind: NetworkPolicy +spec: + podSelector: + matchLabels: + role: db + policyTypes: + - Ingress + ingress: + - from: + - namespaceSelector: + matchLabels: + project: myproject + POLICY + end + + it { is_expected.to be_nil } + end + + context 'with manifest without spec' do + let(:manifest) do + <<-POLICY +apiVersion: networking.k8s.io/v1 +kind: NetworkPolicy 
+metadata: + name: example-name + namespace: example-namespace + POLICY + end + + it { is_expected.to be_nil } + end + + context 'with disallowed class' do + let(:manifest) do + <<-POLICY +apiVersion: networking.k8s.io/v1 +kind: NetworkPolicy +metadata: + name: example-name + namespace: example-namespace + creationTimestamp: 2020-04-14T00:08:30Z +spec: + podSelector: + matchLabels: + role: db + policyTypes: + - Ingress + ingress: + - from: + - namespaceSelector: + matchLabels: + project: myproject + POLICY + end + + it { is_expected.to be_nil } + end + end + + describe '.from_resource' do + let(:resource) do + ::Kubeclient::Resource.new( + metadata: { name: name, namespace: namespace, creationTimestamp: '2020-04-14T00:08:30Z', resourceVersion: '4990' }, + spec: { podSelector: pod_selector, policyTypes: %w(Ingress), ingress: ingress, egress: nil } + ) + end + let(:generated_resource) do + ::Kubeclient::Resource.new( + metadata: { name: name, namespace: namespace }, + spec: { podSelector: pod_selector, policyTypes: %w(Ingress), ingress: ingress, egress: nil } + ) + end + + subject { Gitlab::Kubernetes::NetworkPolicy.from_resource(resource)&.generate } + + it { is_expected.to eq(generated_resource) } + + context 'with nil resource' do + let(:resource) { nil } + + it { is_expected.to be_nil } + end + + context 'with resource without metadata' do + let(:resource) do + ::Kubeclient::Resource.new( + spec: { podSelector: pod_selector, policyTypes: %w(Ingress), ingress: ingress, egress: nil } + ) + end + + it { is_expected.to be_nil } + end + + context 'with resource without spec' do + let(:resource) do + ::Kubeclient::Resource.new( + metadata: { name: name, namespace: namespace, uid: '128cf288-7de4-11ea-aceb-42010a800089', resourceVersion: '4990' } + ) + end + + it { is_expected.to be_nil } + end + end + + describe '#generate' do + let(:resource) do + ::Kubeclient::Resource.new( + metadata: { name: name, namespace: namespace }, + spec: { podSelector: pod_selector, policyTypes: %w(Ingress Egress), ingress: ingress, egress: egress } + ) + end + + subject { policy.generate } + + it { is_expected.to eq(resource) } + end + + describe '#as_json' do + let(:json_policy) do + { + name: name, + namespace: namespace, + creation_timestamp: '2020-04-14T00:08:30Z', + manifest: YAML.dump( + { + metadata: { name: name, namespace: namespace }, + spec: { podSelector: pod_selector, policyTypes: %w(Ingress Egress), ingress: ingress, egress: egress } + }.deep_stringify_keys + ) + } + end + + subject { policy.as_json } + + it { is_expected.to eq(json_policy) } + end +end diff --git a/spec/lib/gitlab/legacy_github_import/importer_spec.rb b/spec/lib/gitlab/legacy_github_import/importer_spec.rb index af0bffa91a5..8cc3fd8efbd 100644 --- a/spec/lib/gitlab/legacy_github_import/importer_spec.rb +++ b/spec/lib/gitlab/legacy_github_import/importer_spec.rb @@ -294,6 +294,7 @@ describe Gitlab::LegacyGithubImport::Importer do it_behaves_like 'Gitlab::LegacyGithubImport::Importer#execute' do let(:expected_not_called) { [:import_releases, [:import_comments, :pull_requests]] } end + it_behaves_like 'Gitlab::LegacyGithubImport::Importer#execute an error occurs' it_behaves_like 'Gitlab::LegacyGithubImport unit-testing' diff --git a/spec/lib/gitlab/logging/cloudflare_helper_spec.rb b/spec/lib/gitlab/logging/cloudflare_helper_spec.rb new file mode 100644 index 00000000000..2b73fb7bc1c --- /dev/null +++ b/spec/lib/gitlab/logging/cloudflare_helper_spec.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe 
Gitlab::Logging::CloudflareHelper do + let(:helper) do + Class.new do + include Gitlab::Logging::CloudflareHelper + end.new + end + + describe '#store_cloudflare_headers!' do + let(:payload) { {} } + let(:env) { {} } + let(:request) { ActionDispatch::Request.new(env) } + + before do + request.headers.merge!(headers) + end + + context 'with normal headers' do + let(:headers) { { 'Cf-Ray' => '592f0aa22b3dea38-IAD', 'Cf-Request-Id' => SecureRandom.hex } } + + it 'adds Cf-Ray-Id and Cf-Request-Id' do + helper.store_cloudflare_headers!(payload, request) + + expect(payload[:cf_ray]).to eq(headers['Cf-Ray']) + expect(payload[:cf_request_id]).to eq(headers['Cf-Request-Id']) + end + end + + context 'with header values with long strings' do + let(:headers) { { 'Cf-Ray' => SecureRandom.hex(33), 'Cf-Request-Id' => SecureRandom.hex(33) } } + + it 'filters invalid header values' do + helper.store_cloudflare_headers!(payload, request) + + expect(payload.keys).not_to include(:cf_ray, :cf_request_id) + end + end + + context 'with header values with non-alphanumeric characters' do + let(:headers) { { 'Cf-Ray' => "Bad\u0000ray", 'Cf-Request-Id' => "Bad\u0000req" } } + + it 'filters invalid header values' do + helper.store_cloudflare_headers!(payload, request) + + expect(payload.keys).not_to include(:cf_ray, :cf_request_id) + end + end + end +end diff --git a/spec/lib/gitlab/lograge/custom_options_spec.rb b/spec/lib/gitlab/lograge/custom_options_spec.rb index 48d06283b7a..7ae8baa31b5 100644 --- a/spec/lib/gitlab/lograge/custom_options_spec.rb +++ b/spec/lib/gitlab/lograge/custom_options_spec.rb @@ -19,7 +19,13 @@ describe Gitlab::Lograge::CustomOptions do 1, 2, 'transaction_id', - { params: params, user_id: 'test' } + { + params: params, + user_id: 'test', + cf_ray: SecureRandom.hex, + cf_request_id: SecureRandom.hex, + metadata: { 'meta.user' => 'jane.doe' } + } ) end @@ -46,5 +52,30 @@ describe Gitlab::Lograge::CustomOptions do it 'adds the user id' do expect(subject[:user_id]).to eq('test') end + + it 'adds Cloudflare headers' do + expect(subject[:cf_ray]).to eq(event.payload[:cf_ray]) + expect(subject[:cf_request_id]).to eq(event.payload[:cf_request_id]) + end + + it 'adds the metadata' do + expect(subject['meta.user']).to eq('jane.doe') + end + + context 'when metadata is missing' do + let(:event) do + ActiveSupport::Notifications::Event.new( + 'test', + 1, + 2, + 'transaction_id', + { params: {} } + ) + end + + it 'does not break' do + expect { subject }.not_to raise_error + end + end end end diff --git a/spec/lib/gitlab/mail_room/mail_room_spec.rb b/spec/lib/gitlab/mail_room/mail_room_spec.rb index 5d41ee06263..4b09205a181 100644 --- a/spec/lib/gitlab/mail_room/mail_room_spec.rb +++ b/spec/lib/gitlab/mail_room/mail_room_spec.rb @@ -13,7 +13,8 @@ describe Gitlab::MailRoom do start_tls: false, mailbox: 'inbox', idle_timeout: 60, - log_path: Rails.root.join('log', 'mail_room_json.log').to_s + log_path: Rails.root.join('log', 'mail_room_json.log').to_s, + expunge_deleted: false } end diff --git a/spec/lib/gitlab/metrics/background_transaction_spec.rb b/spec/lib/gitlab/metrics/background_transaction_spec.rb index d87d2c839ad..84f405d7369 100644 --- a/spec/lib/gitlab/metrics/background_transaction_spec.rb +++ b/spec/lib/gitlab/metrics/background_transaction_spec.rb @@ -7,12 +7,6 @@ describe Gitlab::Metrics::BackgroundTransaction do subject { described_class.new(test_worker_class) } - describe '#action' do - it 'returns transaction action name' do - expect(subject.action).to eq('TestWorker#perform') - end - 
end - describe '#label' do it 'returns labels based on class name' do expect(subject.labels).to eq(controller: 'TestWorker', action: 'perform') diff --git a/spec/lib/gitlab/metrics/dashboard/stages/grafana_formatter_spec.rb b/spec/lib/gitlab/metrics/dashboard/stages/grafana_formatter_spec.rb index e41004bb57e..5d4bd4512e3 100644 --- a/spec/lib/gitlab/metrics/dashboard/stages/grafana_formatter_spec.rb +++ b/spec/lib/gitlab/metrics/dashboard/stages/grafana_formatter_spec.rb @@ -9,9 +9,9 @@ describe Gitlab::Metrics::Dashboard::Stages::GrafanaFormatter do let_it_be(:project) { create(:project, namespace: namespace, name: 'bar') } describe '#transform!' do - let(:grafana_dashboard) { JSON.parse(fixture_file('grafana/simplified_dashboard_response.json'), symbolize_names: true) } - let(:datasource) { JSON.parse(fixture_file('grafana/datasource_response.json'), symbolize_names: true) } - let(:expected_dashboard) { JSON.parse(fixture_file('grafana/expected_grafana_embed.json'), symbolize_names: true) } + let(:grafana_dashboard) { Gitlab::Json.parse(fixture_file('grafana/simplified_dashboard_response.json'), symbolize_names: true) } + let(:datasource) { Gitlab::Json.parse(fixture_file('grafana/datasource_response.json'), symbolize_names: true) } + let(:expected_dashboard) { Gitlab::Json.parse(fixture_file('grafana/expected_grafana_embed.json'), symbolize_names: true) } subject(:dashboard) { described_class.new(project, {}, params).transform! } diff --git a/spec/lib/gitlab/metrics/dashboard/url_spec.rb b/spec/lib/gitlab/metrics/dashboard/url_spec.rb index 9ccd1c06d6b..75f9f99c8a6 100644 --- a/spec/lib/gitlab/metrics/dashboard/url_spec.rb +++ b/spec/lib/gitlab/metrics/dashboard/url_spec.rb @@ -3,17 +3,21 @@ require 'spec_helper' describe Gitlab::Metrics::Dashboard::Url do + include Gitlab::Routing.url_helpers + describe '#metrics_regex' do - let(:url) do - Gitlab::Routing.url_helpers.metrics_namespace_project_environment_url( + let(:url_params) do + [ 'foo', 'bar', 1, - start: '2019-08-02T05:43:09.000Z', - dashboard: 'config/prometheus/common_metrics.yml', - group: 'awesome group', - anchor: 'title' - ) + { + start: '2019-08-02T05:43:09.000Z', + dashboard: 'config/prometheus/common_metrics.yml', + group: 'awesome group', + anchor: 'title' + } + ] end let(:expected_params) do @@ -29,12 +33,22 @@ describe Gitlab::Metrics::Dashboard::Url do subject { described_class.metrics_regex } - it_behaves_like 'regex which matches url when expected' + context 'for metrics route' do + let(:url) { metrics_namespace_project_environment_url(*url_params) } + + it_behaves_like 'regex which matches url when expected' + end + + context 'for metrics_dashboard route' do + let(:url) { metrics_dashboard_namespace_project_environment_url(*url_params) } + + it_behaves_like 'regex which matches url when expected' + end end describe '#grafana_regex' do let(:url) do - Gitlab::Routing.url_helpers.namespace_project_grafana_api_metrics_dashboard_url( + namespace_project_grafana_api_metrics_dashboard_url( 'foo', 'bar', start: '2019-08-02T05:43:09.000Z', diff --git a/spec/lib/gitlab/metrics/exporter/sidekiq_exporter_spec.rb b/spec/lib/gitlab/metrics/exporter/sidekiq_exporter_spec.rb index a415b6407d5..0b820fdbde9 100644 --- a/spec/lib/gitlab/metrics/exporter/sidekiq_exporter_spec.rb +++ b/spec/lib/gitlab/metrics/exporter/sidekiq_exporter_spec.rb @@ -53,7 +53,7 @@ describe Gitlab::Metrics::Exporter::SidekiqExporter do .with( class: described_class.to_s, message: 'Cannot start sidekiq_exporter', - exception: anything) + 
'exception.message' => anything) exporter.start end diff --git a/spec/lib/gitlab/metrics/method_call_spec.rb b/spec/lib/gitlab/metrics/method_call_spec.rb index 3b5e04e2df5..229db67ec88 100644 --- a/spec/lib/gitlab/metrics/method_call_spec.rb +++ b/spec/lib/gitlab/metrics/method_call_spec.rb @@ -76,25 +76,6 @@ describe Gitlab::Metrics::MethodCall do end end - describe '#to_metric' do - it 'returns a Metric instance' do - expect(method_call).to receive(:real_time).and_return(4.0001).twice - expect(method_call).to receive(:cpu_time).and_return(3.0001) - - method_call.measure { 'foo' } - metric = method_call.to_metric - - expect(metric).to be_an_instance_of(Gitlab::Metrics::Metric) - expect(metric.series).to eq('rails_method_calls') - - expect(metric.values[:duration]).to eq(4000) - expect(metric.values[:cpu_duration]).to eq(3000) - expect(metric.values[:call_count]).to be_an(Integer) - - expect(metric.tags).to eq({ method: 'Foo#bar' }) - end - end - describe '#above_threshold?' do before do allow(Gitlab::Metrics).to receive(:method_call_threshold).and_return(100) diff --git a/spec/lib/gitlab/metrics/metric_spec.rb b/spec/lib/gitlab/metrics/metric_spec.rb deleted file mode 100644 index 611b59231ba..00000000000 --- a/spec/lib/gitlab/metrics/metric_spec.rb +++ /dev/null @@ -1,71 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -describe Gitlab::Metrics::Metric do - let(:metric) do - described_class.new('foo', { number: 10 }, { host: 'localtoast' }) - end - - describe '#series' do - subject { metric.series } - - it { is_expected.to eq('foo') } - end - - describe '#values' do - subject { metric.values } - - it { is_expected.to eq({ number: 10 }) } - end - - describe '#tags' do - subject { metric.tags } - - it { is_expected.to eq({ host: 'localtoast' }) } - end - - describe '#type' do - subject { metric.type } - - it { is_expected.to eq(:metric) } - end - - describe '#event?' 
do - it 'returns false for a regular metric' do - expect(metric.event?).to eq(false) - end - - it 'returns true for an event metric' do - expect(metric).to receive(:type).and_return(:event) - - expect(metric.event?).to eq(true) - end - end - - describe '#to_hash' do - it 'returns a Hash' do - expect(metric.to_hash).to be_an_instance_of(Hash) - end - - describe 'the returned Hash' do - let(:hash) { metric.to_hash } - - it 'includes the series' do - expect(hash[:series]).to eq('foo') - end - - it 'includes the tags' do - expect(hash[:tags]).to be_an_instance_of(Hash) - end - - it 'includes the values' do - expect(hash[:values]).to eq({ number: 10 }) - end - - it 'includes the timestamp' do - expect(hash[:timestamp]).to be_an(Integer) - end - end - end -end diff --git a/spec/lib/gitlab/metrics/rack_middleware_spec.rb b/spec/lib/gitlab/metrics/rack_middleware_spec.rb index 1c1681cc5ab..dd1dbf7a1f4 100644 --- a/spec/lib/gitlab/metrics/rack_middleware_spec.rb +++ b/spec/lib/gitlab/metrics/rack_middleware_spec.rb @@ -10,10 +10,6 @@ describe Gitlab::Metrics::RackMiddleware do let(:env) { { 'REQUEST_METHOD' => 'GET', 'REQUEST_URI' => '/foo' } } describe '#call' do - before do - expect_any_instance_of(Gitlab::Metrics::Transaction).to receive(:finish) - end - it 'tracks a transaction' do expect(app).to receive(:call).with(env).and_return('yay') @@ -36,26 +32,5 @@ describe Gitlab::Metrics::RackMiddleware do it 'returns a Transaction' do expect(transaction).to be_an_instance_of(Gitlab::Metrics::WebTransaction) end - - it 'stores the request method and URI in the transaction as values' do - expect(transaction.values[:request_method]).to eq('GET') - expect(transaction.values[:request_uri]).to eq('/foo') - end - - context "when URI includes sensitive parameters" do - let(:env) do - { - 'REQUEST_METHOD' => 'GET', - 'REQUEST_URI' => '/foo?private_token=my-token', - 'PATH_INFO' => '/foo', - 'QUERY_STRING' => 'private_token=my_token', - 'action_dispatch.parameter_filter' => [:private_token] - } - end - - it 'stores the request URI with the sensitive parameters filtered' do - expect(transaction.values[:request_uri]).to eq('/foo?private_token=[FILTERED]') - end - end end end diff --git a/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb new file mode 100644 index 00000000000..fdf3b5bd045 --- /dev/null +++ b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::Metrics::Samplers::DatabaseSampler do + subject { described_class.new(described_class::SAMPLING_INTERVAL_SECONDS) } + + describe '#sample' do + before do + described_class::METRIC_DESCRIPTIONS.each_key do |metric| + allow(subject.metrics[metric]).to receive(:set) + end + end + + context 'for ActiveRecord::Base' do + let(:labels) do + { + class: 'ActiveRecord::Base', + host: Gitlab::Database.config['host'], + port: Gitlab::Database.config['port'] + } + end + + context 'when the database is connected' do + it 'samples connection pool statistics' do + expect(subject.metrics[:size]).to receive(:set).with(labels, a_value >= 1) + expect(subject.metrics[:connections]).to receive(:set).with(labels, a_value >= 1) + expect(subject.metrics[:busy]).to receive(:set).with(labels, a_value >= 1) + expect(subject.metrics[:dead]).to receive(:set).with(labels, a_value >= 0) + expect(subject.metrics[:waiting]).to receive(:set).with(labels, a_value >= 0) + + subject.sample + end + end + + context 'when the 
database is not connected' do + before do + allow(ActiveRecord::Base).to receive(:connected?).and_return(false) + end + + it 'records no samples' do + expect(subject.metrics[:size]).not_to receive(:set).with(labels, anything) + + subject.sample + end + end + end + end +end diff --git a/spec/lib/gitlab/metrics/samplers/influx_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/influx_sampler_spec.rb deleted file mode 100644 index 939c057c342..00000000000 --- a/spec/lib/gitlab/metrics/samplers/influx_sampler_spec.rb +++ /dev/null @@ -1,105 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -describe Gitlab::Metrics::Samplers::InfluxSampler do - let(:sampler) { described_class.new(5) } - - describe '#start' do - it 'runs once and gathers a sample at a given interval' do - expect(sampler).to receive(:sleep).with(a_kind_of(Numeric)).twice - expect(sampler).to receive(:sample).once - expect(sampler).to receive(:running).and_return(true, false) - - sampler.start.join - end - end - - describe '#sample' do - it 'samples various statistics' do - expect(sampler).to receive(:sample_memory_usage) - expect(sampler).to receive(:sample_file_descriptors) - expect(sampler).to receive(:flush) - - sampler.sample - end - end - - describe '#flush' do - it 'schedules the metrics using Sidekiq' do - expect(Gitlab::Metrics).to receive(:submit_metrics) - .with([an_instance_of(Hash)]) - - sampler.sample_memory_usage - sampler.flush - end - end - - describe '#sample_memory_usage' do - it 'adds a metric containing the memory usage' do - expect(Gitlab::Metrics::System).to receive(:memory_usage) - .and_return(9000) - - expect(sampler).to receive(:add_metric) - .with(/memory_usage/, value: 9000) - .and_call_original - - sampler.sample_memory_usage - end - end - - describe '#sample_file_descriptors' do - it 'adds a metric containing the amount of open file descriptors' do - expect(Gitlab::Metrics::System).to receive(:file_descriptor_count) - .and_return(4) - - expect(sampler).to receive(:add_metric) - .with(/file_descriptors/, value: 4) - .and_call_original - - sampler.sample_file_descriptors - end - end - - describe '#add_metric' do - it 'prefixes the series name for a Rails process' do - expect(Gitlab::Runtime).to receive(:sidekiq?).and_return(false) - - expect(Gitlab::Metrics::Metric).to receive(:new) - .with('rails_cats', { value: 10 }, {}) - .and_call_original - - sampler.add_metric('cats', value: 10) - end - - it 'prefixes the series name for a Sidekiq process' do - expect(Gitlab::Runtime).to receive(:sidekiq?).and_return(true) - - expect(Gitlab::Metrics::Metric).to receive(:new) - .with('sidekiq_cats', { value: 10 }, {}) - .and_call_original - - sampler.add_metric('cats', value: 10) - end - end - - describe '#sleep_interval' do - it 'returns a Numeric' do - expect(sampler.sleep_interval).to be_a_kind_of(Numeric) - end - - # Testing random behaviour is very hard, so treat this test as a basic smoke - # test instead of a very accurate behaviour/unit test. 
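# The new DatabaseSampler spec above describes a sampler that periodically turns
# ActiveRecord connection pool statistics into labelled Prometheus gauges and skips
# sampling entirely when the database is not connected. A minimal sketch of that idea,
# using the public ConnectionPool#stat API; `gauge_for` and the method name are
# illustrative assumptions, not the actual sampler implementation:
def sample_connection_pool(base_class = ActiveRecord::Base)
  return unless base_class.connected?

  stats = base_class.connection_pool.stat # { size:, connections:, busy:, dead:, idle:, waiting:, checkout_timeout: }
  labels = {
    class: base_class.name,
    host: Gitlab::Database.config['host'],
    port: Gitlab::Database.config['port']
  }

  %i[size connections busy dead waiting].each do |metric|
    gauge_for(metric).set(labels, stats[metric])
  end
end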
- it 'does not return the same interval twice in a row' do - last = nil - - 100.times do - interval = sampler.sleep_interval - - expect(interval).not_to eq(last) - - last = interval - end - end - end -end diff --git a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb index 8c4071a7ed1..ead650a27f0 100644 --- a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb +++ b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb @@ -19,24 +19,38 @@ describe Gitlab::Metrics::Samplers::RubySampler do end describe '#sample' do - it 'samples various statistics' do - expect(Gitlab::Metrics::System).to receive(:cpu_time) - expect(Gitlab::Metrics::System).to receive(:file_descriptor_count) - expect(Gitlab::Metrics::System).to receive(:memory_usage) - expect(Gitlab::Metrics::System).to receive(:max_open_file_descriptors) - expect(sampler).to receive(:sample_gc) + it 'adds a metric containing the process resident memory bytes' do + expect(Gitlab::Metrics::System).to receive(:memory_usage_rss).and_return(9000) + + expect(sampler.metrics[:process_resident_memory_bytes]).to receive(:set).with({}, 9000) sampler.sample end - it 'adds a metric containing the process resident memory bytes' do - expect(Gitlab::Metrics::System).to receive(:memory_usage).and_return(9000) + it 'adds a metric containing the process unique and proportional memory bytes' do + expect(Gitlab::Metrics::System).to receive(:memory_usage_uss_pss).and_return(uss: 9000, pss: 10_000) - expect(sampler.metrics[:process_resident_memory_bytes]).to receive(:set).with({}, 9000) + expect(sampler.metrics[:process_unique_memory_bytes]).to receive(:set).with({}, 9000) + expect(sampler.metrics[:process_proportional_memory_bytes]).to receive(:set).with({}, 10_000) sampler.sample end + context 'when USS+PSS sampling is disabled via environment' do + before do + stub_env('enable_memory_uss_pss', "0") + end + + it 'does not sample USS or PSS' do + expect(Gitlab::Metrics::System).not_to receive(:memory_usage_uss_pss) + + expect(sampler.metrics[:process_unique_memory_bytes]).not_to receive(:set) + expect(sampler.metrics[:process_proportional_memory_bytes]).not_to receive(:set) + + sampler.sample + end + end + it 'adds a metric containing the amount of open file descriptors' do expect(Gitlab::Metrics::System).to receive(:file_descriptor_count) .and_return(4) diff --git a/spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb b/spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb index bb95d5ab2ad..67336cf83e6 100644 --- a/spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb +++ b/spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb @@ -17,8 +17,6 @@ describe Gitlab::Metrics::SidekiqMiddleware do expect_any_instance_of(Gitlab::Metrics::Transaction).to receive(:set) .with(:sidekiq_queue_duration, instance_of(Float)) - expect_any_instance_of(Gitlab::Metrics::Transaction).to receive(:finish) - middleware.call(worker, message, :test) { nil } end @@ -32,8 +30,6 @@ describe Gitlab::Metrics::SidekiqMiddleware do expect_any_instance_of(Gitlab::Metrics::Transaction).to receive(:set) .with(:sidekiq_queue_duration, instance_of(Float)) - expect_any_instance_of(Gitlab::Metrics::Transaction).to receive(:finish) - middleware.call(worker, {}, :test) { nil } end @@ -46,9 +42,6 @@ describe Gitlab::Metrics::SidekiqMiddleware do expect_any_instance_of(Gitlab::Metrics::Transaction) .to receive(:add_event).with(:sidekiq_exception) - expect_any_instance_of(Gitlab::Metrics::Transaction) - .to receive(:finish) - expect { 
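# The RubySampler changes above split memory sampling into resident (RSS) and
# unique/proportional (USS/PSS) gauges and add an opt-out through the
# `enable_memory_uss_pss` environment variable. A hedged sketch of the sampling step
# those expectations imply (the guard condition and method name are assumptions):
def sample_memory
  metrics[:process_resident_memory_bytes].set({}, Gitlab::Metrics::System.memory_usage_rss)

  return if ENV['enable_memory_uss_pss'] == '0' # USS/PSS collection can be switched off

  memory = Gitlab::Metrics::System.memory_usage_uss_pss
  metrics[:process_unique_memory_bytes].set({}, memory[:uss])
  metrics[:process_proportional_memory_bytes].set({}, memory[:pss])
end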
middleware.call(worker, message, :test) } .to raise_error(RuntimeError) end diff --git a/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb b/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb index 25c0e7b695a..857e54d3432 100644 --- a/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb +++ b/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb @@ -21,15 +21,9 @@ describe Gitlab::Metrics::Subscribers::ActionView do describe '#render_template' do it 'tracks rendering of a template' do - values = { duration: 2.1 } - tags = { view: 'app/views/x.html.haml' } - expect(transaction).to receive(:increment) .with(:view_duration, 2.1) - expect(transaction).to receive(:add_metric) - .with(described_class::SERIES, values, tags) - subscriber.render_template(event) end diff --git a/spec/lib/gitlab/metrics/system_spec.rb b/spec/lib/gitlab/metrics/system_spec.rb index a5aa80686fd..abb6a0096d6 100644 --- a/spec/lib/gitlab/metrics/system_spec.rb +++ b/spec/lib/gitlab/metrics/system_spec.rb @@ -3,33 +3,122 @@ require 'spec_helper' describe Gitlab::Metrics::System do - if File.exist?('/proc') - describe '.memory_usage' do - it "returns the process' memory usage in bytes" do - expect(described_class.memory_usage).to be > 0 + context 'when /proc files exist' do + # Fixtures pulled from: + # Linux carbon 5.3.0-7648-generic #41~1586789791~19.10~9593806-Ubuntu SMP Mon Apr 13 17:50:40 UTC x86_64 x86_64 x86_64 GNU/Linux + let(:proc_status) do + # most rows omitted for brevity + <<~SNIP + Name: less + VmHWM: 2468 kB + VmRSS: 2468 kB + RssAnon: 260 kB + SNIP + end + + let(:proc_smaps_rollup) do + # full snapshot + <<~SNIP + Rss: 2564 kB + Pss: 503 kB + Pss_Anon: 312 kB + Pss_File: 191 kB + Pss_Shmem: 0 kB + Shared_Clean: 2100 kB + Shared_Dirty: 0 kB + Private_Clean: 152 kB + Private_Dirty: 312 kB + Referenced: 2564 kB + Anonymous: 312 kB + LazyFree: 0 kB + AnonHugePages: 0 kB + ShmemPmdMapped: 0 kB + Shared_Hugetlb: 0 kB + Private_Hugetlb: 0 kB + Swap: 0 kB + SwapPss: 0 kB + Locked: 0 kB + SNIP + end + + let(:proc_limits) do + # full snapshot + <<~SNIP + Limit Soft Limit Hard Limit Units + Max cpu time unlimited unlimited seconds + Max file size unlimited unlimited bytes + Max data size unlimited unlimited bytes + Max stack size 8388608 unlimited bytes + Max core file size 0 unlimited bytes + Max resident set unlimited unlimited bytes + Max processes 126519 126519 processes + Max open files 1024 1048576 files + Max locked memory 67108864 67108864 bytes + Max address space unlimited unlimited bytes + Max file locks unlimited unlimited locks + Max pending signals 126519 126519 signals + Max msgqueue size 819200 819200 bytes + Max nice priority 0 0 + Max realtime priority 0 0 + Max realtime timeout unlimited unlimited us + SNIP + end + + describe '.memory_usage_rss' do + it "returns the process' resident set size (RSS) in bytes" do + mock_existing_proc_file('/proc/self/status', proc_status) + + expect(described_class.memory_usage_rss).to eq(2527232) end end describe '.file_descriptor_count' do it 'returns the amount of open file descriptors' do - expect(described_class.file_descriptor_count).to be > 0 + expect(Dir).to receive(:glob).and_return(['/some/path', '/some/other/path']) + + expect(described_class.file_descriptor_count).to eq(2) end end describe '.max_open_file_descriptors' do it 'returns the max allowed open file descriptors' do - expect(described_class.max_open_file_descriptors).to be > 0 + mock_existing_proc_file('/proc/self/limits', proc_limits) + + 
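# Gitlab::Metrics::System.memory_usage_rss is specified against the /proc/self/status
# fixture above: "VmRSS: 2468 kB" must come back as 2468 * 1024 = 2_527_232 bytes, and a
# missing /proc file must yield 0. A minimal line-oriented parser consistent with that
# (the field-matching details are an assumption; only the kB-to-bytes conversion and the
# fallback are pinned down by the spec):
def memory_usage_rss(path = '/proc/self/status')
  File.foreach(path) do |line|
    match = line.match(/\AVmRSS:\s+(\d+)\s+kB/)
    return match[1].to_i * 1024 if match
  end

  0
rescue Errno::ENOENT
  0
end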
expect(described_class.max_open_file_descriptors).to eq(1024) + end + end + + describe '.memory_usage_uss_pss' do + it "returns the process' unique and porportional set size (USS/PSS) in bytes" do + mock_existing_proc_file('/proc/self/smaps_rollup', proc_smaps_rollup) + + # (Private_Clean (152 kB) + Private_Dirty (312 kB) + Private_Hugetlb (0 kB)) * 1024 + expect(described_class.memory_usage_uss_pss).to eq(uss: 475136, pss: 515072) end end - else - describe '.memory_usage' do - it 'returns 0.0' do - expect(described_class.memory_usage).to eq(0.0) + end + + context 'when /proc files do not exist' do + before do + mock_missing_proc_file + end + + describe '.memory_usage_rss' do + it 'returns 0' do + expect(described_class.memory_usage_rss).to eq(0) + end + end + + describe '.memory_usage_uss_pss' do + it "returns 0 for all components" do + expect(described_class.memory_usage_uss_pss).to eq(uss: 0, pss: 0) end end describe '.file_descriptor_count' do it 'returns 0' do + expect(Dir).to receive(:glob).and_return([]) + expect(described_class.file_descriptor_count).to eq(0) end end @@ -98,4 +187,12 @@ describe Gitlab::Metrics::System do expect(described_class.thread_cpu_duration(start_time)).to be_nil end end + + def mock_existing_proc_file(path, content) + allow(File).to receive(:foreach).with(path) { |_path, &block| content.each_line(&block) } + end + + def mock_missing_proc_file + allow(File).to receive(:foreach).and_raise(Errno::ENOENT) + end end diff --git a/spec/lib/gitlab/metrics/transaction_spec.rb b/spec/lib/gitlab/metrics/transaction_spec.rb index 08de2426c5a..cf46fa3e91c 100644 --- a/spec/lib/gitlab/metrics/transaction_spec.rb +++ b/spec/lib/gitlab/metrics/transaction_spec.rb @@ -4,7 +4,6 @@ require 'spec_helper' describe Gitlab::Metrics::Transaction do let(:transaction) { described_class.new } - let(:metric) { transaction.metrics[0] } let(:sensitive_tags) do { @@ -13,12 +12,6 @@ describe Gitlab::Metrics::Transaction do } end - shared_examples 'tag filter' do |sane_tags| - it 'filters potentially sensitive tags' do - expect(metric.tags).to eq(sane_tags) - end - end - describe '#duration' do it 'returns the duration of a transaction in seconds' do transaction.run { } @@ -61,25 +54,6 @@ describe Gitlab::Metrics::Transaction do end end - describe '#add_metric' do - it 'adds a metric to the transaction' do - transaction.add_metric('foo', value: 1) - - expect(metric.series).to eq('rails_foo') - expect(metric.tags).to eq({}) - expect(metric.values).to eq(value: 1) - end - - context 'with sensitive tags' do - before do - transaction - .add_metric('foo', { value: 1 }, **sensitive_tags.merge(sane: 'yes')) - end - - it_behaves_like 'tag filter', sane: 'yes' - end - end - describe '#method_call_for' do it 'returns a MethodCall' do method = transaction.method_call_for('Foo#bar', :Foo, '#bar') @@ -88,133 +62,23 @@ describe Gitlab::Metrics::Transaction do end end - describe '#increment' do - it 'increments a counter' do - transaction.increment(:time, 1) - transaction.increment(:time, 2) - - values = metric_values(time: 3) - - expect(transaction).to receive(:add_metric) - .with('transactions', values, {}) - - transaction.track_self - end - end - - describe '#set' do - it 'sets a value' do - transaction.set(:number, 10) - - values = metric_values(number: 10) - - expect(transaction).to receive(:add_metric) - .with('transactions', values, {}) - - transaction.track_self - end - end - - describe '#finish' do - it 'tracks the transaction details and submits them to Sidekiq' do - expect(transaction).to 
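# The .memory_usage_uss_pss examples pin down the arithmetic: from /proc/self/smaps_rollup,
# USS = (Private_Clean + Private_Dirty + Private_Hugetlb) kB and PSS = Pss kB, both
# converted to bytes. With the fixture above that is (152 + 312 + 0) * 1024 = 475_136 and
# 503 * 1024 = 515_072. A hedged parser along those lines (field handling is an assumption;
# the summation and unit conversion follow the spec's own comment):
def memory_usage_uss_pss(path = '/proc/self/smaps_rollup')
  uss = 0
  pss = 0

  File.foreach(path) do |line|
    field, value = line.split(':', 2)
    next unless value

    kilobytes = value.to_i

    case field
    when 'Private_Clean', 'Private_Dirty', 'Private_Hugetlb' then uss += kilobytes
    when 'Pss' then pss = kilobytes
    end
  end

  { uss: uss * 1024, pss: pss * 1024 }
rescue Errno::ENOENT
  { uss: 0, pss: 0 }
end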
receive(:track_self) - expect(transaction).to receive(:submit) - - transaction.finish - end - end - - describe '#track_self' do - it 'adds a metric for the transaction itself' do - values = metric_values - - expect(transaction).to receive(:add_metric) - .with('transactions', values, {}) - - transaction.track_self - end - end - - describe '#submit' do - it 'submits the metrics to Sidekiq' do - transaction.track_self - - expect(Gitlab::Metrics).to receive(:submit_metrics) - .with([an_instance_of(Hash)]) - - transaction.submit - end - - it 'adds the action as a tag for every metric' do - allow(transaction) - .to receive(:labels) - .and_return(controller: 'Foo', action: 'bar') - - transaction.track_self - - hash = { - series: 'rails_transactions', - tags: { action: 'Foo#bar' }, - values: metric_values, - timestamp: a_kind_of(Integer) - } - - expect(Gitlab::Metrics).to receive(:submit_metrics) - .with([hash]) - - transaction.submit - end - - it 'does not add an action tag for events' do - allow(transaction) - .to receive(:labels) - .and_return(controller: 'Foo', action: 'bar') - - transaction.add_event(:meow) - - hash = { - series: 'events', - tags: { event: :meow }, - values: { count: 1 }, - timestamp: a_kind_of(Integer) - } - - expect(Gitlab::Metrics).to receive(:submit_metrics) - .with([hash]) - - transaction.submit - end - end - describe '#add_event' do - it 'adds a metric' do - transaction.add_event(:meow) + let(:prometheus_metric) { instance_double(Prometheus::Client::Counter, increment: nil) } - expect(metric).to be_an_instance_of(Gitlab::Metrics::Metric) + before do + allow(described_class).to receive(:transaction_metric).and_return(prometheus_metric) end - it "does not prefix the metric's series name" do - transaction.add_event(:meow) - - expect(metric.series).to eq(described_class::EVENT_SERIES) - end - - it 'tracks a counter for every event' do - transaction.add_event(:meow) - - expect(metric.values).to eq(count: 1) - end + it 'adds a metric' do + expect(prometheus_metric).to receive(:increment) - it 'tracks the event name' do transaction.add_event(:meow) - - expect(metric.tags).to eq(event: :meow) end it 'allows tracking of custom tags' do - transaction.add_event(:bau, animal: 'dog') + expect(prometheus_metric).to receive(:increment).with(hash_including(animal: "dog")) - expect(metric.tags).to eq(event: :bau, animal: 'dog') + transaction.add_event(:bau, animal: 'dog') end context 'with sensitive tags' do @@ -222,16 +86,11 @@ describe Gitlab::Metrics::Transaction do transaction.add_event(:baubau, **sensitive_tags.merge(sane: 'yes')) end - it_behaves_like 'tag filter', event: :baubau, sane: 'yes' - end - end - - private + it 'filters tags' do + expect(prometheus_metric).not_to receive(:increment).with(hash_including(sensitive_tags)) - def metric_values(opts = {}) - { - duration: 0.0, - allocated_memory: a_kind_of(Numeric) - }.merge(opts) + transaction.add_event(:baubau, **sensitive_tags.merge(sane: 'yes')) + end + end end end diff --git a/spec/lib/gitlab/metrics/web_transaction_spec.rb b/spec/lib/gitlab/metrics/web_transaction_spec.rb index 21a762dbf25..47f1bd3bd10 100644 --- a/spec/lib/gitlab/metrics/web_transaction_spec.rb +++ b/spec/lib/gitlab/metrics/web_transaction_spec.rb @@ -5,6 +5,11 @@ require 'spec_helper' describe Gitlab::Metrics::WebTransaction do let(:env) { {} } let(:transaction) { described_class.new(env) } + let(:prometheus_metric) { double("prometheus metric") } + + before do + allow(described_class).to receive(:transaction_metric).and_return(prometheus_metric) + end 
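# After this rework, Transaction#add_event increments a Prometheus counter instead of
# buffering an InfluxDB-style Metric, and tags flagged as sensitive never reach the
# counter. A hedged sketch of that shape; `transaction_metric` is the class-level lookup
# the spec stubs (its arguments here are a guess), and FILTERED_TAGS is an illustrative
# stand-in for however the real class decides which tags are sensitive:
FILTERED_TAGS = %i[path branch].freeze

def add_event(event_name, tags = {})
  safe_tags = tags.reject { |key, _| FILTERED_TAGS.include?(key) }

  self.class.transaction_metric(event_name, :counter).increment(safe_tags)
end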
describe '#duration' do it 'returns the duration of a transaction in seconds' do @@ -40,15 +45,6 @@ describe Gitlab::Metrics::WebTransaction do end end - describe '#add_metric' do - it 'adds a metric to the transaction' do - expect(Gitlab::Metrics::Metric).to receive(:new) - .with('rails_foo', { number: 10 }, {}) - - transaction.add_metric('foo', number: 10) - end - end - describe '#method_call_for' do it 'returns a MethodCall' do method = transaction.method_call_for('Foo#bar', :Foo, '#bar') @@ -59,101 +55,17 @@ describe Gitlab::Metrics::WebTransaction do describe '#increment' do it 'increments a counter' do - transaction.increment(:time, 1) - transaction.increment(:time, 2) - - values = { duration: 0.0, time: 3, allocated_memory: a_kind_of(Numeric) } + expect(prometheus_metric).to receive(:increment).with({}, 1) - expect(transaction).to receive(:add_metric) - .with('transactions', values, {}) - - transaction.track_self + transaction.increment(:time, 1) end end describe '#set' do it 'sets a value' do - transaction.set(:number, 10) - - values = { - duration: 0.0, - number: 10, - allocated_memory: a_kind_of(Numeric) - } - - expect(transaction).to receive(:add_metric) - .with('transactions', values, {}) - - transaction.track_self - end - end - - describe '#finish' do - it 'tracks the transaction details and submits them to Sidekiq' do - expect(transaction).to receive(:track_self) - expect(transaction).to receive(:submit) - - transaction.finish - end - end - - describe '#track_self' do - it 'adds a metric for the transaction itself' do - values = { - duration: transaction.duration, - allocated_memory: a_kind_of(Numeric) - } - - expect(transaction).to receive(:add_metric) - .with('transactions', values, {}) - - transaction.track_self - end - end - - describe '#submit' do - it 'submits the metrics to Sidekiq' do - transaction.track_self - - expect(Gitlab::Metrics).to receive(:submit_metrics) - .with([an_instance_of(Hash)]) - - transaction.submit - end + expect(prometheus_metric).to receive(:set).with({}, 10) - it 'adds the action as a tag for every metric' do - allow(transaction).to receive(:labels).and_return(controller: 'Foo', action: 'bar') - transaction.track_self - - hash = { - series: 'rails_transactions', - tags: { action: 'Foo#bar' }, - values: { duration: 0.0, allocated_memory: a_kind_of(Numeric) }, - timestamp: a_kind_of(Integer) - } - - expect(Gitlab::Metrics).to receive(:submit_metrics) - .with([hash]) - - transaction.submit - end - - it 'does not add an action tag for events' do - allow(transaction).to receive(:labels).and_return(controller: 'Foo', action: 'bar') - - transaction.add_event(:meow) - - hash = { - series: 'events', - tags: { event: :meow }, - values: { count: 1 }, - timestamp: a_kind_of(Integer) - } - - expect(Gitlab::Metrics).to receive(:submit_metrics) - .with([hash]) - - transaction.submit + transaction.set(:number, 10) end end @@ -167,7 +79,6 @@ describe Gitlab::Metrics::WebTransaction do end it 'provides labels with the method and path of the route in the grape endpoint' do expect(transaction.labels).to eq({ controller: 'Grape', action: 'GET /projects/:id/archive' }) - expect(transaction.action).to eq('Grape#GET /projects/:id/archive') end it 'does not provide labels if route infos are missing' do @@ -177,7 +88,6 @@ describe Gitlab::Metrics::WebTransaction do env['api.endpoint'] = endpoint expect(transaction.labels).to eq({}) - expect(transaction.action).to be_nil end end @@ -193,7 +103,6 @@ describe Gitlab::Metrics::WebTransaction do it 'tags a transaction with 
the name and action of a controller' do expect(transaction.labels).to eq({ controller: 'TestController', action: 'show' }) - expect(transaction.action).to eq('TestController#show') end context 'when the request content type is not :html' do @@ -201,7 +110,6 @@ describe Gitlab::Metrics::WebTransaction do it 'appends the mime type to the transaction action' do expect(transaction.labels).to eq({ controller: 'TestController', action: 'show.json' }) - expect(transaction.action).to eq('TestController#show.json') end end @@ -210,54 +118,26 @@ describe Gitlab::Metrics::WebTransaction do it 'does not append the MIME type to the transaction action' do expect(transaction.labels).to eq({ controller: 'TestController', action: 'show' }) - expect(transaction.action).to eq('TestController#show') end end end it 'returns no labels when no route information is present in env' do expect(transaction.labels).to eq({}) - expect(transaction.action).to eq(nil) end end describe '#add_event' do it 'adds a metric' do - transaction.add_event(:meow) + expect(prometheus_metric).to receive(:increment) - expect(transaction.metrics[0]).to be_an_instance_of(Gitlab::Metrics::Metric) - end - - it "does not prefix the metric's series name" do transaction.add_event(:meow) - - metric = transaction.metrics[0] - - expect(metric.series).to eq(described_class::EVENT_SERIES) - end - - it 'tracks a counter for every event' do - transaction.add_event(:meow) - - metric = transaction.metrics[0] - - expect(metric.values).to eq(count: 1) - end - - it 'tracks the event name' do - transaction.add_event(:meow) - - metric = transaction.metrics[0] - - expect(metric.tags).to eq(event: :meow) end it 'allows tracking of custom tags' do - transaction.add_event(:bau, animal: 'dog') - - metric = transaction.metrics[0] + expect(prometheus_metric).to receive(:increment).with(animal: "dog") - expect(metric.tags).to eq(event: :bau, animal: 'dog') + transaction.add_event(:bau, animal: 'dog') end end end diff --git a/spec/lib/gitlab/metrics_spec.rb b/spec/lib/gitlab/metrics_spec.rb index f0ba12c1cd0..2ebe1958487 100644 --- a/spec/lib/gitlab/metrics_spec.rb +++ b/spec/lib/gitlab/metrics_spec.rb @@ -53,60 +53,6 @@ describe Gitlab::Metrics do end end - describe '.influx_metrics_enabled?' 
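# The WebTransaction rework above drops the Influx pipeline (#add_metric, #track_self,
# #submit, #action) entirely: #increment and #set now talk straight to Prometheus client
# objects, matching the `receive(:increment).with({}, 1)` / `receive(:set).with({}, 10)`
# expectations. A minimal sketch under that reading (the metric lookup arguments are
# assumptions; only the forwarding behaviour is fixed by the spec):
def increment(name, value = 1)
  self.class.transaction_metric(name, :counter).increment(labels, value)
end

def set(name, value)
  self.class.transaction_metric(name, :gauge).set(labels, value)
end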
do - it 'returns a boolean' do - expect(described_class.influx_metrics_enabled?).to be_in([true, false]) - end - end - - describe '.submit_metrics' do - it 'prepares and writes the metrics to InfluxDB' do - connection = double(:connection) - pool = double(:pool) - - expect(pool).to receive(:with).and_yield(connection) - expect(connection).to receive(:write_points).with(an_instance_of(Array)) - expect(described_class).to receive(:pool).and_return(pool) - - described_class.submit_metrics([{ 'series' => 'kittens', 'tags' => {} }]) - end - end - - describe '.prepare_metrics' do - it 'returns a Hash with the keys as Symbols' do - metrics = described_class - .prepare_metrics([{ 'values' => {}, 'tags' => {} }]) - - expect(metrics).to eq([{ values: {}, tags: {} }]) - end - - it 'escapes tag values' do - metrics = described_class.prepare_metrics([ - { 'values' => {}, 'tags' => { 'foo' => 'bar=' } } - ]) - - expect(metrics).to eq([{ values: {}, tags: { 'foo' => 'bar\\=' } }]) - end - - it 'drops empty tags' do - metrics = described_class.prepare_metrics([ - { 'values' => {}, 'tags' => { 'cats' => '', 'dogs' => nil } } - ]) - - expect(metrics).to eq([{ values: {}, tags: {} }]) - end - end - - describe '.escape_value' do - it 'escapes an equals sign' do - expect(described_class.escape_value('foo=')).to eq('foo\\=') - end - - it 'casts values to Strings' do - expect(described_class.escape_value(10)).to eq('10') - end - end - describe '.measure' do context 'without a transaction' do it 'returns the return value of the block' do @@ -145,30 +91,6 @@ describe Gitlab::Metrics do end end - describe '.action=' do - context 'without a transaction' do - it 'does nothing' do - expect_any_instance_of(Gitlab::Metrics::Transaction) - .not_to receive(:action=) - - described_class.action = 'foo' - end - end - - context 'with a transaction' do - it 'sets the action of a transaction' do - trans = Gitlab::Metrics::WebTransaction.new({}) - - expect(described_class).to receive(:current_transaction) - .and_return(trans) - - expect(trans).to receive(:action=).with('foo') - - described_class.action = 'foo' - end - end - end - describe '#series_prefix' do it 'returns a String' do expect(described_class.series_prefix).to be_an_instance_of(String) diff --git a/spec/lib/gitlab/middleware/multipart_spec.rb b/spec/lib/gitlab/middleware/multipart_spec.rb index c99281ee12c..705164d5445 100644 --- a/spec/lib/gitlab/middleware/multipart_spec.rb +++ b/spec/lib/gitlab/middleware/multipart_spec.rb @@ -195,6 +195,17 @@ describe Gitlab::Middleware::Multipart do end end + it 'allows files in the lfs upload path' do + with_tmp_dir('lfs-objects') do |dir, env| + expect(LfsObjectUploader).to receive(:workhorse_upload_path).and_return(File.join(dir, 'lfs-objects')) + expect(app).to receive(:call) do |env| + expect(get_params(env)['file']).to be_a(::UploadedFile) + end + + middleware.call(env) + end + end + it 'allows symlinks for uploads dir' do Tempfile.open('two-levels') do |tempfile| symlinked_dir = '/some/dir/uploads' diff --git a/spec/lib/gitlab/omniauth_initializer_spec.rb b/spec/lib/gitlab/omniauth_initializer_spec.rb index 99684bb2ab2..4afe4545891 100644 --- a/spec/lib/gitlab/omniauth_initializer_spec.rb +++ b/spec/lib/gitlab/omniauth_initializer_spec.rb @@ -86,6 +86,22 @@ describe Gitlab::OmniauthInitializer do subject.execute([cas3_config]) end + it 'configures defaults for google_oauth2' do + google_config = { + 'name' => 'google_oauth2', + "args" => { "access_type" => "offline", "approval_prompt" => '' } + } + + 
expect(devise_config).to receive(:omniauth).with( + :google_oauth2, + access_type: "offline", + approval_prompt: "", + client_options: { connection_opts: { request: { timeout: Gitlab::OmniauthInitializer::OAUTH2_TIMEOUT_SECONDS } } } + ) + + subject.execute([google_config]) + end + it 'converts client_auth_method to a Symbol for openid_connect' do openid_connect_config = { 'name' => 'openid_connect', diff --git a/spec/lib/gitlab/pagination/keyset_spec.rb b/spec/lib/gitlab/pagination/keyset_spec.rb index bde280c5fca..0ac40080872 100644 --- a/spec/lib/gitlab/pagination/keyset_spec.rb +++ b/spec/lib/gitlab/pagination/keyset_spec.rb @@ -3,6 +3,18 @@ require 'spec_helper' describe Gitlab::Pagination::Keyset do + describe '.available_for_type?' do + subject { described_class } + + it 'returns true for Project' do + expect(subject.available_for_type?(Project.all)).to be_truthy + end + + it 'return false for other types of relations' do + expect(subject.available_for_type?(User.all)).to be_falsey + end + end + describe '.available?' do subject { described_class } diff --git a/spec/lib/gitlab/path_regex_spec.rb b/spec/lib/gitlab/path_regex_spec.rb index 8dabe5a756b..50b045c6aad 100644 --- a/spec/lib/gitlab/path_regex_spec.rb +++ b/spec/lib/gitlab/path_regex_spec.rb @@ -170,6 +170,11 @@ describe Gitlab::PathRegex do expect(described_class::TOP_LEVEL_ROUTES) .to contain_exactly(*top_level_words), failure_block end + + # We ban new items in this list, see https://gitlab.com/gitlab-org/gitlab/-/issues/215362 + it 'does not allow expansion' do + expect(described_class::TOP_LEVEL_ROUTES.size).to eq(41) + end end describe 'GROUP_ROUTES' do @@ -184,6 +189,11 @@ describe Gitlab::PathRegex do expect(described_class::GROUP_ROUTES) .to contain_exactly(*paths_after_group_id), failure_block end + + # We ban new items in this list, see https://gitlab.com/gitlab-org/gitlab/-/issues/215362 + it 'does not allow expansion' do + expect(described_class::GROUP_ROUTES.size).to eq(1) + end end describe 'PROJECT_WILDCARD_ROUTES' do @@ -195,6 +205,11 @@ describe Gitlab::PathRegex do end end end + + # We ban new items in this list, see https://gitlab.com/gitlab-org/gitlab/-/issues/215362 + it 'does not allow expansion' do + expect(described_class::PROJECT_WILDCARD_ROUTES.size).to eq(21) + end end describe '.root_namespace_route_regex' do diff --git a/spec/lib/gitlab/performance_bar_spec.rb b/spec/lib/gitlab/performance_bar_spec.rb index 816db49d94a..7b79cc82816 100644 --- a/spec/lib/gitlab/performance_bar_spec.rb +++ b/spec/lib/gitlab/performance_bar_spec.rb @@ -3,42 +3,7 @@ require 'spec_helper' describe Gitlab::PerformanceBar do - shared_examples 'allowed user IDs are cached' do - before do - # Warm the caches - described_class.enabled_for_user?(user) - end - - it 'caches the allowed user IDs in cache', :use_clean_rails_memory_store_caching do - expect do - expect(described_class.l1_cache_backend).to receive(:fetch).and_call_original - expect(described_class.l2_cache_backend).not_to receive(:fetch) - expect(described_class.enabled_for_user?(user)).to be_truthy - end.not_to exceed_query_limit(0) - end - - it 'caches the allowed user IDs in L1 cache for 1 minute', :use_clean_rails_memory_store_caching do - Timecop.travel 2.minutes do - expect do - expect(described_class.l1_cache_backend).to receive(:fetch).and_call_original - expect(described_class.l2_cache_backend).to receive(:fetch).and_call_original - expect(described_class.enabled_for_user?(user)).to be_truthy - end.not_to exceed_query_limit(0) - end - end - - it 'caches 
the allowed user IDs in L2 cache for 5 minutes', :use_clean_rails_memory_store_caching do - Timecop.travel 6.minutes do - expect do - expect(described_class.l1_cache_backend).to receive(:fetch).and_call_original - expect(described_class.l2_cache_backend).to receive(:fetch).and_call_original - expect(described_class.enabled_for_user?(user)).to be_truthy - end.not_to exceed_query_limit(2) - end - end - end - - it { expect(described_class.l1_cache_backend).to eq(Gitlab::ThreadMemoryCache.cache_backend) } + it { expect(described_class.l1_cache_backend).to eq(Gitlab::ProcessMemoryCache.cache_backend) } it { expect(described_class.l2_cache_backend).to eq(Rails.cache) } describe '.enabled_for_user?' do @@ -82,7 +47,16 @@ describe Gitlab::PerformanceBar do expect(described_class.enabled_for_user?(user)).to be_falsy end - it_behaves_like 'allowed user IDs are cached' + context 'caching of allowed user IDs' do + subject { described_class.enabled_for_user?(user) } + + before do + # Warm the caches + described_class.enabled_for_user?(user) + end + + it_behaves_like 'allowed user IDs are cached' + end end context 'when user is a member of the allowed group' do @@ -94,7 +68,16 @@ describe Gitlab::PerformanceBar do expect(described_class.enabled_for_user?(user)).to be_truthy end - it_behaves_like 'allowed user IDs are cached' + context 'caching of allowed user IDs' do + subject { described_class.enabled_for_user?(user) } + + before do + # Warm the caches + described_class.enabled_for_user?(user) + end + + it_behaves_like 'allowed user IDs are cached' + end end end diff --git a/spec/lib/gitlab/phabricator_import/conduit/response_spec.rb b/spec/lib/gitlab/phabricator_import/conduit/response_spec.rb index a8596968f14..1ffb811cbc1 100644 --- a/spec/lib/gitlab/phabricator_import/conduit/response_spec.rb +++ b/spec/lib/gitlab/phabricator_import/conduit/response_spec.rb @@ -2,8 +2,8 @@ require 'spec_helper' describe Gitlab::PhabricatorImport::Conduit::Response do - let(:response) { described_class.new(JSON.parse(fixture_file('phabricator_responses/maniphest.search.json')))} - let(:error_response) { described_class.new(JSON.parse(fixture_file('phabricator_responses/auth_failed.json'))) } + let(:response) { described_class.new(Gitlab::Json.parse(fixture_file('phabricator_responses/maniphest.search.json')))} + let(:error_response) { described_class.new(Gitlab::Json.parse(fixture_file('phabricator_responses/auth_failed.json'))) } describe '.parse!' 
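# The caching examples extracted above (now reused via `it_behaves_like 'allowed user IDs
# are cached'`) document a two-tier read-through cache for the allowed user IDs: an
# in-process L1 (Gitlab::ProcessMemoryCache, ~1 minute) in front of Rails.cache as L2
# (~5 minutes). A hedged sketch of that pattern; the cache key and the
# `uncached_allowed_user_ids` helper are illustrative only:
CACHE_KEY = 'performance_bar_allowed_user_ids'

def allowed_user_ids
  l1_cache_backend.fetch(CACHE_KEY, expires_in: 1.minute) do
    l2_cache_backend.fetch(CACHE_KEY, expires_in: 5.minutes) do
      uncached_allowed_user_ids # the expensive group-membership query
    end
  end
end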
do it 'raises a ResponseError if the http response was not successfull' do diff --git a/spec/lib/gitlab/phabricator_import/conduit/tasks_response_spec.rb b/spec/lib/gitlab/phabricator_import/conduit/tasks_response_spec.rb index 4b4c2a6276e..2cc12ee0165 100644 --- a/spec/lib/gitlab/phabricator_import/conduit/tasks_response_spec.rb +++ b/spec/lib/gitlab/phabricator_import/conduit/tasks_response_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' describe Gitlab::PhabricatorImport::Conduit::TasksResponse do let(:conduit_response) do Gitlab::PhabricatorImport::Conduit::Response - .new(JSON.parse(fixture_file('phabricator_responses/maniphest.search.json'))) + .new(Gitlab::Json.parse(fixture_file('phabricator_responses/maniphest.search.json'))) end subject(:response) { described_class.new(conduit_response) } diff --git a/spec/lib/gitlab/phabricator_import/conduit/users_response_spec.rb b/spec/lib/gitlab/phabricator_import/conduit/users_response_spec.rb index 00778ad90fd..999a986b73c 100644 --- a/spec/lib/gitlab/phabricator_import/conduit/users_response_spec.rb +++ b/spec/lib/gitlab/phabricator_import/conduit/users_response_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' describe Gitlab::PhabricatorImport::Conduit::UsersResponse do let(:conduit_response) do Gitlab::PhabricatorImport::Conduit::Response - .new(JSON.parse(fixture_file('phabricator_responses/user.search.json'))) + .new(Gitlab::Json.parse(fixture_file('phabricator_responses/user.search.json'))) end subject(:response) { described_class.new(conduit_response) } diff --git a/spec/lib/gitlab/phabricator_import/issues/importer_spec.rb b/spec/lib/gitlab/phabricator_import/issues/importer_spec.rb index 667321409da..02dafd4bb3b 100644 --- a/spec/lib/gitlab/phabricator_import/issues/importer_spec.rb +++ b/spec/lib/gitlab/phabricator_import/issues/importer_spec.rb @@ -7,7 +7,7 @@ describe Gitlab::PhabricatorImport::Issues::Importer do let(:response) do Gitlab::PhabricatorImport::Conduit::TasksResponse.new( Gitlab::PhabricatorImport::Conduit::Response - .new(JSON.parse(fixture_file('phabricator_responses/maniphest.search.json'))) + .new(Gitlab::Json.parse(fixture_file('phabricator_responses/maniphest.search.json'))) ) end diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb index d206d31eb96..64f80b5d736 100644 --- a/spec/lib/gitlab/project_search_results_spec.rb +++ b/spec/lib/gitlab/project_search_results_spec.rb @@ -45,22 +45,36 @@ describe Gitlab::ProjectSearchResults do expect(results.formatted_count(scope)).to eq(expected) end end + + context 'blobs' do + it "limits the search to #{described_class::COUNT_LIMIT} items" do + expect(results).to receive(:blobs).with(limit: described_class::COUNT_LIMIT).and_call_original + expect(results.formatted_count('blobs')).to eq('0') + end + end + + context 'wiki_blobs' do + it "limits the search to #{described_class::COUNT_LIMIT} items" do + expect(results).to receive(:wiki_blobs).with(limit: described_class::COUNT_LIMIT).and_call_original + expect(results.formatted_count('wiki_blobs')).to eq('0') + end + end end - shared_examples 'general blob search' do |entity_type, blob_kind| + shared_examples 'general blob search' do |entity_type, blob_type| let(:query) { 'files' } subject(:results) { described_class.new(user, project, query).objects(blob_type) } context "when #{entity_type} is disabled" do let(:project) { disabled_project } - it "hides #{blob_kind} from members" do + it "hides #{blob_type} from members" do project.add_reporter(user) is_expected.to 
be_empty end - it "hides #{blob_kind} from non-members" do + it "hides #{blob_type} from non-members" do is_expected.to be_empty end end @@ -68,13 +82,13 @@ describe Gitlab::ProjectSearchResults do context "when #{entity_type} is internal" do let(:project) { private_project } - it "finds #{blob_kind} for members" do + it "finds #{blob_type} for members" do project.add_reporter(user) is_expected.not_to be_empty end - it "hides #{blob_kind} from non-members" do + it "hides #{blob_type} from non-members" do is_expected.to be_empty end end @@ -96,7 +110,7 @@ describe Gitlab::ProjectSearchResults do end end - shared_examples 'blob search repository ref' do |entity_type| + shared_examples 'blob search repository ref' do |entity_type, blob_type| let(:query) { 'files' } let(:file_finder) { double } let(:project_branch) { 'project_branch' } @@ -139,9 +153,41 @@ describe Gitlab::ProjectSearchResults do end end + shared_examples 'blob search pagination' do |blob_type| + let(:per_page) { 20 } + let(:count_limit) { described_class::COUNT_LIMIT } + let(:file_finder) { instance_double('Gitlab::FileFinder') } + let(:results) { described_class.new(user, project, query) } + let(:repository_ref) { 'master' } + + before do + allow(file_finder).to receive(:find).and_return([]) + expect(Gitlab::FileFinder).to receive(:new).with(project, repository_ref).and_return(file_finder) + end + + it 'limits search results based on the first page' do + expect(file_finder).to receive(:find).with(query, content_match_cutoff: count_limit) + results.objects(blob_type, page: 1, per_page: per_page) + end + + it 'limits search results based on the second page' do + expect(file_finder).to receive(:find).with(query, content_match_cutoff: count_limit + per_page) + results.objects(blob_type, page: 2, per_page: per_page) + end + + it 'limits search results based on the third page' do + expect(file_finder).to receive(:find).with(query, content_match_cutoff: count_limit + per_page * 2) + results.objects(blob_type, page: 3, per_page: per_page) + end + + it 'uses the per_page value when passed' do + expect(file_finder).to receive(:find).with(query, content_match_cutoff: count_limit + 10 * 2) + results.objects(blob_type, page: 3, per_page: 10) + end + end + describe 'blob search' do let(:project) { create(:project, :public, :repository) } - let(:blob_type) { 'blobs' } it_behaves_like 'general blob search', 'repository', 'blobs' do let(:disabled_project) { create(:project, :public, :repository, :repository_disabled) } @@ -150,37 +196,11 @@ describe Gitlab::ProjectSearchResults do let(:expected_file_by_content) { 'CHANGELOG' } end - it_behaves_like 'blob search repository ref', 'project' do + it_behaves_like 'blob search repository ref', 'project', 'blobs' do let(:entity) { project } end - context 'pagination' do - let(:per_page) { 20 } - let(:count_limit) { described_class::COUNT_LIMIT } - let(:file_finder) { instance_double('Gitlab::FileFinder') } - let(:results) { described_class.new(user, project, query, per_page: per_page) } - let(:repository_ref) { 'master' } - - before do - allow(file_finder).to receive(:find).and_return([]) - expect(Gitlab::FileFinder).to receive(:new).with(project, repository_ref).and_return(file_finder) - end - - it 'limits search results based on the first page' do - expect(file_finder).to receive(:find).with(query, content_match_cutoff: count_limit) - results.objects(blob_type, 1) - end - - it 'limits search results based on the second page' do - expect(file_finder).to receive(:find).with(query, 
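# The 'blob search pagination' shared example above fixes the content-match cutoff that
# gets passed to Gitlab::FileFinder#find: COUNT_LIMIT on page 1, COUNT_LIMIT + per_page on
# page 2, COUNT_LIMIT + 2 * per_page on page 3, and so on. In other words (helper name is
# an assumption; 100 below is just an example argument, not the real COUNT_LIMIT):
def content_match_cutoff(count_limit, page, per_page)
  count_limit + per_page * (page - 1)
end

content_match_cutoff(100, 1, 20) # => 100
content_match_cutoff(100, 3, 10) # => 120, the "uses the per_page value when passed" case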
content_match_cutoff: count_limit + per_page) - results.objects(blob_type, 2) - end - - it 'limits search results based on the third page' do - expect(file_finder).to receive(:find).with(query, content_match_cutoff: count_limit + per_page * 2) - results.objects(blob_type, 3) - end - end + it_behaves_like 'blob search pagination', 'blobs' end describe 'wiki search' do @@ -192,7 +212,7 @@ describe Gitlab::ProjectSearchResults do wiki.create_page('CHANGELOG', 'Files example') end - it_behaves_like 'general blob search', 'wiki', 'wiki blobs' do + it_behaves_like 'general blob search', 'wiki', 'wiki_blobs' do let(:blob_type) { 'wiki_blobs' } let(:disabled_project) { create(:project, :public, :wiki_repo, :wiki_disabled) } let(:private_project) { create(:project, :public, :wiki_repo, :wiki_private) } @@ -200,10 +220,27 @@ describe Gitlab::ProjectSearchResults do let(:expected_file_by_content) { 'CHANGELOG.md' } end - it_behaves_like 'blob search repository ref', 'wiki' do - let(:blob_type) { 'wiki_blobs' } + it_behaves_like 'blob search repository ref', 'wiki', 'wiki_blobs' do let(:entity) { project.wiki } end + + it_behaves_like 'blob search pagination', 'wiki_blobs' + + context 'return type' do + let(:blobs) { [Gitlab::Search::FoundBlob.new(project: project)] } + let(:results) { described_class.new(user, project, "Files", per_page: 20) } + + before do + allow(results).to receive(:wiki_blobs).and_return(blobs) + end + + it 'returns list of FoundWikiPage type object' do + objects = results.objects('wiki_blobs') + + expect(objects).to be_present + expect(objects).to all(be_a(Gitlab::Search::FoundWikiPage)) + end + end end it 'does not list issues on private projects' do diff --git a/spec/lib/gitlab/prometheus_client_spec.rb b/spec/lib/gitlab/prometheus_client_spec.rb index e869a384b29..4ff53b50a50 100644 --- a/spec/lib/gitlab/prometheus_client_spec.rb +++ b/spec/lib/gitlab/prometheus_client_spec.rb @@ -313,7 +313,7 @@ describe Gitlab::PrometheusClient do req_stub = stub_prometheus_request(query_url, body: prometheus_value_body('vector')) response = subject.proxy('query', { query: prometheus_query }) - json_response = JSON.parse(response.body) + json_response = Gitlab::Json.parse(response.body) expect(response.code).to eq(200) expect(json_response).to eq({ @@ -332,7 +332,7 @@ describe Gitlab::PrometheusClient do req_stub = stub_prometheus_request(query_url, status: 400, body: { error: 'error' }) response = subject.proxy('query', { query: prometheus_query }) - json_response = JSON.parse(response.body) + json_response = Gitlab::Json.parse(response.body) expect(req_stub).to have_been_requested expect(response.code).to eq(400) diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb index 5a2cf2eda8b..9e596400904 100644 --- a/spec/lib/gitlab/regex_spec.rb +++ b/spec/lib/gitlab/regex_spec.rb @@ -130,4 +130,37 @@ describe Gitlab::Regex do it { is_expected.not_to match('aa-1234-cc') } it { is_expected.not_to match('9/9/2018') } end + + describe '.kubernetes_namespace_regex' do + subject { described_class.kubernetes_namespace_regex } + + it { is_expected.to match('foo') } + it { is_expected.to match('foo-bar') } + it { is_expected.to match('1foo-bar') } + it { is_expected.to match('foo-bar2') } + it { is_expected.to match('foo-1bar') } + it { is_expected.not_to match('foo.bar') } + it { is_expected.not_to match('Foo') } + it { is_expected.not_to match('FoO') } + it { is_expected.not_to match('FoO-') } + it { is_expected.not_to match('-foo-') } + it { is_expected.not_to match('foo/bar') } 
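# The .kubernetes_namespace_regex examples above describe an RFC 1123 label: lowercase
# alphanumerics and dashes, starting and ending with an alphanumeric. One pattern that
# satisfies every matcher listed (the actual constant in Gitlab::Regex may differ):
KUBERNETES_NAMESPACE_REGEX = /\A[a-z0-9]([-a-z0-9]*[a-z0-9])?\z/.freeze

%w[foo foo-bar 1foo-bar foo-bar2 foo-1bar].all? { |s| s.match?(KUBERNETES_NAMESPACE_REGEX) }  # => true
%w[foo.bar Foo FoO FoO- -foo- foo/bar].none? { |s| s.match?(KUBERNETES_NAMESPACE_REGEX) }     # => true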
+ end + + describe '.kubernetes_dns_subdomain_regex' do + subject { described_class.kubernetes_dns_subdomain_regex } + + it { is_expected.to match('foo') } + it { is_expected.to match('foo-bar') } + it { is_expected.to match('foo.bar') } + it { is_expected.to match('foo1.bar') } + it { is_expected.to match('foo1.2bar') } + it { is_expected.to match('foo.bar1') } + it { is_expected.to match('1foo.bar1') } + it { is_expected.not_to match('Foo') } + it { is_expected.not_to match('FoO') } + it { is_expected.not_to match('FoO-') } + it { is_expected.not_to match('-foo-') } + it { is_expected.not_to match('foo/bar') } + end end diff --git a/spec/lib/gitlab/repository_url_builder_spec.rb b/spec/lib/gitlab/repository_url_builder_spec.rb index 3d8870ecb53..a5797146cc5 100644 --- a/spec/lib/gitlab/repository_url_builder_spec.rb +++ b/spec/lib/gitlab/repository_url_builder_spec.rb @@ -9,7 +9,7 @@ describe Gitlab::RepositoryUrlBuilder do where(:factory, :path_generator) do :project | ->(project) { project.full_path } :project_snippet | ->(snippet) { "#{snippet.project.full_path}/snippets/#{snippet.id}" } - :project_wiki | ->(wiki) { "#{wiki.project.full_path}.wiki" } + :project_wiki | ->(wiki) { "#{wiki.container.full_path}.wiki" } :personal_snippet | ->(snippet) { "snippets/#{snippet.id}" } end diff --git a/spec/lib/gitlab/request_context_spec.rb b/spec/lib/gitlab/request_context_spec.rb index 7e2e05c9f1b..d7af0765d53 100644 --- a/spec/lib/gitlab/request_context_spec.rb +++ b/spec/lib/gitlab/request_context_spec.rb @@ -5,6 +5,10 @@ require 'spec_helper' describe Gitlab::RequestContext, :request_store do subject { described_class.instance } + before do + allow(subject).to receive(:enabled?).and_return(true) + end + it { is_expected.to have_attributes(client_ip: nil, start_thread_cpu_time: nil, request_start_time: nil) } describe '#request_deadline' do diff --git a/spec/lib/gitlab/runtime_spec.rb b/spec/lib/gitlab/runtime_spec.rb index 34a775fc206..8f920bb2e01 100644 --- a/spec/lib/gitlab/runtime_spec.rb +++ b/spec/lib/gitlab/runtime_spec.rb @@ -105,4 +105,17 @@ describe Gitlab::Runtime do it_behaves_like "valid runtime", :rails_runner, 1 end + + context "action_cable" do + before do + stub_const('ACTION_CABLE_SERVER', true) + stub_const('::Puma', Module.new) + + allow(Gitlab::Application).to receive_message_chain(:config, :action_cable, :worker_pool_size).and_return(8) + end + + it "reports its maximum concurrency based on ActionCable's worker pool size" do + expect(subject.max_threads).to eq(9) + end + end end diff --git a/spec/lib/gitlab/search_results_spec.rb b/spec/lib/gitlab/search_results_spec.rb index 86dde15cc8a..ab14602a468 100644 --- a/spec/lib/gitlab/search_results_spec.rb +++ b/spec/lib/gitlab/search_results_spec.rb @@ -28,7 +28,15 @@ describe Gitlab::SearchResults do end it 'returns with counts collection when requested' do - expect(results.objects('projects', 1, false)).not_to be_kind_of(Kaminari::PaginatableWithoutCount) + expect(results.objects('projects', page: 1, per_page: 1, without_count: false)).not_to be_kind_of(Kaminari::PaginatableWithoutCount) + end + + it 'uses page and per_page to paginate results' do + project2 = create(:project, name: 'foo') + + expect(results.objects('projects', page: 1, per_page: 1).to_a).to eq([project]) + expect(results.objects('projects', page: 2, per_page: 1).to_a).to eq([project2]) + expect(results.objects('projects', page: 1, per_page: 2).count).to eq(2) end end diff --git a/spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb 
b/spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb index 0aaff12f278..80e8da58f23 100644 --- a/spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb +++ b/spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb @@ -54,14 +54,6 @@ describe Gitlab::SidekiqConfig::CliMethods do end end - context 'when the file contains an array of strings' do - before do - stub_contents(['queue_a'], ['queue_b']) - end - - include_examples 'valid file contents' - end - context 'when the file contains an array of hashes' do before do stub_contents([{ name: 'queue_a' }], [{ name: 'queue_b' }]) diff --git a/spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb b/spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb index 2f5343627d8..283140d7fdf 100644 --- a/spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb +++ b/spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb @@ -9,7 +9,7 @@ describe Gitlab::SidekiqLogging::JSONFormatter do let(:timestamp_iso8601) { now.iso8601(3) } describe 'with a Hash' do - subject { JSON.parse(described_class.new.call('INFO', now, 'my program', hash_input)) } + subject { Gitlab::Json.parse(described_class.new.call('INFO', now, 'my program', hash_input)) } let(:hash_input) do { @@ -34,7 +34,8 @@ describe Gitlab::SidekiqLogging::JSONFormatter do 'started_at' => timestamp_iso8601, 'retried_at' => timestamp_iso8601, 'failed_at' => timestamp_iso8601, - 'completed_at' => timestamp_iso8601 + 'completed_at' => timestamp_iso8601, + 'retry' => 0 } ) @@ -57,13 +58,33 @@ describe Gitlab::SidekiqLogging::JSONFormatter do expect(subject['args']).to eq(["1", "test", "2", %({"test"=>1})]) end + + context 'when the job has a non-integer value for retry' do + using RSpec::Parameterized::TableSyntax + + where(:retry_in_job, :retry_in_logs) do + 3 | 3 + true | 25 + false | 0 + nil | 0 + 'string' | -1 + end + + with_them do + it 'logs as the correct integer' do + hash_input['retry'] = retry_in_job + + expect(subject['retry']).to eq(retry_in_logs) + end + end + end end describe 'with a String' do it 'accepts strings with no changes' do result = subject.call('DEBUG', now, 'my string', message) - data = JSON.parse(result) + data = Gitlab::Json.parse(result) expected_output = { severity: 'DEBUG', time: timestamp_iso8601, diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb index f4b939c3013..a4bbb51baae 100644 --- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb +++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb @@ -44,7 +44,7 @@ describe Gitlab::SidekiqLogging::StructuredLogger do 'job_status' => 'done', 'duration_s' => 0.0, 'completed_at' => timestamp.to_f, - 'cpu_s' => 1.11, + 'cpu_s' => 1.111112, 'db_duration_s' => 0.0 ) end @@ -218,13 +218,34 @@ describe Gitlab::SidekiqLogging::StructuredLogger do subject.call(job, 'test_queue') { } end end + + context 'when there is extra metadata set for the done log' do + let(:expected_start_payload) { start_payload.except('args') } + + let(:expected_end_payload) do + end_payload.except('args').merge("#{ApplicationWorker::LOGGING_EXTRA_KEY}.key1" => 15, "#{ApplicationWorker::LOGGING_EXTRA_KEY}.key2" => 16) + end + + it 'logs it in the done log' do + Timecop.freeze(timestamp) do + expect(logger).to receive(:info).with(expected_start_payload).ordered + expect(logger).to receive(:info).with(expected_end_payload).ordered + + subject.call(job, 'test_queue') do + job["#{ApplicationWorker::LOGGING_EXTRA_KEY}.key1"] = 15 + job["#{ApplicationWorker::LOGGING_EXTRA_KEY}.key2"] 
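# The table above pins down how the Sidekiq JSON formatter normalises the job's 'retry'
# attribute before logging: integers pass through, `true` becomes 25 (Sidekiq's default
# retry count), `false` and `nil` become 0, and anything else becomes -1. Translated
# directly (the method name is an assumption):
def normalize_retry(value)
  case value
  when Integer    then value
  when true       then 25
  when false, nil then 0
  else -1
  end
end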
= 16 + job['key that will be ignored because it does not start with extra.'] = 17 + end + end + end + end end describe '#add_time_keys!' do let(:time) { { duration: 0.1231234, cputime: 1.2342345 } } let(:payload) { { 'class' => 'my-class', 'message' => 'my-message', 'job_status' => 'my-job-status' } } let(:current_utc_time) { Time.now.utc } - let(:payload_with_time_keys) { { 'class' => 'my-class', 'message' => 'my-message', 'job_status' => 'my-job-status', 'duration_s' => 0.12, 'cpu_s' => 1.23, 'completed_at' => current_utc_time.to_f } } + let(:payload_with_time_keys) { { 'class' => 'my-class', 'message' => 'my-message', 'job_status' => 'my-job-status', 'duration_s' => 0.123123, 'cpu_s' => 1.234235, 'completed_at' => current_utc_time.to_f } } subject { described_class.new } diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb index 6e8a8c03aad..929df0a7ffb 100644 --- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb +++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb @@ -113,22 +113,27 @@ describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gitlab_r end describe 'droppable?' do - where(:idempotent, :duplicate) do - # [true, false].repeated_permutation(2) - [[true, true], - [true, false], - [false, true], - [false, false]] + where(:idempotent, :duplicate, :prevent_deduplication) do + # [true, false].repeated_permutation(3) + [[true, true, true], + [true, true, false], + [true, false, true], + [true, false, false], + [false, true, true], + [false, true, false], + [false, false, true], + [false, false, false]] end with_them do before do allow(AuthorizedProjectsWorker).to receive(:idempotent?).and_return(idempotent) allow(duplicate_job).to receive(:duplicate?).and_return(duplicate) + stub_feature_flags("disable_#{queue}_deduplication" => prevent_deduplication) end it 'is droppable when all conditions are met' do - if idempotent && duplicate + if idempotent && duplicate && !prevent_deduplication expect(duplicate_job).to be_droppable else expect(duplicate_job).not_to be_droppable diff --git a/spec/lib/gitlab/sidekiq_middleware/extra_done_log_metadata_spec.rb b/spec/lib/gitlab/sidekiq_middleware/extra_done_log_metadata_spec.rb new file mode 100644 index 00000000000..98847885e62 --- /dev/null +++ b/spec/lib/gitlab/sidekiq_middleware/extra_done_log_metadata_spec.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::SidekiqMiddleware::ExtraDoneLogMetadata do + # Cannot use Class.new for this as ApplicationWorker will need the class to + # have a name during `included do`. 
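# The expanded droppable? truth table above adds a per-queue escape hatch: a job is only
# dropped when the worker is idempotent, a duplicate is already queued, and the
# `disable_<queue>_deduplication` feature flag is not enabled. Expressed directly (method
# placement and the `queue_name` accessor are assumptions):
def droppable?
  idempotent? && duplicate? && !Feature.enabled?("disable_#{queue_name}_deduplication")
end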
+ let(:worker) { AdminEmailWorker.new } + + let(:worker_without_application_worker) do + Class.new do + end.new + end + + subject { described_class.new } + + let(:job) { { 'jid' => 123 } } + let(:queue) { 'test_queue' } + + describe '#call' do + it 'merges Application#logging_extras in to job' do + worker.log_extra_metadata_on_done(:key1, 15) + worker.log_extra_metadata_on_done(:key2, 16) + expect { |b| subject.call(worker, job, queue, &b) }.to yield_control + + expect(job).to eq({ 'jid' => 123, 'extra.admin_email_worker.key1' => 15, 'extra.admin_email_worker.key2' => 16 }) + end + + it 'does not raise when the worker does not respond to #done_log_extra_metadata' do + expect { |b| subject.call(worker_without_application_worker, job, queue, &b) }.to yield_control + + expect(job).to eq({ 'jid' => 123 }) + end + end +end diff --git a/spec/lib/gitlab/sidekiq_middleware_spec.rb b/spec/lib/gitlab/sidekiq_middleware_spec.rb index 752ec6a0a3f..6fe61fb42a5 100644 --- a/spec/lib/gitlab/sidekiq_middleware_spec.rb +++ b/spec/lib/gitlab/sidekiq_middleware_spec.rb @@ -4,12 +4,17 @@ require 'spec_helper' require 'sidekiq/testing' describe Gitlab::SidekiqMiddleware do - class TestWorker - include Sidekiq::Worker + before do + stub_const('TestWorker', Class.new) - def perform(_arg) - Gitlab::SafeRequestStore['gitaly_call_actual'] = 1 - Gitlab::GitalyClient.query_time = 5 + TestWorker.class_eval do + include Sidekiq::Worker + include ApplicationWorker + + def perform(_arg) + Gitlab::SafeRequestStore['gitaly_call_actual'] = 1 + Gitlab::GitalyClient.query_time = 5 + end end end @@ -32,8 +37,7 @@ describe Gitlab::SidekiqMiddleware do described_class.server_configurator( metrics: metrics, arguments_logger: arguments_logger, - memory_killer: memory_killer, - request_store: request_store + memory_killer: memory_killer ).call(chain) example.run @@ -52,6 +56,7 @@ describe Gitlab::SidekiqMiddleware do Gitlab::SidekiqMiddleware::ArgumentsLogger, Gitlab::SidekiqMiddleware::MemoryKiller, Gitlab::SidekiqMiddleware::RequestStoreMiddleware, + Gitlab::SidekiqMiddleware::ExtraDoneLogMetadata, Gitlab::SidekiqMiddleware::WorkerContext::Server, Gitlab::SidekiqMiddleware::AdminMode::Server, Gitlab::SidekiqMiddleware::DuplicateJobs::Server @@ -77,13 +82,11 @@ describe Gitlab::SidekiqMiddleware do let(:metrics) { false } let(:arguments_logger) { false } let(:memory_killer) { false } - let(:request_store) { false } let(:disabled_sidekiq_middlewares) do [ Gitlab::SidekiqMiddleware::ServerMetrics, Gitlab::SidekiqMiddleware::ArgumentsLogger, - Gitlab::SidekiqMiddleware::MemoryKiller, - Gitlab::SidekiqMiddleware::RequestStoreMiddleware + Gitlab::SidekiqMiddleware::MemoryKiller ] end @@ -94,7 +97,6 @@ describe Gitlab::SidekiqMiddleware do let(:metrics) { true } let(:arguments_logger) { true } let(:memory_killer) { true } - let(:request_store) { true } let(:disabled_sidekiq_middlewares) { [] } it_behaves_like "a server middleware chain" diff --git a/spec/lib/gitlab/snippet_search_results_spec.rb b/spec/lib/gitlab/snippet_search_results_spec.rb index 47f26fdebe2..a41be0eaa95 100644 --- a/spec/lib/gitlab/snippet_search_results_spec.rb +++ b/spec/lib/gitlab/snippet_search_results_spec.rb @@ -5,7 +5,7 @@ require 'spec_helper' describe Gitlab::SnippetSearchResults do include SearchHelpers - let!(:snippet) { create(:snippet, content: 'foo', file_name: 'foo') } + let_it_be(:snippet) { create(:snippet, content: 'foo', file_name: 'foo') } let(:results) { described_class.new(snippet.author, 'foo') } describe '#snippet_titles_count' do @@ -14,27 
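# ExtraDoneLogMetadata (the new spec above) is a Sidekiq server middleware that copies
# whatever a worker recorded via #log_extra_metadata_on_done into the job hash once the
# job body has run, namespaced as "extra.<underscored worker name>.<key>", and quietly
# skips workers that don't include ApplicationWorker. A hedged sketch; the `logging_extras`
# accessor and where the namespacing happens are assumptions about the non-spec side:
def call(worker, job, _queue)
  result = yield

  # e.g. merges { "extra.admin_email_worker.key1" => 15 } alongside 'jid' and friends
  job.merge!(worker.logging_extras) if worker.respond_to?(:logging_extras)

  result
end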
+14,20 @@ describe Gitlab::SnippetSearchResults do end end - describe '#snippet_blobs_count' do - it 'returns the amount of matched snippet blobs' do - expect(results.limited_snippet_blobs_count).to eq(1) + describe '#formatted_count' do + it 'returns the expected formatted count' do + expect(results).to receive(:limited_snippet_titles_count).and_return(1234) + expect(results.formatted_count('snippet_titles')).to eq(max_limited_count) end end - describe '#formatted_count' do - using RSpec::Parameterized::TableSyntax - - where(:scope, :count_method, :expected) do - 'snippet_titles' | :limited_snippet_titles_count | max_limited_count - 'snippet_blobs' | :limited_snippet_blobs_count | max_limited_count - 'projects' | :limited_projects_count | max_limited_count - 'unknown' | nil | nil - end + describe '#objects' do + it 'uses page and per_page to paginate results' do + snippet2 = create(:snippet, :public, content: 'foo', file_name: 'foo') - with_them do - it 'returns the expected formatted count' do - expect(results).to receive(count_method).and_return(1234) if count_method - expect(results.formatted_count(scope)).to eq(expected) - end + expect(results.objects('snippet_titles', page: 1, per_page: 1).to_a).to eq([snippet2]) + expect(results.objects('snippet_titles', page: 2, per_page: 1).to_a).to eq([snippet]) + expect(results.objects('snippet_titles', page: 1, per_page: 2).count).to eq(2) end end end diff --git a/spec/lib/gitlab/static_site_editor/config_spec.rb b/spec/lib/gitlab/static_site_editor/config_spec.rb index 8f61476722d..a1db567db1a 100644 --- a/spec/lib/gitlab/static_site_editor/config_spec.rb +++ b/spec/lib/gitlab/static_site_editor/config_spec.rb @@ -5,9 +5,10 @@ require 'spec_helper' describe Gitlab::StaticSiteEditor::Config do subject(:config) { described_class.new(repository, ref, file_path, return_url) } - let(:project) { create(:project, :public, :repository, name: 'project', namespace: namespace) } - let(:namespace) { create(:namespace, name: 'namespace') } - let(:repository) { project.repository } + let_it_be(:namespace) { create(:namespace, name: 'namespace') } + let_it_be(:project) { create(:project, :public, :repository, name: 'project', namespace: namespace) } + let_it_be(:repository) { project.repository } + let(:ref) { 'master' } let(:file_path) { 'README.md' } let(:return_url) { 'http://example.com' } @@ -24,38 +25,45 @@ describe Gitlab::StaticSiteEditor::Config do project: 'project', project_id: project.id, return_url: 'http://example.com', - is_supported_content: true + is_supported_content: 'true', + base_url: '/namespace/project/-/sse/master%2FREADME.md' ) end + context 'when file path is nested' do + let(:file_path) { 'lib/README.md' } + + it { is_expected.to include(base_url: '/namespace/project/-/sse/master%2Flib%2FREADME.md') } + end + context 'when branch is not master' do let(:ref) { 'my-branch' } - it { is_expected.to include(is_supported_content: false) } + it { is_expected.to include(is_supported_content: 'false') } end context 'when file does not have a markdown extension' do let(:file_path) { 'README.txt' } - it { is_expected.to include(is_supported_content: false) } + it { is_expected.to include(is_supported_content: 'false') } end context 'when file does not have an extension' do let(:file_path) { 'README' } - it { is_expected.to include(is_supported_content: false) } + it { is_expected.to include(is_supported_content: 'false') } end context 'when file does not exist' do let(:file_path) { 'UNKNOWN.md' } - it { is_expected.to 
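# The Static Site Editor config above now returns string booleans for the frontend and a
# base_url whose ref and file path are URL-encoded into a single segment:
# 'master' + 'README.md' becomes '/namespace/project/-/sse/master%2FREADME.md', and the
# nested 'lib/README.md' becomes 'master%2Flib%2FREADME.md'. A sketch of that encoding
# (the helper name is an assumption; only the encoded segment is shown by the spec):
require 'erb'

def sse_base_url(project_full_path, ref, file_path)
  "/#{project_full_path}/-/sse/#{ERB::Util.url_encode("#{ref}/#{file_path}")}"
end

sse_base_url('namespace/project', 'master', 'lib/README.md')
# => "/namespace/project/-/sse/master%2Flib%2FREADME.md"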
include(is_supported_content: false) } + it { is_expected.to include(is_supported_content: 'false') } end context 'when repository is empty' do - let(:project) { create(:project_empty_repo) } + let(:repository) { create(:project_empty_repo).repository } - it { is_expected.to include(is_supported_content: false) } + it { is_expected.to include(is_supported_content: 'false') } end end end diff --git a/spec/lib/gitlab/throttle_spec.rb b/spec/lib/gitlab/throttle_spec.rb index 674646a5f06..e3679a1a721 100644 --- a/spec/lib/gitlab/throttle_spec.rb +++ b/spec/lib/gitlab/throttle_spec.rb @@ -6,82 +6,10 @@ describe Gitlab::Throttle do describe '.protected_paths_enabled?' do subject { described_class.protected_paths_enabled? } - context 'when omnibus protected paths throttle should be used' do - before do - expect(described_class).to receive(:should_use_omnibus_protected_paths?).and_return(true) - end + it 'returns Application Settings throttle_protected_paths_enabled?' do + expect(Gitlab::CurrentSettings.current_application_settings).to receive(:throttle_protected_paths_enabled?) - it { is_expected.to be_falsey } - end - - context 'when omnibus protected paths throttle should not be used' do - before do - expect(described_class).to receive(:should_use_omnibus_protected_paths?).and_return(false) - end - - it 'returns Application Settings throttle_protected_paths_enabled?' do - expect(Gitlab::CurrentSettings.current_application_settings).to receive(:throttle_protected_paths_enabled?) - - subject - end - end - end - - describe '.should_use_omnibus_protected_paths?' do - subject { described_class.should_use_omnibus_protected_paths? } - - context 'when rack_attack.admin_area_protected_paths_enabled config is unspecified' do - context 'when the omnibus protected paths throttle has been recently used (it has data)' do - before do - expect(described_class).to receive(:omnibus_protected_paths_present?).and_return(true) - end - - it { is_expected.to be_truthy } - end - - context 'when the omnibus protected paths throttle has not been recently used' do - before do - expect(described_class).to receive(:omnibus_protected_paths_present?).and_return(false) - end - - it { is_expected.to be_falsey } - end - end - - context 'when rack_attack.admin_area_protected_paths_enabled config is false' do - before do - stub_config(rack_attack: { - admin_area_protected_paths_enabled: false - }) - end - - context 'when the omnibus protected paths throttle has been recently used (it has data)' do - before do - expect(described_class).to receive(:omnibus_protected_paths_present?).and_return(true) - end - - it { is_expected.to be_truthy } - end - - context 'when the omnibus protected paths throttle has not been recently used' do - before do - expect(described_class).to receive(:omnibus_protected_paths_present?).and_return(false) - end - - it { is_expected.to be_falsey } - end - end - - context 'when rack_attack.admin_area_protected_paths_enabled config is true' do - before do - stub_config(rack_attack: { - admin_area_protected_paths_enabled: true - }) - - expect(described_class).not_to receive(:omnibus_protected_paths_present?) 
- end - - it { is_expected.to be_falsey } + subject end end end diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb index efb07d9dc95..2e65f98a085 100644 --- a/spec/lib/gitlab/tracking_spec.rb +++ b/spec/lib/gitlab/tracking_spec.rb @@ -28,17 +28,13 @@ describe Gitlab::Tracking do end it 'enables features using feature flags' do - stub_feature_flags(additional_snowplow_tracking: true) - allow(Feature).to receive(:enabled?).with( - :additional_snowplow_tracking, - '_group_' - ).and_return(false) + stub_feature_flags(additional_snowplow_tracking: :__group__) addition_feature_fields = { formTracking: false, linkClickTracking: false } - expect(subject.snowplow_options('_group_')).to include(addition_feature_fields) + expect(subject.snowplow_options(:_group_)).to include(addition_feature_fields) end end diff --git a/spec/lib/gitlab/tree_summary_spec.rb b/spec/lib/gitlab/tree_summary_spec.rb index d64b826ba9b..593b8655e80 100644 --- a/spec/lib/gitlab/tree_summary_spec.rb +++ b/spec/lib/gitlab/tree_summary_spec.rb @@ -8,12 +8,13 @@ describe Gitlab::TreeSummary do let(:project) { create(:project, :empty_repo) } let(:repo) { project.repository } let(:commit) { repo.head_commit } + let_it_be(:user) { create(:user) } let(:path) { nil } let(:offset) { nil } let(:limit) { nil } - subject(:summary) { described_class.new(commit, project, path: path, offset: offset, limit: limit) } + subject(:summary) { described_class.new(commit, project, user, path: path, offset: offset, limit: limit) } describe '#initialize' do it 'defaults offset to 0' do @@ -72,7 +73,8 @@ describe Gitlab::TreeSummary do expected_commit_path = Gitlab::Routing.url_helpers.project_commit_path(project, commit) expect(entry[:commit]).to be_a(::Commit) - expect(entry[:commit_path]).to eq expected_commit_path + expect(entry[:commit_path]).to eq(expected_commit_path) + expect(entry[:commit_title_html]).to eq(commit.message) end context 'in a good subdirectory' do @@ -140,6 +142,16 @@ describe Gitlab::TreeSummary do expect(entry).to include(:commit) end end + + context 'rendering commits' do + it 'does not perform N + 1 request' do + summary + + queries = ActiveRecord::QueryRecorder.new { summary.summarize } + + expect(queries.count).to be <= 3 + end + end end describe '#more?' 
do diff --git a/spec/lib/gitlab/url_builder_spec.rb b/spec/lib/gitlab/url_builder_spec.rb index 1b23f331b89..66826bcb3b1 100644 --- a/spec/lib/gitlab/url_builder_spec.rb +++ b/spec/lib/gitlab/url_builder_spec.rb @@ -23,8 +23,9 @@ describe Gitlab::UrlBuilder do :merge_request | ->(merge_request) { "/#{merge_request.project.full_path}/-/merge_requests/#{merge_request.iid}" } :project_milestone | ->(milestone) { "/#{milestone.project.full_path}/-/milestones/#{milestone.iid}" } :project_snippet | ->(snippet) { "/#{snippet.project.full_path}/snippets/#{snippet.id}" } - :project_wiki | ->(wiki) { "/#{wiki.project.full_path}/-/wikis/home" } + :project_wiki | ->(wiki) { "/#{wiki.container.full_path}/-/wikis/home" } :ci_build | ->(build) { "/#{build.project.full_path}/-/jobs/#{build.id}" } + :design | ->(design) { "/#{design.project.full_path}/-/design_management/designs/#{design.id}/raw_image" } :group | ->(group) { "/groups/#{group.full_path}" } :group_milestone | ->(milestone) { "/groups/#{milestone.group.full_path}/-/milestones/#{milestone.iid}" } @@ -95,6 +96,16 @@ describe Gitlab::UrlBuilder do end end + context 'when passing a DesignManagement::Design' do + let(:design) { build_stubbed(:design) } + + it 'uses the given ref and size in the URL' do + url = subject.build(design, ref: 'feature', size: 'small') + + expect(url).to eq "#{Settings.gitlab['url']}/#{design.project.full_path}/-/design_management/designs/#{design.id}/feature/resized_image/small" + end + end + context 'when passing an unsupported class' do let(:object) { Object.new } diff --git a/spec/lib/gitlab/usage_data_counters/designs_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/designs_counter_spec.rb new file mode 100644 index 00000000000..deaf7ebc7f3 --- /dev/null +++ b/spec/lib/gitlab/usage_data_counters/designs_counter_spec.rb @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::UsageDataCounters::DesignsCounter do + it_behaves_like 'a redis usage counter', 'Designs', :create + it_behaves_like 'a redis usage counter', 'Designs', :update + it_behaves_like 'a redis usage counter', 'Designs', :delete + + it_behaves_like 'a redis usage counter with totals', :design_management_designs, + create: 5, + update: 3, + delete: 2 +end diff --git a/spec/lib/gitlab/usage_data_counters/web_ide_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/web_ide_counter_spec.rb index 96ebeb8ff76..42abbecead0 100644 --- a/spec/lib/gitlab/usage_data_counters/web_ide_counter_spec.rb +++ b/spec/lib/gitlab/usage_data_counters/web_ide_counter_spec.rb @@ -3,35 +3,35 @@ require 'spec_helper' describe Gitlab::UsageDataCounters::WebIdeCounter, :clean_gitlab_redis_shared_state do - shared_examples 'counter examples' do + shared_examples 'counter examples' do |event| it 'increments counter and return the total count' do - expect(described_class.public_send(total_counter_method)).to eq(0) + expect(described_class.public_send(:total_count, event)).to eq(0) - 2.times { described_class.public_send(increment_counter_method) } + 2.times { described_class.public_send(:"increment_#{event}_count") } - expect(described_class.public_send(total_counter_method)).to eq(2) + redis_key = "web_ide_#{event}_count".upcase + expect(described_class.public_send(:total_count, redis_key)).to eq(2) end end describe 'commits counter' do - let(:increment_counter_method) { :increment_commits_count } - let(:total_counter_method) { :total_commits_count } - - it_behaves_like 'counter examples' + it_behaves_like 'counter examples', 'commits' end 
describe 'merge requests counter' do - let(:increment_counter_method) { :increment_merge_requests_count } - let(:total_counter_method) { :total_merge_requests_count } - - it_behaves_like 'counter examples' + it_behaves_like 'counter examples', 'merge_requests' end describe 'views counter' do - let(:increment_counter_method) { :increment_views_count } - let(:total_counter_method) { :total_views_count } + it_behaves_like 'counter examples', 'views' + end - it_behaves_like 'counter examples' + describe 'terminals counter' do + it_behaves_like 'counter examples', 'terminals' + end + + describe 'pipelines counter' do + it_behaves_like 'counter examples', 'pipelines' end describe 'previews counter' do @@ -42,21 +42,19 @@ describe Gitlab::UsageDataCounters::WebIdeCounter, :clean_gitlab_redis_shared_st end context 'when web ide clientside preview is enabled' do - let(:increment_counter_method) { :increment_previews_count } - let(:total_counter_method) { :total_previews_count } - - it_behaves_like 'counter examples' + it_behaves_like 'counter examples', 'previews' end context 'when web ide clientside preview is not enabled' do let(:setting_enabled) { false } it 'does not increment the counter' do - expect(described_class.total_previews_count).to eq(0) + redis_key = 'WEB_IDE_PREVIEWS_COUNT' + expect(described_class.total_count(redis_key)).to eq(0) 2.times { described_class.increment_previews_count } - expect(described_class.total_previews_count).to eq(0) + expect(described_class.total_count(redis_key)).to eq(0) end end end @@ -66,6 +64,8 @@ describe Gitlab::UsageDataCounters::WebIdeCounter, :clean_gitlab_redis_shared_st merge_requests = 3 views = 2 previews = 4 + terminals = 1 + pipelines = 2 before do stub_application_setting(web_ide_clientside_preview_enabled: true) @@ -74,6 +74,8 @@ describe Gitlab::UsageDataCounters::WebIdeCounter, :clean_gitlab_redis_shared_st merge_requests.times { described_class.increment_merge_requests_count } views.times { described_class.increment_views_count } previews.times { described_class.increment_previews_count } + terminals.times { described_class.increment_terminals_count } + pipelines.times { described_class.increment_pipelines_count } end it 'can report all totals' do @@ -81,7 +83,8 @@ describe Gitlab::UsageDataCounters::WebIdeCounter, :clean_gitlab_redis_shared_st web_ide_commits: commits, web_ide_views: views, web_ide_merge_requests: merge_requests, - web_ide_previews: previews + web_ide_previews: previews, + web_ide_terminals: terminals ) end end diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb index a46778bb6c3..9c6aab10083 100644 --- a/spec/lib/gitlab/usage_data_spec.rb +++ b/spec/lib/gitlab/usage_data_spec.rb @@ -7,6 +7,8 @@ describe Gitlab::UsageData, :aggregate_failures do before do allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false) + + stub_object_store_settings end shared_examples "usage data execution" do @@ -42,6 +44,9 @@ describe Gitlab::UsageData, :aggregate_failures do expect(count_data[:projects_jira_active]).to eq(4) expect(count_data[:projects_jira_server_active]).to eq(2) expect(count_data[:projects_jira_cloud_active]).to eq(2) + expect(count_data[:jira_imports_projects_count]).to eq(2) + expect(count_data[:jira_imports_total_imported_count]).to eq(3) + expect(count_data[:jira_imports_total_imported_issues_count]).to eq(13) expect(count_data[:projects_slack_notifications_active]).to eq(2) expect(count_data[:projects_slack_slash_active]).to eq(1) 
expect(count_data[:projects_slack_active]).to eq(2) @@ -57,6 +62,9 @@ describe Gitlab::UsageData, :aggregate_failures do expect(count_data[:issues_using_zoom_quick_actions]).to eq(3) expect(count_data[:issues_with_embedded_grafana_charts_approx]).to eq(2) expect(count_data[:incident_issues]).to eq(4) + expect(count_data[:issues_created_gitlab_alerts]).to eq(1) + expect(count_data[:alert_bot_incident_issues]).to eq(4) + expect(count_data[:incident_labeled_issues]).to eq(3) expect(count_data[:clusters_enabled]).to eq(6) expect(count_data[:project_clusters_enabled]).to eq(4) @@ -82,6 +90,56 @@ describe Gitlab::UsageData, :aggregate_failures do expect(count_data[:clusters_management_project]).to eq(1) end + it 'gathers object store usage correctly' do + expect(subject[:object_store]).to eq( + { artifacts: { enabled: true, object_store: { enabled: true, direct_upload: true, background_upload: false, provider: "AWS" } }, + external_diffs: { enabled: false }, + lfs: { enabled: true, object_store: { enabled: false, direct_upload: true, background_upload: false, provider: "AWS" } }, + uploads: { enabled: nil, object_store: { enabled: false, direct_upload: true, background_upload: false, provider: "AWS" } }, + packages: { enabled: true, object_store: { enabled: false, direct_upload: false, background_upload: true, provider: "AWS" } } } + ) + end + + context 'with existing container expiration policies' do + let_it_be(:disabled) { create(:container_expiration_policy, enabled: false) } + let_it_be(:enabled) { create(:container_expiration_policy, enabled: true) } + + %i[keep_n cadence older_than].each do |attribute| + ContainerExpirationPolicy.send("#{attribute}_options").keys.each do |value| + let_it_be("container_expiration_policy_with_#{attribute}_set_to_#{value}") { create(:container_expiration_policy, attribute => value) } + end + end + + let(:inactive_policies) { ::ContainerExpirationPolicy.where(enabled: false) } + let(:active_policies) { ::ContainerExpirationPolicy.active } + + subject { described_class.data[:counts] } + + it 'gathers usage data' do + expect(subject[:projects_with_expiration_policy_enabled]).to eq 20 + expect(subject[:projects_with_expiration_policy_disabled]).to eq 1 + + expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_unset]).to eq 14 + expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_1]).to eq 1 + expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_5]).to eq 1 + expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_10]).to eq 1 + expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_25]).to eq 1 + expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_50]).to eq 1 + + expect(subject[:projects_with_expiration_policy_enabled_with_older_than_unset]).to eq 16 + expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_7d]).to eq 1 + expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_14d]).to eq 1 + expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_30d]).to eq 1 + expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_90d]).to eq 1 + + expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_1d]).to eq 12 + expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_7d]).to eq 5 + expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_14d]).to eq 1 + 
expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_1month]).to eq 1 + expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_3month]).to eq 1 + end + end + it 'works when queries time out' do allow_any_instance_of(ActiveRecord::Relation) .to receive(:count).and_raise(ActiveRecord::StatementInvalid.new('')) @@ -101,6 +159,7 @@ describe Gitlab::UsageData, :aggregate_failures do subject { described_class.usage_data_counters } it { is_expected.to all(respond_to :totals) } + it { is_expected.to all(respond_to :fallback_totals) } describe 'the results of calling #totals on all objects in the array' do subject { described_class.usage_data_counters.map(&:totals) } @@ -109,6 +168,13 @@ describe Gitlab::UsageData, :aggregate_failures do it { is_expected.to all(have_attributes(keys: all(be_a Symbol), values: all(be_a Integer))) } end + describe 'the results of calling #fallback_totals on all objects in the array' do + subject { described_class.usage_data_counters.map(&:fallback_totals) } + + it { is_expected.to all(be_a Hash) } + it { is_expected.to all(have_attributes(keys: all(be_a Symbol), values: all(eq(-1)))) } + end + it 'does not have any conflicts' do all_keys = subject.flat_map { |counter| counter.totals.keys } @@ -128,6 +194,14 @@ describe Gitlab::UsageData, :aggregate_failures do end end + describe '.recording_ce_finished_at' do + subject { described_class.recording_ce_finish_data } + + it 'gathers time ce recording finishes at' do + expect(subject[:recording_ce_finished_at]).to be_a(Time) + end + end + context 'when not relying on database records' do describe '#features_usage_data_ce' do subject { described_class.features_usage_data_ce } @@ -143,42 +217,20 @@ describe Gitlab::UsageData, :aggregate_failures do expect(subject[:dependency_proxy_enabled]).to eq(Gitlab.config.dependency_proxy.enabled) expect(subject[:gitlab_shared_runners_enabled]).to eq(Gitlab.config.gitlab_ci.shared_runners_enabled) expect(subject[:web_ide_clientside_preview_enabled]).to eq(Gitlab::CurrentSettings.web_ide_clientside_preview_enabled?) + expect(subject[:grafana_link_enabled]).to eq(Gitlab::CurrentSettings.grafana_enabled?) 
end - context 'with existing container expiration policies' do - let_it_be(:disabled) { create(:container_expiration_policy, enabled: false) } - let_it_be(:enabled) { create(:container_expiration_policy, enabled: true) } - %i[keep_n cadence older_than].each do |attribute| - ContainerExpirationPolicy.send("#{attribute}_options").keys.each do |value| - let_it_be("container_expiration_policy_with_#{attribute}_set_to_#{value}") { create(:container_expiration_policy, attribute => value) } - end + context 'with embedded grafana' do + it 'returns true when embedded grafana is enabled' do + stub_application_setting(grafana_enabled: true) + + expect(subject[:grafana_link_enabled]).to eq(true) end - let(:inactive_policies) { ::ContainerExpirationPolicy.where(enabled: false) } - let(:active_policies) { ::ContainerExpirationPolicy.active } - - it 'gathers usage data' do - expect(subject[:projects_with_expiration_policy_enabled]).to eq 16 - expect(subject[:projects_with_expiration_policy_disabled]).to eq 1 - - expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_unset]).to eq 10 - expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_1]).to eq 1 - expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_5]).to eq 1 - expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_10]).to eq 1 - expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_25]).to eq 1 - expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_50]).to eq 1 - - expect(subject[:projects_with_expiration_policy_enabled_with_older_than_unset]).to eq 12 - expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_7d]).to eq 1 - expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_14d]).to eq 1 - expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_30d]).to eq 1 - expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_90d]).to eq 1 - - expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_1d]).to eq 12 - expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_7d]).to eq 1 - expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_14d]).to eq 1 - expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_1month]).to eq 1 - expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_3month]).to eq 1 + it 'returns false when embedded grafana is disabled' do + stub_application_setting(grafana_enabled: false) + + expect(subject[:grafana_link_enabled]).to eq(false) end end end @@ -223,6 +275,66 @@ describe Gitlab::UsageData, :aggregate_failures do end end + describe '#object_store_config' do + let(:component) { 'lfs' } + + subject { described_class.object_store_config(component) } + + context 'when object_store is not configured' do + it 'returns component enable status only' do + allow(Settings).to receive(:[]).with(component).and_return({ 'enabled' => false }) + + expect(subject).to eq({ enabled: false }) + end + end + + context 'when object_store is configured' do + it 'returns filtered object store config' do + allow(Settings).to receive(:[]).with(component) + .and_return( + { 'enabled' => true, + 'object_store' => + { 'enabled' => true, + 'remote_directory' => component, + 'direct_upload' => true, + 'connection' => + { 'provider' => 'AWS', 'aws_access_key_id' => 'minio', 'aws_secret_access_key' => 'gdk-minio', 'region' => 'gdk', 'endpoint' => 
'http://127.0.0.1:9000', 'path_style' => true }, + 'background_upload' => false, + 'proxy_download' => false } }) + + expect(subject).to eq( + { enabled: true, object_store: { enabled: true, direct_upload: true, background_upload: false, provider: "AWS" } }) + end + end + + context 'when retrieve component setting meets exception' do + it 'returns -1 for component enable status' do + allow(Settings).to receive(:[]).with(component).and_raise(StandardError) + + expect(subject).to eq({ enabled: -1 }) + end + end + end + + describe '#object_store_usage_data' do + subject { described_class.object_store_usage_data } + + it 'fetches object store config of five components' do + %w(artifacts external_diffs lfs uploads packages).each do |component| + expect(described_class).to receive(:object_store_config).with(component).and_return("#{component}_object_store_config") + end + + expect(subject).to eq( + object_store: { + artifacts: 'artifacts_object_store_config', + external_diffs: 'external_diffs_object_store_config', + lfs: 'lfs_object_store_config', + uploads: 'uploads_object_store_config', + packages: 'packages_object_store_config' + }) + end + end + describe '#cycle_analytics_usage_data' do subject { described_class.cycle_analytics_usage_data } @@ -244,18 +356,132 @@ describe Gitlab::UsageData, :aggregate_failures do describe '#ingress_modsecurity_usage' do subject { described_class.ingress_modsecurity_usage } - it 'gathers variable data' do - allow_any_instance_of( - ::Clusters::Applications::IngressModsecurityUsageService - ).to receive(:execute).and_return( - { - ingress_modsecurity_blocking: 1, - ingress_modsecurity_disabled: 2 - } - ) - - expect(subject[:ingress_modsecurity_blocking]).to eq(1) - expect(subject[:ingress_modsecurity_disabled]).to eq(2) + let(:environment) { create(:environment) } + let(:project) { environment.project } + let(:environment_scope) { '*' } + let(:deployment) { create(:deployment, :success, environment: environment, project: project, cluster: cluster) } + let(:cluster) { create(:cluster, environment_scope: environment_scope, projects: [project]) } + let(:ingress_mode) { :modsecurity_blocking } + let!(:ingress) { create(:clusters_applications_ingress, ingress_mode, cluster: cluster) } + + context 'when cluster is disabled' do + let(:cluster) { create(:cluster, :disabled, projects: [project]) } + + it 'gathers ingress data' do + expect(subject[:ingress_modsecurity_logging]).to eq(0) + expect(subject[:ingress_modsecurity_blocking]).to eq(0) + expect(subject[:ingress_modsecurity_disabled]).to eq(0) + expect(subject[:ingress_modsecurity_not_installed]).to eq(0) + end + end + + context 'when deployment is unsuccessful' do + let!(:deployment) { create(:deployment, :failed, environment: environment, project: project, cluster: cluster) } + + it 'gathers ingress data' do + expect(subject[:ingress_modsecurity_logging]).to eq(0) + expect(subject[:ingress_modsecurity_blocking]).to eq(0) + expect(subject[:ingress_modsecurity_disabled]).to eq(0) + expect(subject[:ingress_modsecurity_not_installed]).to eq(0) + end + end + + context 'when deployment is successful' do + let!(:deployment) { create(:deployment, :success, environment: environment, project: project, cluster: cluster) } + + context 'when modsecurity is in blocking mode' do + it 'gathers ingress data' do + expect(subject[:ingress_modsecurity_logging]).to eq(0) + expect(subject[:ingress_modsecurity_blocking]).to eq(1) + expect(subject[:ingress_modsecurity_disabled]).to eq(0) + 
expect(subject[:ingress_modsecurity_not_installed]).to eq(0) + end + end + + context 'when modsecurity is in logging mode' do + let(:ingress_mode) { :modsecurity_logging } + + it 'gathers ingress data' do + expect(subject[:ingress_modsecurity_logging]).to eq(1) + expect(subject[:ingress_modsecurity_blocking]).to eq(0) + expect(subject[:ingress_modsecurity_disabled]).to eq(0) + expect(subject[:ingress_modsecurity_not_installed]).to eq(0) + end + end + + context 'when modsecurity is disabled' do + let(:ingress_mode) { :modsecurity_disabled } + + it 'gathers ingress data' do + expect(subject[:ingress_modsecurity_logging]).to eq(0) + expect(subject[:ingress_modsecurity_blocking]).to eq(0) + expect(subject[:ingress_modsecurity_disabled]).to eq(1) + expect(subject[:ingress_modsecurity_not_installed]).to eq(0) + end + end + + context 'when modsecurity is not installed' do + let(:ingress_mode) { :modsecurity_not_installed } + + it 'gathers ingress data' do + expect(subject[:ingress_modsecurity_logging]).to eq(0) + expect(subject[:ingress_modsecurity_blocking]).to eq(0) + expect(subject[:ingress_modsecurity_disabled]).to eq(0) + expect(subject[:ingress_modsecurity_not_installed]).to eq(1) + end + end + + context 'with multiple projects' do + let(:environment_2) { create(:environment) } + let(:project_2) { environment_2.project } + let(:cluster_2) { create(:cluster, environment_scope: environment_scope, projects: [project_2]) } + let!(:ingress_2) { create(:clusters_applications_ingress, :modsecurity_logging, cluster: cluster_2) } + let!(:deployment_2) { create(:deployment, :success, environment: environment_2, project: project_2, cluster: cluster_2) } + + it 'gathers non-duplicated ingress data' do + expect(subject[:ingress_modsecurity_logging]).to eq(1) + expect(subject[:ingress_modsecurity_blocking]).to eq(1) + expect(subject[:ingress_modsecurity_disabled]).to eq(0) + expect(subject[:ingress_modsecurity_not_installed]).to eq(0) + end + end + + context 'with multiple deployments' do + let!(:deployment_2) { create(:deployment, :success, environment: environment, project: project, cluster: cluster) } + + it 'gathers non-duplicated ingress data' do + expect(subject[:ingress_modsecurity_logging]).to eq(0) + expect(subject[:ingress_modsecurity_blocking]).to eq(1) + expect(subject[:ingress_modsecurity_disabled]).to eq(0) + expect(subject[:ingress_modsecurity_not_installed]).to eq(0) + end + end + + context 'with multiple projects' do + let(:environment_2) { create(:environment) } + let(:project_2) { environment_2.project } + let!(:deployment_2) { create(:deployment, :success, environment: environment_2, project: project_2, cluster: cluster) } + let(:cluster) { create(:cluster, environment_scope: environment_scope, projects: [project, project_2]) } + + it 'gathers ingress data' do + expect(subject[:ingress_modsecurity_logging]).to eq(0) + expect(subject[:ingress_modsecurity_blocking]).to eq(2) + expect(subject[:ingress_modsecurity_disabled]).to eq(0) + expect(subject[:ingress_modsecurity_not_installed]).to eq(0) + end + end + + context 'with multiple environments' do + let!(:environment_2) { create(:environment, project: project) } + let!(:deployment_2) { create(:deployment, :success, environment: environment_2, project: project, cluster: cluster) } + + it 'gathers ingress data' do + expect(subject[:ingress_modsecurity_logging]).to eq(0) + expect(subject[:ingress_modsecurity_blocking]).to eq(2) + expect(subject[:ingress_modsecurity_disabled]).to eq(0) + 
expect(subject[:ingress_modsecurity_not_installed]).to eq(0)
+ end
+ end
end
end
@@ -334,9 +560,10 @@ describe Gitlab::UsageData, :aggregate_failures do
end
it 'returns the fallback value when counting fails' do
+ stub_const("Gitlab::UsageData::FALLBACK", 15)
allow(relation).to receive(:count).and_raise(ActiveRecord::StatementInvalid.new(''))
- expect(described_class.count(relation, fallback: 15, batch: false)).to eq(15)
+ expect(described_class.count(relation, batch: false)).to eq(15)
end
end
@@ -350,9 +577,10 @@
end
it 'returns the fallback value when counting fails' do
+ stub_const("Gitlab::UsageData::FALLBACK", 15)
allow(relation).to receive(:distinct_count_by).and_raise(ActiveRecord::StatementInvalid.new(''))
- expect(described_class.distinct_count(relation, fallback: 15, batch: false)).to eq(15)
+ expect(described_class.distinct_count(relation, batch: false)).to eq(15)
end
end
end
@@ -387,4 +615,28 @@ describe Gitlab::UsageData, :aggregate_failures do
expect(described_class.alt_usage_data(1)).to eq 1
end
end
+
+ describe '#redis_usage_data' do
+ context 'with block given' do
+ it 'returns the fallback when it gets an error' do
+ expect(described_class.redis_usage_data { raise ::Redis::CommandError } ).to eq(-1)
+ end
+
+ it 'returns the evaluated block when given' do
+ expect(described_class.redis_usage_data { 1 }).to eq(1)
+ end
+ end
+
+ context 'with counter given' do
+ it 'returns the fallback values for all counter keys when it gets an error' do
+ allow(::Gitlab::UsageDataCounters::WikiPageCounter).to receive(:totals).and_raise(::Redis::CommandError)
+ expect(described_class.redis_usage_data(::Gitlab::UsageDataCounters::WikiPageCounter)).to eql(::Gitlab::UsageDataCounters::WikiPageCounter.fallback_totals)
+ end
+
+ it 'returns the totals when counter is given' do
+ allow(::Gitlab::UsageDataCounters::WikiPageCounter).to receive(:totals).and_return({ wiki_pages_create: 2 })
+ expect(described_class.redis_usage_data(::Gitlab::UsageDataCounters::WikiPageCounter)).to eql({ wiki_pages_create: 2 })
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/user_access_snippet_spec.rb b/spec/lib/gitlab/user_access_snippet_spec.rb
index 57e52e2e93d..2e8a0a49a76 100644
--- a/spec/lib/gitlab/user_access_snippet_spec.rb
+++ b/spec/lib/gitlab/user_access_snippet_spec.rb
@@ -7,6 +7,8 @@ describe Gitlab::UserAccessSnippet do
let_it_be(:project) { create(:project, :private) }
let_it_be(:snippet) { create(:project_snippet, :private, project: project) }
+ let_it_be(:migration_bot) { User.migration_bot }
+
let(:user) { create(:user) }
describe '#can_do_action?' do
@@ -36,6 +38,14 @@ describe Gitlab::UserAccessSnippet do
expect(access.can_do_action?(:ability)).to eq(false)
end
end
+
+ context 'when user is migration bot' do
+ let(:user) { migration_bot }
+
+ it 'allows access' do
+ expect(access.can_do_action?(:ability)).to eq(true)
+ end
+ end
end
describe '#can_push_to_branch?'
do @@ -65,6 +75,16 @@ describe Gitlab::UserAccessSnippet do end end + context 'when user is migration bot' do + let(:user) { migration_bot } + + it 'allows access' do + allow(Ability).to receive(:allowed?).and_return(false) + + expect(access.can_push_to_branch?('random_branch')).to eq(true) + end + end + context 'when snippet is nil' do let(:user) { create_user_from_membership(project, :admin) } let(:snippet) { nil } @@ -72,6 +92,14 @@ describe Gitlab::UserAccessSnippet do it 'disallows access' do expect(access.can_push_to_branch?('random_branch')).to eq(false) end + + context 'when user is migration bot' do + let(:user) { migration_bot } + + it 'disallows access' do + expect(access.can_push_to_branch?('random_branch')).to eq(false) + end + end end end @@ -79,17 +107,41 @@ describe Gitlab::UserAccessSnippet do it 'returns false' do expect(access.can_create_tag?('random_tag')).to be_falsey end + + context 'when user is migration bot' do + let(:user) { migration_bot } + + it 'returns false' do + expect(access.can_create_tag?('random_tag')).to be_falsey + end + end end describe '#can_delete_branch?' do it 'returns false' do expect(access.can_delete_branch?('random_branch')).to be_falsey end + + context 'when user is migration bot' do + let(:user) { migration_bot } + + it 'returns false' do + expect(access.can_delete_branch?('random_branch')).to be_falsey + end + end end describe '#can_merge_to_branch?' do it 'returns false' do expect(access.can_merge_to_branch?('random_branch')).to be_falsey end + + context 'when user is migration bot' do + let(:user) { migration_bot } + + it 'returns false' do + expect(access.can_merge_to_branch?('random_branch')).to be_falsey + end + end end end diff --git a/spec/lib/gitlab/utils/measuring_spec.rb b/spec/lib/gitlab/utils/measuring_spec.rb new file mode 100644 index 00000000000..254f53f7da3 --- /dev/null +++ b/spec/lib/gitlab/utils/measuring_spec.rb @@ -0,0 +1,40 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' + +describe Gitlab::Utils::Measuring do + describe '#with_measuring' do + let(:base_log_data) { {} } + let(:result) { "result" } + + before do + allow(ActiveSupport::Logger).to receive(:logger_outputs_to?).with(Gitlab::Utils::Measuring.logger, STDOUT).and_return(false) + end + + let(:measurement) { described_class.new(base_log_data) } + + subject do + measurement.with_measuring { result } + end + + it 'measures and logs data', :aggregate_failure do + expect(measurement).to receive(:with_measure_time).and_call_original + expect(measurement).to receive(:with_count_queries).and_call_original + expect(measurement).to receive(:with_gc_stats).and_call_original + + expect(described_class.logger).to receive(:info).with(include(:gc_stats, :time_to_finish, :number_of_sql_calls, :memory_usage, :label)) + + is_expected.to eq(result) + end + + context 'with base_log_data provided' do + let(:base_log_data) { { test: "data" } } + + it 'logs includes base data' do + expect(described_class.logger).to receive(:info).with(include(:test, :gc_stats, :time_to_finish, :number_of_sql_calls, :memory_usage, :label)) + + subject + end + end + end +end diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb index e34367cbbf9..0f0d6a93c97 100644 --- a/spec/lib/gitlab/utils_spec.rb +++ b/spec/lib/gitlab/utils_spec.rb @@ -59,9 +59,10 @@ describe Gitlab::Utils do using RSpec::Parameterized::TableSyntax where(:original, :expected) do - 1999.8999 | 2 - 12384 | 12.38 - 333 | 0.33 + 1999.8999 | 1.9999 + 12384 | 12.384 + 333 | 0.333 + 1333.33333333 | 
1.333333 end with_them do @@ -129,7 +130,7 @@ describe Gitlab::Utils do expect(to_boolean(false)).to be(false) end - it 'converts a valid string to a boolean' do + it 'converts a valid value to a boolean' do expect(to_boolean(true)).to be(true) expect(to_boolean('true')).to be(true) expect(to_boolean('YeS')).to be(true) @@ -145,12 +146,35 @@ describe Gitlab::Utils do expect(to_boolean('oFF')).to be(false) end - it 'converts an invalid string to nil' do + it 'converts an invalid value to nil' do expect(to_boolean('fals')).to be_nil expect(to_boolean('yeah')).to be_nil expect(to_boolean('')).to be_nil expect(to_boolean(nil)).to be_nil end + + it 'accepts a default value, and does not return it when a valid value is given' do + expect(to_boolean(true, default: false)).to be(true) + expect(to_boolean('true', default: false)).to be(true) + expect(to_boolean('YeS', default: false)).to be(true) + expect(to_boolean('t', default: false)).to be(true) + expect(to_boolean('1', default: 'any value')).to be(true) + expect(to_boolean('ON', default: 42)).to be(true) + + expect(to_boolean('FaLse', default: true)).to be(false) + expect(to_boolean('F', default: true)).to be(false) + expect(to_boolean('NO', default: true)).to be(false) + expect(to_boolean('n', default: true)).to be(false) + expect(to_boolean('0', default: 'any value')).to be(false) + expect(to_boolean('oFF', default: 42)).to be(false) + end + + it 'accepts a default value, and returns it when an invalid value is given' do + expect(to_boolean('fals', default: true)).to eq(true) + expect(to_boolean('yeah', default: false)).to eq(false) + expect(to_boolean('', default: 'any value')).to eq('any value') + expect(to_boolean(nil, default: 42)).to eq(42) + end end describe '.boolean_to_yes_no' do diff --git a/spec/lib/gitlab/view/presenter/factory_spec.rb b/spec/lib/gitlab/view/presenter/factory_spec.rb index 515a1b0a8e4..7bf3c325019 100644 --- a/spec/lib/gitlab/view/presenter/factory_spec.rb +++ b/spec/lib/gitlab/view/presenter/factory_spec.rb @@ -31,11 +31,11 @@ describe Gitlab::View::Presenter::Factory do end it 'uses the presenter_class if given on #initialize' do - MyCustomPresenter = Class.new(described_class) + my_custom_presenter = Class.new(described_class) - presenter = described_class.new(build, presenter_class: MyCustomPresenter).fabricate! + presenter = described_class.new(build, presenter_class: my_custom_presenter).fabricate! 
- expect(presenter).to be_a(MyCustomPresenter) + expect(presenter).to be_a(my_custom_presenter) end end end diff --git a/spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb b/spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb index c606ba11b9c..f9ed769f2d9 100644 --- a/spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb +++ b/spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb @@ -76,11 +76,7 @@ describe Gitlab::WikiPages::FrontMatterParser do let(:raw_content) { with_front_matter } before do - stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => false) - stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => { - enabled: true, - thing: gate - }) + stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => gate) end it do diff --git a/spec/lib/gitlab/with_request_store_spec.rb b/spec/lib/gitlab/with_request_store_spec.rb new file mode 100644 index 00000000000..1ef8d986f96 --- /dev/null +++ b/spec/lib/gitlab/with_request_store_spec.rb @@ -0,0 +1,30 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' +require 'request_store' + +describe Gitlab::WithRequestStore do + let(:fake_class) { Class.new { include Gitlab::WithRequestStore } } + + subject(:object) { fake_class.new } + + describe "#with_request_store" do + it 'starts a request store and yields control' do + expect(RequestStore).to receive(:begin!).ordered + expect(RequestStore).to receive(:end!).ordered + expect(RequestStore).to receive(:clear!).ordered + + expect { |b| object.with_request_store(&b) }.to yield_control + end + + it 'only starts a request store once when nested' do + expect(RequestStore).to receive(:begin!).ordered.once.and_call_original + expect(RequestStore).to receive(:end!).ordered.once.and_call_original + expect(RequestStore).to receive(:clear!).ordered.once.and_call_original + + object.with_request_store do + expect { |b| object.with_request_store(&b) }.to yield_control + end + end + end +end diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb index 921ed568b71..53b6f461a48 100644 --- a/spec/lib/gitlab/workhorse_spec.rb +++ b/spec/lib/gitlab/workhorse_spec.rb @@ -9,7 +9,7 @@ describe Gitlab::Workhorse do def decode_workhorse_header(array) key, value = array command, encoded_params = value.split(":") - params = JSON.parse(Base64.urlsafe_decode64(encoded_params)) + params = Gitlab::Json.parse(Base64.urlsafe_decode64(encoded_params)) [key, command, params] end @@ -24,7 +24,7 @@ describe Gitlab::Workhorse do let(:ref) { 'master' } let(:format) { 'zip' } let(:storage_path) { Gitlab.config.gitlab.repository_downloads_path } - let(:path) { 'some/path' if Feature.enabled?(:git_archive_path, default_enabled: true) } + let(:path) { 'some/path' } let(:metadata) { repository.archive_metadata(ref, storage_path, format, append_sha: nil, path: path) } let(:cache_disabled) { false } @@ -36,70 +36,36 @@ describe Gitlab::Workhorse do allow(described_class).to receive(:git_archive_cache_disabled?).and_return(cache_disabled) end - context 'feature flag disabled' do - before do - stub_feature_flags(git_archive_path: false) - end - - it 'sets the header correctly' do - key, command, params = decode_workhorse_header(subject) + it 'sets the header correctly' do + key, command, params = decode_workhorse_header(subject) - expected_params = metadata.merge( - 'GitalyRepository' => repository.gitaly_repository.to_h, - 'GitalyServer' => { - features: { 'gitaly-feature-foobar' => 'true' }, - address: 
Gitlab::GitalyClient.address(project.repository_storage), - token: Gitlab::GitalyClient.token(project.repository_storage) - } + expect(key).to eq('Gitlab-Workhorse-Send-Data') + expect(command).to eq('git-archive') + expect(params).to eq({ + 'GitalyServer' => { + features: { 'gitaly-feature-foobar' => 'true' }, + address: Gitlab::GitalyClient.address(project.repository_storage), + token: Gitlab::GitalyClient.token(project.repository_storage) + }, + 'ArchivePath' => metadata['ArchivePath'], + 'GetArchiveRequest' => Base64.encode64( + Gitaly::GetArchiveRequest.new( + repository: repository.gitaly_repository, + commit_id: metadata['CommitId'], + prefix: metadata['ArchivePrefix'], + format: Gitaly::GetArchiveRequest::Format::ZIP, + path: path + ).to_proto ) - - expect(key).to eq('Gitlab-Workhorse-Send-Data') - expect(command).to eq('git-archive') - expect(params).to eq(expected_params.deep_stringify_keys) - end - - context 'when archive caching is disabled' do - let(:cache_disabled) { true } - - it 'tells workhorse not to use the cache' do - _, _, params = decode_workhorse_header(subject) - expect(params).to include({ 'DisableCache' => true }) - end - end + }.deep_stringify_keys) end - context 'feature flag enabled' do - it 'sets the header correctly' do - key, command, params = decode_workhorse_header(subject) - - expect(key).to eq('Gitlab-Workhorse-Send-Data') - expect(command).to eq('git-archive') - expect(params).to eq({ - 'GitalyServer' => { - features: { 'gitaly-feature-foobar' => 'true' }, - address: Gitlab::GitalyClient.address(project.repository_storage), - token: Gitlab::GitalyClient.token(project.repository_storage) - }, - 'ArchivePath' => metadata['ArchivePath'], - 'GetArchiveRequest' => Base64.encode64( - Gitaly::GetArchiveRequest.new( - repository: repository.gitaly_repository, - commit_id: metadata['CommitId'], - prefix: metadata['ArchivePrefix'], - format: Gitaly::GetArchiveRequest::Format::ZIP, - path: path - ).to_proto - ) - }.deep_stringify_keys) - end - - context 'when archive caching is disabled' do - let(:cache_disabled) { true } + context 'when archive caching is disabled' do + let(:cache_disabled) { true } - it 'tells workhorse not to use the cache' do - _, _, params = decode_workhorse_header(subject) - expect(params).to include({ 'DisableCache' => true }) - end + it 'tells workhorse not to use the cache' do + _, _, params = decode_workhorse_header(subject) + expect(params).to include({ 'DisableCache' => true }) end end diff --git a/spec/lib/gitlab/x509/signature_spec.rb b/spec/lib/gitlab/x509/signature_spec.rb index 6c585acd5cd..cff2fd7748b 100644 --- a/spec/lib/gitlab/x509/signature_spec.rb +++ b/spec/lib/gitlab/x509/signature_spec.rb @@ -229,4 +229,164 @@ describe Gitlab::X509::Signature do end end end + + describe '#user' do + signature = described_class.new( + X509Helpers::User1.signed_tag_signature, + X509Helpers::User1.signed_tag_base_data, + X509Helpers::User1.certificate_email, + X509Helpers::User1.signed_commit_time + ) + + context 'if email is assigned to a user' do + let!(:user) { create(:user, email: X509Helpers::User1.certificate_email) } + + it 'returns user' do + expect(signature.user).to eq(user) + end + end + + it 'if email is not assigned to a user, return nil' do + expect(signature.user).to be_nil + end + end + + context 'tag signature' do + let(:certificate_attributes) do + { + subject_key_identifier: X509Helpers::User1.tag_certificate_subject_key_identifier, + subject: X509Helpers::User1.certificate_subject, + email: 
X509Helpers::User1.certificate_email, + serial_number: X509Helpers::User1.tag_certificate_serial + } + end + + let(:issuer_attributes) do + { + subject_key_identifier: X509Helpers::User1.tag_issuer_subject_key_identifier, + subject: X509Helpers::User1.tag_certificate_issuer, + crl_url: X509Helpers::User1.tag_certificate_crl + } + end + + context 'verified signature' do + context 'with trusted certificate store' do + before do + store = OpenSSL::X509::Store.new + certificate = OpenSSL::X509::Certificate.new X509Helpers::User1.trust_cert + store.add_cert(certificate) + allow(OpenSSL::X509::Store).to receive(:new).and_return(store) + end + + it 'returns a verified signature if email does match' do + signature = described_class.new( + X509Helpers::User1.signed_tag_signature, + X509Helpers::User1.signed_tag_base_data, + X509Helpers::User1.certificate_email, + X509Helpers::User1.signed_commit_time + ) + + expect(signature.x509_certificate).to have_attributes(certificate_attributes) + expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) + expect(signature.verified_signature).to be_truthy + expect(signature.verification_status).to eq(:verified) + end + + it 'returns an unverified signature if email does not match' do + signature = described_class.new( + X509Helpers::User1.signed_tag_signature, + X509Helpers::User1.signed_tag_base_data, + "gitlab@example.com", + X509Helpers::User1.signed_commit_time + ) + + expect(signature.x509_certificate).to have_attributes(certificate_attributes) + expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) + expect(signature.verified_signature).to be_truthy + expect(signature.verification_status).to eq(:unverified) + end + + it 'returns an unverified signature if email does match and time is wrong' do + signature = described_class.new( + X509Helpers::User1.signed_tag_signature, + X509Helpers::User1.signed_tag_base_data, + X509Helpers::User1.certificate_email, + Time.new(2020, 2, 22) + ) + + expect(signature.x509_certificate).to have_attributes(certificate_attributes) + expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) + expect(signature.verified_signature).to be_falsey + expect(signature.verification_status).to eq(:unverified) + end + + it 'returns an unverified signature if certificate is revoked' do + signature = described_class.new( + X509Helpers::User1.signed_tag_signature, + X509Helpers::User1.signed_tag_base_data, + X509Helpers::User1.certificate_email, + X509Helpers::User1.signed_commit_time + ) + + expect(signature.verification_status).to eq(:verified) + + signature.x509_certificate.revoked! 
+ + expect(signature.verification_status).to eq(:unverified) + end + end + + context 'without trusted certificate within store' do + before do + store = OpenSSL::X509::Store.new + allow(OpenSSL::X509::Store).to receive(:new) + .and_return( + store + ) + end + + it 'returns an unverified signature' do + signature = described_class.new( + X509Helpers::User1.signed_tag_signature, + X509Helpers::User1.signed_tag_base_data, + X509Helpers::User1.certificate_email, + X509Helpers::User1.signed_commit_time + ) + + expect(signature.x509_certificate).to have_attributes(certificate_attributes) + expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) + expect(signature.verified_signature).to be_falsey + expect(signature.verification_status).to eq(:unverified) + end + end + end + + context 'invalid signature' do + it 'returns nil' do + signature = described_class.new( + X509Helpers::User1.signed_tag_signature.tr('A', 'B'), + X509Helpers::User1.signed_tag_base_data, + X509Helpers::User1.certificate_email, + X509Helpers::User1.signed_commit_time + ) + expect(signature.x509_certificate).to be_nil + expect(signature.verified_signature).to be_falsey + expect(signature.verification_status).to eq(:unverified) + end + end + + context 'invalid message' do + it 'returns nil' do + signature = described_class.new( + X509Helpers::User1.signed_tag_signature, + 'x', + X509Helpers::User1.certificate_email, + X509Helpers::User1.signed_commit_time + ) + expect(signature.x509_certificate).to be_nil + expect(signature.verified_signature).to be_falsey + expect(signature.verification_status).to eq(:unverified) + end + end + end end diff --git a/spec/lib/gitlab/x509/tag_spec.rb b/spec/lib/gitlab/x509/tag_spec.rb new file mode 100644 index 00000000000..4bc9723bd0d --- /dev/null +++ b/spec/lib/gitlab/x509/tag_spec.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true +require 'spec_helper' + +describe Gitlab::X509::Tag do + subject(:signature) { described_class.new(tag).signature } + + describe '#signature' do + let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') } + let(:project) { create(:project, :repository) } + + describe 'signed tag' do + let(:tag) { project.repository.find_tag('v1.1.1') } + let(:certificate_attributes) do + { + subject_key_identifier: X509Helpers::User1.tag_certificate_subject_key_identifier, + subject: X509Helpers::User1.certificate_subject, + email: X509Helpers::User1.certificate_email, + serial_number: X509Helpers::User1.tag_certificate_serial + } + end + + let(:issuer_attributes) do + { + subject_key_identifier: X509Helpers::User1.tag_issuer_subject_key_identifier, + subject: X509Helpers::User1.tag_certificate_issuer, + crl_url: X509Helpers::User1.tag_certificate_crl + } + end + + it { expect(signature).not_to be_nil } + it { expect(signature.verification_status).to eq(:unverified) } + it { expect(signature.x509_certificate).to have_attributes(certificate_attributes) } + it { expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) } + end + + context 'unsigned tag' do + let(:tag) { project.repository.find_tag('v1.0.0') } + + it { expect(signature).to be_nil } + end + end +end -- cgit v1.2.3