Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/spec
diff options
context:
space:
mode:
Diffstat (limited to 'spec')
-rw-r--r--spec/benchmarks/banzai_benchmark.rb2
-rw-r--r--spec/commands/metrics_server/metrics_server_spec.rb1
-rw-r--r--spec/commands/sidekiq_cluster/cli_spec.rb102
-rw-r--r--spec/components/pajamas/banner_component_spec.rb8
-rw-r--r--spec/components/pajamas/component_spec.rb2
-rw-r--r--spec/components/pajamas/empty_state_component_spec.rb4
-rw-r--r--spec/components/projects/ml/models_index_component_spec.rb32
-rw-r--r--spec/components/projects/ml/show_ml_model_component_spec.rb30
-rw-r--r--spec/config/object_store_settings_spec.rb22
-rw-r--r--spec/contracts/consumer/fixtures/project/pipelines/get_list_project_pipelines.fixture.js4
-rw-r--r--spec/contracts/consumer/fixtures/project/pipelines/get_pipeline_header_data.fixture.js2
-rw-r--r--spec/contracts/consumer/helpers/common_regex_patterns.js2
-rw-r--r--spec/controllers/admin/groups_controller_spec.rb90
-rw-r--r--spec/controllers/admin/runners_controller_spec.rb8
-rw-r--r--spec/controllers/concerns/continue_params_spec.rb2
-rw-r--r--spec/controllers/concerns/onboarding/status_spec.rb7
-rw-r--r--spec/controllers/concerns/product_analytics_tracking_spec.rb38
-rw-r--r--spec/controllers/concerns/send_file_upload_spec.rb2
-rw-r--r--spec/controllers/graphql_controller_spec.rb162
-rw-r--r--spec/controllers/groups/releases_controller_spec.rb2
-rw-r--r--spec/controllers/groups/runners_controller_spec.rb30
-rw-r--r--spec/controllers/groups_controller_spec.rb8
-rw-r--r--spec/controllers/import/bitbucket_server_controller_spec.rb7
-rw-r--r--spec/controllers/import/github_controller_spec.rb75
-rw-r--r--spec/controllers/jira_connect/app_descriptor_controller_spec.rb4
-rw-r--r--spec/controllers/oauth/applications_controller_spec.rb2
-rw-r--r--spec/controllers/oauth/tokens_controller_spec.rb58
-rw-r--r--spec/controllers/omniauth_callbacks_controller_spec.rb36
-rw-r--r--spec/controllers/profiles/personal_access_tokens_controller_spec.rb48
-rw-r--r--spec/controllers/profiles/two_factor_auths_controller_spec.rb12
-rw-r--r--spec/controllers/projects/artifacts_controller_spec.rb2
-rw-r--r--spec/controllers/projects/deploy_keys_controller_spec.rb2
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb4
-rw-r--r--spec/controllers/projects/merge_requests/conflicts_controller_spec.rb2
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb2
-rw-r--r--spec/controllers/projects/pipelines_controller_spec.rb4
-rw-r--r--spec/controllers/projects/project_members_controller_spec.rb40
-rw-r--r--spec/controllers/projects/prometheus/metrics_controller_spec.rb230
-rw-r--r--spec/controllers/projects/refs_controller_spec.rb36
-rw-r--r--spec/controllers/projects/registry/repositories_controller_spec.rb6
-rw-r--r--spec/controllers/projects/runners_controller_spec.rb45
-rw-r--r--spec/controllers/projects/settings/ci_cd_controller_spec.rb12
-rw-r--r--spec/controllers/projects/work_items_controller_spec.rb4
-rw-r--r--spec/controllers/projects_controller_spec.rb2
-rw-r--r--spec/controllers/registrations/welcome_controller_spec.rb119
-rw-r--r--spec/controllers/registrations_controller_spec.rb39
-rw-r--r--spec/controllers/search_controller_spec.rb4
-rw-r--r--spec/controllers/sessions_controller_spec.rb4
-rw-r--r--spec/controllers/snippets_controller_spec.rb2
-rw-r--r--spec/db/development/create_work_item_related_link_restrictions_spec.rb9
-rw-r--r--spec/db/production/create_work_item_related_link_restrictions_spec.rb9
-rw-r--r--spec/db/schema_spec.rb7
-rw-r--r--spec/experiments/application_experiment_spec.rb2
-rw-r--r--spec/experiments/ios_specific_templates_experiment_spec.rb2
-rw-r--r--spec/factories/achievements/user_achievements.rb1
-rw-r--r--spec/factories/bulk_import.rb4
-rw-r--r--spec/factories/ci/builds.rb13
-rw-r--r--spec/factories/ci/reports/security/findings.rb1
-rw-r--r--spec/factories/ci/runners.rb2
-rw-r--r--spec/factories/clusters/clusters.rb4
-rw-r--r--spec/factories/clusters/integrations/prometheus.rb2
-rw-r--r--spec/factories/clusters/providers/aws.rb2
-rw-r--r--spec/factories/container_registry/protection/rules.rb10
-rw-r--r--spec/factories/deployments.rb4
-rw-r--r--spec/factories/environments.rb4
-rw-r--r--spec/factories/group_members.rb15
-rw-r--r--spec/factories/integrations.rb179
-rw-r--r--spec/factories/member_tasks.rb9
-rw-r--r--spec/factories/ml/candidate_metrics.rb2
-rw-r--r--spec/factories/notes.rb10
-rw-r--r--spec/factories/packages/package_files.rb4
-rw-r--r--spec/factories/packages/packages.rb16
-rw-r--r--spec/factories/packages/protection/rules.rb (renamed from spec/factories/packages/package_protection_rules.rb)2
-rw-r--r--spec/factories/pages_deployments.rb4
-rw-r--r--spec/factories/project_members.rb10
-rw-r--r--spec/factories/users.rb9
-rw-r--r--spec/factories/users/credit_card_validations.rb2
-rw-r--r--spec/factories/users/in_product_marketing_email.rb6
-rw-r--r--spec/factories/vs_code/settings/vs_code_settings.rb12
-rw-r--r--spec/factories/work_items/related_link_restrictions.rb14
-rw-r--r--spec/features/admin/admin_jobs_spec.rb50
-rw-r--r--spec/features/admin/admin_runners_spec.rb5
-rw-r--r--spec/features/admin/admin_settings_spec.rb47
-rw-r--r--spec/features/alert_management/alert_details_spec.rb2
-rw-r--r--spec/features/alert_management/user_updates_alert_status_spec.rb2
-rw-r--r--spec/features/boards/sidebar_labels_in_namespaces_spec.rb2
-rw-r--r--spec/features/boards/sidebar_spec.rb2
-rw-r--r--spec/features/boards/user_visits_board_spec.rb2
-rw-r--r--spec/features/commits_spec.rb6
-rw-r--r--spec/features/cycle_analytics_spec.rb98
-rw-r--r--spec/features/dashboard/projects_spec.rb4
-rw-r--r--spec/features/dashboard/todos/todos_filtering_spec.rb43
-rw-r--r--spec/features/dashboard/todos/todos_spec.rb20
-rw-r--r--spec/features/discussion_comments/issue_spec.rb3
-rw-r--r--spec/features/expand_collapse_diffs_spec.rb4
-rw-r--r--spec/features/gitlab_experiments_spec.rb2
-rw-r--r--spec/features/groups/empty_states_spec.rb18
-rw-r--r--spec/features/groups/labels/sort_labels_spec.rb8
-rw-r--r--spec/features/groups/milestone_spec.rb6
-rw-r--r--spec/features/groups/navbar_spec.rb13
-rw-r--r--spec/features/groups_spec.rb2
-rw-r--r--spec/features/ide/user_opens_merge_request_spec.rb4
-rw-r--r--spec/features/incidents/incident_details_spec.rb2
-rw-r--r--spec/features/invites_spec.rb36
-rw-r--r--spec/features/issuables/issuable_list_spec.rb2
-rw-r--r--spec/features/issuables/markdown_references/jira_spec.rb3
-rw-r--r--spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb4
-rw-r--r--spec/features/issues/form_spec.rb398
-rw-r--r--spec/features/issues/gfm_autocomplete_spec.rb6
-rw-r--r--spec/features/issues/issue_detail_spec.rb2
-rw-r--r--spec/features/issues/issue_sidebar_spec.rb2
-rw-r--r--spec/features/issues/issue_state_spec.rb49
-rw-r--r--spec/features/issues/markdown_toolbar_spec.rb3
-rw-r--r--spec/features/issues/move_spec.rb2
-rw-r--r--spec/features/issues/note_polling_spec.rb3
-rw-r--r--spec/features/issues/notes_on_issues_spec.rb3
-rw-r--r--spec/features/issues/related_issues_spec.rb4
-rw-r--r--spec/features/issues/resource_label_events_spec.rb2
-rw-r--r--spec/features/issues/service_desk_spec.rb4
-rw-r--r--spec/features/issues/todo_spec.rb1
-rw-r--r--spec/features/issues/user_bulk_edits_issues_labels_spec.rb6
-rw-r--r--spec/features/issues/user_comments_on_issue_spec.rb13
-rw-r--r--spec/features/issues/user_creates_issue_spec.rb57
-rw-r--r--spec/features/issues/user_edits_issue_spec.rb5
-rw-r--r--spec/features/issues/user_interacts_with_awards_spec.rb2
-rw-r--r--spec/features/issues/user_sees_sidebar_updates_in_realtime_spec.rb5
-rw-r--r--spec/features/issues/user_toggles_subscription_spec.rb23
-rw-r--r--spec/features/issues/user_uses_quick_actions_spec.rb2
-rw-r--r--spec/features/labels_hierarchy_spec.rb77
-rw-r--r--spec/features/markdown/markdown_spec.rb6
-rw-r--r--spec/features/markdown/math_spec.rb127
-rw-r--r--spec/features/markdown/observability_spec.rb77
-rw-r--r--spec/features/merge_request/admin_views_hidden_merge_request_spec.rb15
-rw-r--r--spec/features/merge_request/hide_default_award_emojis_spec.rb22
-rw-r--r--spec/features/merge_request/user_accepts_merge_request_spec.rb9
-rw-r--r--spec/features/merge_request/user_comments_on_diff_spec.rb2
-rw-r--r--spec/features/merge_request/user_creates_custom_emoji_spec.rb59
-rw-r--r--spec/features/merge_request/user_creates_image_diff_notes_spec.rb2
-rw-r--r--spec/features/merge_request/user_creates_mr_spec.rb75
-rw-r--r--spec/features/merge_request/user_edits_merge_request_spec.rb4
-rw-r--r--spec/features/merge_request/user_edits_mr_spec.rb38
-rw-r--r--spec/features/merge_request/user_manages_subscription_spec.rb20
-rw-r--r--spec/features/merge_request/user_merges_immediately_spec.rb3
-rw-r--r--spec/features/merge_request/user_merges_merge_request_spec.rb3
-rw-r--r--spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb3
-rw-r--r--spec/features/merge_request/user_opens_checkout_branch_modal_spec.rb2
-rw-r--r--spec/features/merge_request/user_posts_notes_spec.rb3
-rw-r--r--spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb4
-rw-r--r--spec/features/merge_request/user_reverts_merge_request_spec.rb3
-rw-r--r--spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_check_out_branch_modal_spec.rb3
-rw-r--r--spec/features/merge_request/user_sees_discussions_spec.rb4
-rw-r--r--spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb14
-rw-r--r--spec/features/merge_request/user_sees_merge_widget_spec.rb6
-rw-r--r--spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_pipelines_spec.rb2
-rw-r--r--spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb2
-rw-r--r--spec/features/merge_request/user_sets_to_auto_merge_spec.rb6
-rw-r--r--spec/features/merge_request/user_squashes_merge_request_spec.rb8
-rw-r--r--spec/features/merge_request/user_suggests_changes_on_diff_spec.rb2
-rw-r--r--spec/features/merge_request/user_views_open_merge_request_spec.rb15
-rw-r--r--spec/features/merge_requests/admin_views_hidden_merge_requests_spec.rb2
-rw-r--r--spec/features/merge_requests/user_sees_note_updates_in_real_time_spec.rb2
-rw-r--r--spec/features/merge_requests/user_views_all_merge_requests_spec.rb12
-rw-r--r--spec/features/nav/pinned_nav_items_spec.rb2
-rw-r--r--spec/features/populate_new_pipeline_vars_with_params_spec.rb12
-rw-r--r--spec/features/profiles/user_visits_profile_preferences_page_spec.rb2
-rw-r--r--spec/features/projects/active_tabs_spec.rb2
-rw-r--r--spec/features/projects/artifacts/user_downloads_artifacts_spec.rb2
-rw-r--r--spec/features/projects/blobs/blob_line_permalink_updater_spec.rb48
-rw-r--r--spec/features/projects/branches/user_views_branches_spec.rb8
-rw-r--r--spec/features/projects/branches_spec.rb8
-rw-r--r--spec/features/projects/cluster_agents_spec.rb2
-rw-r--r--spec/features/projects/clusters_spec.rb2
-rw-r--r--spec/features/projects/commit/user_sees_pipelines_tab_spec.rb2
-rw-r--r--spec/features/projects/container_registry_spec.rb3
-rw-r--r--spec/features/projects/environments/environment_spec.rb2
-rw-r--r--spec/features/projects/integrations/user_activates_jira_spec.rb2
-rw-r--r--spec/features/projects/integrations/user_activates_mattermost_slash_command_spec.rb2
-rw-r--r--spec/features/projects/integrations/user_activates_prometheus_spec.rb21
-rw-r--r--spec/features/projects/integrations/user_activates_slack_slash_command_spec.rb2
-rw-r--r--spec/features/projects/issuable_templates_spec.rb8
-rw-r--r--spec/features/projects/jobs/permissions_spec.rb2
-rw-r--r--spec/features/projects/jobs/user_browses_job_spec.rb2
-rw-r--r--spec/features/projects/jobs/user_browses_jobs_spec.rb8
-rw-r--r--spec/features/projects/jobs_spec.rb12
-rw-r--r--spec/features/projects/labels/issues_sorted_by_priority_spec.rb4
-rw-r--r--spec/features/projects/labels/sort_labels_spec.rb8
-rw-r--r--spec/features/projects/labels/update_prioritization_spec.rb16
-rw-r--r--spec/features/projects/labels/user_removes_labels_spec.rb4
-rw-r--r--spec/features/projects/pipeline_schedules_spec.rb55
-rw-r--r--spec/features/projects/pipelines/pipeline_spec.rb24
-rw-r--r--spec/features/projects/pipelines/pipelines_spec.rb36
-rw-r--r--spec/features/projects/project_overview_spec.rb59
-rw-r--r--spec/features/projects/settings/auto_devops_spec.rb31
-rw-r--r--spec/features/projects/settings/service_desk_setting_spec.rb6
-rw-r--r--spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb2
-rw-r--r--spec/features/projects/work_items/linked_work_items_spec.rb114
-rw-r--r--spec/features/projects/work_items/work_item_children_spec.rb8
-rw-r--r--spec/features/projects/work_items/work_item_spec.rb28
-rw-r--r--spec/features/protected_branches_spec.rb3
-rw-r--r--spec/features/registrations/oauth_registration_spec.rb18
-rw-r--r--spec/features/reportable_note/issue_spec.rb3
-rw-r--r--spec/features/tags/developer_views_tags_spec.rb2
-rw-r--r--spec/features/user_sees_revert_modal_spec.rb4
-rw-r--r--spec/features/users/google_analytics_csp_spec.rb15
-rw-r--r--spec/features/users/google_syndication_csp_spec.rb54
-rw-r--r--spec/features/users/login_spec.rb2
-rw-r--r--spec/features/users/signup_spec.rb62
-rw-r--r--spec/features/users/terms_spec.rb3
-rw-r--r--spec/finders/alert_management/alerts_finder_spec.rb4
-rw-r--r--spec/finders/branches_finder_spec.rb14
-rw-r--r--spec/finders/ci/runners_finder_spec.rb2
-rw-r--r--spec/finders/concerns/packages/finder_helper_spec.rb30
-rw-r--r--spec/finders/environments/environments_finder_spec.rb4
-rw-r--r--spec/finders/groups_finder_spec.rb48
-rw-r--r--spec/finders/license_template_finder_spec.rb2
-rw-r--r--spec/finders/merge_requests/oldest_per_commit_finder_spec.rb35
-rw-r--r--spec/finders/merge_requests_finder_spec.rb6
-rw-r--r--spec/finders/packages/maven/package_finder_spec.rb22
-rw-r--r--spec/finders/packages/npm/packages_for_user_finder_spec.rb18
-rw-r--r--spec/finders/projects/ml/model_finder_spec.rb4
-rw-r--r--spec/finders/snippets_finder_spec.rb48
-rw-r--r--spec/finders/tags_finder_spec.rb12
-rw-r--r--spec/finders/template_finder_spec.rb4
-rw-r--r--spec/finders/vs_code/settings/settings_finder_spec.rb64
-rw-r--r--spec/fixtures/api/schemas/entities/note_user_entity.json39
-rw-r--r--spec/fixtures/api/schemas/entities/user.json39
-rw-r--r--spec/fixtures/api/schemas/graphql/packages/package_details.json7
-rw-r--r--spec/fixtures/api/schemas/jira_connect/pull_request.json8
-rw-r--r--spec/fixtures/api/schemas/jira_connect/reviewer.json20
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/issue.json12
-rw-r--r--spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml2
-rw-r--r--spec/fixtures/markdown.md.erb2
-rw-r--r--spec/fixtures/packages/nuget/package_with_symbols.snupkgbin0 -> 7980 bytes
-rw-r--r--spec/fixtures/security_reports/master/gl-common-scanning-report.json14
-rw-r--r--spec/fixtures/structure.sql108
-rw-r--r--spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap1
-rw-r--r--spec/frontend/admin/abuse_report/components/report_actions_spec.js55
-rw-r--r--spec/frontend/admin/abuse_report/components/user_details_spec.js8
-rw-r--r--spec/frontend/alert_spec.js68
-rw-r--r--spec/frontend/analytics/cycle_analytics/components/base_spec.js2
-rw-r--r--spec/frontend/analytics/cycle_analytics/components/value_stream_filters_spec.js190
-rw-r--r--spec/frontend/analytics/cycle_analytics/mock_data.js2
-rw-r--r--spec/frontend/analytics/cycle_analytics/store/actions_spec.js3
-rw-r--r--spec/frontend/analytics/cycle_analytics/store/mutations_spec.js2
-rw-r--r--spec/frontend/analytics/shared/components/date_ranges_dropdown_spec.js165
-rw-r--r--spec/frontend/analytics/shared/components/projects_dropdown_filter_spec.js66
-rw-r--r--spec/frontend/batch_comments/components/preview_dropdown_spec.js8
-rw-r--r--spec/frontend/behaviors/autosize_spec.js42
-rw-r--r--spec/frontend/behaviors/components/global_alerts_spec.js135
-rw-r--r--spec/frontend/behaviors/components/json_table_spec.js4
-rw-r--r--spec/frontend/behaviors/markdown/render_observability_spec.js43
-rw-r--r--spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap2
-rw-r--r--spec/frontend/blob/components/blob_header_default_actions_spec.js2
-rw-r--r--spec/frontend/blob/csv/csv_viewer_spec.js33
-rw-r--r--spec/frontend/boards/board_card_inner_spec.js19
-rw-r--r--spec/frontend/boards/board_list_helper.js2
-rw-r--r--spec/frontend/boards/board_list_spec.js2
-rw-r--r--spec/frontend/boards/components/board_card_spec.js37
-rw-r--r--spec/frontend/boards/components/board_form_spec.js19
-rw-r--r--spec/frontend/boards/components/boards_selector_spec.js79
-rw-r--r--spec/frontend/boards/components/issue_board_filtered_search_spec.js5
-rw-r--r--spec/frontend/boards/mock_data.js10
-rw-r--r--spec/frontend/branches/components/__snapshots__/delete_merged_branches_spec.js.snap2
-rw-r--r--spec/frontend/branches/components/sort_dropdown_spec.js20
-rw-r--r--spec/frontend/ci/admin/jobs_table/components/cells/runner_cell_spec.js26
-rw-r--r--spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js32
-rw-r--r--spec/frontend/ci/catalog/components/ci_catalog_home_spec.js46
-rw-r--r--spec/frontend/ci/catalog/components/details/ci_resource_about_spec.js120
-rw-r--r--spec/frontend/ci/catalog/components/details/ci_resource_components_spec.js113
-rw-r--r--spec/frontend/ci/catalog/components/details/ci_resource_details_spec.js83
-rw-r--r--spec/frontend/ci/catalog/components/details/ci_resource_header_spec.js139
-rw-r--r--spec/frontend/ci/catalog/components/details/ci_resource_readme_spec.js96
-rw-r--r--spec/frontend/ci/catalog/components/list/catalog_header_spec.js86
-rw-r--r--spec/frontend/ci/catalog/components/list/catalog_list_skeleton_loader_spec.js22
-rw-r--r--spec/frontend/ci/catalog/components/list/ci_resources_list_item_spec.js198
-rw-r--r--spec/frontend/ci/catalog/components/list/ci_resources_list_spec.js143
-rw-r--r--spec/frontend/ci/catalog/components/list/empty_state_spec.js27
-rw-r--r--spec/frontend/ci/catalog/components/pages/ci_resource_details_page_spec.js186
-rw-r--r--spec/frontend/ci/catalog/mock.js546
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_environments_dropdown_spec.js118
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js111
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_variable_modal_spec.js41
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js39
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_variable_shared_spec.js2
-rw-r--r--spec/frontend/ci/ci_variable_list/mocks.js3
-rw-r--r--spec/frontend/ci/ci_variable_list/utils_spec.js53
-rw-r--r--spec/frontend/ci/common/pipelines_table_spec.js241
-rw-r--r--spec/frontend/ci/job_details/components/job_header_spec.js37
-rw-r--r--spec/frontend/ci/job_details/components/log/collapsible_section_spec.js28
-rw-r--r--spec/frontend/ci/job_details/components/log/line_header_spec.js2
-rw-r--r--spec/frontend/ci/job_details/components/log/line_number_spec.js2
-rw-r--r--spec/frontend/ci/job_details/components/log/line_spec.js2
-rw-r--r--spec/frontend/ci/job_details/components/log/log_spec.js33
-rw-r--r--spec/frontend/ci/job_details/components/log/mock_data.js65
-rw-r--r--spec/frontend/ci/job_details/components/sidebar/artifacts_block_spec.js16
-rw-r--r--spec/frontend/ci/job_details/components/sidebar/sidebar_header_spec.js6
-rw-r--r--spec/frontend/ci/job_details/components/sidebar/sidebar_job_details_container_spec.js3
-rw-r--r--spec/frontend/ci/job_details/job_app_spec.js2
-rw-r--r--spec/frontend/ci/job_details/store/actions_spec.js25
-rw-r--r--spec/frontend/ci/job_details/store/mutations_spec.js22
-rw-r--r--spec/frontend/ci/job_details/store/utils_spec.js67
-rw-r--r--spec/frontend/ci/jobs_page/components/job_cells/job_cell_spec.js8
-rw-r--r--spec/frontend/ci/jobs_page/components/job_cells/status_cell_spec.js (renamed from spec/frontend/ci/jobs_page/components/job_cells/duration_cell_spec.js)4
-rw-r--r--spec/frontend/ci/jobs_page/components/jobs_table_empty_state_spec.js2
-rw-r--r--spec/frontend/ci/jobs_page/components/jobs_table_spec.js7
-rw-r--r--spec/frontend/ci/pipeline_details/graph/components/job_item_spec.js26
-rw-r--r--spec/frontend/ci/pipeline_details/graph/components/linked_pipeline_spec.js2
-rw-r--r--spec/frontend/ci/pipeline_details/mock_data.js6
-rw-r--r--spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js4
-rw-r--r--spec/frontend/ci/pipeline_mini_graph/legacy_pipeline_stage_spec.js15
-rw-r--r--spec/frontend/ci/pipeline_mini_graph/linked_pipelines_mini_list_spec.js26
-rw-r--r--spec/frontend/ci/pipeline_new/components/pipeline_new_form_spec.js8
-rw-r--r--spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_empty_state_spec.js37
-rw-r--r--spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_spec.js73
-rw-r--r--spec/frontend/ci/pipeline_schedules/mock_data.js27
-rw-r--r--spec/frontend/ci/pipelines_page/components/pipeline_labels_spec.js40
-rw-r--r--spec/frontend/ci/pipelines_page/components/pipeline_operations_spec.js69
-rw-r--r--spec/frontend/ci/pipelines_page/components/pipeline_stop_modal_spec.js42
-rw-r--r--spec/frontend/ci/pipelines_page/pipelines_spec.js101
-rw-r--r--spec/frontend/ci/runner/admin_runner_show/admin_runner_show_app_spec.js5
-rw-r--r--spec/frontend/ci/runner/admin_runners/admin_runners_app_spec.js4
-rw-r--r--spec/frontend/ci/runner/components/runner_details_spec.js7
-rw-r--r--spec/frontend/ci/runner/components/runner_details_tabs_spec.js14
-rw-r--r--spec/frontend/ci/runner/components/runner_list_spec.js28
-rw-r--r--spec/frontend/ci/runner/components/runner_type_icon_spec.js67
-rw-r--r--spec/frontend/ci/runner/group_runner_show/group_runner_show_app_spec.js5
-rw-r--r--spec/frontend/ci/runner/sentry_utils_spec.js21
-rw-r--r--spec/frontend/clusters_list/components/clusters_spec.js37
-rw-r--r--spec/frontend/clusters_list/store/actions_spec.js10
-rw-r--r--spec/frontend/commit/commit_pipeline_status_spec.js2
-rw-r--r--spec/frontend/commit/components/commit_box_pipeline_status_spec.js16
-rw-r--r--spec/frontend/commit/pipelines/legacy_pipelines_table_wrapper_spec.js97
-rw-r--r--spec/frontend/content_editor/services/markdown_serializer_spec.js8
-rw-r--r--spec/frontend/contributors/component/contributors_spec.js15
-rw-r--r--spec/frontend/crm/crm_form_spec.js2
-rw-r--r--spec/frontend/crm/organization_form_wrapper_spec.js2
-rw-r--r--spec/frontend/design_management/components/design_description/description_form_spec.js8
-rw-r--r--spec/frontend/design_management/pages/index_spec.js2
-rw-r--r--spec/frontend/diffs/components/app_spec.js28
-rw-r--r--spec/frontend/diffs/components/diff_file_header_spec.js60
-rw-r--r--spec/frontend/diffs/components/diff_file_spec.js16
-rw-r--r--spec/frontend/diffs/store/actions_spec.js14
-rw-r--r--spec/frontend/diffs/store/mutations_spec.js10
-rw-r--r--spec/frontend/diffs/store/utils_spec.js28
-rw-r--r--spec/frontend/diffs/utils/merge_request_spec.js16
-rw-r--r--spec/frontend/diffs/utils/sort_errors_by_file_spec.js52
-rw-r--r--spec/frontend/editor/schema/ci/ci_schema_spec.js4
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/script.yml14
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/script.yml52
-rw-r--r--spec/frontend/editor/source_editor_spec.js21
-rw-r--r--spec/frontend/environments/canary_ingress_spec.js58
-rw-r--r--spec/frontend/environments/environment_form_spec.js9
-rw-r--r--spec/frontend/environments/graphql/resolvers/kubernetes_spec.js50
-rw-r--r--spec/frontend/environments/kubernetes_overview_spec.js7
-rw-r--r--spec/frontend/environments/kubernetes_pods_spec.js2
-rw-r--r--spec/frontend/environments/kubernetes_summary_spec.js6
-rw-r--r--spec/frontend/environments/kubernetes_tabs_spec.js2
-rw-r--r--spec/frontend/fixtures/autocomplete.rb20
-rw-r--r--spec/frontend/fixtures/autocomplete_sources.rb18
-rw-r--r--spec/frontend/fixtures/environments.rb34
-rw-r--r--spec/frontend/fixtures/issues.rb34
-rw-r--r--spec/frontend/fixtures/releases.rb130
-rw-r--r--spec/frontend/fixtures/search.rb7
-rw-r--r--spec/frontend/gfm_auto_complete_spec.js38
-rw-r--r--spec/frontend/google_tag_manager/index_spec.js532
-rw-r--r--spec/frontend/helpers/startup_css_helper_spec.js67
-rw-r--r--spec/frontend/ide/init_gitlab_web_ide_spec.js43
-rw-r--r--spec/frontend/import/details/mock_data.js6
-rw-r--r--spec/frontend/import_entities/components/group_dropdown_spec.js94
-rw-r--r--spec/frontend/import_entities/components/import_target_dropdown_spec.js55
-rw-r--r--spec/frontend/import_entities/import_groups/components/import_table_spec.js141
-rw-r--r--spec/frontend/import_entities/import_groups/components/import_target_cell_spec.js52
-rw-r--r--spec/frontend/integrations/gitlab_slack_application/components/projects_dropdown_spec.js54
-rw-r--r--spec/frontend/invite_members/components/invite_members_modal_spec.js76
-rw-r--r--spec/frontend/invite_members/mock_data/api_responses.js52
-rw-r--r--spec/frontend/issuable/components/hidden_badge_spec.js45
-rw-r--r--spec/frontend/issuable/components/locked_badge_spec.js45
-rw-r--r--spec/frontend/issues/dashboard/mock_data.js1
-rw-r--r--spec/frontend/issues/list/mock_data.js1
-rw-r--r--spec/frontend/issues/show/components/description_spec.js4
-rw-r--r--spec/frontend/issues/show/components/fields/description_spec.js17
-rw-r--r--spec/frontend/issues/show/components/header_actions_spec.js86
-rw-r--r--spec/frontend/issues/show/components/new_header_actions_popover_spec.js77
-rw-r--r--spec/frontend/issues/show/components/sticky_header_spec.js37
-rw-r--r--spec/frontend/issues/show/mock_data/mock_data.js3
-rw-r--r--spec/frontend/lib/utils/global_alerts_spec.js80
-rw-r--r--spec/frontend/lib/utils/url_utility_spec.js74
-rw-r--r--spec/frontend/merge_requests/components/header_metadata_spec.js93
-rw-r--r--spec/frontend/merge_requests/components/merge_request_header_spec.js88
-rw-r--r--spec/frontend/ml/experiment_tracking/routes/candidates/show/components/candidate_detail_row_spec.js11
-rw-r--r--spec/frontend/ml/experiment_tracking/routes/candidates/show/ml_candidates_show_spec.js138
-rw-r--r--spec/frontend/ml/experiment_tracking/routes/candidates/show/mock_data.js7
-rw-r--r--spec/frontend/ml/model_registry/apps/show_ml_model_spec.js15
-rw-r--r--spec/frontend/ml/model_registry/mock_data.js1
-rw-r--r--spec/frontend/ml/model_registry/routes/models/index/components/ml_models_index_spec.js68
-rw-r--r--spec/frontend/ml/model_registry/routes/models/index/components/mock_data.js19
-rw-r--r--spec/frontend/ml/model_registry/routes/models/index/components/model_row_spec.js42
-rw-r--r--spec/frontend/notes/components/comment_form_spec.js201
-rw-r--r--spec/frontend/notes/components/email_participants_warning_spec.js6
-rw-r--r--spec/frontend/notes/components/note_form_spec.js26
-rw-r--r--spec/frontend/notes/mock_data.js1
-rw-r--r--spec/frontend/notes/stores/actions_spec.js4
-rw-r--r--spec/frontend/observability/client_spec.js233
-rw-r--r--spec/frontend/observability/index_spec.js64
-rw-r--r--spec/frontend/observability/observability_app_spec.js201
-rw-r--r--spec/frontend/observability/observability_container_spec.js6
-rw-r--r--spec/frontend/observability/skeleton_spec.js86
-rw-r--r--spec/frontend/organizations/index/components/app_spec.js87
-rw-r--r--spec/frontend/organizations/index/components/organizations_list_item_spec.js70
-rw-r--r--spec/frontend/organizations/index/components/organizations_list_spec.js28
-rw-r--r--spec/frontend/organizations/index/components/organizations_view_spec.js57
-rw-r--r--spec/frontend/organizations/index/mock_data.js3
-rw-r--r--spec/frontend/organizations/new/components/app_spec.js113
-rw-r--r--spec/frontend/organizations/shared/components/new_edit_form_spec.js112
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/__snapshots__/packages_list_app_spec.js.snap63
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap2
-rw-r--r--spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js30
-rw-r--r--spec/frontend/pages/projects/find_file/ref_switcher/ref_switcher_utils_spec.js9
-rw-r--r--spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js17
-rw-r--r--spec/frontend/performance_bar/components/request_warning_spec.js7
-rw-r--r--spec/frontend/performance_bar/index_spec.js3
-rw-r--r--spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap2
-rw-r--r--spec/frontend/projects/project_find_file_spec.js57
-rw-r--r--spec/frontend/projects/settings/components/new_access_dropdown_spec.js12
-rw-r--r--spec/frontend/ref/components/ambiguous_ref_modal_spec.js64
-rw-r--r--spec/frontend/ref/components/ref_selector_spec.js19
-rw-r--r--spec/frontend/ref/init_ambiguous_ref_modal_spec.js48
-rw-r--r--spec/frontend/releases/components/tag_field_new_spec.js13
-rw-r--r--spec/frontend/releases/stores/modules/detail/actions_spec.js442
-rw-r--r--spec/frontend/releases/stores/modules/detail/getters_spec.js16
-rw-r--r--spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap121
-rw-r--r--spec/frontend/repository/components/commit_info_spec.js87
-rw-r--r--spec/frontend/repository/components/last_commit_spec.js119
-rw-r--r--spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap6
-rw-r--r--spec/frontend/search/sidebar/components/app_spec.js9
-rw-r--r--spec/frontend/search/sidebar/components/archived_filter_spec.js42
-rw-r--r--spec/frontend/search/sidebar/components/issues_filters_spec.js4
-rw-r--r--spec/frontend/search/sidebar/components/merge_requests_filters_spec.js6
-rw-r--r--spec/frontend/search/sidebar/components/milestones_filters_spec.js28
-rw-r--r--spec/frontend/search/topbar/components/app_spec.js135
-rw-r--r--spec/frontend/sentry/init_sentry_spec.js35
-rw-r--r--spec/frontend/sentry/sentry_browser_wrapper_spec.js22
-rw-r--r--spec/frontend/sidebar/components/todo_toggle/sidebar_todo_widget_spec.js21
-rw-r--r--spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap4
-rw-r--r--spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap4
-rw-r--r--spec/frontend/snippets/components/__snapshots__/snippet_description_view_spec.js.snap2
-rw-r--r--spec/frontend/snippets/components/__snapshots__/snippet_visibility_edit_spec.js.snap6
-rw-r--r--spec/frontend/snippets/components/edit_spec.js3
-rw-r--r--spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js2
-rw-r--r--spec/frontend/snippets/components/snippet_header_spec.js2
-rw-r--r--spec/frontend/super_sidebar/components/create_menu_spec.js4
-rw-r--r--spec/frontend/super_sidebar/components/help_center_spec.js6
-rw-r--r--spec/frontend/super_sidebar/components/nav_item_spec.js45
-rw-r--r--spec/frontend/super_sidebar/components/super_sidebar_spec.js26
-rw-r--r--spec/frontend/super_sidebar/components/super_sidebar_toggle_spec.js42
-rw-r--r--spec/frontend/super_sidebar/components/user_menu_profile_item_spec.js (renamed from spec/frontend/super_sidebar/components/user_name_group_spec.js)13
-rw-r--r--spec/frontend/super_sidebar/components/user_menu_spec.js10
-rw-r--r--spec/frontend/super_sidebar/utils_spec.js9
-rw-r--r--spec/frontend/tags/components/sort_dropdown_spec.js20
-rw-r--r--spec/frontend/tracking/internal_events_spec.js99
-rw-r--r--spec/frontend/users_select/test_helper.js1
-rw-r--r--spec/frontend/vue_alerts_spec.js7
-rw-r--r--spec/frontend/vue_merge_request_widget/components/checks/conflicts_spec.js90
-rw-r--r--spec/frontend/vue_merge_request_widget/components/checks/message_spec.js30
-rw-r--r--spec/frontend/vue_merge_request_widget/components/merge_checks_spec.js92
-rw-r--r--spec/frontend/vue_merge_request_widget/components/widget/action_buttons_spec.js4
-rw-r--r--spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js94
-rw-r--r--spec/frontend/vue_shared/alert_details/alert_status_spec.js25
-rw-r--r--spec/frontend/vue_shared/components/badges/__snapshots__/beta_badge_spec.js.snap21
-rw-r--r--spec/frontend/vue_shared/components/badges/__snapshots__/experiment_badge_spec.js.snap41
-rw-r--r--spec/frontend/vue_shared/components/badges/beta_badge_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/badges/experiment_badge_spec.js32
-rw-r--r--spec/frontend/vue_shared/components/badges/hover_badge_spec.js50
-rw-r--r--spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/ci_badge_link_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/clone_dropdown/clone_dropdown_item_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js17
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/user_token_spec.js40
-rw-r--r--spec/frontend/vue_shared/components/form/input_copy_toggle_visibility_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/markdown/editor_mode_switcher_spec.js80
-rw-r--r--spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js95
-rw-r--r--spec/frontend/vue_shared/components/markdown/toolbar_spec.js24
-rw-r--r--spec/frontend/vue_shared/components/runner_instructions/instructions/runner_cli_instructions_spec.js8
-rw-r--r--spec/frontend/vue_shared/components/segmented_control_button_group_spec.js31
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/__snapshots__/utils_spec.js.snap88
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/components/blame_info_spec.js63
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/mock_data.js21
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/utils_spec.js35
-rw-r--r--spec/frontend/vue_shared/components/time_ago_tooltip_spec.js9
-rw-r--r--spec/frontend/vue_shared/components/toggle_labels_spec.js56
-rw-r--r--spec/frontend/vue_shared/components/vuex_module_provider_spec.js10
-rw-r--r--spec/frontend/vue_shared/issuable/create/components/issuable_create_root_spec.js3
-rw-r--r--spec/frontend/vue_shared/issuable/create/components/issuable_form_spec.js32
-rw-r--r--spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js18
-rw-r--r--spec/frontend/vue_shared/issuable/list/mock_data.js2
-rw-r--r--spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js36
-rw-r--r--spec/frontend/work_items/components/notes/work_item_add_note_spec.js60
-rw-r--r--spec/frontend/work_items/components/notes/work_item_comment_form_spec.js4
-rw-r--r--spec/frontend/work_items/components/notes/work_item_discussion_spec.js4
-rw-r--r--spec/frontend/work_items/components/notes/work_item_note_actions_spec.js2
-rw-r--r--spec/frontend/work_items/components/notes/work_item_note_awards_list_spec.js4
-rw-r--r--spec/frontend/work_items/components/notes/work_item_note_spec.js38
-rw-r--r--spec/frontend/work_items/components/shared/work_item_link_child_contents_spec.js5
-rw-r--r--spec/frontend/work_items/components/work_item_actions_spec.js140
-rw-r--r--spec/frontend/work_items/components/work_item_assignees_spec.js45
-rw-r--r--spec/frontend/work_items/components/work_item_attributes_wrapper_spec.js2
-rw-r--r--spec/frontend/work_items/components/work_item_created_updated_spec.js64
-rw-r--r--spec/frontend/work_items/components/work_item_description_spec.js39
-rw-r--r--spec/frontend/work_items/components/work_item_detail_spec.js87
-rw-r--r--spec/frontend/work_items/components/work_item_labels_spec.js58
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_children_wrapper_spec.js3
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js21
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js3
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_links_spec.js37
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js4
-rw-r--r--spec/frontend/work_items/components/work_item_milestone_spec.js4
-rw-r--r--spec/frontend/work_items/components/work_item_notes_spec.js4
-rw-r--r--spec/frontend/work_items/components/work_item_parent_spec.js236
-rw-r--r--spec/frontend/work_items/components/work_item_relationships/__snapshots__/work_item_relationship_list_spec.js.snap6
-rw-r--r--spec/frontend/work_items/components/work_item_relationships/work_item_add_relationship_form_spec.js156
-rw-r--r--spec/frontend/work_items/components/work_item_relationships/work_item_relationship_list_spec.js3
-rw-r--r--spec/frontend/work_items/components/work_item_relationships/work_item_relationships_spec.js157
-rw-r--r--spec/frontend/work_items/components/work_item_todos_spec.js3
-rw-r--r--spec/frontend/work_items/graphql/cache_utils_spec.js8
-rw-r--r--spec/frontend/work_items/mock_data.js173
-rw-r--r--spec/frontend/work_items/pages/create_work_item_spec.js5
-rw-r--r--spec/frontend/work_items/router_spec.js1
-rw-r--r--spec/frontend/work_items/utils_spec.js13
-rw-r--r--spec/graphql/features/authorization_spec.rb54
-rw-r--r--spec/graphql/mutations/alert_management/update_alert_status_spec.rb2
-rw-r--r--spec/graphql/mutations/ci/runner/update_spec.rb2
-rw-r--r--spec/graphql/mutations/commits/create_spec.rb2
-rw-r--r--spec/graphql/mutations/container_repositories/destroy_spec.rb6
-rw-r--r--spec/graphql/mutations/design_management/delete_spec.rb45
-rw-r--r--spec/graphql/mutations/issues/set_assignees_spec.rb10
-rw-r--r--spec/graphql/mutations/merge_requests/accept_spec.rb157
-rw-r--r--spec/graphql/mutations/merge_requests/create_spec.rb12
-rw-r--r--spec/graphql/mutations/merge_requests/set_assignees_spec.rb10
-rw-r--r--spec/graphql/mutations/merge_requests/set_reviewers_spec.rb10
-rw-r--r--spec/graphql/mutations/release_asset_links/update_spec.rb14
-rw-r--r--spec/graphql/mutations/releases/update_spec.rb13
-rw-r--r--spec/graphql/mutations/users/set_namespace_commit_email_spec.rb2
-rw-r--r--spec/graphql/resolvers/admin/analytics/usage_trends/measurements_resolver_spec.rb8
-rw-r--r--spec/graphql/resolvers/base_resolver_spec.rb24
-rw-r--r--spec/graphql/resolvers/board_lists_resolver_spec.rb10
-rw-r--r--spec/graphql/resolvers/ci/config_resolver_spec.rb11
-rw-r--r--spec/graphql/resolvers/ci/group_runners_resolver_spec.rb9
-rw-r--r--spec/graphql/resolvers/ci/jobs_resolver_spec.rb8
-rw-r--r--spec/graphql/resolvers/ci/project_runners_resolver_spec.rb15
-rw-r--r--spec/graphql/resolvers/ci/runners_resolver_spec.rb9
-rw-r--r--spec/graphql/resolvers/clusters/agent_tokens_resolver_spec.rb4
-rw-r--r--spec/graphql/resolvers/clusters/agents_resolver_spec.rb16
-rw-r--r--spec/graphql/resolvers/concerns/caching_array_resolver_spec.rb4
-rw-r--r--spec/graphql/resolvers/concerns/looks_ahead_spec.rb10
-rw-r--r--spec/graphql/resolvers/concerns/resolves_groups_spec.rb5
-rw-r--r--spec/graphql/resolvers/container_repositories_resolver_spec.rb9
-rw-r--r--spec/graphql/resolvers/container_repository_tags_resolver_spec.rb17
-rw-r--r--spec/graphql/resolvers/environments_resolver_spec.rb26
-rw-r--r--spec/graphql/resolvers/error_tracking/sentry_errors_resolver_spec.rb8
-rw-r--r--spec/graphql/resolvers/kas/agent_configurations_resolver_spec.rb8
-rw-r--r--spec/graphql/resolvers/package_pipelines_resolver_spec.rb17
-rw-r--r--spec/graphql/resolvers/paginated_tree_resolver_spec.rb9
-rw-r--r--spec/graphql/resolvers/project_issues_resolver_spec.rb7
-rw-r--r--spec/graphql/resolvers/project_merge_requests_resolver_spec.rb16
-rw-r--r--spec/graphql/resolvers/project_milestones_resolver_spec.rb25
-rw-r--r--spec/graphql/resolvers/projects/fork_targets_resolver_spec.rb8
-rw-r--r--spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb12
-rw-r--r--spec/graphql/resolvers/projects_resolver_spec.rb8
-rw-r--r--spec/graphql/resolvers/user_notes_count_resolver_spec.rb16
-rw-r--r--spec/graphql/resolvers/users/participants_resolver_spec.rb3
-rw-r--r--spec/graphql/resolvers/work_items_resolver_spec.rb33
-rw-r--r--spec/graphql/subscriptions/issuable_updated_spec.rb4
-rw-r--r--spec/graphql/types/achievements/user_achievement_type_spec.rb1
-rw-r--r--spec/graphql/types/base_edge_spec.rb3
-rw-r--r--spec/graphql/types/base_field_spec.rb40
-rw-r--r--spec/graphql/types/boards/board_issue_input_type_spec.rb4
-rw-r--r--spec/graphql/types/ci/ci_cd_setting_type_spec.rb22
-rw-r--r--spec/graphql/types/ci/detailed_status_type_spec.rb21
-rw-r--r--spec/graphql/types/ci/job_base_field_spec.rb4
-rw-r--r--spec/graphql/types/ci/job_trace_type_spec.rb16
-rw-r--r--spec/graphql/types/ci/pipeline_type_spec.rb6
-rw-r--r--spec/graphql/types/current_user_todos_type_spec.rb8
-rw-r--r--spec/graphql/types/design_management/design_collection_copy_state_enum_spec.rb2
-rw-r--r--spec/graphql/types/issue_type_spec.rb6
-rw-r--r--spec/graphql/types/merge_request_type_spec.rb1
-rw-r--r--spec/graphql/types/merge_requests/mergeability_check_identifier_enum_spec.rb13
-rw-r--r--spec/graphql/types/merge_requests/mergeability_check_status_enum_spec.rb13
-rw-r--r--spec/graphql/types/merge_requests/mergeability_check_type_spec.rb10
-rw-r--r--spec/graphql/types/namespace_type_spec.rb2
-rw-r--r--spec/graphql/types/packages/package_base_type_spec.rb4
-rw-r--r--spec/graphql/types/packages/protection/rule_access_level_enum_spec.rb9
-rw-r--r--spec/graphql/types/packages/protection/rule_package_type_enum_spec.rb9
-rw-r--r--spec/graphql/types/packages/protection/rule_type_spec.rb29
-rw-r--r--spec/graphql/types/project_statistics_type_spec.rb10
-rw-r--r--spec/graphql/types/project_type_spec.rb90
-rw-r--r--spec/graphql/types/snippet_type_spec.rb2
-rw-r--r--spec/graphql/types/todo_type_spec.rb32
-rw-r--r--spec/graphql/types/work_item_type_spec.rb1
-rw-r--r--spec/graphql/types/work_items/widgets/hierarchy_type_spec.rb2
-rw-r--r--spec/haml_lint/linter/documentation_links_spec.rb28
-rw-r--r--spec/helpers/access_tokens_helper_spec.rb1
-rw-r--r--spec/helpers/appearances_helper_spec.rb14
-rw-r--r--spec/helpers/application_helper_spec.rb4
-rw-r--r--spec/helpers/application_settings_helper_spec.rb10
-rw-r--r--spec/helpers/auth_helper_spec.rb96
-rw-r--r--spec/helpers/blob_helper_spec.rb55
-rw-r--r--spec/helpers/breadcrumbs_helper_spec.rb8
-rw-r--r--spec/helpers/ci/builds_helper_spec.rb18
-rw-r--r--spec/helpers/ci/jobs_helper_spec.rb11
-rw-r--r--spec/helpers/ci/pipelines_helper_spec.rb7
-rw-r--r--spec/helpers/ci/status_helper_spec.rb46
-rw-r--r--spec/helpers/ci/triggers_helper_spec.rb8
-rw-r--r--spec/helpers/clusters_helper_spec.rb10
-rw-r--r--spec/helpers/diff_helper_spec.rb37
-rw-r--r--spec/helpers/emails_helper_spec.rb6
-rw-r--r--spec/helpers/form_helper_spec.rb5
-rw-r--r--spec/helpers/groups/observability_helper_spec.rb76
-rw-r--r--spec/helpers/groups_helper_spec.rb121
-rw-r--r--spec/helpers/ide_helper_spec.rb33
-rw-r--r--spec/helpers/issuables_description_templates_helper_spec.rb6
-rw-r--r--spec/helpers/issuables_helper_spec.rb57
-rw-r--r--spec/helpers/issues_helper_spec.rb2
-rw-r--r--spec/helpers/nav_helper_spec.rb4
-rw-r--r--spec/helpers/organizations/organization_helper_spec.rb29
-rw-r--r--spec/helpers/page_layout_helper_spec.rb6
-rw-r--r--spec/helpers/profiles_helper_spec.rb6
-rw-r--r--spec/helpers/projects/ml/experiments_helper_spec.rb8
-rw-r--r--spec/helpers/projects_helper_spec.rb145
-rw-r--r--spec/helpers/releases_helper_spec.rb12
-rw-r--r--spec/helpers/sidekiq_helper_spec.rb73
-rw-r--r--spec/helpers/sorting_helper_spec.rb42
-rw-r--r--spec/helpers/todos_helper_spec.rb9
-rw-r--r--spec/helpers/tracking_helper_spec.rb2
-rw-r--r--spec/helpers/users_helper_spec.rb2
-rw-r--r--spec/helpers/wiki_helper_spec.rb42
-rw-r--r--spec/initializers/direct_upload_support_spec.rb2
-rw-r--r--spec/initializers/enumerator_next_patch_spec.rb6
-rw-r--r--spec/initializers/gitlab_http_spec.rb47
-rw-r--r--spec/initializers/hangouts_chat_http_override_spec.rb34
-rw-r--r--spec/initializers/net_http_patch_spec.rb8
-rw-r--r--spec/initializers/net_http_response_patch_spec.rb3
-rw-r--r--spec/initializers/rack_multipart_patch_spec.rb4
-rw-r--r--spec/initializers/validate_database_config_spec.rb5
-rw-r--r--spec/initializers/validate_puma_spec.rb16
-rw-r--r--spec/lib/api/ci/helpers/runner_helpers_spec.rb2
-rw-r--r--spec/lib/api/ci/helpers/runner_spec.rb2
-rw-r--r--spec/lib/api/entities/basic_project_details_spec.rb28
-rw-r--r--spec/lib/api/entities/bulk_import_spec.rb5
-rw-r--r--spec/lib/api/entities/bulk_imports/entity_spec.rb5
-rw-r--r--spec/lib/api/entities/diff_spec.rb46
-rw-r--r--spec/lib/api/entities/namespace_basic_spec.rb47
-rw-r--r--spec/lib/api/entities/user_spec.rb4
-rw-r--r--spec/lib/api/entities/wiki_page_spec.rb13
-rw-r--r--spec/lib/api/helpers/common_helpers_spec.rb2
-rw-r--r--spec/lib/api/helpers/import_github_helpers_spec.rb47
-rw-r--r--spec/lib/api/helpers_spec.rb77
-rw-r--r--spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb10
-rw-r--r--spec/lib/atlassian/jira_connect/serializers/reviewer_entity_spec.rb48
-rw-r--r--spec/lib/aws/s3_client_spec.rb33
-rw-r--r--spec/lib/backup/database_model_spec.rb90
-rw-r--r--spec/lib/backup/database_spec.rb17
-rw-r--r--spec/lib/backup/files_spec.rb24
-rw-r--r--spec/lib/backup/manager_spec.rb21
-rw-r--r--spec/lib/backup/repositories_spec.rb38
-rw-r--r--spec/lib/backup/task_spec.rb2
-rw-r--r--spec/lib/banzai/filter/asset_proxy_filter_spec.rb10
-rw-r--r--spec/lib/banzai/filter/autolink_filter_spec.rb2
-rw-r--r--spec/lib/banzai/filter/broadcast_message_sanitization_filter_spec.rb10
-rw-r--r--spec/lib/banzai/filter/image_link_filter_spec.rb4
-rw-r--r--spec/lib/banzai/filter/inline_observability_filter_spec.rb101
-rw-r--r--spec/lib/banzai/filter/math_filter_spec.rb17
-rw-r--r--spec/lib/banzai/filter/references/alert_reference_filter_spec.rb6
-rw-r--r--spec/lib/banzai/filter/references/commit_range_reference_filter_spec.rb4
-rw-r--r--spec/lib/banzai/filter/references/commit_reference_filter_spec.rb6
-rw-r--r--spec/lib/banzai/filter/references/design_reference_filter_spec.rb2
-rw-r--r--spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb4
-rw-r--r--spec/lib/banzai/filter/references/feature_flag_reference_filter_spec.rb6
-rw-r--r--spec/lib/banzai/filter/references/issue_reference_filter_spec.rb16
-rw-r--r--spec/lib/banzai/filter/references/label_reference_filter_spec.rb28
-rw-r--r--spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb6
-rw-r--r--spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb24
-rw-r--r--spec/lib/banzai/filter/references/project_reference_filter_spec.rb2
-rw-r--r--spec/lib/banzai/filter/references/reference_filter_spec.rb7
-rw-r--r--spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb6
-rw-r--r--spec/lib/banzai/filter/references/user_reference_filter_spec.rb2
-rw-r--r--spec/lib/banzai/filter/syntax_highlight_filter_spec.rb6
-rw-r--r--spec/lib/banzai/filter_array_spec.rb16
-rw-r--r--spec/lib/banzai/pipeline/description_pipeline_spec.rb4
-rw-r--r--spec/lib/banzai/pipeline/full_pipeline_spec.rb2
-rw-r--r--spec/lib/banzai/pipeline/gfm_pipeline_spec.rb2
-rw-r--r--spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb2
-rw-r--r--spec/lib/banzai/reference_parser/base_parser_spec.rb2
-rw-r--r--spec/lib/banzai/reference_parser/issue_parser_spec.rb2
-rw-r--r--spec/lib/banzai/reference_parser/merge_request_parser_spec.rb2
-rw-r--r--spec/lib/bitbucket/collection_spec.rb2
-rw-r--r--spec/lib/bitbucket/representation/issue_spec.rb30
-rw-r--r--spec/lib/bitbucket/representation/repo_spec.rb2
-rw-r--r--spec/lib/bitbucket_server/representation/pull_request_spec.rb12
-rw-r--r--spec/lib/bulk_imports/clients/http_spec.rb16
-rw-r--r--spec/lib/bulk_imports/common/pipelines/badges_pipeline_spec.rb18
-rw-r--r--spec/lib/bulk_imports/common/pipelines/boards_pipeline_spec.rb4
-rw-r--r--spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb11
-rw-r--r--spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb8
-rw-r--r--spec/lib/bulk_imports/common/transformers/prohibited_attributes_transformer_spec.rb2
-rw-r--r--spec/lib/bulk_imports/common/transformers/user_reference_transformer_spec.rb77
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb7
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/project_entities_pipeline_spec.rb7
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb7
-rw-r--r--spec/lib/bulk_imports/ndjson_pipeline_spec.rb30
-rw-r--r--spec/lib/bulk_imports/pipeline/extracted_data_spec.rb14
-rw-r--r--spec/lib/bulk_imports/pipeline/runner_spec.rb115
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb2
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb14
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/external_pull_requests_pipeline_spec.rb2
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb2
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/merge_requests_pipeline_spec.rb2
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb2
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/project_pipeline_spec.rb7
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb5
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb2
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb10
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb2
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb17
-rw-r--r--spec/lib/container_registry/client_spec.rb27
-rw-r--r--spec/lib/container_registry/gitlab_api_client_spec.rb147
-rw-r--r--spec/lib/expand_variables_spec.rb104
-rw-r--r--spec/lib/extracts_path_spec.rb4
-rw-r--r--spec/lib/extracts_ref/ref_extractor_spec.rb125
-rw-r--r--spec/lib/extracts_ref_spec.rb24
-rw-r--r--spec/lib/feature_spec.rb62
-rw-r--r--spec/lib/generators/batched_background_migration/batched_background_migration_generator_spec.rb14
-rw-r--r--spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_dictionary_matcher.txt4
-rw-r--r--spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_spec.txt (renamed from spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_spec_matcher.txt)0
-rw-r--r--spec/lib/generators/batched_background_migration/expected_files/queue_my_batched_migration.txt1
-rw-r--r--spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb56
-rw-r--r--spec/lib/generators/model/mocks/migration_file.txt3
-rw-r--r--spec/lib/gitlab/auth/auth_finders_spec.rb71
-rw-r--r--spec/lib/gitlab/auth/ldap/config_spec.rb15
-rw-r--r--spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb30
-rw-r--r--spec/lib/gitlab/auth/o_auth/user_spec.rb31
-rw-r--r--spec/lib/gitlab/auth_spec.rb103
-rw-r--r--spec/lib/gitlab/background_migration/backfill_finding_id_in_vulnerabilities_spec.rb133
-rw-r--r--spec/lib/gitlab/background_migration/backfill_has_remediations_of_vulnerability_reads_spec.rb136
-rw-r--r--spec/lib/gitlab/background_migration/delete_orphans_approval_merge_request_rules2_spec.rb121
-rw-r--r--spec/lib/gitlab/background_migration/delete_orphans_approval_project_rules2_spec.rb122
-rw-r--r--spec/lib/gitlab/bitbucket_import/importer_spec.rb28
-rw-r--r--spec/lib/gitlab/bitbucket_import/importers/issue_importer_spec.rb103
-rw-r--r--spec/lib/gitlab/bitbucket_import/importers/issue_notes_importer_spec.rb85
-rw-r--r--spec/lib/gitlab/bitbucket_import/importers/issues_importer_spec.rb70
-rw-r--r--spec/lib/gitlab/bitbucket_import/importers/issues_notes_importer_spec.rb53
-rw-r--r--spec/lib/gitlab/bitbucket_import/importers/lfs_object_importer_spec.rb82
-rw-r--r--spec/lib/gitlab/bitbucket_import/importers/lfs_objects_importer_spec.rb116
-rw-r--r--spec/lib/gitlab/bitbucket_import/importers/pull_request_notes_importer_spec.rb68
-rw-r--r--spec/lib/gitlab/bitbucket_import/importers/pull_requests_notes_importer_spec.rb51
-rw-r--r--spec/lib/gitlab/bitbucket_import/ref_converter_spec.rb69
-rw-r--r--spec/lib/gitlab/bitbucket_server_import/importers/pull_request_importer_spec.rb62
-rw-r--r--spec/lib/gitlab/bitbucket_server_import/importers/pull_requests_importer_spec.rb94
-rw-r--r--spec/lib/gitlab/chat_spec.rb19
-rw-r--r--spec/lib/gitlab/checks/global_file_size_check_spec.rb16
-rw-r--r--spec/lib/gitlab/checks/tag_check_spec.rb30
-rw-r--r--spec/lib/gitlab/ci/build/context/build_spec.rb30
-rw-r--r--spec/lib/gitlab/ci/build/duration_parser_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/components/instance_path_spec.rb45
-rw-r--r--spec/lib/gitlab/ci/config/external/file/component_spec.rb32
-rw-r--r--spec/lib/gitlab/ci/config/header/input_spec.rb19
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/context_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/functions/base_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/functions/expand_vars_spec.rb90
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/functions/truncate_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/functions_stack_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/inputs_spec.rb409
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/interpolator_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/yaml/loader_spec.rb32
-rw-r--r--spec/lib/gitlab/ci/config/yaml/result_spec.rb55
-rw-r--r--spec/lib/gitlab/ci/lint_spec.rb41
-rw-r--r--spec/lib/gitlab/ci/parsers/security/common_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/parsers/test/junit_spec.rb32
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/expression/lexeme/pattern/regular_expression_spec.rb57
-rw-r--r--spec/lib/gitlab/ci/pipeline/expression/lexeme/pattern_spec.rb13
-rw-r--r--spec/lib/gitlab/ci/status/bridge/factory_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/status/build/factory_spec.rb28
-rw-r--r--spec/lib/gitlab/ci/status/canceled_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/created_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/factory_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/status/failed_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/manual_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/pending_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/pipeline/blocked_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/pipeline/delayed_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/preparing_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/running_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/scheduled_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/skipped_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/success_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/success_warning_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/status/waiting_for_resource_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/variables/builder/group_spec.rb18
-rw-r--r--spec/lib/gitlab/ci/variables/collection/item_spec.rb21
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb25
-rw-r--r--spec/lib/gitlab/content_security_policy/config_loader_spec.rb11
-rw-r--r--spec/lib/gitlab/database/click_house_client_spec.rb22
-rw-r--r--spec/lib/gitlab/database/gitlab_schema_spec.rb82
-rw-r--r--spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb21
-rw-r--r--spec/lib/gitlab/database/migration_helpers/swapping_spec.rb172
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb59
-rw-r--r--spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb59
-rw-r--r--spec/lib/gitlab/database/migrations/milestone_mixin_spec.rb48
-rw-r--r--spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb8
-rw-r--r--spec/lib/gitlab/database/migrations/swap_columns_default_spec.rb118
-rw-r--r--spec/lib/gitlab/database/migrations/swap_columns_spec.rb64
-rw-r--r--spec/lib/gitlab/database/migrations/version_spec.rb120
-rw-r--r--spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb8
-rw-r--r--spec/lib/gitlab/database/partitioning/partition_manager_spec.rb75
-rw-r--r--spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb292
-rw-r--r--spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb313
-rw-r--r--spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb190
-rw-r--r--spec/lib/gitlab/database/rename_reserved_paths_migration/v1_spec.rb78
-rw-r--r--spec/lib/gitlab/database_importers/work_items/related_links_restrictions_importer_spec.rb10
-rw-r--r--spec/lib/gitlab/deploy_key_access_spec.rb10
-rw-r--r--spec/lib/gitlab/diff/position_tracer_spec.rb66
-rw-r--r--spec/lib/gitlab/doctor/reset_tokens_spec.rb133
-rw-r--r--spec/lib/gitlab/email/handler/service_desk_handler_spec.rb12
-rw-r--r--spec/lib/gitlab/email/message/build_ios_app_guide_spec.rb19
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/helper_spec.rb75
-rw-r--r--spec/lib/gitlab/email/receiver_spec.rb20
-rw-r--r--spec/lib/gitlab/email/service_desk_receiver_spec.rb35
-rw-r--r--spec/lib/gitlab/encoding_helper_spec.rb37
-rw-r--r--spec/lib/gitlab/exclusive_lease_spec.rb331
-rw-r--r--spec/lib/gitlab/experiment/rollout/feature_spec.rb94
-rw-r--r--spec/lib/gitlab/git/diff_spec.rb122
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb63
-rw-r--r--spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb117
-rw-r--r--spec/lib/gitlab/git/tree_spec.rb28
-rw-r--r--spec/lib/gitlab/git_audit_event_spec.rb79
-rw-r--r--spec/lib/gitlab/gitaly_client/repository_service_spec.rb26
-rw-r--r--spec/lib/gitlab/github_gists_import/importer/gist_importer_spec.rb8
-rw-r--r--spec/lib/gitlab/github_import/bulk_importing_spec.rb18
-rw-r--r--spec/lib/gitlab/github_import/client_spec.rb6
-rw-r--r--spec/lib/gitlab/github_import/clients/proxy_spec.rb150
-rw-r--r--spec/lib/gitlab/github_import/importer/attachments/issues_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/importer/attachments/merge_requests_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/importer/attachments/notes_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/importer/attachments/releases_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb14
-rw-r--r--spec/lib/gitlab/github_import/importer/issue_importer_spec.rb38
-rw-r--r--spec/lib/gitlab/github_import/importer/labels_importer_spec.rb5
-rw-r--r--spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb5
-rw-r--r--spec/lib/gitlab/github_import/importer/note_importer_spec.rb53
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb27
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb12
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/releases_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/settings_spec.rb11
-rw-r--r--spec/lib/gitlab/gon_helper_spec.rb1
-rw-r--r--spec/lib/gitlab/graphql/deprecations/deprecation_spec.rb17
-rw-r--r--spec/lib/gitlab/graphql/pagination/array_connection_spec.rb3
-rw-r--r--spec/lib/gitlab/graphql/pagination/externally_paginated_array_connection_spec.rb3
-rw-r--r--spec/lib/gitlab/graphql/pagination/offset_active_record_relation_connection_spec.rb6
-rw-r--r--spec/lib/gitlab/graphql/timeout_spec.rb5
-rw-r--r--spec/lib/gitlab/group_search_results_spec.rb2
-rw-r--r--spec/lib/gitlab/hashed_storage/migrator_spec.rb247
-rw-r--r--spec/lib/gitlab/http_spec.rb447
-rw-r--r--spec/lib/gitlab/i18n_spec.rb14
-rw-r--r--spec/lib/gitlab/import/errors_spec.rb1
-rw-r--r--spec/lib/gitlab/import/import_failure_service_spec.rb33
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml5
-rw-r--r--spec/lib/gitlab/import_export/attributes_finder_spec.rb30
-rw-r--r--spec/lib/gitlab/import_export/base/object_builder_spec.rb12
-rw-r--r--spec/lib/gitlab/import_export/base/relation_factory_spec.rb18
-rw-r--r--spec/lib/gitlab/import_export/design_repo_restorer_spec.rb4
-rw-r--r--spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb28
-rw-r--r--spec/lib/gitlab/import_export/importer_spec.rb4
-rw-r--r--spec/lib/gitlab/import_export/merge_request_parser_spec.rb10
-rw-r--r--spec/lib/gitlab/import_export/project/export_task_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/project/import_task_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/project/object_builder_spec.rb150
-rw-r--r--spec/lib/gitlab/import_export/project/relation_factory_spec.rb24
-rw-r--r--spec/lib/gitlab/import_export/project/tree_restorer_spec.rb46
-rw-r--r--spec/lib/gitlab/import_export/project/tree_saver_spec.rb4
-rw-r--r--spec/lib/gitlab/import_export/repo_restorer_spec.rb4
-rw-r--r--spec/lib/gitlab/import_export/shared_spec.rb12
-rw-r--r--spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb10
-rw-r--r--spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb4
-rw-r--r--spec/lib/gitlab/internal_events/event_definitions_spec.rb10
-rw-r--r--spec/lib/gitlab/internal_events_spec.rb50
-rw-r--r--spec/lib/gitlab/kubernetes/kube_client_spec.rb4
-rw-r--r--spec/lib/gitlab/legacy_http_spec.rb448
-rw-r--r--spec/lib/gitlab/memory/instrumentation_spec.rb4
-rw-r--r--spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb15
-rw-r--r--spec/lib/gitlab/metrics/web_transaction_spec.rb19
-rw-r--r--spec/lib/gitlab/middleware/handle_malformed_strings_spec.rb33
-rw-r--r--spec/lib/gitlab/middleware/path_traversal_check_spec.rb197
-rw-r--r--spec/lib/gitlab/observability_spec.rb204
-rw-r--r--spec/lib/gitlab/octokit/middleware_spec.rb4
-rw-r--r--spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb102
-rw-r--r--spec/lib/gitlab/path_traversal_spec.rb7
-rw-r--r--spec/lib/gitlab/prometheus/metric_group_spec.rb48
-rw-r--r--spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb39
-rw-r--r--spec/lib/gitlab/prometheus/queries/matched_metric_query_spec.rb137
-rw-r--r--spec/lib/gitlab/prometheus/queries/validate_query_spec.rb59
-rw-r--r--spec/lib/gitlab/prometheus/query_variables_spec.rb96
-rw-r--r--spec/lib/gitlab/protocol_access_spec.rb49
-rw-r--r--spec/lib/gitlab/puma/error_handler_spec.rb85
-rw-r--r--spec/lib/gitlab/rack_attack/request_spec.rb41
-rw-r--r--spec/lib/gitlab/redis/multi_store_spec.rb49
-rw-r--r--spec/lib/gitlab/redis/queues_metadata_spec.rb35
-rw-r--r--spec/lib/gitlab/redis/workhorse_spec.rb38
-rw-r--r--spec/lib/gitlab/regex_spec.rb27
-rw-r--r--spec/lib/gitlab/saas_spec.rb5
-rw-r--r--spec/lib/gitlab/search_results_spec.rb2
-rw-r--r--spec/lib/gitlab/shell_spec.rb81
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb65
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/extra_done_log_metadata_spec.rb10
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/skip_jobs_spec.rb135
-rw-r--r--spec/lib/gitlab/slash_commands/run_spec.rb10
-rw-r--r--spec/lib/gitlab/url_blocker_spec.rb20
-rw-r--r--spec/lib/gitlab/url_builder_spec.rb6
-rw-r--r--spec/lib/gitlab/usage/metric_definition_spec.rb100
-rw-r--r--spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb10
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/container_registry_db_enabled_metric_spec.rb9
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_ci_internal_pipelines_metric_spec.rb2
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_csv_imports_metric_spec.rb32
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_issues_created_manually_from_alerts_metric_spec.rb2
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_jira_imports_metric_spec.rb32
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_packages_metric_spec.rb33
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_projects_metric_spec.rb33
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_cta_clicked_metric_spec.rb8
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_sent_metric_spec.rb6
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/incoming_email_encrypted_secrets_enabled_metric_spec.rb2
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/service_desk_email_encrypted_secrets_enabled_metric_spec.rb2
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/total_count_metric_spec.rb40
-rw-r--r--spec/lib/gitlab/usage/metrics/query_spec.rb6
-rw-r--r--spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb16
-rw-r--r--spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb6
-rw-r--r--spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb56
-rw-r--r--spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb2
-rw-r--r--spec/lib/gitlab/usage_data_queries_spec.rb13
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb61
-rw-r--r--spec/lib/gitlab/workhorse_spec.rb84
-rw-r--r--spec/lib/product_analytics/settings_spec.rb101
-rw-r--r--spec/lib/release_highlights/validator_spec.rb2
-rw-r--r--spec/lib/sidebars/groups/menus/observability_menu_spec.rb93
-rw-r--r--spec/lib/sidebars/groups/super_sidebar_menus/monitor_menu_spec.rb22
-rw-r--r--spec/lib/sidebars/groups/super_sidebar_panel_spec.rb1
-rw-r--r--spec/lib/sidebars/organizations/menus/settings_menu_spec.rb52
-rw-r--r--spec/lib/sidebars/organizations/super_sidebar_panel_spec.rb3
-rw-r--r--spec/lib/sidebars/projects/menus/deployments_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/projects/menus/settings_menu_spec.rb12
-rw-r--r--spec/mailers/emails/in_product_marketing_spec.rb51
-rw-r--r--spec/mailers/emails/profile_spec.rb12
-rw-r--r--spec/migrations/20230721095222_delete_orphans_scan_finding_license_scanning_approval_rules2_spec.rb22
-rw-r--r--spec/migrations/20230905064317_swap_columns_for_ci_pipeline_variables_pipeline_id_bigint_spec.rb13
-rw-r--r--spec/migrations/20230908033511_swap_columns_for_ci_pipeline_chat_data_pipeline_id_bigint_spec.rb72
-rw-r--r--spec/migrations/20230912105945_queue_backfill_finding_id_in_vulnerabilities_spec.rb26
-rw-r--r--spec/migrations/20230913071219_delete_pages_domain_with_reserved_domains_spec.rb30
-rw-r--r--spec/migrations/20230920154302_change_epics_hierarchy_restrictions_spec.rb64
-rw-r--r--spec/migrations/20231001105945_requeue_backfill_finding_id_in_vulnerabilities_spec.rb26
-rw-r--r--spec/migrations/20231003142706_lower_project_build_timeout_to_respect_max_validation_spec.rb40
-rw-r--r--spec/migrations/20231011142714_queue_backfill_has_remediations_of_vulnerability_reads_spec.rb26
-rw-r--r--spec/migrations/add_namespaces_emails_enabled_column_data_spec.rb14
-rw-r--r--spec/migrations/add_projects_emails_enabled_column_data_spec.rb14
-rw-r--r--spec/migrations/add_work_items_related_link_restrictions_spec.rb37
-rw-r--r--spec/models/abuse/reports/user_mention_spec.rb12
-rw-r--r--spec/models/abuse_report_spec.rb6
-rw-r--r--spec/models/application_setting_spec.rb53
-rw-r--r--spec/models/approval_spec.rb12
-rw-r--r--spec/models/blob_viewer/gitlab_ci_yml_spec.rb3
-rw-r--r--spec/models/bulk_import_spec.rb8
-rw-r--r--spec/models/bulk_imports/tracker_spec.rb21
-rw-r--r--spec/models/chat_name_spec.rb18
-rw-r--r--spec/models/ci/build_need_spec.rb2
-rw-r--r--spec/models/ci/catalog/components_project_spec.rb104
-rw-r--r--spec/models/ci/catalog/listing_spec.rb43
-rw-r--r--spec/models/ci/catalog/resource_spec.rb32
-rw-r--r--spec/models/ci/pipeline_spec.rb56
-rw-r--r--spec/models/ci/processable_spec.rb2
-rw-r--r--spec/models/ci/ref_spec.rb16
-rw-r--r--spec/models/ci/runner_spec.rb14
-rw-r--r--spec/models/ci/unlock_pipeline_request_spec.rb113
-rw-r--r--spec/models/clusters/agent_token_spec.rb9
-rw-r--r--spec/models/clusters/cluster_spec.rb1
-rw-r--r--spec/models/concerns/integrations/enable_ssl_verification_spec.rb12
-rw-r--r--spec/models/concerns/integrations/has_web_hook_spec.rb4
-rw-r--r--spec/models/concerns/noteable_spec.rb20
-rw-r--r--spec/models/concerns/prometheus_adapter_spec.rb138
-rw-r--r--spec/models/concerns/reset_on_column_errors_spec.rb243
-rw-r--r--spec/models/concerns/reset_on_union_error_spec.rb132
-rw-r--r--spec/models/concerns/routable_spec.rb50
-rw-r--r--spec/models/container_expiration_policy_spec.rb3
-rw-r--r--spec/models/container_registry/protection/rule_spec.rb54
-rw-r--r--spec/models/dependency_proxy/image_ttl_group_policy_spec.rb3
-rw-r--r--spec/models/discussion_note_spec.rb8
-rw-r--r--spec/models/environment_spec.rb36
-rw-r--r--spec/models/group_spec.rb407
-rw-r--r--spec/models/integration_spec.rb41
-rw-r--r--spec/models/integrations/apple_app_store_spec.rb3
-rw-r--r--spec/models/integrations/asana_spec.rb92
-rw-r--r--spec/models/integrations/bamboo_spec.rb17
-rw-r--r--spec/models/integrations/chat_message/alert_message_spec.rb6
-rw-r--r--spec/models/integrations/chat_message/deployment_message_spec.rb65
-rw-r--r--spec/models/integrations/chat_message/issue_message_spec.rb6
-rw-r--r--spec/models/integrations/chat_message/pipeline_message_spec.rb27
-rw-r--r--spec/models/integrations/chat_message/push_message_spec.rb6
-rw-r--r--spec/models/integrations/discord_spec.rb6
-rw-r--r--spec/models/integrations/google_play_spec.rb3
-rw-r--r--spec/models/integrations/hangouts_chat_spec.rb2
-rw-r--r--spec/models/integrations/integration_list_spec.rb22
-rw-r--r--spec/models/integrations/jira_spec.rb10
-rw-r--r--spec/models/integrations/pivotaltracker_spec.rb12
-rw-r--r--spec/models/integrations/pushover_spec.rb8
-rw-r--r--spec/models/integrations/slack_spec.rb2
-rw-r--r--spec/models/integrations/telegram_spec.rb8
-rw-r--r--spec/models/issue_link_spec.rb4
-rw-r--r--spec/models/issue_spec.rb158
-rw-r--r--spec/models/lfs_download_object_spec.rb13
-rw-r--r--spec/models/loose_foreign_keys/deleted_record_spec.rb28
-rw-r--r--spec/models/member_spec.rb12
-rw-r--r--spec/models/members/last_group_owner_assigner_spec.rb2
-rw-r--r--spec/models/members/member_task_spec.rb124
-rw-r--r--spec/models/merge_request_diff_spec.rb34
-rw-r--r--spec/models/merge_request_reviewer_spec.rb2
-rw-r--r--spec/models/merge_request_spec.rb119
-rw-r--r--spec/models/ml/model_spec.rb43
-rw-r--r--spec/models/namespace/package_setting_spec.rb10
-rw-r--r--spec/models/namespace_setting_spec.rb2
-rw-r--r--spec/models/namespace_spec.rb172
-rw-r--r--spec/models/note_spec.rb65
-rw-r--r--spec/models/packages/build_info_spec.rb2
-rw-r--r--spec/models/packages/protection/rule_spec.rb234
-rw-r--r--spec/models/pages/lookup_path_spec.rb57
-rw-r--r--spec/models/pages_deployment_spec.rb38
-rw-r--r--spec/models/pages_domain_spec.rb2
-rw-r--r--spec/models/preloaders/project_root_ancestor_preloader_spec.rb90
-rw-r--r--spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb49
-rw-r--r--spec/models/project_authorization_spec.rb26
-rw-r--r--spec/models/project_pages_metadatum_spec.rb21
-rw-r--r--spec/models/project_setting_spec.rb3
-rw-r--r--spec/models/project_spec.rb185
-rw-r--r--spec/models/project_team_spec.rb13
-rw-r--r--spec/models/repository_spec.rb100
-rw-r--r--spec/models/resource_state_event_spec.rb4
-rw-r--r--spec/models/snippet_spec.rb38
-rw-r--r--spec/models/todo_spec.rb13
-rw-r--r--spec/models/user_preference_spec.rb3
-rw-r--r--spec/models/user_spec.rb28
-rw-r--r--spec/models/users/credit_card_validation_spec.rb155
-rw-r--r--spec/models/users/in_product_marketing_email_spec.rb60
-rw-r--r--spec/models/vs_code/settings/vs_code_setting_spec.rb29
-rw-r--r--spec/models/wiki_page_spec.rb16
-rw-r--r--spec/models/work_item_spec.rb25
-rw-r--r--spec/models/work_items/parent_link_spec.rb26
-rw-r--r--spec/models/work_items/related_link_restriction_spec.rb27
-rw-r--r--spec/models/work_items/related_work_item_link_spec.rb73
-rw-r--r--spec/models/work_items/type_spec.rb3
-rw-r--r--spec/policies/achievements/user_achievement_policy_spec.rb23
-rw-r--r--spec/policies/group_policy_spec.rb69
-rw-r--r--spec/policies/namespaces/user_namespace_policy_spec.rb2
-rw-r--r--spec/policies/personal_snippet_policy_spec.rb51
-rw-r--r--spec/policies/project_member_policy_spec.rb6
-rw-r--r--spec/policies/project_policy_spec.rb1
-rw-r--r--spec/policies/project_snippet_policy_spec.rb56
-rw-r--r--spec/policies/work_item_policy_spec.rb88
-rw-r--r--spec/presenters/blob_presenter_spec.rb50
-rw-r--r--spec/presenters/ci/pipeline_presenter_spec.rb126
-rw-r--r--spec/presenters/commit_presenter_spec.rb2
-rw-r--r--spec/presenters/issue_presenter_spec.rb4
-rw-r--r--spec/presenters/member_presenter_spec.rb35
-rw-r--r--spec/presenters/merge_request_presenter_spec.rb4
-rw-r--r--spec/presenters/ml/candidate_details_presenter_spec.rb12
-rw-r--r--spec/presenters/ml/model_presenter_spec.rb6
-rw-r--r--spec/presenters/tree_entry_presenter_spec.rb15
-rw-r--r--spec/presenters/vs_code/settings/vs_code_manifest_presenter_spec.rb35
-rw-r--r--spec/presenters/vs_code/settings/vs_code_setting_presenter_spec.rb43
-rw-r--r--spec/rake_helper.rb14
-rw-r--r--spec/requests/acme_challenges_controller_spec.rb9
-rw-r--r--spec/requests/api/admin/instance_clusters_spec.rb2
-rw-r--r--spec/requests/api/bulk_imports_spec.rb53
-rw-r--r--spec/requests/api/ci/jobs_spec.rb2
-rw-r--r--spec/requests/api/ci/pipeline_schedules_spec.rb40
-rw-r--r--spec/requests/api/commits_spec.rb18
-rw-r--r--spec/requests/api/composer_packages_spec.rb587
-rw-r--r--spec/requests/api/graphql/achievements/user_achievements_query_spec.rb27
-rw-r--r--spec/requests/api/graphql/ci/ci_cd_setting_spec.rb7
-rw-r--r--spec/requests/api/graphql/ci/config_spec.rb105
-rw-r--r--spec/requests/api/graphql/ci/job_spec.rb2
-rw-r--r--spec/requests/api/graphql/ci/pipeline_schedules_spec.rb25
-rw-r--r--spec/requests/api/graphql/gitlab_schema_spec.rb1
-rw-r--r--spec/requests/api/graphql/merge_request/merge_request_spec.rb38
-rw-r--r--spec/requests/api/graphql/mutations/achievements/update_user_achievement_priorities_spec.rb94
-rw-r--r--spec/requests/api/graphql/mutations/ci/job/retry_spec.rb28
-rw-r--r--spec/requests/api/graphql/mutations/container_repository/destroy_spec.rb5
-rw-r--r--spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb11
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/accept_spec.rb11
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/packages/protection/rule/create_spec.rb165
-rw-r--r--spec/requests/api/graphql/mutations/snippets/update_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/work_items/linked_items/add_spec.rb34
-rw-r--r--spec/requests/api/graphql/mutations/work_items/update_spec.rb67
-rw-r--r--spec/requests/api/graphql/project/error_tracking/sentry_errors_request_spec.rb4
-rw-r--r--spec/requests/api/graphql/project/issue/design_collection/version_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/merge_request/pipelines_spec.rb8
-rw-r--r--spec/requests/api/graphql/project/merge_request_spec.rb8
-rw-r--r--spec/requests/api/graphql/project/merge_requests_spec.rb34
-rw-r--r--spec/requests/api/graphql/project/packages_protection_rules_spec.rb84
-rw-r--r--spec/requests/api/graphql/project/pipeline_spec.rb13
-rw-r--r--spec/requests/api/graphql/project/project_members_spec.rb28
-rw-r--r--spec/requests/api/graphql/project/project_pipeline_statistics_spec.rb8
-rw-r--r--spec/requests/api/graphql/project/project_statistics_spec.rb8
-rw-r--r--spec/requests/api/graphql/project/terraform/states_spec.rb47
-rw-r--r--spec/requests/api/graphql/work_item_spec.rb46
-rw-r--r--spec/requests/api/graphql_spec.rb15
-rw-r--r--spec/requests/api/group_clusters_spec.rb2
-rw-r--r--spec/requests/api/group_export_spec.rb26
-rw-r--r--spec/requests/api/groups_spec.rb14
-rw-r--r--spec/requests/api/import_bitbucket_server_spec.rb32
-rw-r--r--spec/requests/api/import_github_spec.rb13
-rw-r--r--spec/requests/api/integrations_spec.rb25
-rw-r--r--spec/requests/api/internal/kubernetes_spec.rb9
-rw-r--r--spec/requests/api/internal/pages_spec.rb9
-rw-r--r--spec/requests/api/invitations_spec.rb182
-rw-r--r--spec/requests/api/issues/put_projects_issues_spec.rb8
-rw-r--r--spec/requests/api/lint_spec.rb99
-rw-r--r--spec/requests/api/maven_packages_spec.rb30
-rw-r--r--spec/requests/api/members_spec.rb78
-rw-r--r--spec/requests/api/merge_request_diffs_spec.rb9
-rw-r--r--spec/requests/api/merge_requests_spec.rb20
-rw-r--r--spec/requests/api/ml/mlflow/runs_spec.rb6
-rw-r--r--spec/requests/api/namespaces_spec.rb4
-rw-r--r--spec/requests/api/npm_group_packages_spec.rb10
-rw-r--r--spec/requests/api/nuget_project_packages_spec.rb107
-rw-r--r--spec/requests/api/oauth_tokens_spec.rb31
-rw-r--r--spec/requests/api/project_attributes.yml12
-rw-r--r--spec/requests/api/project_clusters_spec.rb2
-rw-r--r--spec/requests/api/project_container_repositories_spec.rb3
-rw-r--r--spec/requests/api/project_export_spec.rb44
-rw-r--r--spec/requests/api/projects_spec.rb25
-rw-r--r--spec/requests/api/repositories_spec.rb12
-rw-r--r--spec/requests/api/settings_spec.rb16
-rw-r--r--spec/requests/api/usage_data_queries_spec.rb5
-rw-r--r--spec/requests/api/usage_data_spec.rb10
-rw-r--r--spec/requests/api/users_spec.rb183
-rw-r--r--spec/requests/api/vs_code/settings/vs_code_settings_sync_spec.rb141
-rw-r--r--spec/requests/application_controller_spec.rb15
-rw-r--r--spec/requests/chaos_controller_spec.rb14
-rw-r--r--spec/requests/concerns/planning_hierarchy_spec.rb3
-rw-r--r--spec/requests/groups/custom_emoji_controller_spec.rb27
-rw-r--r--spec/requests/groups/observability_controller_spec.rb99
-rw-r--r--spec/requests/groups/settings/access_tokens_controller_spec.rb18
-rw-r--r--spec/requests/health_controller_spec.rb4
-rw-r--r--spec/requests/jwt_controller_spec.rb36
-rw-r--r--spec/requests/lfs_http_spec.rb13
-rw-r--r--spec/requests/metrics_controller_spec.rb9
-rw-r--r--spec/requests/oauth/authorizations_controller_spec.rb4
-rw-r--r--spec/requests/oauth/tokens_controller_spec.rb152
-rw-r--r--spec/requests/organizations/organizations_controller_spec.rb50
-rw-r--r--spec/requests/organizations/settings_controller_spec.rb54
-rw-r--r--spec/requests/projects/issue_links_controller_spec.rb2
-rw-r--r--spec/requests/projects/issues_controller_spec.rb23
-rw-r--r--spec/requests/projects/merge_requests/creations_spec.rb12
-rw-r--r--spec/requests/projects/merge_requests_controller_spec.rb69
-rw-r--r--spec/requests/projects/ml/models_controller_spec.rb74
-rw-r--r--spec/requests/projects/settings/access_tokens_controller_spec.rb18
-rw-r--r--spec/requests/projects/work_items_spec.rb4
-rw-r--r--spec/requests/registrations_controller_spec.rb6
-rw-r--r--spec/requests/sessions_spec.rb4
-rw-r--r--spec/requests/users/namespace_visits_controller_spec.rb12
-rw-r--r--spec/requests/users_controller_spec.rb2
-rw-r--r--spec/routing/environments_spec.rb6
-rw-r--r--spec/routing/group_routing_spec.rb12
-rw-r--r--spec/routing/organizations/settings_controller_routing_spec.rb12
-rw-r--r--spec/routing/project_routing_spec.rb197
-rw-r--r--spec/routing/routing_spec.rb8
-rw-r--r--spec/rubocop/batched_background_migrations_spec.rb43
-rw-r--r--spec/rubocop/check_graceful_task_spec.rb11
-rw-r--r--spec/rubocop/cop/background_migration/feature_category_spec.rb6
-rw-r--r--spec/rubocop/cop/experiments_test_coverage_spec.rb2
-rw-r--r--spec/rubocop/cop/gemfile/missing_feature_category_spec.rb63
-rw-r--r--spec/rubocop/cop/gitlab/avoid_gitlab_instance_checks_spec.rb45
-rw-r--r--spec/rubocop/cop/gitlab/feature_available_usage_spec.rb4
-rw-r--r--spec/rubocop/cop/migration/prevent_index_creation_spec.rb24
-rw-r--r--spec/rubocop/cop/migration/unfinished_dependencies_spec.rb118
-rw-r--r--spec/rubocop/cop/qa/fabricate_usage_spec.rb35
-rw-r--r--spec/rubocop/cop/rspec/env_mocking_spec.rb12
-rw-r--r--spec/rubocop/cop/rspec/feature_category_spec.rb (renamed from spec/rubocop/cop/rspec/invalid_feature_category_spec.rb)38
-rw-r--r--spec/rubocop/cop/rspec/missing_feature_category_spec.rb31
-rw-r--r--spec/rubocop/cop/style/regexp_literal_mixed_preserve_spec.rb8
-rw-r--r--spec/rubocop/feature_categories_spec.rb102
-rw-r--r--spec/rubocop/formatter/graceful_formatter_spec.rb27
-rw-r--r--spec/rubocop_spec_helper.rb2
-rw-r--r--spec/scripts/changed-feature-flags_spec.rb168
-rw-r--r--spec/scripts/generate_rspec_pipeline_spec.rb30
-rw-r--r--spec/scripts/pipeline/average_reports_spec.rb140
-rw-r--r--spec/scripts/pipeline/create_test_failure_issues_spec.rb188
-rw-r--r--spec/serializers/admin/abuse_report_details_entity_spec.rb1
-rw-r--r--spec/serializers/ci/pipeline_entity_spec.rb31
-rw-r--r--spec/serializers/integrations/field_entity_spec.rb2
-rw-r--r--spec/serializers/issue_board_entity_spec.rb2
-rw-r--r--spec/serializers/issue_entity_spec.rb2
-rw-r--r--spec/serializers/linked_project_issue_entity_spec.rb2
-rw-r--r--spec/serializers/project_import_entity_spec.rb10
-rw-r--r--spec/services/achievements/update_user_achievement_priorities_service_spec.rb73
-rw-r--r--spec/services/admin/abuse_reports/moderate_user_service_spec.rb37
-rw-r--r--spec/services/audit_events/build_service_spec.rb8
-rw-r--r--spec/services/auto_merge/base_service_spec.rb41
-rw-r--r--spec/services/bulk_imports/file_download_service_spec.rb2
-rw-r--r--spec/services/bulk_imports/process_service_spec.rb325
-rw-r--r--spec/services/bulk_imports/relation_batch_export_service_spec.rb14
-rw-r--r--spec/services/bulk_imports/relation_export_service_spec.rb14
-rw-r--r--spec/services/chat_names/find_user_service_spec.rb26
-rw-r--r--spec/services/ci/catalog/resources/validate_service_spec.rb (renamed from spec/services/ci/catalog/validate_resource_service_spec.rb)2
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb26
-rw-r--r--spec/services/ci/delete_objects_service_spec.rb4
-rw-r--r--spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb2
-rw-r--r--spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb224
-rw-r--r--spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb118
-rw-r--r--spec/services/ci/process_sync_events_service_spec.rb8
-rw-r--r--spec/services/ci/refs/enqueue_pipelines_to_unlock_service_spec.rb109
-rw-r--r--spec/services/ci/retry_job_service_spec.rb43
-rw-r--r--spec/services/ci/runners/register_runner_service_spec.rb4
-rw-r--r--spec/services/ci/unlock_pipeline_service_spec.rb160
-rw-r--r--spec/services/deployments/create_service_spec.rb1
-rw-r--r--spec/services/design_management/delete_designs_service_spec.rb2
-rw-r--r--spec/services/design_management/save_designs_service_spec.rb4
-rw-r--r--spec/services/draft_notes/publish_service_spec.rb2
-rw-r--r--spec/services/git/branch_push_service_spec.rb43
-rw-r--r--spec/services/groups/update_service_spec.rb24
-rw-r--r--spec/services/import/github_service_spec.rb395
-rw-r--r--spec/services/import/validate_remote_git_endpoint_service_spec.rb43
-rw-r--r--spec/services/issues/close_service_spec.rb2
-rw-r--r--spec/services/issues/set_crm_contacts_service_spec.rb8
-rw-r--r--spec/services/issues/update_service_spec.rb11
-rw-r--r--spec/services/jira_connect/sync_service_spec.rb18
-rw-r--r--spec/services/members/create_service_spec.rb118
-rw-r--r--spec/services/members/invite_service_spec.rb28
-rw-r--r--spec/services/merge_requests/approval_service_spec.rb33
-rw-r--r--spec/services/merge_requests/create_ref_service_spec.rb4
-rw-r--r--spec/services/merge_requests/delete_non_latest_diffs_service_spec.rb3
-rw-r--r--spec/services/merge_requests/mergeability/check_conflict_status_service_spec.rb46
-rw-r--r--spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb23
-rw-r--r--spec/services/merge_requests/mergeability/check_rebase_status_service_spec.rb60
-rw-r--r--spec/services/merge_requests/mergeability/run_checks_service_spec.rb153
-rw-r--r--spec/services/merge_requests/update_service_spec.rb44
-rw-r--r--spec/services/ml/experiment_tracking/candidate_repository_spec.rb2
-rw-r--r--spec/services/ml/experiment_tracking/experiment_repository_spec.rb2
-rw-r--r--spec/services/ml/experiment_tracking/handle_candidate_gitlab_metadata_service_spec.rb2
-rw-r--r--spec/services/notes/create_service_spec.rb2
-rw-r--r--spec/services/notes/destroy_service_spec.rb4
-rw-r--r--spec/services/notes/quick_actions_service_spec.rb79
-rw-r--r--spec/services/notes/update_service_spec.rb2
-rw-r--r--spec/services/packages/npm/create_package_service_spec.rb28
-rw-r--r--spec/services/packages/nuget/extract_metadata_file_service_spec.rb44
-rw-r--r--spec/services/packages/nuget/metadata_extraction_service_spec.rb24
-rw-r--r--spec/services/packages/nuget/odata_package_entry_service_spec.rb33
-rw-r--r--spec/services/packages/nuget/process_package_file_service_spec.rb83
-rw-r--r--spec/services/packages/nuget/symbols/create_symbol_files_service_spec.rb103
-rw-r--r--spec/services/packages/nuget/symbols/extract_symbol_signature_service_spec.rb23
-rw-r--r--spec/services/packages/nuget/update_package_from_metadata_service_spec.rb13
-rw-r--r--spec/services/packages/protection/create_rule_service_spec.rb139
-rw-r--r--spec/services/pages/migrate_from_legacy_storage_service_spec.rb137
-rw-r--r--spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb118
-rw-r--r--spec/services/pages/zip_directory_service_spec.rb280
-rw-r--r--spec/services/projects/after_rename_service_spec.rb207
-rw-r--r--spec/services/projects/hashed_storage/migrate_repository_service_spec.rb152
-rw-r--r--spec/services/projects/hashed_storage/migration_service_spec.rb44
-rw-r--r--spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb106
-rw-r--r--spec/services/projects/hashed_storage/rollback_repository_service_spec.rb152
-rw-r--r--spec/services/projects/hashed_storage/rollback_service_spec.rb78
-rw-r--r--spec/services/projects/in_product_marketing_campaign_emails_service_spec.rb136
-rw-r--r--spec/services/projects/lfs_pointers/lfs_download_service_spec.rb2
-rw-r--r--spec/services/projects/participants_service_spec.rb26
-rw-r--r--spec/services/projects/record_target_platforms_service_spec.rb46
-rw-r--r--spec/services/projects/transfer_service_spec.rb22
-rw-r--r--spec/services/projects/update_pages_service_spec.rb58
-rw-r--r--spec/services/projects/update_repository_storage_service_spec.rb12
-rw-r--r--spec/services/projects/update_service_spec.rb2
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb118
-rw-r--r--spec/services/releases/destroy_service_spec.rb6
-rw-r--r--spec/services/repositories/replicate_service_spec.rb45
-rw-r--r--spec/services/resource_events/change_labels_service_spec.rb2
-rw-r--r--spec/services/snippets/destroy_service_spec.rb2
-rw-r--r--spec/services/snippets/update_repository_storage_service_spec.rb4
-rw-r--r--spec/services/spam/spam_verdict_service_spec.rb12
-rw-r--r--spec/services/system_notes/issuables_service_spec.rb31
-rw-r--r--spec/services/system_notes/time_tracking_service_spec.rb6
-rw-r--r--spec/services/tasks_to_be_done/base_service_spec.rb69
-rw-r--r--spec/services/todo_service_spec.rb80
-rw-r--r--spec/services/update_container_registry_info_service_spec.rb11
-rw-r--r--spec/services/users/auto_ban_service_spec.rb56
-rw-r--r--spec/services/users/in_product_marketing_email_records_spec.rb13
-rw-r--r--spec/services/users/signup_service_spec.rb75
-rw-r--r--spec/services/users/trust_service_spec.rb (renamed from spec/services/users/allow_possible_spam_service_spec.rb)4
-rw-r--r--spec/services/users/untrust_service_spec.rb (renamed from spec/services/users/disallow_possible_spam_service_spec.rb)8
-rw-r--r--spec/services/verify_pages_domain_service_spec.rb16
-rw-r--r--spec/services/vs_code/settings/create_or_update_service_spec.rb48
-rw-r--r--spec/services/web_hook_service_spec.rb17
-rw-r--r--spec/services/work_items/parent_links/create_service_spec.rb2
-rw-r--r--spec/services/work_items/related_work_item_links/create_service_spec.rb3
-rw-r--r--spec/services/work_items/update_service_spec.rb2
-rw-r--r--spec/services/work_items/widgets/labels_service/update_service_spec.rb10
-rw-r--r--spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb10
-rw-r--r--spec/sidekiq/cron/job_gem_dependency_spec.rb9
-rw-r--r--spec/spec_helper.rb12
-rw-r--r--spec/support/ability_check_todo.yml2
-rw-r--r--spec/support/capybara.rb9
-rw-r--r--spec/support/database/prevent_cross_joins.rb2
-rw-r--r--spec/support/db_cleaner.rb5
-rw-r--r--spec/support/finder_collection_allowlist.yml1
-rw-r--r--spec/support/helpers/content_editor_helpers.rb8
-rw-r--r--spec/support/helpers/content_security_policy_helpers.rb4
-rw-r--r--spec/support/helpers/dns_helpers.rb16
-rw-r--r--spec/support/helpers/fake_migration_classes.rb13
-rw-r--r--spec/support/helpers/features/dom_helpers.rb8
-rw-r--r--spec/support/helpers/graphql_helpers.rb5
-rw-r--r--spec/support/helpers/integrations/test_helpers.rb11
-rw-r--r--spec/support/helpers/javascript_fixtures_helpers.rb2
-rw-r--r--spec/support/helpers/listbox_helpers.rb4
-rw-r--r--spec/support/helpers/migrations_helpers/work_item_types_helper.rb4
-rw-r--r--spec/support/helpers/navbar_structure_helper.rb13
-rw-r--r--spec/support/helpers/prometheus/metric_builders.rb29
-rw-r--r--spec/support/helpers/stub_configuration.rb5
-rw-r--r--spec/support/helpers/stub_feature_flags.rb9
-rw-r--r--spec/support/helpers/stub_saas_features.rb20
-rw-r--r--spec/support/helpers/test_env.rb3
-rw-r--r--spec/support/helpers/unlock_pipelines_helpers.rb23
-rw-r--r--spec/support/helpers/usage_data_helpers.rb1
-rw-r--r--spec/support/matchers/pushed_licensed_features_matcher.rb29
-rw-r--r--spec/support/protected_branch_helpers.rb2
-rw-r--r--spec/support/rake.rb20
-rw-r--r--spec/support/rspec.rb22
-rw-r--r--spec/support/rspec_order_todo.yml49
-rw-r--r--spec/support/shared_contexts/bulk_imports_requests_shared_context.rb1
-rw-r--r--spec/support/shared_contexts/features/integrations/integrations_shared_context.rb67
-rw-r--r--spec/support/shared_contexts/merge_request_create_shared_context.rb4
-rw-r--r--spec/support/shared_contexts/merge_request_edit_shared_context.rb3
-rw-r--r--spec/support/shared_contexts/policies/group_policy_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/policies/project_policy_table_shared_context.rb25
-rw-r--r--spec/support/shared_contexts/requests/api/nuget_packages_shared_context.rb2
-rw-r--r--spec/support/shared_examples/analytics/cycle_analytics/request_params_examples.rb32
-rw-r--r--spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb16
-rw-r--r--spec/support/shared_examples/bulk_imports/common/pipelines/wiki_pipeline_examples.rb7
-rw-r--r--spec/support/shared_examples/ci/deployable_shared_examples.rb22
-rw-r--r--spec/support/shared_examples/config/metrics/every_metric_definition_shared_examples.rb20
-rw-r--r--spec/support/shared_examples/controllers/base_action_controller_shared_examples.rb26
-rw-r--r--spec/support/shared_examples/controllers/concerns/onboarding/redirectable_shared_examples.rb35
-rw-r--r--spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/controllers/internal_event_tracking_examples.rb6
-rw-r--r--spec/support/shared_examples/features/discussion_comments_shared_example.rb6
-rw-r--r--spec/support/shared_examples/features/variable_list_drawer_shared_examples.rb230
-rw-r--r--spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb7
-rw-r--r--spec/support/shared_examples/features/wiki/user_previews_wiki_changes_shared_examples.rb3
-rw-r--r--spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb7
-rw-r--r--spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb14
-rw-r--r--spec/support/shared_examples/features/work_items_shared_examples.rb106
-rw-r--r--spec/support/shared_examples/graphql/design_fields_shared_examples.rb5
-rw-r--r--spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb57
-rw-r--r--spec/support/shared_examples/initializers/uses_gitlab_url_blocker_shared_examples.rb42
-rw-r--r--spec/support/shared_examples/integrations/integration_settings_form.rb6
-rw-r--r--spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/lib/gitlab/database/background_migration_job_shared_examples.rb14
-rw-r--r--spec/support/shared_examples/lib/gitlab/import/advance_stage_shared_examples.rb95
-rw-r--r--spec/support/shared_examples/metrics_instrumentation_shared_examples.rb3
-rw-r--r--spec/support/shared_examples/migrations/swap_conversion_columns_shared_examples.rb117
-rw-r--r--spec/support/shared_examples/models/chat_integration_shared_examples.rb54
-rw-r--r--spec/support/shared_examples/models/concerns/protected_ref_access_examples.rb45
-rw-r--r--spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb13
-rw-r--r--spec/support/shared_examples/models/issuable_link_shared_examples.rb39
-rw-r--r--spec/support/shared_examples/models/member_shared_examples.rb53
-rw-r--r--spec/support/shared_examples/namespaces/traversal_examples.rb8
-rw-r--r--spec/support/shared_examples/namespaces/traversal_scope_examples.rb43
-rw-r--r--spec/support/shared_examples/observability/embed_observabilities_examples.rb61
-rw-r--r--spec/support/shared_examples/prometheus/additional_metrics_shared_examples.rb161
-rw-r--r--spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/redis/redis_shared_examples.rb64
-rw-r--r--spec/support/shared_examples/ref_extraction_shared_examples.rb165
-rw-r--r--spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb102
-rw-r--r--spec/support/shared_examples/requests/api/graphql/issue_list_shared_examples.rb22
-rw-r--r--spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb52
-rw-r--r--spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb23
-rw-r--r--spec/support/shared_examples/requests/organizations_shared_examples.rb35
-rw-r--r--spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb48
-rw-r--r--spec/support/shared_examples/services/issuable_links/create_links_shared_examples.rb55
-rw-r--r--spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb14
-rw-r--r--spec/support/shared_examples/services/protected_branches_shared_examples.rb21
-rw-r--r--spec/support/shared_examples/work_item_hierarchy_restrictions_importer.rb15
-rw-r--r--spec/support/shared_examples/work_item_related_link_restrictions_importer.rb39
-rw-r--r--spec/support/shared_examples/workers/gitlab/github_import/stage_methods_shared_examples.rb17
-rw-r--r--spec/support_specs/helpers/stub_saas_features_spec.rb50
-rw-r--r--spec/tasks/admin_mode_spec.rb2
-rw-r--r--spec/tasks/cache_rake_spec.rb2
-rw-r--r--spec/tasks/config_lint_rake_spec.rb32
-rw-r--r--spec/tasks/dev_rake_spec.rb4
-rw-r--r--spec/tasks/gettext_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/artifacts/check_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/artifacts/migrate_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/audit_event_types/audit_event_types_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/audit_event_types/check_docs_task_spec.rb2
-rw-r--r--spec/tasks/gitlab/audit_event_types/compile_docs_task_spec.rb2
-rw-r--r--spec/tasks/gitlab/background_migrations_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/backup_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/check_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/ci_secure_files/check_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/ci_secure_files/migrate_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/cleanup_rake_spec.rb191
-rw-r--r--spec/tasks/gitlab/container_registry_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/db/cells/bump_cell_sequences_rake_spec.rb5
-rw-r--r--spec/tasks/gitlab/db/decomposition/connection_status_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb5
-rw-r--r--spec/tasks/gitlab/db/lock_writes_rake_spec.rb5
-rw-r--r--spec/tasks/gitlab/db/migration_fix_15_11_rake_spec.rb4
-rw-r--r--spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb5
-rw-r--r--spec/tasks/gitlab/db/validate_config_rake_spec.rb5
-rw-r--r--spec/tasks/gitlab/db_rake_spec.rb5
-rw-r--r--spec/tasks/gitlab/dependency_proxy/migrate_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/doctor/secrets_rake_spec.rb56
-rw-r--r--spec/tasks/gitlab/external_diffs_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/feature_categories_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/git_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/gitaly_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/incoming_email_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/ldap_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/lfs/check_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/lfs/migrate_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/packages/migrate_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/pages_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/password_rake_spec.rb12
-rw-r--r--spec/tasks/gitlab/praefect_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/refresh_project_statistics_build_artifacts_size_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/security/update_banned_ssh_keys_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/seed/group_seed_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/seed/runner_fleet_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/service_desk_email_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/setup_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/shell_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/sidekiq_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/smtp_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/snippets_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/storage_rake_spec.rb284
-rw-r--r--spec/tasks/gitlab/terraform/migrate_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/update_templates_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/uploads/check_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/uploads/migrate_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/usage_data_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/user_management_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/web_hook_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/workhorse_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/x509/update_rake_spec.rb2
-rw-r--r--spec/tasks/import_rake_spec.rb2
-rw-r--r--spec/tasks/migrate/schema_check_rake_spec.rb4
-rw-r--r--spec/tasks/rubocop_rake_spec.rb3
-rw-r--r--spec/tasks/tokens_rake_spec.rb2
-rw-r--r--spec/tooling/danger/config_files_spec.rb4
-rw-r--r--spec/tooling/danger/feature_flag_spec.rb22
-rw-r--r--spec/tooling/danger/rubocop_inline_disable_suggestion_spec.rb111
-rw-r--r--spec/tooling/danger/saas_feature_spec.rb138
-rw-r--r--spec/tooling/danger/sidekiq_args_spec.rb13
-rw-r--r--spec/tooling/lib/tooling/job_metrics_spec.rb721
-rw-r--r--spec/uploaders/packages/nuget/symbol_uploader_spec.rb3
-rw-r--r--spec/views/admin/sessions/new.html.haml_spec.rb5
-rw-r--r--spec/views/ci/status/_badge.html.haml_spec.rb14
-rw-r--r--spec/views/devise/sessions/new.html.haml_spec.rb40
-rw-r--r--spec/views/groups/observability/observability.html.haml_spec.rb18
-rw-r--r--spec/views/groups/show.html.haml_spec.rb15
-rw-r--r--spec/views/layouts/terms.html.haml_spec.rb2
-rw-r--r--spec/views/notify/user_deactivated_email.html.haml_spec.rb12
-rw-r--r--spec/views/notify/user_deactivated_email.text.erb_spec.rb12
-rw-r--r--spec/views/projects/commits/_commit.html.haml_spec.rb6
-rw-r--r--spec/views/projects/merge_requests/edit.html.haml_spec.rb65
-rw-r--r--spec/views/projects/tags/index.html.haml_spec.rb8
-rw-r--r--spec/views/projects/tree/show.html.haml_spec.rb22
-rw-r--r--spec/views/registrations/welcome/show.html.haml_spec.rb20
-rw-r--r--spec/views/shared/groups/_dropdown.html.haml_spec.rb28
-rw-r--r--spec/workers/auto_devops/disable_worker_spec.rb1
-rw-r--r--spec/workers/background_migration/ci_database_worker_spec.rb4
-rw-r--r--spec/workers/background_migration_worker_spec.rb4
-rw-r--r--spec/workers/bulk_import_worker_spec.rb306
-rw-r--r--spec/workers/bulk_imports/entity_worker_spec.rb226
-rw-r--r--spec/workers/bulk_imports/finish_batched_pipeline_worker_spec.rb39
-rw-r--r--spec/workers/bulk_imports/pipeline_batch_worker_spec.rb1
-rw-r--r--spec/workers/bulk_imports/pipeline_worker_spec.rb103
-rw-r--r--spec/workers/ci/initial_pipeline_process_worker_spec.rb50
-rw-r--r--spec/workers/ci/ref_delete_unlock_artifacts_worker_spec.rb78
-rw-r--r--spec/workers/ci/refs/unlock_previous_pipelines_worker_spec.rb46
-rw-r--r--spec/workers/ci/schedule_unlock_pipelines_in_queue_cron_worker_spec.rb15
-rw-r--r--spec/workers/ci/unlock_pipelines_in_queue_worker_spec.rb136
-rw-r--r--spec/workers/concerns/gitlab/github_import/object_importer_spec.rb67
-rw-r--r--spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb122
-rw-r--r--spec/workers/delete_container_repository_worker_spec.rb19
-rw-r--r--spec/workers/environments/stop_job_failed_worker_spec.rb111
-rw-r--r--spec/workers/every_sidekiq_worker_spec.rb12
-rw-r--r--spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb46
-rw-r--r--spec/workers/gitlab/bitbucket_import/import_issue_notes_worker_spec.rb9
-rw-r--r--spec/workers/gitlab/bitbucket_import/import_issue_worker_spec.rb9
-rw-r--r--spec/workers/gitlab/bitbucket_import/import_lfs_object_worker_spec.rb14
-rw-r--r--spec/workers/gitlab/bitbucket_import/import_pull_request_notes_worker_spec.rb9
-rw-r--r--spec/workers/gitlab/bitbucket_import/stage/import_issues_notes_worker_spec.rb77
-rw-r--r--spec/workers/gitlab/bitbucket_import/stage/import_issues_worker_spec.rb77
-rw-r--r--spec/workers/gitlab/bitbucket_import/stage/import_lfs_objects_worker_spec.rb28
-rw-r--r--spec/workers/gitlab/bitbucket_import/stage/import_pull_requests_notes_worker_spec.rb77
-rw-r--r--spec/workers/gitlab/bitbucket_import/stage/import_pull_requests_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb6
-rw-r--r--spec/workers/gitlab/github_gists_import/start_import_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/github_import/stage/import_attachments_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb20
-rw-r--r--spec/workers/gitlab/github_import/stage/import_collaborators_worker_spec.rb20
-rw-r--r--spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/github_import/stage/import_lfs_objects_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/github_import/stage/import_notes_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/github_import/stage/import_protected_branches_worker_spec.rb24
-rw-r--r--spec/workers/gitlab/github_import/stage/import_pull_requests_merged_by_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/github_import/stage/import_pull_requests_review_requests_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb24
-rw-r--r--spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb34
-rw-r--r--spec/workers/gitlab_shell_worker_spec.rb31
-rw-r--r--spec/workers/hashed_storage/migrator_worker_spec.rb30
-rw-r--r--spec/workers/hashed_storage/project_migrate_worker_spec.rb85
-rw-r--r--spec/workers/hashed_storage/project_rollback_worker_spec.rb50
-rw-r--r--spec/workers/hashed_storage/rollbacker_worker_spec.rb30
-rw-r--r--spec/workers/integrations/execute_worker_spec.rb1
-rw-r--r--spec/workers/integrations/slack_event_worker_spec.rb4
-rw-r--r--spec/workers/issuable/related_links_create_worker_spec.rb110
-rw-r--r--spec/workers/merge_worker_spec.rb57
-rw-r--r--spec/workers/pages/deactivated_deployments_delete_cron_worker_spec.rb15
-rw-r--r--spec/workers/partition_creation_worker_spec.rb2
-rw-r--r--spec/workers/projects/after_import_worker_spec.rb2
-rw-r--r--spec/workers/projects/delete_branch_worker_spec.rb1
-rw-r--r--spec/workers/projects/record_target_platforms_worker_spec.rb2
-rw-r--r--spec/workers/tasks_to_be_done/create_worker_spec.rb20
-rw-r--r--spec/workers/web_hook_worker_spec.rb1
1535 files changed, 33323 insertions, 20754 deletions
diff --git a/spec/benchmarks/banzai_benchmark.rb b/spec/benchmarks/banzai_benchmark.rb
index 45f45bcc8dd..988577691f2 100644
--- a/spec/benchmarks/banzai_benchmark.rb
+++ b/spec/benchmarks/banzai_benchmark.rb
@@ -52,7 +52,7 @@ RSpec.describe 'GitLab Markdown Benchmark', :aggregate_failures, feature_categor
stub_application_setting(asset_proxy_enabled: true)
stub_application_setting(asset_proxy_secret_key: 'shared-secret')
stub_application_setting(asset_proxy_url: 'https://assets.example.com')
- stub_application_setting(asset_proxy_whitelist: %w(gitlab.com *.mydomain.com))
+ stub_application_setting(asset_proxy_whitelist: %w[gitlab.com *.mydomain.com])
stub_application_setting(plantuml_enabled: true, plantuml_url: 'http://localhost:8080')
stub_application_setting(kroki_enabled: true, kroki_url: 'http://localhost:8000')
diff --git a/spec/commands/metrics_server/metrics_server_spec.rb b/spec/commands/metrics_server/metrics_server_spec.rb
index ee07602016f..52f74ad4a9e 100644
--- a/spec/commands/metrics_server/metrics_server_spec.rb
+++ b/spec/commands/metrics_server/metrics_server_spec.rb
@@ -1,7 +1,6 @@
# frozen_string_literal: true
require 'spec_helper'
-require 'rake_helper'
require_relative '../../../metrics_server/metrics_server'
diff --git a/spec/commands/sidekiq_cluster/cli_spec.rb b/spec/commands/sidekiq_cluster/cli_spec.rb
index 09e40d8f91a..d5fa88e72a7 100644
--- a/spec/commands/sidekiq_cluster/cli_spec.rb
+++ b/spec/commands/sidekiq_cluster/cli_spec.rb
@@ -63,11 +63,11 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
.with([['foo'] + described_class::DEFAULT_QUEUES], default_options)
.and_return([])
- cli.run(%w(foo))
+ cli.run(%w[foo])
end
it 'allows the special * selector' do
- worker_queues = %w(foo bar baz)
+ worker_queues = %w[foo bar baz]
expect(Gitlab::SidekiqConfig::CliMethods)
.to receive(:worker_queues).and_return(worker_queues)
@@ -75,7 +75,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
expect(Gitlab::SidekiqCluster)
.to receive(:start).with([worker_queues], default_options).and_return([])
- cli.run(%w(*))
+ cli.run(%w[*])
end
it 'raises an error when the arguments contain newlines' do
@@ -97,31 +97,31 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
.with([['baz'] + described_class::DEFAULT_QUEUES], default_options)
.and_return([])
- cli.run(%w(foo -n))
+ cli.run(%w[foo -n])
end
end
context 'with --max-concurrency flag' do
it 'starts Sidekiq workers for specified queues with a max concurrency' do
- expected_queues = [%w(foo bar baz), %w(solo)].each { |queues| queues.concat(described_class::DEFAULT_QUEUES) }
- expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(%w(foo bar baz))
+ expected_queues = [%w[foo bar baz], %w[solo]].each { |queues| queues.concat(described_class::DEFAULT_QUEUES) }
+ expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(%w[foo bar baz])
expect(Gitlab::SidekiqCluster).to receive(:start)
.with(expected_queues, default_options.merge(max_concurrency: 2))
.and_return([])
- cli.run(%w(foo,bar,baz solo -m 2))
+ cli.run(%w[foo,bar,baz solo -m 2])
end
end
context 'with --min-concurrency flag' do
it 'starts Sidekiq workers for specified queues with a min concurrency' do
- expected_queues = [%w(foo bar baz), %w(solo)].each { |queues| queues.concat(described_class::DEFAULT_QUEUES) }
- expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(%w(foo bar baz))
+ expected_queues = [%w[foo bar baz], %w[solo]].each { |queues| queues.concat(described_class::DEFAULT_QUEUES) }
+ expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(%w[foo bar baz])
expect(Gitlab::SidekiqCluster).to receive(:start)
.with(expected_queues, default_options.merge(min_concurrency: 2))
.and_return([])
- cli.run(%w(foo,bar,baz solo --min-concurrency 2))
+ cli.run(%w[foo,bar,baz solo --min-concurrency 2])
end
end
@@ -131,7 +131,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
.with([['foo'] + described_class::DEFAULT_QUEUES], default_options.merge(timeout: 10))
.and_return([])
- cli.run(%w(foo --timeout 10))
+ cli.run(%w[foo --timeout 10])
end
it 'when not given', 'starts Sidekiq workers with default timeout' do
@@ -140,13 +140,13 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
Gitlab::SidekiqCluster::DEFAULT_SOFT_TIMEOUT_SECONDS))
.and_return([])
- cli.run(%w(foo))
+ cli.run(%w[foo])
end
end
context 'with --list-queues flag' do
it 'errors when given --list-queues and --dryrun' do
- expect { cli.run(%w(foo --list-queues --dryrun)) }.to raise_error(described_class::CommandError)
+ expect { cli.run(%w[foo --list-queues --dryrun]) }.to raise_error(described_class::CommandError)
end
it 'prints out a list of queues in alphabetical order' do
@@ -163,7 +163,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
expect(cli).to receive(:puts).with([expected_queues])
- cli.run(%w(--queue-selector feature_category=epics --list-queues))
+ cli.run(%w[--queue-selector feature_category=epics --list-queues])
end
end
@@ -175,7 +175,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
described_class::DEFAULT_QUEUES], default_options)
.and_return([])
- cli.run(%w(cronjob))
+ cli.run(%w[cronjob])
end
end
@@ -184,33 +184,33 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
{
'memory-bound queues' => {
query: 'resource_boundary=memory',
- included_queues: %w(project_export),
- excluded_queues: %w(merge)
+ included_queues: %w[project_export],
+ excluded_queues: %w[merge]
},
'memory- or CPU-bound queues' => {
query: 'resource_boundary=memory,cpu',
- included_queues: %w(auto_merge:auto_merge_process project_export),
- excluded_queues: %w(merge)
+ included_queues: %w[auto_merge:auto_merge_process project_export],
+ excluded_queues: %w[merge]
},
'high urgency CI queues' => {
query: 'feature_category=continuous_integration&urgency=high',
- included_queues: %w(pipeline_default:ci_drop_pipeline),
- excluded_queues: %w(merge)
+ included_queues: %w[pipeline_default:ci_drop_pipeline],
+ excluded_queues: %w[merge]
},
'CPU-bound high urgency CI queues' => {
query: 'feature_category=continuous_integration&urgency=high&resource_boundary=cpu',
- included_queues: %w(pipeline_default:ci_create_downstream_pipeline),
- excluded_queues: %w(pipeline_default:ci_drop_pipeline merge)
+ included_queues: %w[pipeline_default:ci_create_downstream_pipeline],
+ excluded_queues: %w[pipeline_default:ci_drop_pipeline merge]
},
'CPU-bound high urgency non-CI queues' => {
query: 'feature_category!=continuous_integration&urgency=high&resource_boundary=cpu',
- included_queues: %w(new_issue),
- excluded_queues: %w(pipeline_default:ci_create_downstream_pipeline)
+ included_queues: %w[new_issue],
+ excluded_queues: %w[pipeline_default:ci_create_downstream_pipeline]
},
'CI and SCM queues' => {
query: 'feature_category=continuous_integration|feature_category=source_code_management',
- included_queues: %w(pipeline_default:ci_drop_pipeline merge),
- excluded_queues: %w()
+ included_queues: %w[pipeline_default:ci_drop_pipeline merge],
+ excluded_queues: %w[]
}
}
end
@@ -226,7 +226,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
[]
end
- cli.run(%W(--queue-selector #{query}))
+ cli.run(%W[--queue-selector #{query}])
end
it 'works when negated' do
@@ -239,7 +239,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
[]
end
- cli.run(%W(--negate --queue-selector #{query}))
+ cli.run(%W[--negate --queue-selector #{query}])
end
end
@@ -264,11 +264,11 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
.with(expected_workers, default_options)
.and_return([])
- cli.run(%w(--queue-selector feature_category=incident_management&has_external_dependencies=true resource_boundary=memory&feature_category=importers))
+ cli.run(%w[--queue-selector feature_category=incident_management&has_external_dependencies=true resource_boundary=memory&feature_category=importers])
end
it 'allows the special * selector' do
- worker_queues = %w(foo bar baz)
+ worker_queues = %w[foo bar baz]
expect(Gitlab::SidekiqConfig::CliMethods)
.to receive(:worker_queues).and_return(worker_queues)
@@ -276,20 +276,20 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
expect(Gitlab::SidekiqCluster)
.to receive(:start).with([worker_queues], default_options).and_return([])
- cli.run(%w(--queue-selector *))
+ cli.run(%w[--queue-selector *])
end
it 'errors when the selector matches no queues' do
expect(Gitlab::SidekiqCluster).not_to receive(:start)
- expect { cli.run(%w(--queue-selector has_external_dependencies=true&has_external_dependencies=false)) }
+ expect { cli.run(%w[--queue-selector has_external_dependencies=true&has_external_dependencies=false]) }
.to raise_error(described_class::CommandError)
end
it 'errors on an invalid query multiple queue groups correctly' do
expect(Gitlab::SidekiqCluster).not_to receive(:start)
- expect { cli.run(%w(--queue-selector unknown_field=chatops)) }
+ expect { cli.run(%w[--queue-selector unknown_field=chatops]) }
.to raise_error(Gitlab::SidekiqConfig::WorkerMatcher::QueryError)
end
end
@@ -304,7 +304,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
.with([['foo']], default_options)
.and_return([])
- cli.run(%w(foo))
+ cli.run(%w[foo])
end
end
@@ -316,7 +316,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
it "does not throw an error" do
allow(Gitlab::SidekiqCluster).to receive(:start).and_return([])
- expect { cli.run(%w(foo)) }.not_to raise_error
+ expect { cli.run(%w[foo]) }.not_to raise_error
end
it "starts Sidekiq workers with given queues, and additional default and mailers queues (DEFAULT_QUEUES)" do
@@ -324,7 +324,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
.with([['foo'] + described_class::DEFAULT_QUEUES], default_options)
.and_return([])
- cli.run(%w(foo))
+ cli.run(%w[foo])
end
end
end
@@ -351,7 +351,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
expect(metrics_cleanup_service).to receive(:execute).ordered
expect(Gitlab::SidekiqCluster).to receive(:start).ordered.and_return([])
- cli.run(%w(foo))
+ cli.run(%w[foo])
end
context 'when sidekiq_exporter is not set up' do
@@ -362,7 +362,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
it 'does not start a sidekiq metrics server' do
expect(MetricsServer).not_to receive(:start_for_sidekiq)
- cli.run(%w(foo))
+ cli.run(%w[foo])
end
end
@@ -374,11 +374,11 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
it 'does not start a sidekiq metrics server' do
expect(MetricsServer).not_to receive(:start_for_sidekiq)
- cli.run(%w(foo))
+ cli.run(%w[foo])
end
it 'does not throw an error' do
- expect { cli.run(%w(foo)) }.not_to raise_error
+ expect { cli.run(%w[foo]) }.not_to raise_error
end
end
@@ -386,7 +386,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
it 'does not start a sidekiq metrics server' do
expect(MetricsServer).not_to receive(:start_for_sidekiq)
- cli.run(%w(foo))
+ cli.run(%w[foo])
end
end
@@ -396,7 +396,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
it 'starts the metrics server' do
expect(MetricsServer).to receive(:start_for_sidekiq).with(metrics_dir: metrics_dir, reset_signals: trapped_signals)
- cli.run(%w(foo))
+ cli.run(%w[foo])
end
end
@@ -404,8 +404,8 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
it 'writes the PID to a file' do
expect(Gitlab::ProcessManagement).to receive(:write_pid).with('/dev/null')
- cli.option_parser.parse!(%w(-P /dev/null))
- cli.run(%w(foo))
+ cli.option_parser.parse!(%w[-P /dev/null])
+ cli.run(%w[foo])
end
end
@@ -413,7 +413,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
it 'does not write a PID' do
expect(Gitlab::ProcessManagement).not_to receive(:write_pid)
- cli.run(%w(foo))
+ cli.run(%w[foo])
end
end
end
@@ -424,7 +424,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
it 'does not start the server' do
expect(MetricsServer).not_to receive(:start_for_sidekiq)
- cli.run(%w(foo --dryrun))
+ cli.run(%w[foo --dryrun])
end
end
end
@@ -456,7 +456,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
expect(supervisor).to receive(:shutdown)
expect(cli).not_to receive(:exit).with(1)
- cli.run(%w(foo))
+ cli.run(%w[foo])
end
end
@@ -468,7 +468,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
expect(process_status).to receive(:success?).and_return(false)
expect(cli).to receive(:exit).with(1)
- cli.run(%w(foo))
+ cli.run(%w[foo])
end
end
@@ -477,14 +477,14 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
expect(supervisor).to receive(:supervise).and_yield([2, 99])
expect(supervisor).to receive(:shutdown)
- cli.run(%w(foo))
+ cli.run(%w[foo])
end
it 'restarts the metrics server when it is down' do
expect(supervisor).to receive(:supervise).and_yield([metrics_server_pid])
expect(MetricsServer).to receive(:start_for_sidekiq).twice.and_return(metrics_server_pid)
- cli.run(%w(foo))
+ cli.run(%w[foo])
end
end
end
diff --git a/spec/components/pajamas/banner_component_spec.rb b/spec/components/pajamas/banner_component_spec.rb
index c9d9a9176e8..47dc9042913 100644
--- a/spec/components/pajamas/banner_component_spec.rb
+++ b/spec/components/pajamas/banner_component_spec.rb
@@ -80,8 +80,8 @@ RSpec.describe Pajamas::BannerComponent, type: :component do
describe 'variant' do
context 'by default (promotion)' do
- it 'applies no variant class' do
- expect(page).to have_css "[class='gl-banner']"
+ it 'does not apply introduction class' do
+ expect(page).not_to have_css ".gl-banner-introduction"
end
end
@@ -89,7 +89,7 @@ RSpec.describe Pajamas::BannerComponent, type: :component do
let(:options) { { variant: :introduction } }
it "applies the introduction class to the banner" do
- expect(page).to have_css ".gl-banner.gl-banner-introduction"
+ expect(page).to have_css ".gl-banner-introduction"
end
it "applies the confirm class to the close button" do
@@ -101,7 +101,7 @@ RSpec.describe Pajamas::BannerComponent, type: :component do
let(:options) { { variant: :foobar } }
it 'ignores the unknown variant' do
- expect(page).to have_css "[class='gl-banner']"
+ expect(page).to have_css ".gl-banner"
end
end
end
diff --git a/spec/components/pajamas/component_spec.rb b/spec/components/pajamas/component_spec.rb
index 7385519b468..d5c7a0e2538 100644
--- a/spec/components/pajamas/component_spec.rb
+++ b/spec/components/pajamas/component_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe Pajamas::Component do
subject.send(
:format_options,
options: { foo: 'bar', class: 'gl-display-flex gl-py-5' },
- css_classes: %w(gl-px-5 gl-mt-5),
+ css_classes: %w[gl-px-5 gl-mt-5],
additional_options: { baz: 'bax' }
)
).to match({
diff --git a/spec/components/pajamas/empty_state_component_spec.rb b/spec/components/pajamas/empty_state_component_spec.rb
index 5aa3f2143c3..852e73777df 100644
--- a/spec/components/pajamas/empty_state_component_spec.rb
+++ b/spec/components/pajamas/empty_state_component_spec.rb
@@ -54,7 +54,7 @@ RSpec.describe Pajamas::EmptyStateComponent, type: :component, feature_category:
it 'renders section with flex direction column' do
expect(find_section[:id]).to eq(empty_state_options[:id])
- expect(find_section[:class]).to eq("gl-display-flex empty-state gl-text-center gl-flex-direction-column")
+ expect(find_section[:class]).to eq("gl-display-flex gl-empty-state gl-text-center gl-flex-direction-column")
end
end
@@ -62,7 +62,7 @@ RSpec.describe Pajamas::EmptyStateComponent, type: :component, feature_category:
let(:compact) { true }
it 'renders section with flex direction row' do
- expect(find_section[:class]).to eq("gl-display-flex empty-state gl-flex-direction-row gl-align-items-center")
+ expect(find_section[:class]).to eq("gl-display-flex gl-empty-state gl-flex-direction-row")
end
end
diff --git a/spec/components/projects/ml/models_index_component_spec.rb b/spec/components/projects/ml/models_index_component_spec.rb
index e4599cc5eec..c42c94d5d01 100644
--- a/spec/components/projects/ml/models_index_component_spec.rb
+++ b/spec/components/projects/ml/models_index_component_spec.rb
@@ -8,14 +8,30 @@ RSpec.describe Projects::Ml::ModelsIndexComponent, type: :component, feature_cat
let_it_be(:model2) { build_stubbed(:ml_models, project: project) }
let_it_be(:models) { [model1, model2] }
+ let(:paginator) do
+ Class.new do
+ def initialize(models:)
+ @models = models
+ end
+
+ def records = @models
+ def has_next_page? = true
+ def has_previous_page? = false
+ def cursor_for_previous_page = 'abcde'
+ def cursor_for_next_page = 'defgh'
+ end.new(models: models)
+ end
+
subject(:component) do
- described_class.new(models: models)
+ described_class.new(paginator: paginator)
end
describe 'rendered' do
let(:element) { page.find("#js-index-ml-models") }
before do
+ allow(model1).to receive(:version_count).and_return(1)
+ allow(model2).to receive(:version_count).and_return(0)
render_inline component
end
@@ -27,14 +43,22 @@ RSpec.describe Projects::Ml::ModelsIndexComponent, type: :component, feature_cat
{
'name' => model1.name,
'version' => model1.latest_version.version,
- 'path' => "/#{project.full_path}/-/packages/#{model1.latest_version.package_id}"
+ 'path' => "/#{project.full_path}/-/packages/#{model1.latest_version.package_id}",
+ 'versionCount' => 1
},
{
'name' => model2.name,
'version' => nil,
- 'path' => nil
+ 'path' => nil,
+ 'versionCount' => 0
}
- ]
+ ],
+ 'pageInfo' => {
+ 'hasNextPage' => true,
+ 'hasPreviousPage' => false,
+ 'startCursor' => 'abcde',
+ 'endCursor' => 'defgh'
+ }
})
end
end
diff --git a/spec/components/projects/ml/show_ml_model_component_spec.rb b/spec/components/projects/ml/show_ml_model_component_spec.rb
new file mode 100644
index 00000000000..7d08b90791b
--- /dev/null
+++ b/spec/components/projects/ml/show_ml_model_component_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Projects::Ml::ShowMlModelComponent, type: :component, feature_category: :mlops do
+ let_it_be(:project) { build_stubbed(:project) }
+ let_it_be(:model1) { build_stubbed(:ml_models, :with_latest_version_and_package, project: project) }
+
+ subject(:component) do
+ described_class.new(model: model1)
+ end
+
+ describe 'rendered' do
+ before do
+ render_inline component
+ end
+
+ it 'renders element with view_model' do
+ element = page.find("#js-mount-show-ml-model")
+
+ expect(Gitlab::Json.parse(element['data-view-model'])).to eq({
+ 'model' => {
+ 'id' => model1.id,
+ 'name' => model1.name,
+ 'path' => "/#{project.full_path}/-/ml/models/#{model1.id}"
+ }
+ })
+ end
+ end
+end
diff --git a/spec/config/object_store_settings_spec.rb b/spec/config/object_store_settings_spec.rb
index 14995e2934e..03ace77af9b 100644
--- a/spec/config/object_store_settings_spec.rb
+++ b/spec/config/object_store_settings_spec.rb
@@ -300,4 +300,26 @@ RSpec.describe ObjectStoreSettings, feature_category: :shared do
end
end
end
+
+ describe '.enabled_endpoint_uris' do
+ subject(:enabled_endpoint_uris) { described_class.enabled_endpoint_uris }
+
+ it 'returns a list of enabled endpoint URIs' do
+ stub_config(
+ artifacts: { enabled: true, object_store: { enabled: true, connection: { endpoint: 'http://example1.com' } } },
+ external_diffs: {
+ enabled: true, object_store: { enabled: true, connection: { endpoint: 'http://example1.com' } }
+ },
+ lfs: { enabled: false, object_store: { enabled: true, connection: { endpoint: 'http://example2.com' } } },
+ uploads: { enabled: true, object_store: { enabled: false, connection: { endpoint: 'http://example3.com' } } },
+ packages: { enabled: true, object_store: { enabled: true, connection: { provider: 'AWS' } } },
+ pages: { enabled: true, object_store: { enabled: true, connection: { endpoint: 'http://example4.com' } } }
+ )
+
+ expect(enabled_endpoint_uris).to contain_exactly(
+ URI('http://example1.com'),
+ URI('http://example4.com')
+ )
+ end
+ end
end
diff --git a/spec/contracts/consumer/fixtures/project/pipelines/get_list_project_pipelines.fixture.js b/spec/contracts/consumer/fixtures/project/pipelines/get_list_project_pipelines.fixture.js
index a982e927572..e9e820fda9a 100644
--- a/spec/contracts/consumer/fixtures/project/pipelines/get_list_project_pipelines.fixture.js
+++ b/spec/contracts/consumer/fixtures/project/pipelines/get_list_project_pipelines.fixture.js
@@ -68,7 +68,7 @@ const body = {
}),
text: Matchers.term({
matcher: PIPELINE_TEXTS,
- generate: 'running',
+ generate: 'Running',
}),
label: Matchers.term({
matcher: PIPELINE_LABELS,
@@ -103,7 +103,7 @@ const body = {
}),
text: Matchers.term({
matcher: PIPELINE_TEXTS,
- generate: 'passed',
+ generate: 'Passed',
}),
label: Matchers.term({
matcher: PIPELINE_LABELS,
diff --git a/spec/contracts/consumer/fixtures/project/pipelines/get_pipeline_header_data.fixture.js b/spec/contracts/consumer/fixtures/project/pipelines/get_pipeline_header_data.fixture.js
index b14a230d2e0..c5d694ab125 100644
--- a/spec/contracts/consumer/fixtures/project/pipelines/get_pipeline_header_data.fixture.js
+++ b/spec/contracts/consumer/fixtures/project/pipelines/get_pipeline_header_data.fixture.js
@@ -41,7 +41,7 @@ const body = {
}),
text: Matchers.term({
matcher: PIPELINE_TEXTS,
- generate: 'running',
+ generate: 'Running',
}),
},
createdAt: Matchers.iso8601DateTime('2022-06-30T16:58:59Z'),
diff --git a/spec/contracts/consumer/helpers/common_regex_patterns.js b/spec/contracts/consumer/helpers/common_regex_patterns.js
index 78dfeb7748f..a887a1293d3 100644
--- a/spec/contracts/consumer/helpers/common_regex_patterns.js
+++ b/spec/contracts/consumer/helpers/common_regex_patterns.js
@@ -15,7 +15,7 @@ export const PIPELINE_SOURCES =
export const PIPELINE_STATUSES =
'^status_(canceled|created|failed|manual|pending|preparing|running|scheduled|skipped|success|warning)$';
export const PIPELINE_TEXTS =
- '^(canceled|created|delayed|failed|manual|passed|pending|preparing|running|skipped|waiting)$';
+ '^(Canceled|Created|Delayed|Failed|Manual|Passed|Pending|Preparing|Running|Skipped|Waiting)$';
// Jobs
export const JOB_STATUSES =
diff --git a/spec/controllers/admin/groups_controller_spec.rb b/spec/controllers/admin/groups_controller_spec.rb
index c534cf14327..6596d788e98 100644
--- a/spec/controllers/admin/groups_controller_spec.rb
+++ b/spec/controllers/admin/groups_controller_spec.rb
@@ -11,6 +11,96 @@ RSpec.describe Admin::GroupsController do
sign_in(admin)
end
+ describe 'GET #index' do
+ let!(:group_2) { create(:group, name: 'Ygroup') }
+ let!(:group_3) { create(:group, name: 'Jgroup', created_at: 2.days.ago, updated_at: 1.day.ago) }
+
+ render_views
+
+ it 'lists available groups' do
+ get :index
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:index)
+ expect(assigns(:groups)).to eq([group, group_2, group_3])
+ end
+
+ it 'renders a correct list of sort by options' do
+ get :index
+
+ html_rendered = Nokogiri::HTML(response.body)
+ sort_options = Gitlab::Json.parse(html_rendered.css('div.dropdown')[0]['data-items'])
+
+ expect(response).to render_template('shared/groups/_dropdown')
+
+ expect(sort_options.size).to eq(7)
+ expect(sort_options[0]['value']).to eq('name_asc')
+ expect(sort_options[0]['text']).to eq(s_('SortOptions|Name'))
+
+ expect(sort_options[1]['value']).to eq('name_desc')
+ expect(sort_options[1]['text']).to eq(s_('SortOptions|Name, descending'))
+
+ expect(sort_options[2]['value']).to eq('created_desc')
+ expect(sort_options[2]['text']).to eq(s_('SortOptions|Last created'))
+
+ expect(sort_options[3]['value']).to eq('created_asc')
+ expect(sort_options[3]['text']).to eq(s_('SortOptions|Oldest created'))
+
+ expect(sort_options[4]['value']).to eq('latest_activity_desc')
+ expect(sort_options[4]['text']).to eq(_('Updated date'))
+
+ expect(sort_options[5]['value']).to eq('latest_activity_asc')
+ expect(sort_options[5]['text']).to eq(s_('SortOptions|Oldest updated'))
+
+ expect(sort_options[6]['value']).to eq('storage_size_desc')
+ expect(sort_options[6]['text']).to eq(s_('SortOptions|Largest group'))
+ end
+
+ context 'when a sort param is present' do
+ it 'returns a sorted by name_asc result' do
+ get :index, params: { sort: 'name_asc' }
+
+ expect(assigns(:groups)).to eq([group, group_3, group_2])
+ end
+ end
+
+ context 'when a name param is present' do
+ it 'returns a search by name result' do
+ get :index, params: { name: 'Ygr' }
+
+ expect(assigns(:groups)).to eq([group_2])
+ end
+
+ it 'returns an empty list if no match' do
+ get :index, params: { name: 'nomatch' }
+
+ expect(assigns(:groups)).to be_empty
+ end
+ end
+
+ context 'when page is specified' do
+ before do
+ allow(Kaminari.config).to receive(:default_per_page).and_return(1)
+ end
+
+ it 'renders the first page of groups' do
+ get :index, params: { page: 1 }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(assigns(:groups).current_page).to eq(1)
+ expect(assigns(:groups)).to eq([group])
+ end
+
+ it 'renders the second page of groups' do
+ get :index, params: { page: 2 }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(assigns(:groups).current_page).to eq(2)
+ expect(assigns(:groups)).to eq([group_2])
+ end
+ end
+ end
+
describe 'DELETE #destroy' do
it 'schedules a group destroy' do
Sidekiq::Testing.fake! do
diff --git a/spec/controllers/admin/runners_controller_spec.rb b/spec/controllers/admin/runners_controller_spec.rb
index 6fa8d2c61c1..5f98004e9cf 100644
--- a/spec/controllers/admin/runners_controller_spec.rb
+++ b/spec/controllers/admin/runners_controller_spec.rb
@@ -106,13 +106,11 @@ RSpec.describe Admin::RunnersController, feature_category: :runner_fleet do
subject(:request) { post :update, params: runner_params }
context 'with update succeeding' do
- before do
+ it 'updates the runner and ticks the queue' do
expect_next_instance_of(Ci::Runners::UpdateRunnerService, runner) do |service|
expect(service).to receive(:execute).with(anything).and_call_original
end
- end
- it 'updates the runner and ticks the queue' do
expect { request }.to change { runner.ensure_runner_queue_value }
runner.reload
@@ -123,13 +121,11 @@ RSpec.describe Admin::RunnersController, feature_category: :runner_fleet do
end
context 'with update failing' do
- before do
+ it 'does not update runner or tick the queue' do
expect_next_instance_of(Ci::Runners::UpdateRunnerService, runner) do |service|
expect(service).to receive(:execute).with(anything).and_return(ServiceResponse.error(message: 'failure'))
end
- end
- it 'does not update runner or tick the queue' do
expect { request }.not_to change { runner.ensure_runner_queue_value }
expect { request }.not_to change { runner.reload.description }
diff --git a/spec/controllers/concerns/continue_params_spec.rb b/spec/controllers/concerns/continue_params_spec.rb
index 9ac7087430e..a9899447054 100644
--- a/spec/controllers/concerns/continue_params_spec.rb
+++ b/spec/controllers/concerns/continue_params_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe ContinueParams do
strong_continue_params(to: '/hello', notice: 'world', notice_now: '!', something: 'else')
end
- expect(controller.continue_params.keys).to contain_exactly(*%w(to notice notice_now))
+ expect(controller.continue_params.keys).to contain_exactly(*%w[to notice notice_now])
end
it 'does not allow cross host redirection' do
diff --git a/spec/controllers/concerns/onboarding/status_spec.rb b/spec/controllers/concerns/onboarding/status_spec.rb
index fe7c5ac6346..6fcbd059947 100644
--- a/spec/controllers/concerns/onboarding/status_spec.rb
+++ b/spec/controllers/concerns/onboarding/status_spec.rb
@@ -5,15 +5,8 @@ require 'spec_helper'
RSpec.describe Onboarding::Status, feature_category: :onboarding do
let_it_be(:member) { create(:group_member) }
let_it_be(:user) { member.user }
- let_it_be(:tasks_to_be_done) { %w[ci code] }
let_it_be(:source) { member.group }
- describe '#continue_full_onboarding?' do
- subject { described_class.new(nil, {}, user).continue_full_onboarding? }
-
- it { is_expected.to eq(false) }
- end
-
describe '#single_invite?' do
subject { described_class.new(nil, nil, user).single_invite? }
diff --git a/spec/controllers/concerns/product_analytics_tracking_spec.rb b/spec/controllers/concerns/product_analytics_tracking_spec.rb
index 1394325014b..5c69af48e65 100644
--- a/spec/controllers/concerns/product_analytics_tracking_spec.rb
+++ b/spec/controllers/concerns/product_analytics_tracking_spec.rb
@@ -10,15 +10,17 @@ RSpec.describe ProductAnalyticsTracking, :snowplow, feature_category: :product_a
let(:event_name) { 'an_event' }
let(:event_action) { 'an_action' }
let(:event_label) { 'a_label' }
-
let!(:group) { create(:group) }
+
let_it_be(:project) { create(:project) }
+ subject(:track_internal_event) { get :show, params: { id: 1 } }
+
describe '.track_internal_event' do
controller(ApplicationController) do
include ProductAnalyticsTracking
- skip_before_action :authenticate_user!, only: [:show]
+ skip_before_action :authenticate_user!, only: [:index]
track_internal_event :index, :show, name: 'g_compliance_dashboard', conditions: [:custom_condition?]
def index
@@ -58,8 +60,6 @@ RSpec.describe ProductAnalyticsTracking, :snowplow, feature_category: :product_a
context 'when user is logged in' do
let(:namespace) { project.namespace }
- subject(:track_internal_event) { get :index }
-
before do
sign_in(user)
end
@@ -83,7 +83,7 @@ RSpec.describe ProductAnalyticsTracking, :snowplow, feature_category: :product_a
it 'does not track the event if the format is not HTML' do
expect_no_internal_tracking
- get :index, format: :json
+ get :show, params: { id: 1, format: :json }
end
it 'does not track the event if a custom condition returns false' do
@@ -96,34 +96,10 @@ RSpec.describe ProductAnalyticsTracking, :snowplow, feature_category: :product_a
end
context 'when user is not logged in' do
- let(:visitor_id) { SecureRandom.uuid }
-
- it 'tracks the event when there is a visitor id' do
- cookies[:visitor_id] = { value: visitor_id, expires: 24.months }
-
- expect_internal_tracking(tracked_user: nil)
-
- get :show, params: { id: 1 }
- end
-
- context 'and there is no visitor_id' do
- it 'does not track the event' do
- expect_no_internal_tracking
-
- subject
- end
- end
- end
-
- context 'when there is no custom_id set' do
- before do
- allow(controller).to receive(:get_custom_id).and_return(nil)
- end
-
- it 'does not track' do
+ it 'does not track the event' do
expect_no_internal_tracking
- subject
+ get :index
end
end
end
diff --git a/spec/controllers/concerns/send_file_upload_spec.rb b/spec/controllers/concerns/send_file_upload_spec.rb
index bf6b68df54e..d58f07a92a6 100644
--- a/spec/controllers/concerns/send_file_upload_spec.rb
+++ b/spec/controllers/concerns/send_file_upload_spec.rb
@@ -193,7 +193,7 @@ RSpec.describe SendFileUpload, feature_category: :user_profile do
it 'sends a file with a custom type' do
headers = double
- expected_headers = /response-content-disposition=attachment%3B%20filename%3D%22test.js%22%3B%20filename%2A%3DUTF-8%27%27test.js&response-content-type=application%2Fjavascript/
+ expected_headers = /response-content-disposition=attachment%3B%20filename%3D%22test.js%22%3B%20filename%2A%3DUTF-8%27%27test.js&response-content-type=text%2Fjavascript/
expect(Gitlab::Workhorse).to receive(:send_url).with(expected_headers).and_call_original
expect(headers).to receive(:store).with(Gitlab::Workhorse::SEND_DATA_HEADER, /^send-url:/)
diff --git a/spec/controllers/graphql_controller_spec.rb b/spec/controllers/graphql_controller_spec.rb
index 8fcbf4049a5..bc73168ff1a 100644
--- a/spec/controllers/graphql_controller_spec.rb
+++ b/spec/controllers/graphql_controller_spec.rb
@@ -317,6 +317,73 @@ RSpec.describe GraphqlController, feature_category: :integrations do
subject { post :execute, params: { query: query, access_token: token.token } }
+ shared_examples 'invalid token' do
+ it 'returns 401 with invalid token message' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ expect_graphql_errors_to_include('Invalid token')
+ end
+ end
+
+ context 'with an invalid token' do
+ context 'with auth header' do
+ subject do
+ request.headers[header] = 'invalid'
+ post :execute, params: { query: query, user: nil }
+ end
+
+ context 'with private-token' do
+ let(:header) { 'Private-Token' }
+
+ it_behaves_like 'invalid token'
+ end
+
+ context 'with job-token' do
+ let(:header) { 'Job-Token' }
+
+ it_behaves_like 'invalid token'
+ end
+
+ context 'with deploy-token' do
+ let(:header) { 'Deploy-Token' }
+
+ it_behaves_like 'invalid token'
+ end
+ end
+
+ context 'with authorization bearer (oauth token)' do
+ subject do
+ request.headers['Authorization'] = 'Bearer invalid'
+ post :execute, params: { query: query, user: nil }
+ end
+
+ it_behaves_like 'invalid token'
+ end
+
+ context 'with auth param' do
+ subject { post :execute, params: { query: query, user: nil }.merge(header) }
+
+ context 'with private_token' do
+ let(:header) { { private_token: 'invalid' } }
+
+ it_behaves_like 'invalid token'
+ end
+
+ context 'with job_token' do
+ let(:header) { { job_token: 'invalid' } }
+
+ it_behaves_like 'invalid token'
+ end
+
+ context 'with token' do
+ let(:header) { { token: 'invalid' } }
+
+ it_behaves_like 'invalid token'
+ end
+ end
+ end
+
context 'when the user is a project bot' do
let(:user) { create(:user, :project_bot, last_activity_on: last_activity_on) }
@@ -471,62 +538,81 @@ RSpec.describe GraphqlController, feature_category: :integrations do
context 'when querying an IntrospectionQuery', :use_clean_rails_memory_store_caching do
let_it_be(:query) { File.read(Rails.root.join('spec/fixtures/api/graphql/introspection.graphql')) }
- it 'caches IntrospectionQuery even when operationName is not given' do
- expect(GitlabSchema).to receive(:execute).exactly(:once)
-
- post :execute, params: { query: query }
- post :execute, params: { query: query }
- end
+ context 'in dev or test env' do
+ before do
+ allow(Gitlab).to receive(:dev_or_test_env?).and_return(true)
+ end
- it 'caches the IntrospectionQuery' do
- expect(GitlabSchema).to receive(:execute).exactly(:once)
+ it 'does not cache IntrospectionQuery' do
+ expect(GitlabSchema).to receive(:execute).exactly(:twice)
- post :execute, params: { query: query, operationName: 'IntrospectionQuery' }
- post :execute, params: { query: query, operationName: 'IntrospectionQuery' }
+ post :execute, params: { query: query }
+ post :execute, params: { query: query }
+ end
end
- it 'caches separately for both remove_deprecated set to true and false' do
- expect(GitlabSchema).to receive(:execute).exactly(:twice)
+ context 'in env different from dev or test' do
+ before do
+ allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
+ end
- post :execute, params: { query: query, operationName: 'IntrospectionQuery', remove_deprecated: true }
- post :execute, params: { query: query, operationName: 'IntrospectionQuery', remove_deprecated: true }
+ it 'caches IntrospectionQuery even when operationName is not given' do
+ expect(GitlabSchema).to receive(:execute).exactly(:once)
- # We clear this instance variable to reset remove_deprecated
- subject.remove_instance_variable(:@context) if subject.instance_variable_defined?(:@context)
+ post :execute, params: { query: query }
+ post :execute, params: { query: query }
+ end
- post :execute, params: { query: query, operationName: 'IntrospectionQuery', remove_deprecated: false }
- post :execute, params: { query: query, operationName: 'IntrospectionQuery', remove_deprecated: false }
- end
+ it 'caches the IntrospectionQuery' do
+ expect(GitlabSchema).to receive(:execute).exactly(:once)
- it 'has a different cache for each Gitlab.revision' do
- expect(GitlabSchema).to receive(:execute).exactly(:twice)
+ post :execute, params: { query: query, operationName: 'IntrospectionQuery' }
+ post :execute, params: { query: query, operationName: 'IntrospectionQuery' }
+ end
- post :execute, params: { query: query, operationName: 'IntrospectionQuery' }
+ it 'caches separately for both remove_deprecated set to true and false' do
+ expect(GitlabSchema).to receive(:execute).exactly(:twice)
- allow(Gitlab).to receive(:revision).and_return('new random value')
+ post :execute, params: { query: query, operationName: 'IntrospectionQuery', remove_deprecated: true }
+ post :execute, params: { query: query, operationName: 'IntrospectionQuery', remove_deprecated: true }
- post :execute, params: { query: query, operationName: 'IntrospectionQuery' }
- end
+ # We clear this instance variable to reset remove_deprecated
+ subject.remove_instance_variable(:@context) if subject.instance_variable_defined?(:@context)
- context 'when there is an unknown introspection query' do
- let(:query) { File.read(Rails.root.join('spec/fixtures/api/graphql/fake_introspection.graphql')) }
+ post :execute, params: { query: query, operationName: 'IntrospectionQuery', remove_deprecated: false }
+ post :execute, params: { query: query, operationName: 'IntrospectionQuery', remove_deprecated: false }
+ end
- it 'does not cache an unknown introspection query' do
+ it 'has a different cache for each Gitlab.revision' do
expect(GitlabSchema).to receive(:execute).exactly(:twice)
post :execute, params: { query: query, operationName: 'IntrospectionQuery' }
+
+ allow(Gitlab).to receive(:revision).and_return('new random value')
+
post :execute, params: { query: query, operationName: 'IntrospectionQuery' }
end
- end
- it 'hits the cache even if the whitespace in the query differs' do
- query_1 = File.read(Rails.root.join('spec/fixtures/api/graphql/introspection.graphql'))
- query_2 = "#{query_1} " # add a couple of spaces to change the fingerprint
+ context 'when there is an unknown introspection query' do
+ let(:query) { File.read(Rails.root.join('spec/fixtures/api/graphql/fake_introspection.graphql')) }
+
+ it 'does not cache an unknown introspection query' do
+ expect(GitlabSchema).to receive(:execute).exactly(:twice)
+
+ post :execute, params: { query: query, operationName: 'IntrospectionQuery' }
+ post :execute, params: { query: query, operationName: 'IntrospectionQuery' }
+ end
+ end
+
+ it 'hits the cache even if the whitespace in the query differs' do
+ query_1 = File.read(Rails.root.join('spec/fixtures/api/graphql/introspection.graphql'))
+ query_2 = "#{query_1} " # add a couple of spaces to change the fingerprint
- expect(GitlabSchema).to receive(:execute).exactly(:once)
+ expect(GitlabSchema).to receive(:execute).exactly(:once)
- post :execute, params: { query: query_1, operationName: 'IntrospectionQuery' }
- post :execute, params: { query: query_2, operationName: 'IntrospectionQuery' }
+ post :execute, params: { query: query_1, operationName: 'IntrospectionQuery' }
+ post :execute, params: { query: query_2, operationName: 'IntrospectionQuery' }
+ end
end
it 'fails if the GraphiQL gem version is not 1.8.0' do
@@ -542,7 +628,7 @@ RSpec.describe GraphqlController, feature_category: :integrations do
let_it_be(:admin) { create(:admin) }
let_it_be(:project) { create(:project) }
- let(:graphql_query) { graphql_query_for('project', { 'fullPath' => project.full_path }, %w(id name)) }
+ let(:graphql_query) { graphql_query_for('project', { 'fullPath' => project.full_path }, %w[id name]) }
before do
sign_in(admin)
@@ -588,8 +674,8 @@ RSpec.describe GraphqlController, feature_category: :integrations do
end
describe '#append_info_to_payload' do
- let(:query_1) { { query: graphql_query_for('project', { 'fullPath' => 'foo' }, %w(id name), 'getProject_1') } }
- let(:query_2) { { query: graphql_query_for('project', { 'fullPath' => 'bar' }, %w(id), 'getProject_2') } }
+ let(:query_1) { { query: graphql_query_for('project', { 'fullPath' => 'foo' }, %w[id name], 'getProject_1') } }
+ let(:query_2) { { query: graphql_query_for('project', { 'fullPath' => 'bar' }, %w[id], 'getProject_2') } }
let(:graphql_queries) { [query_1, query_2] }
let(:log_payload) { {} }
let(:expected_logs) do
diff --git a/spec/controllers/groups/releases_controller_spec.rb b/spec/controllers/groups/releases_controller_spec.rb
index 40e8cb4efc5..4b4333dea0e 100644
--- a/spec/controllers/groups/releases_controller_spec.rb
+++ b/spec/controllers/groups/releases_controller_spec.rb
@@ -56,7 +56,7 @@ RSpec.describe Groups::ReleasesController do
index
- expect(json_response.map { |r| r['tag'] }).to match_array(%w(p2 p1 v2 v1))
+ expect(json_response.map { |r| r['tag'] }).to match_array(%w[p2 p1 v2 v1])
end
end
diff --git a/spec/controllers/groups/runners_controller_spec.rb b/spec/controllers/groups/runners_controller_spec.rb
index a4e55a89f41..d48e9ff0d51 100644
--- a/spec/controllers/groups/runners_controller_spec.rb
+++ b/spec/controllers/groups/runners_controller_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do
end
context 'when the user is a maintainer' do
- before do
+ before_all do
group.add_maintainer(user)
end
@@ -64,7 +64,7 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do
end
context 'when the user is an owner' do
- before do
+ before_all do
group.add_owner(user)
end
@@ -79,7 +79,7 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do
end
context 'when user is not maintainer' do
- before do
+ before_all do
group.add_developer(user)
end
@@ -89,7 +89,7 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do
describe '#new' do
context 'when user is owner' do
- before do
+ before_all do
group.add_owner(user)
end
@@ -102,7 +102,7 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do
end
context 'when user is not owner' do
- before do
+ before_all do
group.add_maintainer(user)
end
@@ -118,7 +118,7 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do
subject(:register) { get :register, params: { group_id: group, id: new_runner } }
context 'when user is owner' do
- before do
+ before_all do
group.add_owner(user)
end
@@ -145,7 +145,7 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do
end
context 'when user is not owner' do
- before do
+ before_all do
group.add_maintainer(user)
end
@@ -163,7 +163,7 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do
describe '#show' do
context 'when user is maintainer' do
- before do
+ before_all do
group.add_maintainer(user)
end
@@ -190,7 +190,7 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do
end
context 'when user is not maintainer' do
- before do
+ before_all do
group.add_developer(user)
end
@@ -216,7 +216,7 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do
describe '#edit' do
context 'when user is owner' do
- before do
+ before_all do
group.add_owner(user)
end
@@ -248,7 +248,7 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do
end
context 'when user is maintainer' do
- before do
+ before_all do
group.add_maintainer(user)
end
@@ -273,7 +273,7 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do
end
context 'when user is not maintainer' do
- before do
+ before_all do
group.add_developer(user)
end
@@ -329,7 +329,7 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do
end
context 'when user is owner' do
- before do
+ before_all do
group.add_owner(user)
end
@@ -353,7 +353,7 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do
end
context 'when user is maintainer' do
- before do
+ before_all do
group.add_maintainer(user)
end
@@ -377,7 +377,7 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do
end
context 'when user is not maintainer' do
- before do
+ before_all do
group.add_developer(user)
end
diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb
index 87a30ed1234..31257fd3a30 100644
--- a/spec/controllers/groups_controller_spec.rb
+++ b/spec/controllers/groups_controller_spec.rb
@@ -673,14 +673,6 @@ RSpec.describe GroupsController, factory_default: :keep, feature_category: :code
expect(controller).to set_flash[:notice]
end
- it 'does not update the path on error' do
- allow_any_instance_of(Group).to receive(:move_dir).and_raise(Gitlab::UpdatePathError)
- post :update, params: { id: group.to_param, group: { path: 'new_path' } }
-
- expect(assigns(:group).errors).not_to be_empty
- expect(assigns(:group).path).not_to eq('new_path')
- end
-
it 'updates the project_creation_level successfully' do
post :update, params: { id: group.to_param, group: { project_creation_level: ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS } }
diff --git a/spec/controllers/import/bitbucket_server_controller_spec.rb b/spec/controllers/import/bitbucket_server_controller_spec.rb
index b2a56423253..3266c4d4d39 100644
--- a/spec/controllers/import/bitbucket_server_controller_spec.rb
+++ b/spec/controllers/import/bitbucket_server_controller_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Import::BitbucketServerController, feature_category: :importers d
let(:repo_slug) { 'some-repo' }
let(:repo_id) { "#{project_key}/#{repo_slug}" }
let(:client) { instance_double(BitbucketServer::Client) }
+ let(:timeout_strategy) { "pessimistic" }
def assign_session_tokens
session[:bitbucket_server_url] = 'http://localhost:7990'
@@ -44,7 +45,7 @@ RSpec.describe Import::BitbucketServerController, feature_category: :importers d
it 'returns the new project' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
- .to receive(:new).with(project_key, repo_slug, anything, project_name, user.namespace, user, anything)
+ .to receive(:new).with(project_key, repo_slug, anything, project_name, user.namespace, user, anything, timeout_strategy)
.and_return(double(execute: project))
post :create, params: { repo_id: repo_id }, format: :json
@@ -57,7 +58,7 @@ RSpec.describe Import::BitbucketServerController, feature_category: :importers d
it 'successfully creates a project' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
- .to receive(:new).with(project_key, repo_slug, anything, project_name, user.namespace, user, anything)
+ .to receive(:new).with(project_key, repo_slug, anything, project_name, user.namespace, user, anything, timeout_strategy)
.and_return(double(execute: project))
post :create, params: { repo_id: repo_id }, format: :json
@@ -88,7 +89,7 @@ RSpec.describe Import::BitbucketServerController, feature_category: :importers d
it 'returns an error when the project cannot be saved' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
- .to receive(:new).with(project_key, repo_slug, anything, project_name, user.namespace, user, anything)
+ .to receive(:new).with(project_key, repo_slug, anything, project_name, user.namespace, user, anything, timeout_strategy)
.and_return(double(execute: build(:project)))
post :create, params: { repo_id: repo_id }, format: :json
diff --git a/spec/controllers/import/github_controller_spec.rb b/spec/controllers/import/github_controller_spec.rb
index bf56043a496..aafba6e2b9f 100644
--- a/spec/controllers/import/github_controller_spec.rb
+++ b/spec/controllers/import/github_controller_spec.rb
@@ -66,31 +66,11 @@ RSpec.describe Import::GithubController, feature_category: :importers do
context "when auth state param is present in session" do
let(:valid_auth_state) { "secret-state" }
- context 'when remove_legacy_github_client feature is disabled' do
- before do
- stub_feature_flags(remove_legacy_github_client: false)
- allow_next_instance_of(Gitlab::LegacyGithubImport::Client) do |client|
- allow(client).to receive(:get_token).and_return(token)
- end
- session[:github_auth_state_key] = valid_auth_state
- end
-
- it "updates access token if state param is valid" do
- token = "asdasd12345"
-
- get :callback, params: { state: valid_auth_state }
-
- expect(session[:github_access_token]).to eq(token)
- expect(controller).to redirect_to(status_import_github_url)
- end
-
- it "includes namespace_id from query params if it is present" do
- namespace_id = 1
-
- get :callback, params: { state: valid_auth_state, namespace_id: namespace_id }
-
- expect(controller).to redirect_to(status_import_github_url(namespace_id: namespace_id))
+ before do
+ allow_next_instance_of(OAuth2::Client) do |client|
+ allow(client).to receive_message_chain(:auth_code, :get_token, :token).and_return(token)
end
+ session[:github_auth_state_key] = valid_auth_state
end
it "reports an error if state param is invalid" do
@@ -100,31 +80,21 @@ RSpec.describe Import::GithubController, feature_category: :importers do
expect(flash[:alert]).to eq('Access denied to your GitHub account.')
end
- context 'when remove_legacy_github_client feature is enabled' do
- before do
- stub_feature_flags(remove_legacy_github_client: true)
- allow_next_instance_of(OAuth2::Client) do |client|
- allow(client).to receive_message_chain(:auth_code, :get_token, :token).and_return(token)
- end
- session[:github_auth_state_key] = valid_auth_state
- end
+ it "updates access token if state param is valid" do
+ token = "asdasd12345"
- it "updates access token if state param is valid" do
- token = "asdasd12345"
+ get :callback, params: { state: valid_auth_state }
- get :callback, params: { state: valid_auth_state }
-
- expect(session[:github_access_token]).to eq(token)
- expect(controller).to redirect_to(status_import_github_url)
- end
+ expect(session[:github_access_token]).to eq(token)
+ expect(controller).to redirect_to(status_import_github_url)
+ end
- it "includes namespace_id from query params if it is present" do
- namespace_id = 1
+ it "includes namespace_id from query params if it is present" do
+ namespace_id = 1
- get :callback, params: { state: valid_auth_state, namespace_id: namespace_id }
+ get :callback, params: { state: valid_auth_state, namespace_id: namespace_id }
- expect(controller).to redirect_to(status_import_github_url(namespace_id: namespace_id))
- end
+ expect(controller).to redirect_to(status_import_github_url(namespace_id: namespace_id))
end
end
end
@@ -138,7 +108,6 @@ RSpec.describe Import::GithubController, feature_category: :importers do
it 'calls repos list from provider with expected args' do
expect_next_instance_of(Gitlab::GithubImport::Clients::Proxy) do |client|
expect(client).to receive(:repos)
- .with(expected_filter, expected_options)
.and_return({ repos: [], page_info: {}, count: 0 })
end
@@ -160,10 +129,6 @@ RSpec.describe Import::GithubController, feature_category: :importers do
let(:pagination_params) { { before: nil, after: nil } }
let(:relation_params) { { relation_type: nil, organization_login: '' } }
let(:provider_repos) { [] }
- let(:expected_filter) { '' }
- let(:expected_options) do
- pagination_params.merge(relation_params).merge(first: 25)
- end
before do
allow_next_instance_of(Gitlab::GithubImport::Clients::Proxy) do |proxy|
@@ -287,21 +252,11 @@ RSpec.describe Import::GithubController, feature_category: :importers do
let(:organization_login) { 'test-login' }
let(:params) { pagination_params.merge(relation_type: 'organization', organization_login: organization_login) }
let(:pagination_defaults) { { first: 25 } }
- let(:expected_options) do
- pagination_defaults.merge(pagination_params).merge(
- relation_type: 'organization', organization_login: organization_login
- )
- end
it_behaves_like 'calls repos through Clients::Proxy with expected args'
context 'when organization_login is too long and with ":"' do
let(:organization_login) { ":#{Array.new(270) { ('a'..'z').to_a.sample }.join}" }
- let(:expected_options) do
- pagination_defaults.merge(pagination_params).merge(
- relation_type: 'organization', organization_login: organization_login.slice(1, 254)
- )
- end
it_behaves_like 'calls repos through Clients::Proxy with expected args'
end
@@ -310,7 +265,6 @@ RSpec.describe Import::GithubController, feature_category: :importers do
context 'when filtering' do
let(:filter_param) { FFaker::Lorem.word }
let(:params) { { filter: filter_param } }
- let(:expected_filter) { filter_param }
it_behaves_like 'calls repos through Clients::Proxy with expected args'
@@ -332,7 +286,6 @@ RSpec.describe Import::GithubController, feature_category: :importers do
context 'when user input contains colons and spaces' do
let(:filter_param) { ' test1:test2 test3 : test4 ' }
- let(:expected_filter) { 'test1test2test3test4' }
it_behaves_like 'calls repos through Clients::Proxy with expected args'
end
diff --git a/spec/controllers/jira_connect/app_descriptor_controller_spec.rb b/spec/controllers/jira_connect/app_descriptor_controller_spec.rb
index 48b315646de..ea6cb688409 100644
--- a/spec/controllers/jira_connect/app_descriptor_controller_spec.rb
+++ b/spec/controllers/jira_connect/app_descriptor_controller_spec.rb
@@ -42,7 +42,7 @@ RSpec.describe JiraConnect::AppDescriptorController, feature_category: :integrat
authentication: {
type: 'jwt'
},
- scopes: %w(READ WRITE DELETE),
+ scopes: %w[READ WRITE DELETE],
apiVersion: 1,
apiMigrations: {
'context-qsh': true,
@@ -71,7 +71,7 @@ RSpec.describe JiraConnect::AppDescriptorController, feature_category: :integrat
name: { value: 'GitLab' },
url: 'https://gitlab.com',
logoUrl: logo_url,
- capabilities: %w(branch commit pull_request)
+ capabilities: %w[branch commit pull_request]
},
jiraBuildInfoProvider: common_module_properties.merge(
actions: {},
diff --git a/spec/controllers/oauth/applications_controller_spec.rb b/spec/controllers/oauth/applications_controller_spec.rb
index 44deeb6c47e..dcd817861a7 100644
--- a/spec/controllers/oauth/applications_controller_spec.rb
+++ b/spec/controllers/oauth/applications_controller_spec.rb
@@ -198,7 +198,7 @@ RSpec.describe Oauth::ApplicationsController, feature_category: :system_access d
end
context 'when scopes are invalid' do
- let(:scopes) { %w(api foo) }
+ let(:scopes) { %w[api foo] }
render_views
diff --git a/spec/controllers/oauth/tokens_controller_spec.rb b/spec/controllers/oauth/tokens_controller_spec.rb
index 389153d138e..489470dc0df 100644
--- a/spec/controllers/oauth/tokens_controller_spec.rb
+++ b/spec/controllers/oauth/tokens_controller_spec.rb
@@ -2,8 +2,64 @@
require 'spec_helper'
-RSpec.describe Oauth::TokensController do
+RSpec.describe Oauth::TokensController, feature_category: :user_management do
+ let(:user) { create(:user) }
+
it 'includes Two-factor enforcement concern' do
expect(described_class.included_modules.include?(EnforcesTwoFactorAuthentication)).to eq(true)
end
+
+ describe '#append_info_to_payload' do
+ controller(described_class) do
+ attr_reader :last_payload
+
+ def create
+ render html: 'authenticated'
+ end
+
+ def append_info_to_payload(payload)
+ super
+
+ @last_payload = payload
+ end
+ end
+
+ it 'logs correlation id' do
+ Labkit::Correlation::CorrelationId.use_id('new-id') do
+ post :create
+ end
+
+ expect(controller.last_payload).to include('correlation_id' => 'new-id')
+ end
+
+ it 'adds context metadata to the payload' do
+ sign_in user
+
+ post :create
+
+ expect(controller.last_payload[:metadata]).to include(Gitlab::ApplicationContext.current)
+ end
+
+ it 'logs response length' do
+ sign_in user
+
+ post :create
+
+ expect(controller.last_payload[:response_bytes]).to eq('authenticated'.bytesize)
+ end
+
+ context 'with log_response_length disabled' do
+ before do
+ stub_feature_flags(log_response_length: false)
+ end
+
+ it 'does not log response length' do
+ sign_in user
+
+ post :create
+
+ expect(controller.last_payload).not_to include(:response_bytes)
+ end
+ end
+ end
end
diff --git a/spec/controllers/omniauth_callbacks_controller_spec.rb b/spec/controllers/omniauth_callbacks_controller_spec.rb
index 8d2face0233..847f7aeae7c 100644
--- a/spec/controllers/omniauth_callbacks_controller_spec.rb
+++ b/spec/controllers/omniauth_callbacks_controller_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
Rails.application.env_config['omniauth.auth'] = @original_env_config_omniauth_auth
end
- context 'authentication succeeds' do
+ context 'when authentication succeeds' do
let(:extern_uid) { 'my-uid' }
let(:provider) { :github }
@@ -77,7 +77,7 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
end
end
- context 'a deactivated user' do
+ context 'for a deactivated user' do
let(:provider) { :github }
let(:extern_uid) { 'my-uid' }
@@ -216,8 +216,8 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
end
end
- context 'strategies' do
- shared_context 'sign_up' do
+ context 'with strategies' do
+ shared_context 'with sign_up' do
let(:user) { double(email: 'new@example.com') }
before do
@@ -225,7 +225,7 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
end
end
- context 'github' do
+ context 'for github' do
let(:extern_uid) { 'my-uid' }
let(:provider) { :github }
@@ -299,7 +299,7 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
end
context 'for sign up' do
- include_context 'sign_up'
+ include_context 'with sign_up'
it 'is allowed' do
post provider
@@ -307,10 +307,10 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
expect(request.env['warden']).to be_authenticated
end
- it 'redirects to welcome path' do
- post provider
+ it_behaves_like Onboarding::Redirectable do
+ let(:email) { user.email }
- expect(response).to redirect_to(users_sign_up_welcome_path)
+ subject(:post_create) { post provider }
end
end
@@ -341,8 +341,8 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
expect { post provider }.to change { user.reload.identities.count }.by(1)
end
- context 'sign up' do
- include_context 'sign_up'
+ context 'for sign up' do
+ include_context 'with sign_up'
it 'is prevented' do
post provider
@@ -353,7 +353,7 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
end
end
- context 'auth0' do
+ context 'for auth0' do
let(:extern_uid) { '' }
let(:provider) { :auth0 }
@@ -366,7 +366,7 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
end
end
- context 'atlassian_oauth2' do
+ context 'for atlassian_oauth2' do
let(:provider) { :atlassian_oauth2 }
let(:extern_uid) { 'my-uid' }
@@ -424,7 +424,7 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
end
end
- context 'salesforce' do
+ context 'for salesforce' do
let(:extern_uid) { 'my-uid' }
let(:provider) { :salesforce }
let(:additional_info) { { extra: { email_verified: false } } }
@@ -440,7 +440,7 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
end
context 'with verified email' do
- include_context 'sign_up'
+ include_context 'with sign_up'
let(:additional_info) { { extra: { email_verified: true } } }
it 'allows sign in' do
@@ -532,7 +532,7 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
let(:post_action) { post :saml, params: { SAMLResponse: mock_saml_response } }
end
- context 'sign up' do
+ context 'for sign up' do
before do
user.destroy!
end
@@ -683,7 +683,7 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
expect(subject.current_user_mode.admin_mode?).to be(expected_admin_mode)
end
- context 'user and admin mode requested by the same user' do
+ context 'when user and admin mode is requested by the same user' do
before do
sign_in user
@@ -724,7 +724,7 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
end
end
- context 'user and admin mode requested by different users' do
+ context 'when user and admin mode is requested by different users' do
let(:reauth_extern_uid) { 'another_uid' }
let(:reauth_user) { create(:omniauth_user, extern_uid: reauth_extern_uid, provider: provider) }
diff --git a/spec/controllers/profiles/personal_access_tokens_controller_spec.rb b/spec/controllers/profiles/personal_access_tokens_controller_spec.rb
index 14f3f5c23cd..9c9a9a28879 100644
--- a/spec/controllers/profiles/personal_access_tokens_controller_spec.rb
+++ b/spec/controllers/profiles/personal_access_tokens_controller_spec.rb
@@ -109,53 +109,5 @@ RSpec.describe Profiles::PersonalAccessTokensController do
it 'sets available scopes' do
expect(assigns(:scopes)).to eq(Gitlab::Auth.available_scopes_for(access_token_user))
end
-
- context 'with feature flag k8s_proxy_pat disabled' do
- before do
- stub_feature_flags(k8s_proxy_pat: false)
- # Impersonation and inactive personal tokens are ignored
- create(:personal_access_token, :impersonation, user: access_token_user)
- create(:personal_access_token, :revoked, user: access_token_user)
- get :index
- end
-
- it "only includes details of active personal access tokens" do
- active_personal_access_tokens_detail =
- ::PersonalAccessTokenSerializer.new.represent([active_personal_access_token])
-
- expect(assigns(:active_access_tokens).to_json).to eq(active_personal_access_tokens_detail.to_json)
- end
-
- it "builds a PAT with name and scopes from params" do
- name = 'My PAT'
- scopes = 'api,read_user'
-
- get :index, params: { name: name, scopes: scopes }
-
- expect(assigns(:personal_access_token)).to have_attributes(
- name: eq(name),
- scopes: contain_exactly(:api, :read_user)
- )
- end
-
- it 'returns 404 when personal access tokens are disabled' do
- allow(::Gitlab::CurrentSettings).to receive_messages(personal_access_tokens_disabled?: true)
-
- get :index
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
-
- it 'returns tokens for json format' do
- get :index, params: { format: :json }
-
- expect(json_response.count).to eq(1)
- end
-
- it 'sets available scopes' do
- expect(assigns(:scopes))
- .to eq(Gitlab::Auth.available_scopes_for(access_token_user) - [Gitlab::Auth::K8S_PROXY_SCOPE])
- end
- end
end
end
diff --git a/spec/controllers/profiles/two_factor_auths_controller_spec.rb b/spec/controllers/profiles/two_factor_auths_controller_spec.rb
index dde0af3c543..0c4280467f4 100644
--- a/spec/controllers/profiles/two_factor_auths_controller_spec.rb
+++ b/spec/controllers/profiles/two_factor_auths_controller_spec.rb
@@ -165,11 +165,11 @@ RSpec.describe Profiles::TwoFactorAuthsController, feature_category: :system_acc
end
it 'presents plaintext codes for the user to save' do
- expect(user).to receive(:generate_otp_backup_codes!).and_return(%w(a b c))
+ expect(user).to receive(:generate_otp_backup_codes!).and_return(%w[a b c])
go
- expect(assigns[:codes]).to match_array %w(a b c)
+ expect(assigns[:codes]).to match_array %w[a b c]
end
it 'calls to delete other sessions' do
@@ -216,11 +216,11 @@ RSpec.describe Profiles::TwoFactorAuthsController, feature_category: :system_acc
end
it 'presents plaintext codes for the user to save' do
- expect(user).to receive(:generate_otp_backup_codes!).and_return(%w(a b c))
+ expect(user).to receive(:generate_otp_backup_codes!).and_return(%w[a b c])
go
- expect(assigns[:codes]).to match_array %w(a b c)
+ expect(assigns[:codes]).to match_array %w[a b c]
end
it 'calls to delete other sessions' do
@@ -288,10 +288,10 @@ RSpec.describe Profiles::TwoFactorAuthsController, feature_category: :system_acc
let(:current_password) { user.password }
it 'presents plaintext codes for the user to save' do
- expect(user).to receive(:generate_otp_backup_codes!).and_return(%w(a b c))
+ expect(user).to receive(:generate_otp_backup_codes!).and_return(%w[a b c])
post :codes, params: { current_password: current_password }
- expect(assigns[:codes]).to match_array %w(a b c)
+ expect(assigns[:codes]).to match_array %w[a b c]
end
it 'persists the generated codes' do
diff --git a/spec/controllers/projects/artifacts_controller_spec.rb b/spec/controllers/projects/artifacts_controller_spec.rb
index 44615506e5d..31e6d6ae5e6 100644
--- a/spec/controllers/projects/artifacts_controller_spec.rb
+++ b/spec/controllers/projects/artifacts_controller_spec.rb
@@ -409,7 +409,7 @@ RSpec.describe Projects::ArtifactsController, feature_category: :build_artifacts
expect(response.headers['Gitlab-Workhorse-Detect-Content-Type']).to eq('true')
expect(send_data).to start_with('artifacts-entry:')
- expect(params.keys).to eq(%w(Archive Entry))
+ expect(params.keys).to eq(%w[Archive Entry])
expect(params['Archive']).to start_with(archive_path)
# On object storage, the URL can end with a query string
expect(params['Archive']).to match(archive_matcher)
diff --git a/spec/controllers/projects/deploy_keys_controller_spec.rb b/spec/controllers/projects/deploy_keys_controller_spec.rb
index 52a605cf548..96addb4b6c5 100644
--- a/spec/controllers/projects/deploy_keys_controller_spec.rb
+++ b/spec/controllers/projects/deploy_keys_controller_spec.rb
@@ -54,7 +54,7 @@ RSpec.describe Projects::DeployKeysController do
it 'returns json in a correct format' do
get :index, params: params.merge(format: :json)
- expect(json_response.keys).to match_array(%w(enabled_keys available_project_keys public_keys))
+ expect(json_response.keys).to match_array(%w[enabled_keys available_project_keys public_keys])
expect(json_response['enabled_keys'].count).to eq(1)
expect(json_response['available_project_keys'].count).to eq(1)
expect(json_response['public_keys'].count).to eq(1)
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index d4f04105605..5024698a769 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -191,7 +191,7 @@ RSpec.describe Projects::IssuesController, :request_store, feature_category: :te
it 'redirects to work item page using iid' do
make_request
- expect(response).to redirect_to(project_work_items_path(project, task.iid, query))
+ expect(response).to redirect_to(project_work_item_path(project, task.iid, query))
end
end
@@ -849,7 +849,7 @@ RSpec.describe Projects::IssuesController, :request_store, feature_category: :te
update_issue(issue_params: { assignee_ids: [assignee.id] })
expect(json_response['assignees'].first.keys)
- .to include(*%w(id name username avatar_url state web_url))
+ .to include(*%w[id name username avatar_url state web_url])
end
end
diff --git a/spec/controllers/projects/merge_requests/conflicts_controller_spec.rb b/spec/controllers/projects/merge_requests/conflicts_controller_spec.rb
index 428ce5b5607..1fe297362bf 100644
--- a/spec/controllers/projects/merge_requests/conflicts_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/conflicts_controller_spec.rb
@@ -97,7 +97,7 @@ RSpec.describe Projects::MergeRequests::ConflictsController, feature_category: :
section['lines'].each do |line|
if section['conflict']
- expect(line['type']).to be_in(%w(old new))
+ expect(line['type']).to be_in(%w[old new])
expect(line.values_at('old_line', 'new_line')).to contain_exactly(nil, a_kind_of(Integer))
elsif line['type'].nil?
expect(line['old_line']).not_to eq(nil)
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index a47bb98770c..92bbffdfde5 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -407,7 +407,7 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :code_review
update_merge_request({ assignee_ids: [assignee.id] }, format: :json)
- expect(json_response['assignees']).to all(include(*%w(name username avatar_url id state web_url)))
+ expect(json_response['assignees']).to all(include(*%w[name username avatar_url id state web_url]))
end
end
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index 43e7bafc206..deaed8e1162 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -216,7 +216,7 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte
end
def create_all_pipeline_types
- %w(pending running success failed canceled).each_with_index do |status, index|
+ %w[pending running success failed canceled].each_with_index do |status, index|
create_pipeline(status, project.commit("HEAD~#{index}"))
end
@@ -821,7 +821,7 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte
subject { get :charts, params: request_params, format: :html }
let(:request_params) { { namespace_id: project.namespace, project_id: project, id: pipeline.id, chart: tab[:chart_param] } }
- let(:action) { tab[:event] }
+ let(:event) { tab[:event] }
let(:namespace) { project.namespace }
end
end
diff --git a/spec/controllers/projects/project_members_controller_spec.rb b/spec/controllers/projects/project_members_controller_spec.rb
index 9657cf33afd..c20f92cd2f0 100644
--- a/spec/controllers/projects/project_members_controller_spec.rb
+++ b/spec/controllers/projects/project_members_controller_spec.rb
@@ -98,18 +98,9 @@ RSpec.describe Projects::ProjectMembersController do
end
end
- context 'when invited group members are present' do
+ shared_examples 'users are invited through groups' do
let_it_be(:invited_group_member) { create(:user) }
- before do
- group.add_owner(invited_group_member)
-
- project.invited_groups << group
- project.add_maintainer(user)
-
- sign_in(user)
- end
-
context 'when webui_members_inherited_users is disabled' do
before do
stub_feature_flags(webui_members_inherited_users: false)
@@ -128,6 +119,35 @@ RSpec.describe Projects::ProjectMembersController do
expect(assigns(:project_members).map(&:user_id)).to include(invited_group_member.id)
end
end
+
+ context 'when invited group members are present' do
+ before do
+ group.add_owner(invited_group_member)
+
+ project.invited_groups << group
+ project.add_maintainer(user)
+
+ sign_in(user)
+ end
+
+ include_examples 'users are invited through groups'
+ end
+
+ context 'when group is invited to project parent' do
+ let_it_be(:parent_group) { create(:group, :public) }
+ let_it_be(:project, reload: true) { create(:project, :public, namespace: parent_group) }
+
+ before do
+ group.add_owner(invited_group_member)
+
+ parent_group.shared_with_groups << group
+ project.add_maintainer(user)
+
+ sign_in(user)
+ end
+
+ include_examples 'users are invited through groups'
+ end
end
context 'invited members' do
diff --git a/spec/controllers/projects/prometheus/metrics_controller_spec.rb b/spec/controllers/projects/prometheus/metrics_controller_spec.rb
deleted file mode 100644
index 8f8edebbc30..00000000000
--- a/spec/controllers/projects/prometheus/metrics_controller_spec.rb
+++ /dev/null
@@ -1,230 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::Prometheus::MetricsController, feature_category: :metrics do
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, :with_prometheus_integration) }
-
- let(:prometheus_adapter) { double('prometheus_adapter', can_query?: true) }
-
- before do
- stub_feature_flags(remove_monitor_metrics: false)
- project.add_maintainer(user)
- sign_in(user)
- end
-
- describe 'GET #active_common' do
- context 'when prometheus_adapter can query' do
- before do
- allow(controller).to receive(:prometheus_adapter).and_return(prometheus_adapter)
- end
-
- context 'when prometheus metrics are enabled' do
- context 'when data is not present' do
- before do
- allow(prometheus_adapter).to receive(:query).with(:matched_metrics).and_return({})
- end
-
- it 'returns no content response' do
- get :active_common, params: project_params(format: :json)
-
- expect(response).to have_gitlab_http_status(:no_content)
- end
- end
-
- context 'when data is available' do
- let(:sample_response) { { some_data: 1 } }
-
- before do
- allow(prometheus_adapter).to receive(:query).with(:matched_metrics).and_return(sample_response)
- end
-
- it 'returns no content response' do
- get :active_common, params: project_params(format: :json)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to eq(sample_response.deep_stringify_keys)
- end
- end
-
- context 'when requesting non json response' do
- it 'returns not found response' do
- get :active_common, params: project_params
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
- end
-
- context 'when prometheus_adapter cannot query' do
- it 'renders 404' do
- prometheus_adapter = double('prometheus_adapter', can_query?: false)
-
- allow(controller).to receive(:prometheus_adapter).and_return(prometheus_adapter)
- allow(prometheus_adapter).to receive(:query).with(:matched_metrics).and_return({})
-
- get :active_common, params: project_params(format: :json)
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when prometheus_adapter is disabled' do
- let(:project) { create(:project) }
-
- it 'renders 404' do
- get :active_common, params: project_params(format: :json)
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when metrics dashboard feature is unavailable' do
- before do
- stub_feature_flags(remove_monitor_metrics: true)
- end
-
- it 'renders 404' do
- get :active_common, params: project_params(format: :json)
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-
- describe 'POST #validate_query' do
- before do
- allow(controller).to receive(:prometheus_adapter).and_return(prometheus_adapter)
- allow(prometheus_adapter).to receive(:query).with(:validate, query) { validation_result }
- end
-
- let(:query) { 'avg(metric)' }
-
- context 'validation information is ready' do
- let(:validation_result) { { valid: true } }
-
- it 'validation data is returned' do
- post :validate_query, params: project_params(format: :json, query: query)
-
- expect(json_response).to eq('valid' => true)
- end
- end
-
- context 'validation information is not ready' do
- let(:validation_result) { nil }
-
- it 'validation data is returned' do
- post :validate_query, params: project_params(format: :json, query: query)
-
- expect(response).to have_gitlab_http_status(:accepted)
- end
- end
- end
-
- describe 'GET #index' do
- context 'with custom metric present' do
- let!(:prometheus_metric) { create(:prometheus_metric, project: project) }
-
- it 'returns a list of metrics' do
- get :index, params: project_params(format: :json)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('prometheus/metrics')
- end
- end
-
- context 'without custom metrics ' do
- it 'returns an empty json' do
- get :index, params: project_params(format: :json)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to eq({})
- end
- end
- end
-
- describe 'POST #create' do
- context 'metric is valid' do
- let(:valid_metric) { { prometheus_metric: { title: 'title', query: 'query', group: 'business', y_label: 'label', unit: 'u', legend: 'legend' } } }
-
- it 'shows a success flash message' do
- post :create, params: project_params(valid_metric)
-
- expect(flash[:notice]).to include('Metric was successfully added.')
-
- expect(response).to redirect_to(edit_project_settings_integration_path(project, ::Integrations::Prometheus))
- end
- end
-
- context 'metric is invalid' do
- let(:invalid_metric) { { prometheus_metric: { title: 'title' } } }
-
- it 'renders new metric page' do
- post :create, params: project_params(invalid_metric)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to render_template('new')
- end
- end
- end
-
- describe 'PUT #update' do
- context 'metric is updated' do
- let_it_be(:metric) { create(:prometheus_metric, project: project) }
-
- let(:metric_params) { { prometheus_metric: { title: 'new_title' }, id: metric.id } }
-
- it 'shows a success flash message' do
- put :update, params: project_params(metric_params)
-
- expect(metric.reload.title).to eq('new_title')
- expect(flash[:notice]).to include('Metric was successfully updated.')
- expect(response).to redirect_to(edit_project_settings_integration_path(project, ::Integrations::Prometheus))
- end
- end
- end
-
- describe 'DELETE #destroy' do
- context 'format html' do
- let!(:metric) { create(:prometheus_metric, project: project) }
-
- it 'destroys the metric' do
- delete :destroy, params: project_params(id: metric.id)
-
- expect(response).to redirect_to(edit_project_settings_integration_path(project, ::Integrations::Prometheus))
- expect(PrometheusMetric.find_by(id: metric.id)).to be_nil
- end
- end
-
- context 'format json' do
- let!(:metric) { create(:prometheus_metric, project: project) }
-
- it 'destroys the metric' do
- delete :destroy, params: project_params(id: metric.id, format: :json)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(PrometheusMetric.find_by(id: metric.id)).to be_nil
- end
- end
- end
-
- describe '#prometheus_adapter' do
- before do
- allow(controller).to receive(:project).and_return(project)
- end
-
- it 'calls prometheus adapter service' do
- expect_next_instance_of(::Gitlab::Prometheus::Adapter) do |instance|
- expect(instance).to receive(:prometheus_adapter)
- end
-
- subject.__send__(:prometheus_adapter)
- end
- end
-
- def project_params(opts = {})
- opts.reverse_merge(namespace_id: project.namespace, project_id: project)
- end
-end
diff --git a/spec/controllers/projects/refs_controller_spec.rb b/spec/controllers/projects/refs_controller_spec.rb
index 7ea0e678a41..345e6e2e0de 100644
--- a/spec/controllers/projects/refs_controller_spec.rb
+++ b/spec/controllers/projects/refs_controller_spec.rb
@@ -26,32 +26,32 @@ RSpec.describe Projects::RefsController, feature_category: :source_code_manageme
subject { get :switch, params: params }
where(:destination, :ref_type, :path, :redirected_to) do
- 'tree' | nil | nil | lazy { project_tree_path(project, id) }
- 'tree' | 'heads' | nil | lazy { project_tree_path(project, id) }
+ 'tree' | nil | nil | lazy { project_tree_path(project, id) }
+ 'tree' | 'heads' | nil | lazy { project_tree_path(project, id) }
'tree' | nil | 'foo/bar' | lazy { project_tree_path(project, id_and_path) }
- 'blob' | nil | nil | lazy { project_blob_path(project, id) }
- 'blob' | 'heads' | nil | lazy { project_blob_path(project, id) }
+ 'blob' | nil | nil | lazy { project_blob_path(project, id) }
+ 'blob' | 'heads' | nil | lazy { project_blob_path(project, id) }
'blob' | nil | 'foo/bar' | lazy { project_blob_path(project, id_and_path) }
- 'graph' | nil | nil | lazy { project_network_path(project, id) }
- 'graph' | 'heads' | nil | lazy { project_network_path(project, id, ref_type: 'heads') }
+ 'graph' | nil | nil | lazy { project_network_path(project, id) }
+ 'graph' | 'heads' | nil | lazy { project_network_path(project, id, ref_type: 'heads') }
'graph' | nil | 'foo/bar' | lazy { project_network_path(project, id_and_path) }
- 'graphs' | nil | nil | lazy { project_graph_path(project, id) }
- 'graphs' | 'heads' | nil | lazy { project_graph_path(project, id, ref_type: 'heads') }
+ 'graphs' | nil | nil | lazy { project_graph_path(project, id) }
+ 'graphs' | 'heads' | nil | lazy { project_graph_path(project, id, ref_type: 'heads') }
'graphs' | nil | 'foo/bar' | lazy { project_graph_path(project, id_and_path) }
- 'find_file' | nil | nil | lazy { project_find_file_path(project, id) }
- 'find_file' | 'heads' | nil | lazy { project_find_file_path(project, id) }
+ 'find_file' | nil | nil | lazy { project_find_file_path(project, id) }
+ 'find_file' | 'heads' | nil | lazy { project_find_file_path(project, id) }
'find_file' | nil | 'foo/bar' | lazy { project_find_file_path(project, id_and_path) }
- 'graphs_commits' | nil | nil | lazy { commits_project_graph_path(project, id) }
- 'graphs_commits' | 'heads' | nil | lazy { commits_project_graph_path(project, id) }
+ 'graphs_commits' | nil | nil | lazy { commits_project_graph_path(project, id) }
+ 'graphs_commits' | 'heads' | nil | lazy { commits_project_graph_path(project, id) }
'graphs_commits' | nil | 'foo/bar' | lazy { commits_project_graph_path(project, id_and_path) }
- 'badges' | nil | nil | lazy { project_settings_ci_cd_path(project, ref: id) }
- 'badges' | 'heads' | nil | lazy { project_settings_ci_cd_path(project, ref: id) }
+ 'badges' | nil | nil | lazy { project_settings_ci_cd_path(project, ref: id) }
+ 'badges' | 'heads' | nil | lazy { project_settings_ci_cd_path(project, ref: id) }
'badges' | nil | 'foo/bar' | lazy { project_settings_ci_cd_path(project, ref: id_and_path) }
- 'commits' | nil | nil | lazy { project_commits_path(project, id) }
- 'commits' | 'heads' | nil | lazy { project_commits_path(project, id, ref_type: 'heads') }
+ 'commits' | nil | nil | lazy { project_commits_path(project, id) }
+ 'commits' | 'heads' | nil | lazy { project_commits_path(project, id, ref_type: 'heads') }
'commits' | nil | 'foo/bar' | lazy { project_commits_path(project, id_and_path) }
- nil | nil | nil | lazy { project_commits_path(project, id) }
- nil | 'heads' | nil | lazy { project_commits_path(project, id, ref_type: 'heads') }
+ nil | nil | nil | lazy { project_commits_path(project, id) }
+ nil | 'heads' | nil | lazy { project_commits_path(project, id, ref_type: 'heads') }
nil | nil | 'foo/bar' | lazy { project_commits_path(project, id_and_path) }
end
diff --git a/spec/controllers/projects/registry/repositories_controller_spec.rb b/spec/controllers/projects/registry/repositories_controller_spec.rb
index 834fdddd583..a07a3641edf 100644
--- a/spec/controllers/projects/registry/repositories_controller_spec.rb
+++ b/spec/controllers/projects/registry/repositories_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::Registry::RepositoriesController do
+RSpec.describe Projects::Registry::RepositoriesController, feature_category: :container_registry do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :private) }
@@ -103,8 +103,6 @@ RSpec.describe Projects::Registry::RepositoriesController do
end
it 'marks the repository as delete_scheduled' do
- expect(DeleteContainerRepositoryWorker).not_to receive(:perform_async).with(user.id, repository.id)
-
expect { delete_repository(repository) }
.to change { repository.reload.status }.from(nil).to('delete_scheduled')
@@ -113,8 +111,6 @@ RSpec.describe Projects::Registry::RepositoriesController do
end
it 'tracks the event', :snowplow do
- allow(DeleteContainerRepositoryWorker).to receive(:perform_async).with(user.id, repository.id)
-
delete_repository(repository)
expect_snowplow_event(category: anything, action: 'delete_repository')
diff --git a/spec/controllers/projects/runners_controller_spec.rb b/spec/controllers/projects/runners_controller_spec.rb
index d6816bd49af..3602a2df959 100644
--- a/spec/controllers/projects/runners_controller_spec.rb
+++ b/spec/controllers/projects/runners_controller_spec.rb
@@ -17,7 +17,6 @@ RSpec.describe Projects::RunnersController, feature_category: :runner_fleet do
before do
sign_in(user)
- project.add_maintainer(user)
end
describe '#new' do
@@ -29,7 +28,7 @@ RSpec.describe Projects::RunnersController, feature_category: :runner_fleet do
end
context 'when user is maintainer' do
- before do
+ before_all do
project.add_maintainer(user)
end
@@ -42,7 +41,7 @@ RSpec.describe Projects::RunnersController, feature_category: :runner_fleet do
end
context 'when user is not maintainer' do
- before do
+ before_all do
project.add_developer(user)
end
@@ -55,15 +54,19 @@ RSpec.describe Projects::RunnersController, feature_category: :runner_fleet do
end
describe '#register' do
- subject(:register) { get :register, params: { namespace_id: project.namespace, project_id: project, id: new_runner } }
+ subject(:register) do
+ get :register, params: { namespace_id: project.namespace, project_id: project, id: new_runner }
+ end
context 'when user is maintainer' do
- before do
+ before_all do
project.add_maintainer(user)
end
context 'when runner can be registered after creation' do
- let_it_be(:new_runner) { create(:ci_runner, :project, projects: [project], registration_type: :authenticated_user) }
+ let_it_be(:new_runner) do
+ create(:ci_runner, :project, projects: [project], registration_type: :authenticated_user)
+ end
it 'renders a :register template' do
register
@@ -85,12 +88,14 @@ RSpec.describe Projects::RunnersController, feature_category: :runner_fleet do
end
context 'when user is not maintainer' do
- before do
+ before_all do
project.add_developer(user)
end
context 'when runner can be registered after creation' do
- let_it_be(:new_runner) { create(:ci_runner, :project, projects: [project], registration_type: :authenticated_user) }
+ let_it_be(:new_runner) do
+ create(:ci_runner, :project, projects: [project], registration_type: :authenticated_user)
+ end
it 'returns :not_found' do
register
@@ -102,6 +107,10 @@ RSpec.describe Projects::RunnersController, feature_category: :runner_fleet do
end
describe '#update' do
+ before_all do
+ project.add_maintainer(user)
+ end
+
it 'updates the runner and ticks the queue' do
new_desc = runner.description.swapcase
@@ -117,6 +126,10 @@ RSpec.describe Projects::RunnersController, feature_category: :runner_fleet do
end
describe '#destroy' do
+ before_all do
+ project.add_maintainer(user)
+ end
+
it 'destroys the runner' do
expect_next_instance_of(Ci::Runners::UnregisterRunnerService, runner, user) do |service|
expect(service).to receive(:execute).once.and_call_original
@@ -130,6 +143,10 @@ RSpec.describe Projects::RunnersController, feature_category: :runner_fleet do
end
describe '#resume' do
+ before_all do
+ project.add_maintainer(user)
+ end
+
it 'marks the runner as active and ticks the queue' do
runner.update!(active: false)
@@ -145,6 +162,10 @@ RSpec.describe Projects::RunnersController, feature_category: :runner_fleet do
end
describe '#pause' do
+ before_all do
+ project.add_maintainer(user)
+ end
+
it 'marks the runner as inactive and ticks the queue' do
runner.update!(active: true)
@@ -160,9 +181,14 @@ RSpec.describe Projects::RunnersController, feature_category: :runner_fleet do
end
describe '#toggle_shared_runners' do
+ let(:user) { create(:user) }
let(:group) { create(:group) }
let(:project) { create(:project, group: group) }
+ before do
+ project.add_maintainer(user) # rubocop: disable RSpec/BeforeAllRoleAssignment
+ end
+
it 'toggles shared_runners_enabled when the group allows shared runners' do
project.update!(shared_runners_enabled: true)
@@ -196,7 +222,8 @@ RSpec.describe Projects::RunnersController, feature_category: :runner_fleet do
expect(response).to have_gitlab_http_status(:unauthorized)
expect(project.shared_runners_enabled).to eq(false)
- expect(json_response['error']).to eq('Shared runners enabled cannot be enabled because parent group does not allow it')
+ expect(json_response['error'])
+ .to eq('Shared runners enabled cannot be enabled because parent group does not allow it')
end
end
end
diff --git a/spec/controllers/projects/settings/ci_cd_controller_spec.rb b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
index 63c870eb133..b27b4e6fe19 100644
--- a/spec/controllers/projects/settings/ci_cd_controller_spec.rb
+++ b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
@@ -100,15 +100,15 @@ RSpec.describe Projects::Settings::CiCdController, feature_category: :continuous
show # warmup
# with one tag
- create(:ci_runner, :instance, tag_list: %w(shared_runner))
- create(:ci_runner, :project, projects: [other_project], tag_list: %w(project_runner))
- create(:ci_runner, :group, groups: [group], tag_list: %w(group_runner))
+ create(:ci_runner, :instance, tag_list: %w[shared_runner])
+ create(:ci_runner, :project, projects: [other_project], tag_list: %w[project_runner])
+ create(:ci_runner, :group, groups: [group], tag_list: %w[group_runner])
control = ActiveRecord::QueryRecorder.new { show }
# with several tags
- create(:ci_runner, :instance, tag_list: %w(shared_runner tag2 tag3))
- create(:ci_runner, :project, projects: [other_project], tag_list: %w(project_runner tag2 tag3))
- create(:ci_runner, :group, groups: [group], tag_list: %w(group_runner tag2 tag3))
+ create(:ci_runner, :instance, tag_list: %w[shared_runner tag2 tag3])
+ create(:ci_runner, :project, projects: [other_project], tag_list: %w[project_runner tag2 tag3])
+ create(:ci_runner, :group, groups: [group], tag_list: %w[group_runner tag2 tag3])
expect { show }.not_to exceed_query_limit(control)
end
diff --git a/spec/controllers/projects/work_items_controller_spec.rb b/spec/controllers/projects/work_items_controller_spec.rb
index e0f61a4977b..fc5ac77c96c 100644
--- a/spec/controllers/projects/work_items_controller_spec.rb
+++ b/spec/controllers/projects/work_items_controller_spec.rb
@@ -34,10 +34,10 @@ RSpec.describe Projects::WorkItemsController, feature_category: :team_planning d
end
end
- describe 'GET index' do
+ describe 'GET show' do
specify do
expect(
- get(:index, params: { namespace_id: project.namespace, project_id: project, work_items_path: work_item.id })
+ get(:show, params: { namespace_id: project.namespace, project_id: project, iid: work_item.iid })
).to have_request_urgency(:low)
end
end
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index 7d7bebb7106..dea359e8fee 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -1368,7 +1368,7 @@ RSpec.describe ProjectsController, feature_category: :groups_and_projects do
it 'renders json in a correct format' do
post :preview_markdown, params: { namespace_id: public_project.namespace, id: public_project, text: '*Markdown* text' }
- expect(json_response.keys).to match_array(%w(body references))
+ expect(json_response.keys).to match_array(%w[body references])
end
context 'when not authorized' do
diff --git a/spec/controllers/registrations/welcome_controller_spec.rb b/spec/controllers/registrations/welcome_controller_spec.rb
deleted file mode 100644
index 0bac52c8dca..00000000000
--- a/spec/controllers/registrations/welcome_controller_spec.rb
+++ /dev/null
@@ -1,119 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Registrations::WelcomeController, feature_category: :system_access do
- let(:user) { create(:user) }
-
- describe '#show' do
- subject(:show) { get :show }
-
- context 'without a signed in user' do
- it { is_expected.to redirect_to new_user_registration_path }
- end
-
- context 'when setup_for_company is not set' do
- before do
- sign_in(user)
- end
-
- it { is_expected.to render_template(:show) }
-
- render_views
-
- it 'has the expected submission url' do
- show
-
- expect(response.body).to include("action=\"#{users_sign_up_welcome_path}\"")
- end
- end
-
- context 'when setup_for_company is set' do
- before do
- user.update!(setup_for_company: false)
- sign_in(user)
- end
-
- it { is_expected.to redirect_to(dashboard_projects_path) }
- end
-
- context 'when 2FA is required from group' do
- before do
- user = create(:user, require_two_factor_authentication_from_group: true)
- sign_in(user)
- end
-
- it 'does not perform a redirect' do
- expect(subject).not_to redirect_to(profile_two_factor_auth_path)
- end
- end
-
- context 'when welcome step is completed' do
- before do
- user.update!(setup_for_company: true)
- end
-
- context 'when user is confirmed' do
- before do
- sign_in(user)
- end
-
- it { is_expected.to redirect_to dashboard_projects_path }
- end
-
- context 'when user is not confirmed' do
- before do
- stub_application_setting_enum('email_confirmation_setting', 'hard')
-
- sign_in(user)
-
- user.update!(confirmed_at: nil)
- end
-
- it { is_expected.to redirect_to user_session_path }
- end
- end
- end
-
- describe '#update' do
- subject(:update) do
- patch :update, params: { user: { role: 'software_developer', setup_for_company: 'false' } }
- end
-
- context 'without a signed in user' do
- it { is_expected.to redirect_to new_user_registration_path }
- end
-
- context 'with a signed in user' do
- before do
- sign_in(user)
- end
-
- it { is_expected.to redirect_to(dashboard_projects_path) }
-
- context 'when the new user already has any accepted group membership' do
- let!(:member1) { create(:group_member, user: user) }
-
- it 'redirects to the group activity page' do
- expect(subject).to redirect_to(activity_group_path(member1.source))
- end
-
- context 'when the new user already has more than 1 accepted group membership' do
- it 'redirects to the most recent membership group activity page' do
- member2 = create(:group_member, user: user)
-
- expect(subject).to redirect_to(activity_group_path(member2.source))
- end
- end
-
- context 'when the member has an orphaned source at the time of the welcome' do
- it 'redirects to the project dashboard page' do
- member1.source.delete
-
- expect(subject).to redirect_to(dashboard_projects_path)
- end
- end
- end
- end
- end
-end
diff --git a/spec/controllers/registrations_controller_spec.rb b/spec/controllers/registrations_controller_spec.rb
index 9aa8a2ae605..156479c7044 100644
--- a/spec/controllers/registrations_controller_spec.rb
+++ b/spec/controllers/registrations_controller_spec.rb
@@ -46,7 +46,7 @@ RSpec.describe RegistrationsController, feature_category: :user_profile do
subject(:post_create) { post(:create, params: user_params, session: session_params) }
- context '`blocked_pending_approval` state' do
+ context 'with `blocked_pending_approval` state' do
context 'when the `require_admin_approval_after_user_signup` setting is turned on' do
before do
stub_application_setting(require_admin_approval_after_user_signup: true)
@@ -82,7 +82,7 @@ RSpec.describe RegistrationsController, feature_category: :user_profile do
subject
end
- context 'email confirmation' do
+ context 'for email confirmation' do
context 'when email confirmation setting is set to `hard`' do
before do
stub_application_setting_enum('email_confirmation_setting', 'hard')
@@ -95,7 +95,7 @@ RSpec.describe RegistrationsController, feature_category: :user_profile do
end
end
- context 'audit events' do
+ context 'with audit events' do
context 'when not licensed' do
before do
stub_licensed_features(admin_audit_log: false)
@@ -129,7 +129,7 @@ RSpec.describe RegistrationsController, feature_category: :user_profile do
subject
end
- context 'email confirmation' do
+ context 'with email confirmation' do
context 'when email confirmation setting is set to `hard`' do
before do
stub_application_setting_enum('email_confirmation_setting', 'hard')
@@ -145,7 +145,7 @@ RSpec.describe RegistrationsController, feature_category: :user_profile do
end
end
- context 'private profile' do
+ context 'with private profile' do
context 'when the `user_defaults_to_private_profile` setting is turned on' do
before do
stub_application_setting(user_defaults_to_private_profile: true)
@@ -160,7 +160,7 @@ RSpec.describe RegistrationsController, feature_category: :user_profile do
end
end
- context 'email confirmation' do
+ context 'with email confirmation' do
before do
stub_feature_flags(identity_verification: false)
end
@@ -209,7 +209,7 @@ RSpec.describe RegistrationsController, feature_category: :user_profile do
context 'when member exists from the session key value' do
it 'tracks the invite acceptance' do
- subject
+ post_create
expect_snowplow_event(
category: 'RegistrationsController',
@@ -299,7 +299,7 @@ RSpec.describe RegistrationsController, feature_category: :user_profile do
it 'authenticates the user and sends a confirmation email' do
expect { subject }.to have_enqueued_mail(DeviseMailer, :confirmation_instructions)
expect(controller.current_user).to be_present
- expect(response).to redirect_to(users_sign_up_welcome_path)
+ expect(response).to redirect_to(dashboard_projects_path)
end
it 'does not track an almost there redirect' do
@@ -312,6 +312,11 @@ RSpec.describe RegistrationsController, feature_category: :user_profile do
)
end
+ it_behaves_like Onboarding::Redirectable do
+ let(:email) { user_params.dig(:user, :email) }
+ let(:session_params) { { invite_email: email } }
+ end
+
context 'when invite email matches email used on registration' do
let(:session_params) { { invite_email: user_params.dig(:user, :email) } }
@@ -375,10 +380,10 @@ RSpec.describe RegistrationsController, feature_category: :user_profile do
end
end
- it 'redirects to the welcome page when the reCAPTCHA is solved' do
+ it 'redirects to the dashboard projects page when the reCAPTCHA is solved' do
subject
- expect(response).to redirect_to(users_sign_up_welcome_path)
+ expect(response).to redirect_to(dashboard_projects_path)
end
end
@@ -430,7 +435,7 @@ RSpec.describe RegistrationsController, feature_category: :user_profile do
describe 'timestamp spam detection' do
let(:auth_log_message) { 'Invisible_Captcha_Timestamp_Request' }
- context 'the sign up form has been submitted without the invisible_captcha_timestamp parameter' do
+ context 'when the sign up form has been submitted without the invisible_captcha_timestamp parameter' do
let(:session_params) { nil }
it 'logs the request, refuses to create an account and displays a flash alert' do
@@ -446,7 +451,7 @@ RSpec.describe RegistrationsController, feature_category: :user_profile do
end
end
- context 'the sign up form has been submitted too quickly' do
+ context 'when the sign up form has been submitted too quickly' do
let(:submit_time) { form_rendered_time }
it 'logs the request, refuses to create an account and displays a flash alert' do
@@ -464,7 +469,7 @@ RSpec.describe RegistrationsController, feature_category: :user_profile do
end
end
- context 'terms of service' do
+ context 'with terms of service' do
context 'when terms are enforced' do
before do
enforce_terms
@@ -674,7 +679,7 @@ RSpec.describe RegistrationsController, feature_category: :user_profile do
expect(response).to redirect_to new_user_session_path
end
- context 'user requires password confirmation' do
+ context 'when user requires password confirmation' do
it 'fails if password confirmation is not provided' do
post :destroy
@@ -694,7 +699,7 @@ RSpec.describe RegistrationsController, feature_category: :user_profile do
end
end
- context 'user does not require password confirmation' do
+ context 'when user does not require password confirmation' do
before do
stub_application_setting(password_authentication_enabled_for_web: false)
stub_application_setting(password_authentication_enabled_for_git: false)
@@ -719,8 +724,8 @@ RSpec.describe RegistrationsController, feature_category: :user_profile do
end
end
- context 'prerequisites for account deletion' do
- context 'solo-owned groups' do
+ context 'for prerequisites for account deletion' do
+ context 'with solo-owned groups' do
let(:group) { create(:group) }
context 'if the user is the sole owner of at least one group' do
diff --git a/spec/controllers/search_controller_spec.rb b/spec/controllers/search_controller_spec.rb
index 9771141a955..94aedf463e9 100644
--- a/spec/controllers/search_controller_spec.rb
+++ b/spec/controllers/search_controller_spec.rb
@@ -551,7 +551,7 @@ RSpec.describe SearchController, feature_category: :global_search do
expect(payload[:metadata]['meta.search.force_search_results']).to eq('true')
expect(payload[:metadata]['meta.search.filters.confidential']).to eq('true')
expect(payload[:metadata]['meta.search.filters.state']).to eq('true')
- expect(payload[:metadata]['meta.search.project_ids']).to eq(%w(456 789))
+ expect(payload[:metadata]['meta.search.project_ids']).to eq(%w[456 789])
expect(payload[:metadata]['meta.search.type']).to eq('basic')
expect(payload[:metadata]['meta.search.level']).to eq('global')
expect(payload[:metadata]['meta.search.filters.language']).to eq('ruby')
@@ -562,7 +562,7 @@ RSpec.describe SearchController, feature_category: :global_search do
search: 'hello world',
group_id: '123',
project_id: '456',
- project_ids: %w(456 789),
+ project_ids: %w[456 789],
confidential: true,
include_archived: true,
state: true,
diff --git a/spec/controllers/sessions_controller_spec.rb b/spec/controllers/sessions_controller_spec.rb
index ce9703753cf..85248b6fa6c 100644
--- a/spec/controllers/sessions_controller_spec.rb
+++ b/spec/controllers/sessions_controller_spec.rb
@@ -195,10 +195,6 @@ RSpec.describe SessionsController, feature_category: :system_access do
end
context 'with reCAPTCHA' do
- before do
- stub_feature_flags(arkose_labs_login_challenge: false)
- end
-
def unsuccesful_login(user_params, sesion_params: {})
# Without this, `verify_recaptcha` arbitrarily returns true in test env
Recaptcha.configuration.skip_verify_env.delete('test')
diff --git a/spec/controllers/snippets_controller_spec.rb b/spec/controllers/snippets_controller_spec.rb
index 18b2d3b14ec..b33687df2ef 100644
--- a/spec/controllers/snippets_controller_spec.rb
+++ b/spec/controllers/snippets_controller_spec.rb
@@ -452,7 +452,7 @@ RSpec.describe SnippetsController do
post :preview_markdown, params: { id: public_snippet, text: '*Markdown* text' }
- expect(json_response.keys).to match_array(%w(body references))
+ expect(json_response.keys).to match_array(%w[body references])
end
end
end
diff --git a/spec/db/development/create_work_item_related_link_restrictions_spec.rb b/spec/db/development/create_work_item_related_link_restrictions_spec.rb
new file mode 100644
index 00000000000..5a7ade3c83e
--- /dev/null
+++ b/spec/db/development/create_work_item_related_link_restrictions_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Create work item related links restrictions in development', feature_category: :portfolio_management do
+ subject { load Rails.root.join('db/fixtures/development/51_create_work_item_related_link_restrictions.rb') }
+
+ it_behaves_like 'work item related links restrictions importer'
+end
diff --git a/spec/db/production/create_work_item_related_link_restrictions_spec.rb b/spec/db/production/create_work_item_related_link_restrictions_spec.rb
new file mode 100644
index 00000000000..e3147593327
--- /dev/null
+++ b/spec/db/production/create_work_item_related_link_restrictions_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Create work item related links restrictions in production', feature_category: :portfolio_management do
+ subject { load Rails.root.join('db/fixtures/production/040_create_work_item_related_link_restrictions.rb') }
+
+ it_behaves_like 'work item related links restrictions importer'
+end
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index cfd6bbf3094..ac1137e5387 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe 'Database schema', feature_category: :database do
approvers: %w[target_id user_id],
analytics_cycle_analytics_aggregations: %w[last_full_issues_id last_full_merge_requests_id last_incremental_issues_id last_full_run_issues_id last_full_run_merge_requests_id last_incremental_merge_requests_id last_consistency_check_issues_stage_event_hash_id last_consistency_check_issues_issuable_id last_consistency_check_merge_requests_stage_event_hash_id last_consistency_check_merge_requests_issuable_id],
analytics_cycle_analytics_merge_request_stage_events: %w[author_id group_id merge_request_id milestone_id project_id stage_event_hash_id state_id],
- analytics_cycle_analytics_issue_stage_events: %w[author_id group_id issue_id milestone_id project_id stage_event_hash_id state_id],
+ analytics_cycle_analytics_issue_stage_events: %w[author_id group_id issue_id milestone_id project_id stage_event_hash_id state_id sprint_id],
audit_events: %w[author_id entity_id target_id],
award_emoji: %w[awardable_id user_id],
aws_roles: %w[role_external_id],
@@ -44,7 +44,7 @@ RSpec.describe 'Database schema', feature_category: :database do
broadcast_messages: %w[namespace_id],
chat_names: %w[chat_id team_id user_id],
chat_teams: %w[team_id],
- ci_builds: %w[project_id runner_id user_id erased_by_id trigger_request_id partition_id],
+ ci_builds: %w[project_id runner_id user_id erased_by_id trigger_request_id partition_id auto_canceled_by_partition_id],
ci_namespace_monthly_usages: %w[namespace_id],
ci_pipeline_variables: %w[partition_id],
ci_pipelines: %w[partition_id],
@@ -87,8 +87,9 @@ RSpec.describe 'Database schema', feature_category: :database do
oauth_access_grants: %w[resource_owner_id application_id],
oauth_access_tokens: %w[resource_owner_id application_id],
oauth_applications: %w[owner_id],
- p_ci_builds: %w[erased_by_id trigger_request_id partition_id],
+ p_ci_builds: %w[erased_by_id trigger_request_id partition_id auto_canceled_by_partition_id],
p_batched_git_ref_updates_deletions: %w[project_id partition_id],
+ p_ci_finished_build_ch_sync_events: %w[build_id],
product_analytics_events_experimental: %w[event_id txn_id user_id],
project_build_artifacts_size_refreshes: %w[last_job_artifact_id],
project_data_transfers: %w[project_id namespace_id],
diff --git a/spec/experiments/application_experiment_spec.rb b/spec/experiments/application_experiment_spec.rb
index 8a65c219f5d..00370a5b7e3 100644
--- a/spec/experiments/application_experiment_spec.rb
+++ b/spec/experiments/application_experiment_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ApplicationExperiment, :experiment, feature_category: :experimentation_conversion do
+RSpec.describe ApplicationExperiment, :experiment, feature_category: :acquisition do
subject(:application_experiment) { described_class.new('namespaced/stub', **context) }
let(:context) { {} }
diff --git a/spec/experiments/ios_specific_templates_experiment_spec.rb b/spec/experiments/ios_specific_templates_experiment_spec.rb
index 4d02381dbde..909ac22b97b 100644
--- a/spec/experiments/ios_specific_templates_experiment_spec.rb
+++ b/spec/experiments/ios_specific_templates_experiment_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe IosSpecificTemplatesExperiment do
let_it_be(:project) { create(:project, :auto_devops_disabled) }
let!(:project_setting) { create(:project_setting, project: project, target_platforms: target_platforms) }
- let(:target_platforms) { %w(ios) }
+ let(:target_platforms) { %w[ios] }
before do
stub_experiments(ios_specific_templates: :candidate)
diff --git a/spec/factories/achievements/user_achievements.rb b/spec/factories/achievements/user_achievements.rb
index a5fd1df38dd..880fa8e0947 100644
--- a/spec/factories/achievements/user_achievements.rb
+++ b/spec/factories/achievements/user_achievements.rb
@@ -5,6 +5,7 @@ FactoryBot.define do
user
achievement
awarded_by_user factory: :user
+ priority { nil }
trait :revoked do
revoked_by_user factory: :user
diff --git a/spec/factories/bulk_import.rb b/spec/factories/bulk_import.rb
index 54d05264269..097bec43543 100644
--- a/spec/factories/bulk_import.rb
+++ b/spec/factories/bulk_import.rb
@@ -22,5 +22,9 @@ FactoryBot.define do
trait :failed do
status { -1 }
end
+
+ trait :timeout do
+ status { 3 }
+ end
end
end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index 7325ab30989..867db96aaaf 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -30,6 +30,15 @@ FactoryBot.define do
ref { pipeline.ref }
+ runner_manager { nil }
+
+ after(:build) do |build, evaluator|
+ if evaluator.runner_manager
+ build.runner = evaluator.runner_manager.runner
+ create(:ci_runner_machine_build, build: build, runner_manager: evaluator.runner_manager)
+ end
+ end
+
trait :with_token do
transient do
generate_token { true }
@@ -432,8 +441,8 @@ FactoryBot.define do
services: ['postgres',
{ name: 'docker:stable-dind', entrypoint: '/bin/sh', command: 'sleep 30', alias: 'docker' },
{ name: 'mysql:latest', variables: { MYSQL_ROOT_PASSWORD: 'root123.' } }],
- script: %w(echo),
- after_script: %w(ls date),
+ script: %w[echo],
+ after_script: %w[ls date],
hooks: { pre_get_sources_script: ["echo 'hello pre_get_sources_script'"] },
artifacts: {
name: 'artifacts_file',
diff --git a/spec/factories/ci/reports/security/findings.rb b/spec/factories/ci/reports/security/findings.rb
index c57a2dd479f..202c2789b45 100644
--- a/spec/factories/ci/reports/security/findings.rb
+++ b/spec/factories/ci/reports/security/findings.rb
@@ -10,6 +10,7 @@ FactoryBot.define do
metadata_version { 'sast:1.0' }
name { 'Cipher with no integrity' }
report_type { :sast }
+ cvss { [{ vendor: "GitLab", vector_string: "CVSS:3.1/AV:N/AC:L/PR:H/UI:N/S:U/C:L/I:L/A:N" }] }
original_data do
{
description: "The cipher does not provide data integrity update 1",
diff --git a/spec/factories/ci/runners.rb b/spec/factories/ci/runners.rb
index f001cecd28e..2d67a4c0e80 100644
--- a/spec/factories/ci/runners.rb
+++ b/spec/factories/ci/runners.rb
@@ -83,7 +83,7 @@ FactoryBot.define do
trait :tagged_only do
run_untagged { false }
- tag_list { %w(tag1 tag2) }
+ tag_list { %w[tag1 tag2] }
end
trait :locked do
diff --git a/spec/factories/clusters/clusters.rb b/spec/factories/clusters/clusters.rb
index 2785a8c9946..d1e7a9fac95 100644
--- a/spec/factories/clusters/clusters.rb
+++ b/spec/factories/clusters/clusters.rb
@@ -83,7 +83,7 @@ FactoryBot.define do
end
trait :with_installed_prometheus do
- integration_prometheus factory: %i(clusters_integrations_prometheus)
+ integration_prometheus factory: %i[clusters_integrations_prometheus]
end
trait :with_domain do
@@ -92,7 +92,7 @@ FactoryBot.define do
trait :with_environments do
transient do
- environments { %i(staging production) }
+ environments { %i[staging production] }
end
cluster_type { Clusters::Cluster.cluster_types[:project_type] }
diff --git a/spec/factories/clusters/integrations/prometheus.rb b/spec/factories/clusters/integrations/prometheus.rb
index 1f0bb1ed512..5e8e770e0ed 100644
--- a/spec/factories/clusters/integrations/prometheus.rb
+++ b/spec/factories/clusters/integrations/prometheus.rb
@@ -2,7 +2,7 @@
FactoryBot.define do
factory :clusters_integrations_prometheus, class: 'Clusters::Integrations::Prometheus' do
- cluster factory: %i(cluster provided_by_gcp)
+ cluster factory: %i[cluster provided_by_gcp]
enabled { true }
trait :disabled do
diff --git a/spec/factories/clusters/providers/aws.rb b/spec/factories/clusters/providers/aws.rb
index 497181de89a..b529eea1458 100644
--- a/spec/factories/clusters/providers/aws.rb
+++ b/spec/factories/clusters/providers/aws.rb
@@ -7,7 +7,7 @@ FactoryBot.define do
kubernetes_version { '1.16' }
role_arn { 'arn:aws:iam::123456789012:role/role-name' }
vpc_id { 'vpc-00000000000000000' }
- subnet_ids { %w(subnet-00000000000000000 subnet-11111111111111111) }
+ subnet_ids { %w[subnet-00000000000000000 subnet-11111111111111111] }
security_group_id { 'sg-00000000000000000' }
key_name { 'user' }
diff --git a/spec/factories/container_registry/protection/rules.rb b/spec/factories/container_registry/protection/rules.rb
new file mode 100644
index 00000000000..cbd5c9d8652
--- /dev/null
+++ b/spec/factories/container_registry/protection/rules.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :container_registry_protection_rule, class: 'ContainerRegistry::Protection::Rule' do
+ project
+ container_path_pattern { '@my_scope/my_container' }
+ delete_protected_up_to_access_level { :developer }
+ push_protected_up_to_access_level { :developer }
+ end
+end
diff --git a/spec/factories/deployments.rb b/spec/factories/deployments.rb
index cbecaadff77..7d80ab7b15d 100644
--- a/spec/factories/deployments.rb
+++ b/spec/factories/deployments.rb
@@ -29,11 +29,11 @@ FactoryBot.define do
end
trait :on_cluster do
- deployment_cluster factory: %i(deployment_cluster provided_by_gcp)
+ deployment_cluster factory: %i[deployment_cluster provided_by_gcp]
end
trait :on_cluster_not_managed do
- deployment_cluster factory: %i(deployment_cluster not_managed)
+ deployment_cluster factory: %i[deployment_cluster not_managed]
end
trait :running do
diff --git a/spec/factories/environments.rb b/spec/factories/environments.rb
index 2df9f482bb9..6f2cd4bf596 100644
--- a/spec/factories/environments.rb
+++ b/spec/factories/environments.rb
@@ -15,6 +15,10 @@ FactoryBot.define do
state { :stopped }
end
+ trait :stopping do
+ state { :stopping }
+ end
+
trait :production do
name { 'production' }
end
diff --git a/spec/factories/group_members.rb b/spec/factories/group_members.rb
index e1841745cb4..4cc927a6520 100644
--- a/spec/factories/group_members.rb
+++ b/spec/factories/group_members.rb
@@ -53,20 +53,5 @@ FactoryBot.define do
member.update!(state: ::Member::STATE_ACTIVE)
end
end
-
- transient do
- tasks_to_be_done { [] }
- end
-
- after(:build) do |group_member, evaluator|
- if evaluator.tasks_to_be_done.present?
- build(
- :member_task,
- member: group_member,
- project: build(:project, namespace: group_member.source),
- tasks_to_be_done: evaluator.tasks_to_be_done
- )
- end
- end
end
end
diff --git a/spec/factories/integrations.rb b/spec/factories/integrations.rb
index b74b81d4db9..68751e68f05 100644
--- a/spec/factories/integrations.rb
+++ b/spec/factories/integrations.rb
@@ -25,6 +25,8 @@ FactoryBot.define do
factory :datadog_integration, class: 'Integrations::Datadog' do
project
active { true }
+ datadog_site { 'datadoghq.com' }
+ datadog_tags { 'key:value' }
api_key { 'secret' }
end
@@ -34,13 +36,10 @@ FactoryBot.define do
active { true }
push_events { true }
tag_push_events { true }
- properties do
- {
- recipients: 'test@example.com',
- disable_diffs: true,
- send_from_committer_email: true
- }
- end
+ recipients { 'foo@bar.com' }
+ disable_diffs { true }
+ send_from_committer_email { true }
+ branches_to_be_notified { 'all' }
end
factory :gitlab_slack_application_integration, class: 'Integrations::GitlabSlackApplication' do
@@ -70,24 +69,18 @@ FactoryBot.define do
project
type { 'Integrations::Packagist' }
active { true }
- properties do
- {
- username: 'username',
- token: 'test',
- server: 'https://packagist.example.com'
- }
- end
+ username { 'username' }
+ token { 'secrettoken' }
+ server { 'https://packagist.example.comp' }
end
factory :prometheus_integration, class: 'Integrations::Prometheus' do
project
active { true }
- properties do
- {
- api_url: 'https://prometheus.example.com/',
- manual_configuration: true
- }
- end
+ api_url { 'https://prometheus.example.com/' }
+ manual_configuration { true }
+ google_iap_audience_client_id { 'IAP_CLIENT_ID.apps.googleusercontent.com' }
+ google_iap_service_account_json { '{ type: "service_account", project_id: "123" }' }
end
factory :bamboo_integration, class: 'Integrations::Bamboo' do
@@ -103,6 +96,7 @@ FactoryBot.define do
project
active { true }
drone_url { 'https://drone.example.com' }
+ enable_ssl_verification { false }
token { 'test' }
end
@@ -110,14 +104,14 @@ FactoryBot.define do
project
active { true }
type { 'Integrations::Jira' }
+ url { 'https://jira.example.com' }
+ api_url { '' }
+ username { 'jira_username' }
+ password { 'jira_password' }
+ jira_auth_type { 0 }
transient do
create_data { true }
- url { 'https://jira.example.com' }
- api_url { '' }
- username { 'jira_username' }
- password { 'jira_password' }
- jira_auth_type { 0 }
jira_issue_transition_automatic { false }
jira_issue_transition_id { '56-1' }
issues_enabled { false }
@@ -130,6 +124,8 @@ FactoryBot.define do
end
after(:build) do |integration, evaluator|
+ integration.instance_variable_set(:@old_data_fields, nil)
+
if evaluator.create_data
integration.jira_tracker_data = build(:jira_tracker_data,
integration: integration, url: evaluator.url, api_url: evaluator.api_url,
@@ -199,7 +195,8 @@ FactoryBot.define do
factory :youtrack_integration, class: 'Integrations::Youtrack' do
project
active { true }
- issue_tracker
+ project_url { 'http://issuetracker.example.com' }
+ issues_url { 'http://issues.example.com/issues/:id' }
end
factory :ewm_integration, class: 'Integrations::Ewm' do
@@ -211,15 +208,17 @@ FactoryBot.define do
factory :clickup_integration, class: 'Integrations::Clickup' do
project
active { true }
- issue_tracker
+ project_url { 'http://issuetracker.example.com' }
+ issues_url { 'http://issues.example.com/issues/:id' }
end
trait :issue_tracker do
+ project_url { 'http://issuetracker.example.com' }
+ issues_url { 'http://issues.example.com/issues/:id' }
+ new_issue_url { 'http://new-issue.example.com' }
+
transient do
create_data { true }
- project_url { 'http://issuetracker.example.com' }
- issues_url { 'http://issues.example.com/issues/:id' }
- new_issue_url { 'http://new-issue.example.com' }
end
after(:build) do |integration, evaluator|
@@ -248,6 +247,15 @@ FactoryBot.define do
trait :chat_notification do
sequence(:webhook) { |n| "https://example.com/webhook/#{n}" }
+ push_events { false }
+ issues_events { false }
+ confidential_issues_events { false }
+ merge_requests_events { false }
+ note_events { false }
+ confidential_note_events { false }
+ tag_push_events { false }
+ pipeline_events { false }
+ wiki_page_events { false }
end
trait :inactive do
@@ -265,6 +273,116 @@ FactoryBot.define do
chat_notification
project
type { 'Integrations::Mattermost' }
+ labels_to_be_notified_behavior { 'match_any' }
+ active { true }
+ end
+
+ factory :microsoft_teams_integration, class: 'Integrations::MicrosoftTeams' do
+ chat_notification
+ project
+ type { 'Integrations::MicrosoftTeams' }
+ active { true }
+ end
+
+ factory :asana_integration, class: 'Integrations::Asana' do
+ project
+ api_key { 'secrettoken' }
+ active { true }
+ end
+
+ factory :assembla_integration, class: 'Integrations::Assembla' do
+ project
+ token { 'secrettoken' }
+ active { true }
+ end
+
+ factory :buildkite_integration, class: 'Integrations::Buildkite' do
+ project
+ token { 'secrettoken' }
+ project_url { 'http://example.com' }
+ active { true }
+ end
+
+ factory :campfire_integration, class: 'Integrations::Campfire' do
+ project
+ active { true }
+ room { '1234' }
+ token { 'test' }
+ end
+
+ factory :hangouts_chat_integration, class: 'Integrations::HangoutsChat' do
+ chat_notification
+ project
+ type { 'Integrations::HangoutsChat' }
+ active { true }
+ end
+
+ factory :irker_integration, class: 'Integrations::Irker' do
+ project
+ recipients { 'irc://irc.network.net:666/#channel' }
+ server_port { 1234 }
+ type { 'Integrations::Irker' }
+ active { true }
+ end
+
+ factory :mattermost_slash_commands_integration, class: 'Integrations::MattermostSlashCommands' do
+ project
+ token { 'secrettoken' }
+ active { true }
+ end
+
+ factory :mock_ci_integration, class: 'Integrations::MockCi' do
+ project
+ mock_service_url { 'http://example.com' }
+ type { 'Integrations::MockCi' }
+ active { true }
+ end
+
+ factory :mock_monitoring_integration, class: 'Integrations::MockMonitoring' do
+ project
+ type { 'Integrations::MockMonitoring' }
+ active { true }
+ end
+
+ factory :pumble_integration, class: 'Integrations::Pumble' do
+ project
+ chat_notification
+ type { 'Integrations::Pumble' }
+ active { true }
+ end
+
+ factory :pushover_integration, class: 'Integrations::Pushover' do
+ project
+ type { 'Integrations::Pushover' }
+ api_key { 'secrettoken' }
+ user_key { 'secretkey' }
+ priority { "0" }
+ active { true }
+ device { nil }
+ sound { nil }
+ end
+
+ factory :teamcity_integration, class: 'Integrations::Teamcity' do
+ project
+ teamcity_url { 'http://example.com' }
+ username { 'username' }
+ password { 'secrettoken' }
+ build_type { '123' }
+ type { 'Integrations::Teamcity' }
+ active { true }
+ end
+
+ factory :unify_circuit_integration, class: 'Integrations::UnifyCircuit' do
+ project
+ chat_notification
+ type { 'Integrations::UnifyCircuit' }
+ active { true }
+ end
+
+ factory :webex_teams_integration, class: 'Integrations::WebexTeams' do
+ project
+ chat_notification
+ type { 'Integrations::WebexTeams' }
active { true }
end
@@ -279,6 +397,7 @@ FactoryBot.define do
factory :slack_slash_commands_integration, class: 'Integrations::SlackSlashCommands' do
project
active { true }
+ token { 'secrettoken' }
type { 'Integrations::SlackSlashCommands' }
end
diff --git a/spec/factories/member_tasks.rb b/spec/factories/member_tasks.rb
deleted file mode 100644
index 133ccce5f8a..00000000000
--- a/spec/factories/member_tasks.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-# frozen_string_literal: true
-
-FactoryBot.define do
- factory :member_task do
- member { association(:group_member, :invited) }
- project { association(:project, namespace: member.source) }
- tasks_to_be_done { [:ci, :code] }
- end
-end
diff --git a/spec/factories/ml/candidate_metrics.rb b/spec/factories/ml/candidate_metrics.rb
index 28e3974d39f..633234e5962 100644
--- a/spec/factories/ml/candidate_metrics.rb
+++ b/spec/factories/ml/candidate_metrics.rb
@@ -6,7 +6,7 @@ FactoryBot.define do
sequence(:name) { |n| "metric#{n}" }
value { 2.0 }
- step { 1 }
+ step { 0 }
tracked_at { 1234 }
end
end
diff --git a/spec/factories/notes.rb b/spec/factories/notes.rb
index b1e7866f9ce..fcba413e802 100644
--- a/spec/factories/notes.rb
+++ b/spec/factories/notes.rb
@@ -18,7 +18,8 @@ FactoryBot.define do
factory :note_on_personal_snippet, traits: [:on_personal_snippet]
factory :note_on_design, traits: [:on_design]
factory :note_on_alert, traits: [:on_alert]
- factory :system_note, traits: [:system]
+ factory :note_on_abuse_report, traits: [:on_abuse_report]
+ factory :system_note, traits: [:system]
factory :discussion_note, class: 'DiscussionNote'
@@ -39,6 +40,8 @@ FactoryBot.define do
factory :discussion_note_on_project_snippet, traits: [:on_project_snippet], class: 'DiscussionNote'
+ factory :discussion_note_on_abuse_report, traits: [:on_abuse_report], class: 'DiscussionNote'
+
factory :legacy_diff_note_on_commit, traits: [:on_commit, :legacy_diff_note], class: 'LegacyDiffNote'
factory :legacy_diff_note_on_merge_request, traits: [:on_merge_request, :legacy_diff_note], class: 'LegacyDiffNote' do
@@ -166,6 +169,11 @@ FactoryBot.define do
noteable { association(:alert_management_alert, project: project) }
end
+ trait :on_abuse_report do
+ noteable { association(:abuse_report) }
+ project { nil }
+ end
+
trait :resolved do
resolved_at { Time.now }
resolved_by { association(:user) }
diff --git a/spec/factories/packages/package_files.rb b/spec/factories/packages/package_files.rb
index 4a2d412832c..61405e9c04c 100644
--- a/spec/factories/packages/package_files.rb
+++ b/spec/factories/packages/package_files.rb
@@ -284,7 +284,7 @@ FactoryBot.define do
end
trait(:nuget) do
- package
+ package { association(:nuget_package, without_package_files: true) }
file_fixture { 'spec/fixtures/packages/nuget/package.nupkg' }
file_name { 'package.nupkg' }
file_sha1 { '5fe852b2a6abd96c22c11fa1ff2fb19d9ce58b57' }
@@ -292,7 +292,7 @@ FactoryBot.define do
end
trait(:snupkg) do
- package
+ package { association(:nuget_package) }
file_fixture { 'spec/fixtures/packages/nuget/package.snupkg' }
file_name { 'package.snupkg' }
file_sha1 { '5fe852b2a6abd96c22c11fa1ff2fb19d9ce58b57' }
diff --git a/spec/factories/packages/packages.rb b/spec/factories/packages/packages.rb
index caec7580e46..7896cfd8bb8 100644
--- a/spec/factories/packages/packages.rb
+++ b/spec/factories/packages/packages.rb
@@ -199,8 +199,14 @@ FactoryBot.define do
sequence(:version) { |n| "1.0.#{n}" }
package_type { :nuget }
- after :create do |package|
- create :package_file, :nuget, package: package, file_name: "#{package.name}.#{package.version}.nupkg"
+ transient do
+ without_package_files { false }
+ end
+
+ after :create do |package, evaluator|
+ unless evaluator.without_package_files
+ create :package_file, :nuget, package: package, file_name: "#{package.name}.#{package.version}.nupkg"
+ end
end
trait(:with_metadatum) do
@@ -214,6 +220,12 @@ FactoryBot.define do
create :package_file, :snupkg, package: package, file_name: "#{package.name}.#{package.version}.snupkg"
end
end
+
+ trait :with_build do
+ after :create do |package|
+ create(:package_build_info, package: package)
+ end
+ end
end
factory :pypi_package do
diff --git a/spec/factories/packages/package_protection_rules.rb b/spec/factories/packages/protection/rules.rb
index 3038fb847e7..f65a9d3e64d 100644
--- a/spec/factories/packages/package_protection_rules.rb
+++ b/spec/factories/packages/protection/rules.rb
@@ -5,6 +5,6 @@ FactoryBot.define do
project
package_name_pattern { '@my_scope/my_package' }
package_type { :npm }
- push_protected_up_to_access_level { Gitlab::Access::DEVELOPER }
+ push_protected_up_to_access_level { :developer }
end
end
diff --git a/spec/factories/pages_deployments.rb b/spec/factories/pages_deployments.rb
index d3e2fefb4ae..eaa3a68770f 100644
--- a/spec/factories/pages_deployments.rb
+++ b/spec/factories/pages_deployments.rb
@@ -8,10 +8,6 @@ FactoryBot.define do
filename { nil }
end
- trait(:migrated) do
- filename { PagesDeployment::MIGRATED_FILE_NAME }
- end
-
after(:build) do |deployment, evaluator|
file = UploadedFile.new("spec/fixtures/pages.zip", filename: evaluator.filename)
diff --git a/spec/factories/project_members.rb b/spec/factories/project_members.rb
index fb62b2ed951..ccc3eb36c54 100644
--- a/spec/factories/project_members.rb
+++ b/spec/factories/project_members.rb
@@ -43,15 +43,5 @@ FactoryBot.define do
member.update!(state: ::Member::STATE_ACTIVE)
end
end
-
- transient do
- tasks_to_be_done { [] }
- end
-
- after(:build) do |project_member, evaluator|
- if evaluator.tasks_to_be_done.present?
- build(:member_task, member: project_member, project: project_member.source, tasks_to_be_done: evaluator.tasks_to_be_done)
- end
- end
end
end
diff --git a/spec/factories/users.rb b/spec/factories/users.rb
index d61d5cc2d78..de2b5159fe7 100644
--- a/spec/factories/users.rb
+++ b/spec/factories/users.rb
@@ -48,6 +48,15 @@ FactoryBot.define do
after(:build) { |user, _| user.ban! }
end
+ trait :trusted do
+ after(:create) do |user, _|
+ user.custom_attributes.create!(
+ key: UserCustomAttribute::TRUSTED_BY,
+ value: "placeholder"
+ )
+ end
+ end
+
trait :ldap_blocked do
after(:build) { |user, _| user.ldap_block! }
end
diff --git a/spec/factories/users/credit_card_validations.rb b/spec/factories/users/credit_card_validations.rb
index 509e86e7bd3..fac53a54c12 100644
--- a/spec/factories/users/credit_card_validations.rb
+++ b/spec/factories/users/credit_card_validations.rb
@@ -4,7 +4,7 @@ FactoryBot.define do
factory :credit_card_validation, class: 'Users::CreditCardValidation' do
user
sequence(:credit_card_validated_at) { |n| Time.current + n }
- expiration_date { 1.year.from_now.end_of_month }
+ expiration_date { 1.year.from_now.to_date }
last_digits { 10 }
holder_name { 'John Smith' }
network { 'AmericanExpress' }
diff --git a/spec/factories/users/in_product_marketing_email.rb b/spec/factories/users/in_product_marketing_email.rb
index 42309319bf3..c86c469ff31 100644
--- a/spec/factories/users/in_product_marketing_email.rb
+++ b/spec/factories/users/in_product_marketing_email.rb
@@ -6,11 +6,5 @@ FactoryBot.define do
track { 'create' }
series { 0 }
-
- trait :campaign do
- track { nil }
- series { nil }
- campaign { Users::InProductMarketingEmail::BUILD_IOS_APP_GUIDE }
- end
end
end
diff --git a/spec/factories/vs_code/settings/vs_code_settings.rb b/spec/factories/vs_code/settings/vs_code_settings.rb
new file mode 100644
index 00000000000..d3ffca84580
--- /dev/null
+++ b/spec/factories/vs_code/settings/vs_code_settings.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :vscode_setting, class: 'VsCode::Settings::VsCodeSetting' do
+ user
+
+ setting_type { 'settings' }
+ content { '{}' }
+ uuid { SecureRandom.uuid }
+ version { 1 }
+ end
+end
diff --git a/spec/factories/work_items/related_link_restrictions.rb b/spec/factories/work_items/related_link_restrictions.rb
new file mode 100644
index 00000000000..c0e4f188b5f
--- /dev/null
+++ b/spec/factories/work_items/related_link_restrictions.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :related_link_restriction, class: 'WorkItems::RelatedLinkRestriction' do
+ source_type { association :work_item_type, :default }
+ target_type { association :work_item_type, :default }
+ link_type { 0 }
+
+ initialize_with do
+ WorkItems::RelatedLinkRestriction
+ .find_or_initialize_by(source_type: source_type, target_type: target_type, link_type: link_type)
+ end
+ end
+end
diff --git a/spec/features/admin/admin_jobs_spec.rb b/spec/features/admin/admin_jobs_spec.rb
index b305bec6493..b125974532b 100644
--- a/spec/features/admin/admin_jobs_spec.rb
+++ b/spec/features/admin/admin_jobs_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Admin Jobs', :js, feature_category: :continuous_integration do
+ include FilteredSearchHelpers
+
before do
admin = create(:admin)
sign_in(admin)
@@ -26,9 +28,9 @@ RSpec.describe 'Admin Jobs', :js, feature_category: :continuous_integration do
expect(page).to have_selector('[data-testid="jobs-all-tab"]')
expect(page.all('[data-testid="jobs-table-row"]').size).to eq(4)
- expect(page).to have_button 'Cancel all jobs'
click_button 'Cancel all jobs'
+
expect(page).to have_button 'Yes, proceed'
expect(page).to have_content 'Are you sure?'
end
@@ -88,5 +90,51 @@ RSpec.describe 'Admin Jobs', :js, feature_category: :continuous_integration do
end
end
end
+
+ context 'jobs table links' do
+ let_it_be(:namespace) { create(:namespace) }
+ let_it_be(:project) { create(:project, namespace: namespace) }
+ let_it_be(:runner) { create(:ci_runner, :instance) }
+
+ it 'displays correct links' do
+ pipeline = create(:ci_pipeline, project: project)
+ job = create(:ci_build, pipeline: pipeline, status: :success, runner: runner)
+
+ visit admin_jobs_path
+
+ wait_for_requests
+
+ within_testid('jobs-table') do
+ expect(page).to have_link(href: project_job_path(project, job))
+ expect(page).to have_link(href: project_pipeline_path(project, pipeline))
+ expect(find_by_testid('job-project-link')['href']).to include(project_path(project))
+ expect(find_by_testid('job-runner-link')['href']).to include("/admin/runners/#{runner.id}")
+ end
+ end
+ end
+
+ context 'job filtering' do
+ it 'filters jobs by status' do
+ create(:ci_build, pipeline: pipeline, status: :success)
+ create(:ci_build, pipeline: pipeline, status: :failed)
+
+ visit admin_jobs_path
+
+ wait_for_requests
+
+ within_testid('jobs-table') do
+ expect(page).to have_selector('[data-testid="jobs-table-row"]', count: 2)
+ end
+
+ select_tokens 'Status', 'Failed', submit: true, input_text: 'Filter jobs'
+
+ wait_for_requests
+
+ within_testid('jobs-table') do
+ expect(page).to have_selector('[data-testid="jobs-table-row"]', count: 1)
+ expect(find_by_testid('ci-badge-text')).to have_content('Failed')
+ end
+ end
+ end
end
end
diff --git a/spec/features/admin/admin_runners_spec.rb b/spec/features/admin/admin_runners_spec.rb
index e0f4473c80c..9edd970532e 100644
--- a/spec/features/admin/admin_runners_spec.rb
+++ b/spec/features/admin/admin_runners_spec.rb
@@ -116,7 +116,7 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do
expect(current_url).to match(admin_runner_path(runner))
- expect(find("[data-testid='td-status']")).to have_content "running"
+ expect(find("[data-testid='td-status']")).to have_content "Running"
expect(find("[data-testid='td-job']")).to have_content "##{job.id}"
end
@@ -510,7 +510,6 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do
:ci_runner,
description: 'runner-foo',
version: '14.0',
- ip_address: '127.0.0.1',
tag_list: ['tag1']
)
end
@@ -535,8 +534,6 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do
aggregate_failures do
expect(page).to have_content 'Description runner-foo'
expect(page).to have_content 'Last contact Never contacted'
- expect(page).to have_content 'Version 14.0'
- expect(page).to have_content 'IP Address 127.0.0.1'
expect(page).to have_content 'Configuration Runs untagged jobs'
expect(page).to have_content 'Maximum job timeout None'
expect(page).to have_content 'Tags tag1'
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index e87f47e5234..1b10ea81333 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -103,7 +103,7 @@ RSpec.describe 'Admin updates settings', feature_category: :shared do
end
it 'change Account and Limit Settings' do
- page.within(find('[data-testid="account-limit"]')) do
+ page.within(find('[data-testid="account-and-limit-settings-content"]')) do
uncheck 'Gravatar enabled'
click_button 'Save changes'
end
@@ -165,43 +165,44 @@ RSpec.describe 'Admin updates settings', feature_category: :shared do
expect(page).to have_field('Days of inactivity before deactivation')
end
- it 'changes dormant users', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/408224' do
- expect(page).to have_unchecked_field('Deactivate dormant users after a period of inactivity')
+ it 'changes dormant users', :js do
+ expect(page).to have_unchecked_field(_('Deactivate dormant users after a period of inactivity'))
expect(current_settings.deactivate_dormant_users).to be_falsey
- page.within(find('[data-testid="account-limit"]')) do
- check 'application_setting_deactivate_dormant_users'
- click_button 'Save changes'
+ page.within(find('[data-testid="account-and-limit-settings-content"]')) do
+ check _('Deactivate dormant users after a period of inactivity')
+ click_button _('Save changes')
end
- expect(page).to have_content "Application settings saved successfully"
+ expect(page).to have_content _('Application settings saved successfully')
page.refresh
+ expect(page).to have_checked_field(_('Deactivate dormant users after a period of inactivity'))
expect(current_settings.deactivate_dormant_users).to be_truthy
- expect(page).to have_checked_field('Deactivate dormant users after a period of inactivity')
end
- it 'change dormant users period', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/408224' do
- expect(page).to have_field _('Days of inactivity before deactivation')
+ it 'change dormant users period', :js do
+ expect(page).to have_field(_('Days of inactivity before deactivation'), disabled: true)
- page.within(find('[data-testid="account-limit"]')) do
- fill_in _('application_setting_deactivate_dormant_users_period'), with: '90'
- click_button 'Save changes'
+ page.within(find('[data-testid="account-and-limit-settings-content"]')) do
+ check _('Deactivate dormant users after a period of inactivity')
+ fill_in _('Days of inactivity before deactivation'), with: '180'
+ click_button _('Save changes')
end
- expect(page).to have_content "Application settings saved successfully"
+ expect(page).to have_content _('Application settings saved successfully')
page.refresh
- expect(page).to have_field _('Days of inactivity before deactivation'), with: '90'
+ expect(page).to have_field(_('Days of inactivity before deactivation'), disabled: false, with: '180')
end
it 'displays dormant users period field validation error', :js do
selector = '#application_setting_deactivate_dormant_users_period_error'
expect(page).not_to have_selector(selector, visible: :visible)
- page.within(find('[data-testid="account-limit"]')) do
+ page.within(find('[data-testid="account-and-limit-settings-content"]')) do
check 'application_setting_deactivate_dormant_users'
fill_in _('application_setting_deactivate_dormant_users_period'), with: '30'
click_button 'Save changes'
@@ -730,6 +731,8 @@ RSpec.describe 'Admin updates settings', feature_category: :shared do
fill_in 'Maximum authenticated web requests per rate limit period per user', with: 700
fill_in 'Authenticated web rate limit period in seconds', with: 800
+ fill_in "Maximum authenticated requests to project/:id/jobs per minute", with: 1000
+
fill_in 'Plain-text response to send to clients that hit a rate limit', with: 'Custom message'
click_button 'Save changes'
@@ -750,6 +753,7 @@ RSpec.describe 'Admin updates settings', feature_category: :shared do
throttle_authenticated_web_enabled: true,
throttle_authenticated_web_requests_per_period: 700,
throttle_authenticated_web_period_in_seconds: 800,
+ project_jobs_api_rate_limit: 1000,
rate_limiting_response_text: 'Custom message'
)
end
@@ -883,12 +887,9 @@ RSpec.describe 'Admin updates settings', feature_category: :shared do
context 'Preferences page' do
before do
- stub_feature_flags(deactivation_email_additional_text: deactivation_email_additional_text_feature_flag)
visit preferences_admin_application_settings_path
end
- let(:deactivation_email_additional_text_feature_flag) { true }
-
describe 'Email page' do
context 'when deactivation email additional text feature flag is enabled' do
it 'shows deactivation email additional text field' do
@@ -903,14 +904,6 @@ RSpec.describe 'Admin updates settings', feature_category: :shared do
expect(current_settings.deactivation_email_additional_text).to eq('So long and thanks for all the fish!')
end
end
-
- context 'when deactivation email additional text feature flag is disabled' do
- let(:deactivation_email_additional_text_feature_flag) { false }
-
- it 'does not show deactivation email additional text field' do
- expect(page).not_to have_field 'Additional text for deactivation email'
- end
- end
end
it 'change Help page' do
diff --git a/spec/features/alert_management/alert_details_spec.rb b/spec/features/alert_management/alert_details_spec.rb
index b377d3a092b..66b7a9ca46c 100644
--- a/spec/features/alert_management/alert_details_spec.rb
+++ b/spec/features/alert_management/alert_details_spec.rb
@@ -63,7 +63,7 @@ RSpec.describe 'Alert details', :js, feature_category: :incident_management do
expect(alert_status).to have_content('Triggered')
find('.gl-button').click
- find('.gl-dropdown-item', text: 'Acknowledged').click
+ find('.gl-new-dropdown-item', text: 'Acknowledged').click
wait_for_requests
diff --git a/spec/features/alert_management/user_updates_alert_status_spec.rb b/spec/features/alert_management/user_updates_alert_status_spec.rb
index 98fd7449c4f..563b5072782 100644
--- a/spec/features/alert_management/user_updates_alert_status_spec.rb
+++ b/spec/features/alert_management/user_updates_alert_status_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe 'User updates Alert Management status', :js, feature_category: :i
it 'updates the alert status' do
find('.dropdown-menu-selectable').click
- find('.dropdown-item', text: 'Acknowledged').click
+ find('.gl-new-dropdown-item', text: 'Acknowledged').click
wait_for_requests
expect(find('.dropdown-menu-selectable')).to have_content('Acknowledged')
diff --git a/spec/features/boards/sidebar_labels_in_namespaces_spec.rb b/spec/features/boards/sidebar_labels_in_namespaces_spec.rb
index ffed4a0854f..68c2b2587e7 100644
--- a/spec/features/boards/sidebar_labels_in_namespaces_spec.rb
+++ b/spec/features/boards/sidebar_labels_in_namespaces_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'Issue boards sidebar labels select', :js, feature_category: :tea
include_context 'labels from nested groups and projects'
- let(:card) { find('.board:nth-child(1)').first('[data-testid="board_card"]') }
+ let(:card) { find('.board:nth-child(1)').first('[data-testid="board-card"]') }
context 'group boards' do
context 'in the top-level group board' do
diff --git a/spec/features/boards/sidebar_spec.rb b/spec/features/boards/sidebar_spec.rb
index 358da1e1279..71cc9a28575 100644
--- a/spec/features/boards/sidebar_spec.rb
+++ b/spec/features/boards/sidebar_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe 'Project issue boards sidebar', :js, feature_category: :team_plan
it_behaves_like 'issue boards sidebar'
def first_card
- find('.board:nth-child(1)').first("[data-testid='board_card']")
+ find('.board:nth-child(1)').first("[data-testid='board-card']")
end
def click_first_issue_card
diff --git a/spec/features/boards/user_visits_board_spec.rb b/spec/features/boards/user_visits_board_spec.rb
index 5867ec17070..4741f58d883 100644
--- a/spec/features/boards/user_visits_board_spec.rb
+++ b/spec/features/boards/user_visits_board_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe 'User visits issue boards', :js, feature_category: :team_planning
it 'displays all issues satisfiying filter params and correctly sets url params' do
expect(page).to have_current_path(board_path)
- page.assert_selector('[data-testid="board_card"]', count: expected_issues.length)
+ page.assert_selector('[data-testid="board-card"]', count: expected_issues.length)
expected_issues.each { |issue_title| expect(page).to have_link issue_title }
end
end
diff --git a/spec/features/commits_spec.rb b/spec/features/commits_spec.rb
index b72e08b854e..5f880af37dc 100644
--- a/spec/features/commits_spec.rb
+++ b/spec/features/commits_spec.rb
@@ -81,7 +81,7 @@ RSpec.describe 'Commits', feature_category: :source_code_management do
it 'shows correct build status from default branch' do
page.within("//li[@id='commit-#{pipeline.short_sha}']") do
- expect(page).to have_css('.ci-status-link')
+ expect(page).to have_css("[data-testid='ci-status-badge-legacy']")
expect(page).to have_css('.ci-status-icon-success')
end
end
@@ -115,7 +115,7 @@ RSpec.describe 'Commits', feature_category: :source_code_management do
it 'cancels commit', :js, :sidekiq_might_not_need_inline do
visit pipeline_path(pipeline)
click_on 'Cancel pipeline'
- expect(page).to have_content 'canceled'
+ expect(page).to have_content 'Canceled'
end
end
@@ -123,7 +123,7 @@ RSpec.describe 'Commits', feature_category: :source_code_management do
it 'cancels build', :js, :sidekiq_might_not_need_inline do
visit pipeline_path(pipeline)
find('[data-testid="cancel-pipeline"]').click
- expect(page).to have_content 'canceled'
+ expect(page).to have_content 'Canceled'
end
end
end
diff --git a/spec/features/cycle_analytics_spec.rb b/spec/features/cycle_analytics_spec.rb
index 4fe05abd73b..a2551c72877 100644
--- a/spec/features/cycle_analytics_spec.rb
+++ b/spec/features/cycle_analytics_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe 'Value Stream Analytics', :js, feature_category: :value_stream_ma
let_it_be(:stage_table_duration_column_header_selector) { '[data-testid="vsa-stage-header-duration"]' }
let_it_be(:metrics_selector) { "[data-testid='vsa-metrics']" }
let_it_be(:metric_value_selector) { "[data-testid='displayValue']" }
+ let_it_be(:predefined_date_ranges_dropdown_selector) { '[data-testid="vsa-predefined-date-ranges-dropdown"]' }
let(:stage_table) { find(stage_table_selector) }
let(:project) { create(:project, :repository) }
@@ -92,6 +93,43 @@ RSpec.describe 'Value Stream Analytics', :js, feature_category: :value_stream_ma
let(:stage_table_events) { stage_table.all(stage_table_event_selector) }
+ shared_examples 'filters the issues by date' do
+ it 'can filter the issues by date' do
+ expect(page).to have_selector(stage_table_event_selector)
+
+ set_daterange(from, to)
+
+ expect(page).not_to have_selector(stage_table_event_selector)
+ expect(page).not_to have_selector(stage_table_pagination_selector)
+ end
+ end
+
+ shared_examples 'filters the metrics by date' do
+ it 'can filter the metrics by date' do
+ expect(metrics_values).to match_array(%w[21 2 1])
+
+ set_daterange(from, to)
+
+ expect(metrics_values).to eq(['-'] * 3)
+ end
+ end
+
+ shared_examples 'navigates directly to a value stream stream stage with filters applied' do
+ before do
+ visit project_cycle_analytics_path(project, created_before: '2019-12-31', created_after: '2019-11-01', stage_id: 'code', milestone_title: milestone.title)
+ wait_for_requests
+ end
+
+ it 'can navigate directly to a value stream stream stage with filters applied' do
+ expect(page).to have_selector('.gl-path-active-item-indigo', text: 'Code')
+ expect(page.find(".js-daterange-picker-from input").value).to eq("2019-11-01")
+ expect(page.find(".js-daterange-picker-to input").value).to eq("2019-12-31")
+
+ filter_bar = page.find(stage_filter_bar)
+ expect(filter_bar.find(".gl-filtered-search-token-data-content").text).to eq("%#{milestone.title}")
+ end
+ end
+
it 'displays metrics' do
metrics_tiles = page.find(metrics_selector)
@@ -121,23 +159,6 @@ RSpec.describe 'Value Stream Analytics', :js, feature_category: :value_stream_ma
expect_merge_request_to_be_present
end
- it 'can filter the issues by date' do
- expect(page).to have_selector(stage_table_event_selector)
-
- set_daterange(from, to)
-
- expect(page).not_to have_selector(stage_table_event_selector)
- expect(page).not_to have_selector(stage_table_pagination_selector)
- end
-
- it 'can filter the metrics by date' do
- expect(metrics_values).to match_array(%w[21 2 1])
-
- set_daterange(from, to)
-
- expect(metrics_values).to eq(['-'] * 3)
- end
-
it 'can sort records', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/338332' do
# NOTE: checking that the string changes should suffice
# depending on the order the tests are run we might run into problems with hard coded strings
@@ -163,16 +184,43 @@ RSpec.describe 'Value Stream Analytics', :js, feature_category: :value_stream_ma
expect(page).not_to have_text(original_first_title, exact: true)
end
- it 'can navigate directly to a value stream stream stage with filters applied' do
- visit project_cycle_analytics_path(project, created_before: '2019-12-31', created_after: '2019-11-01', stage_id: 'code', milestone_title: milestone.title)
- wait_for_requests
+ context 'when the `vsa_predefined_date_ranges` feature flag is enabled' do
+ before do
+ visit project_cycle_analytics_path(project)
+
+ wait_for_requests
+ end
+
+ it 'shows predefined date ranges dropdown with `Custom` option selected' do
+ page.within(predefined_date_ranges_dropdown_selector) do
+ expect(page).to have_button('Custom')
+ end
+ end
+
+ it_behaves_like 'filters the issues by date'
+
+ it_behaves_like 'filters the metrics by date'
+
+ it_behaves_like 'navigates directly to a value stream stream stage with filters applied'
+ end
+
+ context 'when the `vsa_predefined_date_ranges` feature flag is disabled' do
+ before do
+ stub_feature_flags(vsa_predefined_date_ranges: false)
+ visit project_cycle_analytics_path(project)
+
+ wait_for_requests
+ end
+
+ it 'does not show predefined date ranges dropdown' do
+ expect(page).not_to have_css(predefined_date_ranges_dropdown_selector)
+ end
+
+ it_behaves_like 'filters the issues by date'
- expect(page).to have_selector('.gl-path-active-item-indigo', text: 'Code')
- expect(page.find(".js-daterange-picker-from input").value).to eq("2019-11-01")
- expect(page.find(".js-daterange-picker-to input").value).to eq("2019-12-31")
+ it_behaves_like 'filters the metrics by date'
- filter_bar = page.find(stage_filter_bar)
- expect(filter_bar.find(".gl-filtered-search-token-data-content").text).to eq("%#{milestone.title}")
+ it_behaves_like 'navigates directly to a value stream stream stage with filters applied'
end
def stage_time_column
diff --git a/spec/features/dashboard/projects_spec.rb b/spec/features/dashboard/projects_spec.rb
index e5ad9808f83..90ad6fcea25 100644
--- a/spec/features/dashboard/projects_spec.rb
+++ b/spec/features/dashboard/projects_spec.rb
@@ -153,7 +153,7 @@ RSpec.describe 'Dashboard Projects', feature_category: :groups_and_projects do
page.within('[data-testid="project_controls"]') do
expect(page).to have_xpath("//a[@href='#{pipelines_project_commit_path(project, project.commit, ref: pipeline.ref)}']")
- expect(page).to have_css('.ci-status-link')
+ expect(page).to have_css("[data-testid='ci-status-badge']")
expect(page).to have_css('.ci-status-icon-success')
expect(page).to have_link('Pipeline: passed')
end
@@ -165,7 +165,7 @@ RSpec.describe 'Dashboard Projects', feature_category: :groups_and_projects do
page.within('[data-testid="project_controls"]') do
expect(page).not_to have_xpath("//a[@href='#{pipelines_project_commit_path(project, project.commit, ref: pipeline.ref)}']")
- expect(page).not_to have_css('.ci-status-link')
+ expect(page).not_to have_css("[data-testid='ci-status-badge']")
expect(page).not_to have_css('.ci-status-icon-success')
expect(page).not_to have_link('Pipeline: passed')
end
diff --git a/spec/features/dashboard/todos/todos_filtering_spec.rb b/spec/features/dashboard/todos/todos_filtering_spec.rb
index 990b2f18120..c38b3ab3e80 100644
--- a/spec/features/dashboard/todos/todos_filtering_spec.rb
+++ b/spec/features/dashboard/todos/todos_filtering_spec.rb
@@ -188,4 +188,47 @@ RSpec.describe 'Dashboard > User filters todos', :js, feature_category: :team_pl
end
end
end
+
+ describe 'todos tab count' do
+ context 'when filtering by open todos' do
+ it 'includes all open todos' do
+ expect(find('.js-todos-pending .gl-badge')).to have_content('3')
+ end
+
+ it 'only counts open todos that match when filtered by project' do
+ click_button 'Project'
+
+ within '.dropdown-menu-project' do
+ fill_in 'Search projects', with: project_1.full_name
+ click_link project_1.full_name
+ end
+
+ expect(find('.js-todos-pending .gl-badge')).to have_content('1')
+ end
+ end
+
+ context 'when filtering by done todos' do
+ before do
+ create(:todo, user: user_1, author: user_2, project: project_1, target: issue1, action: 1, state: :done)
+ create(:todo, user: user_1, author: user_1, project: project_2, target: merge_request, action: 2, state: :done)
+
+ visit dashboard_todos_path(state: 'done')
+ end
+
+ it 'includes all done todos' do
+ expect(find('.js-todos-done .gl-badge')).to have_content('2')
+ end
+
+ it 'only counts done todos that match when filtered by project' do
+ click_button 'Project'
+
+ within '.dropdown-menu-project' do
+ fill_in 'Search projects', with: project_1.full_name
+ click_link project_1.full_name
+ end
+
+ expect(find('.js-todos-done .gl-badge')).to have_content('1')
+ end
+ end
+ end
end
diff --git a/spec/features/dashboard/todos/todos_spec.rb b/spec/features/dashboard/todos/todos_spec.rb
index 5642d083673..ade7da0cb49 100644
--- a/spec/features/dashboard/todos/todos_spec.rb
+++ b/spec/features/dashboard/todos/todos_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'Dashboard Todos', feature_category: :team_planning do
let_it_be(:user) { create(:user, :no_super_sidebar, username: 'john') }
let_it_be(:user2) { create(:user, :no_super_sidebar, username: 'diane') }
+ let_it_be(:user3) { create(:user) }
let_it_be(:author) { create(:user, :no_super_sidebar) }
let_it_be(:project) { create(:project, :public) }
let_it_be(:issue) { create(:issue, project: project, due_date: Date.today, title: "Fix bug") }
@@ -424,6 +425,25 @@ RSpec.describe 'Dashboard Todos', feature_category: :team_planning do
wait_for_requests
end
end
+
+ describe 'shows a count of todos' do
+ before do
+ allow(Todo).to receive(:default_per_page).and_return(1)
+ create_list(:todo, 2, :mentioned, user: user3, project: project, target: issue, author: author, state: :pending)
+ create_list(:todo, 2, :mentioned, user: user3, project: project, target: issue, author: author, state: :done)
+ sign_in(user3)
+ end
+
+ it 'displays a count of all pending todos' do
+ visit dashboard_todos_path
+ expect(find('.js-todos-pending')).to have_content('2')
+ end
+
+ it 'displays a count of all done todos' do
+ visit dashboard_todos_path(state: 'done')
+ expect(find('.js-todos-done')).to have_content('2')
+ end
+ end
end
context 'User has a Build Failed todo' do
diff --git a/spec/features/discussion_comments/issue_spec.rb b/spec/features/discussion_comments/issue_spec.rb
index b270a4c7600..90be3f0760d 100644
--- a/spec/features/discussion_comments/issue_spec.rb
+++ b/spec/features/discussion_comments/issue_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'Thread Comments Issue', :js, feature_category: :source_code_management do
- include ContentEditorHelpers
-
let(:user) { create(:user) }
let(:project) { create(:project) }
let(:issue) { create(:issue, project: project) }
@@ -14,7 +12,6 @@ RSpec.describe 'Thread Comments Issue', :js, feature_category: :source_code_mana
sign_in(user)
visit project_issue_path(project, issue)
- close_rich_text_promo_popover_if_present
end
it_behaves_like 'thread comments for issue, epic and merge request', 'issue'
diff --git a/spec/features/expand_collapse_diffs_spec.rb b/spec/features/expand_collapse_diffs_spec.rb
index 43dd80187ce..7fbd6c4e235 100644
--- a/spec/features/expand_collapse_diffs_spec.rb
+++ b/spec/features/expand_collapse_diffs_spec.rb
@@ -236,7 +236,7 @@ RSpec.describe 'Expand and collapse diffs', :js, feature_category: :source_code_
expect(page).to have_selector('.diff-content', count: 5)
expect(page).to have_selector('.diff-collapsed', count: 5)
- %w(file-95.txt file-96.txt file-97.txt file-98.txt file-99.txt).each do |filename|
+ %w[file-95.txt file-96.txt file-97.txt file-98.txt file-99.txt].each do |filename|
expect(find("[data-blob-diff-path*='#{filename}']")).to have_selector('.diff-collapsed')
end
end
@@ -252,7 +252,7 @@ RSpec.describe 'Expand and collapse diffs', :js, feature_category: :source_code_
expect(page).to have_selector('.diff-content', count: 6)
expect(page).to have_selector('.diff-collapsed', count: 2)
- %w(file-4.txt file-5.txt).each do |filename|
+ %w[file-4.txt file-5.txt].each do |filename|
expect(find("[data-blob-diff-path*='#{filename}']")).to have_selector('.diff-collapsed')
end
end
diff --git a/spec/features/gitlab_experiments_spec.rb b/spec/features/gitlab_experiments_spec.rb
index c1417f6f7c5..facf4994c44 100644
--- a/spec/features/gitlab_experiments_spec.rb
+++ b/spec/features/gitlab_experiments_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe "Gitlab::Experiment", :js, feature_category: :experimentation_activation do
+RSpec.describe "Gitlab::Experiment", :js, feature_category: :activation do
# This is part of a set of tests that ensure that tracking remains
# consistent at the front end layer. Since we don't want to actually
# introduce an experiment in real code, we're going to simulate it
diff --git a/spec/features/groups/empty_states_spec.rb b/spec/features/groups/empty_states_spec.rb
index 30074f421e5..97d53db3ef1 100644
--- a/spec/features/groups/empty_states_spec.rb
+++ b/spec/features/groups/empty_states_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe 'Group empty states', feature_category: :groups_and_projects do
create(issuable, project_relation => project)
visit path
- expect(page).not_to have_selector('.empty-state')
+ expect(page).not_to have_selector('[data-testid="issuable-empty-state"]')
end
it "displays link to create new #{issuable} when no open #{issuable} is found", :js do
@@ -40,7 +40,7 @@ RSpec.describe 'Group empty states', feature_category: :groups_and_projects do
wait_for_all_requests
- page.within(find('.empty-state')) do
+ within_testid('issuable-empty-state') do
expect(page).to have_content(/There are no open #{issuable.to_s.humanize.downcase}/)
new_issuable_path = issuable == :issue ? 'new_project_issue_path' : 'project_new_merge_request_path'
@@ -59,7 +59,7 @@ RSpec.describe 'Group empty states', feature_category: :groups_and_projects do
wait_for_all_requests
- page.within(find('.empty-state')) do
+ within_testid('issuable-empty-state') do
expect(page).to have_content(/Sorry, your filter produced no results/)
new_issuable_path = issuable == :issue ? 'new_project_issue_path' : 'project_new_merge_request_path'
@@ -78,7 +78,7 @@ RSpec.describe 'Group empty states', feature_category: :groups_and_projects do
wait_for_all_requests
- page.within(find('.empty-state')) do
+ within_testid('issuable-empty-state') do
expect(page).to have_content(/There are no closed #{issuable.to_s.humanize.downcase}/)
end
end
@@ -90,7 +90,7 @@ RSpec.describe 'Group empty states', feature_category: :groups_and_projects do
end
it 'displays an empty state' do
- expect(page).to have_selector('.empty-state')
+ expect(page).to have_selector('[data-testid="issuable-empty-state"]')
end
it "shows a new #{issuable_name} button" do
@@ -107,11 +107,11 @@ RSpec.describe 'Group empty states', feature_category: :groups_and_projects do
shared_examples "no projects" do
it 'displays an empty state', :js do
- expect(page).to have_selector('.empty-state')
+ expect(page).to have_selector('[data-testid="issuable-empty-state"]')
end
it "does not show a new #{issuable_name} button", :js do
- within '.empty-state' do
+ within_testid('issuable-empty-state') do
expect(page).not_to have_link("create #{issuable_name}")
end
end
@@ -130,7 +130,7 @@ RSpec.describe 'Group empty states', feature_category: :groups_and_projects do
end
it 'does not display an empty state' do
- expect(page).not_to have_selector('.empty-state')
+ expect(page).not_to have_selector('[data-testid="issuable-empty-state"]')
end
end
@@ -140,7 +140,7 @@ RSpec.describe 'Group empty states', feature_category: :groups_and_projects do
end
it 'displays an empty state', :js do
- expect(page).to have_selector('.empty-state')
+ expect(page).to have_selector('[data-testid="issuable-empty-state"]')
end
end
end
diff --git a/spec/features/groups/labels/sort_labels_spec.rb b/spec/features/groups/labels/sort_labels_spec.rb
index e177461701e..a8cb49ff2a0 100644
--- a/spec/features/groups/labels/sort_labels_spec.rb
+++ b/spec/features/groups/labels/sort_labels_spec.rb
@@ -22,8 +22,8 @@ RSpec.describe 'Sort labels', :js, feature_category: :team_planning do
# assert default sorting
within '.other-labels' do
- expect(page.all('.label-list-item').first.text).to include('Bar')
- expect(page.all('.label-list-item').last.text).to include('Foo')
+ expect(page.all('.js-label-list-item').first.text).to include('Bar')
+ expect(page.all('.js-label-list-item').last.text).to include('Foo')
end
end
@@ -43,8 +43,8 @@ RSpec.describe 'Sort labels', :js, feature_category: :team_planning do
# assert default sorting
within '.other-labels' do
- expect(page.all('.label-list-item').first.text).to include('Foo')
- expect(page.all('.label-list-item').last.text).to include('Bar')
+ expect(page.all('.js-label-list-item').first.text).to include('Foo')
+ expect(page.all('.js-label-list-item').last.text).to include('Bar')
end
end
end
diff --git a/spec/features/groups/milestone_spec.rb b/spec/features/groups/milestone_spec.rb
index d870471d646..bb7cc3db452 100644
--- a/spec/features/groups/milestone_spec.rb
+++ b/spec/features/groups/milestone_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'Group milestones', feature_category: :groups_and_projects do
- include ContentEditorHelpers
-
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project_empty_repo, group: group) }
let_it_be(:user) { create(:group_member, :maintainer, user: create(:user), group: group).user }
@@ -20,7 +18,6 @@ RSpec.describe 'Group milestones', feature_category: :groups_and_projects do
context 'create a milestone', :js do
before do
visit new_group_milestone_path(group)
- close_rich_text_promo_popover_if_present
end
it 'renders description preview' do
@@ -69,7 +66,6 @@ RSpec.describe 'Group milestones', feature_category: :groups_and_projects do
context 'when no milestones' do
it 'renders no milestones text' do
visit group_milestones_path(group)
- close_rich_text_promo_popover_if_present
expect(page).to have_content('Use milestones to track issues and merge requests')
end
end
@@ -99,7 +95,6 @@ RSpec.describe 'Group milestones', feature_category: :groups_and_projects do
before do
visit group_milestones_path(group)
- close_rich_text_promo_popover_if_present
end
it 'counts milestones correctly' do
@@ -175,7 +170,6 @@ RSpec.describe 'Group milestones', feature_category: :groups_and_projects do
before do
visit group_milestone_path(group, milestone)
- close_rich_text_promo_popover_if_present
end
it 'renders the issues tab' do
diff --git a/spec/features/groups/navbar_spec.rb b/spec/features/groups/navbar_spec.rb
index 6a38f0c59a8..76e4e32d138 100644
--- a/spec/features/groups/navbar_spec.rb
+++ b/spec/features/groups/navbar_spec.rb
@@ -18,7 +18,6 @@ RSpec.describe 'Group navbar', :with_license, feature_category: :navigation do
stub_config(dependency_proxy: { enabled: false })
stub_config(registry: { enabled: false })
- stub_feature_flags(observability_group_tab: false)
stub_group_wikis(false)
group.add_maintainer(user)
sign_in(user)
@@ -93,16 +92,4 @@ RSpec.describe 'Group navbar', :with_license, feature_category: :navigation do
it_behaves_like 'verified navigation bar'
end
-
- context 'when observability tab is enabled' do
- before do
- stub_feature_flags(observability_group_tab: true)
-
- insert_observability_nav
-
- visit group_path(group)
- end
-
- it_behaves_like 'verified navigation bar'
- end
end
diff --git a/spec/features/groups_spec.rb b/spec/features/groups_spec.rb
index 7af58bf460c..bcbfdf487ac 100644
--- a/spec/features/groups_spec.rb
+++ b/spec/features/groups_spec.rb
@@ -544,7 +544,7 @@ RSpec.describe 'Group', feature_category: :groups_and_projects do
describe 'group README', :js do
context 'with gitlab-profile project and README.md' do
let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, :readme, namespace: group) }
+ let_it_be(:project) { create(:project, :public, :readme, namespace: group) }
it 'renders README block on group page' do
visit group_path(group)
diff --git a/spec/features/ide/user_opens_merge_request_spec.rb b/spec/features/ide/user_opens_merge_request_spec.rb
index 2aa89cadb7d..1d3cada57db 100644
--- a/spec/features/ide/user_opens_merge_request_spec.rb
+++ b/spec/features/ide/user_opens_merge_request_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'IDE merge request', :js, feature_category: :web_ide do
- include CookieHelper
-
let_it_be(:user) { create(:user, :no_super_sidebar) }
let_it_be(:project) { create(:project, :public, :repository, namespace: user.namespace) }
let_it_be(:merge_request) { create(:merge_request, :simple, source_project: project) }
@@ -14,8 +12,6 @@ RSpec.describe 'IDE merge request', :js, feature_category: :web_ide do
sign_in(user)
- set_cookie('new-actions-popover-viewed', 'true')
-
visit(merge_request_path(merge_request))
end
diff --git a/spec/features/incidents/incident_details_spec.rb b/spec/features/incidents/incident_details_spec.rb
index 7e447ae32c0..693f2599de5 100644
--- a/spec/features/incidents/incident_details_spec.rb
+++ b/spec/features/incidents/incident_details_spec.rb
@@ -101,7 +101,6 @@ RSpec.describe 'Incident details', :js, feature_category: :incident_management d
end
it 'routes the user to the incident details page when the `issue_type` is set to incident' do
- set_cookie('new-actions-popover-viewed', 'true')
visit project_issue_path(project, issue)
wait_for_requests
@@ -124,7 +123,6 @@ RSpec.describe 'Incident details', :js, feature_category: :incident_management d
end
it 'routes the user to the issue details page when the `issue_type` is set to issue' do
- set_cookie('new-actions-popover-viewed', 'true')
visit incident_project_issues_path(project, incident)
wait_for_requests
diff --git a/spec/features/invites_spec.rb b/spec/features/invites_spec.rb
index a56df7bdecc..c86d4c260ee 100644
--- a/spec/features/invites_spec.rb
+++ b/spec/features/invites_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_category: :experimentation_expansion do
+RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_category: :acquisition do
let_it_be(:owner) { create(:user, name: 'John Doe') }
# private will ensure we really have access to the group when we land on the activity page
let_it_be(:group) { create(:group, :private, name: 'Owned') }
@@ -113,7 +113,7 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
end
it 'declines application and redirects to dashboard' do
- expect(page).to have_current_path(dashboard_projects_path, ignore_query: true)
+ expect(page).to have_current_path(dashboard_projects_path)
expect(page).to have_content('You have declined the invitation to join group Owned.')
expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
end
@@ -178,7 +178,6 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
context 'when the user signs up for an account with the invitation email address' do
it 'redirects to the most recent membership activity page with all invitations automatically accepted' do
fill_in_sign_up_form(new_user)
- fill_in_welcome_form
expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
expect(page).to have_content('You have been granted Owner access to group Owned.')
@@ -190,7 +189,6 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
it 'signs up and redirects to the projects dashboard' do
fill_in_sign_up_form(new_user)
- fill_in_welcome_form
expect_to_be_on_projects_dashboard_with_zero_authorized_projects
end
@@ -226,7 +224,6 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
context 'when the user signs up for an account with the invitation email address' do
it 'redirects to the most recent membership activity page with all invitations automatically accepted' do
fill_in_sign_up_form(new_user)
- fill_in_welcome_form
expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
end
@@ -245,7 +242,6 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
fill_in_sign_up_form(new_user)
confirm_email(new_user)
gitlab_sign_in(new_user, remember: true, visit: false)
- fill_in_welcome_form
expect_to_be_on_projects_dashboard_with_zero_authorized_projects
end
@@ -259,7 +255,6 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
it 'signs up and redirects to the projects dashboard' do
fill_in_sign_up_form(new_user)
- fill_in_welcome_form
expect_to_be_on_projects_dashboard_with_zero_authorized_projects
end
@@ -283,7 +278,8 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
fill_in_sign_up_form(new_user, 'Register')
- expect(page).to have_current_path(users_sign_up_welcome_path, ignore_query: true)
+ expect(page).to have_current_path(activity_group_path(group))
+ expect(page).to have_content('You have been granted Owner access to group Owned.')
end
end
@@ -296,5 +292,29 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
end
end
+
+ context 'when inviting a registered user by a secondary email address' do
+ let(:user) { create(:user) }
+ let(:secondary_email) { create(:email, user: user) }
+
+ before do
+ create(:group_member, :invited, group: group, invite_email: secondary_email.email, created_by: owner)
+ gitlab_sign_in(user)
+ end
+
+ it 'does not accept the pending invitation and does not redirect to the groups activity path' do
+ expect(page).not_to have_current_path(activity_group_path(group), ignore_query: true)
+ expect(group.reload.users).not_to include(user)
+ end
+
+ context 'when the secondary email address is confirmed' do
+ let(:secondary_email) { create(:email, :confirmed, user: user) }
+
+ it 'accepts the pending invitation and redirects to the groups activity path' do
+ expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
+ expect(group.reload.users).to include(user)
+ end
+ end
+ end
end
end
diff --git a/spec/features/issuables/issuable_list_spec.rb b/spec/features/issuables/issuable_list_spec.rb
index 7bf9620f282..1020ea341ce 100644
--- a/spec/features/issuables/issuable_list_spec.rb
+++ b/spec/features/issuables/issuable_list_spec.rb
@@ -87,7 +87,7 @@ RSpec.describe 'issuable list', :js, feature_category: :team_planning do
else
create(:merge_request, source_project: project, source_branch: generate(:branch))
source_branch = FFaker::Lorem.characters(8)
- pipeline = create(:ci_empty_pipeline, project: project, ref: source_branch, status: %w(running failed success).sample, sha: 'any')
+ pipeline = create(:ci_empty_pipeline, project: project, ref: source_branch, status: %w[running failed success].sample, sha: 'any')
create(:merge_request, title: FFaker::Lorem.sentence, source_project: project, source_branch: source_branch, head_pipeline: pipeline)
end
diff --git a/spec/features/issuables/markdown_references/jira_spec.rb b/spec/features/issuables/markdown_references/jira_spec.rb
index e072231c6e9..887bc7d0c87 100644
--- a/spec/features/issuables/markdown_references/jira_spec.rb
+++ b/spec/features/issuables/markdown_references/jira_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe "Jira", :js, feature_category: :team_planning do
- include ContentEditorHelpers
-
let(:user) { create(:user) }
let(:actual_project) { create(:project, :public, :repository) }
let(:merge_request) { create(:merge_request, target_project: actual_project, source_project: actual_project) }
@@ -26,7 +24,6 @@ RSpec.describe "Jira", :js, feature_category: :team_planning do
sign_in(user)
visit(merge_request_path(merge_request))
- close_rich_text_promo_popover_if_present
build_note
end
diff --git a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
index d35f037247d..4c4a5624d00 100644
--- a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
+++ b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
@@ -12,9 +12,9 @@ RSpec.describe 'Resolving all open threads in a merge request from an issue', :j
url = new_project_issue_path(project, merge_request_to_resolve_discussions_of: merge_request.iid)
if title.empty?
- %{a[href="#{url}"]}
+ %(a[href="#{url}"])
else
- %{a[title="#{title}"][href="#{url}"]}
+ %(a[title="#{title}"][href="#{url}"])
end
end
diff --git a/spec/features/issues/form_spec.rb b/spec/features/issues/form_spec.rb
index ed2c712feb1..73c53e855b2 100644
--- a/spec/features/issues/form_spec.rb
+++ b/spec/features/issues/form_spec.rb
@@ -5,7 +5,6 @@ require 'spec_helper'
RSpec.describe 'New/edit issue', :js, feature_category: :team_planning do
include ActionView::Helpers::JavaScriptHelper
include ListboxHelpers
- include ContentEditorHelpers
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user, :no_super_sidebar) }
@@ -18,7 +17,6 @@ RSpec.describe 'New/edit issue', :js, feature_category: :team_planning do
let_it_be(:confidential_issue) { create(:issue, project: project, assignees: [user], milestone: milestone, confidential: true) }
let(:current_user) { user }
- let(:visible_label_selection_on_metadata) { false }
before_all do
project.add_maintainer(user)
@@ -28,7 +26,6 @@ RSpec.describe 'New/edit issue', :js, feature_category: :team_planning do
before do
stub_licensed_features(multiple_issue_assignees: false, issue_weights: false)
- stub_feature_flags(visible_label_selection_on_metadata: visible_label_selection_on_metadata)
sign_in(current_user)
end
@@ -36,7 +33,6 @@ RSpec.describe 'New/edit issue', :js, feature_category: :team_planning do
describe 'new issue' do
before do
visit new_project_issue_path(project)
- close_rich_text_promo_popover_if_present
end
describe 'shorten users API pagination limit' do
@@ -117,232 +113,125 @@ RSpec.describe 'New/edit issue', :js, feature_category: :team_planning do
end
end
- context 'with the visible_label_selection_on_metadata feature flag enabled' do
- let(:visible_label_selection_on_metadata) { true }
-
- it 'allows user to create new issue' do
- fill_in 'issue_title', with: 'title'
- fill_in 'issue_description', with: 'title'
-
- expect(find('a', text: 'Assign to me')).to be_visible
- click_button 'Unassigned'
-
- wait_for_requests
-
- page.within '.dropdown-menu-user' do
- click_link user2.name
- end
- expect(find('input[name="issue[assignee_ids][]"]', visible: false).value).to match(user2.id.to_s)
- page.within '.js-assignee-search' do
- expect(page).to have_content user2.name
- end
- expect(find('a', text: 'Assign to me')).to be_visible
-
- click_link 'Assign to me'
- assignee_ids = page.all('input[name="issue[assignee_ids][]"]', visible: false)
-
- expect(assignee_ids[0].value).to match(user.id.to_s)
-
- page.within '.js-assignee-search' do
- expect(page).to have_content user.name
- end
- expect(find('a', text: 'Assign to me', visible: false)).not_to be_visible
+ it 'allows user to create new issue' do
+ fill_in 'issue_title', with: 'title'
+ fill_in 'issue_description', with: 'title'
- click_button 'Select milestone'
- click_button milestone.title
- expect(find('input[name="issue[milestone_id]"]', visible: false).value).to match(milestone.id.to_s)
- expect(page).to have_button milestone.title
+ expect(find('a', text: 'Assign to me')).to be_visible
+ click_button 'Unassigned'
- click_button _('Select label')
- wait_for_all_requests
- page.within '[data-testid="sidebar-labels"]' do
- click_button label.title
- click_button label2.title
- click_button _('Close')
- wait_for_requests
- page.within('[data-testid="embedded-labels-list"]') do
- expect(page).to have_content(label.title)
- expect(page).to have_content(label2.title)
- end
- end
+ wait_for_requests
- click_button 'Create issue'
+ page.within '.dropdown-menu-user' do
+ click_link user2.name
+ end
+ expect(find('input[name="issue[assignee_ids][]"]', visible: false).value).to match(user2.id.to_s)
+ page.within '.js-assignee-search' do
+ expect(page).to have_content user2.name
+ end
+ expect(find('a', text: 'Assign to me')).to be_visible
- page.within '.issuable-sidebar' do
- page.within '.assignee' do
- expect(page).to have_content "Assignee"
- end
+ click_link 'Assign to me'
+ assignee_ids = page.all('input[name="issue[assignee_ids][]"]', visible: false)
- page.within '.milestone' do
- expect(page).to have_content milestone.title
- end
+ expect(assignee_ids[0].value).to match(user.id.to_s)
- page.within '.labels' do
- expect(page).to have_content label.title
- expect(page).to have_content label2.title
- end
- end
+ page.within '.js-assignee-search' do
+ expect(page).to have_content user.name
+ end
+ expect(find('a', text: 'Assign to me', visible: false)).not_to be_visible
- page.within '.breadcrumbs' do
- issue = Issue.find_by(title: 'title')
+ click_button 'Select milestone'
+ click_button milestone.title
+ expect(find('input[name="issue[milestone_id]"]', visible: false).value).to match(milestone.id.to_s)
+ expect(page).to have_button milestone.title
- expect(page).to have_text("Issues #{issue.to_reference}")
+ click_button _('Select label')
+ wait_for_all_requests
+ page.within '[data-testid="sidebar-labels"]' do
+ click_button label.title
+ click_button label2.title
+ click_button _('Close')
+ wait_for_requests
+ page.within('[data-testid="embedded-labels-list"]') do
+ expect(page).to have_content(label.title)
+ expect(page).to have_content(label2.title)
end
end
- it 'correctly updates the dropdown toggle when removing a label' do
- click_button _('Select label')
-
- wait_for_all_requests
-
- page.within '[data-testid="sidebar-labels"]' do
- click_button label.title
- click_button _('Close')
-
- wait_for_requests
-
- page.within('[data-testid="embedded-labels-list"]') do
- expect(page).to have_content(label.title)
- end
+ click_button 'Create issue'
- expect(page.find('.gl-dropdown-button-text')).to have_content(label.title)
+ page.within '.issuable-sidebar' do
+ page.within '.assignee' do
+ expect(page).to have_content "Assignee"
end
- click_button label.title, class: 'gl-dropdown-toggle'
-
- wait_for_all_requests
-
- page.within '[data-testid="sidebar-labels"]' do
- click_button label.title, class: 'dropdown-item'
- click_button _('Close')
-
- wait_for_requests
+ page.within '.milestone' do
+ expect(page).to have_content milestone.title
+ end
- expect(page).not_to have_selector('[data-testid="embedded-labels-list"]')
- expect(page.find('.gl-dropdown-button-text')).to have_content(_('Select label'))
+ page.within '.labels' do
+ expect(page).to have_content label.title
+ expect(page).to have_content label2.title
end
end
- it 'clears label search input field when a label is selected', :js do
- click_button _('Select label')
-
- wait_for_all_requests
-
- page.within '[data-testid="sidebar-labels"]' do
- search_field = find('input[type="search"]')
-
- search_field.native.send_keys(label.title)
-
- expect(page).to have_css('.gl-search-box-by-type-clear')
-
- click_button label.title, class: 'dropdown-item'
+ page.within '.breadcrumbs' do
+ issue = Issue.find_by(title: 'title')
- expect(page).not_to have_css('.gl-search-box-by-type-clear')
- expect(search_field.value).to eq ''
- end
+ expect(page).to have_text("Issues #{issue.to_reference}")
end
end
- context 'with the visible_label_selection_on_metadata feature flag disabled' do
- let(:visible_label_selection_on_metadata) { false }
+ it 'correctly updates the dropdown toggle when removing a label' do
+ click_button _('Select label')
- it 'allows user to create new issue' do
- fill_in 'issue_title', with: 'title'
- fill_in 'issue_description', with: 'title'
+ wait_for_all_requests
- expect(find('a', text: 'Assign to me')).to be_visible
- click_button 'Unassigned'
+ page.within '[data-testid="sidebar-labels"]' do
+ click_button label.title
+ click_button _('Close')
wait_for_requests
- page.within '.dropdown-menu-user' do
- click_link user2.name
- end
- expect(find('input[name="issue[assignee_ids][]"]', visible: false).value).to match(user2.id.to_s)
- page.within '.js-assignee-search' do
- expect(page).to have_content user2.name
- end
- expect(find('a', text: 'Assign to me')).to be_visible
-
- click_link 'Assign to me'
- assignee_ids = page.all('input[name="issue[assignee_ids][]"]', visible: false)
-
- expect(assignee_ids[0].value).to match(user.id.to_s)
-
- page.within '.js-assignee-search' do
- expect(page).to have_content user.name
- end
- expect(find('a', text: 'Assign to me', visible: false)).not_to be_visible
-
- click_button 'Select milestone'
- click_button milestone.title
- expect(find('input[name="issue[milestone_id]"]', visible: false).value).to match(milestone.id.to_s)
- expect(page).to have_button milestone.title
-
- click_button 'Labels'
- page.within '.dropdown-menu-labels' do
- click_link label.title
- click_link label2.title
- end
-
- find('.js-issuable-form-dropdown.js-label-select').click
-
- page.within '.js-label-select' do
- expect(page).to have_content label.title
+ page.within('[data-testid="embedded-labels-list"]') do
+ expect(page).to have_content(label.title)
end
- expect(page.all('input[name="issue[label_ids][]"]', visible: false)[1].value).to match(label.id.to_s)
- expect(page.all('input[name="issue[label_ids][]"]', visible: false)[2].value).to match(label2.id.to_s)
- click_button 'Create issue'
+ expect(page.find('.gl-dropdown-button-text')).to have_content(label.title)
+ end
- page.within '.issuable-sidebar' do
- page.within '.assignee' do
- expect(page).to have_content "Assignee"
- end
+ click_button label.title, class: 'gl-dropdown-toggle'
- page.within '.milestone' do
- expect(page).to have_content milestone.title
- end
+ wait_for_all_requests
- page.within '.labels' do
- expect(page).to have_content label.title
- expect(page).to have_content label2.title
- end
- end
+ page.within '[data-testid="sidebar-labels"]' do
+ click_button label.title, class: 'dropdown-item'
+ click_button _('Close')
- page.within '.breadcrumbs' do
- issue = Issue.find_by(title: 'title')
+ wait_for_requests
- expect(page).to have_text("Issues #{issue.to_reference}")
- end
+ expect(page).not_to have_selector('[data-testid="embedded-labels-list"]')
+ expect(page.find('.gl-dropdown-button-text')).to have_content(_('Select label'))
end
+ end
- it 'correctly updates the dropdown toggle when removing a label' do
- click_button 'Labels'
-
- page.within '.dropdown-menu-labels' do
- click_link label.title
- end
+ it 'clears label search input field when a label is selected', :js do
+ click_button _('Select label')
- expect(find('.js-label-select')).to have_content(label.title)
+ wait_for_all_requests
- page.within '.dropdown-menu-labels' do
- click_link label.title
- end
+ page.within '[data-testid="sidebar-labels"]' do
+ search_field = find('input[type="search"]')
- expect(find('.js-label-select')).to have_content('Labels')
- end
+ search_field.native.send_keys(label.title)
- it 'clears label search input field when a label is selected' do
- click_button 'Labels'
+ expect(page).to have_css('.gl-search-box-by-type-clear')
- page.within '.dropdown-menu-labels' do
- search_field = find('input[type="search"]')
+ click_button label.title, class: 'dropdown-item'
- search_field.set(label2.title)
- click_link label2.title
- expect(search_field.value).to eq ''
- end
+ expect(page).not_to have_css('.gl-search-box-by-type-clear')
+ expect(search_field.value).to eq ''
end
end
@@ -559,100 +448,52 @@ RSpec.describe 'New/edit issue', :js, feature_category: :team_planning do
visit edit_project_issue_path(project, issue)
end
- context 'with the visible_label_selection_on_metadata feature flag enabled' do
- let(:visible_label_selection_on_metadata) { true }
-
- it 'allows user to update issue' do
- expect(find('input[name="issue[assignee_ids][]"]', visible: false).value).to match(user.id.to_s)
- expect(find('input[name="issue[milestone_id]"]', visible: false).value).to match(milestone.id.to_s)
- expect(find('a', text: 'Assign to me', visible: false)).not_to be_visible
-
- page.within '.js-user-search' do
- expect(page).to have_content user.name
- end
-
- expect(page).to have_button milestone.title
-
- click_button _('Select label')
-
- wait_for_all_requests
-
- page.within '[data-testid="sidebar-labels"]' do
- click_button label.title
- click_button label2.title
- click_button _('Close')
+ it 'allows user to update issue' do
+ expect(find('input[name="issue[assignee_ids][]"]', visible: false).value).to match(user.id.to_s)
+ expect(find('input[name="issue[milestone_id]"]', visible: false).value).to match(milestone.id.to_s)
+ expect(find('a', text: 'Assign to me', visible: false)).not_to be_visible
- wait_for_requests
+ page.within '.js-user-search' do
+ expect(page).to have_content user.name
+ end
- page.within('[data-testid="embedded-labels-list"]') do
- expect(page).to have_content(label.title)
- expect(page).to have_content(label2.title)
- end
- end
+ expect(page).to have_button milestone.title
- expect(page.all('input[name="issue[label_ids][]"]', visible: false)
- .map(&:value))
- .to contain_exactly(label.id.to_s, label2.id.to_s)
+ click_button _('Select label')
- click_button 'Save changes'
+ wait_for_all_requests
- page.within '.issuable-sidebar' do
- page.within '.assignee' do
- expect(page).to have_content user.name
- end
+ page.within '[data-testid="sidebar-labels"]' do
+ click_button label.title
+ click_button label2.title
+ click_button _('Close')
- page.within '.milestone' do
- expect(page).to have_content milestone.title
- end
+ wait_for_requests
- page.within '.labels' do
- expect(page).to have_content label.title
- expect(page).to have_content label2.title
- end
+ page.within('[data-testid="embedded-labels-list"]') do
+ expect(page).to have_content(label.title)
+ expect(page).to have_content(label2.title)
end
end
- end
- context 'with the visible_label_selection_on_metadata feature flag disabled' do
- let(:visible_label_selection_on_metadata) { false }
+ expect(page.all('input[name="issue[label_ids][]"]', visible: false)
+ .map(&:value))
+ .to contain_exactly(label.id.to_s, label2.id.to_s)
- it 'allows user to update issue' do
- expect(find('input[name="issue[assignee_ids][]"]', visible: false).value).to match(user.id.to_s)
- expect(find('input[name="issue[milestone_id]"]', visible: false).value).to match(milestone.id.to_s)
- expect(find('a', text: 'Assign to me', visible: false)).not_to be_visible
+ click_button 'Save changes'
- page.within '.js-user-search' do
+ page.within '.issuable-sidebar' do
+ page.within '.assignee' do
expect(page).to have_content user.name
end
- expect(page).to have_button milestone.title
-
- click_button 'Labels'
- page.within '.dropdown-menu-labels' do
- click_link label.title
- click_link label2.title
- end
- page.within '.js-label-select' do
- expect(page).to have_content label.title
+ page.within '.milestone' do
+ expect(page).to have_content milestone.title
end
- expect(page.all('input[name="issue[label_ids][]"]', visible: false)[1].value).to match(label.id.to_s)
- expect(page.all('input[name="issue[label_ids][]"]', visible: false)[2].value).to match(label2.id.to_s)
-
- click_button 'Save changes'
-
- page.within '.issuable-sidebar' do
- page.within '.assignee' do
- expect(page).to have_content user.name
- end
-
- page.within '.milestone' do
- expect(page).to have_content milestone.title
- end
- page.within '.labels' do
- expect(page).to have_content label.title
- expect(page).to have_content label2.title
- end
+ page.within '.labels' do
+ expect(page).to have_content label.title
+ expect(page).to have_content label2.title
end
end
end
@@ -733,9 +574,7 @@ RSpec.describe 'New/edit issue', :js, feature_category: :team_planning do
visit new_project_issue_path(sub_group_project)
end
- context 'with the visible_label_selection_on_metadata feature flag enabled', :js do
- let(:visible_label_selection_on_metadata) { true }
-
+ context 'labels', :js do
it 'creates project label from dropdown', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/416585' do
find('[data-testid="labels-select-dropdown-contents"] button').click
@@ -761,29 +600,6 @@ RSpec.describe 'New/edit issue', :js, feature_category: :team_planning do
end
end
end
-
- context 'with the visible_label_selection_on_metadata feature flag disabled' do
- let(:visible_label_selection_on_metadata) { false }
-
- it 'creates project label from dropdown' do
- click_button 'Labels'
-
- click_link 'Create project label'
-
- page.within '.dropdown-new-label' do
- fill_in 'new_label_name', with: 'test label'
- first('.suggest-colors-dropdown a').click
-
- click_button 'Create'
-
- wait_for_requests
- end
-
- page.within '.dropdown-menu-labels' do
- expect(page).to have_link 'test label'
- end
- end
- end
end
def before_for_selector(selector)
diff --git a/spec/features/issues/gfm_autocomplete_spec.rb b/spec/features/issues/gfm_autocomplete_spec.rb
index 47e9575da54..728ba07e5c4 100644
--- a/spec/features/issues/gfm_autocomplete_spec.rb
+++ b/spec/features/issues/gfm_autocomplete_spec.rb
@@ -3,9 +3,7 @@
require 'spec_helper'
RSpec.describe 'GFM autocomplete', :js, feature_category: :team_planning do
- include CookieHelper
include Features::AutocompleteHelpers
- include ContentEditorHelpers
let_it_be(:user) { create(:user, name: '💃speciąl someone💃', username: 'someone.special') }
let_it_be(:user2) { create(:user, name: 'Marge Simpson', username: 'msimpson') }
@@ -33,7 +31,6 @@ RSpec.describe 'GFM autocomplete', :js, feature_category: :team_planning do
before do
sign_in(user)
visit new_project_issue_path(project)
- close_rich_text_promo_popover_if_present
wait_for_requests
end
@@ -50,9 +47,7 @@ RSpec.describe 'GFM autocomplete', :js, feature_category: :team_planning do
before do
sign_in(user)
- set_cookie('new-actions-popover-viewed', 'true')
visit project_issue_path(project, issue_to_edit)
- close_rich_text_promo_popover_if_present
wait_for_requests
end
@@ -88,7 +83,6 @@ RSpec.describe 'GFM autocomplete', :js, feature_category: :team_planning do
before do
sign_in(user)
visit project_issue_path(project, issue)
- close_rich_text_promo_popover_if_present
wait_for_requests
end
diff --git a/spec/features/issues/issue_detail_spec.rb b/spec/features/issues/issue_detail_spec.rb
index 29a61d584ee..c8802a9cc71 100644
--- a/spec/features/issues/issue_detail_spec.rb
+++ b/spec/features/issues/issue_detail_spec.rb
@@ -130,7 +130,6 @@ RSpec.describe 'Issue Detail', :js, feature_category: :team_planning do
describe 'when an issue `issue_type` is edited' do
before do
sign_in(user)
- set_cookie('new-actions-popover-viewed', 'true')
visit project_issue_path(project, issue)
wait_for_requests
end
@@ -164,7 +163,6 @@ RSpec.describe 'Issue Detail', :js, feature_category: :team_planning do
describe 'when an incident `issue_type` is edited' do
before do
sign_in(user)
- set_cookie('new-actions-popover-viewed', 'true')
visit project_issue_path(project, incident)
wait_for_requests
end
diff --git a/spec/features/issues/issue_sidebar_spec.rb b/spec/features/issues/issue_sidebar_spec.rb
index 4cf558b04cc..a015a83c793 100644
--- a/spec/features/issues/issue_sidebar_spec.rb
+++ b/spec/features/issues/issue_sidebar_spec.rb
@@ -5,7 +5,6 @@ require 'spec_helper'
RSpec.describe 'Issue Sidebar', feature_category: :team_planning do
include MobileHelpers
include Features::InviteMembersModalHelpers
- include CookieHelper
let_it_be(:group) { create(:group, :nested) }
let_it_be(:project) { create(:project, :public, namespace: group) }
@@ -21,7 +20,6 @@ RSpec.describe 'Issue Sidebar', feature_category: :team_planning do
context 'when signed in' do
before do
sign_in(user)
- set_cookie('new-actions-popover-viewed', 'true')
end
context 'when concerning the assignee', :js do
diff --git a/spec/features/issues/issue_state_spec.rb b/spec/features/issues/issue_state_spec.rb
index 2a8b33183bb..3fe49ff7080 100644
--- a/spec/features/issues/issue_state_spec.rb
+++ b/spec/features/issues/issue_state_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'issue state', :js, feature_category: :team_planning do
- include CookieHelper
-
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
let_it_be(:user) { create(:user) }
@@ -12,7 +10,6 @@ RSpec.describe 'issue state', :js, feature_category: :team_planning do
before do
project.add_developer(user)
sign_in(user)
- set_cookie('new-actions-popover-viewed', 'true')
end
shared_examples 'issue closed' do |selector|
@@ -47,27 +44,14 @@ RSpec.describe 'issue state', :js, feature_category: :team_planning do
describe 'when open' do
context 'when clicking the top `Close issue` button', :aggregate_failures do
- context 'when move_close_into_dropdown FF is disabled' do
- let(:open_issue) { create(:issue, project: project) }
-
- before do
- stub_feature_flags(move_close_into_dropdown: false)
- visit project_issue_path(project, open_issue)
- end
+ let(:open_issue) { create(:issue, project: project) }
- it_behaves_like 'issue closed', '.detail-page-header-actions'
+ before do
+ visit project_issue_path(project, open_issue)
+ find('#new-actions-header-dropdown > button').click
end
- context 'when move_close_into_dropdown FF is enabled' do
- let(:open_issue) { create(:issue, project: project) }
-
- before do
- visit project_issue_path(project, open_issue)
- find('#new-actions-header-dropdown > button').click
- end
-
- it_behaves_like 'issue closed', '.dropdown-menu-right'
- end
+ it_behaves_like 'issue closed', '.dropdown-menu-right'
end
context 'when clicking the bottom `Close issue` button', :aggregate_failures do
@@ -83,27 +67,14 @@ RSpec.describe 'issue state', :js, feature_category: :team_planning do
describe 'when closed' do
context 'when clicking the top `Reopen issue` button', :aggregate_failures do
- context 'when move_close_into_dropdown FF is disabled' do
- let(:closed_issue) { create(:issue, project: project, state: 'closed', author: user) }
-
- before do
- stub_feature_flags(move_close_into_dropdown: false)
- visit project_issue_path(project, closed_issue)
- end
+ let(:closed_issue) { create(:issue, project: project, state: 'closed', author: user) }
- it_behaves_like 'issue reopened', '.detail-page-header-actions'
+ before do
+ visit project_issue_path(project, closed_issue)
+ find('#new-actions-header-dropdown > button').click
end
- context 'when move_close_into_dropdown FF is enabled' do
- let(:closed_issue) { create(:issue, project: project, state: 'closed', author: user) }
-
- before do
- visit project_issue_path(project, closed_issue)
- find('#new-actions-header-dropdown > button').click
- end
-
- it_behaves_like 'issue reopened', '.dropdown-menu-right'
- end
+ it_behaves_like 'issue reopened', '.dropdown-menu-right'
end
context 'when clicking the bottom `Reopen issue` button', :aggregate_failures do
diff --git a/spec/features/issues/markdown_toolbar_spec.rb b/spec/features/issues/markdown_toolbar_spec.rb
index b7a0949edce..484d160057c 100644
--- a/spec/features/issues/markdown_toolbar_spec.rb
+++ b/spec/features/issues/markdown_toolbar_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'Issue markdown toolbar', :js, feature_category: :team_planning do
- include ContentEditorHelpers
-
let_it_be(:project) { create(:project, :public) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:user) { create(:user) }
@@ -13,7 +11,6 @@ RSpec.describe 'Issue markdown toolbar', :js, feature_category: :team_planning d
sign_in(user)
visit project_issue_path(project, issue)
- close_rich_text_promo_popover_if_present
end
it "doesn't include first new line when adding bold" do
diff --git a/spec/features/issues/move_spec.rb b/spec/features/issues/move_spec.rb
index a6ed0b52e7d..4a38373db71 100644
--- a/spec/features/issues/move_spec.rb
+++ b/spec/features/issues/move_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe 'issue move to another project', feature_category: :team_planning
expect(page).to have_content("Text with #{cross_reference}#{mr.to_reference}")
expect(page).to have_content("moved from #{cross_reference}#{issue.to_reference}")
expect(page).to have_content(issue.title)
- expect(page).to have_current_path(%r(#{project_path(new_project)}))
+ expect(page).to have_current_path(%r{#{project_path(new_project)}})
end
it 'searching project dropdown', :js do
diff --git a/spec/features/issues/note_polling_spec.rb b/spec/features/issues/note_polling_spec.rb
index 293b6c53eb5..054635d61b1 100644
--- a/spec/features/issues/note_polling_spec.rb
+++ b/spec/features/issues/note_polling_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe 'Issue notes polling', :js, feature_category: :team_planning do
include NoteInteractionHelpers
- include ContentEditorHelpers
let(:project) { create(:project, :public) }
let(:issue) { create(:issue, project: project) }
@@ -12,7 +11,6 @@ RSpec.describe 'Issue notes polling', :js, feature_category: :team_planning do
describe 'creates' do
it 'displays the new comment' do
visit project_issue_path(project, issue)
- close_rich_text_promo_popover_if_present
note = create(:note, noteable: issue, project: project, note: 'Looks good!')
wait_for_requests
@@ -31,7 +29,6 @@ RSpec.describe 'Issue notes polling', :js, feature_category: :team_planning do
before do
sign_in(user)
visit project_issue_path(project, issue)
- close_rich_text_promo_popover_if_present
end
it 'displays the updated content' do
diff --git a/spec/features/issues/notes_on_issues_spec.rb b/spec/features/issues/notes_on_issues_spec.rb
index 62855c7467f..8d6262efa53 100644
--- a/spec/features/issues/notes_on_issues_spec.rb
+++ b/spec/features/issues/notes_on_issues_spec.rb
@@ -3,12 +3,9 @@
require 'spec_helper'
RSpec.describe 'Create notes on issues', :js, feature_category: :team_planning do
- include ContentEditorHelpers
-
let(:user) { create(:user) }
def submit_comment(text)
- close_rich_text_promo_popover_if_present
fill_in 'note[note]', with: text
click_button 'Comment'
wait_for_requests
diff --git a/spec/features/issues/related_issues_spec.rb b/spec/features/issues/related_issues_spec.rb
index 5102eeb2511..f460b4b1c7f 100644
--- a/spec/features/issues/related_issues_spec.rb
+++ b/spec/features/issues/related_issues_spec.rb
@@ -22,10 +22,6 @@ RSpec.describe 'Related issues', :js, feature_category: :team_planning do
let_it_be(:private_issue) { create(:issue, project: private_project) }
let_it_be(:public_issue) { create(:issue, project: public_project) }
- before do
- stub_feature_flags(move_close_into_dropdown: false)
- end
-
context 'widget visibility' do
context 'when not logged in' do
it 'does not show widget when internal project' do
diff --git a/spec/features/issues/resource_label_events_spec.rb b/spec/features/issues/resource_label_events_spec.rb
index 531361b19af..f9c1cc0acd4 100644
--- a/spec/features/issues/resource_label_events_spec.rb
+++ b/spec/features/issues/resource_label_events_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe 'List issue resource label events', :js, feature_category: :team_
end
it 'shows add note for newly added labels' do
- toggle_labels(%w(foo bar))
+ toggle_labels(%w[foo bar])
visit project_issue_path(project, issue)
wait_for_requests
diff --git a/spec/features/issues/service_desk_spec.rb b/spec/features/issues/service_desk_spec.rb
index 120b4ddb6e1..8e952a23f05 100644
--- a/spec/features/issues/service_desk_spec.rb
+++ b/spec/features/issues/service_desk_spec.rb
@@ -190,7 +190,7 @@ RSpec.describe 'Service Desk Issue Tracker', :js, feature_category: :service_des
visit service_desk_project_issues_path(project)
aggregate_failures do
- expect(page).to have_css('.empty-state')
+ expect(page).to have_css('[data-testid="issues-service-desk-empty-state"]')
expect(page).to have_text('Use Service Desk to connect with your users')
expect(page).to have_link('Learn more about Service Desk', href: help_page_path('user/project/service_desk/index'))
expect(page).not_to have_link('Enable Service Desk')
@@ -209,7 +209,7 @@ RSpec.describe 'Service Desk Issue Tracker', :js, feature_category: :service_des
it 'displays the large info box and the documentation link' do
aggregate_failures do
- expect(page).to have_css('.empty-state')
+ expect(page).to have_css('[data-testid="issues-service-desk-empty-state"]')
expect(page).to have_text('Use Service Desk to connect with your users')
expect(page).to have_link('Learn more about Service Desk', href: help_page_path('user/project/service_desk/index'))
expect(page).not_to have_link('Enable Service Desk')
diff --git a/spec/features/issues/todo_spec.rb b/spec/features/issues/todo_spec.rb
index c503c18be8d..2095453ac29 100644
--- a/spec/features/issues/todo_spec.rb
+++ b/spec/features/issues/todo_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'Manually create a todo item from issue', :js, feature_category:
let!(:user) { create(:user, :no_super_sidebar) }
before do
+ stub_feature_flags(notifications_todos_buttons: false)
project.add_maintainer(user)
sign_in(user)
visit project_issue_path(project, issue)
diff --git a/spec/features/issues/user_bulk_edits_issues_labels_spec.rb b/spec/features/issues/user_bulk_edits_issues_labels_spec.rb
index a01ae9ae0c2..f82b18f3dd7 100644
--- a/spec/features/issues/user_bulk_edits_issues_labels_spec.rb
+++ b/spec/features/issues/user_bulk_edits_issues_labels_spec.rb
@@ -105,7 +105,7 @@ RSpec.describe 'Issues > Labels bulk assignment', feature_category: :team_planni
context 'to all issues' do
before do
check 'Select all'
- open_labels_dropdown %w(bug feature)
+ open_labels_dropdown %w[bug feature]
update_issues
end
@@ -120,7 +120,7 @@ RSpec.describe 'Issues > Labels bulk assignment', feature_category: :team_planni
context 'to a issue' do
before do
check issue1.title
- open_labels_dropdown %w(bug feature)
+ open_labels_dropdown %w[bug feature]
update_issues
end
@@ -162,7 +162,7 @@ RSpec.describe 'Issues > Labels bulk assignment', feature_category: :team_planni
enable_bulk_update
check 'Select all'
- unmark_labels_in_dropdown %w(bug feature)
+ unmark_labels_in_dropdown %w[bug feature]
update_issues
end
diff --git a/spec/features/issues/user_comments_on_issue_spec.rb b/spec/features/issues/user_comments_on_issue_spec.rb
index f18992325d8..a81a99771cc 100644
--- a/spec/features/issues/user_comments_on_issue_spec.rb
+++ b/spec/features/issues/user_comments_on_issue_spec.rb
@@ -5,7 +5,6 @@ require "spec_helper"
RSpec.describe "User comments on issue", :js, feature_category: :team_planning do
include Features::AutocompleteHelpers
include Features::NotesHelpers
- include ContentEditorHelpers
let_it_be(:project) { create(:project, :public) }
let_it_be(:issue) { create(:issue, project: project) }
@@ -16,7 +15,6 @@ RSpec.describe "User comments on issue", :js, feature_category: :team_planning d
sign_in(user)
visit(project_issue_path(project, issue))
- close_rich_text_promo_popover_if_present
end
context "when adding comments" do
@@ -54,6 +52,17 @@ RSpec.describe "User comments on issue", :js, feature_category: :team_planning d
expect(find_highlighted_autocomplete_item).to have_content('/label')
end
+
+ it "switches back to edit mode if a comment is submitted in preview mode" do
+ fill_in 'Comment', with: 'just a regular comment'
+ click_button 'Preview'
+
+ expect(page).to have_content('Continue editing')
+
+ click_button 'Comment'
+
+ expect(page).not_to have_content('Continue editing')
+ end
end
context "when editing comments" do
diff --git a/spec/features/issues/user_creates_issue_spec.rb b/spec/features/issues/user_creates_issue_spec.rb
index 857cb1f39a2..29b44bf165d 100644
--- a/spec/features/issues/user_creates_issue_spec.rb
+++ b/spec/features/issues/user_creates_issue_spec.rb
@@ -4,13 +4,10 @@ require "spec_helper"
RSpec.describe "User creates issue", feature_category: :team_planning do
include DropzoneHelper
- include ContentEditorHelpers
let_it_be(:project) { create(:project_empty_repo, :public) }
let_it_be(:user) { create(:user) }
- let(:visible_label_selection_on_metadata) { false }
-
context "when unauthenticated" do
before do
sign_out(:user)
@@ -37,12 +34,10 @@ RSpec.describe "User creates issue", feature_category: :team_planning do
context "when signed in as guest", :js do
before do
- stub_feature_flags(visible_label_selection_on_metadata: visible_label_selection_on_metadata)
project.add_guest(user)
sign_in(user)
visit(new_project_issue_path(project))
- close_rich_text_promo_popover_if_present
end
context 'available metadata' do
@@ -89,7 +84,7 @@ RSpec.describe "User creates issue", feature_category: :team_planning do
end
context "with labels" do
- let(:label_titles) { %w(bug feature enhancement) }
+ let(:label_titles) { %w[bug feature enhancement] }
before do
label_titles.each do |title|
@@ -97,50 +92,28 @@ RSpec.describe "User creates issue", feature_category: :team_planning do
end
end
- context 'with the visible_label_selection_on_metadata feature flag enabled' do
- let(:visible_label_selection_on_metadata) { true }
-
- it "creates issue" do
- issue_title = "500 error on profile"
-
- fill_in("Title", with: issue_title)
-
- click_button _('Select label')
+ it "creates issue" do
+ issue_title = "500 error on profile"
- wait_for_all_requests
+ fill_in("Title", with: issue_title)
- page.within '[data-testid="sidebar-labels"]' do
- click_button label_titles.first
- click_button _('Close')
+ click_button _('Select label')
- wait_for_requests
- end
+ wait_for_all_requests
- click_button("Create issue")
+ page.within '[data-testid="sidebar-labels"]' do
+ click_button label_titles.first
+ click_button _('Close')
- expect(page).to have_content(issue_title)
- .and have_content(user.name)
- .and have_content(project.name)
- .and have_content(label_titles.first)
+ wait_for_requests
end
- end
-
- context 'with the visible_label_selection_on_metadata feature flag disabled' do
- let(:visible_label_selection_on_metadata) { false }
- it "creates issue" do
- issue_title = "500 error on profile"
+ click_button("Create issue")
- fill_in("Title", with: issue_title)
- click_button("Label")
- click_link(label_titles.first)
- click_button("Create issue")
-
- expect(page).to have_content(issue_title)
- .and have_content(user.name)
- .and have_content(project.name)
- .and have_content(label_titles.first)
- end
+ expect(page).to have_content(issue_title)
+ .and have_content(user.name)
+ .and have_content(project.name)
+ .and have_content(label_titles.first)
end
end
diff --git a/spec/features/issues/user_edits_issue_spec.rb b/spec/features/issues/user_edits_issue_spec.rb
index 45d95db8ff1..7919e8f7ed4 100644
--- a/spec/features/issues/user_edits_issue_spec.rb
+++ b/spec/features/issues/user_edits_issue_spec.rb
@@ -3,9 +3,6 @@
require "spec_helper"
RSpec.describe "Issues > User edits issue", :js, feature_category: :team_planning do
- include CookieHelper
- include ContentEditorHelpers
-
let_it_be(:project) { create(:project_empty_repo, :public) }
let_it_be(:project_with_milestones) { create(:project_empty_repo, :public) }
let_it_be(:user) { create(:user) }
@@ -21,14 +18,12 @@ RSpec.describe "Issues > User edits issue", :js, feature_category: :team_plannin
project.add_developer(user)
project_with_milestones.add_developer(user)
sign_in(user)
- set_cookie('new-actions-popover-viewed', 'true')
end
context "from edit page" do
before do
stub_licensed_features(multiple_issue_assignees: false)
visit edit_project_issue_path(project, issue)
- close_rich_text_promo_popover_if_present
end
it_behaves_like 'edits content using the content editor'
diff --git a/spec/features/issues/user_interacts_with_awards_spec.rb b/spec/features/issues/user_interacts_with_awards_spec.rb
index e1099ba242e..539e429534e 100644
--- a/spec/features/issues/user_interacts_with_awards_spec.rb
+++ b/spec/features/issues/user_interacts_with_awards_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe 'User interacts with awards', feature_category: :team_planning do
include MobileHelpers
- include ContentEditorHelpers
let(:user) { create(:user) }
@@ -17,7 +16,6 @@ RSpec.describe 'User interacts with awards', feature_category: :team_planning do
sign_in(user)
visit(project_issue_path(project, issue))
- close_rich_text_promo_popover_if_present
end
it 'toggles the thumbsup award emoji', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/27959' do
diff --git a/spec/features/issues/user_sees_sidebar_updates_in_realtime_spec.rb b/spec/features/issues/user_sees_sidebar_updates_in_realtime_spec.rb
index ef448c06a3f..91b18454af5 100644
--- a/spec/features/issues/user_sees_sidebar_updates_in_realtime_spec.rb
+++ b/spec/features/issues/user_sees_sidebar_updates_in_realtime_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'Issues > Real-time sidebar', :js, :with_license, feature_category: :team_planning do
- include ContentEditorHelpers
-
let_it_be(:project) { create(:project, :public) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:user) { create(:user) }
@@ -22,7 +20,6 @@ RSpec.describe 'Issues > Real-time sidebar', :js, :with_license, feature_categor
using_session :other_session do
visit project_issue_path(project, issue)
- close_rich_text_promo_popover_if_present
expect(page.find('.assignee')).to have_content 'None'
end
@@ -46,7 +43,6 @@ RSpec.describe 'Issues > Real-time sidebar', :js, :with_license, feature_categor
using_session :other_session do
visit project_issue_path(project, issue)
wait_for_requests
- close_rich_text_promo_popover_if_present
expect(labels_value).to have_content('None')
end
@@ -54,7 +50,6 @@ RSpec.describe 'Issues > Real-time sidebar', :js, :with_license, feature_categor
visit project_issue_path(project, issue)
wait_for_requests
- close_rich_text_promo_popover_if_present
expect(labels_value).to have_content('None')
page.within(labels_widget) do
diff --git a/spec/features/issues/user_toggles_subscription_spec.rb b/spec/features/issues/user_toggles_subscription_spec.rb
index af8a31afd5f..713a169f061 100644
--- a/spec/features/issues/user_toggles_subscription_spec.rb
+++ b/spec/features/issues/user_toggles_subscription_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe "User toggles subscription", :js, feature_category: :team_plannin
context 'user is not logged in' do
before do
stub_feature_flags(moved_mr_sidebar: false)
+ stub_feature_flags(notifications_todos_buttons: false)
visit(project_issue_path(project, issue))
end
@@ -22,6 +23,7 @@ RSpec.describe "User toggles subscription", :js, feature_category: :team_plannin
context 'user is logged in' do
before do
stub_feature_flags(moved_mr_sidebar: false)
+ stub_feature_flags(notifications_todos_buttons: false)
project.add_developer(user)
sign_in(user)
visit(project_issue_path(project, issue))
@@ -54,6 +56,7 @@ RSpec.describe "User toggles subscription", :js, feature_category: :team_plannin
context 'user is logged in without edit permission' do
before do
stub_feature_flags(moved_mr_sidebar: false)
+ stub_feature_flags(notifications_todos_buttons: false)
sign_in(user2)
visit(project_issue_path(project, issue))
@@ -73,4 +76,24 @@ RSpec.describe "User toggles subscription", :js, feature_category: :team_plannin
expect(subscription_button).to have_css("button.is-checked")
end
end
+
+ context 'with notifications_todos_buttons feature flag enabled' do
+ before do
+ stub_feature_flags(moved_mr_sidebar: true)
+ stub_feature_flags(notifications_todos_buttons: true)
+ sign_in(user2)
+
+ visit(project_issue_path(project, issue))
+ end
+
+ it 'toggles subscription' do
+ subscription_button = find('[data-testid="subscribe-button"]')
+
+ expect(page).to have_selector("button[title='Notifications off']")
+ subscription_button.click
+ wait_for_requests
+
+ expect(page).to have_selector("button[title='Notifications on']")
+ end
+ end
end
diff --git a/spec/features/issues/user_uses_quick_actions_spec.rb b/spec/features/issues/user_uses_quick_actions_spec.rb
index c15716243ae..d3552b87fea 100644
--- a/spec/features/issues/user_uses_quick_actions_spec.rb
+++ b/spec/features/issues/user_uses_quick_actions_spec.rb
@@ -9,7 +9,6 @@ require 'spec_helper'
# for each existing quick action unless they test something not tested by existing tests.
RSpec.describe 'Issues > User uses quick actions', :js, feature_category: :team_planning do
include Features::NotesHelpers
- include ContentEditorHelpers
context "issuable common quick actions" do
let(:new_url_opts) { {} }
@@ -35,7 +34,6 @@ RSpec.describe 'Issues > User uses quick actions', :js, feature_category: :team_
sign_in(user)
visit project_issue_path(project, issue)
wait_for_all_requests
- close_rich_text_promo_popover_if_present
end
after do
diff --git a/spec/features/labels_hierarchy_spec.rb b/spec/features/labels_hierarchy_spec.rb
index 0cb712622f2..72f5b46c3ad 100644
--- a/spec/features/labels_hierarchy_spec.rb
+++ b/spec/features/labels_hierarchy_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe 'Labels Hierarchy', :js, feature_category: :team_planning do
include FilteredSearchHelpers
- include ContentEditorHelpers
let!(:user) { create(:user, :no_super_sidebar) }
let!(:grandparent) { create(:group) }
@@ -158,73 +157,34 @@ RSpec.describe 'Labels Hierarchy', :js, feature_category: :team_planning do
end
end
- context 'with the visible_label_selection_on_metadata feature flag enabled' do
+ context 'when creating new issuable' do
before do
- stub_feature_flags(visible_label_selection_on_metadata: true)
+ visit new_project_issue_path(project_1)
end
- context 'when creating new issuable' do
- before do
- visit new_project_issue_path(project_1)
- close_rich_text_promo_popover_if_present
- end
-
- it 'is able to assign ancestor group labels' do
- fill_in 'issue_title', with: 'new created issue'
- fill_in 'issue_description', with: 'new issue description'
+ it 'is able to assign ancestor group labels' do
+ fill_in 'issue_title', with: 'new created issue'
+ fill_in 'issue_description', with: 'new issue description'
- click_button _('Select label')
+ click_button _('Select label')
- wait_for_all_requests
+ wait_for_all_requests
- page.within '[data-testid="sidebar-labels"]' do
- click_button grandparent_group_label.title
- click_button parent_group_label.title
- click_button project_label_1.title
- click_button _('Close')
-
- wait_for_requests
- end
-
- find('.btn-confirm').click
-
- expect(page.find('.issue-details h1.title')).to have_content('new created issue')
- expect(page).to have_selector('span.gl-label-text', text: grandparent_group_label.title)
- expect(page).to have_selector('span.gl-label-text', text: parent_group_label.title)
- expect(page).to have_selector('span.gl-label-text', text: project_label_1.title)
- end
- end
- end
+ page.within '[data-testid="sidebar-labels"]' do
+ click_button grandparent_group_label.title
+ click_button parent_group_label.title
+ click_button project_label_1.title
+ click_button _('Close')
- context 'with the visible_label_selection_on_metadata feature flag disabled' do
- before do
- stub_feature_flags(visible_label_selection_on_metadata: false)
- end
-
- context 'when creating new issuable' do
- before do
- visit new_project_issue_path(project_1)
- close_rich_text_promo_popover_if_present
- end
-
- it 'is able to assign ancestor group labels' do
- fill_in 'issue_title', with: 'new created issue'
- fill_in 'issue_description', with: 'new issue description'
-
- find(".js-label-select").click
wait_for_requests
+ end
- find('a.label-item', text: grandparent_group_label.title).click
- find('a.label-item', text: parent_group_label.title).click
- find('a.label-item', text: project_label_1.title).click
-
- find('.btn-confirm').click
+ find('.btn-confirm').click
- expect(page.find('.issue-details h1.title')).to have_content('new created issue')
- expect(page).to have_selector('span.gl-label-text', text: grandparent_group_label.title)
- expect(page).to have_selector('span.gl-label-text', text: parent_group_label.title)
- expect(page).to have_selector('span.gl-label-text', text: project_label_1.title)
- end
+ expect(page.find('.issue-details h1.title')).to have_content('new created issue')
+ expect(page).to have_selector('span.gl-label-text', text: grandparent_group_label.title)
+ expect(page).to have_selector('span.gl-label-text', text: parent_group_label.title)
+ expect(page).to have_selector('span.gl-label-text', text: project_label_1.title)
end
end
@@ -236,7 +196,6 @@ RSpec.describe 'Labels Hierarchy', :js, feature_category: :team_planning do
project_1.add_developer(user)
visit project_issue_path(project_1, issue)
- close_rich_text_promo_popover_if_present
end
it_behaves_like 'assigning labels from sidebar'
diff --git a/spec/features/markdown/markdown_spec.rb b/spec/features/markdown/markdown_spec.rb
index eb86393d59e..34c9219101f 100644
--- a/spec/features/markdown/markdown_spec.rb
+++ b/spec/features/markdown/markdown_spec.rb
@@ -167,13 +167,13 @@ RSpec.describe 'GitLab Markdown', :aggregate_failures, feature_category: :team_p
it 'allows markup inside link elements' do
aggregate_failures do
expect(doc.at_css('a[href="#link-emphasis"]').to_html)
- .to eq_no_sourcepos %{<a href="#link-emphasis"><em>text</em></a>}
+ .to eq_no_sourcepos %(<a href="#link-emphasis"><em>text</em></a>)
expect(doc.at_css('a[href="#link-strong"]').to_html)
- .to eq_no_sourcepos %{<a href="#link-strong"><strong>text</strong></a>}
+ .to eq_no_sourcepos %(<a href="#link-strong"><strong>text</strong></a>)
expect(doc.at_css('a[href="#link-code"]').to_html)
- .to eq_no_sourcepos %{<a href="#link-code"><code>text</code></a>}
+ .to eq_no_sourcepos %(<a href="#link-code"><code>text</code></a>)
end
end
end
diff --git a/spec/features/markdown/math_spec.rb b/spec/features/markdown/math_spec.rb
index 0bc8f2146e9..0d12aade807 100644
--- a/spec/features/markdown/math_spec.rb
+++ b/spec/features/markdown/math_spec.rb
@@ -48,47 +48,92 @@ RSpec.describe 'Math rendering', :js, feature_category: :team_planning do
end
end
- it 'renders lazy load button' do
- description = <<~MATH
- ```math
- \Huge \sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}
- ```
- MATH
-
- create_and_visit_issue_with_description(description)
+ describe 'applying limits on math rendering' do
+ let(:lazy_load_description) do
+ <<~MATH
+ ```math
+ \Huge \sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}
+ ```
+ MATH
+ end
- page.within '.description > .md' do
- expect(page).to have_selector('.js-lazy-render-math-container', text: /math block exceeds 1000 characters/)
+ let(:excessive_expansion_description) do
+ <<~MATH
+ ```math
+ #{'\\mod e ' * 150}
+ ```
+ MATH
end
- end
- it 'allows many expansions', :js do
- description = <<~MATH
- ```math
- #{'\\mod e ' * 100}
- ```
- MATH
+ context 'when limits should be applied (default)' do
+ it 'renders lazy load button' do
+ create_and_visit_issue_with_description(lazy_load_description)
- create_and_visit_issue_with_description(description)
+ page.within '.description > .md' do
+ expect(page).to have_selector('.js-lazy-render-math-container', text: /math block exceeds 1000 characters/)
+ end
+ end
- page.within '.description > .md' do
- expect(page).not_to have_selector('.katex-error')
+ it 'allows many expansions', :js do
+ description = <<~MATH
+ ```math
+ #{'\\mod e ' * 100}
+ ```
+ MATH
+
+ create_and_visit_issue_with_description(description)
+
+ page.within '.description > .md' do
+ expect(page).not_to have_selector('.katex-error')
+ end
+ end
+
+ it 'shows error message when too many expansions', :js do
+ create_and_visit_issue_with_description(excessive_expansion_description)
+
+ page.within '.description > .md' do
+ click_button 'Display anyway'
+
+ expect(page).to have_selector('.katex-error', text: /Too many expansions/)
+ end
+ end
+
+ it 'renders without any limits on wiki page', :js do
+ wiki_page = build(:wiki_page, { container: project, content: lazy_load_description })
+ wiki_page.create message: 'math test commit' # rubocop:disable Rails/SaveBang
+ wiki_page = project.wiki.find_page(wiki_page.slug)
+
+ visit project_wiki_path(project, wiki_page)
+
+ wait_for_requests
+
+ page.within '.js-wiki-page-content' do
+ expect(page).not_to have_selector('.js-lazy-render-math')
+ end
+ end
end
- end
- it 'shows error message when too many expansions', :js do
- description = <<~MATH
- ```math
- #{'\\mod e ' * 150}
- ```
- MATH
+ context 'when limits are disabled' do
+ before do
+ stub_application_setting(math_rendering_limits_enabled: false)
+ end
- create_and_visit_issue_with_description(description)
+ it 'does not render lazy load button' do
+ create_and_visit_issue_with_description(lazy_load_description)
- page.within '.description > .md' do
- click_button 'Display anyway'
+ page.within '.description > .md' do
+ expect(page)
+ .not_to have_selector('.js-lazy-render-math-container', text: /math block exceeds 1000 characters/)
+ end
+ end
- expect(page).to have_selector('.katex-error', text: /Too many expansions/)
+ it 'does not show error message when too many expansions', :js do
+ create_and_visit_issue_with_description(excessive_expansion_description)
+
+ page.within '.description > .md' do
+ expect(page).not_to have_selector('.katex-error', text: /Too many expansions/)
+ end
+ end
end
end
@@ -121,26 +166,6 @@ RSpec.describe 'Math rendering', :js, feature_category: :team_planning do
end
end
- it 'renders without any limits on wiki page', :js do
- description = <<~MATH
- ```math
- \Huge \sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}
- ```
- MATH
-
- wiki_page = build(:wiki_page, { container: project, content: description })
- wiki_page.create message: 'math test commit' # rubocop:disable Rails/SaveBang
- wiki_page = project.wiki.find_page(wiki_page.slug)
-
- visit project_wiki_path(project, wiki_page)
-
- wait_for_requests
-
- page.within '.js-wiki-page-content' do
- expect(page).not_to have_selector('.js-lazy-render-math')
- end
- end
-
it 'uses math-content-display for display math', :js do
description = <<~MATH
```math
diff --git a/spec/features/markdown/observability_spec.rb b/spec/features/markdown/observability_spec.rb
deleted file mode 100644
index ec414d4396e..00000000000
--- a/spec/features/markdown/observability_spec.rb
+++ /dev/null
@@ -1,77 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Observability rendering', :js, feature_category: :metrics do
- let_it_be(:group) { create(:group, :public) }
- let_it_be(:project) { create(:project, :repository, group: group) }
- let_it_be(:user) { create(:user) }
- let_it_be(:observable_url) { "https://www.gitlab.com/groups/#{group.path}/-/observability/explore?observability_path=/explore?foo=bar" }
- let_it_be(:expected_observable_url) { "https://observe.gitlab.com/-/#{group.id}/explore?foo=bar" }
-
- before do
- stub_config_setting(url: "https://www.gitlab.com")
- group.add_developer(user)
- sign_in(user)
- end
-
- context 'when user is a developer of the embedded group' do
- context 'when embedding in an issue' do
- let(:issue) do
- create(:issue, project: project, description: observable_url)
- end
-
- before do
- visit project_issue_path(project, issue)
- wait_for_requests
- end
-
- it_behaves_like 'embeds observability'
- end
-
- context 'when embedding in an MR' do
- let(:merge_request) do
- create(:merge_request, source_project: project, target_project: project, description: observable_url)
- end
-
- before do
- visit merge_request_path(merge_request)
- wait_for_requests
- end
-
- it_behaves_like 'embeds observability'
- end
- end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(observability_group_tab: false)
- end
-
- context 'when embedding in an issue' do
- let(:issue) do
- create(:issue, project: project, description: observable_url)
- end
-
- before do
- visit project_issue_path(project, issue)
- wait_for_requests
- end
-
- it_behaves_like 'does not embed observability'
- end
-
- context 'when embedding in an MR' do
- let(:merge_request) do
- create(:merge_request, source_project: project, target_project: project, description: observable_url)
- end
-
- before do
- visit merge_request_path(merge_request)
- wait_for_requests
- end
-
- it_behaves_like 'does not embed observability'
- end
- end
-end
diff --git a/spec/features/merge_request/admin_views_hidden_merge_request_spec.rb b/spec/features/merge_request/admin_views_hidden_merge_request_spec.rb
index 0dbb42a633b..f399e6a24ca 100644
--- a/spec/features/merge_request/admin_views_hidden_merge_request_spec.rb
+++ b/spec/features/merge_request/admin_views_hidden_merge_request_spec.rb
@@ -11,17 +11,16 @@ RSpec.describe 'Admin views hidden merge request', feature_category: :insider_th
before do
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
visit(project_merge_request_path(project, merge_request))
end
- it 'shows a hidden merge request icon' do
- page.within('.detail-page-header-body') do
- tooltip = format(_('This %{issuable} is hidden because its author has been banned'),
- issuable: _('merge request'))
- expect(page).to have_css("div[data-testid='hidden'][title='#{tooltip}']")
- expect(page).to have_css('svg[data-testid="spam-icon"]')
- end
+ it 'shows a hidden merge request icon', :enable_admin_mode do
+ expect(page).to have_css 'svg[data-testid="spam-icon"]'
+
+ find('svg[data-testid="spam-icon"]').hover
+
+ expect(page).to have_text format(_('This %{issuable} is hidden because its author has been banned.'),
+ issuable: _('merge request'))
end
end
end
diff --git a/spec/features/merge_request/hide_default_award_emojis_spec.rb b/spec/features/merge_request/hide_default_award_emojis_spec.rb
new file mode 100644
index 00000000000..e42db446406
--- /dev/null
+++ b/spec/features/merge_request/hide_default_award_emojis_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Merge request > User does not see default award emoji', :js, feature_category: :code_review_workflow do
+ let_it_be(:project) { create(:project, :public, :repository, show_default_award_emojis: false) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project, author: user) }
+
+ before_all do
+ project.add_owner(user)
+ end
+
+ before do
+ sign_in(user)
+
+ visit project_merge_request_path(project, merge_request)
+ wait_for_requests
+ end
+
+ it { expect(page).not_to have_selector('[data-testid="award-button"]') }
+end
diff --git a/spec/features/merge_request/user_accepts_merge_request_spec.rb b/spec/features/merge_request/user_accepts_merge_request_spec.rb
index 38291573256..e3989a8a192 100644
--- a/spec/features/merge_request/user_accepts_merge_request_spec.rb
+++ b/spec/features/merge_request/user_accepts_merge_request_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'User accepts a merge request', :js, :sidekiq_might_not_need_inline, feature_category: :code_review_workflow do
- include ContentEditorHelpers
-
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
let(:project) { create(:project, :public, :repository) }
let(:user) { create(:user) }
@@ -17,7 +15,6 @@ RSpec.describe 'User accepts a merge request', :js, :sidekiq_might_not_need_inli
context 'presents merged merge request content' do
it 'when merge method is set to merge commit' do
visit(merge_request_path(merge_request))
- close_rich_text_promo_popover_if_present
click_merge_button
@@ -33,7 +30,6 @@ RSpec.describe 'User accepts a merge request', :js, :sidekiq_might_not_need_inli
merge_request = create(:merge_request, :rebased, source_project: project)
visit(merge_request_path(merge_request))
- close_rich_text_promo_popover_if_present
click_merge_button
@@ -44,7 +40,6 @@ RSpec.describe 'User accepts a merge request', :js, :sidekiq_might_not_need_inli
merge_request = create(:merge_request, :rebased, source_project: project, squash: true)
visit(merge_request_path(merge_request))
- close_rich_text_promo_popover_if_present
click_merge_button
@@ -56,7 +51,6 @@ RSpec.describe 'User accepts a merge request', :js, :sidekiq_might_not_need_inli
context 'with removing the source branch' do
before do
visit(merge_request_path(merge_request))
- close_rich_text_promo_popover_if_present
end
it 'accepts a merge request' do
@@ -75,7 +69,6 @@ RSpec.describe 'User accepts a merge request', :js, :sidekiq_might_not_need_inli
context 'without removing the source branch' do
before do
visit(merge_request_path(merge_request))
- close_rich_text_promo_popover_if_present
end
it 'accepts a merge request' do
@@ -93,7 +86,6 @@ RSpec.describe 'User accepts a merge request', :js, :sidekiq_might_not_need_inli
context 'when a URL has an anchor' do
before do
visit(merge_request_path(merge_request, anchor: 'note_123'))
- close_rich_text_promo_popover_if_present
end
it 'accepts a merge request' do
@@ -114,7 +106,6 @@ RSpec.describe 'User accepts a merge request', :js, :sidekiq_might_not_need_inli
merge_request.mark_as_mergeable
visit(merge_request_path(merge_request))
- close_rich_text_promo_popover_if_present
end
it 'accepts a merge request' do
diff --git a/spec/features/merge_request/user_comments_on_diff_spec.rb b/spec/features/merge_request/user_comments_on_diff_spec.rb
index 215fe1f7521..9135f5c7b98 100644
--- a/spec/features/merge_request/user_comments_on_diff_spec.rb
+++ b/spec/features/merge_request/user_comments_on_diff_spec.rb
@@ -5,7 +5,6 @@ require 'spec_helper'
RSpec.describe 'User comments on a diff', :js, feature_category: :code_review_workflow do
include MergeRequestDiffHelpers
include RepoHelpers
- include ContentEditorHelpers
let(:project) { create(:project, :repository) }
let(:merge_request) do
@@ -140,7 +139,6 @@ RSpec.describe 'User comments on a diff', :js, feature_category: :code_review_wo
end
visit(merge_request_path(merge_request))
- close_rich_text_promo_popover_if_present
page.within('.notes .discussion') do
find('.js-vue-discussion-reply').click
diff --git a/spec/features/merge_request/user_creates_custom_emoji_spec.rb b/spec/features/merge_request/user_creates_custom_emoji_spec.rb
new file mode 100644
index 00000000000..35593836dab
--- /dev/null
+++ b/spec/features/merge_request/user_creates_custom_emoji_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Merge request > User creates custom emoji', :js, feature_category: :code_review_workflow do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public, :repository, namespace: group) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project, author: user) }
+
+ context 'with user who has permissions' do
+ before_all do
+ group.add_owner(user)
+ end
+
+ before do
+ sign_in(user)
+
+ visit project_merge_request_path(project, merge_request)
+ wait_for_requests
+ end
+
+ it 'shows link to create custom emoji' do
+ first('.add-reaction-button').click
+
+ wait_for_requests
+
+ click_link 'Create new emoji'
+
+ wait_for_requests
+
+ find_by_testid("custom-emoji-name-input").set 'parrot'
+ find_by_testid("custom-emoji-url-input").set 'https://example.com'
+
+ click_button 'Save'
+
+ wait_for_requests
+
+ expect(page).to have_content(':parrot:')
+ end
+ end
+
+ context 'with user who does not have permissions' do
+ before do
+ sign_in(user)
+
+ visit project_merge_request_path(project, merge_request)
+ wait_for_requests
+ end
+
+ it 'shows link to create custom emoji' do
+ first('.add-reaction-button').click
+
+ wait_for_requests
+
+ expect(page).not_to have_link('Create new emoji')
+ end
+ end
+end
diff --git a/spec/features/merge_request/user_creates_image_diff_notes_spec.rb b/spec/features/merge_request/user_creates_image_diff_notes_spec.rb
index 6d3268ffe3a..3ff6fa2040b 100644
--- a/spec/features/merge_request/user_creates_image_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_creates_image_diff_notes_spec.rb
@@ -72,7 +72,7 @@ RSpec.describe 'Merge request > User creates image diff notes', :js, feature_cat
end
end
- %w(inline parallel).each do |view|
+ %w[inline parallel].each do |view|
context "#{view} view" do
let(:position) do
build(:image_diff_position, file: path, diff_refs: merge_request.diff_refs)
diff --git a/spec/features/merge_request/user_creates_mr_spec.rb b/spec/features/merge_request/user_creates_mr_spec.rb
index f48315a1636..950b64bb395 100644
--- a/spec/features/merge_request/user_creates_mr_spec.rb
+++ b/spec/features/merge_request/user_creates_mr_spec.rb
@@ -89,77 +89,34 @@ RSpec.describe 'Merge request > User creates MR', feature_category: :code_review
end
end
- context 'with the visible_label_selection_on_metadata feature flag enabled' do
- before do
- stub_feature_flags(visible_label_selection_on_metadata: true)
- end
-
- context 'non-fork merge request' do
- include_context 'merge request create context'
- it_behaves_like 'a creatable merge request with visible selected labels'
- end
-
- context 'from a forked project' do
- let(:canonical_project) { create(:project, :public, :repository) }
-
- let(:source_project) do
- fork_project(canonical_project, user,
- repository: true,
- namespace: user.namespace)
- end
-
- context 'to canonical project' do
- include_context 'merge request create context'
- it_behaves_like 'a creatable merge request with visible selected labels'
- end
-
- context 'to another forked project' do
- let(:target_project) do
- fork_project(canonical_project, user,
- repository: true,
- namespace: user.namespace)
- end
-
- include_context 'merge request create context'
- it_behaves_like 'a creatable merge request with visible selected labels'
- end
- end
+ context 'non-fork merge request' do
+ include_context 'merge request create context'
+ it_behaves_like 'a creatable merge request with visible selected labels'
end
- context 'with the visible_label_selection_on_metadata feature flag disabled' do
- before do
- stub_feature_flags(visible_label_selection_on_metadata: false)
+ context 'from a forked project' do
+ let(:canonical_project) { create(:project, :public, :repository) }
+
+ let(:source_project) do
+ fork_project(canonical_project, user,
+ repository: true,
+ namespace: user.namespace)
end
- context 'non-fork merge request' do
+ context 'to canonical project' do
include_context 'merge request create context'
- it_behaves_like 'a creatable merge request'
+ it_behaves_like 'a creatable merge request with visible selected labels'
end
- context 'from a forked project' do
- let(:canonical_project) { create(:project, :public, :repository) }
-
- let(:source_project) do
+ context 'to another forked project' do
+ let(:target_project) do
fork_project(canonical_project, user,
repository: true,
namespace: user.namespace)
end
- context 'to canonical project' do
- include_context 'merge request create context'
- it_behaves_like 'a creatable merge request'
- end
-
- context 'to another forked project' do
- let(:target_project) do
- fork_project(canonical_project, user,
- repository: true,
- namespace: user.namespace)
- end
-
- include_context 'merge request create context'
- it_behaves_like 'a creatable merge request'
- end
+ include_context 'merge request create context'
+ it_behaves_like 'a creatable merge request with visible selected labels'
end
end
diff --git a/spec/features/merge_request/user_edits_merge_request_spec.rb b/spec/features/merge_request/user_edits_merge_request_spec.rb
index b1cff72c374..584a17ae33d 100644
--- a/spec/features/merge_request/user_edits_merge_request_spec.rb
+++ b/spec/features/merge_request/user_edits_merge_request_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'User edits a merge request', :js, feature_category: :code_review_workflow do
- include ContentEditorHelpers
-
let(:project) { create(:project, :repository) }
let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
let(:user) { create(:user) }
@@ -87,8 +85,6 @@ RSpec.describe 'User edits a merge request', :js, feature_category: :code_review
describe 'changing target branch' do
it 'allows user to change target branch' do
- close_rich_text_promo_popover_if_present
-
expect(page).to have_content('From master into feature')
first('.js-target-branch').click
diff --git a/spec/features/merge_request/user_edits_mr_spec.rb b/spec/features/merge_request/user_edits_mr_spec.rb
index bf237e07ac8..fca3a76949a 100644
--- a/spec/features/merge_request/user_edits_mr_spec.rb
+++ b/spec/features/merge_request/user_edits_mr_spec.rb
@@ -198,39 +198,15 @@ RSpec.describe 'Merge request > User edits MR', feature_category: :code_review_w
stub_licensed_features(multiple_merge_request_assignees: false)
end
- context 'with the visible_label_selection_on_metadata feature flag enabled' do
- before do
- stub_feature_flags(visible_label_selection_on_metadata: true)
- end
-
- context 'non-fork merge request' do
- include_context 'merge request edit context'
- it_behaves_like 'an editable merge request with visible selected labels'
- end
-
- context 'for a forked project' do
- let(:source_project) { fork_project(target_project, nil, repository: true) }
-
- include_context 'merge request edit context'
- it_behaves_like 'an editable merge request with visible selected labels'
- end
+ context 'non-fork merge request' do
+ include_context 'merge request edit context'
+ it_behaves_like 'an editable merge request with visible selected labels'
end
- context 'with the visible_label_selection_on_metadata feature flag disabled' do
- before do
- stub_feature_flags(visible_label_selection_on_metadata: false)
- end
-
- context 'non-fork merge request' do
- include_context 'merge request edit context'
- it_behaves_like 'an editable merge request'
- end
+ context 'for a forked project' do
+ let(:source_project) { fork_project(target_project, nil, repository: true) }
- context 'for a forked project' do
- let(:source_project) { fork_project(target_project, nil, repository: true) }
-
- include_context 'merge request edit context'
- it_behaves_like 'an editable merge request'
- end
+ include_context 'merge request edit context'
+ it_behaves_like 'an editable merge request with visible selected labels'
end
end
diff --git a/spec/features/merge_request/user_manages_subscription_spec.rb b/spec/features/merge_request/user_manages_subscription_spec.rb
index 84387965989..0853e3cb773 100644
--- a/spec/features/merge_request/user_manages_subscription_spec.rb
+++ b/spec/features/merge_request/user_manages_subscription_spec.rb
@@ -3,16 +3,15 @@
require 'spec_helper'
RSpec.describe 'User manages subscription', :js, feature_category: :code_review_workflow do
- include CookieHelper
-
let(:project) { create(:project, :public, :repository) }
let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
let(:user) { create(:user) }
let(:moved_mr_sidebar_enabled) { false }
+ let(:notifications_todos_buttons_enabled) { false }
before do
stub_feature_flags(moved_mr_sidebar: moved_mr_sidebar_enabled)
- set_cookie('new-actions-popover-viewed', 'true')
+ stub_feature_flags(notifications_todos_buttons: notifications_todos_buttons_enabled)
project.add_maintainer(user)
sign_in(user)
@@ -60,4 +59,19 @@ RSpec.describe 'User manages subscription', :js, feature_category: :code_review_
expect(page).to have_selector('.gl-toggle:not(.is-checked)')
end
end
+
+ context 'with notifications_todos_buttons feature flag enabled' do
+ let(:moved_mr_sidebar_enabled) { true }
+ let(:notifications_todos_buttons_enabled) { true }
+
+ it 'toggles subscription' do
+ wait_for_requests
+
+ find('[data-testid="subscribe-button"]').click
+ expect(page).to have_selector('[data-testid="notifications-off-icon"]')
+
+ find('[data-testid="subscribe-button"]').click
+ expect(page).to have_selector('[data-testid="notifications-icon"]')
+ end
+ end
end
diff --git a/spec/features/merge_request/user_merges_immediately_spec.rb b/spec/features/merge_request/user_merges_immediately_spec.rb
index 5fe9947d0df..71af2045bab 100644
--- a/spec/features/merge_request/user_merges_immediately_spec.rb
+++ b/spec/features/merge_request/user_merges_immediately_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'Merge requests > User merges immediately', :js, feature_category: :code_review_workflow do
- include ContentEditorHelpers
-
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
let!(:merge_request) do
@@ -33,7 +31,6 @@ RSpec.describe 'Merge requests > User merges immediately', :js, feature_category
project.add_maintainer(user)
sign_in(user)
visit project_merge_request_path(project, merge_request)
- close_rich_text_promo_popover_if_present
end
it 'enables merge immediately' do
diff --git a/spec/features/merge_request/user_merges_merge_request_spec.rb b/spec/features/merge_request/user_merges_merge_request_spec.rb
index aee42784d05..ede686cc700 100644
--- a/spec/features/merge_request/user_merges_merge_request_spec.rb
+++ b/spec/features/merge_request/user_merges_merge_request_spec.rb
@@ -3,8 +3,6 @@
require "spec_helper"
RSpec.describe "User merges a merge request", :js, feature_category: :code_review_workflow do
- include ContentEditorHelpers
-
let_it_be(:user) { create(:user, :no_super_sidebar) }
before do
@@ -31,7 +29,6 @@ RSpec.describe "User merges a merge request", :js, feature_category: :code_revie
create(:merge_request, source_project: project, source_branch: 'branch-1')
visit(merge_request_path(merge_request))
- close_rich_text_promo_popover_if_present
expect(page).to have_css('.js-merge-counter', text: '2')
diff --git a/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb b/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb
index 62404077cea..78814e36cfe 100644
--- a/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb
+++ b/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'Merge request > User merges only if pipeline succeeds', :js, feature_category: :code_review_workflow do
- include ContentEditorHelpers
-
let(:merge_request) { create(:merge_request_with_diffs) }
let(:project) { merge_request.target_project }
@@ -116,7 +114,6 @@ RSpec.describe 'Merge request > User merges only if pipeline succeeds', :js, fea
it 'allows MR to be merged immediately' do
visit project_merge_request_path(project, merge_request)
- close_rich_text_promo_popover_if_present
wait_for_requests
diff --git a/spec/features/merge_request/user_opens_checkout_branch_modal_spec.rb b/spec/features/merge_request/user_opens_checkout_branch_modal_spec.rb
index c12816b6521..230111fe439 100644
--- a/spec/features/merge_request/user_opens_checkout_branch_modal_spec.rb
+++ b/spec/features/merge_request/user_opens_checkout_branch_modal_spec.rb
@@ -4,14 +4,12 @@ require 'spec_helper'
RSpec.describe 'Merge request > User opens checkout branch modal', :js, feature_category: :code_review_workflow do
include ProjectForksHelper
- include CookieHelper
let_it_be(:user) { create(:user, :no_super_sidebar) }
let_it_be(:project) { create(:project, :public, :repository, namespace: user.namespace) }
before do
sign_in(user)
- set_cookie('new-actions-popover-viewed', 'true')
end
describe 'for fork' do
diff --git a/spec/features/merge_request/user_posts_notes_spec.rb b/spec/features/merge_request/user_posts_notes_spec.rb
index 0278d2af08f..ca3503b187c 100644
--- a/spec/features/merge_request/user_posts_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_notes_spec.rb
@@ -4,8 +4,6 @@ require 'spec_helper'
RSpec.describe 'Merge request > User posts notes', :js, feature_category: :code_review_workflow do
include NoteInteractionHelpers
- include ContentEditorHelpers
-
let_it_be(:project) { create(:project, :repository) }
let(:user) { project.creator }
@@ -22,7 +20,6 @@ RSpec.describe 'Merge request > User posts notes', :js, feature_category: :code_
sign_in(user)
visit project_merge_request_path(project, merge_request)
- close_rich_text_promo_popover_if_present
end
subject { page }
diff --git a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
index e8ffca43aa2..4bdef20304a 100644
--- a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
+++ b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'Merge request > User resolves diff notes and threads', :js, feature_category: :code_review_workflow do
- include ContentEditorHelpers
-
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
let(:guest) { create(:user) }
@@ -543,7 +541,5 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js, feat
# Wait for MR widget to load
wait_for_requests
-
- close_rich_text_promo_popover_if_present
end
end
diff --git a/spec/features/merge_request/user_reverts_merge_request_spec.rb b/spec/features/merge_request/user_reverts_merge_request_spec.rb
index 68adc4d47b6..8c782056aa4 100644
--- a/spec/features/merge_request/user_reverts_merge_request_spec.rb
+++ b/spec/features/merge_request/user_reverts_merge_request_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'User reverts a merge request', :js, feature_category: :code_review_workflow do
- include ContentEditorHelpers
-
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
let(:project) { create(:project, :public, :repository) }
let(:user) { create(:user) }
@@ -15,7 +13,6 @@ RSpec.describe 'User reverts a merge request', :js, feature_category: :code_revi
sign_in(user)
visit(merge_request_path(merge_request))
- close_rich_text_promo_popover_if_present
page.within('.mr-state-widget') do
click_button 'Merge'
diff --git a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
index 94393ea00e4..98ea72b9569 100644
--- a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
@@ -83,7 +83,7 @@ RSpec.describe 'Merge request > User sees avatars on diff notes', :js, feature_c
end
end
- %w(parallel).each do |view|
+ %w[parallel].each do |view|
context "#{view} view" do
before do
visit diffs_project_merge_request_path(project, merge_request, view: view)
diff --git a/spec/features/merge_request/user_sees_check_out_branch_modal_spec.rb b/spec/features/merge_request/user_sees_check_out_branch_modal_spec.rb
index e55ecd2a531..921c12134a9 100644
--- a/spec/features/merge_request/user_sees_check_out_branch_modal_spec.rb
+++ b/spec/features/merge_request/user_sees_check_out_branch_modal_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'Merge request > User sees check out branch modal', :js, feature_category: :code_review_workflow do
- include CookieHelper
-
let_it_be(:user) { create(:user, :no_super_sidebar) }
let_it_be(:project) { create(:project, :public, :repository, creator: user) }
let_it_be(:merge_request) { create(:merge_request, source_project: project) }
@@ -12,7 +10,6 @@ RSpec.describe 'Merge request > User sees check out branch modal', :js, feature_
before do
sign_in(user)
- set_cookie('new-actions-popover-viewed', 'true')
visit project_merge_request_path(project, merge_request)
wait_for_requests
diff --git a/spec/features/merge_request/user_sees_discussions_spec.rb b/spec/features/merge_request/user_sees_discussions_spec.rb
index 3482d468bc1..3ca5ac23ddb 100644
--- a/spec/features/merge_request/user_sees_discussions_spec.rb
+++ b/spec/features/merge_request/user_sees_discussions_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'Merge request > User sees threads', :js, feature_category: :code_review_workflow do
- include ContentEditorHelpers
-
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
let(:merge_request) { create(:merge_request, source_project: project) }
@@ -31,7 +29,6 @@ RSpec.describe 'Merge request > User sees threads', :js, feature_category: :code
before do
visit project_merge_request_path(project, merge_request)
- close_rich_text_promo_popover_if_present
end
context 'active threads' do
@@ -74,7 +71,6 @@ RSpec.describe 'Merge request > User sees threads', :js, feature_category: :code
before do
visit project_merge_request_path(project, merge_request)
- close_rich_text_promo_popover_if_present
end
# TODO: https://gitlab.com/gitlab-org/gitlab-foss/issues/48034
diff --git a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
index e052d06c158..69eb6b0dc17 100644
--- a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
@@ -86,7 +86,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'sees branch pipelines and detached merge request pipelines in correct order' do
page.within('.ci-table') do
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'created', count: 2)
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Created', count: 2)
expect(first('[data-testid="pipeline-url-link"]')).to have_content("##{detached_merge_request_pipeline.id}")
end
end
@@ -122,7 +122,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'sees branch pipelines and detached merge request pipelines in correct order' do
page.within('.ci-table') do
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'pending', count: 4)
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Pending', count: 4)
expect(all('[data-testid="pipeline-url-link"]')[0])
.to have_content("##{detached_merge_request_pipeline_2.id}")
@@ -220,7 +220,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'sees a branch pipeline in pipeline tab' do
page.within('.ci-table') do
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'created', count: 1)
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Created', count: 1)
expect(first('[data-testid="pipeline-url-link"]')).to have_content("##{push_pipeline.id}")
end
end
@@ -273,7 +273,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'sees branch pipelines and detached merge request pipelines in correct order' do
page.within('.ci-table') do
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'pending', count: 2)
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Pending', count: 2)
expect(first('[data-testid="pipeline-url-link"]')).to have_content("##{detached_merge_request_pipeline.id}")
end
end
@@ -289,7 +289,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'sees pipeline list in forked project' do
visit project_pipelines_path(forked_project)
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'pending', count: 2)
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Pending', count: 2)
end
context 'when a user updated a merge request from a forked project to the parent project' do
@@ -315,7 +315,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'sees branch pipelines and detached merge request pipelines in correct order' do
page.within('.ci-table') do
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'pending', count: 4)
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Pending', count: 4)
expect(all('[data-testid="pipeline-url-link"]')[0])
.to have_content("##{detached_merge_request_pipeline_2.id}")
@@ -358,7 +358,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'sees pipeline list in forked project' do
visit project_pipelines_path(forked_project)
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'pending', count: 4)
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Pending', count: 4)
end
end
diff --git a/spec/features/merge_request/user_sees_merge_widget_spec.rb b/spec/features/merge_request/user_sees_merge_widget_spec.rb
index 1db09790e1c..96cad397441 100644
--- a/spec/features/merge_request/user_sees_merge_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_widget_spec.rb
@@ -6,7 +6,6 @@ RSpec.describe 'Merge request > User sees merge widget', :js, feature_category:
include ProjectForksHelper
include TestReportsHelper
include ReactiveCachingHelpers
- include ContentEditorHelpers
let(:project) { create(:project, :repository) }
let(:project_only_mwps) { create(:project, :repository, only_allow_merge_if_pipeline_succeeds: true) }
@@ -58,7 +57,6 @@ RSpec.describe 'Merge request > User sees merge widget', :js, feature_category:
merge_request.update!(head_pipeline: pipeline)
deployment.update!(status: :success)
visit project_merge_request_path(project, merge_request)
- close_rich_text_promo_popover_if_present
end
it 'shows environments link' do
@@ -132,7 +130,6 @@ RSpec.describe 'Merge request > User sees merge widget', :js, feature_category:
create(:ci_build, :pending, pipeline: pipeline)
visit project_merge_request_path(project, merge_request)
- close_rich_text_promo_popover_if_present
end
it 'has merge button that shows modal when pipeline does not succeeded' do
@@ -409,7 +406,6 @@ RSpec.describe 'Merge request > User sees merge widget', :js, feature_category:
before do
allow_any_instance_of(Repository).to receive(:merge).and_return(false)
visit project_merge_request_path(project, merge_request)
- close_rich_text_promo_popover_if_present
end
it 'updates the MR widget', :sidekiq_might_not_need_inline do
@@ -431,7 +427,6 @@ RSpec.describe 'Merge request > User sees merge widget', :js, feature_category:
sign_in(user2)
merge_request.update!(source_project: forked_project)
visit project_merge_request_path(project, merge_request)
- close_rich_text_promo_popover_if_present
end
it 'user can merge into the target project', :sidekiq_inline do
@@ -469,7 +464,6 @@ RSpec.describe 'Merge request > User sees merge widget', :js, feature_category:
allow_any_instance_of(MergeRequest).to receive(:merge_ongoing?).and_return(true)
visit project_merge_request_path(project, merge_request)
- close_rich_text_promo_popover_if_present
wait_for_requests
diff --git a/spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb b/spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb
index ad7ed1ceada..92bedc47718 100644
--- a/spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb
+++ b/spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe 'Merge request > User sees notes from forked project', :js, feature_category: :code_review_workflow do
include ProjectForksHelper
- include ContentEditorHelpers
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
@@ -30,7 +29,6 @@ RSpec.describe 'Merge request > User sees notes from forked project', :js, featu
it 'user can reply to the comment', :sidekiq_might_not_need_inline do
visit project_merge_request_path(project, merge_request)
- close_rich_text_promo_popover_if_present
expect(page).to have_content('A commit comment')
diff --git a/spec/features/merge_request/user_sees_pipelines_spec.rb b/spec/features/merge_request/user_sees_pipelines_spec.rb
index bb3890f5242..a68b3c444fe 100644
--- a/spec/features/merge_request/user_sees_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_pipelines_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe 'Merge request > User sees pipelines', :js, feature_category: :co
wait_for_requests
page.within(find('[data-testid="pipeline-table-row"]', match: :first)) do
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'passed')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Passed')
expect(page).to have_content(pipeline.id)
expect(page).to have_content('API')
expect(page).to have_css('[data-testid="pipeline-mini-graph"]')
diff --git a/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb b/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb
index 16578af238d..654c71c87e0 100644
--- a/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb
+++ b/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe 'Merge request > User selects branches for new MR', :js, feature_category: :code_review_workflow do
include ListboxHelpers
- include CookieHelper
let_it_be(:user) { create(:user, :no_super_sidebar) }
let_it_be(:project) { create(:project, :public, :repository, namespace: user.namespace) }
@@ -17,7 +16,6 @@ RSpec.describe 'Merge request > User selects branches for new MR', :js, feature_
before do
sign_in(user)
- set_cookie('new-actions-popover-viewed', 'true')
end
it 'selects the source branch sha when a tag with the same name exists' do
diff --git a/spec/features/merge_request/user_sets_to_auto_merge_spec.rb b/spec/features/merge_request/user_sets_to_auto_merge_spec.rb
index 4dc0c03aedc..e43c749f93f 100644
--- a/spec/features/merge_request/user_sets_to_auto_merge_spec.rb
+++ b/spec/features/merge_request/user_sets_to_auto_merge_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'Merge request > User sets to auto-merge', :js, feature_category: :code_review_workflow do
- include ContentEditorHelpers
-
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
let(:merge_request) do
@@ -42,7 +40,6 @@ RSpec.describe 'Merge request > User sets to auto-merge', :js, feature_category:
describe 'setting to auto-merge when pipeline succeeds' do
shared_examples 'Set to auto-merge activator' do
it 'activates auto-merge feature', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/410055' do
- close_rich_text_promo_popover_if_present
expect(page).to have_content 'Set to auto-merge'
click_button "Set to auto-merge"
wait_for_requests
@@ -60,7 +57,6 @@ RSpec.describe 'Merge request > User sets to auto-merge', :js, feature_category:
context 'when enabled after it was previously canceled' do
before do
- close_rich_text_promo_popover_if_present
click_button "Set to auto-merge"
wait_for_requests
@@ -119,8 +115,6 @@ RSpec.describe 'Merge request > User sets to auto-merge', :js, feature_category:
end
it 'allows to cancel the auto-merge', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/410055' do
- close_rich_text_promo_popover_if_present
-
click_button "Cancel auto-merge"
expect(page).to have_button "Set to auto-merge"
diff --git a/spec/features/merge_request/user_squashes_merge_request_spec.rb b/spec/features/merge_request/user_squashes_merge_request_spec.rb
index 200f310d929..5fd0f353e56 100644
--- a/spec/features/merge_request/user_squashes_merge_request_spec.rb
+++ b/spec/features/merge_request/user_squashes_merge_request_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'User squashes a merge request', :js, feature_category: :code_review_workflow do
- include ContentEditorHelpers
-
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:source_branch) { 'csv' }
@@ -14,8 +12,6 @@ RSpec.describe 'User squashes a merge request', :js, feature_category: :code_rev
shared_examples 'squash' do
it 'squashes the commits into a single commit, and adds a merge commit', :sidekiq_might_not_need_inline do
- close_rich_text_promo_popover_if_present
-
expect(page).to have_content('Merged')
latest_master_commits = project.repository.commits_between(original_head.sha, 'master').map(&:raw)
@@ -41,16 +37,12 @@ RSpec.describe 'User squashes a merge request', :js, feature_category: :code_rev
shared_examples 'no squash' do
it 'accepts the merge request without squashing', :sidekiq_might_not_need_inline do
- close_rich_text_promo_popover_if_present
-
expect(page).to have_content('Merged')
expect(project.repository).to be_merged_to_root_ref(source_branch)
end
end
def accept_mr
- close_rich_text_promo_popover_if_present
-
expect(page).to have_button('Merge')
uncheck 'Delete source branch' unless protected_source_branch
diff --git a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
index 1a814aeb89d..6152d9f8259 100644
--- a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
+++ b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
@@ -5,7 +5,6 @@ require 'spec_helper'
RSpec.describe 'User comments on a diff', :js, feature_category: :code_review_workflow do
include MergeRequestDiffHelpers
include RepoHelpers
- include ContentEditorHelpers
def expect_suggestion_has_content(element, expected_changing_content, expected_suggested_content)
changing_content = element.all(:css, '.line_holder.old').map { |el| el.text(normalize_ws: true) }
@@ -36,7 +35,6 @@ RSpec.describe 'User comments on a diff', :js, feature_category: :code_review_wo
context 'single suggestion note' do
it 'hides suggestion popover' do
click_diff_line(find_by_scrolling("[id='#{sample_compare.changes[1][:line_code]}']"))
- close_rich_text_promo_popover_if_present
expect(page).to have_selector('.diff-suggest-popover')
diff --git a/spec/features/merge_request/user_views_open_merge_request_spec.rb b/spec/features/merge_request/user_views_open_merge_request_spec.rb
index cd0ea639d4d..bc93e6caccb 100644
--- a/spec/features/merge_request/user_views_open_merge_request_spec.rb
+++ b/spec/features/merge_request/user_views_open_merge_request_spec.rb
@@ -3,24 +3,10 @@
require 'spec_helper'
RSpec.describe 'User views an open merge request', feature_category: :code_review_workflow do
- include ContentEditorHelpers
-
let(:merge_request) do
create(:merge_request, source_project: project, target_project: project, description: '# Description header')
end
- context 'feature flags' do
- let_it_be(:project) { create(:project, :public, :repository) }
-
- it 'pushes content_editor_on_issues feature flag to frontend' do
- stub_feature_flags(content_editor_on_issues: true)
-
- visit merge_request_path(merge_request)
-
- expect(page).to have_pushed_frontend_feature_flags(contentEditorOnIssues: true)
- end
- end
-
context 'when a merge request does not have repository' do
let(:project) { create(:project, :public, :repository) }
@@ -55,7 +41,6 @@ RSpec.describe 'User views an open merge request', feature_category: :code_revie
sign_in(user)
visit(edit_project_merge_request_path(project, merge_request))
- close_rich_text_promo_popover_if_present
end
it 'renders empty description preview' do
diff --git a/spec/features/merge_requests/admin_views_hidden_merge_requests_spec.rb b/spec/features/merge_requests/admin_views_hidden_merge_requests_spec.rb
index e7727fbb9dc..7e33946f713 100644
--- a/spec/features/merge_requests/admin_views_hidden_merge_requests_spec.rb
+++ b/spec/features/merge_requests/admin_views_hidden_merge_requests_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe 'Admin views hidden merge requests', feature_category: :insider_t
it 'shows a hidden merge request icon' do
page.within("#merge_request_#{merge_request.id}") do
- tooltip = format(_('This %{issuable} is hidden because its author has been banned'),
+ tooltip = format(_('This %{issuable} is hidden because its author has been banned.'),
issuable: _('merge request'))
expect(page).to have_css("span[title='#{tooltip}']")
expect(page).to have_css('svg[data-testid="spam-icon"]')
diff --git a/spec/features/merge_requests/user_sees_note_updates_in_real_time_spec.rb b/spec/features/merge_requests/user_sees_note_updates_in_real_time_spec.rb
index 2c7567b1b40..96f7c26944e 100644
--- a/spec/features/merge_requests/user_sees_note_updates_in_real_time_spec.rb
+++ b/spec/features/merge_requests/user_sees_note_updates_in_real_time_spec.rb
@@ -4,14 +4,12 @@ require 'spec_helper'
RSpec.describe 'Merge request note updates in real time', :js, feature_category: :code_review_workflow do
include NoteInteractionHelpers
- include ContentEditorHelpers
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:merge_request) { create(:merge_request, source_project: project) }
before do
visit project_merge_request_path(project, merge_request)
- close_rich_text_promo_popover_if_present
end
describe 'new notes' do
diff --git a/spec/features/merge_requests/user_views_all_merge_requests_spec.rb b/spec/features/merge_requests/user_views_all_merge_requests_spec.rb
index c2eb43d7476..d8fb3171b32 100644
--- a/spec/features/merge_requests/user_views_all_merge_requests_spec.rb
+++ b/spec/features/merge_requests/user_views_all_merge_requests_spec.rb
@@ -4,7 +4,11 @@ require 'spec_helper'
RSpec.describe 'User views all merge requests', feature_category: :code_review_workflow do
let!(:closed_merge_request) { create(:closed_merge_request, source_project: project, target_project: project) }
- let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let!(:issue) { create(:issue, project: project) }
+ let!(:merge_request) do
+ create(:merge_request, source_project: project, target_project: project, title: "##{issue.iid} my title")
+ end
+
let(:project) { create(:project, :public) }
before do
@@ -14,4 +18,10 @@ RSpec.describe 'User views all merge requests', feature_category: :code_review_w
it 'shows all merge requests' do
expect(page).to have_content(merge_request.title).and have_content(closed_merge_request.title)
end
+
+ it 'links to listed merge requests' do
+ expect(page).to have_link(merge_request.title, href: project_merge_request_path(project, merge_request))
+ expect(page).to have_link(closed_merge_request.title,
+ href: project_merge_request_path(project, closed_merge_request))
+ end
end
diff --git a/spec/features/nav/pinned_nav_items_spec.rb b/spec/features/nav/pinned_nav_items_spec.rb
index 1a3ac973ed4..b4d6464ec50 100644
--- a/spec/features/nav/pinned_nav_items_spec.rb
+++ b/spec/features/nav/pinned_nav_items_spec.rb
@@ -105,7 +105,7 @@ RSpec.describe 'Navigation menu item pinning', :js, feature_category: :navigatio
it 'can be unpinned from within its section' do
section = find("button", text: 'Operate')
- within(section.sibling('ul')) do
+ within(section.sibling('div')) do
remove_pin('Terraform modules')
end
diff --git a/spec/features/populate_new_pipeline_vars_with_params_spec.rb b/spec/features/populate_new_pipeline_vars_with_params_spec.rb
index bcda30ccb84..8bb5f2514ef 100644
--- a/spec/features/populate_new_pipeline_vars_with_params_spec.rb
+++ b/spec/features/populate_new_pipeline_vars_with_params_spec.rb
@@ -15,16 +15,16 @@ RSpec.describe "Populate new pipeline CI variables with url params", :js, featur
end
it "var[key1]=value1 populates env_var variable correctly" do
- page.within(all("[data-testid='ci-variable-row']")[0]) do
- expect(find("[data-testid='pipeline-form-ci-variable-key']").value).to eq('key1')
- expect(find("[data-testid='pipeline-form-ci-variable-value']").value).to eq('value1')
+ page.within(all("[data-testid='ci-variable-row-container']")[0]) do
+ expect(find("[data-testid='pipeline-form-ci-variable-key-field']").value).to eq('key1')
+ expect(find("[data-testid='pipeline-form-ci-variable-value-field']").value).to eq('value1')
end
end
it "file_var[key2]=value2 populates file variable correctly" do
- page.within(all("[data-testid='ci-variable-row']")[1]) do
- expect(find("[data-testid='pipeline-form-ci-variable-key']").value).to eq('key2')
- expect(find("[data-testid='pipeline-form-ci-variable-value']").value).to eq('value2')
+ page.within(all("[data-testid='ci-variable-row-container']")[1]) do
+ expect(find("[data-testid='pipeline-form-ci-variable-key-field']").value).to eq('key2')
+ expect(find("[data-testid='pipeline-form-ci-variable-value-field']").value).to eq('value2')
end
end
end
diff --git a/spec/features/profiles/user_visits_profile_preferences_page_spec.rb b/spec/features/profiles/user_visits_profile_preferences_page_spec.rb
index 033711f699e..4da1a7ba81a 100644
--- a/spec/features/profiles/user_visits_profile_preferences_page_spec.rb
+++ b/spec/features/profiles/user_visits_profile_preferences_page_spec.rb
@@ -57,7 +57,7 @@ RSpec.describe 'User visits the profile preferences page', :js, feature_category
end
describe 'User changes their language', :js do
- it 'creates a flash message', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/31404' do
+ it 'creates a flash message' do
select_from_listbox 'English', from: 'English'
click_button 'Save changes'
diff --git a/spec/features/projects/active_tabs_spec.rb b/spec/features/projects/active_tabs_spec.rb
index 973a1e76679..8879636e4dc 100644
--- a/spec/features/projects/active_tabs_spec.rb
+++ b/spec/features/projects/active_tabs_spec.rb
@@ -55,7 +55,7 @@ RSpec.describe 'Project active tab', feature_category: :groups_and_projects do
it_behaves_like 'page has active tab', 'Repository'
- %w(Files Commits Graph Compare Branches Tags).each do |sub_menu|
+ %w[Files Commits Graph Compare Branches Tags].each do |sub_menu|
context "on project Repository/#{sub_menu}" do
before do
click_tab(sub_menu)
diff --git a/spec/features/projects/artifacts/user_downloads_artifacts_spec.rb b/spec/features/projects/artifacts/user_downloads_artifacts_spec.rb
index eaf57c566e8..8c8946d67c7 100644
--- a/spec/features/projects/artifacts/user_downloads_artifacts_spec.rb
+++ b/spec/features/projects/artifacts/user_downloads_artifacts_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe "User downloads artifacts", feature_category: :build_artifacts do
shared_examples "downloading" do
it "downloads the zip" do
- expect(page.response_headers['Content-Disposition']).to eq(%{attachment; filename="#{job.artifacts_file.filename}"; filename*=UTF-8''#{job.artifacts_file.filename}})
+ expect(page.response_headers['Content-Disposition']).to eq(%(attachment; filename="#{job.artifacts_file.filename}"; filename*=UTF-8''#{job.artifacts_file.filename}))
expect(page.response_headers['Content-Transfer-Encoding']).to eq("binary")
expect(page.response_headers['Content-Type']).to eq("application/zip")
expect(page.source.b).to eq(job.artifacts_file.file.read.b)
diff --git a/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb b/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb
index f0058e75e52..89f6e41bec0 100644
--- a/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb
+++ b/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb
@@ -55,10 +55,50 @@ RSpec.describe 'Blob button line permalinks (BlobLinePermalinkUpdater)', :js, fe
end
describe 'Click "Blame" button' do
+ context 'when redirect_with_ref_type is disabled' do
+ before do
+ stub_feature_flags(redirect_with_ref_type: false)
+ end
+
+ it 'works with no initial line number fragment hash' do
+ visit_blob
+
+ expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path))))
+ end
+
+ it 'maintains intitial fragment hash' do
+ fragment = "L3"
+
+ visit_blob(fragment)
+
+ expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path), anchor: fragment)))
+ end
+
+ it 'changes fragment hash if line number clicked' do
+ visit_blob
+
+ find('#L3').click
+ find("#L5").click
+
+ expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path), anchor: "L5")))
+ end
+
+ it 'with initial fragment hash, changes fragment hash if line number clicked' do
+ fragment = "L1"
+
+ visit_blob(fragment)
+
+ find('#L3').click
+ find("#L5").click
+
+ expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path), anchor: "L5")))
+ end
+ end
+
it 'works with no initial line number fragment hash' do
visit_blob
- expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path))))
+ expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path), ref_type: 'heads')))
end
it 'maintains intitial fragment hash' do
@@ -66,7 +106,7 @@ RSpec.describe 'Blob button line permalinks (BlobLinePermalinkUpdater)', :js, fe
visit_blob(fragment)
- expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path), anchor: fragment)))
+ expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path), ref_type: 'heads', anchor: fragment)))
end
it 'changes fragment hash if line number clicked' do
@@ -75,7 +115,7 @@ RSpec.describe 'Blob button line permalinks (BlobLinePermalinkUpdater)', :js, fe
find('#L3').click
find("#L5").click
- expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path), anchor: "L5")))
+ expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path), ref_type: 'heads', anchor: "L5")))
end
it 'with initial fragment hash, changes fragment hash if line number clicked' do
@@ -86,7 +126,7 @@ RSpec.describe 'Blob button line permalinks (BlobLinePermalinkUpdater)', :js, fe
find('#L3').click
find("#L5").click
- expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path), anchor: "L5")))
+ expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path), ref_type: 'heads', anchor: "L5")))
end
end
end
diff --git a/spec/features/projects/branches/user_views_branches_spec.rb b/spec/features/projects/branches/user_views_branches_spec.rb
index 52327cc6543..e396455b371 100644
--- a/spec/features/projects/branches/user_views_branches_spec.rb
+++ b/spec/features/projects/branches/user_views_branches_spec.rb
@@ -29,6 +29,10 @@ RSpec.describe "User views branches", :js, feature_category: :groups_and_project
it "does not show the \"More actions\" dropdown" do
expect(page).not_to have_selector('[data-testid="branch-more-actions"]')
end
+
+ it "passes axe automated accessibility testing" do
+ expect(page).to be_axe_clean.within('#content-body')
+ end
end
describe 'non-default branch' do
@@ -45,6 +49,10 @@ RSpec.describe "User views branches", :js, feature_category: :groups_and_project
it "shows the \"More actions\" dropdown" do
expect(page).to have_button('More actions')
end
+
+ it "passes axe automated accessibility testing" do
+ expect(page).to be_axe_clean.within('#content-body')
+ end
end
end
diff --git a/spec/features/projects/branches_spec.rb b/spec/features/projects/branches_spec.rb
index 50df7bb7ca5..79e9ca7998e 100644
--- a/spec/features/projects/branches_spec.rb
+++ b/spec/features/projects/branches_spec.rb
@@ -206,7 +206,7 @@ RSpec.describe 'Branches', feature_category: :groups_and_projects do
page.refresh
search_for_branch('fix')
-
+ clear_search_input
expect(page).not_to have_content('fix')
expect(all('.all-branches', wait: false).last).to have_selector('li', count: 0)
end
@@ -299,7 +299,7 @@ RSpec.describe 'Branches', feature_category: :groups_and_projects do
it 'shows pipeline status when available' do
page.within first('.all-branches li') do
- expect(page).to have_css 'a.ci-status-icon-success'
+ expect(page).to have_css 'a.gl-badge .ci-status-icon-success'
end
end
@@ -376,6 +376,10 @@ RSpec.describe 'Branches', feature_category: :groups_and_projects do
branch_search.native.send_keys(:enter)
end
+ def clear_search_input
+ find('input[data-testid="branch-search"]').set('')
+ end
+
def delete_branch_and_confirm
wait_for_requests
find('[data-testid="branch-more-actions"] .gl-new-dropdown-toggle', match: :first).click
diff --git a/spec/features/projects/cluster_agents_spec.rb b/spec/features/projects/cluster_agents_spec.rb
index baef26e3e63..dcda24eb2e4 100644
--- a/spec/features/projects/cluster_agents_spec.rb
+++ b/spec/features/projects/cluster_agents_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe 'ClusterAgents', :js, feature_category: :groups_and_projects do
end
it 'displays empty state', :aggregate_failures do
- expect(page).to have_selector('.empty-state')
+ expect(page).to have_selector('[data-testid="cluster-agent-empty-state"]')
end
end
diff --git a/spec/features/projects/clusters_spec.rb b/spec/features/projects/clusters_spec.rb
index e075cc86319..c5d960f2308 100644
--- a/spec/features/projects/clusters_spec.rb
+++ b/spec/features/projects/clusters_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe 'Clusters', :js, feature_category: :groups_and_projects do
end
it 'sees empty state' do
- expect(page).to have_selector('.empty-state')
+ expect(page).to have_selector('[data-testid="clusters-empty-state"]')
end
end
diff --git a/spec/features/projects/commit/user_sees_pipelines_tab_spec.rb b/spec/features/projects/commit/user_sees_pipelines_tab_spec.rb
index bc5d468c97a..00cb5474ea0 100644
--- a/spec/features/projects/commit/user_sees_pipelines_tab_spec.rb
+++ b/spec/features/projects/commit/user_sees_pipelines_tab_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe 'Commit > Pipelines tab', :js, feature_category: :source_code_man
wait_for_requests
page.within('[data-testid="pipeline-table-row"]') do
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'passed')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Passed')
expect(page).to have_content(pipeline.id)
expect(page).to have_content('API')
expect(page).to have_css('[data-testid="pipeline-mini-graph"]')
diff --git a/spec/features/projects/container_registry_spec.rb b/spec/features/projects/container_registry_spec.rb
index 0a77c671fce..c285d039d08 100644
--- a/spec/features/projects/container_registry_spec.rb
+++ b/spec/features/projects/container_registry_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Container Registry', :js, feature_category: :groups_and_projects do
+RSpec.describe 'Container Registry', :js, feature_category: :container_registry do
include_context 'container registry tags'
let(:user) { create(:user) }
@@ -75,7 +75,6 @@ RSpec.describe 'Container Registry', :js, feature_category: :groups_and_projects
visit_container_registry
expect_any_instance_of(ContainerRepository).to receive(:delete_scheduled!).and_call_original
- expect(DeleteContainerRepositoryWorker).not_to receive(:perform_async)
find('[title="Remove repository"]').click
expect(find('.modal .modal-title')).to have_content _('Delete image repository?')
diff --git a/spec/features/projects/environments/environment_spec.rb b/spec/features/projects/environments/environment_spec.rb
index 3abe3ce1396..fdedaa3f469 100644
--- a/spec/features/projects/environments/environment_spec.rb
+++ b/spec/features/projects/environments/environment_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe 'Environment', feature_category: :groups_and_projects do
end
def auto_stop_button_selector
- %q{button[title="Prevent environment from auto-stopping"]}
+ %q(button[title="Prevent environment from auto-stopping"])
end
describe 'environment details page', :js do
diff --git a/spec/features/projects/integrations/user_activates_jira_spec.rb b/spec/features/projects/integrations/user_activates_jira_spec.rb
index 03d5e68d2aa..0bd5020e9bf 100644
--- a/spec/features/projects/integrations/user_activates_jira_spec.rb
+++ b/spec/features/projects/integrations/user_activates_jira_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe 'User activates Jira', :js, feature_category: :integrations do
end
it 'activates the Jira integration' do
- stub_request(:get, test_url).with(basic_auth: %w(username password))
+ stub_request(:get, test_url).with(basic_auth: %w[username password])
.to_raise(JIRA::HTTPError.new(double(message: 'message', code: '200')))
visit_project_integration('Jira')
diff --git a/spec/features/projects/integrations/user_activates_mattermost_slash_command_spec.rb b/spec/features/projects/integrations/user_activates_mattermost_slash_command_spec.rb
index aea76944c7f..e490e32149e 100644
--- a/spec/features/projects/integrations/user_activates_mattermost_slash_command_spec.rb
+++ b/spec/features/projects/integrations/user_activates_mattermost_slash_command_spec.rb
@@ -139,7 +139,7 @@ RSpec.describe 'Set up Mattermost slash commands', :js, feature_category: :integ
it 'shows the correct trigger url' do
value = find_field('request_url').value
- expect(value).to match("api/v4/projects/#{project.id}/services/mattermost_slash_commands/trigger")
+ expect(value).to match("api/v4/projects/#{project.id}/integrations/mattermost_slash_commands/trigger")
end
it 'shows a token placeholder' do
diff --git a/spec/features/projects/integrations/user_activates_prometheus_spec.rb b/spec/features/projects/integrations/user_activates_prometheus_spec.rb
deleted file mode 100644
index db71256b294..00000000000
--- a/spec/features/projects/integrations/user_activates_prometheus_spec.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'User activates Prometheus', feature_category: :integrations do
- include_context 'project integration activation'
-
- before do
- stub_feature_flags(remove_monitor_metrics: false)
- stub_request(:get, /.*prometheus.example.com.*/)
- end
-
- it 'saves and activates integration', :js do
- visit_project_integration('Prometheus')
- check('Active')
-
- click_button('Save changes')
-
- expect(page).to have_content('Prometheus settings saved and active.')
- end
-end
diff --git a/spec/features/projects/integrations/user_activates_slack_slash_command_spec.rb b/spec/features/projects/integrations/user_activates_slack_slash_command_spec.rb
index 38491501c65..b4d755291da 100644
--- a/spec/features/projects/integrations/user_activates_slack_slash_command_spec.rb
+++ b/spec/features/projects/integrations/user_activates_slack_slash_command_spec.rb
@@ -46,7 +46,7 @@ RSpec.describe 'Slack slash commands', :js, feature_category: :integrations do
it 'shows the correct trigger url' do
value = find_field('url').value
- expect(value).to match("api/v4/projects/#{project.id}/services/slack_slash_commands/trigger")
+ expect(value).to match("api/v4/projects/#{project.id}/integrations/slack_slash_commands/trigger")
end
it 'shows help content' do
diff --git a/spec/features/projects/issuable_templates_spec.rb b/spec/features/projects/issuable_templates_spec.rb
index 4221fa26e00..9ba4b544191 100644
--- a/spec/features/projects/issuable_templates_spec.rb
+++ b/spec/features/projects/issuable_templates_spec.rb
@@ -4,8 +4,6 @@ require 'spec_helper'
RSpec.describe 'issuable templates', :js, feature_category: :groups_and_projects do
include ProjectForksHelper
- include CookieHelper
- include ContentEditorHelpers
let(:user) { create(:user) }
let(:project) { create(:project, :public, :repository) }
@@ -14,7 +12,6 @@ RSpec.describe 'issuable templates', :js, feature_category: :groups_and_projects
before do
project.add_maintainer(user)
sign_in user
- set_cookie('new-actions-popover-viewed', 'true')
end
context 'user creates an issue using templates' do
@@ -37,7 +34,6 @@ RSpec.describe 'issuable templates', :js, feature_category: :groups_and_projects
message: 'added issue template',
branch_name: 'master')
visit project_issue_path project, issue
- close_rich_text_promo_popover_if_present
page.find('.js-issuable-edit').click
fill_in :'issuable-title', with: 'test issue title'
end
@@ -81,7 +77,6 @@ RSpec.describe 'issuable templates', :js, feature_category: :groups_and_projects
message: 'added issue template',
branch_name: 'master')
visit project_issue_path project, issue
- close_rich_text_promo_popover_if_present
page.find('.js-issuable-edit').click
fill_in :'issuable-title', with: 'test issue title'
fill_in :'issue-description', with: prior_description
@@ -111,7 +106,6 @@ RSpec.describe 'issuable templates', :js, feature_category: :groups_and_projects
it 'does not overwrite autosaved description' do
visit new_project_issue_path project
wait_for_requests
- close_rich_text_promo_popover_if_present
assert_template # default template is loaded the first time
@@ -145,7 +139,6 @@ RSpec.describe 'issuable templates', :js, feature_category: :groups_and_projects
message: 'added merge request bug template',
branch_name: 'master')
visit edit_project_merge_request_path project, merge_request
- close_rich_text_promo_popover_if_present
fill_in :'merge_request[title]', with: 'test merge request title'
end
@@ -205,7 +198,6 @@ RSpec.describe 'issuable templates', :js, feature_category: :groups_and_projects
message: 'added merge request template',
branch_name: 'master')
visit edit_project_merge_request_path project, merge_request
- close_rich_text_promo_popover_if_present
fill_in :'merge_request[title]', with: 'test merge request title'
end
diff --git a/spec/features/projects/jobs/permissions_spec.rb b/spec/features/projects/jobs/permissions_spec.rb
index e1bcc160092..73635480b95 100644
--- a/spec/features/projects/jobs/permissions_spec.rb
+++ b/spec/features/projects/jobs/permissions_spec.rb
@@ -88,7 +88,7 @@ RSpec.describe 'Project Jobs Permissions', feature_category: :groups_and_project
it_behaves_like 'recent job page details responds with status', 200 do
it 'renders job details', :js do
- expect(page).to have_content "Job #{job.name}"
+ expect(page).to have_content(job.name)
expect(page).to have_css '.log-line'
end
end
diff --git a/spec/features/projects/jobs/user_browses_job_spec.rb b/spec/features/projects/jobs/user_browses_job_spec.rb
index 795084f8008..448db8b6d89 100644
--- a/spec/features/projects/jobs/user_browses_job_spec.rb
+++ b/spec/features/projects/jobs/user_browses_job_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe 'User browses a job', :js, feature_category: :groups_and_projects
visit(project_job_path(project, build))
wait_for_requests
- expect(page).to have_content("Job #{build.name}")
+ expect(page).to have_content(build.name)
expect(page).to have_css('.job-log')
# scroll to the top of the page first
diff --git a/spec/features/projects/jobs/user_browses_jobs_spec.rb b/spec/features/projects/jobs/user_browses_jobs_spec.rb
index afcf0e660f7..fc67d7dedcc 100644
--- a/spec/features/projects/jobs/user_browses_jobs_spec.rb
+++ b/spec/features/projects/jobs/user_browses_jobs_spec.rb
@@ -72,7 +72,7 @@ RSpec.describe 'User browses jobs', feature_category: :groups_and_projects do
wait_for_requests
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'canceled')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Canceled')
expect(page).not_to have_selector('[data-testid="jobs-table-error-alert"]')
end
end
@@ -93,7 +93,7 @@ RSpec.describe 'User browses jobs', feature_category: :groups_and_projects do
wait_for_requests
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'pending')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Pending')
end
end
@@ -133,7 +133,7 @@ RSpec.describe 'User browses jobs', feature_category: :groups_and_projects do
wait_for_requests
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'pending')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Pending')
end
it 'unschedules a job successfully' do
@@ -141,7 +141,7 @@ RSpec.describe 'User browses jobs', feature_category: :groups_and_projects do
wait_for_requests
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'manual')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Manual')
end
end
diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb
index 1bee4cc5081..12ed2558712 100644
--- a/spec/features/projects/jobs_spec.rb
+++ b/spec/features/projects/jobs_spec.rb
@@ -66,7 +66,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state, feature_category: :grou
wait_for_requests
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'passed')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Passed')
end
it 'shows commit`s data', :js do
@@ -93,7 +93,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state, feature_category: :grou
visit project_job_path(project, job)
within '.js-pipeline-info' do
- expect(page).to have_content("Pipeline ##{pipeline.id} #{pipeline.status} for #{pipeline.ref}")
+ expect(page).to have_content("Pipeline ##{pipeline.id} Pending for #{pipeline.ref}")
end
end
@@ -306,7 +306,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state, feature_category: :grou
artifact_request = requests.find { |req| req.url.include?('artifacts/download') }
- expect(artifact_request.response_headers['Content-Disposition']).to eq(%{attachment; filename="#{job.artifacts_file.filename}"; filename*=UTF-8''#{job.artifacts_file.filename}})
+ expect(artifact_request.response_headers['Content-Disposition']).to eq(%(attachment; filename="#{job.artifacts_file.filename}"; filename*=UTF-8''#{job.artifacts_file.filename}))
expect(artifact_request.response_headers['Content-Transfer-Encoding']).to eq("binary")
expect(artifact_request.response_headers['Content-Type']).to eq("image/gif")
expect(artifact_request.body).to eq(job.artifacts_file.file.read.b)
@@ -939,7 +939,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state, feature_category: :grou
context 'when available runners can not run specified tag' do
let(:runner) { create(:ci_runner, :instance, active: false) }
- let(:job) { create(:ci_build, :pending, pipeline: pipeline, runner: runner, tag_list: %w(docker linux)) }
+ let(:job) { create(:ci_build, :pending, pipeline: pipeline, runner: runner, tag_list: %w[docker linux]) }
it 'renders message about job being stuck because of no runners with the specified tags' do
expect(page).to have_selector('[data-testid="job-stuck-with-tags"')
@@ -951,7 +951,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state, feature_category: :grou
context 'when runners are offline and build has tags' do
let(:runner) { create(:ci_runner, :instance, active: true) }
- let(:job) { create(:ci_build, :pending, pipeline: pipeline, runner: runner, tag_list: %w(docker linux)) }
+ let(:job) { create(:ci_build, :pending, pipeline: pipeline, runner: runner, tag_list: %w[docker linux]) }
it 'renders message about job being stuck because of no runners with the specified tags' do
expect(page).to have_selector('[data-testid="job-stuck-with-tags"')
@@ -1052,7 +1052,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state, feature_category: :grou
find('[data-testid="retry-button-modal"]').click
within '[data-testid="job-header-content"]' do
- expect(page).to have_content('pending')
+ expect(page).to have_content('Pending')
end
end
end
diff --git a/spec/features/projects/labels/issues_sorted_by_priority_spec.rb b/spec/features/projects/labels/issues_sorted_by_priority_spec.rb
index 846a0a25891..4898bf253be 100644
--- a/spec/features/projects/labels/issues_sorted_by_priority_spec.rb
+++ b/spec/features/projects/labels/issues_sorted_by_priority_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe 'Issue prioritization', feature_category: :team_planning do
issue_titles = all('.issues-list .issue-title-text').map(&:text)
- expect(issue_titles).to eq(%w(issue_4 issue_3 issue_5 issue_2 issue_1))
+ expect(issue_titles).to eq(%w[issue_4 issue_3 issue_5 issue_2 issue_1])
end
end
@@ -80,7 +80,7 @@ RSpec.describe 'Issue prioritization', feature_category: :team_planning do
expect(issue_titles[0..1]).to contain_exactly('issue_5', 'issue_8')
expect(issue_titles[2..4]).to contain_exactly('issue_1', 'issue_3', 'issue_7')
- expect(issue_titles[5..]).to eq(%w(issue_2 issue_4 issue_6))
+ expect(issue_titles[5..]).to eq(%w[issue_2 issue_4 issue_6])
end
end
end
diff --git a/spec/features/projects/labels/sort_labels_spec.rb b/spec/features/projects/labels/sort_labels_spec.rb
index 74ce2f40df8..6b6d000cbf5 100644
--- a/spec/features/projects/labels/sort_labels_spec.rb
+++ b/spec/features/projects/labels/sort_labels_spec.rb
@@ -22,8 +22,8 @@ RSpec.describe 'Sort labels', :js, feature_category: :team_planning do
# assert default sorting
within '.other-labels' do
- expect(page.all('.label-list-item').first.text).to include('Bar')
- expect(page.all('.label-list-item').last.text).to include('Foo')
+ expect(page.all('.js-label-list-item').first.text).to include('Bar')
+ expect(page.all('.js-label-list-item').last.text).to include('Foo')
end
end
@@ -43,8 +43,8 @@ RSpec.describe 'Sort labels', :js, feature_category: :team_planning do
# assert default sorting
within '.other-labels' do
- expect(page.all('.label-list-item').first.text).to include('Foo')
- expect(page.all('.label-list-item').last.text).to include('Bar')
+ expect(page.all('.js-label-list-item').first.text).to include('Foo')
+ expect(page.all('.js-label-list-item').last.text).to include('Bar')
end
end
end
diff --git a/spec/features/projects/labels/update_prioritization_spec.rb b/spec/features/projects/labels/update_prioritization_spec.rb
index 4af5dd380c1..967497bccd2 100644
--- a/spec/features/projects/labels/update_prioritization_spec.rb
+++ b/spec/features/projects/labels/update_prioritization_spec.rb
@@ -101,19 +101,19 @@ RSpec.describe 'Prioritize labels', feature_category: :team_planning do
expect(page).to have_content 'wontfix'
# Sort labels
- drag_to(selector: '.label-list-item .label-content', from_index: 1, to_index: 2)
+ drag_to(selector: '.js-label-list-item .label-content', from_index: 1, to_index: 2)
page.within('.prioritized-labels') do
- expect(first('.label-list-item')).to have_content('feature')
- expect(page.all('.label-list-item').last).to have_content('bug')
+ expect(first('.js-label-list-item')).to have_content('feature')
+ expect(page.all('.js-label-list-item').last).to have_content('bug')
end
refresh
wait_for_requests
page.within('.prioritized-labels') do
- expect(first('.label-list-item')).to have_content('feature')
- expect(page.all('.label-list-item').last).to have_content('bug')
+ expect(first('.js-label-list-item')).to have_content('feature')
+ expect(page.all('.js-label-list-item').last).to have_content('bug')
end
end
@@ -159,11 +159,11 @@ RSpec.describe 'Prioritize labels', feature_category: :team_planning do
end
it 'cannot sort prioritized labels', :js do
- drag_to(selector: '.prioritized-labels .label-list-item', from_index: 1, to_index: 2)
+ drag_to(selector: '.prioritized-labels .js-label-list-item', from_index: 1, to_index: 2)
page.within('.prioritized-labels') do
- expect(first('.label-list-item')).to have_content('bug')
- expect(page.all('.label-list-item').last).to have_content('feature')
+ expect(first('.js-label-list-item')).to have_content('bug')
+ expect(page.all('.js-label-list-item').last).to have_content('feature')
end
end
end
diff --git a/spec/features/projects/labels/user_removes_labels_spec.rb b/spec/features/projects/labels/user_removes_labels_spec.rb
index d0175c53951..c3677d815d1 100644
--- a/spec/features/projects/labels/user_removes_labels_spec.rb
+++ b/spec/features/projects/labels/user_removes_labels_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe "User removes labels", feature_category: :team_planning do
it "removes label", :js do
page.within(".other-labels") do
- page.first(".label-list-item") do
+ page.first(".js-label-list-item") do
first('.js-label-options-dropdown').click
first('.js-delete-label-modal-button').click
end
@@ -43,7 +43,7 @@ RSpec.describe "User removes labels", feature_category: :team_planning do
it "removes all labels" do
loop do
- li = page.first(".label-list-item", minimum: 0)
+ li = page.first(".js-label-list-item", minimum: 0)
break unless li
li.find('.js-label-options-dropdown').click
diff --git a/spec/features/projects/pipeline_schedules_spec.rb b/spec/features/projects/pipeline_schedules_spec.rb
index 599f5a1ffb7..5bcd0d28fd9 100644
--- a/spec/features/projects/pipeline_schedules_spec.rb
+++ b/spec/features/projects/pipeline_schedules_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Pipeline Schedules', :js, feature_category: :groups_and_projects do
+RSpec.describe 'Pipeline Schedules', :js, feature_category: :continuous_integration do
include Spec::Support::Helpers::ModalHelpers
let!(:project) { create(:project, :repository) }
@@ -106,7 +106,7 @@ RSpec.describe 'Pipeline Schedules', :js, feature_category: :groups_and_projects
wait_for_requests
end
- describe 'The view' do
+ describe 'the view' do
it 'displays the required information description' do
page.within('[data-testid="pipeline-schedule-table-row"]') do
expect(page).to have_content('pipeline schedule')
@@ -293,36 +293,47 @@ RSpec.describe 'Pipeline Schedules', :js, feature_category: :groups_and_projects
end
end
- context 'logged in as non-member' do
- before do
- gitlab_sign_in(user)
- end
-
+ shared_examples 'when not logged in' do
describe 'GET /projects/pipeline_schedules' do
- before do
- visit_pipelines_schedules
- end
-
- describe 'The view' do
+ describe 'the view' do
it 'does not show create schedule button' do
+ visit_pipelines_schedules
+
expect(page).not_to have_link('New schedule')
end
+
+ context 'when project is public' do
+ let_it_be(:project) { create(:project, :repository, :public, public_builds: true) }
+
+ it 'shows Pipelines Schedules page' do
+ visit_pipelines_schedules
+
+ expect(page).to have_link('New schedule')
+ end
+
+ context 'when public pipelines are disabled' do
+ before do
+ project.update!(public_builds: false)
+ visit_pipelines_schedules
+ end
+
+ it 'shows Not Found page' do
+ expect(page).to have_content('Page Not Found')
+ end
+ end
+ end
end
end
end
- context 'not logged in' do
- describe 'GET /projects/pipeline_schedules' do
- before do
- visit_pipelines_schedules
- end
+ it_behaves_like 'when not logged in'
- describe 'The view' do
- it 'does not show create schedule button' do
- expect(page).not_to have_link('New schedule')
- end
- end
+ context 'logged in as non-member' do
+ before do
+ gitlab_sign_in(user)
end
+
+ it_behaves_like 'when not logged in'
end
def visit_new_pipeline_schedule
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index 2fc8345fb47..f042a12884c 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Pipeline', :js, feature_category: :groups_and_projects do
+RSpec.describe 'Pipeline', :js, feature_category: :continuous_integration do
include RoutesHelpers
include ProjectForksHelper
include ::ExclusiveLeaseHelpers
@@ -224,7 +224,7 @@ RSpec.describe 'Pipeline', :js, feature_category: :groups_and_projects do
expect(page).not_to have_content('Retry job')
within('[data-testid="pipeline-details-header"]') do
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'running')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Running')
end
end
end
@@ -278,7 +278,7 @@ RSpec.describe 'Pipeline', :js, feature_category: :groups_and_projects do
expect(page).not_to have_content('Retry job')
within('[data-testid="pipeline-details-header"]') do
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'running')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Running')
end
end
@@ -312,7 +312,7 @@ RSpec.describe 'Pipeline', :js, feature_category: :groups_and_projects do
expect(page).not_to have_content('Play job')
within('[data-testid="pipeline-details-header"]') do
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'running')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Running')
end
end
end
@@ -537,7 +537,7 @@ RSpec.describe 'Pipeline', :js, feature_category: :groups_and_projects do
it 'shows running status in pipeline header', :sidekiq_might_not_need_inline do
within('[data-testid="pipeline-details-header"]') do
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'running')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Running')
end
end
end
@@ -900,7 +900,7 @@ RSpec.describe 'Pipeline', :js, feature_category: :groups_and_projects do
subject
within('[data-testid="pipeline-details-header"]') do
- expect(page).to have_content('pending')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Pending')
end
within('.js-pipeline-graph') do
@@ -925,7 +925,7 @@ RSpec.describe 'Pipeline', :js, feature_category: :groups_and_projects do
subject
within('[data-testid="pipeline-details-header"]') do
- expect(page).to have_content('running')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Running')
end
within('.js-pipeline-graph') do
@@ -954,7 +954,7 @@ RSpec.describe 'Pipeline', :js, feature_category: :groups_and_projects do
subject
within('[data-testid="pipeline-details-header"]') do
- expect(page).to have_content('waiting')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Waiting')
end
within('.js-pipeline-graph') do
@@ -974,7 +974,7 @@ RSpec.describe 'Pipeline', :js, feature_category: :groups_and_projects do
subject
within('[data-testid="pipeline-details-header"]') do
- expect(page).to have_content('running')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Running')
end
within('.js-pipeline-graph') do
@@ -1002,7 +1002,7 @@ RSpec.describe 'Pipeline', :js, feature_category: :groups_and_projects do
subject
within('[data-testid="pipeline-details-header"]') do
- expect(page).to have_content('waiting')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Waiting')
end
within('.js-pipeline-graph') do
@@ -1303,7 +1303,7 @@ RSpec.describe 'Pipeline', :js, feature_category: :groups_and_projects do
page.within('[data-testid="pipeline-details-header"]') do
expect(page).to have_selector(
- %{span[title="#{pipeline.yaml_errors}"]})
+ %(span[title="#{pipeline.yaml_errors}"]))
end
end
@@ -1316,7 +1316,7 @@ RSpec.describe 'Pipeline', :js, feature_category: :groups_and_projects do
page.within('[data-testid="pipeline-details-header"]') do
expect(page).to have_selector(
- %{span[title="#{pipeline.present.failure_reason}"]})
+ %(span[title="#{pipeline.present.failure_reason}"]))
end
end
end
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index c1aa2c35337..ca3b7f0ad47 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -115,7 +115,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
it 'indicates that pipeline can be canceled' do
expect(page).to have_selector('.js-pipelines-cancel-button')
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'running')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Running')
end
context 'when canceling' do
@@ -127,7 +127,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
it 'indicated that pipelines was canceled', :sidekiq_might_not_need_inline do
expect(page).not_to have_selector('.js-pipelines-cancel-button')
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'canceled')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Canceled')
end
end
end
@@ -144,7 +144,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
it 'indicates that pipeline can be retried' do
expect(page).to have_selector('.js-pipelines-retry-button')
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'failed')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Failed')
end
context 'when retrying' do
@@ -155,7 +155,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
it 'shows running pipeline that is not retryable' do
expect(page).not_to have_selector('.js-pipelines-retry-button')
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'running')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Running')
end
end
end
@@ -255,7 +255,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
it 'contains badge with tooltip which contains error' do
expect(pipeline).to have_yaml_errors
expect(page).to have_selector(
- %{span[title="#{pipeline.yaml_errors}"]})
+ %(span[title="#{pipeline.yaml_errors}"]))
end
it 'contains badge that indicates failure reason' do
@@ -265,7 +265,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
it 'contains badge with tooltip which contains failure reason' do
expect(pipeline.failure_reason?).to eq true
expect(page).to have_selector(
- %{span[title="#{pipeline.present.failure_reason}"]})
+ %(span[title="#{pipeline.present.failure_reason}"]))
end
end
@@ -396,7 +396,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
end
it 'shows the pipeline as preparing' do
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'preparing')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Preparing')
end
end
@@ -417,7 +417,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
end
it 'has pipeline running' do
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'running')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Running')
end
context 'when canceling' do
@@ -428,7 +428,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
it 'indicates that pipeline was canceled', :sidekiq_might_not_need_inline do
expect(page).not_to have_selector('.js-pipelines-cancel-button')
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'canceled')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Canceled')
end
end
end
@@ -450,7 +450,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
end
it 'has failed pipeline', :sidekiq_might_not_need_inline do
- expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'failed')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'Failed')
end
end
end
@@ -694,7 +694,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
it 'creates a new pipeline' do
expect do
- find('[data-testid="run_pipeline_button"]', text: 'Run pipeline').click
+ find('[data-testid="run-pipeline-button"]', text: 'Run pipeline').click
wait_for_requests
end
.to change { Ci::Pipeline.count }.by(1)
@@ -704,13 +704,13 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
context 'when variables are specified' do
it 'creates a new pipeline with variables' do
- page.within(find("[data-testid='ci-variable-row']")) do
- find("[data-testid='pipeline-form-ci-variable-key']").set('key_name')
- find("[data-testid='pipeline-form-ci-variable-value']").set('value')
+ page.within(find("[data-testid='ci-variable-row-container']")) do
+ find("[data-testid='pipeline-form-ci-variable-key-field']").set('key_name')
+ find("[data-testid='pipeline-form-ci-variable-value-field']").set('value')
end
expect do
- find('[data-testid="run_pipeline_button"]', text: 'Run pipeline').click
+ find('[data-testid="run-pipeline-button"]', text: 'Run pipeline').click
wait_for_requests
end
.to change { Ci::Pipeline.count }.by(1)
@@ -723,7 +723,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
context 'without gitlab-ci.yml' do
before do
- find('[data-testid="run_pipeline_button"]', text: 'Run pipeline').click
+ find('[data-testid="run-pipeline-button"]', text: 'Run pipeline').click
wait_for_requests
end
@@ -733,7 +733,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
stub_ci_pipeline_to_return_yaml_file
expect do
- find('[data-testid="run_pipeline_button"]', text: 'Run pipeline').click
+ find('[data-testid="run-pipeline-button"]', text: 'Run pipeline').click
wait_for_requests
end
.to change { Ci::Pipeline.count }.by(1)
@@ -818,7 +818,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
describe 'when the `ios_specific_templates` experiment is enabled and the "Set up a runner" button is clicked' do
before do
stub_experiments(ios_specific_templates: :candidate)
- project.project_setting.update!(target_platforms: %w(ios))
+ project.project_setting.update!(target_platforms: %w[ios])
visit project_pipelines_path(project)
click_button 'Set up a runner'
end
diff --git a/spec/features/projects/project_overview_spec.rb b/spec/features/projects/project_overview_spec.rb
new file mode 100644
index 00000000000..e563b03c22a
--- /dev/null
+++ b/spec/features/projects/project_overview_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe "Project overview when default branch collides with tag", :js, feature_category: :source_code_management do
+ let_it_be(:project) { create(:project, :empty_repo) }
+ let(:user) { project.first_owner }
+
+ before_all do
+ # Create a branch called main that does not contain a readme (this will be the default branch)
+ project.repository.create_file(
+ project.creator,
+ 'NOTREADME.md',
+ '',
+ message: "Initial commit",
+ branch_name: 'main'
+ )
+
+ # Create a branch called readme_branch that contains a readme
+ project.repository.create_file(
+ project.creator,
+ 'README.md',
+ 'readme',
+ message: "Add README.md",
+ branch_name: 'readme_branch'
+ )
+
+ # Create a tag called main pointing to readme_branch
+ project.repository.add_tag(
+ project.creator,
+ 'main',
+ 'readme_branch'
+ )
+ end
+
+ before do
+ sign_in(user)
+ visit project_path(project)
+ end
+
+ it "shows last commit" do
+ page.within(".commit-detail") do
+ expect(page).to have_content('Initial commit')
+ end
+
+ page.execute_script(%{
+ document.getElementsByClassName('tree-content-holder')[0].scrollIntoView()}
+ )
+ wait_for_all_requests
+
+ page.within(".tree-content-holder") do
+ expect(page).to have_content('Initial commit')
+ end
+ end
+
+ it 'has a button to add readme' do
+ expect(page).to have_link 'Add README'
+ end
+end
diff --git a/spec/features/projects/settings/auto_devops_spec.rb b/spec/features/projects/settings/auto_devops_spec.rb
new file mode 100644
index 00000000000..6b8164b130f
--- /dev/null
+++ b/spec/features/projects/settings/auto_devops_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Projects Auto DevOps settings', :js, feature_category: :groups_and_projects do
+ let_it_be(:project) { create(:project) }
+
+ let(:user) { project.first_owner }
+ let(:toggle) { page.find('input[name="project[auto_devops_attributes][enabled]"]') }
+
+ before do
+ sign_in(user)
+ visit project_settings_ci_cd_path(project, anchor: 'autodevops-settings')
+ end
+
+ context 'when toggling Auto DevOps pipelines setting' do
+ it 'toggles the extra settings section' do
+ extra_settings = '[data-testid="extra-auto-devops-settings"].hidden'
+
+ expect(page).not_to have_selector(extra_settings, visible: :all)
+
+ toggle.click
+
+ expect(page).to have_selector(extra_settings, visible: :all)
+
+ toggle.click
+
+ expect(page).not_to have_selector(extra_settings, visible: :all)
+ end
+ end
+end
diff --git a/spec/features/projects/settings/service_desk_setting_spec.rb b/spec/features/projects/settings/service_desk_setting_spec.rb
index 5cc2e2d3c05..2cd4d793f9c 100644
--- a/spec/features/projects/settings/service_desk_setting_spec.rb
+++ b/spec/features/projects/settings/service_desk_setting_spec.rb
@@ -107,4 +107,10 @@ RSpec.describe 'Service Desk Setting', :js, :clean_gitlab_redis_cache, feature_c
expect(page).to have_pushed_frontend_feature_flags(serviceDeskCustomEmail: true)
end
+
+ it 'pushes issue_email_participants feature flag to frontend' do
+ visit edit_project_path(project)
+
+ expect(page).to have_pushed_frontend_feature_flags(issueEmailParticipants: true)
+ end
end
diff --git a/spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb b/spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb
index 70e316983d9..2267cff5490 100644
--- a/spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb
+++ b/spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe 'Projects > Show > User sees last commit CI status', feature_cate
page.within '.commit-detail' do
expect(page).to have_content(project.commit.sha[0..6])
- expect(page).to have_selector('[aria-label="Pipeline: skipped"]')
+ expect(page).to have_selector('[aria-label="Pipeline: Skipped"]')
end
end
end
diff --git a/spec/features/projects/work_items/linked_work_items_spec.rb b/spec/features/projects/work_items/linked_work_items_spec.rb
new file mode 100644
index 00000000000..66016cf8b7b
--- /dev/null
+++ b/spec/features/projects/work_items/linked_work_items_spec.rb
@@ -0,0 +1,114 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Work item linked items', :js, feature_category: :team_planning do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :public, namespace: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:work_item) { create(:work_item, project: project) }
+ let(:work_items_path) { project_work_item_path(project, work_item.iid) }
+ let_it_be(:task) { create(:work_item, :task, project: project, title: 'Task 1') }
+
+ context 'for signed in user' do
+ before_all do
+ project.add_developer(user)
+ end
+
+ before do
+ sign_in(user)
+
+ stub_feature_flags(work_items: true)
+ stub_feature_flags(linked_work_items: true)
+
+ visit work_items_path
+
+ wait_for_requests
+ end
+
+ it 'are not displayed when issue does not have work item links', :aggregate_failures do
+ page.within('.work-item-relationships') do
+ expect(page).to have_selector('[data-testid="link-item-add-button"]')
+ expect(page).not_to have_selector('[data-testid="link-work-item-form"]')
+ expect(page).not_to have_selector('[data-testid="work-item-linked-items-list"]')
+ end
+ end
+
+ it 'toggles widget body', :aggregate_failures do
+ page.within('.work-item-relationships') do
+ expect(page).to have_selector('[data-testid="widget-body"]')
+
+ click_button 'Collapse'
+
+ expect(page).not_to have_selector('[data-testid="widget-body"]')
+
+ click_button 'Expand'
+
+ expect(page).to have_selector('[data-testid="widget-body"]')
+ end
+ end
+
+ it 'toggles form', :aggregate_failures do
+ page.within('.work-item-relationships') do
+ expect(page).not_to have_selector('[data-testid="link-work-item-form"]')
+
+ click_button 'Add'
+
+ expect(page).to have_selector('[data-testid="link-work-item-form"]')
+
+ click_button 'Cancel'
+
+ expect(page).not_to have_selector('[data-testid="link-work-item-form"]')
+ end
+ end
+
+ it 'links a new item', :aggregate_failures do
+ page.within('.work-item-relationships') do
+ click_button 'Add'
+
+ within_testid('link-work-item-form') do
+ expect(page).to have_button('Add', disabled: true)
+ find_by_testid('work-item-token-select-input').set(task.title)
+ wait_for_all_requests
+ click_button task.title
+
+ expect(page).to have_button('Add', disabled: false)
+
+ click_button 'Add'
+
+ wait_for_all_requests
+ end
+
+ expect(find('.work-items-list')).to have_content('Task 1')
+ end
+ end
+
+ it 'removes a linked item', :aggregate_failures do
+ page.within('.work-item-relationships') do
+ click_button 'Add'
+
+ within_testid('link-work-item-form') do
+ expect(page).to have_button('Add', disabled: true)
+ find_by_testid('work-item-token-select-input').set(task.title)
+ wait_for_all_requests
+ click_button task.title
+
+ expect(page).to have_button('Add', disabled: false)
+
+ click_button 'Add'
+
+ wait_for_all_requests
+ end
+
+ expect(find('.work-items-list')).to have_content('Task 1')
+
+ find_by_testid('links-menu').click
+ click_button 'Remove'
+
+ wait_for_all_requests
+
+ expect(page).not_to have_content('Task 1')
+ end
+ end
+ end
+end
diff --git a/spec/features/projects/work_items/work_item_children_spec.rb b/spec/features/projects/work_items/work_item_children_spec.rb
index 43a6b2771f6..843afb54dec 100644
--- a/spec/features/projects/work_items/work_item_children_spec.rb
+++ b/spec/features/projects/work_items/work_item_children_spec.rb
@@ -124,7 +124,9 @@ RSpec.describe 'Work item children', :js, feature_category: :team_planning do
expect(page).to have_button('Add task', disabled: false)
- click_button 'Add task'
+ send_keys :escape
+
+ click_button('Add task')
wait_for_all_requests
@@ -162,7 +164,9 @@ RSpec.describe 'Work item children', :js, feature_category: :team_planning do
wait_for_all_requests
click_button task.title
- click_button 'Add task'
+ send_keys :escape
+
+ click_button('Add task')
wait_for_all_requests
end
diff --git a/spec/features/projects/work_items/work_item_spec.rb b/spec/features/projects/work_items/work_item_spec.rb
index a1f5466f5bf..5210d67b78c 100644
--- a/spec/features/projects/work_items/work_item_spec.rb
+++ b/spec/features/projects/work_items/work_item_spec.rb
@@ -12,7 +12,8 @@ RSpec.describe 'Work item', :js, feature_category: :team_planning do
let_it_be(:milestone) { create(:milestone, project: project) }
let_it_be(:milestones) { create_list(:milestone, 25, project: project) }
let_it_be(:note) { create(:note, noteable: work_item, project: work_item.project) }
- let(:work_items_path) { project_work_items_path(project, work_items_path: work_item.iid) }
+ let(:work_items_path) { project_work_item_path(project, work_item.iid) }
+ let_it_be(:label) { create(:label, project: work_item.project, title: "testing-label") }
context 'for signed in user' do
before do
@@ -58,25 +59,6 @@ RSpec.describe 'Work item', :js, feature_category: :team_planning do
expect(work_item.reload.assignees).to include(user2)
end
- it 'updates the assignee in real-time' do
- Capybara::Session.new(:other_session)
-
- using_session :other_session do
- visit work_items_path
- expect(work_item.reload.assignees).not_to include(user)
- end
-
- find('[data-testid="work-item-assignees-input"]').hover
- find('[data-testid="assign-self"]').click
- wait_for_requests
-
- expect(work_item.reload.assignees).to include(user)
-
- using_session :other_session do
- expect(work_item.reload.assignees).to include(user)
- end
- end
-
it_behaves_like 'work items title'
it_behaves_like 'work items toggle status button'
it_behaves_like 'work items assignees'
@@ -134,5 +116,11 @@ RSpec.describe 'Work item', :js, feature_category: :team_planning do
expect(page).to have_field(type: 'text', disabled: true)
end
end
+
+ it 'labels input field is disabled' do
+ within('[data-testid="work-item-labels-input"]') do
+ expect(page).to have_field(type: 'text', disabled: true)
+ end
+ end
end
end
diff --git a/spec/features/protected_branches_spec.rb b/spec/features/protected_branches_spec.rb
index ee5d92b7cdb..41105630204 100644
--- a/spec/features/protected_branches_spec.rb
+++ b/spec/features/protected_branches_spec.rb
@@ -74,9 +74,6 @@ RSpec.describe 'Protected Branches', :js, feature_category: :source_code_managem
fill_in 'delete_branch_input', with: 'fix'
click_button 'Yes, delete protected branch'
- find('input[data-testid="branch-search"]').set('fix')
- find('input[data-testid="branch-search"]').native.send_keys(:enter)
-
expect(page).to have_content('No branches to show')
end
end
diff --git a/spec/features/registrations/oauth_registration_spec.rb b/spec/features/registrations/oauth_registration_spec.rb
index c88a018a592..98300cbeaaa 100644
--- a/spec/features/registrations/oauth_registration_spec.rb
+++ b/spec/features/registrations/oauth_registration_spec.rb
@@ -50,11 +50,11 @@ RSpec.describe 'OAuth Registration', :js, :allow_forgery_protection, feature_cat
stub_omniauth_setting(block_auto_created_users: false)
end
- it 'redirects to the initial welcome path' do
+ it 'redirects to the dashboard projects path' do
register_via(provider, uid, email, additional_info: additional_info)
- expect(page).to have_current_path users_sign_up_welcome_path
- expect(page).to have_content('Welcome to GitLab, mockuser!')
+ expect(page).to have_current_path dashboard_projects_path
+ expect(page).to have_content('Welcome to GitLab')
end
context 'when terms are enforced' do
@@ -62,11 +62,11 @@ RSpec.describe 'OAuth Registration', :js, :allow_forgery_protection, feature_cat
enforce_terms
end
- it 'auto accepts terms and redirects to the initial welcome path' do
+ it 'auto accepts terms and redirects to the dashboard projects path' do
register_via(provider, uid, email, additional_info: additional_info)
- expect(page).to have_current_path users_sign_up_welcome_path
- expect(page).to have_content('Welcome to GitLab, mockuser!')
+ expect(page).to have_current_path dashboard_projects_path
+ expect(page).to have_content('Welcome to GitLab')
end
end
@@ -108,7 +108,6 @@ RSpec.describe 'OAuth Registration', :js, :allow_forgery_protection, feature_cat
it 'redirects to the activity page with all the projects/groups invitations accepted' do
visit invite_path(group_invite.raw_invite_token, extra_params)
click_link_or_button "oauth-login-#{provider}"
- fill_in_welcome_form
expect(page).to have_content('You have been granted Owner access to group Owned.')
expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
@@ -116,9 +115,4 @@ RSpec.describe 'OAuth Registration', :js, :allow_forgery_protection, feature_cat
end
end
end
-
- def fill_in_welcome_form
- select 'Software Developer', from: 'user_role'
- click_button 'Get started!'
- end
end
diff --git a/spec/features/reportable_note/issue_spec.rb b/spec/features/reportable_note/issue_spec.rb
index a18cdf27294..c33f202a8b2 100644
--- a/spec/features/reportable_note/issue_spec.rb
+++ b/spec/features/reportable_note/issue_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'Reportable note on issue', :js, feature_category: :team_planning do
- include CookieHelper
-
let(:user) { create(:user) }
let(:project) { create(:project) }
let(:issue) { create(:issue, project: project) }
@@ -13,7 +11,6 @@ RSpec.describe 'Reportable note on issue', :js, feature_category: :team_planning
before do
project.add_maintainer(user)
sign_in(user)
- set_cookie('new-actions-popover-viewed', 'true')
visit project_issue_path(project, issue)
end
diff --git a/spec/features/tags/developer_views_tags_spec.rb b/spec/features/tags/developer_views_tags_spec.rb
index 81a41951377..154311853f8 100644
--- a/spec/features/tags/developer_views_tags_spec.rb
+++ b/spec/features/tags/developer_views_tags_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe 'Developer views tags', feature_category: :source_code_management
it 'avoids a N+1 query in branches index' do
control_count = ActiveRecord::QueryRecorder.new { visit project_tags_path(project) }.count
- %w(one two three four five).each { |tag| repository.add_tag(user, tag, 'master', 'foo') }
+ %w[one two three four five].each { |tag| repository.add_tag(user, tag, 'master', 'foo') }
expect { visit project_tags_path(project) }.not_to exceed_query_limit(control_count)
end
diff --git a/spec/features/user_sees_revert_modal_spec.rb b/spec/features/user_sees_revert_modal_spec.rb
index 9ee3fe846a6..fdeee6a2808 100644
--- a/spec/features/user_sees_revert_modal_spec.rb
+++ b/spec/features/user_sees_revert_modal_spec.rb
@@ -4,8 +4,6 @@ require 'spec_helper'
RSpec.describe 'Merge request > User sees revert modal', :js, :sidekiq_might_not_need_inline,
feature_category: :code_review_workflow do
- include ContentEditorHelpers
-
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
let(:merge_request) { create(:merge_request, source_project: project) }
@@ -24,7 +22,6 @@ RSpec.describe 'Merge request > User sees revert modal', :js, :sidekiq_might_not
stub_feature_flags(unbatch_graphql_queries: false)
sign_in(user)
visit(project_merge_request_path(project, merge_request))
- close_rich_text_promo_popover_if_present
page.within('.mr-state-widget') do
click_button 'Merge'
@@ -40,7 +37,6 @@ RSpec.describe 'Merge request > User sees revert modal', :js, :sidekiq_might_not
context 'with page reload validates js correctly loaded' do
before do
visit(merge_request_path(merge_request))
- close_rich_text_promo_popover_if_present
end
it_behaves_like 'showing the revert modal'
diff --git a/spec/features/users/google_analytics_csp_spec.rb b/spec/features/users/google_analytics_csp_spec.rb
deleted file mode 100644
index 45cc6c5f39d..00000000000
--- a/spec/features/users/google_analytics_csp_spec.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Google Analytics 4 content security policy', feature_category: :purchase do
- it 'includes the GA4 content security policy headers' do
- visit root_path
-
- expect(response_headers['Content-Security-Policy']).to include(
- '*.googletagmanager.com',
- '*.google-analytics.com',
- '*.analytics.google.com'
- )
- end
-end
diff --git a/spec/features/users/google_syndication_csp_spec.rb b/spec/features/users/google_syndication_csp_spec.rb
deleted file mode 100644
index e71539f87c8..00000000000
--- a/spec/features/users/google_syndication_csp_spec.rb
+++ /dev/null
@@ -1,54 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Google Syndication content security policy', feature_category: :purchase do
- include ContentSecurityPolicyHelpers
-
- let_it_be(:connect_src) { 'https://other-cdn.test' }
-
- let_it_be(:google_analytics_src) do
- 'localhost https://cdn.cookielaw.org https://*.onetrust.com *.google-analytics.com ' \
- '*.analytics.google.com *.googletagmanager.com'
- end
-
- let_it_be(:allowed_src) do
- '*.google.com/pagead/landing pagead2.googlesyndication.com/pagead/landing'
- end
-
- let(:extra) { { google_tag_manager_nonce_id: 'google_tag_manager_nonce_id' } }
-
- let(:csp) do
- ActionDispatch::ContentSecurityPolicy.new do |p|
- p.connect_src(*connect_src.split)
- end
- end
-
- subject { response_headers['Content-Security-Policy'] }
-
- before do
- setup_csp_for_controller(SessionsController, csp, any_time: true)
- stub_config(extra: extra)
- visit new_user_session_path
- end
-
- context 'when self-hosted' do
- context 'when there is no CSP config' do
- let(:extra) { {} }
- let(:csp) { ActionDispatch::ContentSecurityPolicy.new }
-
- it { is_expected.to be_blank }
- end
-
- context 'when connect-src CSP config exists' do
- it { is_expected.to include("connect-src #{connect_src} #{google_analytics_src}") }
- it { is_expected.not_to include(allowed_src) }
- end
- end
-
- context 'when SaaS', :saas do
- context 'when connect-src CSP config exists' do
- it { is_expected.to include("connect-src #{connect_src} #{google_analytics_src} #{allowed_src}") }
- end
- end
-end
diff --git a/spec/features/users/login_spec.rb b/spec/features/users/login_spec.rb
index c07e419be1f..87afcbd416b 100644
--- a/spec/features/users/login_spec.rb
+++ b/spec/features/users/login_spec.rb
@@ -887,7 +887,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
it 'correctly renders tabs and panes' do
visit new_user_session_path
- ensure_tab_pane_correctness(%w(Crowd Standard))
+ ensure_tab_pane_correctness(%w[Crowd Standard])
end
it 'displays the remember me checkbox' do
diff --git a/spec/features/users/signup_spec.rb b/spec/features/users/signup_spec.rb
index 111c0cce1b1..968308938d1 100644
--- a/spec/features/users/signup_spec.rb
+++ b/spec/features/users/signup_spec.rb
@@ -67,16 +67,6 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
end
shared_examples 'signup process' do
- def fill_in_signup_form
- fill_in 'new_user_username', with: new_user.username
- fill_in 'new_user_email', with: new_user.email
- fill_in 'new_user_first_name', with: new_user.first_name
- fill_in 'new_user_last_name', with: new_user.last_name
- fill_in 'new_user_password', with: new_user.password
-
- wait_for_all_requests
- end
-
def confirm_email
new_user_token = User.find_by_email(new_user.email).confirmation_token
@@ -226,9 +216,7 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
it 'creates the user account and sends a confirmation email, and pre-fills email address after confirming' do
visit new_user_registration_path
- fill_in_signup_form
-
- expect { click_button 'Register' }.to change { User.count }.by(1)
+ expect { fill_in_sign_up_form(new_user) }.to change { User.count }.by(1)
expect(page).to have_current_path users_almost_there_path, ignore_query: true
expect(page).to have_content("Please check your email (#{new_user.email}) to confirm your account")
@@ -246,10 +234,8 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
it 'creates the user account and sends a confirmation email' do
visit new_user_registration_path
- fill_in_signup_form
-
- expect { click_button 'Register' }.to change { User.count }.by(1)
- expect(page).to have_current_path users_sign_up_welcome_path, ignore_query: true
+ expect { fill_in_sign_up_form(new_user) }.to change { User.count }.by(1)
+ expect(page).to have_current_path dashboard_projects_path
end
end
end
@@ -262,10 +248,9 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
it 'creates the user account and goes to dashboard' do
visit new_user_registration_path
- fill_in_signup_form
- click_button "Register"
+ fill_in_sign_up_form(new_user)
- expect(page).to have_current_path users_sign_up_welcome_path, ignore_query: true
+ expect(page).to have_current_path dashboard_projects_path
end
end
@@ -277,9 +262,7 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
it 'creates the user but does not sign them in' do
visit new_user_registration_path
- fill_in_signup_form
-
- expect { click_button 'Register' }.to change { User.count }.by(1)
+ expect { fill_in_sign_up_form(new_user) }.to change { User.count }.by(1)
expect(page).to have_current_path new_user_session_path, ignore_query: true
expect(page).to have_content(<<~TEXT.squish)
You have signed up successfully. However, we could not sign you in
@@ -294,8 +277,7 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
create(:user, email: new_user.email)
visit new_user_registration_path
- fill_in_signup_form
- click_button "Register"
+ fill_in_sign_up_form(new_user)
expect(page).to have_current_path user_registration_path, ignore_query: true
expect(page).to have_content("error prohibited this user from being saved")
@@ -306,8 +288,7 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
create(:user, email: new_user.email)
visit new_user_registration_path
- fill_in_signup_form
- click_button "Register"
+ fill_in_sign_up_form(new_user)
expect(page).to have_current_path user_registration_path, ignore_query: true
expect(page.body).not_to match(/#{new_user.password}/)
@@ -328,18 +309,8 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
visit new_user_registration_path
expect(page).to have_content(terms_text)
- fill_in_signup_form
- click_button 'Register'
+ fill_in_sign_up_form(new_user)
- expect(page).to have_current_path(users_sign_up_welcome_path), ignore_query: true
-
- select 'Software Developer', from: 'user_role'
- click_button 'Get started!'
-
- created_user = User.find_by_username(new_user.username)
-
- expect(created_user.software_developer_role?).to be_truthy
- expect(created_user.setup_for_company).to be_nil
expect(page).to have_current_path(dashboard_projects_path)
end
@@ -366,9 +337,7 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
it 'prevents from signing up' do
visit new_user_registration_path
- fill_in_signup_form
-
- expect { click_button 'Register' }.not_to change { User.count }
+ expect { fill_in_sign_up_form(new_user) }.not_to change { User.count }
expect(page).to have_content(_('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.'))
expect(page).to have_content(
"Minimum length is #{Gitlab::CurrentSettings.minimum_password_length} characters")
@@ -379,9 +348,7 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
it 'prevents from signing up' do
visit new_user_registration_path
- fill_in_signup_form
-
- expect { click_button 'Register' }.not_to change { User.count }
+ expect { fill_in_sign_up_form(new_user) }.not_to change { User.count }
expect(page).to have_content('That was a bit too quick! Please resubmit.')
end
end
@@ -390,9 +357,7 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
it 'allows visiting of a page after initial registration' do
visit new_user_registration_path
- fill_in_signup_form
-
- click_button 'Register'
+ fill_in_sign_up_form(new_user)
visit new_project_path
@@ -403,8 +368,7 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
create(:user, email: new_user.email)
visit new_user_registration_path
- fill_in_signup_form
- click_button "Register"
+ fill_in_sign_up_form(new_user)
expect(page).to have_current_path user_registration_path, ignore_query: true
expect(page.body).not_to match(/#{new_user.password}/)
diff --git a/spec/features/users/terms_spec.rb b/spec/features/users/terms_spec.rb
index 3495af3ae85..e51ed3a0e80 100644
--- a/spec/features/users/terms_spec.rb
+++ b/spec/features/users/terms_spec.rb
@@ -157,8 +157,7 @@ RSpec.describe 'Users > Terms', :js, feature_category: :user_profile do
it 'allows the user to sign out without a response' do
visit terms_path
- find('.header-user-dropdown-toggle').click
- click_link('Sign out')
+ click_button('Decline and sign out')
expect(page).to have_content('Sign in')
expect(page).to have_content('Register')
diff --git a/spec/finders/alert_management/alerts_finder_spec.rb b/spec/finders/alert_management/alerts_finder_spec.rb
index 3c37d52d6c3..4d0324b9f1a 100644
--- a/spec/finders/alert_management/alerts_finder_spec.rb
+++ b/spec/finders/alert_management/alerts_finder_spec.rb
@@ -183,7 +183,7 @@ RSpec.describe AlertManagement::AlertsFinder, '#execute' do
let(:params) { { sort: 'severity_asc' } }
it 'sorts alerts by severity from less critical to more critical' do
- expect(execute.pluck(:severity).uniq).to eq(%w(unknown info low medium high critical))
+ expect(execute.pluck(:severity).uniq).to eq(%w[unknown info low medium high critical])
end
end
@@ -191,7 +191,7 @@ RSpec.describe AlertManagement::AlertsFinder, '#execute' do
let(:params) { { sort: 'severity_desc' } }
it 'sorts alerts by severity from more critical to less critical' do
- expect(execute.pluck(:severity).uniq).to eq(%w(critical high medium low info unknown))
+ expect(execute.pluck(:severity).uniq).to eq(%w[critical high medium low info unknown])
end
end
end
diff --git a/spec/finders/branches_finder_spec.rb b/spec/finders/branches_finder_spec.rb
index 9f185c8b8fb..004629eda95 100644
--- a/spec/finders/branches_finder_spec.rb
+++ b/spec/finders/branches_finder_spec.rb
@@ -92,7 +92,7 @@ RSpec.describe BranchesFinder, feature_category: :source_code_management do
result = subject
expect(result.count).to eq(3)
- expect(result.map(&:name)).to eq(%w{csv fix lfs})
+ expect(result.map(&:name)).to eq(%w[csv fix lfs])
end
end
@@ -265,7 +265,7 @@ RSpec.describe BranchesFinder, feature_category: :source_code_management do
it 'filters branches' do
result = subject
- expect(result.map(&:name)).to eq(%w(feature_conflict few-commits))
+ expect(result.map(&:name)).to eq(%w[feature_conflict few-commits])
end
end
@@ -275,7 +275,7 @@ RSpec.describe BranchesFinder, feature_category: :source_code_management do
it 'filters branches' do
result = subject
- expect(result.map(&:name)).to eq(%w(fix flatten-dir))
+ expect(result.map(&:name)).to eq(%w[fix flatten-dir])
end
end
@@ -306,7 +306,7 @@ RSpec.describe BranchesFinder, feature_category: :source_code_management do
it 'filters branches' do
result = subject
- expect(result.map(&:name)).to eq(%w(feature improve/awesome merge-test markdown feature_conflict))
+ expect(result.map(&:name)).to eq(%w[feature improve/awesome merge-test markdown feature_conflict])
end
end
@@ -316,7 +316,7 @@ RSpec.describe BranchesFinder, feature_category: :source_code_management do
it 'filters branches' do
result = subject
- expect(result.map(&:name)).to eq(%w(merge-test markdown))
+ expect(result.map(&:name)).to eq(%w[merge-test markdown])
end
end
end
@@ -328,7 +328,7 @@ RSpec.describe BranchesFinder, feature_category: :source_code_management do
result = subject
expect(result.count).to eq(3)
- expect(result.map(&:name)).to eq(%w{csv fix lfs})
+ expect(result.map(&:name)).to eq(%w[csv fix lfs])
end
end
@@ -338,7 +338,7 @@ RSpec.describe BranchesFinder, feature_category: :source_code_management do
it 'falls back to default execute and ignore paginations' do
result = subject
- expect(result.map(&:name)).to eq(%w(feature feature_conflict few-commits fix flatten-dir))
+ expect(result.map(&:name)).to eq(%w[feature feature_conflict few-commits fix flatten-dir])
end
end
end
diff --git a/spec/finders/ci/runners_finder_spec.rb b/spec/finders/ci/runners_finder_spec.rb
index 5d249ddb391..06cca035c6f 100644
--- a/spec/finders/ci/runners_finder_spec.rb
+++ b/spec/finders/ci/runners_finder_spec.rb
@@ -171,7 +171,7 @@ RSpec.describe Ci::RunnersFinder, feature_category: :runner_fleet do
it_behaves_like 'sorts by created_at descending'
end
- %w(created_date created_at_desc).each do |sort|
+ %w[created_date created_at_desc].each do |sort|
context "with sort param equal to #{sort}" do
let(:params) { { sort: sort } }
diff --git a/spec/finders/concerns/packages/finder_helper_spec.rb b/spec/finders/concerns/packages/finder_helper_spec.rb
index 94bcec6163e..f81e940c7ed 100644
--- a/spec/finders/concerns/packages/finder_helper_spec.rb
+++ b/spec/finders/concerns/packages/finder_helper_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Packages::FinderHelper do
+RSpec.describe ::Packages::FinderHelper, feature_category: :package_registry do
describe '#packages_for_project' do
let_it_be_with_reload(:project1) { create(:project) }
let_it_be(:package1) { create(:package, project: project1) }
@@ -107,6 +107,34 @@ RSpec.describe ::Packages::FinderHelper do
it_behaves_like params[:shared_example_name]
end
+
+ context 'when the second project has the package registry disabled' do
+ before do
+ project1.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ project2.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC,
+ package_registry_access_level: 'disabled', packages_enabled: false)
+ end
+
+ it_behaves_like 'returning both packages'
+
+ context 'with with_package_registry_enabled set to true' do
+ let(:finder_class) do
+ Class.new do
+ include ::Packages::FinderHelper
+
+ def initialize(user)
+ @current_user = user
+ end
+
+ def execute(group)
+ packages_visible_to_user(@current_user, within_group: group, with_package_registry_enabled: true)
+ end
+ end
+ end
+
+ it_behaves_like 'returning package1'
+ end
+ end
end
context 'with a group deploy token' do
diff --git a/spec/finders/environments/environments_finder_spec.rb b/spec/finders/environments/environments_finder_spec.rb
index df66bbdc235..e3b1a121497 100644
--- a/spec/finders/environments/environments_finder_spec.rb
+++ b/spec/finders/environments/environments_finder_spec.rb
@@ -24,13 +24,13 @@ RSpec.describe Environments::EnvironmentsFinder do
end
it 'returns environments with any of the requested states' do
- result = described_class.new(project, user, states: %w(available stopped)).execute
+ result = described_class.new(project, user, states: %w[available stopped]).execute
expect(result).to contain_exactly(environment, environment_stopped, environment_available, stopped_environment)
end
it 'raises exception when requested state is invalid' do
- expect { described_class.new(project, user, states: %w(invalid stopped)).execute }.to(
+ expect { described_class.new(project, user, states: %w[invalid stopped]).execute }.to(
raise_error(described_class::InvalidStatesError, 'Requested states are invalid')
)
end
diff --git a/spec/finders/groups_finder_spec.rb b/spec/finders/groups_finder_spec.rb
index 23d73b48199..f20c03c9658 100644
--- a/spec/finders/groups_finder_spec.rb
+++ b/spec/finders/groups_finder_spec.rb
@@ -12,30 +12,30 @@ RSpec.describe GroupsFinder, feature_category: :groups_and_projects do
using RSpec::Parameterized::TableSyntax
where(:user_type, :params, :results) do
- nil | { all_available: true } | %i(public_group user_public_group)
- nil | { all_available: false } | %i(public_group user_public_group)
- nil | {} | %i(public_group user_public_group)
-
- :regular | { all_available: true } | %i(public_group internal_group user_public_group user_internal_group
- user_private_group)
- :regular | { all_available: false } | %i(user_public_group user_internal_group user_private_group)
- :regular | {} | %i(public_group internal_group user_public_group user_internal_group user_private_group)
- :regular | { min_access_level: Gitlab::Access::DEVELOPER } | %i(user_public_group user_internal_group user_private_group)
-
- :external | { all_available: true } | %i(public_group user_public_group user_internal_group user_private_group)
- :external | { all_available: false } | %i(user_public_group user_internal_group user_private_group)
- :external | {} | %i(public_group user_public_group user_internal_group user_private_group)
-
- :admin_without_admin_mode | { all_available: true } | %i(public_group internal_group user_public_group
- user_internal_group user_private_group)
- :admin_without_admin_mode | { all_available: false } | %i(user_public_group user_internal_group user_private_group)
- :admin_without_admin_mode | {} | %i(public_group internal_group user_public_group user_internal_group user_private_group)
-
- :admin_with_admin_mode | { all_available: true } | %i(public_group internal_group private_group user_public_group
- user_internal_group user_private_group)
- :admin_with_admin_mode | { all_available: false } | %i(user_public_group user_internal_group user_private_group)
- :admin_with_admin_mode | {} | %i(public_group internal_group private_group user_public_group user_internal_group
- user_private_group)
+ nil | { all_available: true } | %i[public_group user_public_group]
+ nil | { all_available: false } | %i[public_group user_public_group]
+ nil | {} | %i[public_group user_public_group]
+
+ :regular | { all_available: true } | %i[public_group internal_group user_public_group user_internal_group
+ user_private_group]
+ :regular | { all_available: false } | %i[user_public_group user_internal_group user_private_group]
+ :regular | {} | %i[public_group internal_group user_public_group user_internal_group user_private_group]
+ :regular | { min_access_level: Gitlab::Access::DEVELOPER } | %i[user_public_group user_internal_group user_private_group]
+
+ :external | { all_available: true } | %i[public_group user_public_group user_internal_group user_private_group]
+ :external | { all_available: false } | %i[user_public_group user_internal_group user_private_group]
+ :external | {} | %i[public_group user_public_group user_internal_group user_private_group]
+
+ :admin_without_admin_mode | { all_available: true } | %i[public_group internal_group user_public_group
+ user_internal_group user_private_group]
+ :admin_without_admin_mode | { all_available: false } | %i[user_public_group user_internal_group user_private_group]
+ :admin_without_admin_mode | {} | %i[public_group internal_group user_public_group user_internal_group user_private_group]
+
+ :admin_with_admin_mode | { all_available: true } | %i[public_group internal_group private_group user_public_group
+ user_internal_group user_private_group]
+ :admin_with_admin_mode | { all_available: false } | %i[user_public_group user_internal_group user_private_group]
+ :admin_with_admin_mode | {} | %i[public_group internal_group private_group user_public_group user_internal_group
+ user_private_group]
end
with_them do
diff --git a/spec/finders/license_template_finder_spec.rb b/spec/finders/license_template_finder_spec.rb
index 754b92faccc..21c4cf74fd0 100644
--- a/spec/finders/license_template_finder_spec.rb
+++ b/spec/finders/license_template_finder_spec.rb
@@ -75,7 +75,7 @@ RSpec.describe LicenseTemplateFinder do
context 'template names hash keys' do
it 'has all the expected keys' do
- expect(template_names.values.flatten.first.keys).to match_array(%i(id key name project_id))
+ expect(template_names.values.flatten.first.keys).to match_array(%i[id key name project_id])
end
end
end
diff --git a/spec/finders/merge_requests/oldest_per_commit_finder_spec.rb b/spec/finders/merge_requests/oldest_per_commit_finder_spec.rb
index 6dffaff294d..9832c74c6b8 100644
--- a/spec/finders/merge_requests/oldest_per_commit_finder_spec.rb
+++ b/spec/finders/merge_requests/oldest_per_commit_finder_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe MergeRequests::OldestPerCommitFinder do
+RSpec.describe MergeRequests::OldestPerCommitFinder, feature_category: :code_review_workflow do
describe '#execute' do
it 'returns a Hash mapping commit SHAs to their oldest merge requests' do
project = create(:project)
@@ -18,8 +18,8 @@ RSpec.describe MergeRequests::OldestPerCommitFinder do
merge_commit_sha: sha3
)
- mr1_diff = create(:merge_request_diff, merge_request: mr1)
- mr2_diff = create(:merge_request_diff, merge_request: mr2)
+ mr1_diff = mr1.merge_request_diff
+ mr2_diff = mr2.merge_request_diff
create(:merge_request_diff_commit, merge_request_diff: mr1_diff, sha: sha1)
create(:merge_request_diff_commit, merge_request_diff: mr2_diff, sha: sha1)
@@ -71,6 +71,7 @@ RSpec.describe MergeRequests::OldestPerCommitFinder do
# This expectation is set so we're certain that the merge commit SHAs (if
# a matching merge request is found) aren't also used for finding MRs
# according to diffs.
+ #
expect(MergeRequestDiffCommit)
.not_to receive(:oldest_merge_request_id_per_commit)
@@ -116,6 +117,27 @@ RSpec.describe MergeRequests::OldestPerCommitFinder do
.to eq(sha1 => mr, sha2 => mr)
end
+ it 'includes a merge request for fast-forward merged MR' do
+ project = create(:project)
+ sha = Digest::SHA1.hexdigest('foo')
+ # When there is only a merged_commit_sha, then it means the MR was
+ # fast-forward merged without a squash, but possibly including a rebase.
+ mr = create(
+ :merge_request,
+ :merged,
+ target_project: project,
+ merged_commit_sha: sha
+ )
+
+ commits = [double(:commit1, id: sha)]
+
+ expect(MergeRequestDiffCommit)
+ .not_to receive(:oldest_merge_request_id_per_commit)
+
+ expect(described_class.new(project).execute(commits))
+ .to eq(sha => mr)
+ end
+
it 'includes the oldest merge request when a merge commit is present in a newer merge request' do
project = create(:project)
sha = Digest::SHA1.hexdigest('foo')
@@ -126,9 +148,12 @@ RSpec.describe MergeRequests::OldestPerCommitFinder do
)
mr2 = create(:merge_request, :merged, target_project: project)
- mr_diff = create(:merge_request_diff, merge_request: mr2)
- create(:merge_request_diff_commit, merge_request_diff: mr_diff, sha: sha)
+ create(
+ :merge_request_diff_commit,
+ merge_request_diff: mr2.merge_request_diff,
+ sha: sha
+ )
commits = [double(:commit, id: sha)]
diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb
index 9aa98189f30..3f9c1baec82 100644
--- a/spec/finders/merge_requests_finder_spec.rb
+++ b/spec/finders/merge_requests_finder_spec.rb
@@ -163,9 +163,9 @@ RSpec.describe MergeRequestsFinder, feature_category: :code_review_workflow do
it { is_expected.to eq([merge_request2]) }
it 'queries merge_request_metrics.target_project_id table' do
- expect(query.to_sql).to include(%{"merge_request_metrics"."target_project_id" = #{merge_request2.target_project_id}})
+ expect(query.to_sql).to include(%("merge_request_metrics"."target_project_id" = #{merge_request2.target_project_id}))
- expect(query.to_sql).not_to include(%{"merge_requests"."target_project_id"})
+ expect(query.to_sql).not_to include(%("merge_requests"."target_project_id"))
end
end
end
@@ -537,7 +537,7 @@ RSpec.describe MergeRequestsFinder, feature_category: :code_review_workflow do
context 'filtering by approved by username' do
let(:params) { { approved_by_usernames: user2.username } }
- where(:sort) { [nil] + %w(milestone merged_at merged_at_desc closed_at closed_at_desc) }
+ where(:sort) { [nil] + %w[milestone merged_at merged_at_desc closed_at closed_at_desc] }
before do
create(:approval, merge_request: merge_request3, user: user2)
diff --git a/spec/finders/packages/maven/package_finder_spec.rb b/spec/finders/packages/maven/package_finder_spec.rb
index 8b45dbdad51..f769471fcc7 100644
--- a/spec/finders/packages/maven/package_finder_spec.rb
+++ b/spec/finders/packages/maven/package_finder_spec.rb
@@ -17,21 +17,21 @@ RSpec.describe ::Packages::Maven::PackageFinder do
group.add_developer(user)
end
- describe '#execute!' do
- subject { finder.execute! }
+ describe '#execute' do
+ subject { finder.execute }
shared_examples 'handling valid and invalid paths' do
context 'with a valid path' do
let(:param_path) { package.maven_metadatum.path }
- it { is_expected.to eq(package) }
+ it { is_expected.to include(package) }
end
context 'with an invalid path' do
let(:param_path) { 'com/example/my-app/1.0-SNAPSHOT' }
- it 'raises an error' do
- expect { subject }.to raise_error(ActiveRecord::RecordNotFound)
+ it 'returns an empty array' do
+ is_expected.to be_empty
end
end
@@ -42,7 +42,9 @@ RSpec.describe ::Packages::Maven::PackageFinder do
package.update_column(:status, :error)
end
- it { expect { subject }.to raise_error(ActiveRecord::RecordNotFound) }
+ it 'returns an empty array' do
+ is_expected.to be_empty
+ end
end
end
@@ -59,8 +61,8 @@ RSpec.describe ::Packages::Maven::PackageFinder do
end
context 'across all projects' do
- it 'raises an error' do
- expect { subject }.to raise_error(ActiveRecord::RecordNotFound)
+ it 'returns an empty array' do
+ is_expected.to be_empty
end
end
@@ -86,13 +88,13 @@ RSpec.describe ::Packages::Maven::PackageFinder do
end
context 'without order by package file' do
- it { is_expected.to eq(package3) }
+ it { is_expected.to match_array([package1, package2, package3]) }
end
context 'with order by package file' do
let(:param_order_by_package_file) { true }
- it { is_expected.to eq(package2) }
+ it { expect(subject.last).to eq(package2) }
end
end
end
diff --git a/spec/finders/packages/npm/packages_for_user_finder_spec.rb b/spec/finders/packages/npm/packages_for_user_finder_spec.rb
index e2dc21e1008..ffbb4f9e484 100644
--- a/spec/finders/packages/npm/packages_for_user_finder_spec.rb
+++ b/spec/finders/packages/npm/packages_for_user_finder_spec.rb
@@ -36,6 +36,24 @@ RSpec.describe ::Packages::Npm::PackagesForUserFinder, feature_category: :packag
end
it_behaves_like 'searches for packages'
+
+ context 'when a user is a reporter of both projects' do
+ before_all do
+ project2.add_reporter(user)
+ end
+
+ it { is_expected.to contain_exactly(package, package_with_diff_project) }
+
+ context 'when the second project has the package registry disabled' do
+ before_all do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ project2.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC,
+ package_registry_access_level: 'disabled', packages_enabled: false)
+ end
+
+ it_behaves_like 'searches for packages'
+ end
+ end
end
end
end
diff --git a/spec/finders/projects/ml/model_finder_spec.rb b/spec/finders/projects/ml/model_finder_spec.rb
index 48333ae49e5..1d869e1792d 100644
--- a/spec/finders/projects/ml/model_finder_spec.rb
+++ b/spec/finders/projects/ml/model_finder_spec.rb
@@ -22,4 +22,8 @@ RSpec.describe Projects::Ml::ModelFinder, feature_category: :mlops do
it 'does not return models belonging to a different project' do
is_expected.not_to include(model3)
end
+
+ it 'includes version count' do
+ expect(models[0].version_count).to be(models[0].versions.count)
+ end
end
diff --git a/spec/finders/snippets_finder_spec.rb b/spec/finders/snippets_finder_spec.rb
index a5cd90b444e..716eee5c9ac 100644
--- a/spec/finders/snippets_finder_spec.rb
+++ b/spec/finders/snippets_finder_spec.rb
@@ -112,9 +112,7 @@ RSpec.describe SnippetsFinder do
expect(snippets).to contain_exactly(private_personal_snippet, internal_personal_snippet, public_personal_snippet)
end
- it 'returns all snippets (everything) for an admin when all_available="true" passed in' do
- allow(admin).to receive(:can_read_all_resources?).and_return(true)
-
+ it 'returns all snippets (everything) for an admin when all_available="true" passed in', :enable_admin_mode do
snippets = described_class.new(admin, author: user, all_available: true).execute
expect(snippets).to contain_exactly(
@@ -326,6 +324,50 @@ RSpec.describe SnippetsFinder do
end
end
+ context 'filtering for snippets authored by banned users', feature_category: :insider_threat do
+ let_it_be(:banned_user) { create(:user, :banned) }
+
+ let_it_be(:banned_public_personal_snippet) { create(:personal_snippet, :public, author: banned_user) }
+ let_it_be(:banned_public_project_snippet) { create(:project_snippet, :public, project: project, author: banned_user) }
+
+ it 'returns banned snippets for admins when in admin mode', :enable_admin_mode do
+ snippets = described_class.new(
+ admin,
+ ids: [banned_public_personal_snippet.id, banned_public_project_snippet.id]
+ ).execute
+
+ expect(snippets).to contain_exactly(
+ banned_public_personal_snippet, banned_public_project_snippet
+ )
+ end
+
+ it 'does not return banned snippets for non-admin users' do
+ snippets = described_class.new(
+ user,
+ ids: [banned_public_personal_snippet.id, banned_public_project_snippet.id]
+ ).execute
+
+ expect(snippets).to be_empty
+ end
+
+ context 'when hide_snippets_of_banned_users feature flag is off' do
+ before do
+ stub_feature_flags(hide_snippets_of_banned_users: false)
+ end
+
+ it 'returns banned snippets for non-admin users' do
+ snippets = described_class.new(
+ user,
+ ids: [banned_public_personal_snippet.id, banned_public_project_snippet.id]
+ ).execute
+
+ expect(snippets).to contain_exactly(
+ banned_public_personal_snippet, banned_public_project_snippet
+ )
+ end
+ end
+ end
+
context 'when the user cannot read cross project' do
before do
allow(Ability).to receive(:allowed?).and_call_original
diff --git a/spec/finders/tags_finder_spec.rb b/spec/finders/tags_finder_spec.rb
index 2af23c466fb..525c19ba137 100644
--- a/spec/finders/tags_finder_spec.rb
+++ b/spec/finders/tags_finder_spec.rb
@@ -133,7 +133,7 @@ RSpec.describe TagsFinder do
it 'filters tags' do
result = subject
- expect(result.map(&:name)).to eq(%w(v1.1.0))
+ expect(result.map(&:name)).to eq(%w[v1.1.0])
end
end
@@ -143,7 +143,7 @@ RSpec.describe TagsFinder do
it 'filters branches' do
result = subject
- expect(result.map(&:name)).to eq(%w(v1.1.1))
+ expect(result.map(&:name)).to eq(%w[v1.1.1])
end
end
@@ -153,7 +153,7 @@ RSpec.describe TagsFinder do
it 'filters branches' do
result = subject
- expect(result.map(&:name)).to eq(%w(v1.0.0 v1.1.0))
+ expect(result.map(&:name)).to eq(%w[v1.0.0 v1.1.0])
end
end
@@ -174,7 +174,7 @@ RSpec.describe TagsFinder do
it 'filters branches' do
result = subject
- expect(result.map(&:name)).to eq(%w(v1.1.1 v1.1.0 v1.0.0))
+ expect(result.map(&:name)).to eq(%w[v1.1.1 v1.1.0 v1.0.0])
end
end
@@ -184,7 +184,7 @@ RSpec.describe TagsFinder do
it 'filters branches' do
result = subject
- expect(result.map(&:name)).to eq(%w(v1.1.0 v1.0.0))
+ expect(result.map(&:name)).to eq(%w[v1.1.0 v1.0.0])
end
end
end
@@ -195,7 +195,7 @@ RSpec.describe TagsFinder do
it 'ignores the pagination for search' do
result = subject
- expect(result.map(&:name)).to eq(%w(v1.1.1))
+ expect(result.map(&:name)).to eq(%w[v1.1.1])
end
end
end
diff --git a/spec/finders/template_finder_spec.rb b/spec/finders/template_finder_spec.rb
index eacce0bd996..57956b5aa95 100644
--- a/spec/finders/template_finder_spec.rb
+++ b/spec/finders/template_finder_spec.rb
@@ -189,7 +189,7 @@ RSpec.describe TemplateFinder do
context 'template names hash keys' do
it 'has all the expected keys' do
- expect(result.first.to_h.keys).to match_array(%i(id key name project_id))
+ expect(result.first.to_h.keys).to match_array(%i[id key name project_id])
end
end
end
@@ -223,7 +223,7 @@ RSpec.describe TemplateFinder do
context 'template names hash keys' do
it 'has all the expected keys' do
- expect(result.first.to_h.keys).to match_array(%i(id key name project_id))
+ expect(result.first.to_h.keys).to match_array(%i[id key name project_id])
end
end
end
diff --git a/spec/finders/vs_code/settings/settings_finder_spec.rb b/spec/finders/vs_code/settings/settings_finder_spec.rb
new file mode 100644
index 00000000000..b7b4308bbbd
--- /dev/null
+++ b/spec/finders/vs_code/settings/settings_finder_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe VsCode::Settings::SettingsFinder, feature_category: :web_ide do
+ let_it_be(:user) { create(:user) }
+
+ describe '#execute' do
+ context 'when nil is passed in as the list of settings' do
+ let(:finder) { described_class.new(user, nil) }
+
+ subject { finder.execute }
+
+ context 'when user has no settings' do
+ it 'returns an empty array' do
+ expect(subject).to eq([])
+ end
+ end
+
+ context 'when user has settings' do
+ before do
+ create(:vscode_setting, user: user)
+ end
+
+ it 'returns an array of settings' do
+ expect(subject.length).to eq(1)
+ expect(subject[0].user_id).to eq(user.id)
+ expect(subject[0].setting_type).to eq('settings')
+ end
+ end
+ end
+
+ context 'when a list of settings is passed, filters by the setting' do
+ let_it_be(:setting) { create(:vscode_setting, user: user) }
+
+ context 'when user has no settings with that type' do
+ subject { finder.execute }
+
+ it 'returns an empty array' do
+ finder = described_class.new(user, ['profile'])
+ expect(finder.execute).to eq([])
+ end
+ end
+
+ context 'when user does have settings with the type' do
+ subject { finder.execute }
+
+ it 'returns the record when a single setting exists' do
+ result = described_class.new(user, ['settings']).execute
+ expect(result.length).to eq(1)
+ expect(result[0].user_id).to eq(user.id)
+ expect(result[0].setting_type).to eq('settings')
+ end
+
+ it 'returns multiple records when more than one setting exists' do
+ create(:vscode_setting, user: user, setting_type: 'profile')
+
+ result = described_class.new(user, %w[settings profile]).execute
+ expect(result.length).to eq(2)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/fixtures/api/schemas/entities/note_user_entity.json b/spec/fixtures/api/schemas/entities/note_user_entity.json
index f5d28dd7b71..16d33ada51d 100644
--- a/spec/fixtures/api/schemas/entities/note_user_entity.json
+++ b/spec/fixtures/api/schemas/entities/note_user_entity.json
@@ -3,19 +3,42 @@
"required": [
"id",
"state",
+ "locked",
"avatar_url",
"path",
"name",
"username"
],
"properties": {
- "id": { "type": "integer" },
- "state": { "type": "string" },
- "avatar_url": { "type": [ "string", "null" ] },
- "path": { "type": "string" },
- "name": { "type": "string" },
- "username": { "type": "string" },
- "status_tooltip_html": { "$ref": "../types/nullable_string.json" },
- "show_status": { "type": "boolean" }
+ "id": {
+ "type": "integer"
+ },
+ "state": {
+ "type": "string"
+ },
+ "locked": {
+ "type": "boolean"
+ },
+ "avatar_url": {
+ "type": [
+ "string",
+ "null"
+ ]
+ },
+ "path": {
+ "type": "string"
+ },
+ "name": {
+ "type": "string"
+ },
+ "username": {
+ "type": "string"
+ },
+ "status_tooltip_html": {
+ "$ref": "../types/nullable_string.json"
+ },
+ "show_status": {
+ "type": "boolean"
+ }
}
}
diff --git a/spec/fixtures/api/schemas/entities/user.json b/spec/fixtures/api/schemas/entities/user.json
index 984b7184d36..ca2ad201d66 100644
--- a/spec/fixtures/api/schemas/entities/user.json
+++ b/spec/fixtures/api/schemas/entities/user.json
@@ -3,6 +3,7 @@
"required": [
"id",
"state",
+ "locked",
"avatar_url",
"web_url",
"path",
@@ -10,13 +11,35 @@
"username"
],
"properties": {
- "id": { "type": "integer" },
- "state": { "type": "string" },
- "avatar_url": { "type": [ "string", "null" ] },
- "web_url": { "type": "string" },
- "path": { "type": "string" },
- "name": { "type": "string" },
- "username": { "type": "string" },
- "status_tooltip_html": { "$ref": "../types/nullable_string.json" }
+ "id": {
+ "type": "integer"
+ },
+ "state": {
+ "type": "string"
+ },
+ "locked": {
+ "type": "boolean"
+ },
+ "avatar_url": {
+ "type": [
+ "string",
+ "null"
+ ]
+ },
+ "web_url": {
+ "type": "string"
+ },
+ "path": {
+ "type": "string"
+ },
+ "name": {
+ "type": "string"
+ },
+ "username": {
+ "type": "string"
+ },
+ "status_tooltip_html": {
+ "$ref": "../types/nullable_string.json"
+ }
}
}
diff --git a/spec/fixtures/api/schemas/graphql/packages/package_details.json b/spec/fixtures/api/schemas/graphql/packages/package_details.json
index f66f5eb35b5..2e7a950d330 100644
--- a/spec/fixtures/api/schemas/graphql/packages/package_details.json
+++ b/spec/fixtures/api/schemas/graphql/packages/package_details.json
@@ -14,6 +14,7 @@
"pipelines",
"versions",
"status",
+ "statusMessage",
"canDestroy",
"lastDownloadedAt",
"_links"
@@ -171,6 +172,12 @@
"ERROR"
]
},
+ "statusMessage": {
+ "type": [
+ "string",
+ "null"
+ ]
+ },
"dependencyLinks": {
"type": "object",
"additionalProperties": false,
diff --git a/spec/fixtures/api/schemas/jira_connect/pull_request.json b/spec/fixtures/api/schemas/jira_connect/pull_request.json
index 430752335be..a24af318d30 100644
--- a/spec/fixtures/api/schemas/jira_connect/pull_request.json
+++ b/spec/fixtures/api/schemas/jira_connect/pull_request.json
@@ -16,6 +16,12 @@
"author": {
"$ref": "./author.json"
},
+ "reviewers": {
+ "type": "array",
+ "items": {
+ "$ref": "./reviewer.json"
+ }
+ },
"commentCount": {
"type": "integer"
},
@@ -60,4 +66,4 @@
"updateSequenceId"
],
"additionalProperties": false
-} \ No newline at end of file
+}
diff --git a/spec/fixtures/api/schemas/jira_connect/reviewer.json b/spec/fixtures/api/schemas/jira_connect/reviewer.json
new file mode 100644
index 00000000000..a51af87e4ca
--- /dev/null
+++ b/spec/fixtures/api/schemas/jira_connect/reviewer.json
@@ -0,0 +1,20 @@
+{
+ "type": "object",
+ "properties": {
+ "name": {
+ "type": "string"
+ },
+ "email": {
+ "type": "string"
+ },
+ "approvalStatus": {
+ "type": "string"
+ }
+ },
+ "required": [
+ "name",
+ "email",
+ "approvalStatus"
+ ],
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/issue.json b/spec/fixtures/api/schemas/public_api/v4/issue.json
index c2b096a922f..dbec110bccf 100644
--- a/spec/fixtures/api/schemas/public_api/v4/issue.json
+++ b/spec/fixtures/api/schemas/public_api/v4/issue.json
@@ -150,6 +150,9 @@
"state": {
"type": "string"
},
+ "locked": {
+ "type": "boolean"
+ },
"avatar_url": {
"type": "string",
"format": "uri"
@@ -180,6 +183,9 @@
"state": {
"type": "string"
},
+ "locked": {
+ "type": "boolean"
+ },
"avatar_url": {
"type": "string",
"format": "uri"
@@ -206,6 +212,9 @@
"state": {
"type": "string"
},
+ "locked": {
+ "type": "boolean"
+ },
"avatar_url": {
"type": "string",
"format": "uri"
@@ -218,6 +227,7 @@
"required": [
"id",
"state",
+ "locked",
"avatar_url",
"name",
"username",
@@ -308,4 +318,4 @@
"confidential",
"web_url"
]
-} \ No newline at end of file
+}
diff --git a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml
index 29dd2ce0e84..64cf9371858 100644
--- a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml
+++ b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml
@@ -1,5 +1,5 @@
---
-# See Usage Ping metrics dictionary docs https://docs.gitlab.com/ee/development/usage_ping/metrics_dictionary.html
+# See Usage Ping metrics dictionary docs https://docs.gitlab.com/ee/development/internal_analytics/metrics/metrics_dictionary.html
key_path: counts_weekly.test_metric
description:
product_section:
diff --git a/spec/fixtures/markdown.md.erb b/spec/fixtures/markdown.md.erb
index 37376713355..8e11f95be8b 100644
--- a/spec/fixtures/markdown.md.erb
+++ b/spec/fixtures/markdown.md.erb
@@ -203,7 +203,7 @@ Note: work item references use `#`, which get built as an issue link.
- Ignored in code: `<%= work_item.to_reference %>`
- Ignored in links: [Link to <%= work_item.to_reference %>](#work_item-link)
- Ignored when backslash escaped: \<%= work_item.to_reference %>
-- Work item by URL: <%= urls.project_work_item_url(work_item.project, work_item) %>
+- Work item by URL: <%= urls.project_work_item_url(work_item.project, work_item.iid) %>
- Link to work item by reference (counted as an issue reference): [Work item](<%= work_item.to_reference %>)
- Link to work item by URL: [Work item](<%= urls.project_work_item_url(work_item.project, work_item) %>)
diff --git a/spec/fixtures/packages/nuget/package_with_symbols.snupkg b/spec/fixtures/packages/nuget/package_with_symbols.snupkg
new file mode 100644
index 00000000000..b4ff4ba7f9f
--- /dev/null
+++ b/spec/fixtures/packages/nuget/package_with_symbols.snupkg
Binary files differ
diff --git a/spec/fixtures/security_reports/master/gl-common-scanning-report.json b/spec/fixtures/security_reports/master/gl-common-scanning-report.json
index 31a86d3a8ae..47e2a503b02 100644
--- a/spec/fixtures/security_reports/master/gl-common-scanning-report.json
+++ b/spec/fixtures/security_reports/master/gl-common-scanning-report.json
@@ -12,6 +12,12 @@
"id": "gemnasium",
"name": "Gemnasium"
},
+ "cvss": [
+ {
+ "vendor": "GitLab",
+ "vector_string": "CVSS:3.1/AV:N/AC:L/PR:N/UI:R/S:U/C:H/I:H/A:H"
+ }
+ ],
"location": {
"file": "some/kind/of/file.c",
"dependency": {
@@ -414,7 +420,9 @@
"value": "foo"
}
],
- "links": []
+ "links": [
+
+ ]
}
],
"remediations": [
@@ -476,7 +484,9 @@
"diff": "dG90YWxseSBsZWdpdGltYXRlIGRpZmYsIDEwLzEwIHdvdWxkIGFwcGx5"
}
],
- "dependency_files": [],
+ "dependency_files": [
+
+ ],
"scan": {
"analyzer": {
"id": "common-analyzer",
diff --git a/spec/fixtures/structure.sql b/spec/fixtures/structure.sql
deleted file mode 100644
index 421fb6c3593..00000000000
--- a/spec/fixtures/structure.sql
+++ /dev/null
@@ -1,108 +0,0 @@
-CREATE INDEX missing_index ON events USING btree (created_at, author_id);
-
-CREATE UNIQUE INDEX wrong_index ON table_name (column_name, column_name_2);
-
-CREATE UNIQUE INDEX "index" ON achievements USING btree (namespace_id, lower(name));
-
-CREATE INDEX index_namespaces_public_groups_name_id ON namespaces USING btree (name, id) WHERE (((type)::text = 'Group'::text) AND (visibility_level = 20));
-
-CREATE UNIQUE INDEX index_on_deploy_keys_id_and_type_and_public ON keys USING btree (id, type) WHERE (public = true);
-
-CREATE INDEX index_users_on_public_email_excluding_null_and_empty ON users USING btree (public_email) WHERE (((public_email)::text <> ''::text) AND (public_email IS NOT NULL));
-
-CREATE TABLE test_table (
- id bigint NOT NULL,
- integer_column integer,
- integer_with_default_column integer DEFAULT 1,
- smallint_column smallint,
- smallint_with_default_column smallint DEFAULT 0 NOT NULL,
- numeric_column numeric NOT NULL,
- numeric_with_default_column numeric DEFAULT 1.0 NOT NULL,
- boolean_colum boolean,
- boolean_with_default_colum boolean DEFAULT true NOT NULL,
- double_precision_column double precision,
- double_precision_with_default_column double precision DEFAULT 1.0,
- varying_column character varying,
- varying_with_default_column character varying DEFAULT 'DEFAULT'::character varying NOT NULL,
- varying_with_limit_column character varying(255),
- varying_with_limit_and_default_column character varying(255) DEFAULT 'DEFAULT'::character varying,
- text_column text NOT NULL,
- text_with_default_column text DEFAULT ''::text NOT NULL,
- array_column character varying(255)[] NOT NULL,
- array_with_default_column character varying(255)[] DEFAULT '{one,two}'::character varying[] NOT NULL,
- jsonb_column jsonb,
- jsonb_with_default_column jsonb DEFAULT '[]'::jsonb NOT NULL,
- timestamptz_column timestamp with time zone,
- timestamptz_with_default_column timestamp(6) with time zone DEFAULT now(),
- timestamp_column timestamp(6) without time zone NOT NULL,
- timestamp_with_default_column timestamp(6) without time zone DEFAULT '2022-01-23 00:00:00+00'::timestamp without time zone NOT NULL,
- date_column date,
- date_with_default_column date DEFAULT '2023-04-05',
- inet_column inet NOT NULL,
- inet_with_default_column inet DEFAULT '0.0.0.0'::inet NOT NULL,
- macaddr_column macaddr,
- macaddr_with_default_column macaddr DEFAULT '00-00-00-00-00-000'::macaddr NOT NULL,
- uuid_column uuid NOT NULL,
- uuid_with_default_column uuid DEFAULT '00000000-0000-0000-0000-000000000000'::uuid NOT NULL,
- bytea_column bytea,
- bytea_with_default_column bytea DEFAULT '\xDEADBEEF'::bytea,
- unmapped_column_type anyarray,
- partition_key bigint DEFAULT 1 NOT NULL,
- created_at timestamp with time zone DEFAULT now() NOT NULL
-) PARTITION BY HASH (partition_key, created_at);
-
-CREATE TABLE ci_project_mirrors (
- id bigint NOT NULL,
- project_id integer NOT NULL,
- namespace_id integer NOT NULL
-);
-
-CREATE TABLE wrong_table (
- id bigint NOT NULL,
- description character varying(255) NOT NULL
-);
-
-CREATE TABLE extra_table_columns (
- id bigint NOT NULL,
- name character varying(255) NOT NULL
-);
-
-CREATE TABLE missing_table (
- id bigint NOT NULL,
- description text NOT NULL
-);
-
-CREATE TABLE missing_table_columns (
- id bigint NOT NULL,
- email character varying(255) NOT NULL
-);
-
-CREATE TABLE operations_user_lists (
- id bigint NOT NULL,
- project_id bigint NOT NULL,
- created_at timestamp with time zone NOT NULL,
- updated_at timestamp with time zone NOT NULL,
- iid integer NOT NULL,
- name character varying(255) NOT NULL,
- user_xids text DEFAULT ''::text NOT NULL
-);
-
-CREATE TRIGGER trigger AFTER INSERT ON public.t1 FOR EACH ROW EXECUTE FUNCTION t1();
-
-CREATE TRIGGER wrong_trigger BEFORE UPDATE ON public.t2 FOR EACH ROW EXECUTE FUNCTION my_function();
-
-CREATE TRIGGER missing_trigger_1 BEFORE INSERT OR UPDATE ON public.t3 FOR EACH ROW EXECUTE FUNCTION t3();
-
-CREATE TRIGGER projects_loose_fk_trigger AFTER DELETE ON projects REFERENCING OLD TABLE AS old_table FOR EACH STATEMENT EXECUTE FUNCTION insert_into_loose_foreign_keys_deleted_records();
-
-ALTER TABLE web_hooks
- ADD CONSTRAINT web_hooks_project_id_fkey FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
-
-ALTER TABLE ONLY issues
- ADD CONSTRAINT wrong_definition_fk FOREIGN KEY (author_id) REFERENCES users(id) ON DELETE SET NULL;
-
-ALTER TABLE ONLY issues
- ADD CONSTRAINT missing_fk FOREIGN KEY (author_id) REFERENCES users(id) ON DELETE SET NULL;
-
-ALTER TABLE ONLY bulk_import_configurations
- ADD CONSTRAINT fk_rails_536b96bff1 FOREIGN KEY (bulk_import_id) REFERENCES bulk_imports(id) ON DELETE CASCADE;
diff --git a/spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap b/spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap
index 42818c14029..2bd2b17a12d 100644
--- a/spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap
+++ b/spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap
@@ -21,7 +21,6 @@ exports[`~/access_tokens/components/expires_at_field should render datepicker wi
mindate="Mon Jul 06 2020 00:00:00 GMT+0000 (Greenwich Mean Time)"
placeholder="YYYY-MM-DD"
showclearbutton="true"
- size="medium"
theme=""
/>
</gl-form-group-stub>
diff --git a/spec/frontend/admin/abuse_report/components/report_actions_spec.js b/spec/frontend/admin/abuse_report/components/report_actions_spec.js
index 0e20630db14..3c366980c14 100644
--- a/spec/frontend/admin/abuse_report/components/report_actions_spec.js
+++ b/spec/frontend/admin/abuse_report/components/report_actions_spec.js
@@ -17,6 +17,9 @@ import {
ERROR_MESSAGE,
NO_ACTION,
USER_ACTION_OPTIONS,
+ TRUST_ACTION,
+ TRUST_REASON,
+ REASON_OPTIONS,
} from '~/admin/abuse_report/constants';
import { mockAbuseReport } from '../mock_data';
@@ -40,10 +43,11 @@ describe('ReportActions', () => {
const setCloseReport = (close) => wrapper.findByTestId('close').find('input').setChecked(close);
const setSelectOption = (id, value) =>
wrapper.findByTestId(`${id}-select`).find(`option[value=${value}]`).setSelected();
- const selectAction = (action) => setSelectOption('action', action);
+ const selectAction = (chosenAction) => setSelectOption('action', chosenAction);
const selectReason = (reason) => setSelectOption('reason', reason);
const setComment = (comment) => wrapper.findByTestId('comment').find('input').setValue(comment);
const submitForm = () => wrapper.findByTestId('submit-button').vm.$emit('click');
+ const findReasonOptions = () => wrapper.findByTestId('reason-select');
const createComponent = (props = {}) => {
wrapper = mountExtended(ReportActions, {
@@ -79,8 +83,8 @@ describe('ReportActions', () => {
expect(options).toHaveLength(USER_ACTION_OPTIONS.length);
- USER_ACTION_OPTIONS.forEach((action, index) => {
- expect(options.at(index).text()).toBe(action.text);
+ USER_ACTION_OPTIONS.forEach((userAction, index) => {
+ expect(options.at(index).text()).toBe(userAction.text);
});
});
});
@@ -100,6 +104,51 @@ describe('ReportActions', () => {
});
});
+ describe('reasons', () => {
+ beforeEach(() => {
+ clickActionsButton();
+ });
+
+ it('shows all non-trust reasons by default', () => {
+ const reasons = findReasonOptions().findAll('option');
+ expect(reasons).toHaveLength(REASON_OPTIONS.length);
+
+ REASON_OPTIONS.forEach((reason, index) => {
+ expect(reasons.at(index).text()).toBe(reason.text);
+ });
+ });
+
+ describe('when user selects any non-trust action', () => {
+ it('shows non-trust reasons', () => {
+ const reasonLength = REASON_OPTIONS.length;
+ let reasons;
+
+ USER_ACTION_OPTIONS.forEach((userAction) => {
+ if (userAction !== TRUST_ACTION && userAction !== NO_ACTION) {
+ selectAction(userAction.value);
+
+ reasons = findReasonOptions().findAll('option');
+ expect(reasons).toHaveLength(reasonLength);
+ }
+ });
+ });
+ });
+
+ describe('when user selects "Trust user"', () => {
+ beforeEach(() => {
+ selectAction(TRUST_ACTION.value);
+ });
+
+ it('only shows "Confirmed trusted user" reason', () => {
+ const reasons = findReasonOptions().findAll('option');
+
+ expect(reasons).toHaveLength(1);
+
+ expect(reasons.at(0).text()).toBe(TRUST_REASON.text);
+ });
+ });
+ });
+
describe('when clicking the actions button', () => {
beforeEach(() => {
clickActionsButton();
diff --git a/spec/frontend/admin/abuse_report/components/user_details_spec.js b/spec/frontend/admin/abuse_report/components/user_details_spec.js
index f3d8d5bb610..24ec0cdb1b2 100644
--- a/spec/frontend/admin/abuse_report/components/user_details_spec.js
+++ b/spec/frontend/admin/abuse_report/components/user_details_spec.js
@@ -70,14 +70,6 @@ describe('UserDetails', () => {
expect(findUserDetailLabel('credit-card-verification')).toBe(USER_DETAILS_I18N.creditCard);
});
- it('renders the users name', () => {
- expect(findUserDetail('credit-card-verification').text()).toContain(
- sprintf(USER_DETAILS_I18N.registeredWith, { ...user.creditCard }),
- );
-
- expect(findUserDetail('credit-card-verification').text()).toContain(user.creditCard.name);
- });
-
describe('similar credit cards', () => {
it('renders the number of similar records', () => {
expect(findUserDetail('credit-card-verification').text()).toContain(
diff --git a/spec/frontend/alert_spec.js b/spec/frontend/alert_spec.js
index 1ae8373016b..de3093c6c19 100644
--- a/spec/frontend/alert_spec.js
+++ b/spec/frontend/alert_spec.js
@@ -271,6 +271,74 @@ describe('Flash', () => {
expect(findTextContent()).toBe('message 1 message 2');
});
});
+
+ describe('with message links', () => {
+ const findAlertMessageLinks = () =>
+ Array.from(document.querySelectorAll('.flash-container a'));
+
+ it('creates a link', () => {
+ alert = createAlert({
+ message: 'Read more at %{exampleLinkStart}example site%{exampleLinkEnd}.',
+ messageLinks: {
+ exampleLink: 'https://example.com',
+ },
+ });
+ const messageLinks = findAlertMessageLinks();
+
+ expect(messageLinks).toHaveLength(1);
+ const link = messageLinks.at(0);
+ expect(link.textContent).toBe('example site');
+ expect(link.getAttribute('href')).toBe('https://example.com');
+ });
+
+ it('creates multiple links', () => {
+ alert = createAlert({
+ message:
+ 'Read more at %{exampleLinkStart}example site%{exampleLinkEnd}, or on %{docsLinkStart}the documentation%{docsLinkEnd}.',
+ messageLinks: {
+ exampleLink: 'https://example.com',
+ docsLink: 'https://docs.example.com',
+ },
+ });
+ const messageLinks = findAlertMessageLinks();
+
+ expect(messageLinks).toHaveLength(2);
+ const [firstLink, secondLink] = messageLinks;
+ expect(firstLink.textContent).toBe('example site');
+ expect(firstLink.getAttribute('href')).toBe('https://example.com');
+ expect(secondLink.textContent).toBe('the documentation');
+ expect(secondLink.getAttribute('href')).toBe('https://docs.example.com');
+ });
+
+ it('allows passing more props to gl-link', () => {
+ alert = createAlert({
+ message: 'Read more at %{exampleLinkStart}example site%{exampleLinkEnd}.',
+ messageLinks: {
+ exampleLink: {
+ href: 'https://example.com',
+ target: '_blank',
+ },
+ },
+ });
+ const messageLinks = findAlertMessageLinks();
+
+ expect(messageLinks).toHaveLength(1);
+ const link = messageLinks.at(0);
+ expect(link.textContent).toBe('example site');
+ expect(link.getAttribute('href')).toBe('https://example.com');
+ expect(link.getAttribute('target')).toBe('_blank');
+ });
+
+ it('does not create any links when given an empty messageLinks object', () => {
+ alert = createAlert({
+ message: 'Read more at %{exampleLinkStart}example site%{exampleLinkEnd}.',
+ messageLinks: {},
+ });
+ const messageLinks = findAlertMessageLinks();
+
+ expect(messageLinks).toHaveLength(0);
+ });
+ });
});
});
});
diff --git a/spec/frontend/analytics/cycle_analytics/components/base_spec.js b/spec/frontend/analytics/cycle_analytics/components/base_spec.js
index 653934000b3..cd477ff36aa 100644
--- a/spec/frontend/analytics/cycle_analytics/components/base_spec.js
+++ b/spec/frontend/analytics/cycle_analytics/components/base_spec.js
@@ -141,9 +141,11 @@ describe('Value stream analytics component', () => {
namespacePath: groupPath,
endDate: createdBefore,
hasDateRangeFilter: true,
+ hasPredefinedDateRangesFilter: true,
hasProjectFilter: false,
selectedProjects: [],
startDate: createdAfter,
+ predefinedDateRange: null,
});
});
diff --git a/spec/frontend/analytics/cycle_analytics/components/value_stream_filters_spec.js b/spec/frontend/analytics/cycle_analytics/components/value_stream_filters_spec.js
index e3bcb0ab557..a04ffa79a68 100644
--- a/spec/frontend/analytics/cycle_analytics/components/value_stream_filters_spec.js
+++ b/spec/frontend/analytics/cycle_analytics/components/value_stream_filters_spec.js
@@ -1,20 +1,29 @@
-import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import Daterange from '~/analytics/shared/components/daterange.vue';
import ProjectsDropdownFilter from '~/analytics/shared/components/projects_dropdown_filter.vue';
import FilterBar from '~/analytics/cycle_analytics/components/filter_bar.vue';
import ValueStreamFilters from '~/analytics/cycle_analytics/components/value_stream_filters.vue';
+import DateRangesDropdown from '~/analytics/shared/components/date_ranges_dropdown.vue';
import {
- createdAfter as startDate,
- createdBefore as endDate,
- currentGroup,
- selectedProjects,
-} from '../mock_data';
+ DATE_RANGE_LAST_30_DAYS_VALUE,
+ DATE_RANGE_CUSTOM_VALUE,
+ LAST_30_DAYS,
+} from '~/analytics/shared/constants';
+import { useFakeDate } from 'helpers/fake_date';
+import { currentGroup, selectedProjects } from '../mock_data';
const { path } = currentGroup;
const groupPath = `groups/${path}`;
+const defaultFeatureFlags = {
+ vsaPredefinedDateRanges: false,
+};
-function createComponent(props = {}) {
- return shallowMount(ValueStreamFilters, {
+const startDate = LAST_30_DAYS;
+const endDate = new Date('2019-01-14T00:00:00.000Z');
+
+function createComponent({ props = {}, featureFlags = defaultFeatureFlags } = {}) {
+ return shallowMountExtended(ValueStreamFilters, {
propsData: {
selectedProjects,
groupPath,
@@ -23,15 +32,23 @@ function createComponent(props = {}) {
endDate,
...props,
},
+ provide: {
+ glFeatures: {
+ ...featureFlags,
+ },
+ },
});
}
describe('ValueStreamFilters', () => {
+ useFakeDate(2019, 0, 14, 10, 10);
+
let wrapper;
const findProjectsDropdown = () => wrapper.findComponent(ProjectsDropdownFilter);
const findDateRangePicker = () => wrapper.findComponent(Daterange);
const findFilterBar = () => wrapper.findComponent(FilterBar);
+ const findDateRangesDropdown = () => wrapper.findComponent(DateRangesDropdown);
beforeEach(() => {
wrapper = createComponent();
@@ -55,6 +72,10 @@ describe('ValueStreamFilters', () => {
expect(findDateRangePicker().exists()).toBe(true);
});
+ it('will not render the date ranges dropdown', () => {
+ expect(findDateRangesDropdown().exists()).toBe(false);
+ });
+
it('will emit `selectProject` when a project is selected', () => {
findProjectsDropdown().vm.$emit('selected');
@@ -69,21 +90,168 @@ describe('ValueStreamFilters', () => {
describe('hasDateRangeFilter = false', () => {
beforeEach(() => {
- wrapper = createComponent({ hasDateRangeFilter: false });
+ wrapper = createComponent({ props: { hasDateRangeFilter: false } });
});
- it('will not render the date range picker', () => {
+ it('should not render the date range picker', () => {
expect(findDateRangePicker().exists()).toBe(false);
});
});
describe('hasProjectFilter = false', () => {
beforeEach(() => {
- wrapper = createComponent({ hasProjectFilter: false });
+ wrapper = createComponent({ props: { hasProjectFilter: false } });
});
it('will not render the project dropdown', () => {
expect(findProjectsDropdown().exists()).toBe(false);
});
});
+
+ describe('`vsaPredefinedDateRanges` feature flag is enabled', () => {
+ const lastMonthValue = 'lastMonthValue';
+ const mockDateRange = {
+ value: lastMonthValue,
+ startDate: new Date('2023-08-08T00:00:00.000Z'),
+ endDate: new Date('2023-09-08T00:00:00.000Z'),
+ };
+
+ beforeEach(() => {
+ wrapper = createComponent({ featureFlags: { vsaPredefinedDateRanges: true } });
+ });
+
+ it('should render date ranges dropdown', () => {
+ expect(findDateRangesDropdown().exists()).toBe(true);
+ });
+
+ it('should not render date range picker', () => {
+ expect(findDateRangePicker().exists()).toBe(false);
+ });
+
+ describe('when a date range is selected from the dropdown', () => {
+ describe('predefined date range option', () => {
+ beforeEach(async () => {
+ findDateRangesDropdown().vm.$emit('selected', mockDateRange);
+
+ await nextTick();
+ });
+
+ it('should emit `setDateRange` with date range', () => {
+ const { value, ...dateRange } = mockDateRange;
+
+ expect(wrapper.emitted('setDateRange')).toEqual([[dateRange]]);
+ });
+
+ it('should emit `setPredefinedDateRange` with correct value', () => {
+ expect(wrapper.emitted('setPredefinedDateRange')).toEqual([[lastMonthValue]]);
+ });
+ });
+
+ describe('custom date range option', () => {
+ beforeEach(async () => {
+ findDateRangesDropdown().vm.$emit('customDateRangeSelected');
+
+ await nextTick();
+ });
+
+ it('should emit `setPredefinedDateRange` with custom date range value', () => {
+ expect(wrapper.emitted('setPredefinedDateRange')).toEqual([[DATE_RANGE_CUSTOM_VALUE]]);
+ });
+
+ it('should not emit `setDateRange`', () => {
+ expect(wrapper.emitted('setDateRange')).toBeUndefined();
+ });
+ });
+ });
+
+ describe.each`
+ predefinedDateRange | shouldRenderDateRangePicker | dateRangeType
+ ${DATE_RANGE_CUSTOM_VALUE} | ${true} | ${'custom date range'}
+ ${lastMonthValue} | ${false} | ${'predefined date range'}
+ `(
+ 'when the `predefinedDateRange` prop is set to a $dateRangeType',
+ ({ predefinedDateRange, shouldRenderDateRangePicker }) => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ props: { predefinedDateRange },
+ featureFlags: { vsaPredefinedDateRanges: true },
+ });
+ });
+
+ it("should be passed into the dropdown's `selected` prop", () => {
+ expect(findDateRangesDropdown().props('selected')).toBe(predefinedDateRange);
+ });
+
+ it(`should ${
+ shouldRenderDateRangePicker ? '' : 'not'
+ } render the date range picker`, () => {
+ expect(findDateRangePicker().exists()).toBe(shouldRenderDateRangePicker);
+ });
+ },
+ );
+
+ describe('when the `predefinedDateRange` prop is null', () => {
+ const laterStartDate = new Date('2018-12-01T00:00:00.000Z');
+ const earlierStartDate = new Date('2019-01-01T00:00:00.000Z');
+ const customEndDate = new Date('2019-02-01T00:00:00.000Z');
+
+ describe.each`
+ dateRange | expectedDateRangeOption | shouldRenderDateRangePicker | description
+ ${{ startDate, endDate }} | ${DATE_RANGE_LAST_30_DAYS_VALUE} | ${false} | ${'is default'}
+ ${{ startDate: laterStartDate, endDate }} | ${DATE_RANGE_CUSTOM_VALUE} | ${true} | ${'has a later start date than the default'}
+ ${{ startDate: earlierStartDate, endDate }} | ${DATE_RANGE_CUSTOM_VALUE} | ${true} | ${'has an earlier start date than the default'}
+ ${{ startDate, endDate: customEndDate }} | ${DATE_RANGE_CUSTOM_VALUE} | ${true} | ${'has an end date that is not today'}
+ `(
+ 'date range $description',
+ ({ dateRange, expectedDateRangeOption, shouldRenderDateRangePicker }) => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ props: { predefinedDateRange: null, ...dateRange },
+ featureFlags: { vsaPredefinedDateRanges: true },
+ });
+ });
+
+ it("should set the dropdown's `selected` prop to the correct value", () => {
+ expect(findDateRangesDropdown().props('selected')).toBe(expectedDateRangeOption);
+ });
+
+ it(`should ${
+ shouldRenderDateRangePicker ? '' : 'not'
+ } render the date range picker`, () => {
+ expect(findDateRangePicker().exists()).toBe(shouldRenderDateRangePicker);
+ });
+ },
+ );
+ });
+
+ describe('hasPredefinedDateRangesFilter = false', () => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ props: { hasPredefinedDateRangesFilter: false },
+ featureFlags: { vsaPredefinedDateRanges: true },
+ });
+ });
+
+ it('should not render the date ranges dropdown', () => {
+ expect(findDateRangesDropdown().exists()).toBe(false);
+ });
+ });
+
+ describe('hasDateRangeFilter = false', () => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ props: { hasDateRangeFilter: false },
+ featureFlags: { vsaPredefinedDateRanges: true },
+ });
+ });
+
+ it('should not render the date range picker', () => {
+ expect(findDateRangePicker().exists()).toBe(false);
+ });
+
+ it('should remove custom date range option from date ranges dropdown', () => {
+ expect(findDateRangesDropdown().props('includeCustomDateRangeOption')).toBe(false);
+ });
+ });
+ });
});
diff --git a/spec/frontend/analytics/cycle_analytics/mock_data.js b/spec/frontend/analytics/cycle_analytics/mock_data.js
index f9587bf1967..7ad95cab9ad 100644
--- a/spec/frontend/analytics/cycle_analytics/mock_data.js
+++ b/spec/frontend/analytics/cycle_analytics/mock_data.js
@@ -261,3 +261,5 @@ export const basePaginationResult = {
direction: PAGINATION_SORT_DIRECTION_DESC,
page: null,
};
+
+export const predefinedDateRange = 'last_week';
diff --git a/spec/frontend/analytics/cycle_analytics/store/actions_spec.js b/spec/frontend/analytics/cycle_analytics/store/actions_spec.js
index b2ce8596c22..c3551d3da6f 100644
--- a/spec/frontend/analytics/cycle_analytics/store/actions_spec.js
+++ b/spec/frontend/analytics/cycle_analytics/store/actions_spec.js
@@ -14,6 +14,7 @@ import {
initialPaginationState,
reviewEvents,
projectNamespace as namespace,
+ predefinedDateRange,
} from '../mock_data';
const { path: groupPath } = currentGroup;
@@ -32,6 +33,7 @@ const defaultState = {
createdAfter,
createdBefore,
pagination: initialPaginationState,
+ predefinedDateRange,
};
describe('Project Value Stream Analytics actions', () => {
@@ -53,6 +55,7 @@ describe('Project Value Stream Analytics actions', () => {
describe.each`
action | payload | expectedActions | expectedMutations
${'setDateRange'} | ${{ createdAfter, createdBefore }} | ${[{ type: 'refetchStageData' }]} | ${[mockSetDateActionCommit]}
+ ${'setPredefinedDateRange'} | ${{ predefinedDateRange }} | ${[]} | ${[{ type: 'SET_PREDEFINED_DATE_RANGE', payload: { predefinedDateRange } }]}
${'setFilters'} | ${[]} | ${[{ type: 'refetchStageData' }]} | ${[]}
${'setSelectedStage'} | ${{ selectedStage }} | ${[{ type: 'refetchStageData' }]} | ${[{ type: 'SET_SELECTED_STAGE', payload: { selectedStage } }]}
${'setSelectedValueStream'} | ${{ selectedValueStream }} | ${[{ type: 'fetchValueStreamStages' }]} | ${[{ type: 'SET_SELECTED_VALUE_STREAM', payload: { selectedValueStream } }]}
diff --git a/spec/frontend/analytics/cycle_analytics/store/mutations_spec.js b/spec/frontend/analytics/cycle_analytics/store/mutations_spec.js
index 70b7454f4a0..25fed2b1714 100644
--- a/spec/frontend/analytics/cycle_analytics/store/mutations_spec.js
+++ b/spec/frontend/analytics/cycle_analytics/store/mutations_spec.js
@@ -18,6 +18,7 @@ import {
stageCounts,
initialPaginationState as pagination,
projectNamespace as mockNamespace,
+ predefinedDateRange,
} from '../mock_data';
let state;
@@ -94,6 +95,7 @@ describe('Project Value Stream Analytics mutations', () => {
mutation | payload | stateKey | value
${types.SET_DATE_RANGE} | ${mockSetDatePayload} | ${'createdAfter'} | ${mockCreatedAfter}
${types.SET_DATE_RANGE} | ${mockSetDatePayload} | ${'createdBefore'} | ${mockCreatedBefore}
+ ${types.SET_PREDEFINED_DATE_RANGE} | ${predefinedDateRange} | ${'predefinedDateRange'} | ${predefinedDateRange}
${types.SET_LOADING} | ${true} | ${'isLoading'} | ${true}
${types.SET_LOADING} | ${false} | ${'isLoading'} | ${false}
${types.SET_SELECTED_VALUE_STREAM} | ${selectedValueStream} | ${'selectedValueStream'} | ${selectedValueStream}
diff --git a/spec/frontend/analytics/shared/components/date_ranges_dropdown_spec.js b/spec/frontend/analytics/shared/components/date_ranges_dropdown_spec.js
new file mode 100644
index 00000000000..63407900be7
--- /dev/null
+++ b/spec/frontend/analytics/shared/components/date_ranges_dropdown_spec.js
@@ -0,0 +1,165 @@
+import { nextTick } from 'vue';
+import { GlCollapsibleListbox, GlIcon } from '@gitlab/ui';
+import DateRangesDropdown from '~/analytics/shared/components/date_ranges_dropdown.vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+
+describe('DateRangesDropdown', () => {
+ let wrapper;
+
+ const customDateRangeValue = 'custom';
+ const lastWeekValue = 'lastWeek';
+ const last30DaysValue = 'lastMonth';
+ const mockLastWeek = {
+ text: 'Last week',
+ value: lastWeekValue,
+ startDate: new Date('2023-09-08T00:00:00.000Z'),
+ endDate: new Date('2023-09-14T00:00:00.000Z'),
+ };
+ const mockLast30Days = {
+ text: 'Last month',
+ value: last30DaysValue,
+ startDate: new Date('2023-08-16T00:00:00.000Z'),
+ endDate: new Date('2023-09-14T00:00:00.000Z'),
+ };
+ const mockCustomDateRangeItem = {
+ text: 'Custom',
+ value: customDateRangeValue,
+ };
+ const mockDateRanges = [mockLastWeek, mockLast30Days];
+ const mockItems = mockDateRanges.map(({ text, value }) => ({ text, value }));
+ const mockTooltipText = 'Max date range is 180 days';
+
+ const createComponent = ({ props = {}, dateRangeOptions = mockDateRanges } = {}) => {
+ wrapper = shallowMountExtended(DateRangesDropdown, {
+ propsData: {
+ dateRangeOptions,
+ ...props,
+ },
+ directives: {
+ GlTooltip: createMockDirective('gl-tooltip'),
+ },
+ });
+ };
+
+ const findListBox = () => wrapper.findComponent(GlCollapsibleListbox);
+ const findDaysSelectedCount = () => wrapper.findByTestId('predefined-date-range-days-count');
+ const findHelpIcon = () => wrapper.findComponent(GlIcon);
+
+ describe('default state', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('should pass items to listbox `items` prop in correct order', () => {
+ expect(findListBox().props('items')).toStrictEqual([...mockItems, mockCustomDateRangeItem]);
+ });
+
+ it('should display first option as selected', () => {
+ expect(findListBox().props('selected')).toBe(lastWeekValue);
+ });
+
+ it('should not display info icon', () => {
+ expect(findHelpIcon().exists()).toBe(false);
+ });
+
+ describe.each`
+ dateRangeValue | dateRangeItem
+ ${lastWeekValue} | ${mockLastWeek}
+ ${last30DaysValue} | ${mockLast30Days}
+ `('when $dateRangeValue date range is selected', ({ dateRangeValue, dateRangeItem }) => {
+ beforeEach(async () => {
+ findListBox().vm.$emit('select', dateRangeValue);
+
+ await nextTick();
+ });
+
+ it('should emit `selected` event with value and date range', () => {
+ const { text, ...dateRangeProps } = dateRangeItem;
+
+ expect(wrapper.emitted('selected')).toEqual([[dateRangeProps]]);
+ });
+
+ it('should display days selected indicator', () => {
+ expect(findDaysSelectedCount().exists()).toBe(true);
+ });
+
+ it('should not emit `customDateRangeSelected` event', () => {
+ expect(wrapper.emitted('customDateRangeSelected')).toBeUndefined();
+ });
+ });
+
+ describe('when the custom date range option is selected', () => {
+ beforeEach(async () => {
+ findListBox().vm.$emit('select', customDateRangeValue);
+
+ await nextTick();
+ });
+
+ it('should emit `customDateRangeSelected` event', () => {
+ expect(wrapper.emitted('customDateRangeSelected')).toHaveLength(1);
+ });
+
+ it('should hide days selected indicator', () => {
+ expect(findDaysSelectedCount().exists()).toBe(false);
+ });
+
+ it('should not emit `selected` event', () => {
+ expect(wrapper.emitted('selected')).toBeUndefined();
+ });
+ });
+ });
+
+ describe('when a date range is preselected', () => {
+ beforeEach(() => {
+ createComponent({ props: { selected: 'lastMonth' } });
+ });
+
+ it('should display preselected date range as selected in listbox', () => {
+ expect(findListBox().props('selected')).toBe(last30DaysValue);
+ });
+ });
+
+ describe('days selected indicator', () => {
+ it.each`
+ selected | includeEndDateInDaysSelected | expectedDaysCount
+ ${lastWeekValue} | ${true} | ${7}
+ ${last30DaysValue} | ${true} | ${30}
+ ${lastWeekValue} | ${false} | ${6}
+ ${last30DaysValue} | ${false} | ${29}
+ `(
+ 'should display correct days selected when includeEndDateInDaysSelected=$includeEndDateInDaysSelected',
+ ({ selected, includeEndDateInDaysSelected, expectedDaysCount }) => {
+ createComponent({ props: { selected, includeEndDateInDaysSelected } });
+
+ expect(wrapper.findByText(`${expectedDaysCount} days selected`).exists()).toBe(true);
+ },
+ );
+ });
+
+ describe('when the `tooltip` prop is set', () => {
+ beforeEach(() => {
+ createComponent({ props: { tooltip: mockTooltipText } });
+ });
+
+ it('should display info icon with tooltip', () => {
+ const helpIcon = findHelpIcon();
+ const tooltip = getBinding(helpIcon.element, 'gl-tooltip');
+
+ expect(helpIcon.props('name')).toBe('information-o');
+ expect(helpIcon.attributes('title')).toBe(mockTooltipText);
+
+ expect(tooltip).toBeDefined();
+ });
+ });
+
+ describe('when `includeCustomDateRangeOption` = false', () => {
+ beforeEach(() => {
+ createComponent({ props: { includeCustomDateRangeOption: false } });
+ });
+
+ it('should pass items without custom date range option to listbox `items` prop', () => {
+ expect(findListBox().props('items')).toEqual(mockItems);
+ });
+ });
+});
diff --git a/spec/frontend/analytics/shared/components/projects_dropdown_filter_spec.js b/spec/frontend/analytics/shared/components/projects_dropdown_filter_spec.js
index 802da47d6cd..15f5759752d 100644
--- a/spec/frontend/analytics/shared/components/projects_dropdown_filter_spec.js
+++ b/spec/frontend/analytics/shared/components/projects_dropdown_filter_spec.js
@@ -26,6 +26,7 @@ const projects = [
avatarUrl: null,
},
];
+const groupNamespace = 'gitlab-org';
const defaultMocks = {
$apollo: {
@@ -46,7 +47,7 @@ describe('ProjectsDropdownFilter component', () => {
mocks: { ...defaultMocks },
propsData: {
groupId: 1,
- groupNamespace: 'gitlab-org',
+ groupNamespace,
...props,
},
stubs: {
@@ -93,34 +94,50 @@ describe('ProjectsDropdownFilter component', () => {
const findSelectedButtonAvatarItemAtIndex = (index) =>
findSelectedDropdownAtIndex(index).find('img.gl-avatar');
- describe('queryParams are applied when fetching data', () => {
+ describe('when fetching data', () => {
+ const mockQueryParams = {
+ first: 50,
+ includeSubgroups: true,
+ };
+
+ const mockVariables = {
+ groupFullPath: groupNamespace,
+ ...mockQueryParams,
+ };
+
beforeEach(() => {
createComponent({
props: {
- queryParams: {
- first: 50,
- includeSubgroups: true,
- },
+ queryParams: mockQueryParams,
},
});
+
+ spyQuery.mockClear();
});
- it('applies the correct queryParams when making an api call', async () => {
+ it('should apply the correct queryParams when making an API call', async () => {
findDropdown().vm.$emit('search', 'gitlab');
+ await waitForPromises();
+
expect(spyQuery).toHaveBeenCalledTimes(1);
- await nextTick();
- expect(spyQuery).toHaveBeenCalledWith({
+ expect(spyQuery).toHaveBeenLastCalledWith({
query: getProjects,
variables: {
search: 'gitlab',
- groupFullPath: wrapper.vm.groupNamespace,
- first: 50,
- includeSubgroups: true,
+ ...mockVariables,
},
});
});
+
+ it('should not make an API call when search query is below minimum search length', async () => {
+ findDropdown().vm.$emit('search', 'hi');
+
+ await waitForPromises();
+
+ expect(spyQuery).toHaveBeenCalledTimes(0);
+ });
});
describe('highlighted items', () => {
@@ -230,6 +247,31 @@ describe('ProjectsDropdownFilter component', () => {
});
});
+ describe('with an array of projects passed to `defaultProjects` and a search term', () => {
+ const { name: searchQuery } = projects[2];
+
+ beforeEach(async () => {
+ createComponent({
+ mountFn: mountExtended,
+ props: {
+ defaultProjects: [projects[0], projects[1]],
+ multiSelect: true,
+ },
+ });
+
+ await waitForPromises();
+
+ findDropdown().vm.$emit('search', searchQuery);
+ });
+
+ it('should add search result to selected projects when selected', async () => {
+ await selectDropdownItemAtIndex([0, 1, 2]);
+
+ expect(findSelectedDropdownItems()).toHaveLength(3);
+ expect(findDropdownButton().text()).toBe('3 projects selected');
+ });
+ });
+
describe('when multiSelect is false', () => {
const blockDefaultProps = { multiSelect: false };
beforeEach(() => {
diff --git a/spec/frontend/batch_comments/components/preview_dropdown_spec.js b/spec/frontend/batch_comments/components/preview_dropdown_spec.js
index 608e9c82961..c0ad40b75ad 100644
--- a/spec/frontend/batch_comments/components/preview_dropdown_spec.js
+++ b/spec/frontend/batch_comments/components/preview_dropdown_spec.js
@@ -16,7 +16,7 @@ Vue.use(Vuex);
let wrapper;
-const setCurrentFileHash = jest.fn();
+const goToFile = jest.fn();
const scrollToDraft = jest.fn();
const findPreviewItem = () => wrapper.findComponent(PreviewItem);
@@ -27,7 +27,7 @@ function factory({ viewDiffsFileByFile = false, draftsCount = 1, sortedDrafts =
diffs: {
namespaced: true,
actions: {
- setCurrentFileHash,
+ goToFile,
},
state: {
viewDiffsFileByFile,
@@ -59,12 +59,12 @@ describe('Batch comments preview dropdown', () => {
it('toggles active file when viewDiffsFileByFile is true', async () => {
factory({
viewDiffsFileByFile: true,
- sortedDrafts: [{ id: 1, file_hash: 'hash' }],
+ sortedDrafts: [{ id: 1, file_hash: 'hash', file_path: 'foo' }],
});
findPreviewItem().trigger('click');
await nextTick();
- expect(setCurrentFileHash).toHaveBeenCalledWith(expect.anything(), 'hash');
+ expect(goToFile).toHaveBeenCalledWith(expect.anything(), { path: 'foo' });
await nextTick();
expect(scrollToDraft).toHaveBeenCalledWith(
diff --git a/spec/frontend/behaviors/autosize_spec.js b/spec/frontend/behaviors/autosize_spec.js
deleted file mode 100644
index 7008b7b2eb6..00000000000
--- a/spec/frontend/behaviors/autosize_spec.js
+++ /dev/null
@@ -1,42 +0,0 @@
-import '~/behaviors/autosize';
-import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-
-jest.mock('~/helpers/startup_css_helper', () => {
- return {
- waitForCSSLoaded: jest.fn().mockImplementation((cb) => {
- // This is a hack:
- // autosize.js will execute and modify the DOM
- // whenever waitForCSSLoaded calls its callback function.
- // This setTimeout is here because everything within setTimeout will be queued
- // as async code until the current call stack is executed.
- // If we would not do this, the mock for waitForCSSLoaded would call its callback
- // before the fixture in the beforeEach is set and the Test would fail.
- // more on this here: https://johnresig.com/blog/how-javascript-timers-work/
- setTimeout(() => {
- cb.apply();
- }, 0);
- }),
- };
-});
-
-describe('Autosize behavior', () => {
- beforeEach(() => {
- setHTMLFixture('<textarea class="js-autosize"></textarea>');
- });
-
- afterEach(() => {
- resetHTMLFixture();
- });
-
- it('is applied to the textarea', () => {
- // This is the second part of the Hack:
- // Because we are forcing the mock for WaitForCSSLoaded and the very end of our callstack
- // to call its callback. This querySelector needs to go to the very end of our callstack
- // as well, if we would not have this jest.runOnlyPendingTimers here, the querySelector
- // would not run and the test would fail.
- jest.runOnlyPendingTimers();
-
- const textarea = document.querySelector('textarea');
- expect(textarea.classList).toContain('js-autosize-initialized');
- });
-});
diff --git a/spec/frontend/behaviors/components/global_alerts_spec.js b/spec/frontend/behaviors/components/global_alerts_spec.js
new file mode 100644
index 00000000000..4a20805c9a6
--- /dev/null
+++ b/spec/frontend/behaviors/components/global_alerts_spec.js
@@ -0,0 +1,135 @@
+import { nextTick } from 'vue';
+import { GlAlert } from '@gitlab/ui';
+
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import GlobalAlerts from '~/behaviors/components/global_alerts.vue';
+import { getGlobalAlerts, setGlobalAlerts, removeGlobalAlertById } from '~/lib/utils/global_alerts';
+
+jest.mock('~/lib/utils/global_alerts');
+
+describe('GlobalAlerts', () => {
+ const alert1 = {
+ dismissible: true,
+ persistOnPages: [],
+ id: 'foo',
+ variant: 'success',
+ title: 'Foo title',
+ message: 'Foo',
+ };
+ const alert2 = {
+ dismissible: true,
+ persistOnPages: [],
+ id: 'bar',
+ variant: 'danger',
+ message: 'Bar',
+ };
+ const alert3 = {
+ dismissible: true,
+ persistOnPages: ['dashboard:groups:index'],
+ id: 'baz',
+ variant: 'info',
+ message: 'Baz',
+ };
+
+ let wrapper;
+
+ const createComponent = async () => {
+ wrapper = shallowMountExtended(GlobalAlerts);
+ await nextTick();
+ };
+
+ const findAllAlerts = () => wrapper.findAllComponents(GlAlert);
+
+ describe('when there are alerts to display', () => {
+ beforeEach(() => {
+ getGlobalAlerts.mockImplementationOnce(() => [alert1, alert2]);
+ });
+
+ it('displays alerts and removes them from session storage', async () => {
+ await createComponent();
+
+ const alerts = findAllAlerts();
+
+ expect(alerts.at(0).text()).toBe('Foo');
+ expect(alerts.at(0).props()).toMatchObject({
+ title: 'Foo title',
+ variant: 'success',
+ dismissible: true,
+ });
+
+ expect(alerts.at(1).text()).toBe('Bar');
+ expect(alerts.at(1).props()).toMatchObject({
+ variant: 'danger',
+ dismissible: true,
+ });
+
+ expect(setGlobalAlerts).toHaveBeenCalledWith([]);
+ });
+
+ describe('when alert is dismissed', () => {
+ it('removes alert', async () => {
+ await createComponent();
+
+ wrapper.findComponent(GlAlert).vm.$emit('dismiss');
+ await nextTick();
+
+ expect(findAllAlerts().length).toBe(1);
+ expect(removeGlobalAlertById).toHaveBeenCalledWith(alert1.id);
+ });
+ });
+ });
+
+ describe('when alert has `persistOnPages` key set', () => {
+ const alerts = [alert3];
+
+ beforeEach(() => {
+ getGlobalAlerts.mockImplementationOnce(() => alerts);
+ });
+
+ describe('when page matches specified page', () => {
+ beforeEach(() => {
+ document.body.dataset.page = 'dashboard:groups:index';
+ });
+
+ afterEach(() => {
+ delete document.body.dataset.page;
+ });
+
+ it('renders alert and does not remove it from session storage', async () => {
+ await createComponent();
+
+ expect(wrapper.findComponent(GlAlert).text()).toBe('Baz');
+ expect(setGlobalAlerts).toHaveBeenCalledWith(alerts);
+ });
+ });
+
+ describe('when page does not match specified page', () => {
+ beforeEach(() => {
+ document.body.dataset.page = 'dashboard:groups:show';
+ });
+
+ afterEach(() => {
+ delete document.body.dataset.page;
+ });
+
+ it('does not render alert and does not remove it from session storage', async () => {
+ await createComponent();
+
+ expect(wrapper.findComponent(GlAlert).exists()).toBe(false);
+ expect(setGlobalAlerts).toHaveBeenCalledWith(alerts);
+ });
+ });
+ });
+
+ describe('when there are no alerts to display', () => {
+ beforeEach(() => {
+ getGlobalAlerts.mockImplementationOnce(() => []);
+ });
+
+ it('renders nothing', async () => {
+ await createComponent();
+
+ expect(wrapper.html()).toBe('');
+ });
+ });
+});
diff --git a/spec/frontend/behaviors/components/json_table_spec.js b/spec/frontend/behaviors/components/json_table_spec.js
index ae62d28d6c0..3277e58669a 100644
--- a/spec/frontend/behaviors/components/json_table_spec.js
+++ b/spec/frontend/behaviors/components/json_table_spec.js
@@ -70,7 +70,7 @@ describe('behaviors/components/json_table', () => {
});
it('renders gltable', () => {
- expect(findTable().props()).toEqual({
+ expect(findTable().props()).toMatchObject({
fields: [],
items: [],
});
@@ -121,7 +121,7 @@ describe('behaviors/components/json_table', () => {
});
it('passes cleaned fields and items to table', () => {
- expect(findTable().props()).toEqual({
+ expect(findTable().props()).toMatchObject({
fields: [
'A',
{
diff --git a/spec/frontend/behaviors/markdown/render_observability_spec.js b/spec/frontend/behaviors/markdown/render_observability_spec.js
deleted file mode 100644
index f464c01ac15..00000000000
--- a/spec/frontend/behaviors/markdown/render_observability_spec.js
+++ /dev/null
@@ -1,43 +0,0 @@
-import Vue from 'vue';
-import { createWrapper } from '@vue/test-utils';
-import renderObservability from '~/behaviors/markdown/render_observability';
-import { INLINE_EMBED_DIMENSIONS, SKELETON_VARIANT_EMBED } from '~/observability/constants';
-import ObservabilityApp from '~/observability/components/observability_app.vue';
-
-describe('renderObservability', () => {
- let subject;
-
- beforeEach(() => {
- subject = document.createElement('div');
- subject.classList.add('js-render-observability');
- subject.dataset.frameUrl = 'https://observe.gitlab.com/';
- document.body.appendChild(subject);
- });
-
- afterEach(() => {
- subject.remove();
- });
-
- it('should return an array of Vue instances', () => {
- const vueInstances = renderObservability([
- ...document.querySelectorAll('.js-render-observability'),
- ]);
- expect(vueInstances).toEqual([expect.any(Vue)]);
- });
-
- it('should correctly pass props to the ObservabilityApp component', () => {
- const vueInstances = renderObservability([
- ...document.querySelectorAll('.js-render-observability'),
- ]);
-
- const wrapper = createWrapper(vueInstances[0]);
-
- expect(wrapper.findComponent(ObservabilityApp).props()).toMatchObject({
- observabilityIframeSrc: 'https://observe.gitlab.com/',
- skeletonVariant: SKELETON_VARIANT_EMBED,
- inlineEmbed: true,
- height: INLINE_EMBED_DIMENSIONS.HEIGHT,
- width: INLINE_EMBED_DIMENSIONS.WIDTH,
- });
- });
-});
diff --git a/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap b/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap
index 292a0da2bfe..f32dd902b8e 100644
--- a/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap
+++ b/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap
@@ -13,7 +13,7 @@ exports[`Blob Header Filepath rendering matches the snapshot 1`] = `
/>
<strong
class="file-title-name js-blob-header-filepath mr-1"
- data-qa-selector="file_title_content"
+ data-testid="file-title-content"
>
foo/bar/dummy.md
</strong>
diff --git a/spec/frontend/blob/components/blob_header_default_actions_spec.js b/spec/frontend/blob/components/blob_header_default_actions_spec.js
index 4c8c256121f..cc4c13060a5 100644
--- a/spec/frontend/blob/components/blob_header_default_actions_spec.js
+++ b/spec/frontend/blob/components/blob_header_default_actions_spec.js
@@ -35,7 +35,7 @@ describe('Blob Header Default Actions', () => {
});
describe('renders', () => {
- const findCopyButton = () => wrapper.findByTestId('copyContentsButton');
+ const findCopyButton = () => wrapper.findByTestId('copy-contents-button');
const findViewRawButton = () => wrapper.findByTestId('viewRawButton');
it('gl-button-group component', () => {
diff --git a/spec/frontend/blob/csv/csv_viewer_spec.js b/spec/frontend/blob/csv/csv_viewer_spec.js
index 8f105f04aa7..04d11011e70 100644
--- a/spec/frontend/blob/csv/csv_viewer_spec.js
+++ b/spec/frontend/blob/csv/csv_viewer_spec.js
@@ -1,10 +1,12 @@
-import { GlLoadingIcon, GlTable } from '@gitlab/ui';
+import { GlLoadingIcon, GlTable, GlButton } from '@gitlab/ui';
import { getAllByRole } from '@testing-library/dom';
import { shallowMount, mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import Papa from 'papaparse';
import CsvViewer from '~/blob/csv/csv_viewer.vue';
import PapaParseAlert from '~/vue_shared/components/papa_parse_alert.vue';
+import { s__ } from '~/locale';
+import { MAX_ROWS_TO_RENDER } from '~/blob/csv/constants';
const validCsv = 'one,two,three';
const brokenCsv = '{\n "json": 1,\n "key": [1, 2, 3]\n}';
@@ -28,6 +30,8 @@ describe('app/assets/javascripts/blob/csv/csv_viewer.vue', () => {
const findCsvTable = () => wrapper.findComponent(GlTable);
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findAlert = () => wrapper.findComponent(PapaParseAlert);
+ const findSwitchToRawViewBtn = () => wrapper.findComponent(GlButton);
+ const findLargeCsvText = () => wrapper.find('[data-testid="large-csv-text"]');
it('should render loading spinner', () => {
createComponent();
@@ -76,6 +80,33 @@ describe('app/assets/javascripts/blob/csv/csv_viewer.vue', () => {
});
});
+ describe('when the CSV is larger than 2000 lines', () => {
+ beforeEach(async () => {
+ const largeCsv = validCsv.repeat(3000);
+ jest.spyOn(Papa, 'parse').mockImplementation(() => {
+ return { data: largeCsv.split(','), errors: [] };
+ });
+ createComponent({ csv: largeCsv });
+ await nextTick();
+ });
+ it('renders not more than max rows value', () => {
+ expect(Papa.parse).toHaveBeenCalledTimes(1);
+ expect(wrapper.vm.items).toHaveLength(MAX_ROWS_TO_RENDER);
+ });
+ it('renders large csv text', () => {
+ expect(findLargeCsvText().text()).toBe(
+ s__(
+ 'CsvViewer|The file is too large to render all the rows. To see the entire file, switch to the raw view.',
+ ),
+ );
+ });
+ it('renders button with link to raw view', () => {
+ const url = 'http://test.host/?plain=1';
+ expect(findSwitchToRawViewBtn().text()).toBe(s__('CsvViewer|View raw data'));
+ expect(findSwitchToRawViewBtn().attributes('href')).toBe(url);
+ });
+ });
+
describe('when csv prop is path and indicates a remote file', () => {
it('should render call parse with download flag true', async () => {
const path = 'path/to/remote/file.csv';
diff --git a/spec/frontend/boards/board_card_inner_spec.js b/spec/frontend/boards/board_card_inner_spec.js
index 95b5712bab0..8314cbda7a1 100644
--- a/spec/frontend/boards/board_card_inner_spec.js
+++ b/spec/frontend/boards/board_card_inner_spec.js
@@ -10,6 +10,7 @@ import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import IssuableBlockedIcon from '~/vue_shared/components/issuable_blocked_icon/issuable_blocked_icon.vue';
import BoardCardInner from '~/boards/components/board_card_inner.vue';
+import isShowingLabelsQuery from '~/graphql_shared/client/is_showing_labels.query.graphql';
import WorkItemTypeIcon from '~/work_items/components/work_item_type_icon.vue';
import eventHub from '~/boards/eventhub';
import defaultStore from '~/boards/stores';
@@ -63,17 +64,23 @@ describe('Board card component', () => {
actions: {
performSearch: performSearchMock,
},
- state: {
- ...defaultStore.state,
- isShowingLabels: true,
- },
+ state: defaultStore.state,
});
};
+ const mockApollo = createMockApollo();
+
const createWrapper = ({ props = {}, isEpicBoard = false, isGroupBoard = true } = {}) => {
+ mockApollo.clients.defaultClient.cache.writeQuery({
+ query: isShowingLabelsQuery,
+ data: {
+ isShowingLabels: true,
+ },
+ });
+
wrapper = mountExtended(BoardCardInner, {
store,
- apolloProvider: createMockApollo(),
+ apolloProvider: mockApollo,
propsData: {
list,
item: issue,
@@ -235,7 +242,7 @@ describe('Board card component', () => {
expect(tooltip).toBeDefined();
expect(findHiddenIssueIcon().attributes('title')).toBe(
- 'This issue is hidden because its author has been banned',
+ 'This issue is hidden because its author has been banned.',
);
});
});
diff --git a/spec/frontend/boards/board_list_helper.js b/spec/frontend/boards/board_list_helper.js
index 7367b34c4df..5bafd9a8d0e 100644
--- a/spec/frontend/boards/board_list_helper.js
+++ b/spec/frontend/boards/board_list_helper.js
@@ -122,5 +122,7 @@ export default function createComponent({
},
});
+ jest.spyOn(store, 'dispatch').mockImplementation(() => {});
+
return component;
}
diff --git a/spec/frontend/boards/board_list_spec.js b/spec/frontend/boards/board_list_spec.js
index e0a110678b1..30bb4fba4e3 100644
--- a/spec/frontend/boards/board_list_spec.js
+++ b/spec/frontend/boards/board_list_spec.js
@@ -202,8 +202,6 @@ describe('Board list component', () => {
describe('handleDragOnEnd', () => {
beforeEach(() => {
- jest.spyOn(wrapper.vm, 'moveItem').mockImplementation(() => {});
-
startDrag();
});
diff --git a/spec/frontend/boards/components/board_card_spec.js b/spec/frontend/boards/components/board_card_spec.js
index f0d40af94fe..11f9a4f6ff2 100644
--- a/spec/frontend/boards/components/board_card_spec.js
+++ b/spec/frontend/boards/components/board_card_spec.js
@@ -4,11 +4,14 @@ import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
import VueApollo from 'vue-apollo';
+import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import BoardCard from '~/boards/components/board_card.vue';
import BoardCardInner from '~/boards/components/board_card_inner.vue';
import { inactiveId } from '~/boards/constants';
+import selectedBoardItemsQuery from '~/boards/graphql/client/selected_board_items.query.graphql';
+import isShowingLabelsQuery from '~/graphql_shared/client/is_showing_labels.query.graphql';
import { mockLabelList, mockIssue, DEFAULT_COLOR } from '../mock_data';
describe('Board card', () => {
@@ -20,9 +23,11 @@ describe('Board card', () => {
Vue.use(VueApollo);
const mockSetActiveBoardItemResolver = jest.fn();
+ const mockSetSelectedBoardItemsResolver = jest.fn();
const mockApollo = createMockApollo([], {
Mutation: {
setActiveBoardItem: mockSetActiveBoardItemResolver,
+ setSelectedBoardItems: mockSetSelectedBoardItemsResolver,
},
});
@@ -49,7 +54,21 @@ describe('Board card', () => {
provide = {},
stubs = { BoardCardInner },
item = mockIssue,
+ selectedBoardItems = [],
} = {}) => {
+ mockApollo.clients.defaultClient.cache.writeQuery({
+ query: isShowingLabelsQuery,
+ data: {
+ isShowingLabels: true,
+ },
+ });
+ mockApollo.clients.defaultClient.cache.writeQuery({
+ query: selectedBoardItemsQuery,
+ data: {
+ selectedBoardItems,
+ },
+ });
+
wrapper = shallowMountExtended(BoardCard, {
apolloProvider: mockApollo,
stubs: {
@@ -99,7 +118,7 @@ describe('Board card', () => {
describe('when GlLabel is clicked in BoardCardInner', () => {
it('doesnt call toggleBoardItem', () => {
- createStore({ initialState: { isShowingLabels: true } });
+ createStore();
mountComponent();
wrapper.findComponent(GlLabel).trigger('mouseup');
@@ -132,10 +151,9 @@ describe('Board card', () => {
createStore({
initialState: {
activeId: inactiveId,
- selectedBoardItems: [mockIssue],
},
});
- mountComponent();
+ mountComponent({ selectedBoardItems: [mockIssue.id] });
expect(wrapper.classes()).toContain('multi-select');
expect(wrapper.classes()).not.toContain('is-active');
@@ -163,13 +181,17 @@ describe('Board card', () => {
window.gon = { features: { boardMultiSelect: true } };
});
- it('should call vuex action "multiSelectBoardItem" with correct parameters', async () => {
+ it('should call setSelectedBoardItemsMutation with correct parameters', async () => {
await multiSelectCard();
- expect(mockActions.toggleBoardItemMultiSelection).toHaveBeenCalledTimes(1);
- expect(mockActions.toggleBoardItemMultiSelection).toHaveBeenCalledWith(
+ expect(mockSetSelectedBoardItemsResolver).toHaveBeenCalledTimes(1);
+ expect(mockSetSelectedBoardItemsResolver).toHaveBeenCalledWith(
expect.any(Object),
- mockIssue,
+ {
+ itemId: mockIssue.id,
+ },
+ expect.anything(),
+ expect.anything(),
);
});
});
@@ -240,6 +262,7 @@ describe('Board card', () => {
it('set active board item on client when clicking on card', async () => {
await selectCard();
+ await waitForPromises();
expect(mockSetActiveBoardItemResolver).toHaveBeenCalledWith(
{},
diff --git a/spec/frontend/boards/components/board_form_spec.js b/spec/frontend/boards/components/board_form_spec.js
index 15ee3976fb1..a0dacf085e2 100644
--- a/spec/frontend/boards/components/board_form_spec.js
+++ b/spec/frontend/boards/components/board_form_spec.js
@@ -14,6 +14,7 @@ import createBoardMutation from '~/boards/graphql/board_create.mutation.graphql'
import destroyBoardMutation from '~/boards/graphql/board_destroy.mutation.graphql';
import updateBoardMutation from '~/boards/graphql/board_update.mutation.graphql';
import eventHub from '~/boards/eventhub';
+import * as cacheUpdates from '~/boards/graphql/cache_updates';
import { visitUrl } from '~/lib/utils/url_utility';
jest.mock('~/lib/utils/url_utility', () => ({
@@ -55,12 +56,10 @@ describe('BoardForm', () => {
const findInput = () => wrapper.find('#board-new-name');
const setBoardMock = jest.fn();
- const setErrorMock = jest.fn();
const store = new Vuex.Store({
actions: {
setBoard: setBoardMock,
- setError: setErrorMock,
},
});
@@ -113,6 +112,10 @@ describe('BoardForm', () => {
});
};
+ beforeEach(() => {
+ cacheUpdates.setError = jest.fn();
+ });
+
describe('when user can not admin the board', () => {
beforeEach(() => {
createComponent({
@@ -237,7 +240,7 @@ describe('BoardForm', () => {
await waitForPromises();
expect(setBoardMock).not.toHaveBeenCalled();
- expect(setErrorMock).toHaveBeenCalled();
+ expect(cacheUpdates.setError).toHaveBeenCalled();
});
describe('when Apollo boards FF is on', () => {
@@ -353,7 +356,7 @@ describe('BoardForm', () => {
await waitForPromises();
expect(setBoardMock).not.toHaveBeenCalled();
- expect(setErrorMock).toHaveBeenCalled();
+ expect(cacheUpdates.setError).toHaveBeenCalled();
});
describe('when Apollo boards FF is on', () => {
@@ -434,9 +437,11 @@ describe('BoardForm', () => {
await waitForPromises();
expect(visitUrl).not.toHaveBeenCalled();
- expect(store.dispatch).toHaveBeenCalledWith('setError', {
- message: 'Failed to delete board. Please try again.',
- });
+ expect(cacheUpdates.setError).toHaveBeenCalledWith(
+ expect.objectContaining({
+ message: 'Failed to delete board. Please try again.',
+ }),
+ );
});
});
});
diff --git a/spec/frontend/boards/components/boards_selector_spec.js b/spec/frontend/boards/components/boards_selector_spec.js
index fa18b47cf54..0a628af9939 100644
--- a/spec/frontend/boards/components/boards_selector_spec.js
+++ b/spec/frontend/boards/components/boards_selector_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown, GlLoadingIcon, GlDropdownSectionHeader } from '@gitlab/ui';
+import { GlCollapsibleListbox } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
// eslint-disable-next-line no-restricted-imports
@@ -13,7 +13,7 @@ import projectRecentBoardsQuery from '~/boards/graphql/project_recent_boards.que
import * as cacheUpdates from '~/boards/graphql/cache_updates';
import { WORKSPACE_GROUP, WORKSPACE_PROJECT } from '~/issues/constants';
import createMockApollo from 'helpers/mock_apollo_helper';
-import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import {
mockBoard,
mockGroupAllBoardsResponse,
@@ -47,17 +47,11 @@ describe('BoardsSelector', () => {
});
};
- const fillSearchBox = (filterTerm) => {
- const searchBox = wrapper.findComponent({ ref: 'searchBox' });
- const searchBoxInput = searchBox.find('input');
- searchBoxInput.setValue(filterTerm);
- searchBoxInput.trigger('input');
- };
+ const findDropdown = () => wrapper.findComponent(GlCollapsibleListbox);
- const getDropdownItems = () => wrapper.findAllByTestId('dropdown-item');
- const getDropdownHeaders = () => wrapper.findAllComponents(GlDropdownSectionHeader);
- const getLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
- const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const fillSearchBox = async (filterTerm) => {
+ await findDropdown().vm.$emit('search', filterTerm);
+ };
const projectBoardsQueryHandlerSuccess = jest
.fn()
@@ -96,7 +90,7 @@ describe('BoardsSelector', () => {
[groupRecentBoardsQuery, groupRecentBoardsQueryHandlerSuccess],
]);
- wrapper = mountExtended(BoardsSelector, {
+ wrapper = shallowMountExtended(BoardsSelector, {
store,
apolloProvider: fakeApollo,
propsData: {
@@ -142,13 +136,19 @@ describe('BoardsSelector', () => {
});
it('shows loading spinner', async () => {
+ createComponent({
+ provide: {
+ isApolloBoard: true,
+ },
+ props: {
+ isCurrentBoardLoading: true,
+ },
+ });
// Emits gl-dropdown show event to simulate the dropdown is opened at initialization time
- findDropdown().vm.$emit('show');
+ findDropdown().vm.$emit('shown');
await nextTick();
- expect(getLoadingIcon().exists()).toBe(true);
- expect(getDropdownHeaders()).toHaveLength(0);
- expect(getDropdownItems()).toHaveLength(0);
+ expect(findDropdown().props('loading')).toBe(true);
});
});
@@ -158,7 +158,7 @@ describe('BoardsSelector', () => {
await nextTick();
// Emits gl-dropdown show event to simulate the dropdown is opened at initialization time
- findDropdown().vm.$emit('show');
+ findDropdown().vm.$emit('shown');
await nextTick();
});
@@ -167,9 +167,8 @@ describe('BoardsSelector', () => {
expect(projectBoardsQueryHandlerSuccess).toHaveBeenCalled();
});
- it('hides loading spinner', async () => {
- await nextTick();
- expect(getLoadingIcon().exists()).toBe(false);
+ it('hides loading spinner', () => {
+ expect(findDropdown().props('loading')).toBe(false);
});
describe('filtering', () => {
@@ -178,25 +177,26 @@ describe('BoardsSelector', () => {
});
it('shows all boards without filtering', () => {
- expect(getDropdownItems()).toHaveLength(boards.length + recentIssueBoards.length);
+ expect(findDropdown().props('items')[0].text).toBe('Recent');
+ expect(findDropdown().props('items')[0].options).toHaveLength(recentIssueBoards.length);
+ expect(findDropdown().props('items')[1].text).toBe('All');
+ expect(findDropdown().props('items')[1].options).toHaveLength(
+ boards.length - recentIssueBoards.length,
+ );
});
it('shows only matching boards when filtering', async () => {
const filterTerm = 'board1';
const expectedCount = boards.filter((board) => board.name.includes(filterTerm)).length;
- fillSearchBox(filterTerm);
-
- await nextTick();
- expect(getDropdownItems()).toHaveLength(expectedCount);
+ await fillSearchBox(filterTerm);
+ expect(findDropdown().props('items')).toHaveLength(expectedCount);
});
it('shows message if there are no matching boards', async () => {
- fillSearchBox('does not exist');
+ await fillSearchBox('does not exist');
- await nextTick();
- expect(getDropdownItems()).toHaveLength(0);
- expect(wrapper.text().includes('No matching boards found')).toBe(true);
+ expect(findDropdown().props('noResultsText')).toBe('No matching boards found');
});
});
@@ -204,14 +204,18 @@ describe('BoardsSelector', () => {
it('shows only when boards are greater than 10', async () => {
await nextTick();
expect(projectRecentBoardsQueryHandlerSuccess).toHaveBeenCalled();
- expect(getDropdownHeaders()).toHaveLength(2);
+
+ expect(findDropdown().props('items')).toHaveLength(2);
+ expect(findDropdown().props('items')[0].text).toBe('Recent');
+ expect(findDropdown().props('items')[1].text).toBe('All');
});
it('does not show when boards are less than 10', async () => {
createComponent({ projectBoardsQueryHandler: smallBoardsQueryHandlerSuccess });
await nextTick();
- expect(getDropdownHeaders()).toHaveLength(0);
+
+ expect(findDropdown().props('items')).toHaveLength(0);
});
it('does not show when recentIssueBoards api returns empty array', async () => {
@@ -220,14 +224,14 @@ describe('BoardsSelector', () => {
});
await nextTick();
- expect(getDropdownHeaders()).toHaveLength(0);
+ expect(findDropdown().props('items')).toHaveLength(0);
});
it('does not show when search is active', async () => {
fillSearchBox('Random string');
await nextTick();
- expect(getDropdownHeaders()).toHaveLength(0);
+ expect(findDropdown().props('items')).toHaveLength(0);
});
});
});
@@ -248,7 +252,7 @@ describe('BoardsSelector', () => {
await nextTick();
// Emits gl-dropdown show event to simulate the dropdown is opened at initialization time
- findDropdown().vm.$emit('show');
+ findDropdown().vm.$emit('shown');
await nextTick();
@@ -272,7 +276,7 @@ describe('BoardsSelector', () => {
await nextTick();
// Emits gl-dropdown show event to simulate the dropdown is opened at initialization time
- findDropdown().vm.$emit('show');
+ findDropdown().vm.$emit('shown');
await waitForPromises();
@@ -286,6 +290,7 @@ describe('BoardsSelector', () => {
createStore();
createComponent({ provide: { multipleIssueBoardsAvailable: true } });
expect(findDropdown().exists()).toBe(true);
+ expect(findDropdown().props('toggleText')).toBe('Select board');
});
});
@@ -296,6 +301,7 @@ describe('BoardsSelector', () => {
provide: { multipleIssueBoardsAvailable: false, hasMissingBoards: true },
});
expect(findDropdown().exists()).toBe(true);
+ expect(findDropdown().props('toggleText')).toBe('Select board');
});
});
@@ -317,6 +323,7 @@ describe('BoardsSelector', () => {
provide: { isApolloBoard: true },
});
expect(findDropdown().props('loading')).toBe(true);
+ expect(findDropdown().props('toggleText')).toBe('Select board');
});
});
});
diff --git a/spec/frontend/boards/components/issue_board_filtered_search_spec.js b/spec/frontend/boards/components/issue_board_filtered_search_spec.js
index 16ad54f0854..1edb6812af0 100644
--- a/spec/frontend/boards/components/issue_board_filtered_search_spec.js
+++ b/spec/frontend/boards/components/issue_board_filtered_search_spec.js
@@ -26,14 +26,11 @@ describe('IssueBoardFilter', () => {
});
};
- let fetchUsersSpy;
let fetchLabelsSpy;
beforeEach(() => {
- fetchUsersSpy = jest.fn();
fetchLabelsSpy = jest.fn();
issueBoardFilters.mockReturnValue({
- fetchUsers: fetchUsersSpy,
fetchLabels: fetchLabelsSpy,
});
});
@@ -61,7 +58,7 @@ describe('IssueBoardFilter', () => {
({ isSignedIn }) => {
createComponent({ isSignedIn });
- const tokens = mockTokens(fetchLabelsSpy, fetchUsersSpy, isSignedIn);
+ const tokens = mockTokens(fetchLabelsSpy, isSignedIn);
expect(findBoardsFilteredSearch().props('tokens')).toEqual(orderBy(tokens, ['title']));
},
diff --git a/spec/frontend/boards/mock_data.js b/spec/frontend/boards/mock_data.js
index dfcdb4c05d0..dfc8b18e197 100644
--- a/spec/frontend/boards/mock_data.js
+++ b/spec/frontend/boards/mock_data.js
@@ -827,7 +827,7 @@ export const mockConfidentialToken = {
],
};
-export const mockTokens = (fetchLabels, fetchUsers, isSignedIn) => [
+export const mockTokens = (fetchLabels, isSignedIn) => [
{
icon: 'user',
title: TOKEN_TITLE_ASSIGNEE,
@@ -836,7 +836,8 @@ export const mockTokens = (fetchLabels, fetchUsers, isSignedIn) => [
token: UserToken,
dataType: 'user',
unique: true,
- fetchUsers,
+ fullPath: 'gitlab-org',
+ isProject: false,
preloadedUsers: [],
},
{
@@ -848,7 +849,8 @@ export const mockTokens = (fetchLabels, fetchUsers, isSignedIn) => [
token: UserToken,
dataType: 'user',
unique: true,
- fetchUsers,
+ fullPath: 'gitlab-org',
+ isProject: false,
preloadedUsers: [],
},
{
@@ -973,7 +975,7 @@ export const boardListQueryResponse = ({
boardList: {
__typename: 'BoardList',
id: listId,
- totalWeight: 5,
+ totalIssueWeight: '5',
issuesCount,
},
},
diff --git a/spec/frontend/branches/components/__snapshots__/delete_merged_branches_spec.js.snap b/spec/frontend/branches/components/__snapshots__/delete_merged_branches_spec.js.snap
index ee8031f2475..dfb45083c7b 100644
--- a/spec/frontend/branches/components/__snapshots__/delete_merged_branches_spec.js.snap
+++ b/spec/frontend/branches/components/__snapshots__/delete_merged_branches_spec.js.snap
@@ -13,7 +13,7 @@ exports[`Delete merged branches component Delete merged branches confirmation mo
size="medium"
textsronly="true"
toggleid="dropdown-toggle-btn-25"
- toggletext=""
+ toggletext="More actions"
variant="default"
>
<ul
diff --git a/spec/frontend/branches/components/sort_dropdown_spec.js b/spec/frontend/branches/components/sort_dropdown_spec.js
index 64ef30bb8a8..777e54f8e69 100644
--- a/spec/frontend/branches/components/sort_dropdown_spec.js
+++ b/spec/frontend/branches/components/sort_dropdown_spec.js
@@ -1,6 +1,7 @@
import { GlSearchBoxByClick } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import setWindowLocation from 'helpers/set_window_location_helper';
import SortDropdown from '~/branches/components/sort_dropdown.vue';
import * as urlUtils from '~/lib/utils/url_utility';
@@ -67,20 +68,33 @@ describe('Branches Sort Dropdown', () => {
});
});
+ describe('when url contains a search param', () => {
+ const branchName = 'branch-1';
+
+ beforeEach(() => {
+ setWindowLocation(`/root/ci-cd-project-demo/-/branches?search=${branchName}`);
+ wrapper = createWrapper();
+ });
+
+ it('should set the default the input value to search param', () => {
+ expect(findSearchBox().props('value')).toBe(branchName);
+ });
+ });
+
describe('when submitting a search term', () => {
beforeEach(() => {
urlUtils.visitUrl = jest.fn();
-
wrapper = createWrapper();
});
it('should call visitUrl', () => {
+ const searchTerm = 'branch-1';
const searchBox = findSearchBox();
-
+ searchBox.vm.$emit('input', searchTerm);
searchBox.vm.$emit('submit');
expect(urlUtils.visitUrl).toHaveBeenCalledWith(
- '/root/ci-cd-project-demo/-/branches?state=all&sort=updated_desc',
+ '/root/ci-cd-project-demo/-/branches?state=all&sort=updated_desc&search=branch-1',
);
});
});
diff --git a/spec/frontend/ci/admin/jobs_table/components/cells/runner_cell_spec.js b/spec/frontend/ci/admin/jobs_table/components/cells/runner_cell_spec.js
index 2f1dae71572..c9758c5ab24 100644
--- a/spec/frontend/ci/admin/jobs_table/components/cells/runner_cell_spec.js
+++ b/spec/frontend/ci/admin/jobs_table/components/cells/runner_cell_spec.js
@@ -1,5 +1,6 @@
import { GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import RunnerTypeIcon from '~/ci/runner/components/runner_type_icon.vue';
import RunnerCell from '~/ci/admin/jobs_table/components/cells/runner_cell.vue';
import { RUNNER_EMPTY_TEXT } from '~/ci/admin/jobs_table/constants';
import { allRunnersData } from 'jest/ci/runner/mock_data';
@@ -61,4 +62,29 @@ describe('Runner Cell', () => {
});
});
});
+
+ describe('Runner Type Icon', () => {
+ const findRunnerTypeIcon = () => wrapper.findComponent(RunnerTypeIcon);
+
+ describe('Job with runner', () => {
+ beforeEach(() => {
+ createComponent({ job: mockJobWithRunner });
+ });
+
+ it('shows the runner type icon', () => {
+ expect(findRunnerTypeIcon().exists()).toBe(true);
+ expect(findRunnerTypeIcon().props('type')).toBe(mockJobWithRunner.runner.runnerType);
+ });
+ });
+
+ describe('Job without runner', () => {
+ beforeEach(() => {
+ createComponent({ job: mockJobWithoutRunner });
+ });
+
+ it('does not show the runner type icon', () => {
+ expect(findRunnerTypeIcon().exists()).toBe(false);
+ });
+ });
+ });
});
diff --git a/spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js b/spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js
index 1cbb1a714c9..3628af31aa1 100644
--- a/spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js
+++ b/spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js
@@ -1,16 +1,8 @@
-import {
- GlLoadingIcon,
- GlTable,
- GlLink,
- GlBadge,
- GlPagination,
- GlModal,
- GlFormCheckbox,
-} from '@gitlab/ui';
+import { GlLoadingIcon, GlTable, GlLink, GlPagination, GlModal, GlFormCheckbox } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import getJobArtifactsResponse from 'test_fixtures/graphql/ci/artifacts/graphql/queries/get_job_artifacts.query.graphql.json';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CiBadgeLink from '~/vue_shared/components/ci_badge_link.vue';
import waitForPromises from 'helpers/wait_for_promises';
import JobArtifactsTable from '~/ci/artifacts/components/job_artifacts_table.vue';
import ArtifactsTableRowDetails from '~/ci/artifacts/components/artifacts_table_row_details.vue';
@@ -59,13 +51,13 @@ describe('JobArtifactsTable component', () => {
const findStatuses = () => wrapper.findAllByTestId('job-artifacts-job-status');
const findSuccessfulJobStatus = () => findStatuses().at(0);
- const findFailedJobStatus = () => findStatuses().at(1);
+ const findCiBadgeLink = () => findSuccessfulJobStatus().findComponent(CiBadgeLink);
const findLinks = () => wrapper.findAllComponents(GlLink);
const findJobLink = () => findLinks().at(0);
const findPipelineLink = () => findLinks().at(1);
- const findRefLink = () => findLinks().at(2);
- const findCommitLink = () => findLinks().at(3);
+ const findCommitLink = () => findLinks().at(2);
+ const findRefLink = () => findLinks().at(3);
const findSize = () => wrapper.findByTestId('job-artifacts-size');
const findCreated = () => wrapper.findByTestId('job-artifacts-created');
@@ -209,13 +201,13 @@ describe('JobArtifactsTable component', () => {
});
it('shows the job status as an icon for a successful job', () => {
- expect(findSuccessfulJobStatus().findComponent(CiIcon).exists()).toBe(true);
- expect(findSuccessfulJobStatus().findComponent(GlBadge).exists()).toBe(false);
- });
-
- it('shows the job status as a badge for other job statuses', () => {
- expect(findFailedJobStatus().findComponent(GlBadge).exists()).toBe(true);
- expect(findFailedJobStatus().findComponent(CiIcon).exists()).toBe(false);
+ expect(findCiBadgeLink().props()).toMatchObject({
+ status: {
+ group: 'success',
+ },
+ size: 'sm',
+ showText: false,
+ });
});
it('shows links to the job, pipeline, ref, and commit', () => {
diff --git a/spec/frontend/ci/catalog/components/ci_catalog_home_spec.js b/spec/frontend/ci/catalog/components/ci_catalog_home_spec.js
new file mode 100644
index 00000000000..1b5c86c19cb
--- /dev/null
+++ b/spec/frontend/ci/catalog/components/ci_catalog_home_spec.js
@@ -0,0 +1,46 @@
+import { shallowMount } from '@vue/test-utils';
+import { createRouter } from '~/ci/catalog/router';
+import ciResourceDetailsPage from '~/ci/catalog/components/pages/ci_resource_details_page.vue';
+import CiCatalogHome from '~/ci/catalog/components/ci_catalog_home.vue';
+
+describe('CiCatalogHome', () => {
+ const defaultProps = {};
+ const baseRoute = '/';
+ const resourcesPageComponentStub = {
+ name: 'page-component',
+ template: '<div>Hello</div>',
+ };
+ const router = createRouter(baseRoute, resourcesPageComponentStub);
+
+ const createComponent = ({ props = {} } = {}) => {
+ shallowMount(CiCatalogHome, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ router,
+ });
+ };
+
+ describe('when mounted', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ describe('router', () => {
+ it.each`
+ path | component
+ ${baseRoute} | ${resourcesPageComponentStub}
+ ${'/1'} | ${ciResourceDetailsPage}
+ `('when route is $path it renders the right component', async ({ path, component }) => {
+ if (path !== '/') {
+ await router.push(path);
+ }
+
+ const [root] = router.currentRoute.matched;
+
+ expect(root.components.default).toBe(component);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/ci/catalog/components/details/ci_resource_about_spec.js b/spec/frontend/ci/catalog/components/details/ci_resource_about_spec.js
new file mode 100644
index 00000000000..658a135534b
--- /dev/null
+++ b/spec/frontend/ci/catalog/components/details/ci_resource_about_spec.js
@@ -0,0 +1,120 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import CiResourceAbout from '~/ci/catalog/components/details/ci_resource_about.vue';
+import { formatDate } from '~/lib/utils/datetime_utility';
+
+describe('CiResourceAbout', () => {
+ let wrapper;
+
+ const defaultProps = {
+ isLoadingSharedData: false,
+ isLoadingDetails: false,
+ openIssuesCount: 4,
+ openMergeRequestsCount: 9,
+ latestVersion: {
+ id: 1,
+ tagName: 'v1.0.0',
+ tagPath: 'path/to/release',
+ releasedAt: '2022-08-23T17:19:09Z',
+ },
+ webPath: 'path/to/project',
+ };
+
+ const createComponent = ({ props = {} } = {}) => {
+ wrapper = shallowMountExtended(CiResourceAbout, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ });
+ };
+
+ const findProjectLink = () => wrapper.findByText('Go to the project');
+ const findIssueCount = () => wrapper.findByText(`${defaultProps.openIssuesCount} issues`);
+ const findMergeRequestCount = () =>
+ wrapper.findByText(`${defaultProps.openMergeRequestsCount} merge requests`);
+ const findLastRelease = () =>
+ wrapper.findByText(
+ `Last release at ${formatDate(defaultProps.latestVersion.releasedAt, 'yyyy-mm-dd')}`,
+ );
+ const findAllLoadingItems = () => wrapper.findAllByTestId('skeleton-loading-line');
+
+ // Shared data items are items which gets their data from the index page query.
+ const sharedDataItems = [findProjectLink, findLastRelease];
+ // additional details items gets their state only when on the details page
+ const additionalDetailsItems = [findIssueCount, findMergeRequestCount];
+ const allItems = [...sharedDataItems, ...additionalDetailsItems];
+
+ describe('when loading shared data', () => {
+ beforeEach(() => {
+ createComponent({ props: { isLoadingSharedData: true, isLoadingDetails: true } });
+ });
+
+ it('renders all server-side data as loading', () => {
+ allItems.forEach((finder) => {
+ expect(finder().exists()).toBe(false);
+ });
+
+ expect(findAllLoadingItems()).toHaveLength(allItems.length);
+ });
+ });
+
+ describe('when loading additional details', () => {
+ beforeEach(() => {
+ createComponent({ props: { isLoadingDetails: true } });
+ });
+
+ it('renders only the details query as loading', () => {
+ sharedDataItems.forEach((finder) => {
+ expect(finder().exists()).toBe(true);
+ });
+
+ additionalDetailsItems.forEach((finder) => {
+ expect(finder().exists()).toBe(false);
+ });
+
+ expect(findAllLoadingItems()).toHaveLength(additionalDetailsItems.length);
+ });
+ });
+
+ describe('when has loaded', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders project link', () => {
+ expect(findProjectLink().exists()).toBe(true);
+ });
+
+ it('renders the number of issues opened', () => {
+ expect(findIssueCount().exists()).toBe(true);
+ });
+
+ it('renders the number of merge requests opened', () => {
+ expect(findMergeRequestCount().exists()).toBe(true);
+ });
+
+ it('renders the last release date', () => {
+ expect(findLastRelease().exists()).toBe(true);
+ });
+
+ describe('links', () => {
+ it('has the correct project link', () => {
+ expect(findProjectLink().attributes('href')).toBe(defaultProps.webPath);
+ });
+
+ it('has the correct issues link', () => {
+ expect(findIssueCount().attributes('href')).toBe(`${defaultProps.webPath}/issues`);
+ });
+
+ it('has the correct merge request link', () => {
+ expect(findMergeRequestCount().attributes('href')).toBe(
+ `${defaultProps.webPath}/merge_requests`,
+ );
+ });
+
+ it('has no link for release data', () => {
+ expect(findLastRelease().attributes('href')).toBe(undefined);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/ci/catalog/components/details/ci_resource_components_spec.js b/spec/frontend/ci/catalog/components/details/ci_resource_components_spec.js
new file mode 100644
index 00000000000..a41996d20b3
--- /dev/null
+++ b/spec/frontend/ci/catalog/components/details/ci_resource_components_spec.js
@@ -0,0 +1,113 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlLoadingIcon } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { resolvers } from '~/ci/catalog/graphql/settings';
+import CiResourceComponents from '~/ci/catalog/components/details/ci_resource_components.vue';
+import getCiCatalogcomponentComponents from '~/ci/catalog/graphql/queries/get_ci_catalog_resource_components.query.graphql';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { createAlert } from '~/alert';
+import { mockComponents } from '../../mock';
+
+Vue.use(VueApollo);
+jest.mock('~/alert');
+
+describe('CiResourceComponents', () => {
+ let wrapper;
+ let mockComponentsResponse;
+
+ const components = mockComponents.data.ciCatalogResource.components.nodes;
+
+ const resourceId = 'gid://gitlab/Ci::Catalog::Resource/1';
+
+ const defaultProps = { resourceId };
+
+ const createComponent = async () => {
+ const handlers = [[getCiCatalogcomponentComponents, mockComponentsResponse]];
+ const mockApollo = createMockApollo(handlers, resolvers);
+
+ wrapper = mountExtended(CiResourceComponents, {
+ propsData: {
+ ...defaultProps,
+ },
+ apolloProvider: mockApollo,
+ });
+
+ await waitForPromises();
+ };
+
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findComponents = () => wrapper.findAllByTestId('component-section');
+
+ beforeEach(() => {
+ mockComponentsResponse = jest.fn();
+ mockComponentsResponse.mockResolvedValue(mockComponents);
+ });
+
+ describe('when queries are loading', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('render a loading icon', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+
+ it('does not render components', () => {
+ expect(findComponents()).toHaveLength(0);
+ });
+
+ it('does not throw an error', () => {
+ expect(createAlert).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when components query throws an error', () => {
+ beforeEach(async () => {
+ mockComponentsResponse.mockRejectedValue();
+ await createComponent();
+ });
+
+ it('calls createAlert with the correct message', () => {
+ expect(createAlert).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalledWith({
+ message: "There was an error fetching this resource's components",
+ });
+ });
+
+ it('does not render the loading state', () => {
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+ });
+
+ describe('when queries have loaded', () => {
+ beforeEach(async () => {
+ await createComponent();
+ });
+
+ it('renders every component', () => {
+ expect(findComponents()).toHaveLength(components.length);
+ });
+
+ it('renders the component name, description and snippet', () => {
+ components.forEach((component) => {
+ expect(wrapper.text()).toContain(component.name);
+ expect(wrapper.text()).toContain(component.description);
+ expect(wrapper.text()).toContain(component.path);
+ });
+ });
+
+ describe('inputs', () => {
+ it('renders the component parameter attributes', () => {
+ const [firstComponent] = components;
+
+ firstComponent.inputs.nodes.forEach((input) => {
+ expect(findComponents().at(0).text()).toContain(input.name);
+ expect(findComponents().at(0).text()).toContain(input.defaultValue);
+ expect(findComponents().at(0).text()).toContain('Yes');
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/ci/catalog/components/details/ci_resource_details_spec.js b/spec/frontend/ci/catalog/components/details/ci_resource_details_spec.js
new file mode 100644
index 00000000000..1f7dcf9d4e5
--- /dev/null
+++ b/spec/frontend/ci/catalog/components/details/ci_resource_details_spec.js
@@ -0,0 +1,83 @@
+import { GlTabs, GlTab } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import CiResourceComponents from '~/ci/catalog/components/details/ci_resource_components.vue';
+import CiResourceDetails from '~/ci/catalog/components/details/ci_resource_details.vue';
+import CiResourceReadme from '~/ci/catalog/components/details/ci_resource_readme.vue';
+
+describe('CiResourceDetails', () => {
+ let wrapper;
+
+ const defaultProps = {
+ resourceId: 'gid://gitlab/Ci::Catalog::Resource/1',
+ };
+ const defaultProvide = {
+ glFeatures: { ciCatalogComponentsTab: true },
+ };
+
+ const createComponent = ({ provide = {}, props = {} } = {}) => {
+ wrapper = shallowMount(CiResourceDetails, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ provide: {
+ ...defaultProvide,
+ ...provide,
+ },
+ stubs: {
+ GlTabs,
+ },
+ });
+ };
+ const findAllTabs = () => wrapper.findAllComponents(GlTab);
+ const findCiResourceReadme = () => wrapper.findComponent(CiResourceReadme);
+ const findCiResourceComponents = () => wrapper.findComponent(CiResourceComponents);
+
+ describe('tabs', () => {
+ describe('when feature flag `ci_catalog_components_tab` is enabled', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders the readme and components tab', () => {
+ expect(findAllTabs()).toHaveLength(2);
+ expect(findCiResourceComponents().exists()).toBe(true);
+ expect(findCiResourceReadme().exists()).toBe(true);
+ });
+ });
+
+ describe('when feature flag `ci_catalog_components_tab` is disabled', () => {
+ beforeEach(() => {
+ createComponent({
+ provide: { glFeatures: { ciCatalogComponentsTab: false } },
+ });
+ });
+
+ it('renders only readme tab as default', () => {
+ expect(findCiResourceReadme().exists()).toBe(true);
+ expect(findCiResourceComponents().exists()).toBe(false);
+ expect(findAllTabs()).toHaveLength(1);
+ });
+ });
+
+ describe('UI', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('passes lazy attribute to all tabs', () => {
+ findAllTabs().wrappers.forEach((tab) => {
+ expect(tab.attributes().lazy).not.toBeUndefined();
+ });
+ });
+
+ it('passes the right props to the readme component', () => {
+ expect(findCiResourceReadme().props().resourceId).toBe(defaultProps.resourceId);
+ });
+
+ it('passes the right props to the components tab', () => {
+ expect(findCiResourceComponents().props().resourceId).toBe(defaultProps.resourceId);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/ci/catalog/components/details/ci_resource_header_spec.js b/spec/frontend/ci/catalog/components/details/ci_resource_header_spec.js
new file mode 100644
index 00000000000..6ab9520508d
--- /dev/null
+++ b/spec/frontend/ci/catalog/components/details/ci_resource_header_spec.js
@@ -0,0 +1,139 @@
+import { GlAvatar, GlAvatarLink, GlBadge } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import CiResourceHeader from '~/ci/catalog/components/details/ci_resource_header.vue';
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
+import CiBadgeLink from '~/vue_shared/components/ci_badge_link.vue';
+import CiResourceAbout from '~/ci/catalog/components/details/ci_resource_about.vue';
+import { catalogSharedDataMock, catalogAdditionalDetailsMock } from '../../mock';
+
+describe('CiResourceHeader', () => {
+ let wrapper;
+
+ const resource = { ...catalogSharedDataMock.data.ciCatalogResource };
+ const resourceAdditionalData = { ...catalogAdditionalDetailsMock.data.ciCatalogResource };
+
+ const defaultProps = {
+ openIssuesCount: resourceAdditionalData.openIssuesCount,
+ openMergeRequestsCount: resourceAdditionalData.openMergeRequestsCount,
+ isLoadingDetails: false,
+ isLoadingSharedData: false,
+ resource,
+ };
+
+ const findAboutComponent = () => wrapper.findComponent(CiResourceAbout);
+ const findAvatar = () => wrapper.findComponent(GlAvatar);
+ const findAvatarLink = () => wrapper.findComponent(GlAvatarLink);
+ const findVersionBadge = () => wrapper.findComponent(GlBadge);
+ const findPipelineStatusBadge = () => wrapper.findComponent(CiBadgeLink);
+
+ const createComponent = ({ props = {} } = {}) => {
+ wrapper = shallowMountExtended(CiResourceHeader, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ });
+ };
+
+ describe('when mounted', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders the project name and description', () => {
+ expect(wrapper.html()).toContain(resource.name);
+ expect(wrapper.html()).toContain(resource.description);
+ });
+
+ it('renders the namespace and project path', () => {
+ expect(wrapper.html()).toContain(resource.rootNamespace.fullPath);
+ expect(wrapper.html()).toContain(resource.rootNamespace.name);
+ });
+
+ it('renders the avatar', () => {
+ const { id, name } = resource;
+
+ expect(findAvatar().exists()).toBe(true);
+ expect(findAvatarLink().exists()).toBe(true);
+ expect(findAvatar().props()).toMatchObject({
+ entityId: getIdFromGraphQLId(id),
+ entityName: name,
+ });
+ });
+
+ it('renders the catalog about section and passes props', () => {
+ expect(findAboutComponent().exists()).toBe(true);
+ expect(findAboutComponent().props()).toEqual({
+ isLoadingDetails: false,
+ isLoadingSharedData: false,
+ openIssuesCount: defaultProps.openIssuesCount,
+ openMergeRequestsCount: defaultProps.openMergeRequestsCount,
+ latestVersion: resource.latestVersion,
+ webPath: resource.webPath,
+ });
+ });
+ });
+
+ describe('Version badge', () => {
+ describe('without a version', () => {
+ beforeEach(() => {
+ createComponent({ props: { resource: { ...resource, latestVersion: null } } });
+ });
+
+ it('does not render', () => {
+ expect(findVersionBadge().exists()).toBe(false);
+ });
+ });
+
+ describe('with a version', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders', () => {
+ expect(findVersionBadge().exists()).toBe(true);
+ });
+ });
+ });
+
+ describe('when the project has a release', () => {
+ const pipelineStatus = {
+ detailsPath: 'path/to/pipeline',
+ icon: 'status_success',
+ text: 'passed',
+ group: 'success',
+ };
+
+ describe.each`
+ hasPipelineBadge | describeText | testText | status
+ ${true} | ${'is'} | ${'renders'} | ${pipelineStatus}
+ ${false} | ${'is not'} | ${'does not render'} | ${{}}
+ `('and there $describeText a pipeline', ({ hasPipelineBadge, testText, status }) => {
+ beforeEach(() => {
+ createComponent({
+ props: {
+ pipelineStatus: status,
+ latestVersion: { tagName: '1.0.0', tagPath: 'path/to/release' },
+ },
+ });
+ });
+
+ it('renders the version badge', () => {
+ expect(findVersionBadge().exists()).toBe(true);
+ });
+
+ it(`${testText} the pipeline status badge`, () => {
+ expect(findPipelineStatusBadge().exists()).toBe(hasPipelineBadge);
+ if (hasPipelineBadge) {
+ expect(findPipelineStatusBadge().props()).toEqual({
+ showText: true,
+ size: 'sm',
+ status: pipelineStatus,
+ showTooltip: true,
+ useLink: true,
+ });
+ }
+ });
+ });
+ });
+});
diff --git a/spec/frontend/ci/catalog/components/details/ci_resource_readme_spec.js b/spec/frontend/ci/catalog/components/details/ci_resource_readme_spec.js
new file mode 100644
index 00000000000..0dadac236a8
--- /dev/null
+++ b/spec/frontend/ci/catalog/components/details/ci_resource_readme_spec.js
@@ -0,0 +1,96 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlLoadingIcon } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import CiResourceReadme from '~/ci/catalog/components/details/ci_resource_readme.vue';
+import getCiCatalogResourceReadme from '~/ci/catalog/graphql/queries/get_ci_catalog_resource_readme.query.graphql';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { createAlert } from '~/alert';
+
+jest.mock('~/alert');
+
+Vue.use(VueApollo);
+
+const readmeHtml = '<h1>This is a readme file</h1>';
+const resourceId = 'gid://gitlab/Ci::Catalog::Resource/1';
+
+describe('CiResourceReadme', () => {
+ let wrapper;
+ let mockReadmeResponse;
+
+ const readmeMockData = {
+ data: {
+ ciCatalogResource: {
+ id: resourceId,
+ readmeHtml,
+ },
+ },
+ };
+
+ const defaultProps = { resourceId };
+
+ const createComponent = ({ props = {} } = {}) => {
+ const handlers = [[getCiCatalogResourceReadme, mockReadmeResponse]];
+
+ wrapper = shallowMountExtended(CiResourceReadme, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ apolloProvider: createMockApollo(handlers),
+ });
+ };
+
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+
+ beforeEach(() => {
+ mockReadmeResponse = jest.fn();
+ });
+
+ describe('when loading', () => {
+ beforeEach(() => {
+ mockReadmeResponse.mockResolvedValue(readmeMockData);
+ createComponent();
+ });
+
+ it('renders only a loading icon', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ expect(wrapper.html()).not.toContain(readmeHtml);
+ });
+ });
+
+ describe('when mounted', () => {
+ beforeEach(async () => {
+ mockReadmeResponse.mockResolvedValue(readmeMockData);
+
+ createComponent();
+ await waitForPromises();
+ });
+
+ it('renders only the received HTML', () => {
+ expect(findLoadingIcon().exists()).toBe(false);
+ expect(wrapper.html()).toContain(readmeHtml);
+ });
+
+ it('does not render an error', () => {
+ expect(createAlert).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when there is an error loading the readme', () => {
+ beforeEach(async () => {
+ mockReadmeResponse.mockRejectedValue({ errors: [] });
+
+ createComponent();
+ await waitForPromises();
+ });
+
+ it('calls the createAlert function to show an error', () => {
+ expect(createAlert).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalledWith({
+ message: "There was a problem loading this project's readme content.",
+ });
+ });
+ });
+});
diff --git a/spec/frontend/ci/catalog/components/list/catalog_header_spec.js b/spec/frontend/ci/catalog/components/list/catalog_header_spec.js
new file mode 100644
index 00000000000..912fd9e1a93
--- /dev/null
+++ b/spec/frontend/ci/catalog/components/list/catalog_header_spec.js
@@ -0,0 +1,86 @@
+import { GlBanner, GlButton } from '@gitlab/ui';
+import { useLocalStorageSpy } from 'helpers/local_storage_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import CatalogHeader from '~/ci/catalog/components/list/catalog_header.vue';
+import { CATALOG_FEEDBACK_DISMISSED_KEY } from '~/ci/catalog/constants';
+
+describe('CatalogHeader', () => {
+ useLocalStorageSpy();
+
+ let wrapper;
+
+ const defaultProps = {};
+ const defaultProvide = {
+ pageTitle: 'Catalog page',
+ pageDescription: 'This is a nice catalog page',
+ };
+
+ const findBanner = () => wrapper.findComponent(GlBanner);
+ const findFeedbackButton = () => findBanner().findComponent(GlButton);
+ const findTitle = () => wrapper.findByText(defaultProvide.pageTitle);
+ const findDescription = () => wrapper.findByText(defaultProvide.pageDescription);
+
+ const createComponent = ({ props = {}, stubs = {} } = {}) => {
+ wrapper = shallowMountExtended(CatalogHeader, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ provide: defaultProvide,
+ stubs: {
+ ...stubs,
+ },
+ });
+ };
+
+ it('renders the Catalog title and description', () => {
+ createComponent();
+
+ expect(findTitle().exists()).toBe(true);
+ expect(findDescription().exists()).toBe(true);
+ });
+
+ describe('Feedback banner', () => {
+ describe('when user has never dismissed', () => {
+ beforeEach(() => {
+ createComponent({ stubs: { GlBanner } });
+ });
+
+ it('is visible', () => {
+ expect(findBanner().exists()).toBe(true);
+ });
+
+ it('has link to feedback issue', () => {
+ expect(findFeedbackButton().attributes().href).toBe(
+ 'https://gitlab.com/gitlab-org/gitlab/-/issues/407556',
+ );
+ });
+ });
+
+ describe('when user dismisses it', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('sets the local storage and removes the banner', async () => {
+ expect(findBanner().exists()).toBe(true);
+
+ await findBanner().vm.$emit('close');
+
+ expect(localStorage.setItem).toHaveBeenCalledWith(CATALOG_FEEDBACK_DISMISSED_KEY, 'true');
+ expect(findBanner().exists()).toBe(false);
+ });
+ });
+
+ describe('when user has dismissed it before', () => {
+ beforeEach(() => {
+ localStorage.setItem(CATALOG_FEEDBACK_DISMISSED_KEY, 'true');
+ createComponent();
+ });
+
+ it('does not show the banner', () => {
+ expect(findBanner().exists()).toBe(false);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/ci/catalog/components/list/catalog_list_skeleton_loader_spec.js b/spec/frontend/ci/catalog/components/list/catalog_list_skeleton_loader_spec.js
new file mode 100644
index 00000000000..d21fd56eb2e
--- /dev/null
+++ b/spec/frontend/ci/catalog/components/list/catalog_list_skeleton_loader_spec.js
@@ -0,0 +1,22 @@
+import { shallowMount } from '@vue/test-utils';
+import CatalogListSkeletonLoader from '~/ci/catalog/components/list/catalog_list_skeleton_loader.vue';
+
+describe('CatalogListSkeletonLoader', () => {
+ let wrapper;
+
+ const findSkeletonLoader = () => wrapper.findComponent(CatalogListSkeletonLoader);
+
+ const createComponent = () => {
+ wrapper = shallowMount(CatalogListSkeletonLoader, {});
+ };
+
+ describe('when mounted', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders skeleton item', () => {
+ expect(findSkeletonLoader().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/ci/catalog/components/list/ci_resources_list_item_spec.js b/spec/frontend/ci/catalog/components/list/ci_resources_list_item_spec.js
new file mode 100644
index 00000000000..7f446064366
--- /dev/null
+++ b/spec/frontend/ci/catalog/components/list/ci_resources_list_item_spec.js
@@ -0,0 +1,198 @@
+import Vue from 'vue';
+import VueRouter from 'vue-router';
+import { GlAvatar, GlBadge, GlButton, GlSprintf } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { createRouter } from '~/ci/catalog/router/index';
+import CiResourcesListItem from '~/ci/catalog/components/list/ci_resources_list_item.vue';
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
+import { CI_RESOURCE_DETAILS_PAGE_NAME } from '~/ci/catalog/router/constants';
+import { catalogSinglePageResponse } from '../../mock';
+
+Vue.use(VueRouter);
+
+let router;
+let routerPush;
+
+describe('CiResourcesListItem', () => {
+ let wrapper;
+
+ const resource = catalogSinglePageResponse.data.ciCatalogResources.nodes[0];
+ const release = {
+ author: { name: 'author', webUrl: '/user/1' },
+ releasedAt: Date.now(),
+ tagName: '1.0.0',
+ };
+ const defaultProps = {
+ resource,
+ };
+
+ const createComponent = ({ props = {} } = {}) => {
+ wrapper = shallowMountExtended(CiResourcesListItem, {
+ router,
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ stubs: {
+ GlSprintf,
+ RouterLink: true,
+ RouterView: true,
+ },
+ });
+ };
+
+ const findAvatar = () => wrapper.findComponent(GlAvatar);
+ const findBadge = () => wrapper.findComponent(GlBadge);
+ const findResourceName = () => wrapper.findComponent(GlButton);
+ const findResourceDescription = () => wrapper.findByText(defaultProps.resource.description);
+ const findUserLink = () => wrapper.findByTestId('user-link');
+ const findTimeAgoMessage = () => wrapper.findComponent(GlSprintf);
+ const findFavorites = () => wrapper.findByTestId('stats-favorites');
+ const findForks = () => wrapper.findByTestId('stats-forks');
+
+ beforeEach(() => {
+ router = createRouter();
+ routerPush = jest.spyOn(router, 'push').mockImplementation(() => {});
+ });
+
+ describe('when mounted', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders the resource avatar and passes the right props', () => {
+ const { icon, id, name } = defaultProps.resource;
+
+ expect(findAvatar().exists()).toBe(true);
+ expect(findAvatar().props()).toMatchObject({
+ entityId: getIdFromGraphQLId(id),
+ entityName: name,
+ src: icon,
+ });
+ });
+
+ it('renders the resource name button', () => {
+ expect(findResourceName().exists()).toBe(true);
+ });
+
+ it('renders the resource version badge', () => {
+ expect(findBadge().exists()).toBe(true);
+ });
+
+ it('renders the resource description', () => {
+ expect(findResourceDescription().exists()).toBe(true);
+ });
+
+ describe('release time', () => {
+ describe('when there is no release data', () => {
+ beforeEach(() => {
+ createComponent({ props: { resource: { ...resource, latestVersion: null } } });
+ });
+
+ it('does not render the release', () => {
+ expect(findTimeAgoMessage().exists()).toBe(false);
+ });
+
+ it('renders the generic `unreleased` badge', () => {
+ expect(findBadge().exists()).toBe(true);
+ expect(findBadge().text()).toBe('Unreleased');
+ });
+ });
+
+ describe('when there is release data', () => {
+ beforeEach(() => {
+ createComponent({ props: { resource: { ...resource, latestVersion: { ...release } } } });
+ });
+
+ it('renders the user link', () => {
+ expect(findUserLink().exists()).toBe(true);
+ expect(findUserLink().attributes('href')).toBe(release.author.webUrl);
+ });
+
+ it('renders the time since the resource was released', () => {
+ expect(findTimeAgoMessage().exists()).toBe(true);
+ });
+
+ it('renders the version badge', () => {
+ expect(findBadge().exists()).toBe(true);
+ expect(findBadge().text()).toBe(release.tagName);
+ });
+ });
+ });
+ });
+
+ describe('when clicking on an item title', () => {
+ beforeEach(async () => {
+ createComponent();
+
+ await findResourceName().vm.$emit('click');
+ });
+
+ it('navigates to the details page', () => {
+ expect(routerPush).toHaveBeenCalledWith({
+ name: CI_RESOURCE_DETAILS_PAGE_NAME,
+ params: {
+ id: getIdFromGraphQLId(resource.id),
+ },
+ });
+ });
+ });
+
+ describe('when clicking on an item avatar', () => {
+ beforeEach(async () => {
+ createComponent();
+
+ await findAvatar().vm.$emit('click');
+ });
+
+ it('navigates to the details page', () => {
+ expect(routerPush).toHaveBeenCalledWith({
+ name: CI_RESOURCE_DETAILS_PAGE_NAME,
+ params: {
+ id: getIdFromGraphQLId(resource.id),
+ },
+ });
+ });
+ });
+
+ describe('statistics', () => {
+ describe('when there are no statistics', () => {
+ beforeEach(() => {
+ createComponent({
+ props: {
+ resource: {
+ forksCount: 0,
+ starCount: 0,
+ },
+ },
+ });
+ });
+
+ it('render favorites as 0', () => {
+ expect(findFavorites().exists()).toBe(true);
+ expect(findFavorites().text()).toBe('0');
+ });
+
+ it('render forks as 0', () => {
+ expect(findForks().exists()).toBe(true);
+ expect(findForks().text()).toBe('0');
+ });
+ });
+
+ describe('where there are statistics', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('render favorites', () => {
+ expect(findFavorites().exists()).toBe(true);
+ expect(findFavorites().text()).toBe(String(defaultProps.resource.starCount));
+ });
+
+ it('render forks', () => {
+ expect(findForks().exists()).toBe(true);
+ expect(findForks().text()).toBe(String(defaultProps.resource.forksCount));
+ });
+ });
+ });
+});
diff --git a/spec/frontend/ci/catalog/components/list/ci_resources_list_spec.js b/spec/frontend/ci/catalog/components/list/ci_resources_list_spec.js
new file mode 100644
index 00000000000..aca20a83979
--- /dev/null
+++ b/spec/frontend/ci/catalog/components/list/ci_resources_list_spec.js
@@ -0,0 +1,143 @@
+import { GlKeysetPagination } from '@gitlab/ui';
+
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import CiResourcesList from '~/ci/catalog/components/list/ci_resources_list.vue';
+import CiResourcesListItem from '~/ci/catalog/components/list/ci_resources_list_item.vue';
+import { ciCatalogResourcesItemsCount } from '~/ci/catalog/graphql/settings';
+import { catalogResponseBody, catalogSinglePageResponse } from '../../mock';
+
+describe('CiResourcesList', () => {
+ let wrapper;
+
+ const createComponent = ({ props = {} } = {}) => {
+ const { nodes, pageInfo, count } = catalogResponseBody.data.ciCatalogResources;
+
+ const defaultProps = {
+ currentPage: 1,
+ resources: nodes,
+ pageInfo,
+ totalCount: count,
+ };
+
+ wrapper = shallowMountExtended(CiResourcesList, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ stubs: {
+ GlKeysetPagination,
+ },
+ });
+ };
+
+ const findPageCount = () => wrapper.findByTestId('pageCount');
+ const findResourcesListItems = () => wrapper.findAllComponents(CiResourcesListItem);
+ const findPrevBtn = () => wrapper.findByTestId('prevButton');
+ const findNextBtn = () => wrapper.findByTestId('nextButton');
+
+ describe('contains only one page', () => {
+ const { nodes, pageInfo, count } = catalogSinglePageResponse.data.ciCatalogResources;
+
+ beforeEach(async () => {
+ await createComponent({
+ props: { currentPage: 1, resources: nodes, pageInfo, totalCount: count },
+ });
+ });
+
+ it('shows the right number of items', () => {
+ expect(findResourcesListItems()).toHaveLength(nodes.length);
+ });
+
+ it('hides the keyset control for previous page', () => {
+ expect(findPrevBtn().exists()).toBe(false);
+ });
+
+ it('hides the keyset control for next page', () => {
+ expect(findNextBtn().exists()).toBe(false);
+ });
+
+ it('shows the correct count of current page', () => {
+ expect(findPageCount().text()).toContain('1 of 1');
+ });
+ });
+
+ describe.each`
+ hasPreviousPage | hasNextPage | pageText | expectedTotal | currentPage
+ ${false} | ${true} | ${'1 of 3'} | ${ciCatalogResourcesItemsCount} | ${1}
+ ${true} | ${true} | ${'2 of 3'} | ${ciCatalogResourcesItemsCount} | ${2}
+ ${true} | ${false} | ${'3 of 3'} | ${ciCatalogResourcesItemsCount} | ${3}
+ `(
+ 'when on page $pageText',
+ ({ currentPage, expectedTotal, pageText, hasPreviousPage, hasNextPage }) => {
+ const { nodes, pageInfo, count } = catalogResponseBody.data.ciCatalogResources;
+
+ const previousPageState = hasPreviousPage ? 'enabled' : 'disabled';
+ const nextPageState = hasNextPage ? 'enabled' : 'disabled';
+
+ beforeEach(async () => {
+ await createComponent({
+ props: {
+ currentPage,
+ resources: nodes,
+ pageInfo: { ...pageInfo, hasPreviousPage, hasNextPage },
+ totalCount: count,
+ },
+ });
+ });
+
+ it('shows the right number of items', () => {
+ expect(findResourcesListItems()).toHaveLength(expectedTotal);
+ });
+
+ it(`shows the keyset control for previous page as ${previousPageState}`, () => {
+ const disableAttr = findPrevBtn().attributes('disabled');
+
+ if (previousPageState === 'disabled') {
+ expect(disableAttr).toBeDefined();
+ } else {
+ expect(disableAttr).toBeUndefined();
+ }
+ });
+
+ it(`shows the keyset control for next page as ${nextPageState}`, () => {
+ const disableAttr = findNextBtn().attributes('disabled');
+
+ if (nextPageState === 'disabled') {
+ expect(disableAttr).toBeDefined();
+ } else {
+ expect(disableAttr).toBeUndefined();
+ }
+ });
+
+ it('shows the correct count of current page', () => {
+ expect(findPageCount().text()).toContain(pageText);
+ });
+ },
+ );
+
+ describe('when there is an error getting the page count', () => {
+ beforeEach(() => {
+ createComponent({ props: { totalCount: 0 } });
+ });
+
+ it('hides the page count', () => {
+ expect(findPageCount().exists()).toBe(false);
+ });
+ });
+
+ describe('emitted events', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it.each`
+ btnText | elFinder | eventName
+ ${'previous'} | ${findPrevBtn} | ${'onPrevPage'}
+ ${'next'} | ${findNextBtn} | ${'onNextPage'}
+ `('emits $eventName when clicking on the $btnText button', async ({ elFinder, eventName }) => {
+ await elFinder().vm.$emit('click');
+
+ expect(wrapper.emitted(eventName)).toHaveLength(1);
+ });
+ });
+});
diff --git a/spec/frontend/ci/catalog/components/list/empty_state_spec.js b/spec/frontend/ci/catalog/components/list/empty_state_spec.js
new file mode 100644
index 00000000000..f589ad96a9d
--- /dev/null
+++ b/spec/frontend/ci/catalog/components/list/empty_state_spec.js
@@ -0,0 +1,27 @@
+import { GlEmptyState } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import EmptyState from '~/ci/catalog/components/list/empty_state.vue';
+
+describe('EmptyState', () => {
+ let wrapper;
+
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+
+ const createComponent = ({ props = {} } = {}) => {
+ wrapper = shallowMountExtended(EmptyState, {
+ propsData: {
+ ...props,
+ },
+ });
+ };
+
+ describe('when mounted', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders the empty state', () => {
+ expect(findEmptyState().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/ci/catalog/components/pages/ci_resource_details_page_spec.js b/spec/frontend/ci/catalog/components/pages/ci_resource_details_page_spec.js
new file mode 100644
index 00000000000..40f243ed891
--- /dev/null
+++ b/spec/frontend/ci/catalog/components/pages/ci_resource_details_page_spec.js
@@ -0,0 +1,186 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import VueRouter from 'vue-router';
+import { GlEmptyState } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { CI_CATALOG_RESOURCE_TYPE, cacheConfig } from '~/ci/catalog/graphql/settings';
+
+import getCiCatalogResourceSharedData from '~/ci/catalog/graphql/queries/get_ci_catalog_resource_shared_data.query.graphql';
+import getCiCatalogResourceDetails from '~/ci/catalog/graphql/queries/get_ci_catalog_resource_details.query.graphql';
+
+import CiResourceDetails from '~/ci/catalog/components/details/ci_resource_details.vue';
+import CiResourceDetailsPage from '~/ci/catalog/components/pages/ci_resource_details_page.vue';
+import CiResourceHeader from '~/ci/catalog/components/details/ci_resource_header.vue';
+import CiResourceHeaderSkeletonLoader from '~/ci/catalog/components/details/ci_resource_header_skeleton_loader.vue';
+
+import { createRouter } from '~/ci/catalog/router/index';
+import { CI_RESOURCE_DETAILS_PAGE_NAME } from '~/ci/catalog/router/constants';
+import { convertToGraphQLId } from '~/graphql_shared/utils';
+import { catalogSharedDataMock, catalogAdditionalDetailsMock } from '../../mock';
+
+Vue.use(VueApollo);
+Vue.use(VueRouter);
+
+let router;
+
+const defaultSharedData = { ...catalogSharedDataMock.data.ciCatalogResource };
+const defaultAdditionalData = { ...catalogAdditionalDetailsMock.data.ciCatalogResource };
+
+describe('CiResourceDetailsPage', () => {
+ let wrapper;
+ let sharedDataResponse;
+ let additionalDataResponse;
+
+ const defaultProps = {};
+
+ const defaultProvide = {
+ ciCatalogPath: '/ci/catalog/resources',
+ };
+
+ const findDetailsComponent = () => wrapper.findComponent(CiResourceDetails);
+ const findHeaderComponent = () => wrapper.findComponent(CiResourceHeader);
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+ const findHeaderSkeletonLoader = () => wrapper.findComponent(CiResourceHeaderSkeletonLoader);
+
+ const createComponent = ({ props = {} } = {}) => {
+ const handlers = [
+ [getCiCatalogResourceSharedData, sharedDataResponse],
+ [getCiCatalogResourceDetails, additionalDataResponse],
+ ];
+
+ const mockApollo = createMockApollo(handlers, undefined, cacheConfig);
+
+ wrapper = shallowMount(CiResourceDetailsPage, {
+ router,
+ apolloProvider: mockApollo,
+ provide: {
+ ...defaultProvide,
+ },
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ stubs: {
+ RouterView: true,
+ },
+ });
+ };
+
+ beforeEach(async () => {
+ sharedDataResponse = jest.fn();
+ additionalDataResponse = jest.fn();
+
+ router = createRouter();
+ await router.push({
+ name: CI_RESOURCE_DETAILS_PAGE_NAME,
+ params: { id: defaultSharedData.id },
+ });
+ });
+
+ describe('when the app is loading', () => {
+ describe('and shared data is pre-fetched', () => {
+ beforeEach(() => {
+ // By mocking a return value and not a promise, we skip the loading
+ // to simulate having the pre-fetched query
+ sharedDataResponse.mockReturnValueOnce(catalogSharedDataMock);
+ additionalDataResponse.mockResolvedValue(catalogAdditionalDetailsMock);
+ createComponent();
+ });
+
+ it('renders the header skeleton loader', () => {
+ expect(findHeaderSkeletonLoader().exists()).toBe(false);
+ });
+
+ it('passes down the loading state to the header component', () => {
+ sharedDataResponse.mockReturnValueOnce(catalogSharedDataMock);
+
+ expect(findHeaderComponent().props()).toMatchObject({
+ isLoadingDetails: true,
+ isLoadingSharedData: false,
+ });
+ });
+ });
+
+ describe('and shared data is not pre-fetched', () => {
+ beforeEach(() => {
+ sharedDataResponse.mockResolvedValue(catalogSharedDataMock);
+ additionalDataResponse.mockResolvedValue(catalogAdditionalDetailsMock);
+ createComponent();
+ });
+
+ it('does not render the header skeleton', () => {
+ expect(findHeaderSkeletonLoader().exists()).toBe(false);
+ });
+
+ it('passes all loading state to the header component as true', () => {
+ expect(findHeaderComponent().props()).toMatchObject({
+ isLoadingDetails: true,
+ isLoadingSharedData: true,
+ });
+ });
+ });
+ });
+
+ describe('and there are no resources', () => {
+ beforeEach(async () => {
+ const mockError = new Error('error');
+ sharedDataResponse.mockRejectedValue(mockError);
+ additionalDataResponse.mockRejectedValue(mockError);
+
+ createComponent();
+ await waitForPromises();
+ });
+
+ it('renders the empty state', () => {
+ expect(findDetailsComponent().exists()).toBe(false);
+ expect(findEmptyState().exists()).toBe(true);
+ expect(findEmptyState().props('primaryButtonLink')).toBe(defaultProvide.ciCatalogPath);
+ });
+ });
+
+ describe('when data has loaded', () => {
+ beforeEach(async () => {
+ sharedDataResponse.mockResolvedValue(catalogSharedDataMock);
+ additionalDataResponse.mockResolvedValue(catalogAdditionalDetailsMock);
+ createComponent();
+
+ await waitForPromises();
+ });
+
+ it('does not render the header skeleton loader', () => {
+ expect(findHeaderSkeletonLoader().exists()).toBe(false);
+ });
+
+ describe('Catalog header', () => {
+ it('exists', () => {
+ expect(findHeaderComponent().exists()).toBe(true);
+ });
+
+ it('passes expected props', () => {
+ expect(findHeaderComponent().props()).toEqual({
+ isLoadingDetails: false,
+ isLoadingSharedData: false,
+ openIssuesCount: defaultAdditionalData.openIssuesCount,
+ openMergeRequestsCount: defaultAdditionalData.openMergeRequestsCount,
+ pipelineStatus:
+ defaultAdditionalData.versions.nodes[0].commit.pipelines.nodes[0].detailedStatus,
+ resource: defaultSharedData,
+ });
+ });
+ });
+
+ describe('Catalog details', () => {
+ it('exists', () => {
+ expect(findDetailsComponent().exists()).toBe(true);
+ });
+
+ it('passes expected props', () => {
+ expect(findDetailsComponent().props()).toEqual({
+ resourceId: convertToGraphQLId(CI_CATALOG_RESOURCE_TYPE, defaultAdditionalData.id),
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/ci/catalog/mock.js b/spec/frontend/ci/catalog/mock.js
new file mode 100644
index 00000000000..21fed6ac8ec
--- /dev/null
+++ b/spec/frontend/ci/catalog/mock.js
@@ -0,0 +1,546 @@
+import { componentsMockData } from '~/ci/catalog/constants';
+
+export const catalogResponseBody = {
+ data: {
+ ciCatalogResources: {
+ pageInfo: {
+ startCursor:
+ 'eyJjcmVhdGVkX2F0IjoiMjAxNS0wNy0wMyAxMDowMDowMC4wMDAwMDAwMDAgKzAwMDAiLCJpZCI6IjEyOSJ9',
+ endCursor:
+ 'eyJjcmVhdGVkX2F0IjoiMjAxNS0wNy0wMyAxMDowMDowMC4wMDAwMDAwMDAgKzAwMDAiLCJpZCI6IjExMCJ9',
+ hasNextPage: true,
+ hasPreviousPage: false,
+ __typename: 'PageInfo',
+ },
+ count: 41,
+ nodes: [
+ {
+ id: 'gid://gitlab/Ci::Catalog::Resource/129',
+ icon: null,
+ name: 'Project-42 Name',
+ description: 'A simple component',
+ starCount: 0,
+ forksCount: 0,
+ latestVersion: null,
+ rootNamespace: {
+ id: 'gid://gitlab/Group/185',
+ fullPath: 'frontend-fixtures',
+ name: 'frontend-fixtures',
+ __typename: 'Namespace',
+ },
+ webPath: '/frontend-fixtures/project-42',
+ __typename: 'CiCatalogResource',
+ },
+ {
+ id: 'gid://gitlab/Ci::Catalog::Resource/128',
+ icon: null,
+ name: 'Project-41 Name',
+ description: 'A simple component',
+ starCount: 0,
+ forksCount: 0,
+ latestVersion: null,
+ rootNamespace: {
+ id: 'gid://gitlab/Group/185',
+ fullPath: 'frontend-fixtures',
+ name: 'frontend-fixtures',
+ __typename: 'Namespace',
+ },
+ webPath: '/frontend-fixtures/project-41',
+ __typename: 'CiCatalogResource',
+ },
+ {
+ id: 'gid://gitlab/Ci::Catalog::Resource/127',
+ icon: null,
+ name: 'Project-40 Name',
+ description: 'A simple component',
+ starCount: 0,
+ forksCount: 0,
+ latestVersion: null,
+ rootNamespace: {
+ id: 'gid://gitlab/Group/185',
+ fullPath: 'frontend-fixtures',
+ name: 'frontend-fixtures',
+ __typename: 'Namespace',
+ },
+ webPath: '/frontend-fixtures/project-40',
+ __typename: 'CiCatalogResource',
+ },
+ {
+ id: 'gid://gitlab/Ci::Catalog::Resource/126',
+ icon: null,
+ name: 'Project-39 Name',
+ description: 'A simple component',
+ starCount: 0,
+ forksCount: 0,
+ latestVersion: null,
+ rootNamespace: {
+ id: 'gid://gitlab/Group/185',
+ fullPath: 'frontend-fixtures',
+ name: 'frontend-fixtures',
+ __typename: 'Namespace',
+ },
+ webPath: '/frontend-fixtures/project-39',
+ __typename: 'CiCatalogResource',
+ },
+ {
+ id: 'gid://gitlab/Ci::Catalog::Resource/125',
+ icon: null,
+ name: 'Project-38 Name',
+ description: 'A simple component',
+ starCount: 0,
+ forksCount: 0,
+ latestVersion: null,
+ rootNamespace: {
+ id: 'gid://gitlab/Group/185',
+ fullPath: 'frontend-fixtures',
+ name: 'frontend-fixtures',
+ __typename: 'Namespace',
+ },
+ webPath: '/frontend-fixtures/project-38',
+ __typename: 'CiCatalogResource',
+ },
+ {
+ id: 'gid://gitlab/Ci::Catalog::Resource/124',
+ icon: null,
+ name: 'Project-37 Name',
+ description: 'A simple component',
+ starCount: 0,
+ forksCount: 0,
+ latestVersion: null,
+ rootNamespace: {
+ id: 'gid://gitlab/Group/185',
+ fullPath: 'frontend-fixtures',
+ name: 'frontend-fixtures',
+ __typename: 'Namespace',
+ },
+ webPath: '/frontend-fixtures/project-37',
+ __typename: 'CiCatalogResource',
+ },
+ {
+ id: 'gid://gitlab/Ci::Catalog::Resource/123',
+ icon: null,
+ name: 'Project-36 Name',
+ description: 'A simple component',
+ starCount: 0,
+ forksCount: 0,
+ latestVersion: null,
+ rootNamespace: {
+ id: 'gid://gitlab/Group/185',
+ fullPath: 'frontend-fixtures',
+ name: 'frontend-fixtures',
+ __typename: 'Namespace',
+ },
+ webPath: '/frontend-fixtures/project-36',
+ __typename: 'CiCatalogResource',
+ },
+ {
+ id: 'gid://gitlab/Ci::Catalog::Resource/122',
+ icon: null,
+ name: 'Project-35 Name',
+ description: 'A simple component',
+ starCount: 0,
+ forksCount: 0,
+ latestVersion: null,
+ rootNamespace: {
+ id: 'gid://gitlab/Group/185',
+ fullPath: 'frontend-fixtures',
+ name: 'frontend-fixtures',
+ __typename: 'Namespace',
+ },
+ webPath: '/frontend-fixtures/project-35',
+ __typename: 'CiCatalogResource',
+ },
+ {
+ id: 'gid://gitlab/Ci::Catalog::Resource/121',
+ icon: null,
+ name: 'Project-34 Name',
+ description: 'A simple component',
+ starCount: 0,
+ forksCount: 0,
+ latestVersion: null,
+ rootNamespace: {
+ id: 'gid://gitlab/Group/185',
+ fullPath: 'frontend-fixtures',
+ name: 'frontend-fixtures',
+ __typename: 'Namespace',
+ },
+ webPath: '/frontend-fixtures/project-34',
+ __typename: 'CiCatalogResource',
+ },
+ {
+ id: 'gid://gitlab/Ci::Catalog::Resource/120',
+ icon: null,
+ name: 'Project-33 Name',
+ description: 'A simple component',
+ starCount: 0,
+ forksCount: 0,
+ latestVersion: null,
+ rootNamespace: {
+ id: 'gid://gitlab/Group/185',
+ fullPath: 'frontend-fixtures',
+ name: 'frontend-fixtures',
+ __typename: 'Namespace',
+ },
+ webPath: '/frontend-fixtures/project-33',
+ __typename: 'CiCatalogResource',
+ },
+ {
+ id: 'gid://gitlab/Ci::Catalog::Resource/119',
+ icon: null,
+ name: 'Project-32 Name',
+ description: 'A simple component',
+ starCount: 0,
+ forksCount: 0,
+ latestVersion: null,
+ rootNamespace: {
+ id: 'gid://gitlab/Group/185',
+ fullPath: 'frontend-fixtures',
+ name: 'frontend-fixtures',
+ __typename: 'Namespace',
+ },
+ webPath: '/frontend-fixtures/project-32',
+ __typename: 'CiCatalogResource',
+ },
+ {
+ id: 'gid://gitlab/Ci::Catalog::Resource/118',
+ icon: null,
+ name: 'Project-31 Name',
+ description: 'A simple component',
+ starCount: 0,
+ forksCount: 0,
+ latestVersion: null,
+ rootNamespace: {
+ id: 'gid://gitlab/Group/185',
+ fullPath: 'frontend-fixtures',
+ name: 'frontend-fixtures',
+ __typename: 'Namespace',
+ },
+ webPath: '/frontend-fixtures/project-31',
+ __typename: 'CiCatalogResource',
+ },
+ {
+ id: 'gid://gitlab/Ci::Catalog::Resource/117',
+ icon: null,
+ name: 'Project-30 Name',
+ description: 'A simple component',
+ starCount: 0,
+ forksCount: 0,
+ latestVersion: null,
+ rootNamespace: {
+ id: 'gid://gitlab/Group/185',
+ fullPath: 'frontend-fixtures',
+ name: 'frontend-fixtures',
+ __typename: 'Namespace',
+ },
+ webPath: '/frontend-fixtures/project-30',
+ __typename: 'CiCatalogResource',
+ },
+ {
+ id: 'gid://gitlab/Ci::Catalog::Resource/116',
+ icon: null,
+ name: 'Project-29 Name',
+ description: 'A simple component',
+ starCount: 0,
+ forksCount: 0,
+ latestVersion: null,
+ rootNamespace: {
+ id: 'gid://gitlab/Group/185',
+ fullPath: 'frontend-fixtures',
+ name: 'frontend-fixtures',
+ __typename: 'Namespace',
+ },
+ webPath: '/frontend-fixtures/project-29',
+ __typename: 'CiCatalogResource',
+ },
+ {
+ id: 'gid://gitlab/Ci::Catalog::Resource/115',
+ icon: null,
+ name: 'Project-28 Name',
+ description: 'A simple component',
+ starCount: 0,
+ forksCount: 0,
+ latestVersion: null,
+ rootNamespace: {
+ id: 'gid://gitlab/Group/185',
+ fullPath: 'frontend-fixtures',
+ name: 'frontend-fixtures',
+ __typename: 'Namespace',
+ },
+ webPath: '/frontend-fixtures/project-28',
+ __typename: 'CiCatalogResource',
+ },
+ {
+ id: 'gid://gitlab/Ci::Catalog::Resource/114',
+ icon: null,
+ name: 'Project-27 Name',
+ description: 'A simple component',
+ starCount: 0,
+ forksCount: 0,
+ latestVersion: null,
+ rootNamespace: {
+ id: 'gid://gitlab/Group/185',
+ fullPath: 'frontend-fixtures',
+ name: 'frontend-fixtures',
+ __typename: 'Namespace',
+ },
+ webPath: '/frontend-fixtures/project-27',
+ __typename: 'CiCatalogResource',
+ },
+ {
+ id: 'gid://gitlab/Ci::Catalog::Resource/113',
+ icon: null,
+ name: 'Project-26 Name',
+ description: 'A simple component',
+ starCount: 0,
+ forksCount: 0,
+ latestVersion: null,
+ rootNamespace: {
+ id: 'gid://gitlab/Group/185',
+ fullPath: 'frontend-fixtures',
+ name: 'frontend-fixtures',
+ __typename: 'Namespace',
+ },
+ webPath: '/frontend-fixtures/project-26',
+ __typename: 'CiCatalogResource',
+ },
+ {
+ id: 'gid://gitlab/Ci::Catalog::Resource/112',
+ icon: null,
+ name: 'Project-25 Name',
+ description: 'A simple component',
+ starCount: 0,
+ forksCount: 0,
+ latestVersion: null,
+ rootNamespace: {
+ id: 'gid://gitlab/Group/185',
+ fullPath: 'frontend-fixtures',
+ name: 'frontend-fixtures',
+ __typename: 'Namespace',
+ },
+ webPath: '/frontend-fixtures/project-25',
+ __typename: 'CiCatalogResource',
+ },
+ {
+ id: 'gid://gitlab/Ci::Catalog::Resource/111',
+ icon: null,
+ name: 'Project-24 Name',
+ description: 'A simple component',
+ starCount: 0,
+ forksCount: 0,
+ latestVersion: null,
+ rootNamespace: {
+ id: 'gid://gitlab/Group/185',
+ fullPath: 'frontend-fixtures',
+ name: 'frontend-fixtures',
+ __typename: 'Namespace',
+ },
+ webPath: '/frontend-fixtures/project-24',
+ __typename: 'CiCatalogResource',
+ },
+ {
+ id: 'gid://gitlab/Ci::Catalog::Resource/110',
+ icon: null,
+ name: 'Project-23 Name',
+ description: 'A simple component',
+ starCount: 0,
+ forksCount: 0,
+ latestVersion: null,
+ rootNamespace: {
+ id: 'gid://gitlab/Group/185',
+ fullPath: 'frontend-fixtures',
+ name: 'frontend-fixtures',
+ __typename: 'Namespace',
+ },
+ webPath: '/frontend-fixtures/project-23',
+ __typename: 'CiCatalogResource',
+ },
+ ],
+ __typename: 'CiCatalogResourceConnection',
+ },
+ },
+};
+
+export const catalogSinglePageResponse = {
+ data: {
+ ciCatalogResources: {
+ pageInfo: {
+ startCursor:
+ 'eyJjcmVhdGVkX2F0IjoiMjAxNS0wNy0wMyAxMDowMDowMC4wMDAwMDAwMDAgKzAwMDAiLCJpZCI6IjEzMiJ9',
+ endCursor:
+ 'eyJjcmVhdGVkX2F0IjoiMjAxNS0wNy0wMyAxMDowMDowMC4wMDAwMDAwMDAgKzAwMDAiLCJpZCI6IjEzMCJ9',
+ hasNextPage: false,
+ hasPreviousPage: false,
+ __typename: 'PageInfo',
+ },
+ count: 3,
+ nodes: [
+ {
+ id: 'gid://gitlab/Ci::Catalog::Resource/132',
+ icon: null,
+ name: 'Project-45 Name',
+ description: 'A simple component',
+ starCount: 0,
+ forksCount: 0,
+ latestVersion: null,
+ rootNamespace: {
+ id: 'gid://gitlab/Group/185',
+ fullPath: 'frontend-fixtures',
+ name: 'frontend-fixtures',
+ __typename: 'Namespace',
+ },
+ webPath: '/frontend-fixtures/project-45',
+ __typename: 'CiCatalogResource',
+ },
+ {
+ id: 'gid://gitlab/Ci::Catalog::Resource/131',
+ icon: null,
+ name: 'Project-44 Name',
+ description: 'A simple component',
+ starCount: 0,
+ forksCount: 0,
+ latestVersion: null,
+ rootNamespace: {
+ id: 'gid://gitlab/Group/185',
+ fullPath: 'frontend-fixtures',
+ name: 'frontend-fixtures',
+ __typename: 'Namespace',
+ },
+ webPath: '/frontend-fixtures/project-44',
+ __typename: 'CiCatalogResource',
+ },
+ {
+ id: 'gid://gitlab/Ci::Catalog::Resource/130',
+ icon: null,
+ name: 'Project-43 Name',
+ description: 'A simple component',
+ starCount: 0,
+ forksCount: 0,
+ latestVersion: null,
+ rootNamespace: {
+ id: 'gid://gitlab/Group/185',
+ fullPath: 'frontend-fixtures',
+ name: 'frontend-fixtures',
+ __typename: 'Namespace',
+ },
+ webPath: '/frontend-fixtures/project-43',
+ __typename: 'CiCatalogResource',
+ },
+ ],
+ __typename: 'CiCatalogResourceConnection',
+ },
+ },
+};
+
+export const catalogSharedDataMock = {
+ data: {
+ ciCatalogResource: {
+ __typename: 'CiCatalogResource',
+ id: `gid://gitlab/CiCatalogResource/1`,
+ icon: null,
+ description: 'This is the description of the repo',
+ name: 'Ruby',
+ rootNamespace: { id: 1, fullPath: '/group/project', name: 'my-dumb-project' },
+ starCount: 1,
+ forksCount: 2,
+ latestVersion: {
+ __typename: 'Release',
+ id: '3',
+ tagName: '1.0.0',
+ tagPath: 'path/to/release',
+ releasedAt: Date.now(),
+ author: { id: 1, webUrl: 'profile/1', name: 'username' },
+ },
+ webPath: 'path/to/project',
+ },
+ },
+};
+
+export const catalogAdditionalDetailsMock = {
+ data: {
+ ciCatalogResource: {
+ __typename: 'CiCatalogResource',
+ id: `gid://gitlab/CiCatalogResource/1`,
+ openIssuesCount: 4,
+ openMergeRequestsCount: 10,
+ readmeHtml: '<h1>Hello world</h1>',
+ versions: {
+ __typename: 'ReleaseConnection',
+ nodes: [
+ {
+ __typename: 'Release',
+ id: 'gid://gitlab/Release/3',
+ commit: {
+ __typename: 'Commit',
+ id: 'gid://gitlab/CommitPresenter/afa936495f20e08c26ed4a67130ee2166f94fa6e',
+ pipelines: {
+ __typename: 'PipelineConnection',
+ nodes: [
+ {
+ __typename: 'Pipeline',
+ id: 'gid://gitlab/Ci::Pipeline/583',
+ detailedStatus: {
+ __typename: 'DetailedStatus',
+ id: 'success-583-583',
+ detailsPath: '/root/cicd-circular/-/pipelines/583',
+ icon: 'status_success',
+ text: 'passed',
+ group: 'success',
+ },
+ },
+ ],
+ },
+ },
+ tagName: 'v1.0.2',
+ releasedAt: '2022-08-23T17:19:09Z',
+ },
+ ],
+ },
+ },
+ },
+};
+
+const generateResourcesNodes = (count = 20, startId = 0) => {
+ const nodes = [];
+ for (let i = startId; i < startId + count; i += 1) {
+ nodes.push({
+ __typename: 'CiCatalogResource',
+ id: `gid://gitlab/CiCatalogResource/${i}`,
+ description: `This is a component that does a bunch of stuff and is really just a number: ${i}`,
+ forksCount: 5,
+ icon: 'my-icon',
+ name: `My component #${i}`,
+ rootNamespace: {
+ id: 1,
+ __typename: 'Namespace',
+ name: 'namespaceName',
+ path: 'namespacePath',
+ },
+ starCount: 10,
+ latestVersion: {
+ __typename: 'Release',
+ id: '3',
+ tagName: '1.0.0',
+ tagPath: 'path/to/release',
+ releasedAt: Date.now(),
+ author: { id: 1, webUrl: 'profile/1', name: 'username' },
+ },
+ webPath: 'path/to/project',
+ });
+ }
+
+ return nodes;
+};
+
+export const mockCatalogResourceItem = generateResourcesNodes(1)[0];
+
+export const mockComponents = {
+ data: {
+ ciCatalogResource: {
+ __typename: 'CiCatalogResource',
+ id: `gid://gitlab/CiCatalogResource/1`,
+ components: {
+ ...componentsMockData,
+ },
+ },
+ },
+};
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_environments_dropdown_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_environments_dropdown_spec.js
index 64227872af3..353b5fd3c47 100644
--- a/spec/frontend/ci/ci_variable_list/components/ci_environments_dropdown_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_environments_dropdown_spec.js
@@ -1,10 +1,4 @@
-import {
- GlListboxItem,
- GlCollapsibleListbox,
- GlDropdownDivider,
- GlDropdownItem,
- GlIcon,
-} from '@gitlab/ui';
+import { GlListboxItem, GlCollapsibleListbox, GlDropdownDivider, GlIcon } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import { allEnvironments, ENVIRONMENT_QUERY_LIMIT } from '~/ci/ci_variable_list/constants';
import CiEnvironmentsDropdown from '~/ci/ci_variable_list/components/ci_environments_dropdown.vue';
@@ -16,7 +10,6 @@ describe('Ci environments dropdown', () => {
const defaultProps = {
areEnvironmentsLoading: false,
environments: envs,
- hasEnvScopeQuery: false,
selectedEnvironmentScope: '',
};
@@ -25,7 +18,7 @@ describe('Ci environments dropdown', () => {
const findActiveIconByIndex = (index) => findListboxItemByIndex(index).findComponent(GlIcon);
const findListbox = () => wrapper.findComponent(GlCollapsibleListbox);
const findListboxText = () => findListbox().props('toggleText');
- const findCreateWildcardButton = () => wrapper.findComponent(GlDropdownItem);
+ const findCreateWildcardButton = () => wrapper.findByTestId('create-wildcard-button');
const findDropdownDivider = () => wrapper.findComponent(GlDropdownDivider);
const findMaxEnvNote = () => wrapper.findByTestId('max-envs-notice');
@@ -57,32 +50,23 @@ describe('Ci environments dropdown', () => {
});
describe('Search term is empty', () => {
- describe.each`
- hasEnvScopeQuery | status | defaultEnvStatus | firstItemValue | envIndices
- ${true} | ${'exists'} | ${'prepends'} | ${'*'} | ${[1, 2, 3]}
- ${false} | ${'does not exist'} | ${'does not prepend'} | ${envs[0]} | ${[0, 1, 2]}
- `(
- 'when query for fetching environment scope $status',
- ({ defaultEnvStatus, firstItemValue, hasEnvScopeQuery, envIndices }) => {
- beforeEach(() => {
- createComponent({ props: { environments: envs, hasEnvScopeQuery } });
- });
-
- it(`${defaultEnvStatus} * in listbox`, () => {
- expect(findListboxItemByIndex(0).text()).toBe(firstItemValue);
- });
-
- it('renders all environments', () => {
- expect(findListboxItemByIndex(envIndices[0]).text()).toBe(envs[0]);
- expect(findListboxItemByIndex(envIndices[1]).text()).toBe(envs[1]);
- expect(findListboxItemByIndex(envIndices[2]).text()).toBe(envs[2]);
- });
-
- it('does not display active checkmark', () => {
- expect(findActiveIconByIndex(0).classes('gl-visibility-hidden')).toBe(true);
- });
- },
- );
+ beforeEach(() => {
+ createComponent({ props: { environments: envs } });
+ });
+
+ it(`prepends * in listbox`, () => {
+ expect(findListboxItemByIndex(0).text()).toBe('*');
+ });
+
+ it('renders all environments', () => {
+ expect(findListboxItemByIndex(1).text()).toBe(envs[0]);
+ expect(findListboxItemByIndex(2).text()).toBe(envs[1]);
+ expect(findListboxItemByIndex(3).text()).toBe(envs[2]);
+ });
+
+ it('does not display active checkmark', () => {
+ expect(findActiveIconByIndex(0).classes('gl-visibility-hidden')).toBe(true);
+ });
});
describe('when `*` is the value of selectedEnvironmentScope props', () => {
@@ -98,40 +82,13 @@ describe('Ci environments dropdown', () => {
});
});
- describe('when environments are not fetched via graphql', () => {
+ describe('when fetching environments', () => {
const currentEnv = envs[2];
beforeEach(() => {
createComponent();
});
- it('filters on the frontend and renders only the environment searched for', async () => {
- await findListbox().vm.$emit('search', currentEnv);
-
- expect(findAllListboxItems()).toHaveLength(1);
- expect(findListboxItemByIndex(0).text()).toBe(currentEnv);
- });
-
- it('does not emit event when searching', async () => {
- expect(wrapper.emitted('search-environment-scope')).toBeUndefined();
-
- await findListbox().vm.$emit('search', currentEnv);
-
- expect(wrapper.emitted('search-environment-scope')).toBeUndefined();
- });
-
- it('does not display note about max environments shown', () => {
- expect(findMaxEnvNote().exists()).toBe(false);
- });
- });
-
- describe('when fetching environments via graphql', () => {
- const currentEnv = envs[2];
-
- beforeEach(() => {
- createComponent({ props: { hasEnvScopeQuery: true } });
- });
-
it('renders dropdown divider', () => {
expect(findDropdownDivider().exists()).toBe(true);
});
@@ -143,7 +100,7 @@ describe('Ci environments dropdown', () => {
});
it('renders dropdown loading icon while fetch query is loading', () => {
- createComponent({ props: { areEnvironmentsLoading: true, hasEnvScopeQuery: true } });
+ createComponent({ props: { areEnvironmentsLoading: true } });
expect(findListbox().props('loading')).toBe(true);
expect(findListbox().props('searching')).toBe(false);
@@ -151,7 +108,7 @@ describe('Ci environments dropdown', () => {
});
it('renders search loading icon while search query is loading and dropdown is open', async () => {
- createComponent({ props: { areEnvironmentsLoading: true, hasEnvScopeQuery: true } });
+ createComponent({ props: { areEnvironmentsLoading: true } });
await findListbox().vm.$emit('shown');
expect(findListbox().props('loading')).toBe(false);
@@ -188,16 +145,35 @@ describe('Ci environments dropdown', () => {
});
});
- describe('when creating a new environment from a search term', () => {
- const search = 'new-env';
+ describe('when creating a new environment scope from a search term', () => {
+ const searchTerm = 'new-env';
beforeEach(() => {
- createComponent({ searchTerm: search });
+ createComponent({ searchTerm });
});
- it('emits create-environment-scope', () => {
- findCreateWildcardButton().vm.$emit('click');
+ it('sets new environment scope as the selected environment scope', async () => {
+ findCreateWildcardButton().trigger('click');
+
+ await findListbox().vm.$emit('search', searchTerm);
+
+ expect(findListbox().props('selected')).toBe(searchTerm);
+ });
+
+ it('includes new environment scope in search if it matches search term', async () => {
+ findCreateWildcardButton().trigger('click');
+
+ await findListbox().vm.$emit('search', searchTerm);
+
+ expect(findAllListboxItems()).toHaveLength(envs.length + 1);
+ expect(findListboxItemByIndex(1).text()).toBe(searchTerm);
+ });
+
+ it('excludes new environment scope in search if it does not match the search term', async () => {
+ findCreateWildcardButton().trigger('click');
+
+ await findListbox().vm.$emit('search', 'not-new-env');
- expect(wrapper.emitted('create-environment-scope')).toEqual([[search]]);
+ expect(findAllListboxItems()).toHaveLength(envs.length);
});
});
});
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js
index ab5d914a6a1..207ea7aa060 100644
--- a/spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js
@@ -1,4 +1,5 @@
-import { GlDrawer, GlFormCombobox, GlFormInput, GlFormSelect } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import { GlDrawer, GlFormCombobox, GlFormInput, GlFormSelect, GlModal } from '@gitlab/ui';
import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
import CiEnvironmentsDropdown from '~/ci/ci_variable_list/components/ci_environments_dropdown.vue';
import CiVariableDrawer from '~/ci/ci_variable_list/components/ci_variable_drawer.vue';
@@ -67,6 +68,8 @@ describe('CI Variable Drawer', () => {
};
const findConfirmBtn = () => wrapper.findByTestId('ci-variable-confirm-btn');
+ const findConfirmDeleteModal = () => wrapper.findComponent(GlModal);
+ const findDeleteBtn = () => wrapper.findByTestId('ci-variable-delete-btn');
const findDisabledEnvironmentScopeDropdown = () => wrapper.findComponent(GlFormInput);
const findDrawer = () => wrapper.findComponent(GlDrawer);
const findEnvironmentScopeDropdown = () => wrapper.findComponent(CiEnvironmentsDropdown);
@@ -363,22 +366,118 @@ describe('CI Variable Drawer', () => {
});
it('title and confirm button renders the correct text', () => {
- expect(findTitle().text()).toBe('Add Variable');
- expect(findConfirmBtn().text()).toBe('Add Variable');
+ expect(findTitle().text()).toBe('Add variable');
+ expect(findConfirmBtn().text()).toBe('Add variable');
+ });
+
+ it('does not render delete button', () => {
+ expect(findDeleteBtn().exists()).toBe(false);
+ });
+
+ it('dispatches the add-variable event', async () => {
+ await findKeyField().vm.$emit('input', 'NEW_VARIABLE');
+ await findProtectedCheckbox().vm.$emit('input', false);
+ await findExpandedCheckbox().vm.$emit('input', true);
+ await findMaskedCheckbox().vm.$emit('input', true);
+ await findValueField().vm.$emit('input', 'NEW_VALUE');
+
+ findConfirmBtn().vm.$emit('click');
+
+ expect(wrapper.emitted('add-variable')).toEqual([
+ [
+ {
+ environmentScope: '*',
+ key: 'NEW_VARIABLE',
+ masked: true,
+ protected: false,
+ raw: false, // opposite of expanded
+ value: 'NEW_VALUE',
+ variableType: 'ENV_VAR',
+ },
+ ],
+ ]);
});
});
describe('when editing a variable', () => {
beforeEach(() => {
createComponent({
- props: { mode: EDIT_VARIABLE_ACTION },
+ props: { mode: EDIT_VARIABLE_ACTION, selectedVariable: mockProjectVariableFileType },
stubs: { GlDrawer },
});
});
it('title and confirm button renders the correct text', () => {
- expect(findTitle().text()).toBe('Edit Variable');
- expect(findConfirmBtn().text()).toBe('Edit Variable');
+ expect(findTitle().text()).toBe('Edit variable');
+ expect(findConfirmBtn().text()).toBe('Edit variable');
+ });
+
+ it('dispatches the edit-variable event', async () => {
+ await findValueField().vm.$emit('input', 'EDITED_VALUE');
+
+ findConfirmBtn().vm.$emit('click');
+
+ expect(wrapper.emitted('update-variable')).toEqual([
+ [
+ {
+ ...mockProjectVariableFileType,
+ value: 'EDITED_VALUE',
+ },
+ ],
+ ]);
+ });
+ });
+
+ describe('when deleting a variable', () => {
+ beforeEach(() => {
+ createComponent({
+ mountFn: mountExtended,
+ props: { mode: EDIT_VARIABLE_ACTION, selectedVariable: mockProjectVariableFileType },
+ });
+ });
+
+ it('bubbles up the delete-variable event', async () => {
+ findDeleteBtn().vm.$emit('click');
+
+ await nextTick();
+
+ findConfirmDeleteModal().vm.$emit('primary');
+
+ expect(wrapper.emitted('delete-variable')).toEqual([[mockProjectVariableFileType]]);
+ });
+ });
+
+ describe('environment scope events', () => {
+ beforeEach(() => {
+ createComponent({
+ mountFn: mountExtended,
+ props: {
+ mode: EDIT_VARIABLE_ACTION,
+ selectedVariable: mockProjectVariableFileType,
+ areScopedVariablesAvailable: true,
+ hideEnvironmentScope: false,
+ },
+ });
+ });
+
+ it('sets the environment scope', async () => {
+ await findEnvironmentScopeDropdown().vm.$emit('select-environment', 'staging');
+ await findConfirmBtn().vm.$emit('click');
+
+ expect(wrapper.emitted('update-variable')).toEqual([
+ [
+ {
+ ...mockProjectVariableFileType,
+ environmentScope: 'staging',
+ },
+ ],
+ ]);
+ });
+
+ it('bubbles up the search event', async () => {
+ await findEnvironmentScopeDropdown().vm.$emit('search-environment-scope', 'staging');
+
+ expect(wrapper.emitted('search-environment-scope')).toEqual([['staging']]);
});
});
});
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_variable_modal_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_modal_spec.js
index 7dce23f72c0..5ba9b3b8c20 100644
--- a/spec/frontend/ci/ci_variable_list/components/ci_variable_modal_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_modal_spec.js
@@ -12,12 +12,10 @@ import {
ENVIRONMENT_SCOPE_LINK_TITLE,
AWS_TIP_TITLE,
AWS_TIP_MESSAGE,
- groupString,
instanceString,
- projectString,
variableOptions,
} from '~/ci/ci_variable_list/constants';
-import { mockEnvs, mockVariablesWithScopes, mockVariablesWithUniqueScopes } from '../mocks';
+import { mockVariablesWithScopes } from '../mocks';
import ModalStub from '../stubs';
describe('Ci variable modal', () => {
@@ -46,7 +44,6 @@ describe('Ci variable modal', () => {
areScopedVariablesAvailable: true,
environments: [],
hideEnvironmentScope: false,
- hasEnvScopeQuery: false,
mode: ADD_VARIABLE_ACTION,
selectedVariable: {},
variables: [],
@@ -352,42 +349,6 @@ describe('Ci variable modal', () => {
expect(link.attributes('title')).toBe(ENVIRONMENT_SCOPE_LINK_TITLE);
expect(link.attributes('href')).toBe(defaultProvide.environmentScopeLink);
});
-
- describe('when query for envioronment scope exists', () => {
- beforeEach(() => {
- createComponent({
- props: {
- environments: mockEnvs,
- hasEnvScopeQuery: true,
- variables: mockVariablesWithUniqueScopes(projectString),
- },
- });
- });
-
- it('does not merge environment scope sources', () => {
- const expectedLength = mockEnvs.length;
-
- expect(findCiEnvironmentsDropdown().props('environments')).toHaveLength(expectedLength);
- });
- });
-
- describe('when feature flag is disabled', () => {
- const mockGroupVariables = mockVariablesWithUniqueScopes(groupString);
- beforeEach(() => {
- createComponent({
- props: {
- environments: mockEnvs,
- variables: mockGroupVariables,
- },
- });
- });
-
- it('merges environment scope sources', () => {
- const expectedLength = mockGroupVariables.length + mockEnvs.length;
-
- expect(findCiEnvironmentsDropdown().props('environments')).toHaveLength(expectedLength);
- });
- });
});
describe('and section is hidden', () => {
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js
index 79dd638e2bd..04145c2c6aa 100644
--- a/spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js
@@ -23,7 +23,6 @@ describe('Ci variable table', () => {
environments: mapEnvironmentNames(mockEnvs),
hideEnvironmentScope: false,
isLoading: false,
- hasEnvScopeQuery: false,
maxVariableLimit: 5,
pageInfo: { after: '' },
variables: mockVariablesWithScopes(projectString),
@@ -70,7 +69,6 @@ describe('Ci variable table', () => {
areEnvironmentsLoading: defaultProps.areEnvironmentsLoading,
areScopedVariablesAvailable: defaultProps.areScopedVariablesAvailable,
environments: defaultProps.environments,
- hasEnvScopeQuery: defaultProps.hasEnvScopeQuery,
hideEnvironmentScope: defaultProps.hideEnvironmentScope,
variables: defaultProps.variables,
mode: ADD_VARIABLE_ACTION,
@@ -142,7 +140,7 @@ describe('Ci variable table', () => {
});
});
- describe('variable events', () => {
+ describe('variable events for modal', () => {
beforeEach(() => {
createComponent();
});
@@ -161,6 +159,25 @@ describe('Ci variable table', () => {
});
});
+ describe('variable events for drawer', () => {
+ beforeEach(() => {
+ createComponent({ featureFlags: { ciVariableDrawer: true } });
+ });
+
+ it.each`
+ eventName
+ ${'add-variable'}
+ ${'update-variable'}
+ ${'delete-variable'}
+ `('bubbles up the $eventName event', async ({ eventName }) => {
+ await findCiVariableTable().vm.$emit('set-selected-variable');
+
+ await findCiVariableDrawer().vm.$emit(eventName, newVariable);
+
+ expect(wrapper.emitted(eventName)).toEqual([[newVariable]]);
+ });
+ });
+
describe('pages events', () => {
beforeEach(() => {
createComponent();
@@ -178,7 +195,7 @@ describe('Ci variable table', () => {
});
});
- describe('environment events', () => {
+ describe('environment events for modal', () => {
beforeEach(() => {
createComponent();
});
@@ -191,4 +208,18 @@ describe('Ci variable table', () => {
expect(wrapper.emitted('search-environment-scope')).toEqual([['staging']]);
});
});
+
+ describe('environment events for drawer', () => {
+ beforeEach(() => {
+ createComponent({ featureFlags: { ciVariableDrawer: true } });
+ });
+
+ it('bubbles up the search event', async () => {
+ await findCiVariableTable().vm.$emit('set-selected-variable');
+
+ await findCiVariableDrawer().vm.$emit('search-environment-scope', 'staging');
+
+ expect(wrapper.emitted('search-environment-scope')).toEqual([['staging']]);
+ });
+ });
});
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_variable_shared_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_shared_spec.js
index 6fa1915f3c1..c90ff4cc682 100644
--- a/spec/frontend/ci/ci_variable_list/components/ci_variable_shared_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_shared_spec.js
@@ -52,7 +52,6 @@ const mockProvide = {
const defaultProps = {
areScopedVariablesAvailable: true,
- hasEnvScopeQuery: false,
pageInfo: {},
hideEnvironmentScope: false,
refetchAfterMutation: false,
@@ -514,7 +513,6 @@ describe('Ci Variable Shared Component', () => {
areEnvironmentsLoading: false,
areScopedVariablesAvailable: wrapper.props().areScopedVariablesAvailable,
hideEnvironmentScope: defaultProps.hideEnvironmentScope,
- hasEnvScopeQuery: props.hasEnvScopeQuery,
pageInfo: defaultProps.pageInfo,
isLoading: false,
maxVariableLimit,
diff --git a/spec/frontend/ci/ci_variable_list/mocks.js b/spec/frontend/ci/ci_variable_list/mocks.js
index 41dfc0ebfda..9c9c99ad5ea 100644
--- a/spec/frontend/ci/ci_variable_list/mocks.js
+++ b/spec/frontend/ci/ci_variable_list/mocks.js
@@ -189,7 +189,6 @@ export const createProjectProps = () => {
componentName: 'ProjectVariable',
entity: 'project',
fullPath: '/namespace/project/',
- hasEnvScopeQuery: true,
id: 'gid://gitlab/Project/20',
mutationData: {
[ADD_MUTATION_ACTION]: addProjectVariable,
@@ -214,7 +213,6 @@ export const createGroupProps = () => {
componentName: 'GroupVariable',
entity: 'group',
fullPath: '/my-group',
- hasEnvScopeQuery: false,
id: 'gid://gitlab/Group/20',
mutationData: {
[ADD_MUTATION_ACTION]: addGroupVariable,
@@ -233,7 +231,6 @@ export const createGroupProps = () => {
export const createInstanceProps = () => {
return {
componentName: 'InstanceVariable',
- hasEnvScopeQuery: false,
entity: '',
mutationData: {
[ADD_MUTATION_ACTION]: addAdminVariable,
diff --git a/spec/frontend/ci/ci_variable_list/utils_spec.js b/spec/frontend/ci/ci_variable_list/utils_spec.js
index beeae71376a..fbcf0e7c5a5 100644
--- a/spec/frontend/ci/ci_variable_list/utils_spec.js
+++ b/spec/frontend/ci/ci_variable_list/utils_spec.js
@@ -1,58 +1,7 @@
-import {
- createJoinedEnvironments,
- convertEnvironmentScope,
- mapEnvironmentNames,
-} from '~/ci/ci_variable_list/utils';
+import { convertEnvironmentScope, mapEnvironmentNames } from '~/ci/ci_variable_list/utils';
import { allEnvironments } from '~/ci/ci_variable_list/constants';
describe('utils', () => {
- const environments = ['dev', 'prod'];
- const newEnvironments = ['staging'];
-
- describe('createJoinedEnvironments', () => {
- it('returns only `environments` if `variables` argument is undefined', () => {
- const variables = undefined;
-
- expect(createJoinedEnvironments(variables, environments, [])).toEqual(environments);
- });
-
- it('returns a list of environments and environment scopes taken from variables in alphabetical order', () => {
- const envScope1 = 'new1';
- const envScope2 = 'new2';
-
- const variables = [{ environmentScope: envScope1 }, { environmentScope: envScope2 }];
-
- expect(createJoinedEnvironments(variables, environments, [])).toEqual([
- environments[0],
- envScope1,
- envScope2,
- environments[1],
- ]);
- });
-
- it('returns combined list with new environments included', () => {
- const variables = undefined;
-
- expect(createJoinedEnvironments(variables, environments, newEnvironments)).toEqual([
- ...environments,
- ...newEnvironments,
- ]);
- });
-
- it('removes duplicate environments', () => {
- const envScope1 = environments[0];
- const envScope2 = 'new2';
-
- const variables = [{ environmentScope: envScope1 }, { environmentScope: envScope2 }];
-
- expect(createJoinedEnvironments(variables, environments, [])).toEqual([
- environments[0],
- envScope2,
- environments[1],
- ]);
- });
- });
-
describe('convertEnvironmentScope', () => {
it('converts the * to the `All environments` text', () => {
expect(convertEnvironmentScope('*')).toBe(allEnvironments.text);
diff --git a/spec/frontend/ci/common/pipelines_table_spec.js b/spec/frontend/ci/common/pipelines_table_spec.js
index 26dd1a2fcc5..6cf391d72ca 100644
--- a/spec/frontend/ci/common/pipelines_table_spec.js
+++ b/spec/frontend/ci/common/pipelines_table_spec.js
@@ -1,9 +1,7 @@
-import '~/commons';
import { GlTableLite } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
import fixture from 'test_fixtures/pipelines/pipelines.json';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import LegacyPipelineMiniGraph from '~/ci/pipeline_mini_graph/legacy_pipeline_mini_graph.vue';
import PipelineFailedJobsWidget from '~/ci/pipelines_page/components/failure_widget/pipeline_failed_jobs_widget.vue';
import PipelineOperations from '~/ci/pipelines_page/components/pipeline_operations.vue';
@@ -12,7 +10,7 @@ import PipelineUrl from '~/ci/pipelines_page/components/pipeline_url.vue';
import PipelinesTable from '~/ci/common/pipelines_table.vue';
import PipelinesTimeago from '~/ci/pipelines_page/components/time_ago.vue';
import {
- PipelineKeyOptions,
+ PIPELINE_ID_KEY,
BUTTON_TOOLTIP_RETRY,
BUTTON_TOOLTIP_CANCEL,
TRACKING_CATEGORIES,
@@ -20,51 +18,43 @@ import {
import CiBadgeLink from '~/vue_shared/components/ci_badge_link.vue';
-jest.mock('~/ci/event_hub');
-
describe('Pipelines Table', () => {
- let pipeline;
let wrapper;
let trackingSpy;
const defaultProvide = {
- glFeatures: {},
- withFailedJobsDetails: false,
+ fullPath: '/my-project/',
+ useFailedJobsWidget: false,
};
- const provideWithDetails = {
- glFeatures: {
- ciJobFailuresInMr: true,
- },
- withFailedJobsDetails: true,
+ const provideWithFailedJobsWidget = {
+ useFailedJobsWidget: true,
};
- const defaultProps = {
- pipelines: [],
- viewType: 'root',
- pipelineKeyOption: PipelineKeyOptions[0],
- };
+ const { pipelines } = fixture;
- const createMockPipeline = () => {
- // Clone fixture as it could be modified by tests
- const { pipelines } = JSON.parse(JSON.stringify(fixture));
- return pipelines.find((p) => p.user !== null && p.commit !== null);
+ const defaultProps = {
+ pipelines,
+ pipelineIdType: PIPELINE_ID_KEY,
};
- const createComponent = (props = {}, provide = {}) => {
- wrapper = extendedWrapper(
- mount(PipelinesTable, {
- propsData: {
- ...defaultProps,
- ...props,
- },
- provide: {
- ...defaultProvide,
- ...provide,
- },
- stubs: ['PipelineFailedJobsWidget'],
- }),
- );
+ const [firstPipeline] = pipelines;
+
+ const createComponent = ({ props = {}, provide = {}, stubs = {} } = {}) => {
+ wrapper = mountExtended(PipelinesTable, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ provide: {
+ ...defaultProvide,
+ ...provide,
+ },
+ stubs: {
+ PipelineOperations: true,
+ ...stubs,
+ },
+ });
};
const findGlTableLite = () => wrapper.findComponent(GlTableLite);
@@ -84,13 +74,9 @@ describe('Pipelines Table', () => {
const findRetryBtn = () => wrapper.findByTestId('pipelines-retry-button');
const findCancelBtn = () => wrapper.findByTestId('pipelines-cancel-button');
- beforeEach(() => {
- pipeline = createMockPipeline();
- });
-
describe('Pipelines Table', () => {
beforeEach(() => {
- createComponent({ pipelines: [pipeline], viewType: 'root' });
+ createComponent({ props: { viewType: 'root' } });
});
it('displays table', () => {
@@ -105,7 +91,7 @@ describe('Pipelines Table', () => {
});
it('should display a table row', () => {
- expect(findTableRows()).toHaveLength(1);
+ expect(findTableRows()).toHaveLength(pipelines.length);
});
describe('status cell', () => {
@@ -120,7 +106,7 @@ describe('Pipelines Table', () => {
});
it('should display the pipeline id', () => {
- expect(findPipelineInfo().text()).toContain(`#${pipeline.id}`);
+ expect(findPipelineInfo().text()).toContain(`#${firstPipeline.id}`);
});
});
@@ -130,24 +116,33 @@ describe('Pipelines Table', () => {
});
it('should render the right number of stages', () => {
- const stagesLength = pipeline.details.stages.length;
- expect(findLegacyPipelineMiniGraph().props('stages').length).toBe(stagesLength);
+ const stagesLength = firstPipeline.details.stages.length;
+ expect(findLegacyPipelineMiniGraph().props('stages')).toHaveLength(stagesLength);
});
it('should render the latest downstream pipelines only', () => {
// component receives two downstream pipelines. one of them is already outdated
// because we retried the trigger job, so the mini pipeline graph will only
// render the newly created downstream pipeline instead
- expect(pipeline.triggered).toHaveLength(2);
+ expect(firstPipeline.triggered).toHaveLength(2);
expect(findLegacyPipelineMiniGraph().props('downstreamPipelines')).toHaveLength(1);
});
describe('when pipeline does not have stages', () => {
beforeEach(() => {
- pipeline = createMockPipeline();
- pipeline.details.stages = [];
-
- createComponent({ pipelines: [pipeline] });
+ createComponent({
+ props: {
+ pipelines: [
+ {
+ ...firstPipeline,
+ details: {
+ ...firstPipeline.details,
+ stages: [],
+ },
+ },
+ ],
+ },
+ });
});
it('stages are not rendered', () => {
@@ -163,6 +158,10 @@ describe('Pipelines Table', () => {
});
describe('operations cell', () => {
+ beforeEach(() => {
+ createComponent({ stubs: { PipelineOperations } });
+ });
+
it('should render pipeline operations', () => {
expect(findActions().exists()).toBe(true);
});
@@ -183,97 +182,101 @@ describe('Pipelines Table', () => {
});
describe('failed jobs details', () => {
- describe('row', () => {
- describe('when the FF is disabled', () => {
- beforeEach(() => {
- createComponent({ pipelines: [pipeline] });
- });
+ describe('when `useFailedJobsWidget` value is provided', () => {
+ beforeEach(() => {
+ createComponent({ provide: provideWithFailedJobsWidget });
+ });
- it('does not render', () => {
- expect(findTableRows()).toHaveLength(1);
- expect(findPipelineFailureWidget().exists()).toBe(false);
- });
+ it('renders', () => {
+ // We have 2 rows per pipeline with the widget
+ expect(findTableRows()).toHaveLength(pipelines.length * 2);
+ expect(findPipelineFailureWidget().exists()).toBe(true);
});
- describe('when the FF is enabled', () => {
- describe('and `withFailedJobsDetails` value is provided', () => {
- beforeEach(() => {
- createComponent({ pipelines: [pipeline] }, provideWithDetails);
- });
-
- it('renders', () => {
- expect(findTableRows()).toHaveLength(2);
- expect(findPipelineFailureWidget().exists()).toBe(true);
- });
-
- it('passes the expected props', () => {
- expect(findPipelineFailureWidget().props()).toStrictEqual({
- failedJobsCount: pipeline.failed_builds.length,
- isPipelineActive: pipeline.active,
- pipelineIid: pipeline.iid,
- pipelinePath: pipeline.path,
- // Make sure the forward slash was removed
- projectPath: 'frontend-fixtures/pipelines-project',
- });
- });
+ it('passes the expected props', () => {
+ expect(findPipelineFailureWidget().props()).toStrictEqual({
+ failedJobsCount: firstPipeline.failed_builds_count,
+ isPipelineActive: firstPipeline.active,
+ pipelineIid: firstPipeline.iid,
+ pipelinePath: firstPipeline.path,
+ // Make sure the forward slash was removed
+ projectPath: 'frontend-fixtures/pipelines-project',
});
+ });
+ });
- describe('and `withFailedJobsDetails` value is not provided', () => {
- beforeEach(() => {
- createComponent(
- { pipelines: [pipeline] },
- { glFeatures: { ciJobFailuresInMr: true } },
- );
- });
-
- it('does not render', () => {
- expect(findTableRows()).toHaveLength(1);
- expect(findPipelineFailureWidget().exists()).toBe(false);
- });
- });
+ describe('and `useFailedJobsWidget` value is not provided', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('does not render', () => {
+ expect(findTableRows()).toHaveLength(pipelines.length);
+ expect(findPipelineFailureWidget().exists()).toBe(false);
});
});
});
+ });
- describe('tracking', () => {
- beforeEach(() => {
- trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ describe('events', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ describe('when confirming to cancel a pipeline', () => {
+ beforeEach(async () => {
+ await findActions().vm.$emit('cancel-pipeline', firstPipeline);
});
- afterEach(() => {
- unmockTracking();
+ it('emits the `cancel-pipeline` event', () => {
+ expect(wrapper.emitted('cancel-pipeline')).toEqual([[firstPipeline]]);
});
+ });
- it('tracks status badge click', () => {
- findCiBadgeLink().vm.$emit('ciStatusBadgeClick');
+ describe('when retrying a pipeline', () => {
+ beforeEach(() => {
+ findActions().vm.$emit('retry-pipeline', firstPipeline);
+ });
- expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_ci_status_badge', {
- label: TRACKING_CATEGORIES.table,
- });
+ it('emits the `retry-pipeline` event', () => {
+ expect(wrapper.emitted('retry-pipeline')).toEqual([[firstPipeline]]);
});
+ });
- it('tracks retry pipeline button click', () => {
- findRetryBtn().vm.$emit('click');
+ describe('when refreshing pipelines', () => {
+ beforeEach(() => {
+ findActions().vm.$emit('refresh-pipelines-table');
+ });
- expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_retry_button', {
- label: TRACKING_CATEGORIES.table,
- });
+ it('emits the `refresh-pipelines-table` event', () => {
+ expect(wrapper.emitted('refresh-pipelines-table')).toEqual([[]]);
});
+ });
+ });
- it('tracks cancel pipeline button click', () => {
- findCancelBtn().vm.$emit('click');
+ describe('tracking', () => {
+ beforeEach(() => {
+ createComponent();
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ });
- expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_cancel_button', {
- label: TRACKING_CATEGORIES.table,
- });
+ afterEach(() => {
+ unmockTracking();
+ });
+
+ it('tracks status badge click', () => {
+ findCiBadgeLink().vm.$emit('ciStatusBadgeClick');
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_ci_status_badge', {
+ label: TRACKING_CATEGORIES.table,
});
+ });
- it('tracks pipeline mini graph stage click', () => {
- findLegacyPipelineMiniGraph().vm.$emit('miniGraphStageClick');
+ it('tracks pipeline mini graph stage click', () => {
+ findLegacyPipelineMiniGraph().vm.$emit('miniGraphStageClick');
- expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_minigraph', {
- label: TRACKING_CATEGORIES.table,
- });
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_minigraph', {
+ label: TRACKING_CATEGORIES.table,
});
});
});
diff --git a/spec/frontend/ci/job_details/components/job_header_spec.js b/spec/frontend/ci/job_details/components/job_header_spec.js
index 6fc55732353..609369316f5 100644
--- a/spec/frontend/ci/job_details/components/job_header_spec.js
+++ b/spec/frontend/ci/job_details/components/job_header_spec.js
@@ -16,7 +16,7 @@ describe('Header CI Component', () => {
text: 'failed',
details_path: 'path',
},
- name: 'Job build_job',
+ name: 'build_job',
time: '2017-05-08T14:57:39.781Z',
user: {
id: 1234,
@@ -34,17 +34,15 @@ describe('Header CI Component', () => {
const findUserLink = () => wrapper.findComponent(GlAvatarLink);
const findSidebarToggleBtn = () => wrapper.findComponent(GlButton);
const findStatusTooltip = () => wrapper.findComponent(GlTooltip);
- const findActionButtons = () => wrapper.findByTestId('job-header-action-buttons');
const findJobName = () => wrapper.findByTestId('job-name');
- const createComponent = (props, slots) => {
+ const createComponent = (props) => {
wrapper = extendedWrapper(
shallowMount(JobHeader, {
propsData: {
...defaultProps,
...props,
},
- ...slots,
}),
);
};
@@ -54,6 +52,10 @@ describe('Header CI Component', () => {
createComponent();
});
+ it('renders the correct job name', () => {
+ expect(findJobName().text()).toBe(defaultProps.name);
+ });
+
it('should render status badge', () => {
expect(findCiBadgeLink().exists()).toBe(true);
});
@@ -65,10 +67,6 @@ describe('Header CI Component', () => {
it('should render sidebar toggle button', () => {
expect(findSidebarToggleBtn().exists()).toBe(true);
});
-
- it('should not render header action buttons when slot is empty', () => {
- expect(findActionButtons().exists()).toBe(false);
- });
});
describe('user avatar', () => {
@@ -124,31 +122,12 @@ describe('Header CI Component', () => {
});
});
- describe('job name', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('should render the job name', () => {
- expect(findJobName().text()).toBe('Job build_job');
- });
- });
-
- describe('slot', () => {
- it('should render header action buttons', () => {
- createComponent({}, { slots: { default: 'Test Actions' } });
-
- expect(findActionButtons().exists()).toBe(true);
- expect(findActionButtons().text()).toBe('Test Actions');
- });
- });
-
describe('shouldRenderTriggeredLabel', () => {
it('should render created keyword when the shouldRenderTriggeredLabel is false', () => {
createComponent({ shouldRenderTriggeredLabel: false });
- expect(wrapper.text()).toContain('created');
- expect(wrapper.text()).not.toContain('started');
+ expect(wrapper.text()).toContain('Created');
+ expect(wrapper.text()).not.toContain('Started');
});
});
});
diff --git a/spec/frontend/ci/job_details/components/log/collapsible_section_spec.js b/spec/frontend/ci/job_details/components/log/collapsible_section_spec.js
index e3d5c448338..5abf2a5ce53 100644
--- a/spec/frontend/ci/job_details/components/log/collapsible_section_spec.js
+++ b/spec/frontend/ci/job_details/components/log/collapsible_section_spec.js
@@ -1,6 +1,7 @@
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import CollapsibleSection from '~/ci/job_details/components/log/collapsible_section.vue';
+import LogLine from '~/ci/job_details/components/log/line.vue';
import LogLineHeader from '~/ci/job_details/components/log/line_header.vue';
import { collapsibleSectionClosed, collapsibleSectionOpened } from './mock_data';
@@ -9,9 +10,9 @@ describe('Job Log Collapsible Section', () => {
const jobLogEndpoint = 'jobs/335';
- const findCollapsibleLine = () => wrapper.find('.collapsible-line');
- const findCollapsibleLineSvg = () => wrapper.find('.collapsible-line svg');
const findLogLineHeader = () => wrapper.findComponent(LogLineHeader);
+ const findLogLineHeaderSvg = () => findLogLineHeader().find('svg');
+ const findLogLines = () => wrapper.findAllComponents(LogLine);
const createComponent = (props = {}) => {
wrapper = mount(CollapsibleSection, {
@@ -30,11 +31,16 @@ describe('Job Log Collapsible Section', () => {
});
it('renders clickable header line', () => {
- expect(findCollapsibleLine().attributes('role')).toBe('button');
+ expect(findLogLineHeader().text()).toBe('1 foo');
+ expect(findLogLineHeader().attributes('role')).toBe('button');
});
- it('renders an icon with the closed state', () => {
- expect(findCollapsibleLineSvg().attributes('data-testid')).toBe('chevron-lg-right-icon');
+ it('renders an icon with a closed state', () => {
+ expect(findLogLineHeaderSvg().attributes('data-testid')).toBe('chevron-lg-right-icon');
+ });
+
+ it('does not render collapsed lines', () => {
+ expect(findLogLines()).toHaveLength(0);
});
});
@@ -47,15 +53,17 @@ describe('Job Log Collapsible Section', () => {
});
it('renders clickable header line', () => {
- expect(findCollapsibleLine().attributes('role')).toBe('button');
+ expect(findLogLineHeader().text()).toContain('foo');
+ expect(findLogLineHeader().attributes('role')).toBe('button');
});
it('renders an icon with the open state', () => {
- expect(findCollapsibleLineSvg().attributes('data-testid')).toBe('chevron-lg-down-icon');
+ expect(findLogLineHeaderSvg().attributes('data-testid')).toBe('chevron-lg-down-icon');
});
- it('renders collapsible lines content', () => {
- expect(wrapper.findAll('.js-line').length).toEqual(collapsibleSectionOpened.lines.length);
+ it('renders collapsible lines', () => {
+ expect(findLogLines().at(0).text()).toContain('this is a collapsible nested section');
+ expect(findLogLines()).toHaveLength(collapsibleSectionOpened.lines.length);
});
});
@@ -65,7 +73,7 @@ describe('Job Log Collapsible Section', () => {
jobLogEndpoint,
});
- findCollapsibleLine().trigger('click');
+ findLogLineHeader().trigger('click');
await nextTick();
expect(wrapper.emitted('onClickCollapsibleLine').length).toBe(1);
diff --git a/spec/frontend/ci/job_details/components/log/line_header_spec.js b/spec/frontend/ci/job_details/components/log/line_header_spec.js
index 7d1b05346f2..45296e4b6c2 100644
--- a/spec/frontend/ci/job_details/components/log/line_header_spec.js
+++ b/spec/frontend/ci/job_details/components/log/line_header_spec.js
@@ -16,7 +16,7 @@ describe('Job Log Header Line', () => {
style: 'term-fg-l-green',
},
],
- lineNumber: 76,
+ lineNumber: 77,
},
isClosed: true,
path: '/jashkenas/underscore/-/jobs/335',
diff --git a/spec/frontend/ci/job_details/components/log/line_number_spec.js b/spec/frontend/ci/job_details/components/log/line_number_spec.js
index d5c1d0fd985..db964e341b7 100644
--- a/spec/frontend/ci/job_details/components/log/line_number_spec.js
+++ b/spec/frontend/ci/job_details/components/log/line_number_spec.js
@@ -5,7 +5,7 @@ describe('Job Log Line Number', () => {
let wrapper;
const data = {
- lineNumber: 0,
+ lineNumber: 1,
path: '/jashkenas/underscore/-/jobs/335',
};
diff --git a/spec/frontend/ci/job_details/components/log/line_spec.js b/spec/frontend/ci/job_details/components/log/line_spec.js
index b6f3a2b68df..dad41d0cd7f 100644
--- a/spec/frontend/ci/job_details/components/log/line_spec.js
+++ b/spec/frontend/ci/job_details/components/log/line_spec.js
@@ -224,7 +224,7 @@ describe('Job Log Line', () => {
offset: 24526,
content: [{ text: 'job log content' }],
section: 'custom-section',
- lineNumber: 76,
+ lineNumber: 77,
},
path: '/root/ci-project/-/jobs/6353',
});
diff --git a/spec/frontend/ci/job_details/components/log/log_spec.js b/spec/frontend/ci/job_details/components/log/log_spec.js
index cc1621b87d6..1931d5046dc 100644
--- a/spec/frontend/ci/job_details/components/log/log_spec.js
+++ b/spec/frontend/ci/job_details/components/log/log_spec.js
@@ -7,7 +7,7 @@ import { scrollToElement } from '~/lib/utils/common_utils';
import Log from '~/ci/job_details/components/log/log.vue';
import LogLineHeader from '~/ci/job_details/components/log/line_header.vue';
import { logLinesParser } from '~/ci/job_details/store/utils';
-import { jobLog } from './mock_data';
+import { mockJobLog, mockJobLogLineCount } from './mock_data';
jest.mock('~/lib/utils/common_utils', () => ({
...jest.requireActual('~/lib/utils/common_utils'),
@@ -39,7 +39,7 @@ describe('Job Log', () => {
};
state = {
- jobLog: logLinesParser(jobLog),
+ jobLog: logLinesParser(mockJobLog),
jobLogEndpoint: 'jobs/id',
};
@@ -57,15 +57,18 @@ describe('Job Log', () => {
createComponent();
});
- it('renders a line number for each open line', () => {
- expect(wrapper.find('#L1').text()).toBe('1');
- expect(wrapper.find('#L2').text()).toBe('2');
- expect(wrapper.find('#L3').text()).toBe('3');
- });
+ it.each([...Array(mockJobLogLineCount).keys()])(
+ 'renders a line number for each line %d',
+ (index) => {
+ const lineNumber = wrapper
+ .findAll('.js-log-line')
+ .at(index)
+ .find(`#L${index + 1}`);
- it('links to the provided path and correct line number', () => {
- expect(wrapper.find('#L1').attributes('href')).toBe(`${state.jobLogEndpoint}#L1`);
- });
+ expect(lineNumber.text()).toBe(`${index + 1}`);
+ expect(lineNumber.attributes('href')).toBe(`${state.jobLogEndpoint}#L${index + 1}`);
+ },
+ );
});
describe('collapsible sections', () => {
@@ -103,7 +106,7 @@ describe('Job Log', () => {
await waitForPromises();
- expect(wrapper.find('#L6').exists()).toBe(false);
+ expect(wrapper.find('#L9').exists()).toBe(false);
expect(scrollToElement).not.toHaveBeenCalled();
});
});
@@ -116,19 +119,19 @@ describe('Job Log', () => {
it('scrolls to line number', async () => {
createComponent();
- state.jobLog = logLinesParser(jobLog, [], '#L6');
+ state.jobLog = logLinesParser(mockJobLog, [], '#L6');
await waitForPromises();
expect(scrollToElement).toHaveBeenCalledTimes(1);
- state.jobLog = logLinesParser(jobLog, [], '#L7');
+ state.jobLog = logLinesParser(mockJobLog, [], '#L7');
await waitForPromises();
expect(scrollToElement).toHaveBeenCalledTimes(1);
});
it('line number within collapsed section is visible', () => {
- state.jobLog = logLinesParser(jobLog, [], '#L6');
+ state.jobLog = logLinesParser(mockJobLog, [], '#L6');
createComponent();
@@ -148,7 +151,7 @@ describe('Job Log', () => {
],
section: 'prepare-executor',
section_header: true,
- lineNumber: 2,
+ lineNumber: 3,
},
];
diff --git a/spec/frontend/ci/job_details/components/log/mock_data.js b/spec/frontend/ci/job_details/components/log/mock_data.js
index fa51b92a044..14669872cc1 100644
--- a/spec/frontend/ci/job_details/components/log/mock_data.js
+++ b/spec/frontend/ci/job_details/components/log/mock_data.js
@@ -1,4 +1,4 @@
-export const jobLog = [
+export const mockJobLog = [
{
offset: 1000,
content: [{ text: 'Running with gitlab-runner 12.1.0 (de7731dd)' }],
@@ -19,69 +19,50 @@ export const jobLog = [
},
{
offset: 1003,
- content: [{ text: 'Starting service postgres:9.6.14 ...', style: 'text-green' }],
+ content: [{ text: 'Docker executor with image registry.gitlab.com ...' }],
section: 'prepare-executor',
},
{
offset: 1004,
- content: [
- {
- text: 'Restore cache',
- style: 'term-fg-l-cyan term-bold',
- },
- ],
- section: 'restore-cache',
- section_header: true,
- section_options: {
- collapsed: 'true',
- },
+ content: [{ text: 'Starting service ...', style: 'term-fg-l-green' }],
+ section: 'prepare-executor',
},
{
offset: 1005,
- content: [
- {
- text: 'Checking cache for ruby-gems-debian-bullseye-ruby-3.0-16...',
- style: 'term-fg-l-green term-bold',
- },
- ],
- section: 'restore-cache',
- },
-];
-
-export const utilsMockData = [
- {
- offset: 1001,
- content: [{ text: ' on docker-auto-scale-com 8a6210b8' }],
+ content: [],
+ section: 'prepare-executor',
+ section_duration: '00:09',
},
{
- offset: 1002,
+ offset: 1006,
content: [
{
- text:
- 'Using Docker executor with image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.6.6-golang-1.14-git-2.28-lfs-2.9-chrome-84-node-12.x-yarn-1.21-postgresql-11-graphicsmagick-1.3.34',
+ text: 'Getting source from Git repository',
},
],
- section: 'prepare-executor',
+ section: 'get-sources',
section_header: true,
},
{
- offset: 1003,
- content: [{ text: 'Starting service postgres:9.6.14 ...' }],
- section: 'prepare-executor',
+ offset: 1007,
+ content: [{ text: 'Fetching changes with git depth set to 20...' }],
+ section: 'get-sources',
},
{
- offset: 1004,
- content: [{ text: 'Pulling docker image postgres:9.6.14 ...', style: 'term-fg-l-green' }],
- section: 'prepare-executor',
+ offset: 1008,
+ content: [{ text: 'Initialized empty Git repository', style: 'term-fg-l-green' }],
+ section: 'get-sources',
},
{
- offset: 1005,
+ offset: 1009,
content: [],
- section: 'prepare-executor',
- section_duration: '10:00',
+ section: 'get-sources',
+ section_duration: '00:19',
},
];
+export const mockJobLogLineCount = 8; // `text` entries in mockJobLog
+
export const originalTrace = [
{
offset: 1,
@@ -191,7 +172,7 @@ export const collapsibleSectionClosed = {
offset: 80,
content: [{ text: 'this is a collapsible nested section' }],
section: 'prepare-script',
- lineNumber: 3,
+ lineNumber: 2,
},
],
};
@@ -212,7 +193,7 @@ export const collapsibleSectionOpened = {
offset: 80,
content: [{ text: 'this is a collapsible nested section' }],
section: 'prepare-script',
- lineNumber: 3,
+ lineNumber: 2,
},
],
};
diff --git a/spec/frontend/ci/job_details/components/sidebar/artifacts_block_spec.js b/spec/frontend/ci/job_details/components/sidebar/artifacts_block_spec.js
index 1d61bf3243f..e539be2b220 100644
--- a/spec/frontend/ci/job_details/components/sidebar/artifacts_block_spec.js
+++ b/spec/frontend/ci/job_details/components/sidebar/artifacts_block_spec.js
@@ -30,31 +30,31 @@ describe('Artifacts block', () => {
'These artifacts are the latest. They will not be deleted (even if expired) until newer artifacts are available.';
const expiredArtifact = {
- expire_at: expireAt,
+ expireAt,
expired: true,
locked: false,
};
const nonExpiredArtifact = {
- download_path: '/gitlab-org/gitlab-foss/-/jobs/98314558/artifacts/download',
- browse_path: '/gitlab-org/gitlab-foss/-/jobs/98314558/artifacts/browse',
- keep_path: '/gitlab-org/gitlab-foss/-/jobs/98314558/artifacts/keep',
- expire_at: expireAt,
+ downloadPath: '/gitlab-org/gitlab-foss/-/jobs/98314558/artifacts/download',
+ browsePath: '/gitlab-org/gitlab-foss/-/jobs/98314558/artifacts/browse',
+ keepPath: '/gitlab-org/gitlab-foss/-/jobs/98314558/artifacts/keep',
+ expireAt,
expired: false,
locked: false,
};
const lockedExpiredArtifact = {
...expiredArtifact,
- download_path: '/gitlab-org/gitlab-foss/-/jobs/98314558/artifacts/download',
- browse_path: '/gitlab-org/gitlab-foss/-/jobs/98314558/artifacts/browse',
+ downloadPath: '/gitlab-org/gitlab-foss/-/jobs/98314558/artifacts/download',
+ browsePath: '/gitlab-org/gitlab-foss/-/jobs/98314558/artifacts/browse',
expired: true,
locked: true,
};
const lockedNonExpiredArtifact = {
...nonExpiredArtifact,
- keep_path: undefined,
+ keepPath: undefined,
locked: true,
};
diff --git a/spec/frontend/ci/job_details/components/sidebar/sidebar_header_spec.js b/spec/frontend/ci/job_details/components/sidebar/sidebar_header_spec.js
index 1063bec6f3b..81181fc71b2 100644
--- a/spec/frontend/ci/job_details/components/sidebar/sidebar_header_spec.js
+++ b/spec/frontend/ci/job_details/components/sidebar/sidebar_header_spec.js
@@ -55,15 +55,9 @@ describe('Sidebar Header', () => {
const findEraseButton = () => wrapper.findByTestId('job-log-erase-link');
const findNewIssueButton = () => wrapper.findByTestId('job-new-issue');
const findTerminalLink = () => wrapper.findByTestId('terminal-link');
- const findJobName = () => wrapper.findByTestId('job-name');
const findRetryButton = () => wrapper.findComponent(JobRetryButton);
describe('when rendering contents', () => {
- it('renders the correct job name', async () => {
- await createComponentWithApollo();
- expect(findJobName().text()).toBe(mockJobResponse.data.project.job.name);
- });
-
it('does not render buttons with no paths', async () => {
await createComponentWithApollo();
expect(findCancelButton().exists()).toBe(false);
diff --git a/spec/frontend/ci/job_details/components/sidebar/sidebar_job_details_container_spec.js b/spec/frontend/ci/job_details/components/sidebar/sidebar_job_details_container_spec.js
index e188d99b8b1..37a2ca75df0 100644
--- a/spec/frontend/ci/job_details/components/sidebar/sidebar_job_details_container_spec.js
+++ b/spec/frontend/ci/job_details/components/sidebar/sidebar_job_details_container_spec.js
@@ -53,7 +53,6 @@ describe('Job Sidebar Details Container', () => {
['erased_at', 'Erased: 3 weeks ago'],
['finished_at', 'Finished: 3 weeks ago'],
['queued_duration', 'Queued: 9 seconds'],
- ['id', 'Job ID: #4757'],
['runner', 'Runner: #1 (ABCDEFGH) local ci runner'],
['coverage', 'Coverage: 20%'],
])('uses %s to render job-%s', async (detail, value) => {
@@ -78,7 +77,7 @@ describe('Job Sidebar Details Container', () => {
createWrapper();
await store.dispatch('receiveJobSuccess', job);
- expect(findAllDetailsRow()).toHaveLength(8);
+ expect(findAllDetailsRow()).toHaveLength(7);
});
describe('duration row', () => {
diff --git a/spec/frontend/ci/job_details/job_app_spec.js b/spec/frontend/ci/job_details/job_app_spec.js
index c2d91771495..ff84b2d0283 100644
--- a/spec/frontend/ci/job_details/job_app_spec.js
+++ b/spec/frontend/ci/job_details/job_app_spec.js
@@ -31,8 +31,6 @@ describe('Job App', () => {
const initSettings = {
endpoint: `${TEST_HOST}jobs/123.json`,
pagePath: `${TEST_HOST}jobs/123`,
- logState:
- 'eyJvZmZzZXQiOjE3NDUxLCJuX29wZW5fdGFncyI6MCwiZmdfY29sb3IiOm51bGwsImJnX2NvbG9yIjpudWxsLCJzdHlsZV9tYXNrIjowfQ%3D%3D',
};
const props = {
diff --git a/spec/frontend/ci/job_details/store/actions_spec.js b/spec/frontend/ci/job_details/store/actions_spec.js
index bb5c1fe32bd..2799bc9578c 100644
--- a/spec/frontend/ci/job_details/store/actions_spec.js
+++ b/spec/frontend/ci/job_details/store/actions_spec.js
@@ -2,7 +2,6 @@ import MockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'helpers/test_constants';
import testAction from 'helpers/vuex_action_helper';
import {
- setJobEndpoint,
setJobLogOptions,
clearEtagPoll,
stopPolling,
@@ -39,25 +38,21 @@ describe('Job State actions', () => {
mockedState = state();
});
- describe('setJobEndpoint', () => {
- it('should commit SET_JOB_ENDPOINT mutation', () => {
- return testAction(
- setJobEndpoint,
- 'job/872324.json',
- mockedState,
- [{ type: types.SET_JOB_ENDPOINT, payload: 'job/872324.json' }],
- [],
- );
- });
- });
-
describe('setJobLogOptions', () => {
it('should commit SET_JOB_LOG_OPTIONS mutation', () => {
return testAction(
setJobLogOptions,
- { pagePath: 'job/872324/trace.json' },
+ { endpoint: '/group1/project1/-/jobs/99.json', pagePath: '/group1/project1/-/jobs/99' },
mockedState,
- [{ type: types.SET_JOB_LOG_OPTIONS, payload: { pagePath: 'job/872324/trace.json' } }],
+ [
+ {
+ type: types.SET_JOB_LOG_OPTIONS,
+ payload: {
+ endpoint: '/group1/project1/-/jobs/99.json',
+ pagePath: '/group1/project1/-/jobs/99',
+ },
+ },
+ ],
[],
);
});
diff --git a/spec/frontend/ci/job_details/store/mutations_spec.js b/spec/frontend/ci/job_details/store/mutations_spec.js
index 0835c534fb9..78b29efed68 100644
--- a/spec/frontend/ci/job_details/store/mutations_spec.js
+++ b/spec/frontend/ci/job_details/store/mutations_spec.js
@@ -12,11 +12,17 @@ describe('Jobs Store Mutations', () => {
stateCopy = state();
});
- describe('SET_JOB_ENDPOINT', () => {
+ describe('SET_JOB_LOG_OPTIONS', () => {
it('should set jobEndpoint', () => {
- mutations[types.SET_JOB_ENDPOINT](stateCopy, 'job/21312321.json');
+ mutations[types.SET_JOB_LOG_OPTIONS](stateCopy, {
+ endpoint: '/group1/project1/-/jobs/99.json',
+ pagePath: '/group1/project1/-/jobs/99',
+ });
- expect(stateCopy.jobEndpoint).toEqual('job/21312321.json');
+ expect(stateCopy).toMatchObject({
+ jobLogEndpoint: '/group1/project1/-/jobs/99',
+ jobEndpoint: '/group1/project1/-/jobs/99.json',
+ });
});
});
@@ -39,13 +45,13 @@ describe('Jobs Store Mutations', () => {
describe('RECEIVE_JOB_LOG_SUCCESS', () => {
describe('when job log has state', () => {
it('sets jobLogState', () => {
- const stateLog =
+ const logState =
'eyJvZmZzZXQiOjczNDQ1MSwibl9vcGVuX3RhZ3MiOjAsImZnX2NvbG9yIjpudWxsLCJiZ19jb2xvciI6bnVsbCwic3R5bGVfbWFzayI6MH0=';
mutations[types.RECEIVE_JOB_LOG_SUCCESS](stateCopy, {
- state: stateLog,
+ state: logState,
});
- expect(stateCopy.jobLogState).toEqual(stateLog);
+ expect(stateCopy.jobLogState).toEqual(logState);
});
});
@@ -100,7 +106,7 @@ describe('Jobs Store Mutations', () => {
{
offset: 1,
content: [{ text: 'Running with gitlab-runner 11.12.1 (5a147c92)' }],
- lineNumber: 0,
+ lineNumber: 1,
},
]);
});
@@ -121,7 +127,7 @@ describe('Jobs Store Mutations', () => {
{
offset: 0,
content: [{ text: 'Running with gitlab-runner 11.11.1 (5a147c92)' }],
- lineNumber: 0,
+ lineNumber: 1,
},
]);
});
diff --git a/spec/frontend/ci/job_details/store/utils_spec.js b/spec/frontend/ci/job_details/store/utils_spec.js
index 4ffba35761e..394ce0ab737 100644
--- a/spec/frontend/ci/job_details/store/utils_spec.js
+++ b/spec/frontend/ci/job_details/store/utils_spec.js
@@ -6,10 +6,10 @@ import {
addDurationToHeader,
isCollapsibleSection,
findOffsetAndRemove,
- getIncrementalLineNumber,
+ getNextLineNumber,
} from '~/ci/job_details/store/utils';
import {
- utilsMockData,
+ mockJobLog,
originalTrace,
regularIncremental,
regularIncrementalRepeated,
@@ -187,39 +187,49 @@ describe('Jobs Store Utils', () => {
let result;
beforeEach(() => {
- result = logLinesParser(utilsMockData);
+ result = logLinesParser(mockJobLog);
});
describe('regular line', () => {
it('adds a lineNumber property with correct index', () => {
- expect(result[0].lineNumber).toEqual(0);
- expect(result[1].line.lineNumber).toEqual(1);
+ expect(result[0].lineNumber).toEqual(1);
+ expect(result[1].lineNumber).toEqual(2);
+ expect(result[2].line.lineNumber).toEqual(3);
+ expect(result[2].lines[0].lineNumber).toEqual(4);
+ expect(result[2].lines[1].lineNumber).toEqual(5);
+ expect(result[3].line.lineNumber).toEqual(6);
+ expect(result[3].lines[0].lineNumber).toEqual(7);
+ expect(result[3].lines[1].lineNumber).toEqual(8);
});
});
describe('collapsible section', () => {
it('adds a `isClosed` property', () => {
- expect(result[1].isClosed).toEqual(false);
+ expect(result[2].isClosed).toEqual(false);
+ expect(result[3].isClosed).toEqual(false);
});
it('adds a `isHeader` property', () => {
- expect(result[1].isHeader).toEqual(true);
+ expect(result[2].isHeader).toEqual(true);
+ expect(result[3].isHeader).toEqual(true);
});
it('creates a lines array property with the content of the collapsible section', () => {
- expect(result[1].lines.length).toEqual(2);
- expect(result[1].lines[0].content).toEqual(utilsMockData[2].content);
- expect(result[1].lines[1].content).toEqual(utilsMockData[3].content);
+ expect(result[2].lines.length).toEqual(2);
+ expect(result[2].lines[0].content).toEqual(mockJobLog[3].content);
+ expect(result[2].lines[1].content).toEqual(mockJobLog[4].content);
});
});
describe('section duration', () => {
it('adds the section information to the header section', () => {
- expect(result[1].line.section_duration).toEqual(utilsMockData[4].section_duration);
+ expect(result[2].line.section_duration).toEqual(mockJobLog[5].section_duration);
+ expect(result[3].line.section_duration).toEqual(mockJobLog[9].section_duration);
});
it('does not add section duration as a line', () => {
- expect(result[1].lines.includes(utilsMockData[4])).toEqual(false);
+ expect(result[2].lines.includes(mockJobLog[5])).toEqual(false);
+ expect(result[3].lines.includes(mockJobLog[9])).toEqual(false);
});
});
});
@@ -316,17 +326,24 @@ describe('Jobs Store Utils', () => {
});
});
- describe('getIncrementalLineNumber', () => {
- describe('when last line is 0', () => {
+ describe('getNextLineNumber', () => {
+ describe('when there is no previous log', () => {
+ it('returns 1', () => {
+ expect(getNextLineNumber([])).toEqual(1);
+ expect(getNextLineNumber(undefined)).toEqual(1);
+ });
+ });
+
+ describe('when last line is 1', () => {
it('returns 1', () => {
const log = [
{
content: [],
- lineNumber: 0,
+ lineNumber: 1,
},
];
- expect(getIncrementalLineNumber(log)).toEqual(1);
+ expect(getNextLineNumber(log)).toEqual(2);
});
});
@@ -343,7 +360,7 @@ describe('Jobs Store Utils', () => {
},
];
- expect(getIncrementalLineNumber(log)).toEqual(102);
+ expect(getNextLineNumber(log)).toEqual(102);
});
});
@@ -364,7 +381,7 @@ describe('Jobs Store Utils', () => {
},
];
- expect(getIncrementalLineNumber(log)).toEqual(102);
+ expect(getNextLineNumber(log)).toEqual(102);
});
});
@@ -391,7 +408,7 @@ describe('Jobs Store Utils', () => {
},
];
- expect(getIncrementalLineNumber(log)).toEqual(104);
+ expect(getNextLineNumber(log)).toEqual(104);
});
});
});
@@ -410,7 +427,7 @@ describe('Jobs Store Utils', () => {
text: 'Downloading',
},
],
- lineNumber: 0,
+ lineNumber: 1,
},
{
offset: 2,
@@ -419,7 +436,7 @@ describe('Jobs Store Utils', () => {
text: 'log line',
},
],
- lineNumber: 1,
+ lineNumber: 2,
},
]);
});
@@ -438,7 +455,7 @@ describe('Jobs Store Utils', () => {
text: 'log line',
},
],
- lineNumber: 0,
+ lineNumber: 1,
},
]);
});
@@ -462,7 +479,7 @@ describe('Jobs Store Utils', () => {
},
],
section: 'section',
- lineNumber: 0,
+ lineNumber: 1,
},
lines: [],
},
@@ -488,7 +505,7 @@ describe('Jobs Store Utils', () => {
},
],
section: 'section',
- lineNumber: 0,
+ lineNumber: 1,
},
lines: [
{
@@ -499,7 +516,7 @@ describe('Jobs Store Utils', () => {
},
],
section: 'section',
- lineNumber: 1,
+ lineNumber: 2,
},
],
},
diff --git a/spec/frontend/ci/jobs_page/components/job_cells/job_cell_spec.js b/spec/frontend/ci/jobs_page/components/job_cells/job_cell_spec.js
index cb8f6ed8f9b..bb44d970bd7 100644
--- a/spec/frontend/ci/jobs_page/components/job_cells/job_cell_spec.js
+++ b/spec/frontend/ci/jobs_page/components/job_cells/job_cell_spec.js
@@ -40,20 +40,20 @@ describe('Job Cell', () => {
};
describe('Job Id', () => {
- it('displays the job id and links to the job', () => {
+ it('displays the job id, job name and links to the job', () => {
createComponent();
- const expectedJobId = `#${getIdFromGraphQLId(mockJob.id)}`;
+ const expectedJobId = `#${getIdFromGraphQLId(mockJob.id)}: ${mockJob.name}`;
expect(findJobIdLink().text()).toBe(expectedJobId);
expect(findJobIdLink().attributes('href')).toBe(mockJob.detailedStatus.detailsPath);
expect(findJobIdNoLink().exists()).toBe(false);
});
- it('display the job id with no link', () => {
+ it('display the job id and job name with no link', () => {
createComponent(jobAsGuest);
- const expectedJobId = `#${getIdFromGraphQLId(jobAsGuest.id)}`;
+ const expectedJobId = `#${getIdFromGraphQLId(jobAsGuest.id)}: ${jobAsGuest.name}`;
expect(findJobIdNoLink().text()).toBe(expectedJobId);
expect(findJobIdNoLink().exists()).toBe(true);
diff --git a/spec/frontend/ci/jobs_page/components/job_cells/duration_cell_spec.js b/spec/frontend/ci/jobs_page/components/job_cells/status_cell_spec.js
index 21f14ba0c98..e66942cc730 100644
--- a/spec/frontend/ci/jobs_page/components/job_cells/duration_cell_spec.js
+++ b/spec/frontend/ci/jobs_page/components/job_cells/status_cell_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import DurationCell from '~/ci/jobs_page/components/job_cells/duration_cell.vue';
+import StatusCell from '~/ci/jobs_page/components/job_cells/status_cell.vue';
describe('Duration Cell', () => {
let wrapper;
@@ -12,7 +12,7 @@ describe('Duration Cell', () => {
const createComponent = (props) => {
wrapper = extendedWrapper(
- shallowMount(DurationCell, {
+ shallowMount(StatusCell, {
propsData: {
job: {
...props,
diff --git a/spec/frontend/ci/jobs_page/components/jobs_table_empty_state_spec.js b/spec/frontend/ci/jobs_page/components/jobs_table_empty_state_spec.js
index f4893c4077f..0f85c4590ec 100644
--- a/spec/frontend/ci/jobs_page/components/jobs_table_empty_state_spec.js
+++ b/spec/frontend/ci/jobs_page/components/jobs_table_empty_state_spec.js
@@ -6,7 +6,7 @@ describe('Jobs table empty state', () => {
let wrapper;
const pipelineEditorPath = '/root/project/-/ci/editor';
- const emptyStateSvgPath = 'assets/jobs-empty-state.svg';
+ const emptyStateSvgPath = 'illustrations/empty-state/empty-pipeline-md.svg';
const findEmptyState = () => wrapper.findComponent(GlEmptyState);
diff --git a/spec/frontend/ci/jobs_page/components/jobs_table_spec.js b/spec/frontend/ci/jobs_page/components/jobs_table_spec.js
index 3adb95bf371..d4e0ce92bc2 100644
--- a/spec/frontend/ci/jobs_page/components/jobs_table_spec.js
+++ b/spec/frontend/ci/jobs_page/components/jobs_table_spec.js
@@ -2,6 +2,7 @@ import { GlTable } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import JobsTable from '~/ci/jobs_page/components/jobs_table.vue';
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import CiBadgeLink from '~/vue_shared/components/ci_badge_link.vue';
import { DEFAULT_FIELDS_ADMIN } from '~/ci/admin/jobs_table/constants';
import ProjectCell from '~/ci/admin/jobs_table/components/cells/project_cell.vue';
@@ -47,11 +48,11 @@ describe('Jobs Table', () => {
expect(findCiBadgeLink().exists()).toBe(true);
});
- it('displays the job stage and name', () => {
+ it('displays the job stage, id and name', () => {
const [firstJob] = mockJobsNodes;
- expect(findJobStage().text()).toBe(firstJob.stage.name);
- expect(findJobName().text()).toBe(firstJob.name);
+ expect(findJobStage().text()).toBe(`Stage: ${firstJob.stage.name}`);
+ expect(findJobName().text()).toBe(`#${getIdFromGraphQLId(firstJob.id)}: ${firstJob.name}`);
});
it('displays the coverage for only jobs that have coverage', () => {
diff --git a/spec/frontend/ci/pipeline_details/graph/components/job_item_spec.js b/spec/frontend/ci/pipeline_details/graph/components/job_item_spec.js
index 107f0df5c02..de9ee8a16bf 100644
--- a/spec/frontend/ci/pipeline_details/graph/components/job_item_spec.js
+++ b/spec/frontend/ci/pipeline_details/graph/components/job_item_spec.js
@@ -1,10 +1,11 @@
import MockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
-import { GlBadge, GlModal, GlToast } from '@gitlab/ui';
+import { GlModal, GlToast } from '@gitlab/ui';
import JobItem from '~/ci/pipeline_details/graph/components/job_item.vue';
import axios from '~/lib/utils/axios_utils';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import ActionComponent from '~/ci/common/private/job_action_component.vue';
+import CiBadgeLink from '~/vue_shared/components/ci_badge_link.vue';
import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
import {
@@ -27,9 +28,10 @@ describe('pipeline graph job item', () => {
const findJobWithoutLink = () => wrapper.findByTestId('job-without-link');
const findJobWithLink = () => wrapper.findByTestId('job-with-link');
const findActionVueComponent = () => wrapper.findComponent(ActionComponent);
- const findActionComponent = () => wrapper.findByTestId('ci-action-component');
- const findBadge = () => wrapper.findComponent(GlBadge);
+ const findActionComponent = () => wrapper.findByTestId('ci-action-button');
+ const findBadge = () => wrapper.findByTestId('job-bridge-badge');
const findJobLink = () => wrapper.findByTestId('job-with-link');
+ const findJobCiBadge = () => wrapper.findComponent(CiBadgeLink);
const findModal = () => wrapper.findComponent(GlModal);
const clickOnModalPrimaryBtn = () => findModal().vm.$emit('primary');
@@ -57,6 +59,9 @@ describe('pipeline graph job item', () => {
mocks: {
...mocks,
},
+ stubs: {
+ CiBadgeLink,
+ },
});
};
@@ -81,7 +86,8 @@ describe('pipeline graph job item', () => {
expect(link.attributes('title')).toBe(`${mockJob.name} - ${mockJob.status.label}`);
- expect(wrapper.find('.ci-status-icon-success').exists()).toBe(true);
+ expect(findJobCiBadge().exists()).toBe(true);
+ expect(findJobCiBadge().find('.ci-status-icon-success').exists()).toBe(true);
expect(wrapper.text()).toBe(mockJob.name);
});
@@ -99,7 +105,8 @@ describe('pipeline graph job item', () => {
});
it('should render status and name', () => {
- expect(wrapper.find('.ci-status-icon-success').exists()).toBe(true);
+ expect(findJobCiBadge().exists()).toBe(true);
+ expect(findJobCiBadge().find('.ci-status-icon-success').exists()).toBe(true);
expect(findJobLink().exists()).toBe(false);
expect(wrapper.text()).toBe(mockJobWithoutDetails.name);
@@ -110,6 +117,15 @@ describe('pipeline graph job item', () => {
});
});
+ describe('CiBadgeLink', () => {
+ it('should not render a link', () => {
+ createWrapper();
+
+ expect(findJobCiBadge().exists()).toBe(true);
+ expect(findJobCiBadge().props('useLink')).toBe(false);
+ });
+ });
+
describe('action icon', () => {
it('should render the action icon', () => {
createWrapper();
diff --git a/spec/frontend/ci/pipeline_details/graph/components/linked_pipeline_spec.js b/spec/frontend/ci/pipeline_details/graph/components/linked_pipeline_spec.js
index 5541b0db54a..5fe8581e81b 100644
--- a/spec/frontend/ci/pipeline_details/graph/components/linked_pipeline_spec.js
+++ b/spec/frontend/ci/pipeline_details/graph/components/linked_pipeline_spec.js
@@ -37,7 +37,7 @@ describe('Linked pipeline', () => {
const findButton = () => wrapper.findComponent(GlButton);
const findCancelButton = () => wrapper.findByLabelText('Cancel downstream pipeline');
const findCardTooltip = () => wrapper.findComponent(GlTooltip);
- const findDownstreamPipelineTitle = () => wrapper.findByTestId('downstream-title');
+ const findDownstreamPipelineTitle = () => wrapper.findByTestId('downstream-title-content');
const findExpandButton = () => wrapper.findByTestId('expand-pipeline-button');
const findLinkedPipeline = () => wrapper.findComponent({ ref: 'linkedPipeline' });
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
diff --git a/spec/frontend/ci/pipeline_details/mock_data.js b/spec/frontend/ci/pipeline_details/mock_data.js
index e32d0a0df47..56365622544 100644
--- a/spec/frontend/ci/pipeline_details/mock_data.js
+++ b/spec/frontend/ci/pipeline_details/mock_data.js
@@ -640,7 +640,7 @@ export const mockPipeline = (projectPath) => {
triggered_by: null,
triggered: [],
},
- pipelineScheduleUrl: 'foo',
+ pipelineSchedulesPath: 'foo',
pipelineKey: 'id',
viewType: 'root',
};
@@ -865,7 +865,7 @@ export const mockPipelineTag = () => {
triggered_by: null,
triggered: [],
},
- pipelineScheduleUrl: 'foo',
+ pipelineSchedulesPath: 'foo',
pipelineKey: 'id',
viewType: 'root',
};
@@ -1072,7 +1072,7 @@ export const mockPipelineBranch = () => {
triggered_by: null,
triggered: [],
},
- pipelineScheduleUrl: 'foo',
+ pipelineSchedulesPath: 'foo',
pipelineKey: 'id',
viewType: 'root',
};
diff --git a/spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js b/spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js
index 1a2ed60a6f4..9bb0618b758 100644
--- a/spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js
@@ -1,4 +1,4 @@
-import { GlIcon, GlLink, GlLoadingIcon, GlSprintf } from '@gitlab/ui';
+import { GlIcon, GlLoadingIcon, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
@@ -43,7 +43,7 @@ describe('Pipeline Status', () => {
},
projectFullPath: mockProjectFullPath,
},
- stubs: { GlLink, GlSprintf },
+ stubs: { GlSprintf },
});
};
diff --git a/spec/frontend/ci/pipeline_mini_graph/legacy_pipeline_stage_spec.js b/spec/frontend/ci/pipeline_mini_graph/legacy_pipeline_stage_spec.js
index 30a0b868c5f..4b357a9fc7c 100644
--- a/spec/frontend/ci/pipeline_mini_graph/legacy_pipeline_stage_spec.js
+++ b/spec/frontend/ci/pipeline_mini_graph/legacy_pipeline_stage_spec.js
@@ -2,7 +2,7 @@ import { GlDropdown } from '@gitlab/ui';
import { nextTick } from 'vue';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CiBadgeLink from '~/vue_shared/components/ci_badge_link.vue';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import LegacyPipelineStage from '~/ci/pipeline_mini_graph/legacy_pipeline_stage.vue';
@@ -52,7 +52,7 @@ describe('Pipelines stage component', () => {
});
const findCiActionBtn = () => wrapper.find('.js-ci-action');
- const findCiIcon = () => wrapper.findComponent(CiIcon);
+ const findCiIcon = () => wrapper.findComponent(CiBadgeLink);
const findDropdown = () => wrapper.findComponent(GlDropdown);
const findDropdownToggle = () => wrapper.find('button.dropdown-toggle');
const findDropdownMenu = () =>
@@ -106,17 +106,6 @@ describe('Pipelines stage component', () => {
expect(findDropdownToggle().exists()).toBe(true);
expect(findCiIcon().exists()).toBe(true);
});
-
- it('renders a borderless ci-icon', () => {
- expect(findCiIcon().exists()).toBe(true);
- expect(findCiIcon().props('isBorderless')).toBe(true);
- expect(findCiIcon().classes('borderless')).toBe(true);
- });
-
- it('renders a ci-icon with a custom border class', () => {
- expect(findCiIcon().exists()).toBe(true);
- expect(findCiIcon().classes('gl-border')).toBe(true);
- });
});
describe('when user opens dropdown and stage request is successful', () => {
diff --git a/spec/frontend/ci/pipeline_mini_graph/linked_pipelines_mini_list_spec.js b/spec/frontend/ci/pipeline_mini_graph/linked_pipelines_mini_list_spec.js
index 0396029cdaf..3c9d235bfcc 100644
--- a/spec/frontend/ci/pipeline_mini_graph/linked_pipelines_mini_list_spec.js
+++ b/spec/frontend/ci/pipeline_mini_graph/linked_pipelines_mini_list_spec.js
@@ -50,19 +50,6 @@ describe('Linked pipeline mini list', () => {
expect(findCiIcon().exists()).toBe(true);
});
- it('should render a borderless ci-icon', () => {
- expect(findCiIcon().exists()).toBe(true);
-
- expect(findCiIcon().props('isBorderless')).toBe(true);
- expect(findCiIcon().classes('borderless')).toBe(true);
- });
-
- it('should render a ci-icon with a custom border class', () => {
- expect(findCiIcon().exists()).toBe(true);
-
- expect(findCiIcon().classes('gl-border')).toBe(true);
- });
-
it('should render the correct ci status icon', () => {
expect(findCiIcon().classes('ci-status-icon-running')).toBe(true);
});
@@ -124,19 +111,6 @@ describe('Linked pipeline mini list', () => {
expect(findLinkedPipelineMiniList().classes('is-downstream')).toBe(true);
});
- it('should render a borderless ci-icon', () => {
- expect(findCiIcon().exists()).toBe(true);
-
- expect(findCiIcon().props('isBorderless')).toBe(true);
- expect(findCiIcon().classes('borderless')).toBe(true);
- });
-
- it('should render a ci-icon with a custom border class', () => {
- expect(findCiIcon().exists()).toBe(true);
-
- expect(findCiIcon().classes('gl-border')).toBe(true);
- });
-
it('should render the pipeline counter', () => {
expect(findLinkedPipelineCounter().exists()).toBe(true);
});
diff --git a/spec/frontend/ci/pipeline_new/components/pipeline_new_form_spec.js b/spec/frontend/ci/pipeline_new/components/pipeline_new_form_spec.js
index 1d4ae33c667..2807cc0f2a1 100644
--- a/spec/frontend/ci/pipeline_new/components/pipeline_new_form_spec.js
+++ b/spec/frontend/ci/pipeline_new/components/pipeline_new_form_spec.js
@@ -55,12 +55,12 @@ describe('Pipeline New Form', () => {
const findForm = () => wrapper.findComponent(GlForm);
const findRefsDropdown = () => wrapper.findComponent(RefsDropdown);
- const findSubmitButton = () => wrapper.findByTestId('run_pipeline_button');
- const findVariableRows = () => wrapper.findAllByTestId('ci-variable-row');
+ const findSubmitButton = () => wrapper.findByTestId('run-pipeline-button');
+ const findVariableRows = () => wrapper.findAllByTestId('ci-variable-row-container');
const findRemoveIcons = () => wrapper.findAllByTestId('remove-ci-variable-row');
const findVariableTypes = () => wrapper.findAllByTestId('pipeline-form-ci-variable-type');
- const findKeyInputs = () => wrapper.findAllByTestId('pipeline-form-ci-variable-key');
- const findValueInputs = () => wrapper.findAllByTestId('pipeline-form-ci-variable-value');
+ const findKeyInputs = () => wrapper.findAllByTestId('pipeline-form-ci-variable-key-field');
+ const findValueInputs = () => wrapper.findAllByTestId('pipeline-form-ci-variable-value-field');
const findValueDropdowns = () =>
wrapper.findAllByTestId('pipeline-form-ci-variable-value-dropdown');
const findValueDropdownItems = (dropdown) => dropdown.findAllComponents(GlDropdownItem);
diff --git a/spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_empty_state_spec.js b/spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_empty_state_spec.js
new file mode 100644
index 00000000000..5ad0f915f62
--- /dev/null
+++ b/spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_empty_state_spec.js
@@ -0,0 +1,37 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlEmptyState, GlLink, GlSprintf } from '@gitlab/ui';
+import PipelineSchedulesEmptyState from '~/ci/pipeline_schedules/components/pipeline_schedules_empty_state.vue';
+
+describe('Pipeline Schedules Empty State', () => {
+ let wrapper;
+
+ const mockSchedulePath = 'root/test/-/pipeline_schedules/new"';
+
+ const createComponent = () => {
+ wrapper = shallowMount(PipelineSchedulesEmptyState, {
+ provide: {
+ newSchedulePath: mockSchedulePath,
+ },
+ stubs: { GlSprintf },
+ });
+ };
+
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+ const findLink = () => wrapper.findComponent(GlLink);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('shows empty state', () => {
+ expect(findEmptyState().exists()).toBe(true);
+ });
+
+ it('has link to create new schedule', () => {
+ expect(findEmptyState().props('primaryButtonLink')).toBe(mockSchedulePath);
+ });
+
+ it('has link to help documentation', () => {
+ expect(findLink().attributes('href')).toBe('/help/ci/pipelines/schedules');
+ });
+});
diff --git a/spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_spec.js b/spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_spec.js
index eb76b0bfbb4..d1844d609f2 100644
--- a/spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_spec.js
+++ b/spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_spec.js
@@ -1,4 +1,4 @@
-import { GlAlert, GlEmptyState, GlLink, GlLoadingIcon, GlTabs } from '@gitlab/ui';
+import { GlAlert, GlEmptyState, GlLink, GlLoadingIcon, GlPagination, GlTabs } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import { trimText } from 'helpers/text_helper';
@@ -14,6 +14,7 @@ import deletePipelineScheduleMutation from '~/ci/pipeline_schedules/graphql/muta
import playPipelineScheduleMutation from '~/ci/pipeline_schedules/graphql/mutations/play_pipeline_schedule.mutation.graphql';
import takeOwnershipMutation from '~/ci/pipeline_schedules/graphql/mutations/take_ownership.mutation.graphql';
import getPipelineSchedulesQuery from '~/ci/pipeline_schedules/graphql/queries/get_pipeline_schedules.query.graphql';
+import { SCHEDULES_PER_PAGE } from '~/ci/pipeline_schedules/constants';
import {
mockGetPipelineSchedulesGraphQLResponse,
mockPipelineScheduleNodes,
@@ -22,6 +23,7 @@ import {
playMutationResponse,
takeOwnershipMutationResponse,
emptyPipelineSchedulesResponse,
+ mockPipelineSchedulesResponseWithPagination,
} from '../mock_data';
Vue.use(VueApollo);
@@ -34,6 +36,9 @@ describe('Pipeline schedules app', () => {
let wrapper;
const successHandler = jest.fn().mockResolvedValue(mockGetPipelineSchedulesGraphQLResponse);
+ const successHandlerWithPagination = jest
+ .fn()
+ .mockResolvedValue(mockPipelineSchedulesResponseWithPagination);
const successEmptyHandler = jest.fn().mockResolvedValue(emptyPipelineSchedulesResponse);
const failedHandler = jest.fn().mockRejectedValue(new Error('GraphQL error'));
@@ -81,6 +86,11 @@ describe('Pipeline schedules app', () => {
const findInactiveTab = () => wrapper.findByTestId('pipeline-schedules-inactive-tab');
const findSchedulesCharacteristics = () =>
wrapper.findByTestId('pipeline-schedules-characteristics');
+ const findPagination = () => wrapper.findComponent(GlPagination);
+ const setPage = async (page) => {
+ findPagination().vm.$emit('input', page);
+ await waitForPromises();
+ };
describe('default', () => {
beforeEach(() => {
@@ -107,6 +117,10 @@ describe('Pipeline schedules app', () => {
it('new schedule button links to new schedule path', () => {
expect(findNewButton().attributes('href')).toBe('/root/ci-project/-/pipeline_schedules/new');
});
+
+ it('does not display pagination when no next page exists', () => {
+ expect(findPagination().exists()).toBe(false);
+ });
});
describe('fetching pipeline schedules', () => {
@@ -333,6 +347,10 @@ describe('Pipeline schedules app', () => {
ids: null,
projectPath: 'gitlab-org/gitlab',
status: null,
+ first: SCHEDULES_PER_PAGE,
+ last: null,
+ nextPageCursor: '',
+ prevPageCursor: '',
});
});
});
@@ -370,4 +388,57 @@ describe('Pipeline schedules app', () => {
});
});
});
+
+ describe('pagination', () => {
+ const { pageInfo } = mockPipelineSchedulesResponseWithPagination.data.project.pipelineSchedules;
+
+ beforeEach(async () => {
+ createComponent([[getPipelineSchedulesQuery, successHandlerWithPagination]]);
+
+ await waitForPromises();
+ });
+
+ it('displays pagination', () => {
+ expect(findPagination().exists()).toBe(true);
+ expect(findPagination().props()).toMatchObject({
+ value: 1,
+ prevPage: Number(pageInfo.hasPreviousPage),
+ nextPage: Number(pageInfo.hasNextPage),
+ });
+ expect(successHandlerWithPagination).toHaveBeenCalledWith({
+ projectPath: 'gitlab-org/gitlab',
+ ids: null,
+ first: SCHEDULES_PER_PAGE,
+ last: null,
+ nextPageCursor: '',
+ prevPageCursor: '',
+ });
+ });
+
+ it('updates query variables when going to next page', async () => {
+ await setPage(2);
+
+ expect(successHandlerWithPagination).toHaveBeenCalledWith({
+ projectPath: 'gitlab-org/gitlab',
+ ids: null,
+ first: SCHEDULES_PER_PAGE,
+ last: null,
+ prevPageCursor: '',
+ nextPageCursor: pageInfo.endCursor,
+ });
+ expect(findPagination().props('value')).toEqual(2);
+ });
+
+ it('when switching tabs pagination should reset', async () => {
+ await setPage(2);
+
+ expect(findPagination().props('value')).toEqual(2);
+
+ await findInactiveTab().trigger('click');
+
+ await waitForPromises();
+
+ expect(findPagination().props('value')).toEqual(1);
+ });
+ });
});
diff --git a/spec/frontend/ci/pipeline_schedules/mock_data.js b/spec/frontend/ci/pipeline_schedules/mock_data.js
index 711b120c61e..1bff296305d 100644
--- a/spec/frontend/ci/pipeline_schedules/mock_data.js
+++ b/spec/frontend/ci/pipeline_schedules/mock_data.js
@@ -48,6 +48,26 @@ export const mockSinglePipelineScheduleNodeNoVars = {
},
};
+export const mockPipelineSchedulesResponseWithPagination = {
+ data: {
+ currentUser: mockGetPipelineSchedulesGraphQLResponse.data.currentUser,
+ project: {
+ id: mockGetPipelineSchedulesGraphQLResponse.data.project.id,
+ pipelineSchedules: {
+ count: 3,
+ nodes: mockGetPipelineSchedulesGraphQLResponse.data.project.pipelineSchedules.nodes,
+ pageInfo: {
+ hasNextPage: true,
+ hasPreviousPage: false,
+ startCursor: 'eyJpZCI6IjQ0In0',
+ endCursor: 'eyJpZCI6IjI4In0',
+ __typename: 'PageInfo',
+ },
+ },
+ },
+ },
+};
+
export const emptyPipelineSchedulesResponse = {
data: {
currentUser: {
@@ -59,6 +79,13 @@ export const emptyPipelineSchedulesResponse = {
pipelineSchedules: {
count: 0,
nodes: [],
+ pageInfo: {
+ hasNextPage: false,
+ hasPreviousPage: false,
+ startCursor: '',
+ endCursor: '',
+ __typename: 'PageInfo',
+ },
},
},
},
diff --git a/spec/frontend/ci/pipelines_page/components/pipeline_labels_spec.js b/spec/frontend/ci/pipelines_page/components/pipeline_labels_spec.js
index b5c9a3030e0..6b0d5b18f7d 100644
--- a/spec/frontend/ci/pipelines_page/components/pipeline_labels_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/pipeline_labels_spec.js
@@ -15,6 +15,7 @@ describe('Pipeline label component', () => {
const findAutoDevopsTag = () => wrapper.findByTestId('pipeline-url-autodevops');
const findAutoDevopsTagLink = () => wrapper.findByTestId('pipeline-url-autodevops-link');
const findDetachedTag = () => wrapper.findByTestId('pipeline-url-detached');
+ const findMergedResultsTag = () => wrapper.findByTestId('pipeline-url-merged-results');
const findFailureTag = () => wrapper.findByTestId('pipeline-url-failure');
const findForkTag = () => wrapper.findByTestId('pipeline-url-fork');
const findTrainTag = () => wrapper.findByTestId('pipeline-url-train');
@@ -25,6 +26,7 @@ describe('Pipeline label component', () => {
wrapper = shallowMountExtended(PipelineLabelsComponent, {
propsData: { ...defaultProps, ...props },
provide: {
+ pipelineSchedulesPath: 'group/project/-/schedules',
targetProjectFullPath: projectPath,
},
});
@@ -41,6 +43,7 @@ describe('Pipeline label component', () => {
expect(findScheduledTag().exists()).toBe(false);
expect(findForkTag().exists()).toBe(false);
expect(findTrainTag().exists()).toBe(false);
+ expect(findMergedResultsTag().exists()).toBe(false);
});
it('should render the stuck tag when flag is provided', () => {
@@ -140,9 +143,33 @@ describe('Pipeline label component', () => {
expect(findForkTag().text()).toBe('fork');
});
+ it('should render the merged results badge when the pipeline is a merged results pipeline', () => {
+ const mergedResultsPipeline = defaultProps.pipeline;
+ mergedResultsPipeline.flags.merged_result_pipeline = true;
+
+ createComponent({
+ ...mergedResultsPipeline,
+ });
+
+ expect(findMergedResultsTag().text()).toBe('merged results');
+ });
+
+ it('should not render the merged results badge when the pipeline is not a merged results pipeline', () => {
+ const mergedResultsPipeline = defaultProps.pipeline;
+ mergedResultsPipeline.flags.merged_result_pipeline = false;
+
+ createComponent({
+ ...mergedResultsPipeline,
+ });
+
+ expect(findMergedResultsTag().exists()).toBe(false);
+ });
+
it('should render the train badge when the pipeline is a merge train pipeline', () => {
const mergeTrainPipeline = defaultProps.pipeline;
mergeTrainPipeline.flags.merge_train_pipeline = true;
+ // a merge train pipeline is also a merged results pipeline
+ mergeTrainPipeline.flags.merged_result_pipeline = true;
createComponent({
...mergeTrainPipeline,
@@ -161,4 +188,17 @@ describe('Pipeline label component', () => {
expect(findTrainTag().exists()).toBe(false);
});
+
+ it('should not render the merged results badge when the pipeline is a merge train pipeline', () => {
+ const mergeTrainPipeline = defaultProps.pipeline;
+ mergeTrainPipeline.flags.merge_train_pipeline = true;
+ // a merge train pipeline is also a merged results pipeline
+ mergeTrainPipeline.flags.merged_result_pipeline = true;
+
+ createComponent({
+ ...mergeTrainPipeline,
+ });
+
+ expect(findMergedResultsTag().exists()).toBe(false);
+ });
});
diff --git a/spec/frontend/ci/pipelines_page/components/pipeline_operations_spec.js b/spec/frontend/ci/pipelines_page/components/pipeline_operations_spec.js
index d2eab64b317..6205a37e291 100644
--- a/spec/frontend/ci/pipelines_page/components/pipeline_operations_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/pipeline_operations_spec.js
@@ -1,10 +1,13 @@
+import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import PipelinesManualActions from '~/ci/pipelines_page/components/pipelines_manual_actions.vue';
import PipelineMultiActions from '~/ci/pipelines_page/components/pipeline_multi_actions.vue';
import PipelineOperations from '~/ci/pipelines_page/components/pipeline_operations.vue';
-import eventHub from '~/ci/event_hub';
+import PipelineStopModal from '~/ci/pipelines_page/components/pipeline_stop_modal.vue';
+import { TRACKING_CATEGORIES } from '~/ci/constants';
describe('Pipeline operations', () => {
+ let trackingSpy;
let wrapper;
const defaultProps = {
@@ -36,6 +39,7 @@ describe('Pipeline operations', () => {
const findMultiActions = () => wrapper.findComponent(PipelineMultiActions);
const findRetryBtn = () => wrapper.findByTestId('pipelines-retry-button');
const findCancelBtn = () => wrapper.findByTestId('pipelines-cancel-button');
+ const findPipelineStopModal = () => wrapper.findComponent(PipelineStopModal);
it('should display pipeline manual actions', () => {
createComponent();
@@ -49,28 +53,71 @@ describe('Pipeline operations', () => {
expect(findMultiActions().exists()).toBe(true);
});
+ it('does not show the confirmation modal', () => {
+ createComponent();
+
+ expect(findPipelineStopModal().props().showConfirmationModal).toBe(false);
+ });
+
+ describe('when cancelling a pipeline', () => {
+ beforeEach(async () => {
+ createComponent();
+ await findCancelBtn().vm.$emit('click');
+ });
+
+ it('should show a confirmation modal', () => {
+ expect(findPipelineStopModal().props().showConfirmationModal).toBe(true);
+ });
+
+ it('should emit cancel-pipeline event when confirming', async () => {
+ await findPipelineStopModal().vm.$emit('submit');
+
+ expect(wrapper.emitted('cancel-pipeline')).toEqual([[defaultProps.pipeline]]);
+ expect(findPipelineStopModal().props().showConfirmationModal).toBe(false);
+ });
+
+ it('should hide the modal when closing', async () => {
+ await findPipelineStopModal().vm.$emit('close-modal');
+
+ expect(findPipelineStopModal().props().showConfirmationModal).toBe(false);
+ });
+ });
+
describe('events', () => {
beforeEach(() => {
createComponent();
-
- jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
});
it('should emit retryPipeline event', () => {
findRetryBtn().vm.$emit('click');
- expect(eventHub.$emit).toHaveBeenCalledWith(
- 'retryPipeline',
- defaultProps.pipeline.retry_path,
- );
+ expect(wrapper.emitted('retry-pipeline')).toEqual([[defaultProps.pipeline]]);
+ });
+ });
+
+ describe('tracking', () => {
+ beforeEach(() => {
+ createComponent();
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ });
+
+ afterEach(() => {
+ unmockTracking();
+ });
+
+ it('tracks retry pipeline button click', () => {
+ findRetryBtn().vm.$emit('click');
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_retry_button', {
+ label: TRACKING_CATEGORIES.table,
+ });
});
- it('should emit openConfirmationModal event', () => {
+ it('tracks cancel pipeline button click', () => {
findCancelBtn().vm.$emit('click');
- expect(eventHub.$emit).toHaveBeenCalledWith('openConfirmationModal', {
- pipeline: defaultProps.pipeline,
- endpoint: defaultProps.pipeline.cancel_path,
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_cancel_button', {
+ label: TRACKING_CATEGORIES.table,
});
});
});
diff --git a/spec/frontend/ci/pipelines_page/components/pipeline_stop_modal_spec.js b/spec/frontend/ci/pipelines_page/components/pipeline_stop_modal_spec.js
index 4d78a923542..1e276840c07 100644
--- a/spec/frontend/ci/pipelines_page/components/pipeline_stop_modal_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/pipeline_stop_modal_spec.js
@@ -1,15 +1,17 @@
import { shallowMount } from '@vue/test-utils';
-import { GlSprintf } from '@gitlab/ui';
+import { GlModal, GlSprintf } from '@gitlab/ui';
import { mockPipelineHeader } from 'jest/ci/pipeline_details/mock_data';
import PipelineStopModal from '~/ci/pipelines_page/components/pipeline_stop_modal.vue';
describe('PipelineStopModal', () => {
let wrapper;
- const createComponent = () => {
+ const createComponent = ({ props = {} } = {}) => {
wrapper = shallowMount(PipelineStopModal, {
propsData: {
pipeline: mockPipelineHeader,
+ showConfirmationModal: false,
+ ...props,
},
stubs: {
GlSprintf,
@@ -17,11 +19,43 @@ describe('PipelineStopModal', () => {
});
};
+ const findModal = () => wrapper.findComponent(GlModal);
+
beforeEach(() => {
createComponent();
});
- it('should render "stop pipeline" warning', () => {
- expect(wrapper.text()).toMatch(`You’re about to stop pipeline #${mockPipelineHeader.id}.`);
+ describe('when `showConfirmationModal` is false', () => {
+ it('passes the visibility value to the modal', () => {
+ expect(findModal().props().visible).toBe(false);
+ });
+ });
+
+ describe('when `showConfirmationModal` is true', () => {
+ beforeEach(() => {
+ createComponent({ props: { showConfirmationModal: true } });
+ });
+
+ it('passes the visibility value to the modal', () => {
+ expect(findModal().props().visible).toBe(true);
+ });
+
+ it('renders "stop pipeline" warning', () => {
+ expect(wrapper.text()).toMatch(`You're about to stop pipeline #${mockPipelineHeader.id}.`);
+ });
+ });
+
+ describe('events', () => {
+ beforeEach(() => {
+ createComponent({ props: { showConfirmationModal: true } });
+ });
+
+ it('emits the close-modal event when the visibility changes', async () => {
+ expect(wrapper.emitted('close-modal')).toBeUndefined();
+
+ await findModal().vm.$emit('change', false);
+
+ expect(wrapper.emitted('close-modal')).toEqual([[]]);
+ });
});
});
diff --git a/spec/frontend/ci/pipelines_page/pipelines_spec.js b/spec/frontend/ci/pipelines_page/pipelines_spec.js
index 5d1f431e57c..fd95f98e7f8 100644
--- a/spec/frontend/ci/pipelines_page/pipelines_spec.js
+++ b/spec/frontend/ci/pipelines_page/pipelines_spec.js
@@ -28,7 +28,7 @@ import NavigationControls from '~/ci/pipelines_page/components/nav_controls.vue'
import PipelinesComponent from '~/ci/pipelines_page/pipelines.vue';
import PipelinesCiTemplates from '~/ci/pipelines_page/components/empty_state/pipelines_ci_templates.vue';
import PipelinesTableComponent from '~/ci/common/pipelines_table.vue';
-import { RAW_TEXT_WARNING, TRACKING_CATEGORIES } from '~/ci/constants';
+import { PIPELINE_IID_KEY, RAW_TEXT_WARNING, TRACKING_CATEGORIES } from '~/ci/constants';
import Store from '~/ci/pipeline_details/stores/pipelines_store';
import NavigationTabs from '~/vue_shared/components/navigation_tabs.vue';
import TablePagination from '~/vue_shared/components/pagination/table_pagination.vue';
@@ -57,28 +57,23 @@ describe('Pipelines', () => {
let mockApollo;
let mock;
let trackingSpy;
+ let mutationMock;
- const paths = {
- emptyStateSvgPath: '/assets/illustrations/empty-state/empty-pipeline-md.svg',
- errorStateSvgPath: '/assets/illustrations/pipelines_failed.svg',
- noPipelinesSvgPath: '/assets/illustrations/empty-state/empty-pipeline-md.svg',
+ const withPermissionsProps = {
ciLintPath: '/ci/lint',
resetCachePath: `${mockProjectPath}/settings/ci_cd/reset_cache`,
newPipelinePath: `${mockProjectPath}/pipelines/new`,
-
ciRunnerSettingsPath: `${mockProjectPath}/-/settings/ci_cd#js-runners-settings`,
- };
-
- const noPermissions = {
- emptyStateSvgPath: '/assets/illustrations/empty-state/empty-pipeline-md.svg',
- errorStateSvgPath: '/assets/illustrations/pipelines_failed.svg',
- noPipelinesSvgPath: '/assets/illustrations/empty-state/empty-pipeline-md.svg',
+ canCreatePipeline: true,
};
const defaultProps = {
hasGitlabCi: true,
- canCreatePipeline: true,
- ...paths,
+ canCreatePipeline: false,
+ projectId: mockProjectId,
+ defaultBranchName: mockDefaultBranchName,
+ endpoint: mockPipelinesEndpoint,
+ params: {},
};
const findFilteredSearch = () => wrapper.findComponent(GlFilteredSearch);
@@ -87,10 +82,9 @@ describe('Pipelines', () => {
const findNavigationControls = () => wrapper.findComponent(NavigationControls);
const findPipelinesTable = () => wrapper.findComponent(PipelinesTableComponent);
const findTablePagination = () => wrapper.findComponent(TablePagination);
- const findPipelineKeyCollapsibleBoxVue = () => wrapper.findComponent(GlCollapsibleListbox);
+ const findPipelineKeyCollapsibleBox = () => wrapper.findComponent(GlCollapsibleListbox);
const findTab = (tab) => wrapper.findByTestId(`pipelines-tab-${tab}`);
- const findPipelineKeyCollapsibleBox = () => wrapper.findByTestId('pipeline-key-collapsible-box');
const findRunPipelineButton = () => wrapper.findByTestId('run-pipeline-button');
const findCiLintButton = () => wrapper.findByTestId('ci-lint-button');
const findCleanCacheButton = () => wrapper.findByTestId('clear-cache-button');
@@ -98,25 +92,23 @@ describe('Pipelines', () => {
wrapper.find('[data-testid="mini-pipeline-graph-dropdown"] .dropdown-toggle');
const findPipelineUrlLinks = () => wrapper.findAll('[data-testid="pipeline-url-link"]');
- const createComponent = (props = defaultProps) => {
- const { mutationMock, ...restProps } = props;
+ const createComponent = ({ props = {}, withPermissions = true } = {}) => {
mockApollo = createMockApollo([[setSortPreferenceMutation, mutationMock]]);
+ const permissionsProps = withPermissions ? { ...withPermissionsProps } : {};
wrapper = extendedWrapper(
mount(PipelinesComponent, {
provide: {
pipelineEditorPath: '',
suggestedCiTemplates: [],
- ciRunnerSettingsPath: paths.ciRunnerSettingsPath,
+ ciRunnerSettingsPath: defaultProps.ciRunnerSettingsPath,
anyRunnersAvailable: true,
},
propsData: {
+ ...defaultProps,
+ ...permissionsProps,
+ ...props,
store: new Store(),
- projectId: mockProjectId,
- defaultBranchName: mockDefaultBranchName,
- endpoint: mockPipelinesEndpoint,
- params: {},
- ...restProps,
},
apolloProvider: mockApollo,
}),
@@ -124,12 +116,11 @@ describe('Pipelines', () => {
};
beforeEach(() => {
- setWindowLocation(TEST_HOST);
- });
-
- beforeEach(() => {
mock = new MockAdapter(axios);
+ setWindowLocation(TEST_HOST);
+ mutationMock = jest.fn();
+
jest.spyOn(window.history, 'pushState');
jest.spyOn(Api, 'projectUsers').mockResolvedValue(users);
jest.spyOn(Api, 'branches').mockResolvedValue({ data: branches });
@@ -169,7 +160,9 @@ describe('Pipelines', () => {
describe('when user has no permissions', () => {
beforeEach(async () => {
- createComponent({ hasGitlabCi: true, canCreatePipeline: false, ...noPermissions });
+ createComponent({
+ withPermissions: false,
+ });
await waitForPromises();
});
@@ -225,11 +218,13 @@ describe('Pipelines', () => {
});
it('renders Run pipeline link', () => {
- expect(findRunPipelineButton().attributes('href')).toBe(paths.newPipelinePath);
+ expect(findRunPipelineButton().attributes('href')).toBe(
+ withPermissionsProps.newPipelinePath,
+ );
});
it('renders CI lint link', () => {
- expect(findCiLintButton().attributes('href')).toBe(paths.ciLintPath);
+ expect(findCiLintButton().attributes('href')).toBe(withPermissionsProps.ciLintPath);
});
it('renders Clear runner cache button', () => {
@@ -382,7 +377,7 @@ describe('Pipelines', () => {
it('should change the text to Show Pipeline IID', async () => {
expect(findPipelineKeyCollapsibleBox().exists()).toBe(true);
expect(findPipelineUrlLinks().at(0).text()).toBe(`#${mockFilteredPipeline.id}`);
- findPipelineKeyCollapsibleBoxVue().vm.$emit('select', 'iid');
+ findPipelineKeyCollapsibleBox().vm.$emit('select', PIPELINE_IID_KEY);
await waitForPromises();
@@ -390,21 +385,21 @@ describe('Pipelines', () => {
});
it('calls mutation to save idType preference', () => {
- const mutationMock = jest.fn().mockResolvedValue(setIdTypePreferenceMutationResponse);
- createComponent({ ...defaultProps, mutationMock });
+ mutationMock = jest.fn().mockResolvedValue(setIdTypePreferenceMutationResponse);
+ createComponent();
- findPipelineKeyCollapsibleBoxVue().vm.$emit('select', 'iid');
+ findPipelineKeyCollapsibleBox().vm.$emit('select', PIPELINE_IID_KEY);
- expect(mutationMock).toHaveBeenCalledWith({ input: { visibilityPipelineIdType: 'IID' } });
+ expect(mutationMock).toHaveBeenCalledWith({
+ input: { visibilityPipelineIdType: PIPELINE_IID_KEY.toUpperCase() },
+ });
});
it('captures error when mutation response has errors', async () => {
- const mutationMock = jest
- .fn()
- .mockResolvedValue(setIdTypePreferenceMutationResponseWithErrors);
- createComponent({ ...defaultProps, mutationMock });
+ mutationMock = jest.fn().mockResolvedValue(setIdTypePreferenceMutationResponseWithErrors);
+ createComponent();
- findPipelineKeyCollapsibleBoxVue().vm.$emit('select', 'iid');
+ findPipelineKeyCollapsibleBox().vm.$emit('select', PIPELINE_IID_KEY);
await waitForPromises();
expect(Sentry.captureException).toHaveBeenCalledWith(new Error('oh no!'));
@@ -610,11 +605,13 @@ describe('Pipelines', () => {
});
it('renders Run pipeline link', () => {
- expect(findRunPipelineButton().attributes('href')).toBe(paths.newPipelinePath);
+ expect(findRunPipelineButton().attributes('href')).toBe(
+ withPermissionsProps.newPipelinePath,
+ );
});
it('renders CI lint link', () => {
- expect(findCiLintButton().attributes('href')).toBe(paths.ciLintPath);
+ expect(findCiLintButton().attributes('href')).toBe(withPermissionsProps.ciLintPath);
});
it('renders Clear runner cache button', () => {
@@ -651,7 +648,7 @@ describe('Pipelines', () => {
describe('when CI is not enabled and user has permissions', () => {
beforeEach(async () => {
- createComponent({ hasGitlabCi: false, canCreatePipeline: true, ...paths });
+ createComponent({ props: { hasGitlabCi: false } });
await waitForPromises();
});
@@ -678,7 +675,7 @@ describe('Pipelines', () => {
describe('when CI is not enabled and user has no permissions', () => {
beforeEach(async () => {
- createComponent({ hasGitlabCi: false, canCreatePipeline: false, ...noPermissions });
+ createComponent({ props: { hasGitlabCi: false }, withPermissions: false });
await waitForPromises();
});
@@ -700,7 +697,7 @@ describe('Pipelines', () => {
describe('when CI is enabled and user has no permissions', () => {
beforeEach(() => {
- createComponent({ hasGitlabCi: true, canCreatePipeline: false, ...noPermissions });
+ createComponent({ props: { hasGitlabCi: true }, withPermissions: false });
return waitForPromises();
});
@@ -798,8 +795,10 @@ describe('Pipelines', () => {
describe('when user has no permissions', () => {
beforeEach(async () => {
- createComponent({ hasGitlabCi: false, canCreatePipeline: true, ...noPermissions });
-
+ createComponent({
+ props: { hasGitlabCi: false },
+ withPermissions: false,
+ });
await waitForPromises();
});
@@ -834,9 +833,11 @@ describe('Pipelines', () => {
});
it('renders buttons', () => {
- expect(findRunPipelineButton().attributes('href')).toBe(paths.newPipelinePath);
+ expect(findRunPipelineButton().attributes('href')).toBe(
+ withPermissionsProps.newPipelinePath,
+ );
- expect(findCiLintButton().attributes('href')).toBe(paths.ciLintPath);
+ expect(findCiLintButton().attributes('href')).toBe(withPermissionsProps.ciLintPath);
expect(findCleanCacheButton().text()).toBe('Clear runner caches');
});
diff --git a/spec/frontend/ci/runner/admin_runner_show/admin_runner_show_app_spec.js b/spec/frontend/ci/runner/admin_runner_show/admin_runner_show_app_spec.js
index c9349c64bfb..4a75c353487 100644
--- a/spec/frontend/ci/runner/admin_runner_show/admin_runner_show_app_spec.js
+++ b/spec/frontend/ci/runner/admin_runner_show/admin_runner_show_app_spec.js
@@ -103,11 +103,6 @@ describe('AdminRunnerShowApp', () => {
it('shows basic runner details', () => {
const expected = `Description My Runner
Last contact Never contacted
- Version 1.0.0
- IP Address None
- Executor None
- Architecture None
- Platform darwin
Configuration Runs untagged jobs
Maximum job timeout None
Token expiry
diff --git a/spec/frontend/ci/runner/admin_runners/admin_runners_app_spec.js b/spec/frontend/ci/runner/admin_runners/admin_runners_app_spec.js
index 1bbcb991619..bc28147db27 100644
--- a/spec/frontend/ci/runner/admin_runners/admin_runners_app_spec.js
+++ b/spec/frontend/ci/runner/admin_runners/admin_runners_app_spec.js
@@ -156,9 +156,7 @@ describe('AdminRunnersApp', () => {
await createComponent({ mountFn: mountExtended });
});
- // quarantine: https://gitlab.com/gitlab-org/gitlab/-/issues/414975
- // eslint-disable-next-line jest/no-disabled-tests
- it.skip('fetches counts', () => {
+ it('fetches counts', () => {
expect(mockRunnersCountHandler).toHaveBeenCalledTimes(COUNT_QUERIES);
});
diff --git a/spec/frontend/ci/runner/components/runner_details_spec.js b/spec/frontend/ci/runner/components/runner_details_spec.js
index cc91340655b..9d5f89a2642 100644
--- a/spec/frontend/ci/runner/components/runner_details_spec.js
+++ b/spec/frontend/ci/runner/components/runner_details_spec.js
@@ -49,13 +49,6 @@ describe('RunnerDetails', () => {
${'Description'} | ${{ description: null }} | ${'None'}
${'Last contact'} | ${{ contactedAt: mockOneHourAgo }} | ${'1 hour ago'}
${'Last contact'} | ${{ contactedAt: null }} | ${'Never contacted'}
- ${'Version'} | ${{ version: '12.3' }} | ${'12.3'}
- ${'Version'} | ${{ version: null }} | ${'None'}
- ${'Executor'} | ${{ executorName: 'shell' }} | ${'shell'}
- ${'Architecture'} | ${{ architectureName: 'amd64' }} | ${'amd64'}
- ${'Platform'} | ${{ platformName: 'darwin' }} | ${'darwin'}
- ${'IP Address'} | ${{ ipAddress: '127.0.0.1' }} | ${'127.0.0.1'}
- ${'IP Address'} | ${{ ipAddress: null }} | ${'None'}
${'Configuration'} | ${{ accessLevel: ACCESS_LEVEL_REF_PROTECTED, runUntagged: true }} | ${'Protected, Runs untagged jobs'}
${'Configuration'} | ${{ accessLevel: ACCESS_LEVEL_REF_PROTECTED, runUntagged: false }} | ${'Protected'}
${'Configuration'} | ${{ accessLevel: ACCESS_LEVEL_NOT_PROTECTED, runUntagged: true }} | ${'Runs untagged jobs'}
diff --git a/spec/frontend/ci/runner/components/runner_details_tabs_spec.js b/spec/frontend/ci/runner/components/runner_details_tabs_spec.js
index 689d0575726..516209794ad 100644
--- a/spec/frontend/ci/runner/components/runner_details_tabs_spec.js
+++ b/spec/frontend/ci/runner/components/runner_details_tabs_spec.js
@@ -54,7 +54,7 @@ describe('RunnerDetailsTabs', () => {
...options,
});
- routerPush = jest.spyOn(wrapper.vm.$router, 'push').mockImplementation(() => {});
+ routerPush = jest.spyOn(wrapper.vm.$router, 'push');
return waitForPromises();
};
@@ -67,9 +67,8 @@ describe('RunnerDetailsTabs', () => {
});
it('shows runner jobs', async () => {
- setWindowLocation(`#${JOBS_ROUTE_PATH}`);
-
- await createComponent({ mountFn: mountExtended });
+ createComponent({ mountFn: mountExtended });
+ await wrapper.vm.$router.push({ path: JOBS_ROUTE_PATH });
expect(findRunnerDetails().exists()).toBe(false);
expect(findRunnerJobs().props('runner')).toBe(mockRunner);
@@ -101,10 +100,9 @@ describe('RunnerDetailsTabs', () => {
}
});
- it.each(['#/', '#/unknown-tab'])('shows details when location hash is `%s`', async (hash) => {
- setWindowLocation(hash);
-
- await createComponent({ mountFn: mountExtended });
+ it.each(['#/', '#/unknown-tab'])('shows details when location hash is `%s`', async (path) => {
+ createComponent({ mountFn: mountExtended });
+ await wrapper.vm.$router.push({ path });
expect(findTabs().props('value')).toBe(0);
expect(findRunnerDetails().exists()).toBe(true);
diff --git a/spec/frontend/ci/runner/components/runner_list_spec.js b/spec/frontend/ci/runner/components/runner_list_spec.js
index 9da640afeb7..7c00aa48d31 100644
--- a/spec/frontend/ci/runner/components/runner_list_spec.js
+++ b/spec/frontend/ci/runner/components/runner_list_spec.js
@@ -1,14 +1,11 @@
import { GlTableLite, GlSkeletonLoader } from '@gitlab/ui';
import HelpPopover from '~/vue_shared/components/help_popover.vue';
-import {
- extendedWrapper,
- shallowMountExtended,
- mountExtended,
-} from 'helpers/vue_test_utils_helper';
+import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import { s__ } from '~/locale';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { createLocalState } from '~/ci/runner/graphql/list/local_state';
+import { stubComponent } from 'helpers/stub_component';
import RunnerList from '~/ci/runner/components/runner_list.vue';
import RunnerBulkDelete from '~/ci/runner/components/runner_bulk_delete.vue';
@@ -29,14 +26,11 @@ describe('RunnerList', () => {
const findHeaders = () => wrapper.findAll('th');
const findRows = () => wrapper.findAll('[data-testid^="runner-row-"]');
const findCell = ({ row = 0, fieldKey }) =>
- extendedWrapper(findRows().at(row).find(`[data-testid="td-${fieldKey}"]`));
+ findRows().at(row).find(`[data-testid="td-${fieldKey}"]`);
const findRunnerBulkDelete = () => wrapper.findComponent(RunnerBulkDelete);
const findRunnerBulkDeleteCheckbox = () => wrapper.findComponent(RunnerBulkDeleteCheckbox);
- const createComponent = (
- { props = {}, provide = {}, ...options } = {},
- mountFn = shallowMountExtended,
- ) => {
+ const createComponent = ({ props = {}, ...options } = {}, mountFn = shallowMountExtended) => {
({ cacheConfig, localMutations } = createLocalState());
wrapper = mountFn(RunnerList, {
@@ -49,7 +43,6 @@ describe('RunnerList', () => {
localMutations,
onlineContactTimeoutSecs,
staleTimeoutSecs,
- ...provide,
},
...options,
});
@@ -81,7 +74,11 @@ describe('RunnerList', () => {
});
it('Sets runner id as a row key', () => {
- createComponent();
+ createComponent({
+ stubs: {
+ GlTableLite: stubComponent(GlTableLite),
+ },
+ });
expect(findTable().attributes('primary-key')).toBe('id');
});
@@ -220,7 +217,12 @@ describe('RunnerList', () => {
describe('When data is loading', () => {
it('shows a busy state', () => {
- createComponent({ props: { runners: [], loading: true } });
+ createComponent({
+ props: { runners: [], loading: true },
+ stubs: {
+ GlTableLite: stubComponent(GlTableLite),
+ },
+ });
expect(findTable().classes('gl-opacity-6')).toBe(true);
});
diff --git a/spec/frontend/ci/runner/components/runner_type_icon_spec.js b/spec/frontend/ci/runner/components/runner_type_icon_spec.js
new file mode 100644
index 00000000000..01f3de10aa6
--- /dev/null
+++ b/spec/frontend/ci/runner/components/runner_type_icon_spec.js
@@ -0,0 +1,67 @@
+import { GlIcon } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import RunnerTypeIcon from '~/ci/runner/components/runner_type_icon.vue';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import { assertProps } from 'helpers/assert_props';
+import {
+ INSTANCE_TYPE,
+ GROUP_TYPE,
+ PROJECT_TYPE,
+ I18N_INSTANCE_TYPE,
+ I18N_GROUP_TYPE,
+ I18N_PROJECT_TYPE,
+} from '~/ci/runner/constants';
+
+describe('RunnerTypeIcon', () => {
+ let wrapper;
+
+ const findIcon = () => wrapper.findComponent(GlIcon);
+ const getTooltip = () => getBinding(findIcon().element, 'gl-tooltip');
+
+ const createComponent = ({ props = {} } = {}) => {
+ wrapper = shallowMount(RunnerTypeIcon, {
+ propsData: {
+ ...props,
+ },
+ directives: {
+ GlTooltip: createMockDirective('gl-tooltip'),
+ },
+ });
+ };
+
+ describe.each`
+ type | tooltipText
+ ${INSTANCE_TYPE} | ${I18N_INSTANCE_TYPE}
+ ${GROUP_TYPE} | ${I18N_GROUP_TYPE}
+ ${PROJECT_TYPE} | ${I18N_PROJECT_TYPE}
+ `('displays $type runner', ({ type, tooltipText }) => {
+ beforeEach(() => {
+ createComponent({ props: { type } });
+ });
+
+ it(`with no text`, () => {
+ expect(findIcon().text()).toBe('');
+ });
+
+ it(`with aria-label`, () => {
+ expect(findIcon().props('ariaLabel')).toBeDefined();
+ });
+
+ it('with a tooltip', () => {
+ expect(getTooltip().value).toBeDefined();
+ expect(getTooltip().value).toContain(tooltipText);
+ });
+ });
+
+ it('validation fails for an incorrect type', () => {
+ expect(() => {
+ assertProps(RunnerTypeIcon, { type: 'AN_UNKNOWN_VALUE' });
+ }).toThrow();
+ });
+
+ it('does not render content when type is missing', () => {
+ createComponent({ props: { type: undefined } });
+
+ expect(findIcon().exists()).toBe(false);
+ });
+});
diff --git a/spec/frontend/ci/runner/group_runner_show/group_runner_show_app_spec.js b/spec/frontend/ci/runner/group_runner_show/group_runner_show_app_spec.js
index 7438c47e32c..8258bd1d507 100644
--- a/spec/frontend/ci/runner/group_runner_show/group_runner_show_app_spec.js
+++ b/spec/frontend/ci/runner/group_runner_show/group_runner_show_app_spec.js
@@ -108,11 +108,6 @@ describe('GroupRunnerShowApp', () => {
it('shows basic runner details', () => {
const expected = `Description My Runner
Last contact Never contacted
- Version 1.0.0
- IP Address None
- Executor None
- Architecture None
- Platform darwin
Configuration Runs untagged jobs
Maximum job timeout None
Token expiry
diff --git a/spec/frontend/ci/runner/sentry_utils_spec.js b/spec/frontend/ci/runner/sentry_utils_spec.js
index 2f17cc43ac5..59d386a5899 100644
--- a/spec/frontend/ci/runner/sentry_utils_spec.js
+++ b/spec/frontend/ci/runner/sentry_utils_spec.js
@@ -4,24 +4,12 @@ import { captureException } from '~/ci/runner/sentry_utils';
jest.mock('@sentry/browser');
describe('~/ci/runner/sentry_utils', () => {
- let mockSetTag;
-
- beforeEach(() => {
- mockSetTag = jest.fn();
-
- Sentry.withScope.mockImplementation((fn) => {
- const scope = { setTag: mockSetTag };
- fn(scope);
- });
- });
-
describe('captureException', () => {
const mockError = new Error('Something went wrong!');
it('error is reported to sentry', () => {
captureException({ error: mockError });
- expect(Sentry.withScope).toHaveBeenCalled();
expect(Sentry.captureException).toHaveBeenCalledWith(mockError);
});
@@ -30,10 +18,11 @@ describe('~/ci/runner/sentry_utils', () => {
captureException({ error: mockError, component: mockComponentName });
- expect(Sentry.withScope).toHaveBeenCalled();
- expect(Sentry.captureException).toHaveBeenCalledWith(mockError);
-
- expect(mockSetTag).toHaveBeenCalledWith('vue_component', mockComponentName);
+ expect(Sentry.captureException).toHaveBeenCalledWith(mockError, {
+ tags: {
+ vue_component: mockComponentName,
+ },
+ });
});
});
});
diff --git a/spec/frontend/clusters_list/components/clusters_spec.js b/spec/frontend/clusters_list/components/clusters_spec.js
index 207bfddcb4f..d4474b1c643 100644
--- a/spec/frontend/clusters_list/components/clusters_spec.js
+++ b/spec/frontend/clusters_list/components/clusters_spec.js
@@ -8,8 +8,15 @@ import ClustersEmptyState from '~/clusters_list/components/clusters_empty_state.
import ClusterStore from '~/clusters_list/store';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
+import {
+ SET_LOADING_NODES,
+ SET_CLUSTERS_DATA,
+ SET_LOADING_CLUSTERS,
+} from '~/clusters_list/store/mutation_types';
import { apiData } from '../mock_data';
+jest.mock('@sentry/browser');
+
describe('Clusters', () => {
let mock;
let store;
@@ -59,15 +66,7 @@ describe('Clusters', () => {
};
};
- let captureException;
-
beforeEach(() => {
- jest.spyOn(Sentry, 'withScope').mockImplementation((fn) => {
- const mockScope = { setTag: () => {} };
- fn(mockScope);
- });
- captureException = jest.spyOn(Sentry, 'captureException');
-
mock = new MockAdapter(axios);
mockPollingApi(HTTP_STATUS_OK, apiData, paginationHeader());
@@ -76,13 +75,12 @@ describe('Clusters', () => {
afterEach(() => {
mock.restore();
- captureException.mockRestore();
});
describe('clusters table', () => {
describe('when data is loading', () => {
beforeEach(() => {
- wrapper.vm.$store.state.loadingClusters = true;
+ store.commit(SET_LOADING_CLUSTERS, true);
});
it('displays a loader instead of the table while loading', () => {
@@ -99,7 +97,12 @@ describe('Clusters', () => {
describe('when there are no clusters', () => {
beforeEach(() => {
- wrapper.vm.$store.state.totalClusters = 0;
+ store.commit(SET_CLUSTERS_DATA, {
+ data: {},
+ paginationInformation: {
+ total: 0,
+ },
+ });
});
it('should render empty state', () => {
expect(findEmptyState().exists()).toBe(true);
@@ -175,7 +178,7 @@ describe('Clusters', () => {
describe('nodes finish loading', () => {
beforeEach(async () => {
- wrapper.vm.$store.state.loadingNodes = false;
+ store.commit(SET_LOADING_NODES, false);
await nextTick();
});
@@ -198,19 +201,23 @@ describe('Clusters', () => {
describe('nodes with unknown quantity', () => {
it('notifies Sentry about all missing quantity types', () => {
- expect(captureException).toHaveBeenCalledTimes(8);
+ expect(Sentry.captureException).toHaveBeenCalledTimes(8);
});
it('notifies Sentry about CPU missing quantity types', () => {
const missingCpuTypeError = new Error('UnknownK8sCpuQuantity:1missingCpuUnit');
- expect(captureException).toHaveBeenCalledWith(missingCpuTypeError);
+ expect(Sentry.captureException).toHaveBeenCalledWith(missingCpuTypeError, {
+ tags: { javascript_clusters_list: 'totalCpuAndUsageError' },
+ });
});
it('notifies Sentry about Memory missing quantity types', () => {
const missingMemoryTypeError = new Error('UnknownK8sMemoryQuantity:1missingMemoryUnit');
- expect(captureException).toHaveBeenCalledWith(missingMemoryTypeError);
+ expect(Sentry.captureException).toHaveBeenCalledWith(missingMemoryTypeError, {
+ tags: { javascript_clusters_list: 'totalMemoryAndUsageError' },
+ });
});
});
});
diff --git a/spec/frontend/clusters_list/store/actions_spec.js b/spec/frontend/clusters_list/store/actions_spec.js
index 6d23db0517d..9e6da595a75 100644
--- a/spec/frontend/clusters_list/store/actions_spec.js
+++ b/spec/frontend/clusters_list/store/actions_spec.js
@@ -18,10 +18,6 @@ describe('Clusters store actions', () => {
describe('reportSentryError', () => {
beforeEach(() => {
- jest.spyOn(Sentry, 'withScope').mockImplementation((fn) => {
- const mockScope = { setTag: () => {} };
- fn(mockScope);
- });
captureException = jest.spyOn(Sentry, 'captureException');
});
@@ -34,7 +30,11 @@ describe('Clusters store actions', () => {
const tag = 'sentryErrorTag';
await testAction(actions.reportSentryError, { error: sentryError, tag }, {}, [], []);
- expect(captureException).toHaveBeenCalledWith(sentryError);
+ expect(captureException).toHaveBeenCalledWith(sentryError, {
+ tags: {
+ javascript_clusters_list: tag,
+ },
+ });
});
});
diff --git a/spec/frontend/commit/commit_pipeline_status_spec.js b/spec/frontend/commit/commit_pipeline_status_spec.js
index 73031724b12..08a7ec17785 100644
--- a/spec/frontend/commit/commit_pipeline_status_spec.js
+++ b/spec/frontend/commit/commit_pipeline_status_spec.js
@@ -137,7 +137,7 @@ describe('Commit pipeline status component', () => {
});
it('renders CI icon with the correct title and status', () => {
- expect(findCiIcon().attributes('title')).toEqual('Pipeline: passed');
+ expect(findCiIcon().attributes('title')).toEqual('Pipeline: Passed');
expect(findCiIcon().props('status')).toEqual(mockCiStatus);
});
});
diff --git a/spec/frontend/commit/components/commit_box_pipeline_status_spec.js b/spec/frontend/commit/components/commit_box_pipeline_status_spec.js
index 80b75a0a65e..844a2d81832 100644
--- a/spec/frontend/commit/components/commit_box_pipeline_status_spec.js
+++ b/spec/frontend/commit/components/commit_box_pipeline_status_spec.js
@@ -1,11 +1,11 @@
-import { GlLoadingIcon, GlLink } from '@gitlab/ui';
+import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CiBadgeLink from '~/vue_shared/components/ci_badge_link.vue';
import CommitBoxPipelineStatus from '~/projects/commit_box/info/components/commit_box_pipeline_status.vue';
import {
COMMIT_BOX_POLL_INTERVAL,
@@ -32,8 +32,7 @@ describe('Commit box pipeline status', () => {
const failedHandler = jest.fn().mockRejectedValue(new Error('GraphQL error'));
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
- const findStatusIcon = () => wrapper.findComponent(CiIcon);
- const findPipelineLink = () => wrapper.findComponent(GlLink);
+ const findCiBadgeLink = () => wrapper.findComponent(CiBadgeLink);
const advanceToNextFetch = () => {
jest.advanceTimersByTime(COMMIT_BOX_POLL_INTERVAL);
@@ -50,6 +49,9 @@ describe('Commit box pipeline status', () => {
provide: {
...mockProvide,
},
+ stubs: {
+ CiBadgeLink,
+ },
apolloProvider: createMockApolloProvider(handler),
});
};
@@ -59,7 +61,7 @@ describe('Commit box pipeline status', () => {
createComponent();
expect(findLoadingIcon().exists()).toBe(true);
- expect(findStatusIcon().exists()).toBe(false);
+ expect(findCiBadgeLink().exists()).toBe(false);
});
});
@@ -71,7 +73,7 @@ describe('Commit box pipeline status', () => {
});
it('should display pipeline status after the query is resolved successfully', () => {
- expect(findStatusIcon().exists()).toBe(true);
+ expect(findCiBadgeLink().exists()).toBe(true);
expect(findLoadingIcon().exists()).toBe(false);
expect(createAlert).toHaveBeenCalledTimes(0);
@@ -88,7 +90,7 @@ describe('Commit box pipeline status', () => {
},
} = mockPipelineStatusResponse;
- expect(findPipelineLink().attributes('href')).toBe(detailsPath);
+ expect(findCiBadgeLink().attributes('href')).toBe(detailsPath);
});
});
diff --git a/spec/frontend/commit/pipelines/legacy_pipelines_table_wrapper_spec.js b/spec/frontend/commit/pipelines/legacy_pipelines_table_wrapper_spec.js
index 4af292e3588..d58b139dae3 100644
--- a/spec/frontend/commit/pipelines/legacy_pipelines_table_wrapper_spec.js
+++ b/spec/frontend/commit/pipelines/legacy_pipelines_table_wrapper_spec.js
@@ -1,13 +1,13 @@
import { GlLoadingIcon, GlModal, GlTableLite } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
import fixture from 'test_fixtures/pipelines/pipelines.json';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { stubComponent } from 'helpers/stub_component';
import waitForPromises from 'helpers/wait_for_promises';
import Api from '~/api';
-import LegacyPipelinesTableWraper from '~/commit/pipelines/legacy_pipelines_table_wrapper.vue';
+import LegacyPipelinesTableWrapper from '~/commit/pipelines/legacy_pipelines_table_wrapper.vue';
+import PipelinesTable from '~/ci/common/pipelines_table.vue';
import {
HTTP_STATUS_BAD_REQUEST,
HTTP_STATUS_INTERNAL_SERVER_ERROR,
@@ -39,27 +39,26 @@ describe('Pipelines table in Commits and Merge requests', () => {
const findTableRows = () => wrapper.findAllByTestId('pipeline-table-row');
const findModal = () => wrapper.findComponent(GlModal);
const findMrPipelinesDocsLink = () => wrapper.findByTestId('mr-pipelines-docs-link');
-
- const createComponent = ({ props = {} } = {}) => {
- wrapper = extendedWrapper(
- mount(LegacyPipelinesTableWraper, {
- propsData: {
- endpoint: 'endpoint.json',
- emptyStateSvgPath: 'foo',
- errorStateSvgPath: 'foo',
- ...props,
- },
- mocks: {
- $toast,
- },
- stubs: {
- GlModal: stubComponent(GlModal, {
- template: '<div />',
- methods: { show: showMock },
- }),
- },
- }),
- );
+ const findPipelinesTable = () => wrapper.findComponent(PipelinesTable);
+
+ const createComponent = ({ props = {}, mountFn = mountExtended } = {}) => {
+ wrapper = mountFn(LegacyPipelinesTableWrapper, {
+ propsData: {
+ endpoint: 'endpoint.json',
+ emptyStateSvgPath: 'foo',
+ errorStateSvgPath: 'foo',
+ ...props,
+ },
+ mocks: {
+ $toast,
+ },
+ stubs: {
+ GlModal: stubComponent(GlModal, {
+ template: '<div />',
+ methods: { show: showMock },
+ }),
+ },
+ });
};
beforeEach(() => {
@@ -116,7 +115,6 @@ describe('Pipelines table in Commits and Merge requests', () => {
it('should make an API request when using pagination', async () => {
expect(mock.history.get).toHaveLength(1);
- expect(mock.history.get[0].params.page).toBe('1');
wrapper.find('.next-page-item').trigger('click');
@@ -359,4 +357,53 @@ describe('Pipelines table in Commits and Merge requests', () => {
);
});
});
+
+ describe('events', () => {
+ beforeEach(async () => {
+ mock.onGet('endpoint.json').reply(HTTP_STATUS_OK, [pipeline]);
+
+ createComponent({ mountFn: shallowMountExtended });
+
+ await waitForPromises();
+ });
+
+ describe('When cancelling a pipeline', () => {
+ it('sends the cancel action', async () => {
+ expect(mock.history.post).toHaveLength(0);
+
+ findPipelinesTable().vm.$emit('cancel-pipeline', pipeline);
+
+ await waitForPromises();
+
+ expect(mock.history.post).toHaveLength(1);
+ expect(mock.history.post[0].url).toContain('cancel.json');
+ });
+ });
+
+ describe('When retrying a pipeline', () => {
+ it('sends the retry action', async () => {
+ expect(mock.history.post).toHaveLength(0);
+
+ findPipelinesTable().vm.$emit('retry-pipeline', pipeline);
+
+ await waitForPromises();
+
+ expect(mock.history.post).toHaveLength(1);
+ expect(mock.history.post[0].url).toContain('retry.json');
+ });
+ });
+
+ describe('When refreshing a pipeline', () => {
+ it('calls the pipelines endpoint again', async () => {
+ expect(mock.history.get).toHaveLength(1);
+
+ findPipelinesTable().vm.$emit('refresh-pipelines-table');
+
+ await waitForPromises();
+
+ expect(mock.history.get).toHaveLength(2);
+ expect(mock.history.get[1].url).toContain('endpoint.json');
+ });
+ });
+ });
});
diff --git a/spec/frontend/content_editor/services/markdown_serializer_spec.js b/spec/frontend/content_editor/services/markdown_serializer_spec.js
index 3eb00f69345..548c6030ed7 100644
--- a/spec/frontend/content_editor/services/markdown_serializer_spec.js
+++ b/spec/frontend/content_editor/services/markdown_serializer_spec.js
@@ -206,6 +206,14 @@ describe('markdownSerializer', () => {
);
});
+ it('correctly serializes a malformed URL-encoded link', () => {
+ expect(
+ serialize(
+ paragraph(link({ href: 'https://example.com/%E0%A4%A' }, 'https://example.com/%E0%A4%A')),
+ ),
+ ).toBe('https://example.com/%E0%A4%A');
+ });
+
it('correctly serializes a link with a title', () => {
expect(
serialize(
diff --git a/spec/frontend/contributors/component/contributors_spec.js b/spec/frontend/contributors/component/contributors_spec.js
index f915b834aff..7d863a8eb78 100644
--- a/spec/frontend/contributors/component/contributors_spec.js
+++ b/spec/frontend/contributors/component/contributors_spec.js
@@ -8,6 +8,7 @@ import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { visitUrl } from '~/lib/utils/url_utility';
import RefSelector from '~/ref/components/ref_selector.vue';
import { REF_TYPE_BRANCHES, REF_TYPE_TAGS } from '~/ref/constants';
+import { SET_CHART_DATA, SET_LOADING_STATE } from '~/contributors/stores/mutation_types';
jest.mock('~/lib/utils/url_utility', () => ({
visitUrl: jest.fn(),
@@ -66,14 +67,14 @@ describe('Contributors charts', () => {
});
it('should display loader whiled loading data', async () => {
- wrapper.vm.$store.state.loading = true;
+ store.commit(SET_LOADING_STATE, true);
await nextTick();
expect(findLoadingIcon().exists()).toBe(true);
});
it('should render charts and a RefSelector when loading completed and there is chart data', async () => {
- wrapper.vm.$store.state.loading = false;
- wrapper.vm.$store.state.chartData = chartData;
+ store.commit(SET_LOADING_STATE, false);
+ store.commit(SET_CHART_DATA, chartData);
await nextTick();
expect(findLoadingIcon().exists()).toBe(false);
@@ -92,8 +93,8 @@ describe('Contributors charts', () => {
});
it('should have a history button with a set href attribute', async () => {
- wrapper.vm.$store.state.loading = false;
- wrapper.vm.$store.state.chartData = chartData;
+ store.commit(SET_LOADING_STATE, false);
+ store.commit(SET_CHART_DATA, chartData);
await nextTick();
const historyButton = findHistoryButton();
@@ -102,8 +103,8 @@ describe('Contributors charts', () => {
});
it('visits a URL when clicking on a branch/tag', async () => {
- wrapper.vm.$store.state.loading = false;
- wrapper.vm.$store.state.chartData = chartData;
+ store.commit(SET_LOADING_STATE, false);
+ store.commit(SET_CHART_DATA, chartData);
await nextTick();
findRefSelector().vm.$emit('input', branch);
diff --git a/spec/frontend/crm/crm_form_spec.js b/spec/frontend/crm/crm_form_spec.js
index fabf43ceb9d..083b49b7c30 100644
--- a/spec/frontend/crm/crm_form_spec.js
+++ b/spec/frontend/crm/crm_form_spec.js
@@ -10,7 +10,7 @@ import routes from '~/crm/contacts/routes';
import createContactMutation from '~/crm/contacts/components/graphql/create_contact.mutation.graphql';
import updateContactMutation from '~/crm/contacts/components/graphql/update_contact.mutation.graphql';
import getGroupContactsQuery from '~/crm/contacts/components/graphql/get_group_contacts.query.graphql';
-import createOrganizationMutation from '~/crm/organizations/components/graphql/create_organization.mutation.graphql';
+import createOrganizationMutation from '~/crm/organizations/components/graphql/create_customer_relations_organization.mutation.graphql';
import getGroupOrganizationsQuery from '~/crm/organizations/components/graphql/get_group_organizations.query.graphql';
import {
createContactMutationErrorResponse,
diff --git a/spec/frontend/crm/organization_form_wrapper_spec.js b/spec/frontend/crm/organization_form_wrapper_spec.js
index 8408c1920a9..f15fcac71d5 100644
--- a/spec/frontend/crm/organization_form_wrapper_spec.js
+++ b/spec/frontend/crm/organization_form_wrapper_spec.js
@@ -2,7 +2,7 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import OrganizationFormWrapper from '~/crm/organizations/components/organization_form_wrapper.vue';
import CrmForm from '~/crm/components/crm_form.vue';
import getGroupOrganizationsQuery from '~/crm/organizations/components/graphql/get_group_organizations.query.graphql';
-import createOrganizationMutation from '~/crm/organizations/components/graphql/create_organization.mutation.graphql';
+import createOrganizationMutation from '~/crm/organizations/components/graphql/create_customer_relations_organization.mutation.graphql';
import updateOrganizationMutation from '~/crm/organizations/components/graphql/update_organization.mutation.graphql';
describe('Customer relations organization form wrapper', () => {
diff --git a/spec/frontend/design_management/components/design_description/description_form_spec.js b/spec/frontend/design_management/components/design_description/description_form_spec.js
index f7feff98da3..7d68a3b80d5 100644
--- a/spec/frontend/design_management/components/design_description/description_form_spec.js
+++ b/spec/frontend/design_management/components/design_description/description_form_spec.js
@@ -42,7 +42,6 @@ describe('Design description form', () => {
showEditor = false,
isSubmitting = false,
designVariables = mockDesignVariables,
- contentEditorOnIssues = false,
designUpdateMutationHandler = mockDesignUpdateMutationHandler,
} = {}) => {
mockApollo = createMockApollo([[updateDesignDescriptionMutation, designUpdateMutationHandler]]);
@@ -52,11 +51,6 @@ describe('Design description form', () => {
markdownPreviewPath: '/gitlab-org/gitlab-test/preview_markdown?target_type=Issue',
designVariables,
},
- provide: {
- glFeatures: {
- contentEditorOnIssues,
- },
- },
apolloProvider: mockApollo,
data() {
return {
@@ -131,7 +125,7 @@ describe('Design description form', () => {
expect(findMarkdownEditor().props()).toMatchObject({
value: 'Test description',
renderMarkdownPath: '/gitlab-org/gitlab-test/preview_markdown?target_type=Issue',
- enableContentEditor: false,
+ enableContentEditor: true,
formFieldProps,
autofocus: true,
enableAutocomplete: true,
diff --git a/spec/frontend/design_management/pages/index_spec.js b/spec/frontend/design_management/pages/index_spec.js
index 961ea27f0f4..9b5e812c021 100644
--- a/spec/frontend/design_management/pages/index_spec.js
+++ b/spec/frontend/design_management/pages/index_spec.js
@@ -191,7 +191,7 @@ describe('Design management index page', () => {
[moveDesignMutation, moveDesignHandler],
];
- fakeApollo = createMockApollo(requestHandlers, {}, { addTypename: true });
+ fakeApollo = createMockApollo(requestHandlers, {});
wrapper = shallowMountExtended(Index, {
apolloProvider: fakeApollo,
router,
diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js
index e10aad6214c..212def72b90 100644
--- a/spec/frontend/diffs/components/app_spec.js
+++ b/spec/frontend/diffs/components/app_spec.js
@@ -6,6 +6,7 @@ import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'spec/test_constants';
+
import App from '~/diffs/components/app.vue';
import CommitWidget from '~/diffs/components/commit_widget.vue';
import CompareVersions from '~/diffs/components/compare_versions.vue';
@@ -17,6 +18,8 @@ import DiffsFileTree from '~/diffs/components/diffs_file_tree.vue';
import CollapsedFilesWarning from '~/diffs/components/collapsed_files_warning.vue';
import HiddenFilesWarning from '~/diffs/components/hidden_files_warning.vue';
+import eventHub from '~/diffs/event_hub';
+
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { Mousetrap } from '~/lib/mousetrap';
@@ -760,4 +763,29 @@ describe('diffs/components/app', () => {
);
});
});
+
+ describe('autoscroll', () => {
+ let loadSpy;
+
+ beforeEach(() => {
+ createComponent();
+ loadSpy = jest.spyOn(wrapper.vm, 'loadCollapsedDiff').mockResolvedValue('resolved');
+ });
+
+ it('does nothing if the location hash does not include a file hash', () => {
+ window.location.hash = 'not_a_file_hash';
+
+ eventHub.$emit('doneLoadingBatches');
+
+ expect(loadSpy).not.toHaveBeenCalled();
+ });
+
+ it('requests that the correct file be loaded', () => {
+ window.location.hash = '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_0_1';
+
+ eventHub.$emit('doneLoadingBatches');
+
+ expect(loadSpy).toHaveBeenCalledWith({ file: store.state.diffs.diffFiles[0] });
+ });
+ });
});
diff --git a/spec/frontend/diffs/components/diff_file_header_spec.js b/spec/frontend/diffs/components/diff_file_header_spec.js
index b089825090b..b0d98e0e4a6 100644
--- a/spec/frontend/diffs/components/diff_file_header_spec.js
+++ b/spec/frontend/diffs/components/diff_file_header_spec.js
@@ -8,8 +8,12 @@ import { mockTracking, triggerEvent } from 'helpers/tracking_helper';
import DiffFileHeader from '~/diffs/components/diff_file_header.vue';
import { DIFF_FILE_AUTOMATIC_COLLAPSE, DIFF_FILE_MANUAL_COLLAPSE } from '~/diffs/constants';
-import { reviewFile } from '~/diffs/store/actions';
-import { SET_DIFF_FILE_VIEWED, SET_MR_FILE_REVIEWS } from '~/diffs/store/mutation_types';
+import { reviewFile, setFileForcedOpen } from '~/diffs/store/actions';
+import {
+ SET_DIFF_FILE_VIEWED,
+ SET_MR_FILE_REVIEWS,
+ SET_FILE_FORCED_OPEN,
+} from '~/diffs/store/mutation_types';
import { diffViewerModes } from '~/ide/constants';
import { scrollToElement } from '~/lib/utils/common_utils';
import { truncateSha } from '~/lib/utils/text_utility';
@@ -67,6 +71,7 @@ describe('DiffFileHeader component', () => {
toggleFullDiff: jest.fn(),
setCurrentFileHash: jest.fn(),
setFileCollapsedByUser: jest.fn(),
+ setFileForcedOpen: jest.fn(),
reviewFile: jest.fn(),
},
},
@@ -138,6 +143,19 @@ describe('DiffFileHeader component', () => {
expect(wrapper.emitted().toggleFile).toBeDefined();
});
+ it('when header is clicked it triggers the action that removes the value that forces a file to be uncollapsed', () => {
+ createComponent();
+ findHeader().trigger('click');
+
+ return testAction(
+ setFileForcedOpen,
+ { filePath: diffFile.file_path, forced: false },
+ {},
+ [{ type: SET_FILE_FORCED_OPEN, payload: { filePath: diffFile.file_path, forced: false } }],
+ [],
+ );
+ });
+
it('when collapseIcon is clicked emits toggleFile', async () => {
createComponent({ props: { collapsible: true } });
findCollapseButton().vm.$emit('click', new Event('click'));
@@ -643,6 +661,44 @@ describe('DiffFileHeader component', () => {
expect(Boolean(wrapper.emitted().toggleFile)).toBe(fires);
},
);
+
+ it('removes the property that forces a file to be shown when the file review is toggled', () => {
+ createComponent({
+ props: {
+ diffFile: {
+ ...diffFile,
+ viewer: {
+ ...diffFile.viewer,
+ automaticallyCollapsed: false,
+ manuallyCollapsed: null,
+ },
+ },
+ showLocalFileReviews: true,
+ addMergeRequestButtons: true,
+ expanded: false,
+ },
+ });
+
+ findReviewFileCheckbox().vm.$emit('change', true);
+
+ testAction(
+ setFileForcedOpen,
+ { filePath: diffFile.file_path, forced: false },
+ {},
+ [{ type: SET_FILE_FORCED_OPEN, payload: { filePath: diffFile.file_path, forced: false } }],
+ [],
+ );
+
+ findReviewFileCheckbox().vm.$emit('change', false);
+
+ testAction(
+ setFileForcedOpen,
+ { filePath: diffFile.file_path, forced: false },
+ {},
+ [{ type: SET_FILE_FORCED_OPEN, payload: { filePath: diffFile.file_path, forced: false } }],
+ [],
+ );
+ });
});
it('should render the comment on files button', () => {
diff --git a/spec/frontend/diffs/components/diff_file_spec.js b/spec/frontend/diffs/components/diff_file_spec.js
index 53f135471b7..13efd3584b4 100644
--- a/spec/frontend/diffs/components/diff_file_spec.js
+++ b/spec/frontend/diffs/components/diff_file_spec.js
@@ -324,6 +324,22 @@ describe('DiffFile', () => {
});
describe('collapsing', () => {
+ describe('forced open', () => {
+ it('should have content even when it is automatically collapsed', () => {
+ makeFileAutomaticallyCollapsed(store);
+
+ expect(findDiffContentArea(wrapper).element.children.length).toBe(1);
+ expect(wrapper.classes('has-body')).toBe(true);
+ });
+
+ it('should have content even when it is manually collapsed', () => {
+ makeFileManuallyCollapsed(store);
+
+ expect(findDiffContentArea(wrapper).element.children.length).toBe(1);
+ expect(wrapper.classes('has-body')).toBe(true);
+ });
+ });
+
describe(`\`${EVT_EXPAND_ALL_FILES}\` event`, () => {
beforeEach(() => {
jest.spyOn(wrapper.vm, 'handleToggle').mockImplementation(() => {});
diff --git a/spec/frontend/diffs/store/actions_spec.js b/spec/frontend/diffs/store/actions_spec.js
index 387407a7e4d..18e81232b5c 100644
--- a/spec/frontend/diffs/store/actions_spec.js
+++ b/spec/frontend/diffs/store/actions_spec.js
@@ -1627,6 +1627,7 @@ describe('DiffsStoreActions', () => {
name: updatedViewerName,
automaticallyCollapsed: false,
manuallyCollapsed: false,
+ forceOpen: false,
};
const testData = [{ rich_text: 'test' }, { rich_text: 'file2' }];
let renamedFile;
@@ -1673,7 +1674,7 @@ describe('DiffsStoreActions', () => {
});
});
- describe('setFileUserCollapsed', () => {
+ describe('setFileCollapsedByUser', () => {
it('commits SET_FILE_COLLAPSED', () => {
return testAction(
diffActions.setFileCollapsedByUser,
@@ -1690,6 +1691,17 @@ describe('DiffsStoreActions', () => {
});
});
+ describe('setFileForcedOpen', () => {
+ it('commits SET_FILE_FORCED_OPEN', () => {
+ return testAction(diffActions.setFileForcedOpen, { filePath: 'test', forced: true }, null, [
+ {
+ type: types.SET_FILE_FORCED_OPEN,
+ payload: { filePath: 'test', forced: true },
+ },
+ ]);
+ });
+ });
+
describe('setExpandedDiffLines', () => {
beforeEach(() => {
utils.idleCallback.mockImplementation((cb) => {
diff --git a/spec/frontend/diffs/store/mutations_spec.js b/spec/frontend/diffs/store/mutations_spec.js
index e87c5d0a9b1..fdcf7c3eeab 100644
--- a/spec/frontend/diffs/store/mutations_spec.js
+++ b/spec/frontend/diffs/store/mutations_spec.js
@@ -1055,4 +1055,14 @@ describe('DiffsStoreMutations', () => {
expect(state.diffFiles[0].drafts[0]).toEqual('test');
});
});
+
+ describe('SET_FILE_FORCED_OPEN', () => {
+ it('sets the forceOpen property of a diff file viewer correctly', () => {
+ const state = { diffFiles: [{ file_path: 'abc', viewer: { forceOpen: 'not-a-boolean' } }] };
+
+ mutations[types.SET_FILE_FORCED_OPEN](state, { filePath: 'abc', forced: true });
+
+ expect(state.diffFiles[0].viewer.forceOpen).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/diffs/store/utils_spec.js b/spec/frontend/diffs/store/utils_spec.js
index 24cb8158739..720b72f4965 100644
--- a/spec/frontend/diffs/store/utils_spec.js
+++ b/spec/frontend/diffs/store/utils_spec.js
@@ -927,19 +927,21 @@ describe('DiffsStoreUtils', () => {
describe('parseUrlHashAsFileHash', () => {
it.each`
- input | currentDiffId | resultId
- ${'#note_12345'} | ${'1A2B3C'} | ${'1A2B3C'}
- ${'note_12345'} | ${'1A2B3C'} | ${'1A2B3C'}
- ${'#note_12345'} | ${undefined} | ${null}
- ${'note_12345'} | ${undefined} | ${null}
- ${'#diff-content-12345'} | ${undefined} | ${'12345'}
- ${'diff-content-12345'} | ${undefined} | ${'12345'}
- ${'#diff-content-12345'} | ${'98765'} | ${'12345'}
- ${'diff-content-12345'} | ${'98765'} | ${'12345'}
- ${'#e334a2a10f036c00151a04cea7938a5d4213a818'} | ${undefined} | ${'e334a2a10f036c00151a04cea7938a5d4213a818'}
- ${'e334a2a10f036c00151a04cea7938a5d4213a818'} | ${undefined} | ${'e334a2a10f036c00151a04cea7938a5d4213a818'}
- ${'#Z334a2a10f036c00151a04cea7938a5d4213a818'} | ${undefined} | ${null}
- ${'Z334a2a10f036c00151a04cea7938a5d4213a818'} | ${undefined} | ${null}
+ input | currentDiffId | resultId
+ ${'#note_12345'} | ${'1A2B3C'} | ${'1A2B3C'}
+ ${'note_12345'} | ${'1A2B3C'} | ${'1A2B3C'}
+ ${'#note_12345'} | ${undefined} | ${null}
+ ${'note_12345'} | ${undefined} | ${null}
+ ${'#diff-content-12345'} | ${undefined} | ${'12345'}
+ ${'diff-content-12345'} | ${undefined} | ${'12345'}
+ ${'#diff-content-12345'} | ${'98765'} | ${'12345'}
+ ${'diff-content-12345'} | ${'98765'} | ${'12345'}
+ ${'#e334a2a10f036c00151a04cea7938a5d4213a818'} | ${undefined} | ${'e334a2a10f036c00151a04cea7938a5d4213a818'}
+ ${'e334a2a10f036c00151a04cea7938a5d4213a818'} | ${undefined} | ${'e334a2a10f036c00151a04cea7938a5d4213a818'}
+ ${'#Z334a2a10f036c00151a04cea7938a5d4213a818'} | ${undefined} | ${null}
+ ${'Z334a2a10f036c00151a04cea7938a5d4213a818'} | ${undefined} | ${null}
+ ${'#e334a2a10f036c00151a04cea7938a5d4213a818_0_42'} | ${undefined} | ${'e334a2a10f036c00151a04cea7938a5d4213a818'}
+ ${'e334a2a10f036c00151a04cea7938a5d4213a818_0_42'} | ${undefined} | ${'e334a2a10f036c00151a04cea7938a5d4213a818'}
`('returns $resultId for $input and $currentDiffId', ({ input, currentDiffId, resultId }) => {
expect(utils.parseUrlHashAsFileHash(input, currentDiffId)).toBe(resultId);
});
diff --git a/spec/frontend/diffs/utils/merge_request_spec.js b/spec/frontend/diffs/utils/merge_request_spec.js
index 11c0efb9a9c..f5145b3c4c7 100644
--- a/spec/frontend/diffs/utils/merge_request_spec.js
+++ b/spec/frontend/diffs/utils/merge_request_spec.js
@@ -1,6 +1,7 @@
import {
updateChangesTabCount,
getDerivedMergeRequestInformation,
+ extractFileHash,
} from '~/diffs/utils/merge_request';
import { ZERO_CHANGES_ALT_DISPLAY } from '~/diffs/constants';
import { diffMetadata } from '../mock_data/diff_metadata';
@@ -128,4 +129,19 @@ describe('Merge Request utilities', () => {
});
});
});
+
+ describe('extractFileHash', () => {
+ const sha1Like = 'abcdef1234567890abcdef1234567890abcdef12';
+ const sha1LikeToo = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa';
+
+ it('returns undefined when a SHA1-like string cannot be found in the input', () => {
+ expect(extractFileHash({ input: 'something' })).toBe(undefined);
+ });
+
+ it('returns the first matching string of SHA1-like characters in the input', () => {
+ const fullString = `#${sha1Like}_34_42--${sha1LikeToo}`;
+
+ expect(extractFileHash({ input: fullString })).toBe(sha1Like);
+ });
+ });
});
diff --git a/spec/frontend/diffs/utils/sort_errors_by_file_spec.js b/spec/frontend/diffs/utils/sort_errors_by_file_spec.js
new file mode 100644
index 00000000000..ca8a8ec3516
--- /dev/null
+++ b/spec/frontend/diffs/utils/sort_errors_by_file_spec.js
@@ -0,0 +1,52 @@
+import { sortFindingsByFile } from '~/diffs/utils/sort_findings_by_file';
+
+describe('sort_findings_by_file utilities', () => {
+ const mockDescription = 'mockDescription';
+ const mockSeverity = 'mockseverity';
+ const mockLine = '00';
+ const mockFile1 = 'file1.js';
+ const mockFile2 = 'file2.rb';
+ const emptyResponse = {
+ files: {},
+ };
+
+ const unsortedFindings = [
+ {
+ severity: mockSeverity,
+ filePath: mockFile1,
+ line: mockLine,
+ description: mockDescription,
+ },
+ {
+ severity: mockSeverity,
+ filePath: mockFile2,
+ line: mockLine,
+ description: mockDescription,
+ },
+ ];
+ const sortedFindings = {
+ files: {
+ [mockFile1]: [
+ {
+ line: mockLine,
+ description: mockDescription,
+ severity: mockSeverity,
+ },
+ ],
+ [mockFile2]: [
+ {
+ line: mockLine,
+ description: mockDescription,
+ severity: mockSeverity,
+ },
+ ],
+ },
+ };
+
+ it('sorts Findings correctly', () => {
+ expect(sortFindingsByFile(unsortedFindings)).toEqual(sortedFindings);
+ });
+ it('does not throw error when given no input', () => {
+ expect(sortFindingsByFile()).toEqual(emptyResponse);
+ });
+});
diff --git a/spec/frontend/editor/schema/ci/ci_schema_spec.js b/spec/frontend/editor/schema/ci/ci_schema_spec.js
index 77c7f0d49a8..0f380f13679 100644
--- a/spec/frontend/editor/schema/ci/ci_schema_spec.js
+++ b/spec/frontend/editor/schema/ci/ci_schema_spec.js
@@ -36,6 +36,7 @@ import HooksYaml from './yaml_tests/positive_tests/hooks.yml';
import SecretsYaml from './yaml_tests/positive_tests/secrets.yml';
import ServicesYaml from './yaml_tests/positive_tests/services.yml';
import NeedsParallelMatrixYaml from './yaml_tests/positive_tests/needs_parallel_matrix.yml';
+import ScriptYaml from './yaml_tests/positive_tests/script.yml';
// YAML NEGATIVE TEST
import ArtifactsNegativeYaml from './yaml_tests/negative_tests/artifacts.yml';
@@ -60,6 +61,7 @@ import ServicesNegativeYaml from './yaml_tests/negative_tests/services.yml';
import NeedsParallelMatrixNumericYaml from './yaml_tests/negative_tests/needs/parallel_matrix/numeric.yml';
import NeedsParallelMatrixWrongParallelValueYaml from './yaml_tests/negative_tests/needs/parallel_matrix/wrong_parallel_value.yml';
import NeedsParallelMatrixWrongMatrixValueYaml from './yaml_tests/negative_tests/needs/parallel_matrix/wrong_matrix_value.yml';
+import ScriptNegativeYaml from './yaml_tests/negative_tests/script.yml';
const ajv = new Ajv({
strictTypes: false,
@@ -101,6 +103,7 @@ describe('positive tests', () => {
ServicesYaml,
SecretsYaml,
NeedsParallelMatrixYaml,
+ ScriptYaml,
}),
)('schema validates %s', (_, input) => {
// We construct a new "JSON" from each main key that is inside a
@@ -144,6 +147,7 @@ describe('negative tests', () => {
NeedsParallelMatrixNumericYaml,
NeedsParallelMatrixWrongParallelValueYaml,
NeedsParallelMatrixWrongMatrixValueYaml,
+ ScriptNegativeYaml,
}),
)('schema validates %s', (_, input) => {
// We construct a new "JSON" from each main key that is inside a
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/script.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/script.yml
new file mode 100644
index 00000000000..f5bf3f54f6f
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/script.yml
@@ -0,0 +1,14 @@
+script: echo "invalid global script"
+
+default:
+ before_script: 0.1
+ after_script: 1
+
+invalid_script_type:
+ script: true
+
+empty_array_script:
+ script: []
+
+empty_string_script:
+ script: ""
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/script.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/script.yml
new file mode 100644
index 00000000000..0ffb1f3e89e
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/script.yml
@@ -0,0 +1,52 @@
+default:
+ before_script:
+ - echo "default before_script"
+ after_script: |
+ echo "default after_script"
+
+valid_job_with_empty_string_script:
+ before_script: ""
+ after_script: ""
+ script:
+ - echo "overwrite default before_script and after_script"
+
+valid_job_with_empty_array_script:
+ before_script: []
+ after_script: []
+ script:
+ - echo "overwrite default before_script and after_script"
+
+valid_job_with_string_scripts:
+ before_script: echo before_script
+ script: echo script
+ after_script: echo after_script
+
+valid_job_with_multi_line_scripts:
+ before_script: |
+ echo multiline
+ echo before_script
+ script: |
+ echo multiline
+ echo script
+ after_script: |
+ echo multiline
+ echo after_script
+
+valid_job_with_array_scripts:
+ before_script:
+ - echo array
+ - echo before_script
+ script:
+ - echo array
+ - echo script
+ after_script:
+ - echo array
+ - echo after_script
+
+valid_job_with_nested_array_scripts:
+ before_script:
+ - [echo nested_array, echo before_script]
+ script:
+ - [echo nested_array, echo script]
+ after_script:
+ - [echo nested_array, echo after_script]
diff --git a/spec/frontend/editor/source_editor_spec.js b/spec/frontend/editor/source_editor_spec.js
index 6a8e7b296aa..f66de61da1e 100644
--- a/spec/frontend/editor/source_editor_spec.js
+++ b/spec/frontend/editor/source_editor_spec.js
@@ -9,21 +9,6 @@ import SourceEditor from '~/editor/source_editor';
import { DEFAULT_THEME, themes } from '~/ide/lib/themes';
import { joinPaths } from '~/lib/utils/url_utility';
-jest.mock('~/helpers/startup_css_helper', () => {
- return {
- waitForCSSLoaded: jest.fn().mockImplementation((cb) => {
- // We have to artificially put the callback's execution
- // to the end of the current call stack to be able to
- // test that the callback is called after waitForCSSLoaded.
- // setTimeout with 0 delay does exactly that.
- // Otherwise we might end up with false positive results
- setTimeout(() => {
- cb.apply();
- }, 0);
- }),
- };
-});
-
describe('Base editor', () => {
let editorEl;
let editor;
@@ -161,7 +146,7 @@ describe('Base editor', () => {
expect(instance.getModel()).toBeNull();
});
- it('resets the layout in waitForCSSLoaded callback', async () => {
+ it('resets the layout in createInstance', () => {
const layoutSpy = jest.fn();
jest.spyOn(monacoEditor, 'create').mockReturnValue({
layout: layoutSpy,
@@ -170,10 +155,6 @@ describe('Base editor', () => {
dispose: jest.fn(),
});
editor.createInstance(defaultArguments);
- expect(layoutSpy).not.toHaveBeenCalled();
-
- // We're waiting for the waitForCSSLoaded mock to kick in
- await jest.runOnlyPendingTimers();
expect(layoutSpy).toHaveBeenCalled();
});
diff --git a/spec/frontend/environments/canary_ingress_spec.js b/spec/frontend/environments/canary_ingress_spec.js
index e0247731b63..1d0d9385bfe 100644
--- a/spec/frontend/environments/canary_ingress_spec.js
+++ b/spec/frontend/environments/canary_ingress_spec.js
@@ -1,21 +1,21 @@
-import { GlDropdownItem } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
-import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import { mountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { createMockDirective } from 'helpers/vue_mock_directive';
import CanaryIngress from '~/environments/components/canary_ingress.vue';
-import { CANARY_UPDATE_MODAL } from '~/environments/constants';
import { rolloutStatus } from './graphql/mock_data';
+jest.mock('lodash/uniqueId', () => {
+ return jest.fn((input) => input);
+});
+
describe('/environments/components/canary_ingress.vue', () => {
let wrapper;
- const setWeightTo = (weightWrapper, x) =>
- weightWrapper
- .findAllComponents(GlDropdownItem)
- .at(x / 5)
- .vm.$emit('click');
+ const setWeightTo = (weightWrapper, x) => {
+ weightWrapper.vm.$emit('select', x);
+ };
const createComponent = (props = {}, options = {}) => {
- wrapper = mount(CanaryIngress, {
+ wrapper = mountExtended(CanaryIngress, {
propsData: {
canaryIngress: {
canary_weight: 60,
@@ -37,11 +37,11 @@ describe('/environments/components/canary_ingress.vue', () => {
let stableWeightDropdown;
beforeEach(() => {
- stableWeightDropdown = wrapper.find('[data-testid="stable-weight"]');
+ stableWeightDropdown = extendedWrapper(wrapper.find('#stable-weight-'));
});
it('displays the current stable weight', () => {
- expect(stableWeightDropdown.props('text')).toBe('40');
+ expect(stableWeightDropdown.props('selected')).toBe(40);
});
it('emits a change with the new canary weight', () => {
@@ -51,17 +51,9 @@ describe('/environments/components/canary_ingress.vue', () => {
});
it('lists options from 0 to 100 in increments of 5', () => {
- const options = stableWeightDropdown.findAllComponents(GlDropdownItem);
+ const options = stableWeightDropdown.props('items');
expect(options).toHaveLength(21);
- options.wrappers.forEach((w, i) => expect(w.text()).toBe((i * 5).toString()));
- });
-
- it('is set to open the change modal', () => {
- stableWeightDropdown
- .findAllComponents(GlDropdownItem)
- .wrappers.forEach((w) =>
- expect(getBinding(w.element, 'gl-modal')).toMatchObject({ value: CANARY_UPDATE_MODAL }),
- );
+ options.forEach((option, i) => expect(option.text).toBe((i * 5).toString()));
});
});
@@ -69,11 +61,11 @@ describe('/environments/components/canary_ingress.vue', () => {
let canaryWeightDropdown;
beforeEach(() => {
- canaryWeightDropdown = wrapper.find('[data-testid="canary-weight"]');
+ canaryWeightDropdown = wrapper.find('#canary-weight-');
});
it('displays the current canary weight', () => {
- expect(canaryWeightDropdown.props('text')).toBe('60');
+ expect(canaryWeightDropdown.props('selected')).toBe(60);
});
it('emits a change with the new canary weight', () => {
@@ -83,17 +75,9 @@ describe('/environments/components/canary_ingress.vue', () => {
});
it('lists options from 0 to 100 in increments of 5', () => {
- canaryWeightDropdown
- .findAllComponents(GlDropdownItem)
- .wrappers.forEach((w, i) => expect(w.text()).toBe((i * 5).toString()));
- });
-
- it('is set to open the change modal', () => {
- canaryWeightDropdown
- .findAllComponents(GlDropdownItem)
- .wrappers.forEach((w) =>
- expect(getBinding(w.element, 'gl-modal')).toMatchObject({ value: CANARY_UPDATE_MODAL }),
- );
+ const options = canaryWeightDropdown.props('items');
+ expect(options).toHaveLength(21);
+ options.forEach((option, i) => expect(option.text).toBe((i * 5).toString()));
});
});
@@ -106,8 +90,8 @@ describe('/environments/components/canary_ingress.vue', () => {
});
it('shows the correct weight', () => {
- const canaryWeightDropdown = wrapper.find('[data-testid="canary-weight"]');
- expect(canaryWeightDropdown.props('text')).toBe('50');
+ const canaryWeightDropdown = wrapper.find('#canary-weight-');
+ expect(canaryWeightDropdown.props('selected')).toBe(50);
});
});
});
diff --git a/spec/frontend/environments/environment_form_spec.js b/spec/frontend/environments/environment_form_spec.js
index 22dd7437d82..5888b22aece 100644
--- a/spec/frontend/environments/environment_form_spec.js
+++ b/spec/frontend/environments/environment_form_spec.js
@@ -28,12 +28,11 @@ const userAccessAuthorizedAgents = [
const configuration = {
basePath: mockKasTunnelUrl.replace(/\/$/, ''),
- baseOptions: {
- headers: {
- 'GitLab-Agent-Id': 2,
- },
- withCredentials: true,
+ headers: {
+ 'GitLab-Agent-Id': 2,
+ 'Content-Type': 'application/json',
},
+ credentials: 'include',
};
describe('~/environments/components/form.vue', () => {
diff --git a/spec/frontend/environments/graphql/resolvers/kubernetes_spec.js b/spec/frontend/environments/graphql/resolvers/kubernetes_spec.js
index 1d41fb11b14..ed15c66f4c6 100644
--- a/spec/frontend/environments/graphql/resolvers/kubernetes_spec.js
+++ b/spec/frontend/environments/graphql/resolvers/kubernetes_spec.js
@@ -29,9 +29,7 @@ describe('~/frontend/environments/graphql/resolvers', () => {
describe('k8sPods', () => {
const mockPodsListFn = jest.fn().mockImplementation(() => {
return Promise.resolve({
- data: {
- items: k8sPodsMock,
- },
+ items: k8sPodsMock,
});
});
@@ -50,7 +48,7 @@ describe('~/frontend/environments/graphql/resolvers', () => {
it('should request namespaced pods from the cluster_client library if namespace is specified', async () => {
const pods = await mockResolvers.Query.k8sPods(null, { configuration, namespace });
- expect(mockNamespacedPodsListFn).toHaveBeenCalledWith(namespace);
+ expect(mockNamespacedPodsListFn).toHaveBeenCalledWith({ namespace });
expect(mockAllPodsListFn).not.toHaveBeenCalled();
expect(pods).toEqual(k8sPodsMock);
@@ -76,22 +74,42 @@ describe('~/frontend/environments/graphql/resolvers', () => {
describe('k8sServices', () => {
const mockServicesListFn = jest.fn().mockImplementation(() => {
return Promise.resolve({
- data: {
- items: k8sServicesMock,
- },
+ items: k8sServicesMock,
});
});
+ const mockNamespacedServicesListFn = jest.fn().mockImplementation(mockServicesListFn);
+ const mockAllServicesListFn = jest.fn().mockImplementation(mockServicesListFn);
+
beforeEach(() => {
jest
.spyOn(CoreV1Api.prototype, 'listCoreV1ServiceForAllNamespaces')
.mockImplementation(mockServicesListFn);
+
+ jest
+ .spyOn(CoreV1Api.prototype, 'listCoreV1NamespacedService')
+ .mockImplementation(mockNamespacedServicesListFn);
+ jest
+ .spyOn(CoreV1Api.prototype, 'listCoreV1ServiceForAllNamespaces')
+ .mockImplementation(mockAllServicesListFn);
});
- it('should request services from the cluster_client library', async () => {
- const services = await mockResolvers.Query.k8sServices(null, { configuration });
+ it('should request namespaced services from the cluster_client library if namespace is specified', async () => {
+ const services = await mockResolvers.Query.k8sServices(null, { configuration, namespace });
+
+ expect(mockNamespacedServicesListFn).toHaveBeenCalledWith({ namespace });
+ expect(mockAllServicesListFn).not.toHaveBeenCalled();
+
+ expect(services).toEqual(k8sServicesMock);
+ });
+ it('should request all services from the cluster_client library if namespace is not specified', async () => {
+ const services = await mockResolvers.Query.k8sServices(null, {
+ configuration,
+ namespace: '',
+ });
expect(mockServicesListFn).toHaveBeenCalled();
+ expect(mockNamespacedServicesListFn).not.toHaveBeenCalled();
expect(services).toEqual(k8sServicesMock);
});
@@ -159,7 +177,7 @@ describe('~/frontend/environments/graphql/resolvers', () => {
await mockResolvers.Query.k8sWorkloads(null, { configuration, namespace });
namespacedMocks.forEach((workloadMock) => {
- expect(workloadMock.spy).toHaveBeenCalledWith(namespace);
+ expect(workloadMock.spy).toHaveBeenCalledWith({ namespace });
});
});
@@ -194,9 +212,7 @@ describe('~/frontend/environments/graphql/resolvers', () => {
describe('k8sNamespaces', () => {
const mockNamespacesListFn = jest.fn().mockImplementation(() => {
return Promise.resolve({
- data: {
- items: k8sNamespacesMock,
- },
+ items: k8sNamespacesMock,
});
});
@@ -221,13 +237,7 @@ describe('~/frontend/environments/graphql/resolvers', () => {
])(
'should throw an error if the API call fails with the reason "%s"',
async (reason, message) => {
- jest.spyOn(CoreV1Api.prototype, 'listCoreV1Namespace').mockRejectedValue({
- response: {
- data: {
- reason,
- },
- },
- });
+ jest.spyOn(CoreV1Api.prototype, 'listCoreV1Namespace').mockRejectedValue({ reason });
await expect(mockResolvers.Query.k8sNamespaces(null, { configuration })).rejects.toThrow(
message,
diff --git a/spec/frontend/environments/kubernetes_overview_spec.js b/spec/frontend/environments/kubernetes_overview_spec.js
index aa7e2e9a3b7..2b810aac653 100644
--- a/spec/frontend/environments/kubernetes_overview_spec.js
+++ b/spec/frontend/environments/kubernetes_overview_spec.js
@@ -27,10 +27,11 @@ const provide = {
const configuration = {
basePath: provide.kasTunnelUrl.replace(/\/$/, ''),
- baseOptions: {
- headers: { 'GitLab-Agent-Id': '1' },
- withCredentials: true,
+ headers: {
+ 'GitLab-Agent-Id': '1',
+ 'Content-Type': 'application/json',
},
+ credentials: 'include',
};
describe('~/environments/components/kubernetes_overview.vue', () => {
diff --git a/spec/frontend/environments/kubernetes_pods_spec.js b/spec/frontend/environments/kubernetes_pods_spec.js
index 0420d8df1a9..a51c85468b4 100644
--- a/spec/frontend/environments/kubernetes_pods_spec.js
+++ b/spec/frontend/environments/kubernetes_pods_spec.js
@@ -123,7 +123,7 @@ describe('~/environments/components/kubernetes_pods.vue', () => {
});
it('emits an error message', () => {
- expect(wrapper.emitted('cluster-error')).toMatchObject([[error]]);
+ expect(wrapper.emitted('cluster-error')).toMatchObject([[error.message]]);
});
});
});
diff --git a/spec/frontend/environments/kubernetes_summary_spec.js b/spec/frontend/environments/kubernetes_summary_spec.js
index 22c81f29f64..fdcf32e7d01 100644
--- a/spec/frontend/environments/kubernetes_summary_spec.js
+++ b/spec/frontend/environments/kubernetes_summary_spec.js
@@ -16,9 +16,7 @@ describe('~/environments/components/kubernetes_summary.vue', () => {
const namespace = 'my-kubernetes-namespace';
const configuration = {
basePath: mockKasTunnelUrl,
- baseOptions: {
- headers: { 'GitLab-Agent-Id': '1' },
- },
+ headers: { 'GitLab-Agent-Id': '1', 'Content-Type': 'application/json' },
};
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
@@ -121,7 +119,7 @@ describe('~/environments/components/kubernetes_summary.vue', () => {
createWrapper(createErroredApolloProvider());
await waitForPromises();
- expect(wrapper.emitted('cluster-error')).toEqual([[error]]);
+ expect(wrapper.emitted('cluster-error')).toEqual([[error.message]]);
});
});
});
diff --git a/spec/frontend/environments/kubernetes_tabs_spec.js b/spec/frontend/environments/kubernetes_tabs_spec.js
index 81b0bb86e0e..fecd6d2a8ee 100644
--- a/spec/frontend/environments/kubernetes_tabs_spec.js
+++ b/spec/frontend/environments/kubernetes_tabs_spec.js
@@ -162,7 +162,7 @@ describe('~/environments/components/kubernetes_tabs.vue', () => {
createWrapper(createErroredApolloProvider());
await waitForPromises();
- expect(wrapper.emitted('cluster-error')).toEqual([[error]]);
+ expect(wrapper.emitted('cluster-error')).toEqual([[error.message]]);
});
});
diff --git a/spec/frontend/fixtures/autocomplete.rb b/spec/frontend/fixtures/autocomplete.rb
index 6215fa44e27..0ceacc41cdb 100644
--- a/spec/frontend/fixtures/autocomplete.rb
+++ b/spec/frontend/fixtures/autocomplete.rb
@@ -22,15 +22,17 @@ RSpec.describe ::AutocompleteController, '(JavaScript fixtures)', type: :control
project.add_developer(user)
end
- get :users,
- format: :json,
- params: {
- project_id: project.id,
- active: true,
- current_user: true,
- author: merge_request.author.id,
- merge_request_iid: merge_request.iid
- }
+ get(
+ :users,
+ format: :json,
+ params: {
+ project_id: project.id,
+ active: true,
+ current_user: true,
+ author: merge_request.author.id,
+ merge_request_iid: merge_request.iid
+ }
+ )
expect(response).to be_successful
end
diff --git a/spec/frontend/fixtures/autocomplete_sources.rb b/spec/frontend/fixtures/autocomplete_sources.rb
index 74bf58cc106..2c28440ab0c 100644
--- a/spec/frontend/fixtures/autocomplete_sources.rb
+++ b/spec/frontend/fixtures/autocomplete_sources.rb
@@ -26,14 +26,16 @@ RSpec.describe Projects::AutocompleteSourcesController, '(JavaScript fixtures)',
create(:label, project: project, title: 'P3')
create(:label, project: project, title: 'P4')
- get :labels,
- format: :json,
- params: {
- namespace_id: group.path,
- project_id: project.path,
- type: issue.class.name,
- type_id: issue.id
- }
+ get(
+ :labels,
+ format: :json,
+ params: {
+ namespace_id: group.path,
+ project_id: project.path,
+ type: issue.class.name,
+ type_id: issue.id
+ }
+ )
expect(response).to be_successful
end
diff --git a/spec/frontend/fixtures/environments.rb b/spec/frontend/fixtures/environments.rb
index 81f1eb11e3e..8cf0977c5ed 100644
--- a/spec/frontend/fixtures/environments.rb
+++ b/spec/frontend/fixtures/environments.rb
@@ -27,13 +27,16 @@ RSpec.describe 'Environments (JavaScript fixtures)', feature_category: :environm
query = get_graphql_query_as_string(environment_details_query_path)
puts project.full_path
puts environment.name
- post_graphql(query, current_user: admin,
- variables:
- {
- projectFullPath: project.full_path,
- environmentName: environment.name,
- pageSize: 10
- })
+ post_graphql(
+ query,
+ current_user: admin,
+ variables:
+ {
+ projectFullPath: project.full_path,
+ environmentName: environment.name,
+ pageSize: 10
+ }
+ )
expect_graphql_errors_to_be_empty
end
end
@@ -58,13 +61,16 @@ RSpec.describe 'Environments (JavaScript fixtures)', feature_category: :environm
it "graphql/#{environment_details_query_path}.json" do
query = get_graphql_query_as_string(environment_details_query_path)
- post_graphql(query, current_user: admin,
- variables:
- {
- projectFullPath: project.full_path,
- environmentName: environment.name,
- pageSize: 10
- })
+ post_graphql(
+ query,
+ current_user: admin,
+ variables:
+ {
+ projectFullPath: project.full_path,
+ environmentName: environment.name,
+ pageSize: 10
+ }
+ )
expect_graphql_errors_to_be_empty
end
end
diff --git a/spec/frontend/fixtures/issues.rb b/spec/frontend/fixtures/issues.rb
index 9e6fcea2d17..90aa0544526 100644
--- a/spec/frontend/fixtures/issues.rb
+++ b/spec/frontend/fixtures/issues.rb
@@ -70,25 +70,29 @@ RSpec.describe API::Issues, '(JavaScript fixtures)', type: :request do
issue_title = 'foo'
issue_description = 'closed'
milestone = create(:milestone, title: '1.0.0', project: project)
- issue = create :issue,
- author: user,
- assignees: [user],
- project: project,
- milestone: milestone,
- created_at: generate(:past_time),
- updated_at: 1.hour.ago,
- title: issue_title,
- description: issue_description
+ issue = create(
+ :issue,
+ author: user,
+ assignees: [user],
+ project: project,
+ milestone: milestone,
+ created_at: generate(:past_time),
+ updated_at: 1.hour.ago,
+ title: issue_title,
+ description: issue_description
+ )
project.add_reporter(user)
create_referencing_mr(user, project, issue)
- create(:merge_request,
- :simple,
- author: user,
- source_project: project,
- target_project: project,
- description: "Some description")
+ create(
+ :merge_request,
+ :simple,
+ author: user,
+ source_project: project,
+ target_project: project,
+ description: "Some description"
+ )
project2 = create(:project, :public, creator_id: user.id, namespace: user.namespace)
create_referencing_mr(user, project2, issue).update!(head_pipeline: create(:ci_pipeline))
diff --git a/spec/frontend/fixtures/releases.rb b/spec/frontend/fixtures/releases.rb
index c7e3d8fe804..32ebe174800 100644
--- a/spec/frontend/fixtures/releases.rb
+++ b/spec/frontend/fixtures/releases.rb
@@ -11,23 +11,27 @@ RSpec.describe 'Releases (JavaScript fixtures)' do
let_it_be(:user) { create(:user, email: 'user@example.gitlab.com', username: 'user1') }
let_it_be(:milestone_12_3) do
- create(:milestone,
- id: 123,
- project: project,
- title: '12.3',
- description: 'The 12.3 milestone',
- start_date: Time.zone.parse('2018-12-10'),
- due_date: Time.zone.parse('2019-01-10'))
+ create(
+ :milestone,
+ id: 123,
+ project: project,
+ title: '12.3',
+ description: 'The 12.3 milestone',
+ start_date: Time.zone.parse('2018-12-10'),
+ due_date: Time.zone.parse('2019-01-10')
+ )
end
let_it_be(:milestone_12_4) do
- create(:milestone,
- id: 124,
- project: project,
- title: '12.4',
- description: 'The 12.4 milestone',
- start_date: Time.zone.parse('2019-01-10'),
- due_date: Time.zone.parse('2019-02-10'))
+ create(
+ :milestone,
+ id: 124,
+ project: project,
+ title: '12.4',
+ description: 'The 12.4 milestone',
+ start_date: Time.zone.parse('2019-01-10'),
+ due_date: Time.zone.parse('2019-02-10')
+ )
end
let_it_be(:open_issues_12_3) do
@@ -47,68 +51,78 @@ RSpec.describe 'Releases (JavaScript fixtures)' do
end
let_it_be(:release) do
- create(:release,
- milestones: [milestone_12_3, milestone_12_4],
- project: project,
- tag: 'v1.1',
- name: 'The first release',
- author: user,
- description: 'Best. Release. **Ever.** :rocket:',
- created_at: Time.zone.parse('2018-12-3'),
- released_at: Time.zone.parse('2018-12-10'))
+ create(
+ :release,
+ milestones: [milestone_12_3, milestone_12_4],
+ project: project,
+ tag: 'v1.1',
+ name: 'The first release',
+ author: user,
+ description: 'Best. Release. **Ever.** :rocket:',
+ created_at: Time.zone.parse('2018-12-3'),
+ released_at: Time.zone.parse('2018-12-10')
+ )
end
let_it_be(:evidence) do
- create(:evidence,
- release: release,
- collected_at: Time.zone.parse('2018-12-03'))
+ create(:evidence, release: release, collected_at: Time.zone.parse('2018-12-03'))
end
let_it_be(:other_link) do
- create(:release_link,
- id: 10,
- release: release,
- name: 'linux-amd64 binaries',
- filepath: '/binaries/linux-amd64',
- url: 'https://downloads.example.com/bin/gitlab-linux-amd64')
+ create(
+ :release_link,
+ id: 10,
+ release: release,
+ name: 'linux-amd64 binaries',
+ filepath: '/binaries/linux-amd64',
+ url: 'https://downloads.example.com/bin/gitlab-linux-amd64'
+ )
end
let_it_be(:runbook_link) do
- create(:release_link,
- id: 11,
- release: release,
- name: 'Runbook',
- url: "#{release.project.web_url}/runbook",
- link_type: :runbook)
+ create(
+ :release_link,
+ id: 11,
+ release: release,
+ name: 'Runbook',
+ url: "#{release.project.web_url}/runbook",
+ link_type: :runbook
+ )
end
let_it_be(:package_link) do
- create(:release_link,
- id: 12,
- release: release,
- name: 'Package',
- url: 'https://example.com/package',
- link_type: :package)
+ create(
+ :release_link,
+ id: 12,
+ release: release,
+ name: 'Package',
+ url: 'https://example.com/package',
+ link_type: :package
+ )
end
let_it_be(:image_link) do
- create(:release_link,
- id: 13,
- release: release,
- name: 'Image',
- url: 'https://example.com/image',
- link_type: :image)
+ create(
+ :release_link,
+ id: 13,
+ release: release,
+ name: 'Image',
+ url: 'https://example.com/image',
+ link_type: :image
+ )
end
let_it_be(:another_release) do
- create(:release,
- project: project,
- tag: 'v1.2',
- name: 'The second release',
- author: user,
- description: 'An okay release :shrug:',
- created_at: Time.zone.parse('2019-01-03'),
- released_at: Time.zone.parse('2019-01-10'))
+ create(
+ :release,
+ project: project,
+ tag: 'v1.2',
+ name: 'The second release',
+ author: user,
+ description: 'An okay release :shrug:',
+ created_at: Time.zone.parse('2019-01-03'),
+ released_at: Time.zone.parse('2019-01-10')
+ )
end
before do
diff --git a/spec/frontend/fixtures/search.rb b/spec/frontend/fixtures/search.rb
index b2da383d657..0036fb353a5 100644
--- a/spec/frontend/fixtures/search.rb
+++ b/spec/frontend/fixtures/search.rb
@@ -58,9 +58,10 @@ RSpec.describe SearchController, '(JavaScript fixtures)', type: :controller do
project_id: project.id,
startline: 2)
],
- total_count: 4,
- limit: 4,
- offset: 0)
+ total_count: 4,
+ limit: 4,
+ offset: 0
+ )
end
before do
diff --git a/spec/frontend/gfm_auto_complete_spec.js b/spec/frontend/gfm_auto_complete_spec.js
index 2d19c9871b6..da465552db3 100644
--- a/spec/frontend/gfm_auto_complete_spec.js
+++ b/spec/frontend/gfm_auto_complete_spec.js
@@ -55,14 +55,14 @@ describe('GfmAutoComplete', () => {
describe('assets loading', () => {
beforeEach(() => {
- atwhoInstance = { setting: {}, $inputor: 'inputor', at: '[vulnerability:' };
+ atwhoInstance = { setting: {}, $inputor: 'inputor', at: '~' };
items = ['loading'];
filterValue = gfmAutoCompleteCallbacks.filter.call(atwhoInstance, '', items);
});
it('should call the fetchData function without query', () => {
- expect(fetchDataMock.fetchData).toHaveBeenCalledWith('inputor', '[vulnerability:');
+ expect(fetchDataMock.fetchData).toHaveBeenCalledWith('inputor', '~');
});
it('should not call the default atwho filter', () => {
@@ -80,6 +80,29 @@ describe('GfmAutoComplete', () => {
items = [];
});
+ describe('when loading', () => {
+ beforeEach(() => {
+ items = ['loading'];
+ filterValue = gfmAutoCompleteCallbacks.filter.call(atwhoInstance, 'oldquery', items);
+ });
+
+ it('should call the fetchData function with query', () => {
+ expect(fetchDataMock.fetchData).toHaveBeenCalledWith(
+ 'inputor',
+ '[vulnerability:',
+ 'oldquery',
+ );
+ });
+
+ it('should not call the default atwho filter', () => {
+ expect($.fn.atwho.default.callbacks.filter).not.toHaveBeenCalled();
+ });
+
+ it('should return the passed unfiltered items', () => {
+ expect(filterValue).toEqual(items);
+ });
+ });
+
describe('when previous query is different from current one', () => {
beforeEach(() => {
gfmAutoCompleteCallbacks = GfmAutoComplete.prototype.getDefaultCallbacks.call({
@@ -173,7 +196,7 @@ describe('GfmAutoComplete', () => {
context = {
isLoadingData: { '[vulnerability:': false },
dataSources: { vulnerabilities: 'vulnerabilities_autocomplete_url' },
- cachedData: {},
+ cachedData: { '[vulnerability:': { other_query: [] } },
};
});
@@ -206,15 +229,14 @@ describe('GfmAutoComplete', () => {
const context = {
isLoadingData: { '[vulnerability:': false },
dataSources: { vulnerabilities: 'vulnerabilities_autocomplete_url' },
- cachedData: { '[vulnerability:': [{}] },
+ cachedData: { '[vulnerability:': { query: [] } },
+ loadData: () => {},
};
fetchData.call(context, {}, '[vulnerability:', 'query');
});
- it('should anyway call axios with query ignoring cache', () => {
- expect(axios.get).toHaveBeenCalledWith('vulnerabilities_autocomplete_url', {
- params: { search: 'query' },
- });
+ it('should not call axios', () => {
+ expect(axios.get).not.toHaveBeenCalled();
});
});
});
diff --git a/spec/frontend/google_tag_manager/index_spec.js b/spec/frontend/google_tag_manager/index_spec.js
index dd8e886e6bc..c32c86d5f5a 100644
--- a/spec/frontend/google_tag_manager/index_spec.js
+++ b/spec/frontend/google_tag_manager/index_spec.js
@@ -1,537 +1,9 @@
-import { merge } from 'lodash';
-import { v4 as uuidv4 } from 'uuid';
-import {
- trackCombinedGroupProjectForm,
- trackFreeTrialAccountSubmissions,
- trackProjectImport,
- trackNewRegistrations,
- trackSaasTrialSubmit,
- trackSaasTrialGroup,
- trackSaasTrialGetStarted,
- trackTrialAcceptTerms,
- trackCheckout,
- trackTransaction,
- trackAddToCartUsageTab,
- getNamespaceId,
- trackCompanyForm,
-} from '~/google_tag_manager';
-import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import { logError } from '~/lib/logger';
-
-jest.mock('~/lib/logger');
-jest.mock('uuid');
+import { trackTrialAcceptTerms } from 'ee_else_ce/google_tag_manager';
describe('~/google_tag_manager/index', () => {
- let spy;
-
- beforeEach(() => {
- spy = jest.fn();
-
- window.dataLayer = {
- push: spy,
- };
- window.gon.features = {
- gitlabGtmDatalayer: true,
- };
- });
-
- const createHTML = ({ links = [], forms = [] } = {}) => {
- // .foo elements are used to test elements which shouldn't do anything
- const allLinks = links.concat({ cls: 'foo' });
- const allForms = forms.concat({ cls: 'foo' });
-
- const el = document.createElement('div');
-
- allLinks.forEach(({ cls = '', id = '', href = '#', text = 'Hello', attributes = {} }) => {
- const a = document.createElement('a');
- a.id = id;
- a.href = href || '#';
- a.className = cls;
- a.textContent = text;
-
- Object.entries(attributes).forEach(([key, value]) => {
- a.setAttribute(key, value);
- });
-
- el.append(a);
- });
-
- allForms.forEach(({ cls = '', id = '' }) => {
- const form = document.createElement('form');
- form.id = id;
- form.className = cls;
-
- el.append(form);
- });
-
- return el.innerHTML;
- };
-
- const triggerEvent = (selector, eventType) => {
- const el = document.querySelector(selector);
-
- el.dispatchEvent(new Event(eventType));
- };
-
- const getSelector = ({ id, cls }) => (id ? `#${id}` : `.${cls}`);
-
- const createTestCase = (subject, { forms = [], links = [] }) => {
- const expectedFormEvents = forms.map(({ expectation, ...form }) => ({
- selector: getSelector(form),
- trigger: 'submit',
- expectation,
- }));
-
- const expectedLinkEvents = links.map(({ expectation, ...link }) => ({
- selector: getSelector(link),
- trigger: 'click',
- expectation,
- }));
-
- return [
- subject,
- {
- forms,
- links,
- expectedEvents: [...expectedFormEvents, ...expectedLinkEvents],
- },
- ];
- };
-
- const createOmniAuthTestCase = (subject, accountType) =>
- createTestCase(subject, {
- forms: [
- {
- id: 'new_new_user',
- expectation: {
- event: 'accountSubmit',
- accountMethod: 'form',
- accountType,
- },
- },
- ],
- links: [
- {
- // id is needed so that the test selects the right element to trigger
- id: 'test-0',
- cls: 'js-oauth-login',
- attributes: {
- 'data-provider': 'myspace',
- },
- expectation: {
- event: 'accountSubmit',
- accountMethod: 'myspace',
- accountType,
- },
- },
- {
- id: 'test-1',
- cls: 'js-oauth-login',
- attributes: {
- 'data-provider': 'gitlab',
- },
- expectation: {
- event: 'accountSubmit',
- accountMethod: 'gitlab',
- accountType,
- },
- },
- ],
- });
-
- describe.each([
- createOmniAuthTestCase(trackFreeTrialAccountSubmissions, 'freeThirtyDayTrial'),
- createOmniAuthTestCase(trackNewRegistrations, 'standardSignUp'),
- createTestCase(trackSaasTrialGroup, {
- forms: [{ cls: 'js-saas-trial-group', expectation: { event: 'saasTrialGroup' } }],
- }),
- createTestCase(trackProjectImport, {
- links: [
- {
- id: 'js-test-btn-0',
- cls: 'js-import-project-btn',
- attributes: { 'data-platform': 'bitbucket' },
- expectation: { event: 'projectImport', platform: 'bitbucket' },
- },
- {
- // id is neeeded so we trigger the right element in the test
- id: 'js-test-btn-1',
- cls: 'js-import-project-btn',
- attributes: { 'data-platform': 'github' },
- expectation: { event: 'projectImport', platform: 'github' },
- },
- ],
- }),
- createTestCase(trackSaasTrialGetStarted, {
- links: [
- {
- cls: 'js-get-started-btn',
- expectation: { event: 'saasTrialGetStarted' },
- },
- ],
- }),
- createTestCase(trackAddToCartUsageTab, {
- links: [
- {
- cls: 'js-buy-additional-minutes',
- expectation: {
- event: 'EECproductAddToCart',
- ecommerce: {
- currencyCode: 'USD',
- add: {
- products: [
- {
- name: 'CI/CD Minutes',
- id: '0003',
- price: '10',
- brand: 'GitLab',
- category: 'DevOps',
- variant: 'add-on',
- quantity: 1,
- },
- ],
- },
- },
- },
- },
- ],
- }),
- createTestCase(trackCombinedGroupProjectForm, {
- forms: [
- {
- cls: 'js-groups-projects-form',
- expectation: { event: 'combinedGroupProjectFormSubmit' },
- },
- ],
- }),
- ])('%p', (subject, { links = [], forms = [], expectedEvents }) => {
- beforeEach(() => {
- setHTMLFixture(createHTML({ links, forms }));
-
- subject();
- });
-
- afterEach(() => {
- resetHTMLFixture();
- });
-
- it.each(expectedEvents)('when %p', ({ selector, trigger, expectation }) => {
- expect(spy).not.toHaveBeenCalled();
-
- triggerEvent(selector, trigger);
-
- expect(spy).toHaveBeenCalledTimes(1);
- expect(spy).toHaveBeenCalledWith(expectation);
- expect(logError).not.toHaveBeenCalled();
- });
-
- it('when random link is clicked, does nothing', () => {
- triggerEvent('a.foo', 'click');
-
- expect(spy).not.toHaveBeenCalled();
- });
-
- it('when random form is submitted, does nothing', () => {
- triggerEvent('form.foo', 'submit');
-
- expect(spy).not.toHaveBeenCalled();
- });
- });
-
describe('No listener events', () => {
- it('when trackSaasTrialSubmit is invoked', () => {
- expect(spy).not.toHaveBeenCalled();
-
- trackSaasTrialSubmit();
-
- expect(spy).toHaveBeenCalledTimes(1);
- expect(spy).toHaveBeenCalledWith({ event: 'saasTrialSubmit' });
- expect(logError).not.toHaveBeenCalled();
- });
-
it('when trackTrialAcceptTerms is invoked', () => {
- expect(spy).not.toHaveBeenCalled();
-
- trackTrialAcceptTerms();
-
- expect(spy).toHaveBeenCalledTimes(1);
- expect(spy).toHaveBeenCalledWith({ event: 'saasTrialAcceptTerms' });
- expect(logError).not.toHaveBeenCalled();
- });
-
- describe('when trackCheckout is invoked', () => {
- it('with selectedPlan: 2c92a00d76f0d5060176f2fb0a5029ff', () => {
- expect(spy).not.toHaveBeenCalled();
-
- trackCheckout('2c92a00d76f0d5060176f2fb0a5029ff', 1);
-
- expect(spy.mock.calls.flatMap((x) => x)).toEqual([
- { ecommerce: null },
- {
- event: 'EECCheckout',
- ecommerce: {
- currencyCode: 'USD',
- checkout: {
- actionField: { step: 1 },
- products: [
- {
- brand: 'GitLab',
- category: 'DevOps',
- id: '0002',
- name: 'Premium',
- price: '228',
- quantity: 1,
- variant: 'SaaS',
- },
- ],
- },
- },
- },
- ]);
- });
-
- it('with selectedPlan: 2c92a0ff76f0d5250176f2f8c86f305a', () => {
- expect(spy).not.toHaveBeenCalled();
-
- trackCheckout('2c92a0ff76f0d5250176f2f8c86f305a', 1);
-
- expect(spy).toHaveBeenCalledTimes(2);
- expect(spy).toHaveBeenCalledWith({ ecommerce: null });
- expect(spy).toHaveBeenCalledWith({
- event: 'EECCheckout',
- ecommerce: {
- currencyCode: 'USD',
- checkout: {
- actionField: { step: 1 },
- products: [
- {
- brand: 'GitLab',
- category: 'DevOps',
- id: '0001',
- name: 'Ultimate',
- price: '1188',
- quantity: 1,
- variant: 'SaaS',
- },
- ],
- },
- },
- });
- });
-
- it('with selectedPlan: Something else', () => {
- expect(spy).not.toHaveBeenCalled();
-
- trackCheckout('Something else', 1);
-
- expect(spy).not.toHaveBeenCalled();
- });
-
- it('with a different number of users', () => {
- expect(spy).not.toHaveBeenCalled();
-
- trackCheckout('2c92a0ff76f0d5250176f2f8c86f305a', 5);
-
- expect(spy).toHaveBeenCalledTimes(2);
- expect(spy).toHaveBeenCalledWith({ ecommerce: null });
- expect(spy).toHaveBeenCalledWith({
- event: 'EECCheckout',
- ecommerce: {
- currencyCode: 'USD',
- checkout: {
- actionField: { step: 1 },
- products: [
- {
- brand: 'GitLab',
- category: 'DevOps',
- id: '0001',
- name: 'Ultimate',
- price: '1188',
- quantity: 5,
- variant: 'SaaS',
- },
- ],
- },
- },
- });
- });
- });
-
- describe('when trackTransactions is invoked', () => {
- describe.each([
- {
- selectedPlan: '2c92a00d76f0d5060176f2fb0a5029ff',
- revenue: 228,
- name: 'Premium',
- id: '0002',
- },
- {
- selectedPlan: '2c92a0ff76f0d5250176f2f8c86f305a',
- revenue: 1188,
- name: 'Ultimate',
- id: '0001',
- },
- ])('with %o', (planObject) => {
- it('invokes pushes a new event that references the selected plan', () => {
- const { selectedPlan, revenue, name, id } = planObject;
-
- expect(spy).not.toHaveBeenCalled();
- uuidv4.mockImplementationOnce(() => '123');
-
- const transactionDetails = {
- paymentOption: 'visa',
- revenue,
- tax: 10,
- selectedPlan,
- quantity: 1,
- };
-
- trackTransaction(transactionDetails);
-
- expect(spy.mock.calls.flatMap((x) => x)).toEqual([
- { ecommerce: null },
- {
- event: 'EECtransactionSuccess',
- ecommerce: {
- currencyCode: 'USD',
- purchase: {
- actionField: {
- id: '123',
- affiliation: 'GitLab',
- option: 'visa',
- revenue: revenue.toString(),
- tax: '10',
- },
- products: [
- {
- brand: 'GitLab',
- category: 'DevOps',
- dimension36: 'not available',
- id,
- name,
- price: revenue.toString(),
- quantity: 1,
- variant: 'SaaS',
- },
- ],
- },
- },
- },
- ]);
- });
- });
- });
-
- describe('when trackTransaction is invoked', () => {
- describe('with an invalid plan object', () => {
- it('does not get called', () => {
- expect(spy).not.toHaveBeenCalled();
-
- trackTransaction({ selectedPlan: 'notAplan' });
-
- expect(spy).not.toHaveBeenCalled();
- });
- });
- });
-
- describe('when trackCompanyForm is invoked', () => {
- it('with an ultimate trial', () => {
- expect(spy).not.toHaveBeenCalled();
-
- trackCompanyForm('ultimate_trial');
-
- expect(spy).toHaveBeenCalledTimes(1);
- expect(spy).toHaveBeenCalledWith({
- event: 'aboutYourCompanyFormSubmit',
- aboutYourCompanyType: 'ultimate_trial',
- });
- expect(logError).not.toHaveBeenCalled();
- });
-
- it('with a free account', () => {
- expect(spy).not.toHaveBeenCalled();
-
- trackCompanyForm('free_account');
-
- expect(spy).toHaveBeenCalledTimes(1);
- expect(spy).toHaveBeenCalledWith({
- event: 'aboutYourCompanyFormSubmit',
- aboutYourCompanyType: 'free_account',
- });
- expect(logError).not.toHaveBeenCalled();
- });
- });
- });
-
- describe.each([
- { dataLayer: null },
- { gon: { features: null } },
- { gon: { features: { gitlabGtmDatalayer: false } } },
- ])('when window %o', (windowAttrs) => {
- beforeEach(() => {
- merge(window, windowAttrs);
- });
-
- it('no ops', () => {
- setHTMLFixture(createHTML({ forms: [{ cls: 'js-saas-trial-group' }] }));
-
- trackSaasTrialGroup();
-
- triggerEvent('.js-saas-trial-group', 'submit');
-
- expect(spy).not.toHaveBeenCalled();
- expect(logError).not.toHaveBeenCalled();
-
- resetHTMLFixture();
- });
- });
-
- describe('when window.dataLayer throws error', () => {
- const pushError = new Error('test');
-
- beforeEach(() => {
- window.dataLayer = {
- push() {
- throw pushError;
- },
- };
- });
-
- it('logs error', () => {
- setHTMLFixture(createHTML({ forms: [{ cls: 'js-saas-trial-group' }] }));
-
- trackSaasTrialGroup();
-
- triggerEvent('.js-saas-trial-group', 'submit');
-
- expect(logError).toHaveBeenCalledWith(
- 'Unexpected error while pushing to dataLayer',
- pushError,
- );
-
- resetHTMLFixture();
- });
- });
-
- describe('when getting the namespace_id from Snowplow standard context', () => {
- describe('when window.gl.snowplowStandardContext.data.namespace_id has a value', () => {
- beforeEach(() => {
- window.gl = { snowplowStandardContext: { data: { namespace_id: '321' } } };
- });
-
- it('returns the value', () => {
- expect(getNamespaceId()).toBe('321');
- });
- });
-
- describe('when window.gl.snowplowStandardContext.data.namespace_id is undefined', () => {
- beforeEach(() => {
- window.gl = {};
- });
-
- it('returns a placeholder value', () => {
- expect(getNamespaceId()).toBe('not available');
- });
+ expect(trackTrialAcceptTerms()).toBeUndefined();
});
});
});
diff --git a/spec/frontend/helpers/startup_css_helper_spec.js b/spec/frontend/helpers/startup_css_helper_spec.js
deleted file mode 100644
index 28c742386cc..00000000000
--- a/spec/frontend/helpers/startup_css_helper_spec.js
+++ /dev/null
@@ -1,67 +0,0 @@
-import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import { waitForCSSLoaded } from '~/helpers/startup_css_helper';
-
-describe('waitForCSSLoaded', () => {
- let mockedCallback;
-
- beforeEach(() => {
- mockedCallback = jest.fn();
- });
-
- describe('Promise-like api', () => {
- it('can be used with a callback', async () => {
- await waitForCSSLoaded(mockedCallback);
- expect(mockedCallback).toHaveBeenCalledTimes(1);
- });
-
- it('can be used as a promise', async () => {
- await waitForCSSLoaded().then(mockedCallback);
- expect(mockedCallback).toHaveBeenCalledTimes(1);
- });
- });
-
- describe('when gon features is not provided', () => {
- beforeEach(() => {
- window.gon = null;
- });
-
- it('should invoke the action right away', async () => {
- const events = waitForCSSLoaded(mockedCallback);
- await events;
-
- expect(mockedCallback).toHaveBeenCalledTimes(1);
- });
- });
-
- describe('with startup css enabled', () => {
- it('should dispatch CSSLoaded when the assets are cached or already loaded', async () => {
- setHTMLFixture(`
- <link href="one.css" data-startupcss="loaded">
- <link href="two.css" data-startupcss="loaded">
- `);
- await waitForCSSLoaded(mockedCallback);
-
- expect(mockedCallback).toHaveBeenCalledTimes(1);
-
- resetHTMLFixture();
- });
-
- it('should wait to call CssLoaded until the assets are loaded', async () => {
- setHTMLFixture(`
- <link href="one.css" data-startupcss="loading">
- <link href="two.css" data-startupcss="loading">
- `);
- const events = waitForCSSLoaded(mockedCallback);
- document.querySelectorAll('[data-startupcss="loading"]').forEach((elem) => {
- // eslint-disable-next-line no-param-reassign
- elem.dataset.startupcss = 'loaded';
- });
- document.dispatchEvent(new CustomEvent('CSSStartupLinkLoaded'));
- await events;
-
- expect(mockedCallback).toHaveBeenCalledTimes(1);
-
- resetHTMLFixture();
- });
- });
-});
diff --git a/spec/frontend/ide/init_gitlab_web_ide_spec.js b/spec/frontend/ide/init_gitlab_web_ide_spec.js
index efbbd6c7514..6a5bedb0bbb 100644
--- a/spec/frontend/ide/init_gitlab_web_ide_spec.js
+++ b/spec/frontend/ide/init_gitlab_web_ide_spec.js
@@ -4,6 +4,7 @@ import { initGitlabWebIDE } from '~/ide/init_gitlab_web_ide';
import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_action';
import { createAndSubmitForm } from '~/lib/utils/create_and_submit_form';
import { handleTracking } from '~/ide/lib/gitlab_web_ide/handle_tracking_event';
+import Tracking from '~/tracking';
import { TEST_HOST } from 'helpers/test_constants';
import setWindowLocation from 'helpers/set_window_location_helper';
import waitForPromises from 'helpers/wait_for_promises';
@@ -15,6 +16,7 @@ jest.mock('~/lib/utils/csrf', () => ({
token: 'mock-csrf-token',
headerKey: 'mock-csrf-header',
}));
+jest.mock('~/tracking');
const ROOT_ELEMENT_ID = 'ide';
const TEST_NONCE = 'test123nonce';
@@ -34,9 +36,9 @@ const TEST_START_REMOTE_PARAMS = {
remotePath: '/test/projects/f oo',
connectionToken: '123abc',
};
-const TEST_EDITOR_FONT_SRC_URL = 'http://gitlab.test/assets/jetbrains-mono/JetBrainsMono.woff2';
+const TEST_EDITOR_FONT_SRC_URL = 'http://gitlab.test/assets/gitlab-mono/GitLabMono.woff2';
const TEST_EDITOR_FONT_FORMAT = 'woff2';
-const TEST_EDITOR_FONT_FAMILY = 'JebBrains Mono';
+const TEST_EDITOR_FONT_FAMILY = 'GitLab Mono';
describe('ide/init_gitlab_web_ide', () => {
let resolveConfirm;
@@ -54,9 +56,20 @@ describe('ide/init_gitlab_web_ide', () => {
el.dataset.userPreferencesPath = TEST_USER_PREFERENCES_PATH;
el.dataset.mergeRequest = TEST_MR_ID;
el.dataset.filePath = TEST_FILE_PATH;
- el.dataset.editorFontSrcUrl = TEST_EDITOR_FONT_SRC_URL;
- el.dataset.editorFontFormat = TEST_EDITOR_FONT_FORMAT;
- el.dataset.editorFontFamily = TEST_EDITOR_FONT_FAMILY;
+ el.dataset.editorFont = JSON.stringify({
+ fallback_font_family: 'monospace',
+ font_faces: [
+ {
+ family: TEST_EDITOR_FONT_FAMILY,
+ src: [
+ {
+ url: TEST_EDITOR_FONT_SRC_URL,
+ format: TEST_EDITOR_FONT_FORMAT,
+ },
+ ],
+ },
+ ],
+ });
el.dataset.signInPath = TEST_SIGN_IN_PATH;
document.body.append(el);
@@ -88,7 +101,11 @@ describe('ide/init_gitlab_web_ide', () => {
});
describe('default', () => {
+ const telemetryEnabled = true;
+
beforeEach(() => {
+ Tracking.enabled.mockReturnValueOnce(telemetryEnabled);
+
createSubject();
});
@@ -115,12 +132,22 @@ describe('ide/init_gitlab_web_ide', () => {
signIn: TEST_SIGN_IN_PATH,
},
editorFont: {
- srcUrl: TEST_EDITOR_FONT_SRC_URL,
- fontFamily: TEST_EDITOR_FONT_FAMILY,
- format: TEST_EDITOR_FONT_FORMAT,
+ fallbackFontFamily: 'monospace',
+ fontFaces: [
+ {
+ family: TEST_EDITOR_FONT_FAMILY,
+ src: [
+ {
+ url: TEST_EDITOR_FONT_SRC_URL,
+ format: TEST_EDITOR_FONT_FORMAT,
+ },
+ ],
+ },
+ ],
},
handleStartRemote: expect.any(Function),
handleTracking,
+ telemetryEnabled,
});
});
diff --git a/spec/frontend/import/details/mock_data.js b/spec/frontend/import/details/mock_data.js
index 67148173404..b61a7f36f85 100644
--- a/spec/frontend/import/details/mock_data.js
+++ b/spec/frontend/import/details/mock_data.js
@@ -7,7 +7,7 @@ export const mockImportFailures = [
exception_class: 'ActiveRecord::RecordInvalid',
exception_message: 'Record invalid',
source: 'Gitlab::GithubImport::Importer::PullRequestImporter',
- github_identifiers: {
+ external_identifiers: {
iid: 2,
issuable_type: 'MergeRequest',
object_type: 'pull_request',
@@ -22,7 +22,7 @@ export const mockImportFailures = [
exception_class: 'ActiveRecord::RecordInvalid',
exception_message: 'Record invalid',
source: 'Gitlab::GithubImport::Importer::PullRequestImporter',
- github_identifiers: {
+ external_identifiers: {
iid: 3,
issuable_type: 'MergeRequest',
object_type: 'pull_request',
@@ -37,7 +37,7 @@ export const mockImportFailures = [
exception_class: 'NameError',
exception_message: 'some message',
source: 'Gitlab::GithubImport::Importer::LfsObjectImporter',
- github_identifiers: {
+ external_identifiers: {
oid: '3a9257fae9e86faee27d7208cb55e086f18e6f29f48c430bfbc26d42eb',
size: 2473979,
},
diff --git a/spec/frontend/import_entities/components/group_dropdown_spec.js b/spec/frontend/import_entities/components/group_dropdown_spec.js
deleted file mode 100644
index 14f39a35387..00000000000
--- a/spec/frontend/import_entities/components/group_dropdown_spec.js
+++ /dev/null
@@ -1,94 +0,0 @@
-import { GlSearchBoxByType, GlDropdown } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import Vue, { nextTick } from 'vue';
-import VueApollo from 'vue-apollo';
-import createMockApollo from 'helpers/mock_apollo_helper';
-import waitForPromises from 'helpers/wait_for_promises';
-import GroupDropdown from '~/import_entities/components/group_dropdown.vue';
-import { DEBOUNCE_DELAY } from '~/vue_shared/components/filtered_search_bar/constants';
-import searchNamespacesWhereUserCanImportProjectsQuery from '~/import_entities/import_projects/graphql/queries/search_namespaces_where_user_can_import_projects.query.graphql';
-
-Vue.use(VueApollo);
-
-const makeGroupMock = (fullPath) => ({
- id: `gid://gitlab/Group/${fullPath}`,
- fullPath,
- name: fullPath,
- visibility: 'public',
- webUrl: `http://gdk.test:3000/groups/${fullPath}`,
- __typename: 'Group',
-});
-
-const AVAILABLE_NAMESPACES = [
- makeGroupMock('match1'),
- makeGroupMock('unrelated'),
- makeGroupMock('match2'),
-];
-
-const SEARCH_NAMESPACES_MOCK = Promise.resolve({
- data: {
- currentUser: {
- id: 'gid://gitlab/User/1',
- groups: {
- nodes: AVAILABLE_NAMESPACES,
- __typename: 'GroupConnection',
- },
- namespace: {
- id: 'gid://gitlab/Namespaces::UserNamespace/1',
- fullPath: 'root',
- __typename: 'Namespace',
- },
- __typename: 'UserCore',
- },
- },
-});
-
-describe('Import entities group dropdown component', () => {
- let wrapper;
- let namespacesTracker;
-
- const createComponent = (propsData) => {
- const apolloProvider = createMockApollo([
- [searchNamespacesWhereUserCanImportProjectsQuery, () => SEARCH_NAMESPACES_MOCK],
- ]);
-
- namespacesTracker = jest.fn();
-
- wrapper = shallowMount(GroupDropdown, {
- apolloProvider,
- scopedSlots: {
- default: namespacesTracker,
- },
- stubs: { GlDropdown },
- propsData,
- });
- };
-
- it('passes namespaces from graphql query to default slot', async () => {
- createComponent();
- jest.advanceTimersByTime(DEBOUNCE_DELAY);
- await nextTick();
- await waitForPromises();
- await nextTick();
-
- expect(namespacesTracker).toHaveBeenCalledWith({ namespaces: AVAILABLE_NAMESPACES });
- });
-
- it('filters namespaces based on user input', async () => {
- createComponent();
-
- namespacesTracker.mockReset();
- wrapper.findComponent(GlSearchBoxByType).vm.$emit('input', 'match');
- jest.advanceTimersByTime(DEBOUNCE_DELAY);
- await nextTick();
- await waitForPromises();
- await nextTick();
-
- expect(namespacesTracker).toHaveBeenCalledWith({
- namespaces: [
- expect.objectContaining({ fullPath: 'match1' }),
- expect.objectContaining({ fullPath: 'match2' }),
- ],
- });
- });
-});
diff --git a/spec/frontend/import_entities/components/import_target_dropdown_spec.js b/spec/frontend/import_entities/components/import_target_dropdown_spec.js
index c12baed2374..ba0bb0b0f74 100644
--- a/spec/frontend/import_entities/components/import_target_dropdown_spec.js
+++ b/spec/frontend/import_entities/components/import_target_dropdown_spec.js
@@ -18,7 +18,6 @@ describe('ImportTargetDropdown', () => {
const defaultProps = {
selected: mockUserNamespace,
- userNamespace: mockUserNamespace,
};
const createComponent = ({ props = {} } = {}) => {
@@ -39,7 +38,7 @@ describe('ImportTargetDropdown', () => {
};
const findListbox = () => wrapper.findComponent(GlCollapsibleListbox);
- const findListboxUsersItems = () => findListbox().props('items')[0].options;
+ const findListboxFirstGroupItems = () => findListbox().props('items')[0].options;
const findListboxGroupsItems = () => findListbox().props('items')[1].options;
const waitForQuery = async () => {
@@ -63,12 +62,54 @@ describe('ImportTargetDropdown', () => {
expect(findListbox().props('toggleText')).toBe('a-group-path-that-is-lo…');
});
- it('passes userNamespace as "Users" group item', () => {
- createComponent();
+ describe('when used on group import', () => {
+ beforeEach(() => {
+ createComponent();
+ });
- expect(findListboxUsersItems()).toEqual([
- { text: mockUserNamespace, value: mockUserNamespace },
- ]);
+ it('adds "No parent" in "Parent" group', () => {
+ expect(findListboxFirstGroupItems()).toEqual([{ text: 'No parent', value: '' }]);
+ });
+
+ it('emits "select" event with { fullPath: "", id: null } when "No parent" is selected', () => {
+ findListbox().vm.$emit('select', '');
+
+ expect(wrapper.emitted('select')[0]).toEqual([{ fullPath: '', id: null }]);
+ });
+
+ it('emits "select" event with { fullPath, id } when a group is selected', async () => {
+ await waitForQuery();
+
+ const mockGroupPath = 'match1';
+
+ findListbox().vm.$emit('select', mockGroupPath);
+
+ expect(wrapper.emitted('select')[0]).toEqual([
+ { fullPath: mockGroupPath, id: `gid://gitlab/Group/${mockGroupPath}` },
+ ]);
+ });
+ });
+
+ describe('when used on project import', () => {
+ beforeEach(() => {
+ createComponent({
+ props: { userNamespace: mockUserNamespace },
+ });
+ });
+
+ it('passes userNamespace as "Users" group item', () => {
+ expect(findListboxFirstGroupItems()).toEqual([
+ { text: mockUserNamespace, value: mockUserNamespace },
+ ]);
+ });
+
+ it('emits "select" event with path as value', () => {
+ const mockProjectPath = 'mock-project';
+
+ findListbox().vm.$emit('select', mockProjectPath);
+
+ expect(wrapper.emitted('select')[0]).toEqual([mockProjectPath]);
+ });
});
it('passes namespaces from GraphQL as "Groups" group item', async () => {
diff --git a/spec/frontend/import_entities/import_groups/components/import_table_spec.js b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
index 03d0920994c..4fab22e316a 100644
--- a/spec/frontend/import_entities/import_groups/components/import_table_spec.js
+++ b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
@@ -1,8 +1,8 @@
import { GlEmptyState, GlIcon, GlLoadingIcon } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import MockAdapter from 'axios-mock-adapter';
+import { mountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
@@ -55,14 +55,15 @@ describe('import table', () => {
wrapper.findAll('tbody td button').wrappers.filter((w) => w.text() === 'Import with projects')[
idx
];
- const findPaginationDropdown = () => wrapper.find('[data-testid="page-size"]');
+ const findPaginationDropdown = () => wrapper.findByTestId('page-size');
const findTargetNamespaceDropdown = (rowWrapper) =>
- rowWrapper.find('[data-testid="target-namespace-selector"]');
+ extendedWrapper(rowWrapper).findByTestId('target-namespace-dropdown');
+ const findTargetNamespaceInput = (rowWrapper) =>
+ extendedWrapper(rowWrapper).findByTestId('target-namespace-input');
const findPaginationDropdownText = () => findPaginationDropdown().find('button').text();
const findSelectionCount = () => wrapper.find('[data-test-id="selection-count"]');
const findNewPathCol = () => wrapper.find('[data-test-id="new-path-col"]');
- const findUnavailableFeaturesWarning = () =>
- wrapper.find('[data-testid="unavailable-features-alert"]');
+ const findUnavailableFeaturesWarning = () => wrapper.findByTestId('unavailable-features-alert');
const triggerSelectAllCheckbox = (checked = true) =>
wrapper.find('thead input[type=checkbox]').setChecked(checked);
@@ -88,7 +89,7 @@ describe('import table', () => {
},
);
- wrapper = mount(ImportTable, {
+ wrapper = mountExtended(ImportTable, {
propsData: {
groupPathRegex: /.*/,
jobsPath: '/fake_job_path',
@@ -220,32 +221,42 @@ describe('import table', () => {
expect(wrapper.text()).not.toContain('Showing 1-0');
});
- it('invokes importGroups mutation when row button is clicked', async () => {
- createComponent({
- bulkImportSourceGroups: () => ({
- nodes: [FAKE_GROUP],
- pageInfo: FAKE_PAGE_INFO,
- versionValidation: FAKE_VERSION_VALIDATION,
- }),
- });
+ describe('when import button is clicked', () => {
+ beforeEach(async () => {
+ createComponent({
+ bulkImportSourceGroups: () => ({
+ nodes: [FAKE_GROUP],
+ pageInfo: FAKE_PAGE_INFO,
+ versionValidation: FAKE_VERSION_VALIDATION,
+ }),
+ });
- jest.spyOn(apolloProvider.defaultClient, 'mutate');
+ jest.spyOn(apolloProvider.defaultClient, 'mutate');
- await waitForPromises();
+ await waitForPromises();
- await findRowImportDropdownAtIndex(0).trigger('click');
- expect(apolloProvider.defaultClient.mutate).toHaveBeenCalledWith({
- mutation: importGroupsMutation,
- variables: {
- importRequests: [
- {
- migrateProjects: true,
- newName: FAKE_GROUP.lastImportTarget.newName,
- sourceGroupId: FAKE_GROUP.id,
- targetNamespace: AVAILABLE_NAMESPACES[0].fullPath,
- },
- ],
- },
+ await findRowImportDropdownAtIndex(0).trigger('click');
+ });
+
+ it('invokes importGroups mutation', () => {
+ expect(apolloProvider.defaultClient.mutate).toHaveBeenCalledWith({
+ mutation: importGroupsMutation,
+ variables: {
+ importRequests: [
+ {
+ migrateProjects: true,
+ newName: FAKE_GROUP.lastImportTarget.newName,
+ sourceGroupId: FAKE_GROUP.id,
+ targetNamespace: AVAILABLE_NAMESPACES[0].fullPath,
+ },
+ ],
+ },
+ });
+ });
+
+ it('disables the import target input', () => {
+ const firstRow = wrapper.find('tbody tr');
+ expect(findTargetNamespaceInput(firstRow).attributes('disabled')).toBe('disabled');
});
});
@@ -294,6 +305,42 @@ describe('import table', () => {
expect(wrapper.find('tbody tr').text()).toContain(i18n.ERROR_TOO_MANY_REQUESTS);
});
+ it('displays inline error if backend returns validation error', async () => {
+ const mockValidationError =
+ 'Import failed. Destination URL must not start or end with a special character and must not contain consecutive special characters.';
+ const mockMutationWithProgressError = jest.fn().mockResolvedValue({
+ __typename: 'ClientBulkImportSourceGroup',
+ id: 1,
+ lastImportTarget: { id: 1, targetNamespace: 'root', newName: 'group1' },
+ progress: {
+ __typename: 'ClientBulkImportProgress',
+ id: null,
+ status: 'failed',
+ message: mockValidationError,
+ },
+ });
+
+ createComponent({
+ bulkImportSourceGroups: () => ({
+ nodes: [FAKE_GROUP],
+ pageInfo: FAKE_PAGE_INFO,
+ versionValidation: FAKE_VERSION_VALIDATION,
+ }),
+ importGroups: mockMutationWithProgressError,
+ });
+
+ await waitForPromises();
+ await findRowImportDropdownAtIndex(0).trigger('click');
+ await waitForPromises();
+
+ expect(mockMutationWithProgressError).toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
+
+ const firstRow = wrapper.find('tbody tr');
+ expect(findTargetNamespaceInput(firstRow).attributes('disabled')).toBeUndefined();
+ expect(firstRow.text()).toContain(mockValidationError);
+ });
+
describe('pagination', () => {
const bulkImportSourceGroupsQueryMock = jest.fn().mockResolvedValue({
nodes: [FAKE_GROUP],
@@ -345,6 +392,28 @@ describe('import table', () => {
);
});
+ it('resets page to 1 when page size is changed', async () => {
+ wrapper.findComponent(PaginationBar).vm.$emit('set-page', 2);
+ await waitForPromises();
+
+ expect(bulkImportSourceGroupsQueryMock).toHaveBeenCalledWith(
+ expect.anything(),
+ expect.objectContaining({ page: 2, perPage: 50 }),
+ expect.anything(),
+ expect.anything(),
+ );
+
+ wrapper.findComponent(PaginationBar).vm.$emit('set-page-size', 200);
+ await waitForPromises();
+
+ expect(bulkImportSourceGroupsQueryMock).toHaveBeenCalledWith(
+ expect.anything(),
+ expect.objectContaining({ page: 1, perPage: 200 }),
+ expect.anything(),
+ expect.anything(),
+ );
+ });
+
it('updates status text when page is changed', async () => {
const REQUESTED_PAGE = 2;
bulkImportSourceGroupsQueryMock.mockResolvedValue({
@@ -601,7 +670,7 @@ describe('import table', () => {
});
describe('re-import', () => {
- it('renders finished row as disabled by default', async () => {
+ beforeEach(async () => {
createComponent({
bulkImportSourceGroups: () => ({
nodes: [generateFakeEntry({ id: 5, status: STATUSES.FINISHED })],
@@ -609,21 +678,15 @@ describe('import table', () => {
versionValidation: FAKE_VERSION_VALIDATION,
}),
});
+
await waitForPromises();
+ });
+ it('renders finished row as disabled by default', () => {
expect(findRowCheckbox(0).attributes('disabled')).toBeDefined();
});
it('enables row after clicking re-import', async () => {
- createComponent({
- bulkImportSourceGroups: () => ({
- nodes: [generateFakeEntry({ id: 5, status: STATUSES.FINISHED })],
- pageInfo: FAKE_PAGE_INFO,
- versionValidation: FAKE_VERSION_VALIDATION,
- }),
- });
- await waitForPromises();
-
const reimportButton = wrapper
.findAll('tbody td button')
.wrappers.find((w) => w.text().includes('Re-import'));
diff --git a/spec/frontend/import_entities/import_groups/components/import_target_cell_spec.js b/spec/frontend/import_entities/import_groups/components/import_target_cell_spec.js
index 46884a42707..ac95026a9a4 100644
--- a/spec/frontend/import_entities/import_groups/components/import_target_cell_spec.js
+++ b/spec/frontend/import_entities/import_groups/components/import_target_cell_spec.js
@@ -1,10 +1,9 @@
-import { GlDropdownItem, GlFormInput } from '@gitlab/ui';
+import { GlFormInput } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import { shallowMount } from '@vue/test-utils';
import createMockApollo from 'helpers/mock_apollo_helper';
-import waitForPromises from 'helpers/wait_for_promises';
-import ImportGroupDropdown from '~/import_entities/components/group_dropdown.vue';
+import ImportTargetDropdown from '~/import_entities/components/import_target_dropdown.vue';
import { STATUSES } from '~/import_entities/constants';
import ImportTargetCell from '~/import_entities/import_groups/components/import_target_cell.vue';
import { DEBOUNCE_DELAY } from '~/vue_shared/components/filtered_search_bar/constants';
@@ -37,7 +36,7 @@ describe('import target cell', () => {
let group;
const findNameInput = () => wrapper.findComponent(GlFormInput);
- const findNamespaceDropdown = () => wrapper.findComponent(ImportGroupDropdown);
+ const findNamespaceDropdown = () => wrapper.findComponent(ImportTargetDropdown);
const createComponent = (props) => {
apolloProvider = createMockApollo([
@@ -49,7 +48,7 @@ describe('import target cell', () => {
wrapper = shallowMount(ImportTargetCell, {
apolloProvider,
- stubs: { ImportGroupDropdown },
+ stubs: { ImportTargetDropdown },
propsData: {
groupPathRegex: /.*/,
...props,
@@ -73,14 +72,14 @@ describe('import target cell', () => {
});
it('emits update-target-namespace when dropdown option is clicked', () => {
- const dropdownItem = findNamespaceDropdown().findAllComponents(GlDropdownItem).at(2);
+ const targetNamespace = {
+ fullPath: AVAILABLE_NAMESPACES[1].fullPath,
+ id: AVAILABLE_NAMESPACES[1].id,
+ };
- dropdownItem.vm.$emit('click');
+ findNamespaceDropdown().vm.$emit('select', targetNamespace);
- expect(wrapper.emitted('update-target-namespace')).toBeDefined();
- expect(wrapper.emitted('update-target-namespace')[0][0]).toStrictEqual(
- AVAILABLE_NAMESPACES[1],
- );
+ expect(wrapper.emitted('update-target-namespace')[0]).toStrictEqual([targetNamespace]);
});
});
@@ -101,36 +100,6 @@ describe('import target cell', () => {
});
});
- it('renders only no parent option if available namespaces list is empty', () => {
- createComponent({
- group: generateFakeTableEntry({ id: 1, status: STATUSES.NONE }),
- availableNamespaces: [],
- });
-
- const items = findNamespaceDropdown()
- .findAllComponents(GlDropdownItem)
- .wrappers.map((w) => w.text());
-
- expect(items[0]).toBe('No parent');
- expect(items).toHaveLength(1);
- });
-
- it('renders both no parent option and available namespaces list when available namespaces list is not empty', async () => {
- createComponent({
- group: generateFakeTableEntry({ id: 1, status: STATUSES.NONE }),
- });
- jest.advanceTimersByTime(DEBOUNCE_DELAY);
- await waitForPromises();
- await nextTick();
-
- const [firstItem, ...rest] = findNamespaceDropdown()
- .findAllComponents(GlDropdownItem)
- .wrappers.map((w) => w.text());
-
- expect(firstItem).toBe('No parent');
- expect(rest).toHaveLength(AVAILABLE_NAMESPACES.length);
- });
-
describe('when entity is not available for import', () => {
beforeEach(() => {
group = generateFakeTableEntry({
@@ -147,6 +116,7 @@ describe('import target cell', () => {
describe('when entity is available for import', () => {
const FAKE_PROGRESS_MESSAGE = 'progress message';
+
beforeEach(() => {
group = generateFakeTableEntry({
id: 1,
diff --git a/spec/frontend/integrations/gitlab_slack_application/components/projects_dropdown_spec.js b/spec/frontend/integrations/gitlab_slack_application/components/projects_dropdown_spec.js
new file mode 100644
index 00000000000..8879a86a578
--- /dev/null
+++ b/spec/frontend/integrations/gitlab_slack_application/components/projects_dropdown_spec.js
@@ -0,0 +1,54 @@
+import { GlCollapsibleListbox } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import ProjectsDropdown from '~/integrations/gitlab_slack_application/components/projects_dropdown.vue';
+
+describe('Slack application projects dropdown', () => {
+ let wrapper;
+
+ const projectsMockData = [
+ {
+ avatar_url: null,
+ id: 1,
+ name: 'Gitlab Smoke Tests',
+ name_with_namespace: 'Toolbox / Gitlab Smoke Tests',
+ },
+ {
+ avatar_url: null,
+ id: 2,
+ name: 'Gitlab Test',
+ name_with_namespace: 'Gitlab Org / Gitlab Test',
+ },
+ {
+ avatar_url: 'foo/bar',
+ id: 3,
+ name: 'Gitlab Shell',
+ name_with_namespace: 'Gitlab Org / Gitlab Shell',
+ },
+ ];
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(ProjectsDropdown, {
+ propsData: {
+ projects: projectsMockData,
+ ...props,
+ },
+ });
+ };
+
+ const findListbox = () => wrapper.findComponent(GlCollapsibleListbox);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders the listbox with 3 items', () => {
+ expect(findListbox().exists()).toBe(true);
+ expect(findListbox().props('items')).toHaveLength(3);
+ });
+
+ it('should emit project-selected if a project is clicked', () => {
+ findListbox().vm.$emit('select', 1);
+
+ expect(wrapper.emitted('project-selected')).toMatchObject([[projectsMockData[0]]]);
+ });
+});
diff --git a/spec/frontend/invite_members/components/invite_members_modal_spec.js b/spec/frontend/invite_members/components/invite_members_modal_spec.js
index 526487f6460..cfc2fd65cc1 100644
--- a/spec/frontend/invite_members/components/invite_members_modal_spec.js
+++ b/spec/frontend/invite_members/components/invite_members_modal_spec.js
@@ -34,6 +34,7 @@ import {
displaySuccessfulInvitationAlert,
reloadOnInvitationSuccess,
} from '~/invite_members/utils/trigger_successful_invite_alert';
+import { captureException } from '~/ci/runner/sentry_utils';
import { GROUPS_INVITATIONS_PATH, invitationsApiResponse } from '../mock_data/api_responses';
import {
propsData,
@@ -52,6 +53,7 @@ import {
jest.mock('~/invite_members/utils/trigger_successful_invite_alert');
jest.mock('~/experimentation/experiment_tracking');
+jest.mock('~/ci/runner/sentry_utils');
describe('InviteMembersModal', () => {
let wrapper;
@@ -130,10 +132,10 @@ describe('InviteMembersModal', () => {
const findUserLimitAlert = () => wrapper.findComponent(UserLimitNotification);
const findAccordion = () => wrapper.findComponent(GlCollapse);
const findErrorsIcon = () => wrapper.findComponent(GlIcon);
- const findMemberErrorMessage = (element) =>
- `${Object.keys(invitationsApiResponse.EXPANDED_RESTRICTED.message)[element]}: ${
- Object.values(invitationsApiResponse.EXPANDED_RESTRICTED.message)[element]
- }`;
+ const expectedErrorMessage = (index, errorType) => {
+ const [username, message] = Object.entries(errorType.parsedMessage)[index];
+ return `${username}: ${message}`;
+ };
const findActionButton = () => wrapper.findByTestId('invite-modal-submit');
const findCancelButton = () => wrapper.findByTestId('invite-modal-cancel');
@@ -315,8 +317,6 @@ describe('InviteMembersModal', () => {
mock.onPost(GROUPS_INVITATIONS_PATH).reply(code, data);
};
- const expectedEmailRestrictedError =
- "The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check Allowed domains for sign-ups.";
const expectedSyntaxError = 'email contains an invalid email address';
describe('when no invites have been entered in the form and then some are entered', () => {
@@ -447,10 +447,8 @@ describe('InviteMembersModal', () => {
});
it('displays the generic error for http server error', async () => {
- mockInvitationsApi(
- HTTP_STATUS_INTERNAL_SERVER_ERROR,
- 'Request failed with status code 500',
- );
+ const SERVER_ERROR_MESSAGE = 'Request failed with status code 500';
+ mockInvitationsApi(HTTP_STATUS_INTERNAL_SERVER_ERROR, SERVER_ERROR_MESSAGE);
clickInviteButton();
@@ -458,17 +456,25 @@ describe('InviteMembersModal', () => {
expect(membersFormGroupInvalidFeedback()).toBe('Something went wrong');
expect(findMembersSelect().props('exceptionState')).toBe(false);
+ expect(captureException).toHaveBeenCalledWith({
+ error: new Error(SERVER_ERROR_MESSAGE),
+ component: wrapper.vm.$options.name,
+ });
});
it('displays the restricted user api message for response with bad request', async () => {
mockInvitationsApi(HTTP_STATUS_CREATED, invitationsApiResponse.EMAIL_RESTRICTED);
+ await triggerMembersTokenSelect([user3]);
+
clickInviteButton();
await waitForPromises();
expect(findMemberErrorAlert().exists()).toBe(true);
- expect(findMemberErrorAlert().text()).toContain(expectedEmailRestrictedError);
+ expect(findMemberErrorAlert().text()).toContain(
+ expectedErrorMessage(0, invitationsApiResponse.EMAIL_RESTRICTED),
+ );
expect(membersFormGroupInvalidFeedback()).toBe('');
expect(findMembersSelect().props('exceptionState')).not.toBe(false);
});
@@ -476,19 +482,21 @@ describe('InviteMembersModal', () => {
it('displays all errors when there are multiple existing users that are restricted by email', async () => {
mockInvitationsApi(HTTP_STATUS_CREATED, invitationsApiResponse.MULTIPLE_RESTRICTED);
+ await triggerMembersTokenSelect([user3, user4, user5]);
+
clickInviteButton();
await waitForPromises();
expect(findMemberErrorAlert().exists()).toBe(true);
expect(findMemberErrorAlert().text()).toContain(
- Object.values(invitationsApiResponse.MULTIPLE_RESTRICTED.message)[0],
+ expectedErrorMessage(0, invitationsApiResponse.MULTIPLE_RESTRICTED),
);
expect(findMemberErrorAlert().text()).toContain(
- Object.values(invitationsApiResponse.MULTIPLE_RESTRICTED.message)[1],
+ expectedErrorMessage(1, invitationsApiResponse.MULTIPLE_RESTRICTED),
);
expect(findMemberErrorAlert().text()).toContain(
- Object.values(invitationsApiResponse.MULTIPLE_RESTRICTED.message)[2],
+ expectedErrorMessage(2, invitationsApiResponse.MULTIPLE_RESTRICTED),
);
expect(membersFormGroupInvalidFeedback()).toBe('');
expect(findMembersSelect().props('exceptionState')).not.toBe(false);
@@ -608,7 +616,9 @@ describe('InviteMembersModal', () => {
await waitForPromises();
expect(findMemberErrorAlert().exists()).toBe(true);
- expect(findMemberErrorAlert().text()).toContain(expectedEmailRestrictedError);
+ expect(findMemberErrorAlert().text()).toContain(
+ expectedErrorMessage(0, invitationsApiResponse.EMAIL_RESTRICTED),
+ );
expect(membersFormGroupInvalidFeedback()).toBe('');
expect(findMembersSelect().props('exceptionState')).not.toBe(false);
expect(findActionButton().props('loading')).toBe(false);
@@ -617,19 +627,21 @@ describe('InviteMembersModal', () => {
it('displays all errors when there are multiple emails that return a restricted error message', async () => {
mockInvitationsApi(HTTP_STATUS_CREATED, invitationsApiResponse.MULTIPLE_RESTRICTED);
+ await triggerMembersTokenSelect([user3, user4, user5]);
+
clickInviteButton();
await waitForPromises();
expect(findMemberErrorAlert().exists()).toBe(true);
expect(findMemberErrorAlert().text()).toContain(
- Object.values(invitationsApiResponse.MULTIPLE_RESTRICTED.message)[0],
+ expectedErrorMessage(0, invitationsApiResponse.MULTIPLE_RESTRICTED),
);
expect(findMemberErrorAlert().text()).toContain(
- Object.values(invitationsApiResponse.MULTIPLE_RESTRICTED.message)[1],
+ expectedErrorMessage(1, invitationsApiResponse.MULTIPLE_RESTRICTED),
);
expect(findMemberErrorAlert().text()).toContain(
- Object.values(invitationsApiResponse.MULTIPLE_RESTRICTED.message)[2],
+ expectedErrorMessage(2, invitationsApiResponse.MULTIPLE_RESTRICTED),
);
expect(membersFormGroupInvalidFeedback()).toBe('');
expect(findMembersSelect().props('exceptionState')).not.toBe(false);
@@ -685,10 +697,18 @@ describe('InviteMembersModal', () => {
expect(findMemberErrorAlert().props('title')).toContain(
"The following 4 members couldn't be invited",
);
- expect(findMemberErrorAlert().text()).toContain(findMemberErrorMessage(0));
- expect(findMemberErrorAlert().text()).toContain(findMemberErrorMessage(1));
- expect(findMemberErrorAlert().text()).toContain(findMemberErrorMessage(2));
- expect(findMemberErrorAlert().text()).toContain(findMemberErrorMessage(3));
+ expect(findMemberErrorAlert().text()).toContain(
+ expectedErrorMessage(0, invitationsApiResponse.EXPANDED_RESTRICTED),
+ );
+ expect(findMemberErrorAlert().text()).toContain(
+ expectedErrorMessage(1, invitationsApiResponse.EXPANDED_RESTRICTED),
+ );
+ expect(findMemberErrorAlert().text()).toContain(
+ expectedErrorMessage(2, invitationsApiResponse.EXPANDED_RESTRICTED),
+ );
+ expect(findMemberErrorAlert().text()).toContain(
+ expectedErrorMessage(3, invitationsApiResponse.EXPANDED_RESTRICTED),
+ );
expect(findAccordion().exists()).toBe(true);
expect(findMoreInviteErrorsButton().text()).toContain('Show more (2)');
expect(findErrorsIcon().attributes('class')).not.toContain('gl-rotate-180');
@@ -711,7 +731,9 @@ describe('InviteMembersModal', () => {
expect(findMemberErrorAlert().props('title')).toContain(
"The following 3 members couldn't be invited",
);
- expect(findMemberErrorAlert().text()).not.toContain(findMemberErrorMessage(0));
+ expect(findMemberErrorAlert().text()).not.toContain(
+ expectedErrorMessage(0, invitationsApiResponse.EXPANDED_RESTRICTED),
+ );
await removeMembersToken(user6);
@@ -719,14 +741,18 @@ describe('InviteMembersModal', () => {
expect(findMemberErrorAlert().props('title')).toContain(
"The following 2 members couldn't be invited",
);
- expect(findMemberErrorAlert().text()).not.toContain(findMemberErrorMessage(2));
+ expect(findMemberErrorAlert().text()).not.toContain(
+ expectedErrorMessage(2, invitationsApiResponse.EXPANDED_RESTRICTED),
+ );
await removeMembersToken(user4);
expect(findMemberErrorAlert().props('title')).toContain(
"The following member couldn't be invited",
);
- expect(findMemberErrorAlert().text()).not.toContain(findMemberErrorMessage(1));
+ expect(findMemberErrorAlert().text()).not.toContain(
+ expectedErrorMessage(1, invitationsApiResponse.EXPANDED_RESTRICTED),
+ );
await removeMembersToken(user5);
diff --git a/spec/frontend/invite_members/mock_data/api_responses.js b/spec/frontend/invite_members/mock_data/api_responses.js
index 4f773009f37..9190f85d7a0 100644
--- a/spec/frontend/invite_members/mock_data/api_responses.js
+++ b/spec/frontend/invite_members/mock_data/api_responses.js
@@ -6,36 +6,56 @@ const ERROR_EMAIL_INVALID = {
error: 'email contains an invalid email address',
};
+const BASE_ERROR_MEMBER_NOT_ALLOWED = `The member's email address is not allowed for this project. \
+Go to the &#39;Admin area &gt; Sign-up restrictions&#39;, and check`;
+
+const ALLOWED_DOMAIN_ERROR = `${BASE_ERROR_MEMBER_NOT_ALLOWED} &#39;Allowed domains for sign-ups&#39;.`;
+const DOMAIN_DENYLIST_ERROR = `${BASE_ERROR_MEMBER_NOT_ALLOWED} the &#39;Domain denylist&#39;.`;
+
+function htmlDecode(input) {
+ const doc = new DOMParser().parseFromString(input, 'text/html');
+ return doc.documentElement.textContent;
+}
+
+const DECODED_ALLOWED_DOMAIN_ERROR = htmlDecode(ALLOWED_DOMAIN_ERROR);
+const DECODED_DOMAIN_DENYLIST_ERROR = htmlDecode(DOMAIN_DENYLIST_ERROR);
+
const EMAIL_RESTRICTED = {
message: {
- 'email@example.com':
- "The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check Allowed domains for sign-ups.",
+ 'email@example.com': ALLOWED_DOMAIN_ERROR,
+ },
+ parsedMessage: {
+ 'email@example.com': DECODED_ALLOWED_DOMAIN_ERROR,
},
status: 'error',
};
const MULTIPLE_RESTRICTED = {
message: {
- 'email@example.com':
- "The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check Allowed domains for sign-ups.",
- 'email4@example.com':
- "The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check the Domain denylist.",
- root:
- "The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check Allowed domains for sign-ups.",
+ 'email@example.com': ALLOWED_DOMAIN_ERROR,
+ 'email4@example.com': DOMAIN_DENYLIST_ERROR,
+ root: ALLOWED_DOMAIN_ERROR,
+ },
+ parsedMessage: {
+ 'email@example.com': DECODED_ALLOWED_DOMAIN_ERROR,
+ 'email4@example.com': DECODED_DOMAIN_DENYLIST_ERROR,
+ root: DECODED_ALLOWED_DOMAIN_ERROR,
},
status: 'error',
};
const EXPANDED_RESTRICTED = {
message: {
- 'email@example.com':
- "The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check Allowed domains for sign-ups.",
- 'email4@example.com':
- "The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check the Domain denylist.",
- 'email5@example.com':
- "The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check the Domain denylist.",
- root:
- "The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check Allowed domains for sign-ups.",
+ 'email@example.com': ALLOWED_DOMAIN_ERROR,
+ 'email4@example.com': DOMAIN_DENYLIST_ERROR,
+ 'email5@example.com': DOMAIN_DENYLIST_ERROR,
+ root: ALLOWED_DOMAIN_ERROR,
+ },
+ parsedMessage: {
+ 'email@example.com': DECODED_ALLOWED_DOMAIN_ERROR,
+ 'email4@example.com': DECODED_DOMAIN_DENYLIST_ERROR,
+ 'email5@example.com': DECODED_DOMAIN_DENYLIST_ERROR,
+ root: DECODED_ALLOWED_DOMAIN_ERROR,
},
status: 'error',
};
diff --git a/spec/frontend/issuable/components/hidden_badge_spec.js b/spec/frontend/issuable/components/hidden_badge_spec.js
new file mode 100644
index 00000000000..db2248bb2d2
--- /dev/null
+++ b/spec/frontend/issuable/components/hidden_badge_spec.js
@@ -0,0 +1,45 @@
+import { GlBadge, GlIcon } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import HiddenBadge from '~/issuable/components/hidden_badge.vue';
+
+describe('HiddenBadge component', () => {
+ let wrapper;
+
+ const mountComponent = () => {
+ wrapper = shallowMount(HiddenBadge, {
+ directives: {
+ GlTooltip: createMockDirective('gl-tooltip'),
+ },
+ propsData: {
+ issuableType: 'issue',
+ },
+ });
+ };
+
+ const findBadge = () => wrapper.findComponent(GlBadge);
+ const findIcon = () => wrapper.findComponent(GlIcon);
+
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ it('renders warning badge', () => {
+ expect(findBadge().text()).toBe('Hidden');
+ expect(findBadge().props('variant')).toEqual('warning');
+ });
+
+ it('renders spam icon', () => {
+ expect(findIcon().props('name')).toBe('spam');
+ });
+
+ it('has tooltip', () => {
+ expect(getBinding(wrapper.element, 'gl-tooltip')).not.toBeUndefined();
+ });
+
+ it('has title', () => {
+ expect(findBadge().attributes('title')).toBe(
+ 'This issue is hidden because its author has been banned.',
+ );
+ });
+});
diff --git a/spec/frontend/issuable/components/locked_badge_spec.js b/spec/frontend/issuable/components/locked_badge_spec.js
new file mode 100644
index 00000000000..73ab6e36ba1
--- /dev/null
+++ b/spec/frontend/issuable/components/locked_badge_spec.js
@@ -0,0 +1,45 @@
+import { GlBadge, GlIcon } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import LockedBadge from '~/issuable/components/locked_badge.vue';
+
+describe('LockedBadge component', () => {
+ let wrapper;
+
+ const mountComponent = () => {
+ wrapper = shallowMount(LockedBadge, {
+ directives: {
+ GlTooltip: createMockDirective('gl-tooltip'),
+ },
+ propsData: {
+ issuableType: 'issue',
+ },
+ });
+ };
+
+ const findBadge = () => wrapper.findComponent(GlBadge);
+ const findIcon = () => wrapper.findComponent(GlIcon);
+
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ it('renders warning badge', () => {
+ expect(findBadge().text()).toBe('Locked');
+ expect(findBadge().props('variant')).toEqual('warning');
+ });
+
+ it('renders lock icon', () => {
+ expect(findIcon().props('name')).toBe('lock');
+ });
+
+ it('has tooltip', () => {
+ expect(getBinding(wrapper.element, 'gl-tooltip')).not.toBeUndefined();
+ });
+
+ it('has title', () => {
+ expect(findBadge().attributes('title')).toBe(
+ 'This issue is locked. Only project members can comment.',
+ );
+ });
+});
diff --git a/spec/frontend/issues/dashboard/mock_data.js b/spec/frontend/issues/dashboard/mock_data.js
index 1e3abd5a018..adcd4268449 100644
--- a/spec/frontend/issues/dashboard/mock_data.js
+++ b/spec/frontend/issues/dashboard/mock_data.js
@@ -19,7 +19,6 @@ export const issuesQueryResponse = {
reference: 'group/project#123456',
state: 'opened',
title: 'Issue title',
- titleHtml: 'Issue title',
type: 'issue',
updatedAt: '2021-05-22T04:08:01Z',
upvotes: 3,
diff --git a/spec/frontend/issues/list/mock_data.js b/spec/frontend/issues/list/mock_data.js
index 73fda11f38c..b9a8bc171db 100644
--- a/spec/frontend/issues/list/mock_data.js
+++ b/spec/frontend/issues/list/mock_data.js
@@ -49,7 +49,6 @@ export const getIssuesQueryResponse = {
moved: false,
state: 'opened',
title: 'Issue title',
- titleHtml: 'Issue title',
updatedAt: '2021-05-22T04:08:01Z',
closedAt: null,
upvotes: 3,
diff --git a/spec/frontend/issues/show/components/description_spec.js b/spec/frontend/issues/show/components/description_spec.js
index 93860aaa925..25e89db7957 100644
--- a/spec/frontend/issues/show/components/description_spec.js
+++ b/spec/frontend/issues/show/components/description_spec.js
@@ -69,8 +69,8 @@ describe('Description component', () => {
wrapper = shallowMountExtended(Description, {
apolloProvider: mockApollo,
propsData: {
- issueId: 1,
- issueIid: 1,
+ issueId: '1',
+ issueIid: '1',
...initialProps,
...props,
},
diff --git a/spec/frontend/issues/show/components/fields/description_spec.js b/spec/frontend/issues/show/components/fields/description_spec.js
index 83b927d3699..e1d2809be9d 100644
--- a/spec/frontend/issues/show/components/fields/description_spec.js
+++ b/spec/frontend/issues/show/components/fields/description_spec.js
@@ -10,7 +10,7 @@ describe('Description field component', () => {
let trackingSpy;
const findMarkdownEditor = () => wrapper.findComponent(MarkdownEditor);
- const mountComponent = ({ description = 'test', contentEditorOnIssues = false } = {}) => {
+ const mountComponent = ({ description = 'test' } = {}) => {
wrapper = shallowMount(DescriptionField, {
attachTo: document.body,
propsData: {
@@ -18,11 +18,6 @@ describe('Description field component', () => {
markdownDocsPath: '/',
value: description,
},
- provide: {
- glFeatures: {
- contentEditorOnIssues,
- },
- },
stubs: {
MarkdownField,
},
@@ -33,15 +28,7 @@ describe('Description field component', () => {
trackingSpy = mockTracking(undefined, null, jest.spyOn);
jest.spyOn(eventHub, '$emit');
- mountComponent({ contentEditorOnIssues: true });
- });
-
- it('passes feature flag to the MarkdownEditorComponent', () => {
- expect(findMarkdownEditor().props('enableContentEditor')).toBe(true);
-
- mountComponent({ contentEditorOnIssues: false });
-
- expect(findMarkdownEditor().props('enableContentEditor')).toBe(false);
+ mountComponent();
});
it('uses the MarkdownEditor component to edit markdown', () => {
diff --git a/spec/frontend/issues/show/components/header_actions_spec.js b/spec/frontend/issues/show/components/header_actions_spec.js
index ce2161f4670..e508045eff3 100644
--- a/spec/frontend/issues/show/components/header_actions_spec.js
+++ b/spec/frontend/issues/show/components/header_actions_spec.js
@@ -123,7 +123,7 @@ describe('HeaderActions component', () => {
const findMobileDropdownItems = () => findMobileDropdown().findAllComponents(GlDropdownItem);
const findDesktopDropdownItems = () => findDesktopDropdown().findAllComponents(GlDropdownItem);
const findAbuseCategorySelector = () => wrapper.findComponent(AbuseCategorySelector);
- const findReportAbuseSelectorItem = () => wrapper.find(`[data-testid="report-abuse-item"]`);
+ const findReportAbuseButton = () => wrapper.find(`[data-testid="report-abuse-item"]`);
const findNotificationWidget = () => wrapper.find(`[data-testid="notification-toggle"]`);
const findLockIssueWidget = () => wrapper.find(`[data-testid="lock-issue-toggle"]`);
const findCopyRefenceDropdownItem = () => wrapper.find(`[data-testid="copy-reference"]`);
@@ -239,24 +239,24 @@ describe('HeaderActions component', () => {
});
describe.each`
- description | isCloseIssueItemVisible | findDropdownItems | findDropdown
- ${'mobile dropdown'} | ${true} | ${findMobileDropdownItems} | ${findMobileDropdown}
- ${'desktop dropdown'} | ${false} | ${findDesktopDropdownItems} | ${findDesktopDropdown}
- `('$description', ({ isCloseIssueItemVisible, findDropdownItems, findDropdown }) => {
+ description | findDropdownItems
+ ${'mobile dropdown'} | ${findMobileDropdownItems}
+ ${'desktop dropdown'} | ${findDesktopDropdownItems}
+ `('$description', ({ findDropdownItems }) => {
describe.each`
- description | itemText | isItemVisible | canUpdateIssue | canCreateIssue | isIssueAuthor | canReportSpam | canPromoteToEpic | canDestroyIssue
- ${`when user can update ${issueType}`} | ${`Close ${issueType}`} | ${isCloseIssueItemVisible} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
- ${`when user cannot update ${issueType}`} | ${`Close ${issueType}`} | ${false} | ${false} | ${true} | ${true} | ${true} | ${true} | ${true}
- ${`when user can create ${issueType}`} | ${`New related ${issueType}`} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
- ${`when user cannot create ${issueType}`} | ${`New related ${issueType}`} | ${false} | ${true} | ${false} | ${true} | ${true} | ${true} | ${true}
- ${'when user can promote to epic'} | ${'Promote to epic'} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
- ${'when user cannot promote to epic'} | ${'Promote to epic'} | ${false} | ${true} | ${true} | ${true} | ${true} | ${false} | ${true}
- ${'when user can report abuse'} | ${'Report abuse to administrator'} | ${true} | ${true} | ${true} | ${false} | ${true} | ${true} | ${true}
- ${'when user cannot report abuse'} | ${'Report abuse to administrator'} | ${false} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
- ${'when user can submit as spam'} | ${'Submit as spam'} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
- ${'when user cannot submit as spam'} | ${'Submit as spam'} | ${false} | ${true} | ${true} | ${true} | ${false} | ${true} | ${true}
- ${`when user can delete ${issueType}`} | ${`Delete ${issueType}`} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
- ${`when user cannot delete ${issueType}`} | ${`Delete ${issueType}`} | ${false} | ${true} | ${true} | ${true} | ${true} | ${true} | ${false}
+ description | itemText | isItemVisible | canUpdateIssue | canCreateIssue | isIssueAuthor | canReportSpam | canPromoteToEpic | canDestroyIssue
+ ${`when user can update ${issueType}`} | ${`Close ${issueType}`} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
+ ${`when user cannot update ${issueType}`} | ${`Close ${issueType}`} | ${false} | ${false} | ${true} | ${true} | ${true} | ${true} | ${true}
+ ${`when user can create ${issueType}`} | ${`New related ${issueType}`} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
+ ${`when user cannot create ${issueType}`} | ${`New related ${issueType}`} | ${false} | ${true} | ${false} | ${true} | ${true} | ${true} | ${true}
+ ${'when user can promote to epic'} | ${'Promote to epic'} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
+ ${'when user cannot promote to epic'} | ${'Promote to epic'} | ${false} | ${true} | ${true} | ${true} | ${true} | ${false} | ${true}
+ ${'when user can report abuse'} | ${'Report abuse to administrator'} | ${true} | ${true} | ${true} | ${false} | ${true} | ${true} | ${true}
+ ${'when user cannot report abuse'} | ${'Report abuse to administrator'} | ${false} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
+ ${'when user can submit as spam'} | ${'Submit as spam'} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
+ ${'when user cannot submit as spam'} | ${'Submit as spam'} | ${false} | ${true} | ${true} | ${true} | ${false} | ${true} | ${true}
+ ${`when user can delete ${issueType}`} | ${`Delete ${issueType}`} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
+ ${`when user cannot delete ${issueType}`} | ${`Delete ${issueType}`} | ${false} | ${true} | ${true} | ${true} | ${true} | ${true} | ${false}
`(
'$description',
({
@@ -292,24 +292,6 @@ describe('HeaderActions component', () => {
});
},
);
-
- describe(`when user can update but not create ${issueType}`, () => {
- beforeEach(() => {
- wrapper = mountComponent({
- props: {
- canUpdateIssue: true,
- canCreateIssue: false,
- isIssueAuthor: true,
- issueType,
- canReportSpam: false,
- canPromoteToEpic: false,
- },
- });
- });
- it(`${isCloseIssueItemVisible ? 'shows' : 'hides'} the dropdown button`, () => {
- expect(findDropdown().exists()).toBe(isCloseIssueItemVisible);
- });
- });
});
describe(`show edit button ${issueType}`, () => {
@@ -346,7 +328,7 @@ describe('HeaderActions component', () => {
});
it('tracks clicking on button', () => {
- findDesktopDropdownItems().at(3).vm.$emit('click');
+ findDesktopDropdownItems().at(4).vm.$emit('click');
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_dropdown', {
label: 'delete_issue',
@@ -490,29 +472,41 @@ describe('HeaderActions component', () => {
});
});
- describe('abuse category selector', () => {
+ describe('report abuse to admin button', () => {
beforeEach(() => {
wrapper = mountComponent({ props: { isIssueAuthor: false } });
});
- it("doesn't render", () => {
+ it('renders the button but not the abuse category drawer', () => {
+ expect(findReportAbuseButton().exists()).toBe(true);
expect(findAbuseCategorySelector().exists()).toEqual(false);
});
- it('opens the drawer', async () => {
- findReportAbuseSelectorItem().vm.$emit('click');
+ it('opens the abuse category drawer', async () => {
+ findReportAbuseButton().vm.$emit('click');
await nextTick();
expect(findAbuseCategorySelector().props('showDrawer')).toEqual(true);
});
- it('closes the drawer', async () => {
- await findReportAbuseSelectorItem().vm.$emit('click');
- await findAbuseCategorySelector().vm.$emit('close-drawer');
+ it('closes the abuse category drawer', async () => {
+ await findReportAbuseButton().vm.$emit('click');
+ expect(findAbuseCategorySelector().exists()).toEqual(true);
+ await findAbuseCategorySelector().vm.$emit('close-drawer');
expect(findAbuseCategorySelector().exists()).toEqual(false);
});
+
+ describe('when the logged in user is the issue author', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({ props: { isIssueAuthor: true } });
+ });
+
+ it('does not render the button', () => {
+ expect(findReportAbuseButton().exists()).toBe(false);
+ });
+ });
});
describe('notification toggle', () => {
@@ -694,7 +688,7 @@ describe('HeaderActions component', () => {
expect(findDesktopDropdown().exists()).toBe(headerActionsVisible);
expect(findCopyRefenceDropdownItem().exists()).toBe(headerActionsVisible);
expect(findNotificationWidget().exists()).toBe(false);
- expect(findReportAbuseSelectorItem().exists()).toBe(false);
+ expect(findReportAbuseButton().exists()).toBe(false);
expect(findLockIssueWidget().exists()).toBe(false);
});
},
@@ -720,7 +714,7 @@ describe('HeaderActions component', () => {
`${capitalizeFirstCharacter(expectedText)} actions`,
);
expect(findDropdownBy('copy-email').text()).toBe(`Copy ${expectedText} email address`);
- expect(findDesktopDropdownItems().at(0).text()).toBe(`New related ${expectedText}`);
+ expect(findDesktopDropdownItems().at(1).text()).toBe(`New related ${expectedText}`);
});
});
});
diff --git a/spec/frontend/issues/show/components/new_header_actions_popover_spec.js b/spec/frontend/issues/show/components/new_header_actions_popover_spec.js
deleted file mode 100644
index bf3e81c7d3a..00000000000
--- a/spec/frontend/issues/show/components/new_header_actions_popover_spec.js
+++ /dev/null
@@ -1,77 +0,0 @@
-import { GlPopover } from '@gitlab/ui';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import NewHeaderActionsPopover from '~/issues/show/components/new_header_actions_popover.vue';
-import { NEW_ACTIONS_POPOVER_KEY } from '~/issues/show/constants';
-import { TYPE_ISSUE } from '~/issues/constants';
-import * as utils from '~/lib/utils/common_utils';
-
-describe('NewHeaderActionsPopover', () => {
- let wrapper;
-
- const createComponent = ({ issueType = TYPE_ISSUE, movedMrSidebarEnabled = true }) => {
- wrapper = shallowMountExtended(NewHeaderActionsPopover, {
- propsData: {
- issueType,
- },
- stubs: {
- GlPopover,
- },
- provide: {
- glFeatures: {
- movedMrSidebar: movedMrSidebarEnabled,
- },
- },
- });
- };
-
- const findPopover = () => wrapper.findComponent(GlPopover);
- const findConfirmButton = () => wrapper.findByTestId('confirm-button');
-
- it('should not be visible when the feature flag :moved_mr_sidebar is disabled', () => {
- createComponent({ movedMrSidebarEnabled: false });
- expect(findPopover().exists()).toBe(false);
- });
-
- describe('without the popover cookie', () => {
- beforeEach(() => {
- utils.setCookie = jest.fn();
-
- createComponent({});
- });
-
- it('renders the popover with correct text', () => {
- expect(findPopover().exists()).toBe(true);
- expect(findPopover().text()).toContain('issue actions');
- });
-
- it('does not call setCookie', () => {
- expect(utils.setCookie).not.toHaveBeenCalled();
- });
-
- describe('when the confirm button is clicked', () => {
- beforeEach(() => {
- findConfirmButton().vm.$emit('click');
- });
-
- it('sets the popover cookie', () => {
- expect(utils.setCookie).toHaveBeenCalledWith(NEW_ACTIONS_POPOVER_KEY, true);
- });
-
- it('hides the popover', () => {
- expect(findPopover().exists()).toBe(false);
- });
- });
- });
-
- describe('with the popover cookie', () => {
- beforeEach(() => {
- jest.spyOn(utils, 'getCookie').mockReturnValue('true');
-
- createComponent({});
- });
-
- it('does not render the popover', () => {
- expect(findPopover().exists()).toBe(false);
- });
- });
-});
diff --git a/spec/frontend/issues/show/components/sticky_header_spec.js b/spec/frontend/issues/show/components/sticky_header_spec.js
index 0c54ae45e70..a909084956f 100644
--- a/spec/frontend/issues/show/components/sticky_header_spec.js
+++ b/spec/frontend/issues/show/components/sticky_header_spec.js
@@ -1,6 +1,7 @@
-import { GlIcon } from '@gitlab/ui';
-import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import { GlIcon, GlLink } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import HiddenBadge from '~/issuable/components/hidden_badge.vue';
+import LockedBadge from '~/issuable/components/locked_badge.vue';
import {
issuableStatusText,
STATUS_CLOSED,
@@ -17,20 +18,17 @@ describe('StickyHeader component', () => {
let wrapper;
const findConfidentialBadge = () => wrapper.findComponent(ConfidentialityBadge);
- const findHiddenBadge = () => wrapper.findByTestId('hidden');
- const findLockedBadge = () => wrapper.findByTestId('locked');
+ const findHiddenBadge = () => wrapper.findComponent(HiddenBadge);
+ const findLockedBadge = () => wrapper.findComponent(LockedBadge);
+ const findTitle = () => wrapper.findComponent(GlLink);
const createComponent = (props = {}) => {
wrapper = shallowMountExtended(StickyHeader, {
- directives: {
- GlTooltip: createMockDirective('gl-tooltip'),
- },
propsData: {
issuableStatus: STATUS_OPEN,
issuableType: TYPE_ISSUE,
show: true,
title: 'A sticky issue',
- titleHtml: '',
...props,
},
});
@@ -91,13 +89,6 @@ describe('StickyHeader component', () => {
const lockedBadge = findLockedBadge();
expect(lockedBadge.exists()).toBe(isLocked);
-
- if (isLocked) {
- expect(lockedBadge.attributes('title')).toBe(
- 'This issue is locked. Only project members can comment.',
- );
- expect(getBinding(lockedBadge.element, 'gl-tooltip')).not.toBeUndefined();
- }
});
it.each`
@@ -109,27 +100,13 @@ describe('StickyHeader component', () => {
const hiddenBadge = findHiddenBadge();
expect(hiddenBadge.exists()).toBe(isHidden);
-
- if (isHidden) {
- expect(hiddenBadge.attributes('title')).toBe(
- 'This issue is hidden because its author has been banned',
- );
- expect(getBinding(hiddenBadge.element, 'gl-tooltip')).not.toBeUndefined();
- }
});
it('shows with title', () => {
createComponent();
- const title = wrapper.find('a');
+ const title = findTitle();
expect(title.text()).toContain('A sticky issue');
expect(title.attributes('href')).toBe('#top');
});
-
- it('shows title containing markup', () => {
- const titleHtml = '<b>A sticky issue</b>';
- createComponent({ titleHtml });
-
- expect(wrapper.find('a').html()).toContain(titleHtml);
- });
});
diff --git a/spec/frontend/issues/show/mock_data/mock_data.js b/spec/frontend/issues/show/mock_data/mock_data.js
index 37aa18ced8d..ed969a08ac5 100644
--- a/spec/frontend/issues/show/mock_data/mock_data.js
+++ b/spec/frontend/issues/show/mock_data/mock_data.js
@@ -1,9 +1,8 @@
import { TEST_HOST } from 'helpers/test_constants';
export const initialRequest = {
- title: '<gl-emoji title="party-parrot"></gl-emoji>this is a title',
+ title: '<p>this is a title</p>',
title_text: 'this is a title',
- title_html: '<gl-emoji title="party-parrot"></gl-emoji>this is a title',
description: '<p>this is a description!</p>',
description_text: 'this is a description',
task_completion_status: { completed_count: 2, count: 4 },
diff --git a/spec/frontend/lib/utils/global_alerts_spec.js b/spec/frontend/lib/utils/global_alerts_spec.js
new file mode 100644
index 00000000000..97fe427c281
--- /dev/null
+++ b/spec/frontend/lib/utils/global_alerts_spec.js
@@ -0,0 +1,80 @@
+import {
+ getGlobalAlerts,
+ setGlobalAlerts,
+ removeGlobalAlertById,
+ GLOBAL_ALERTS_SESSION_STORAGE_KEY,
+} from '~/lib/utils/global_alerts';
+
+describe('global alerts utils', () => {
+ describe('getGlobalAlerts', () => {
+ describe('when there are alerts', () => {
+ beforeEach(() => {
+ jest
+ .spyOn(Storage.prototype, 'getItem')
+ .mockImplementation(() => '[{"id":"foo","variant":"danger","message":"Foo"}]');
+ });
+
+ it('returns alerts from session storage', () => {
+ expect(getGlobalAlerts()).toEqual([{ id: 'foo', variant: 'danger', message: 'Foo' }]);
+ });
+ });
+
+ describe('when there are no alerts', () => {
+ beforeEach(() => {
+ jest.spyOn(Storage.prototype, 'getItem').mockImplementation(() => null);
+ });
+
+ it('returns empty array', () => {
+ expect(getGlobalAlerts()).toEqual([]);
+ });
+ });
+ });
+});
+
+describe('setGlobalAlerts', () => {
+ it('sets alerts in session storage', () => {
+ const setItemSpy = jest.spyOn(Storage.prototype, 'setItem').mockImplementation(() => {});
+
+ setGlobalAlerts([
+ {
+ id: 'foo',
+ variant: 'danger',
+ message: 'Foo',
+ },
+ {
+ id: 'bar',
+ variant: 'success',
+ message: 'Bar',
+ persistOnPages: ['dashboard:groups:index'],
+ dismissible: false,
+ },
+ ]);
+
+ expect(setItemSpy).toHaveBeenCalledWith(
+ GLOBAL_ALERTS_SESSION_STORAGE_KEY,
+ '[{"dismissible":true,"persistOnPages":[],"id":"foo","variant":"danger","message":"Foo"},{"dismissible":false,"persistOnPages":["dashboard:groups:index"],"id":"bar","variant":"success","message":"Bar"}]',
+ );
+ });
+});
+
+describe('removeGlobalAlertById', () => {
+ beforeEach(() => {
+ jest
+ .spyOn(Storage.prototype, 'getItem')
+ .mockImplementation(
+ () =>
+ '[{"id":"foo","variant":"success","message":"Foo"},{"id":"bar","variant":"danger","message":"Bar"}]',
+ );
+ });
+
+ it('removes alert', () => {
+ const setItemSpy = jest.spyOn(Storage.prototype, 'setItem').mockImplementation(() => {});
+
+ removeGlobalAlertById('bar');
+
+ expect(setItemSpy).toHaveBeenCalledWith(
+ GLOBAL_ALERTS_SESSION_STORAGE_KEY,
+ '[{"id":"foo","variant":"success","message":"Foo"}]',
+ );
+ });
+});
diff --git a/spec/frontend/lib/utils/url_utility_spec.js b/spec/frontend/lib/utils/url_utility_spec.js
index ecd2d7f888d..3a846bbda06 100644
--- a/spec/frontend/lib/utils/url_utility_spec.js
+++ b/spec/frontend/lib/utils/url_utility_spec.js
@@ -1,8 +1,20 @@
import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
import * as urlUtils from '~/lib/utils/url_utility';
+import { setGlobalAlerts } from '~/lib/utils/global_alerts';
import { safeUrls, unsafeUrls } from './mock_data';
+jest.mock('~/lib/utils/global_alerts', () => ({
+ getGlobalAlerts: jest.fn().mockImplementation(() => [
+ {
+ id: 'foo',
+ message: 'Foo',
+ variant: 'success',
+ },
+ ]),
+ setGlobalAlerts: jest.fn(),
+}));
+
const shas = {
valid: [
'ad9be38573f9ee4c4daec22673478c2dd1d81cd8',
@@ -327,6 +339,26 @@ describe('URL utility', () => {
});
});
+ describe('getLocationHash', () => {
+ it('gets a default empty value', () => {
+ setWindowLocation(TEST_HOST);
+
+ expect(urlUtils.getLocationHash()).toBeUndefined();
+ });
+
+ it('gets a value', () => {
+ setWindowLocation('#hash-value');
+
+ expect(urlUtils.getLocationHash()).toBe('hash-value');
+ });
+
+ it('gets an empty value when only hash is set', () => {
+ setWindowLocation('#');
+
+ expect(urlUtils.getLocationHash()).toBeUndefined();
+ });
+ });
+
describe('doesHashExistInUrl', () => {
beforeEach(() => {
setWindowLocation('#note_1');
@@ -462,6 +494,48 @@ describe('URL utility', () => {
});
});
+ describe('visitUrlWithAlerts', () => {
+ let originalLocation;
+
+ beforeAll(() => {
+ originalLocation = window.location;
+
+ Object.defineProperty(window, 'location', {
+ writable: true,
+ value: {
+ assign: jest.fn(),
+ protocol: 'http:',
+ host: TEST_HOST,
+ },
+ });
+ });
+
+ afterAll(() => {
+ window.location = originalLocation;
+ });
+
+ it('sets alerts and then visits url', () => {
+ const url = '/foo/bar';
+ const alert = {
+ id: 'bar',
+ message: 'Bar',
+ variant: 'danger',
+ };
+
+ urlUtils.visitUrlWithAlerts(url, [alert]);
+
+ expect(setGlobalAlerts).toHaveBeenCalledWith([
+ {
+ id: 'foo',
+ message: 'Foo',
+ variant: 'success',
+ },
+ alert,
+ ]);
+ expect(window.location.assign).toHaveBeenCalledWith(url);
+ });
+ });
+
describe('updateHistory', () => {
const state = { key: 'prop' };
const title = 'TITLE';
diff --git a/spec/frontend/merge_requests/components/header_metadata_spec.js b/spec/frontend/merge_requests/components/header_metadata_spec.js
deleted file mode 100644
index 2823b4b9d97..00000000000
--- a/spec/frontend/merge_requests/components/header_metadata_spec.js
+++ /dev/null
@@ -1,93 +0,0 @@
-import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import HeaderMetadata from '~/merge_requests/components/header_metadata.vue';
-import mrStore from '~/mr_notes/stores';
-import ConfidentialityBadge from '~/vue_shared/components/confidentiality_badge.vue';
-
-jest.mock('~/mr_notes/stores', () => jest.requireActual('helpers/mocks/mr_notes/stores'));
-
-describe('HeaderMetadata component', () => {
- let wrapper;
-
- const findConfidentialIcon = () => wrapper.findComponent(ConfidentialityBadge);
- const findLockedIcon = () => wrapper.findByTestId('locked');
- const findHiddenIcon = () => wrapper.findByTestId('hidden');
-
- const renderTestMessage = (renders) => (renders ? 'renders' : 'does not render');
-
- const createComponent = ({ store, provide }) => {
- wrapper = shallowMountExtended(HeaderMetadata, {
- mocks: {
- $store: store,
- },
- provide,
- directives: {
- GlTooltip: createMockDirective('gl-tooltip'),
- },
- });
- };
-
- describe.each`
- lockStatus | confidentialStatus | hiddenStatus
- ${true} | ${true} | ${false}
- ${true} | ${false} | ${false}
- ${false} | ${true} | ${false}
- ${false} | ${false} | ${false}
- ${true} | ${true} | ${true}
- ${true} | ${false} | ${true}
- ${false} | ${true} | ${true}
- ${false} | ${false} | ${true}
- `(
- `when locked=$lockStatus, confidential=$confidentialStatus, and hidden=$hiddenStatus`,
- ({ lockStatus, confidentialStatus, hiddenStatus }) => {
- const store = mrStore;
-
- beforeEach(() => {
- store.getters.getNoteableData = {};
- store.getters.getNoteableData.confidential = confidentialStatus;
- store.getters.getNoteableData.discussion_locked = lockStatus;
- store.getters.getNoteableData.targetType = 'merge_request';
-
- createComponent({ store, provide: { hidden: hiddenStatus } });
- });
-
- it(`${renderTestMessage(lockStatus)} the locked icon`, () => {
- const lockedIcon = findLockedIcon();
-
- expect(lockedIcon.exists()).toBe(lockStatus);
-
- if (lockStatus) {
- expect(lockedIcon.attributes('title')).toBe(
- `This merge request is locked. Only project members can comment.`,
- );
- expect(getBinding(lockedIcon.element, 'gl-tooltip')).not.toBeUndefined();
- }
- });
-
- it(`${renderTestMessage(confidentialStatus)} the confidential icon`, () => {
- const confidentialIcon = findConfidentialIcon();
- expect(confidentialIcon.exists()).toBe(confidentialStatus);
-
- if (confidentialStatus && !hiddenStatus) {
- expect(confidentialIcon.props()).toMatchObject({
- workspaceType: 'project',
- issuableType: 'issue',
- });
- }
- });
-
- it(`${renderTestMessage(confidentialStatus)} the hidden icon`, () => {
- const hiddenIcon = findHiddenIcon();
-
- expect(hiddenIcon.exists()).toBe(hiddenStatus);
-
- if (hiddenStatus) {
- expect(hiddenIcon.attributes('title')).toBe(
- `This merge request is hidden because its author has been banned`,
- );
- expect(getBinding(hiddenIcon.element, 'gl-tooltip')).not.toBeUndefined();
- }
- });
- },
- );
-});
diff --git a/spec/frontend/merge_requests/components/merge_request_header_spec.js b/spec/frontend/merge_requests/components/merge_request_header_spec.js
new file mode 100644
index 00000000000..3f774098379
--- /dev/null
+++ b/spec/frontend/merge_requests/components/merge_request_header_spec.js
@@ -0,0 +1,88 @@
+import { shallowMount } from '@vue/test-utils';
+import HiddenBadge from '~/issuable/components/hidden_badge.vue';
+import LockedBadge from '~/issuable/components/locked_badge.vue';
+import StatusBadge from '~/issuable/components/status_badge.vue';
+import MergeRequestHeader from '~/merge_requests/components/merge_request_header.vue';
+import mrStore from '~/mr_notes/stores';
+import ConfidentialityBadge from '~/vue_shared/components/confidentiality_badge.vue';
+
+jest.mock('~/mr_notes/stores', () => jest.requireActual('helpers/mocks/mr_notes/stores'));
+
+describe('MergeRequestHeader component', () => {
+ let wrapper;
+
+ const findConfidentialBadge = () => wrapper.findComponent(ConfidentialityBadge);
+ const findLockedBadge = () => wrapper.findComponent(LockedBadge);
+ const findHiddenBadge = () => wrapper.findComponent(HiddenBadge);
+ const findStatusBadge = () => wrapper.findComponent(StatusBadge);
+
+ const renderTestMessage = (renders) => (renders ? 'renders' : 'does not render');
+
+ const createComponent = ({ confidential, hidden, locked }) => {
+ const store = mrStore;
+ store.getters.getNoteableData = {};
+ store.getters.getNoteableData.confidential = confidential;
+ store.getters.getNoteableData.discussion_locked = locked;
+ store.getters.getNoteableData.targetType = 'merge_request';
+
+ wrapper = shallowMount(MergeRequestHeader, {
+ mocks: {
+ $store: store,
+ },
+ provide: {
+ hidden,
+ },
+ propsData: {
+ initialState: 'opened',
+ },
+ });
+ };
+
+ it('renders status badge', () => {
+ createComponent({ propsData: { initialState: 'opened' } });
+
+ expect(findStatusBadge().props()).toEqual({
+ issuableType: 'merge_request',
+ state: 'opened',
+ });
+ });
+
+ describe.each`
+ locked | confidential | hidden
+ ${true} | ${true} | ${false}
+ ${true} | ${false} | ${false}
+ ${false} | ${true} | ${false}
+ ${false} | ${false} | ${false}
+ ${true} | ${true} | ${true}
+ ${true} | ${false} | ${true}
+ ${false} | ${true} | ${true}
+ ${false} | ${false} | ${true}
+ `(
+ `when locked=$locked, confidential=$confidential, and hidden=$hidden`,
+ ({ locked, confidential, hidden }) => {
+ beforeEach(() => {
+ createComponent({ confidential, hidden, locked });
+ });
+
+ it(`${renderTestMessage(confidential)} the confidential badge`, () => {
+ const confidentialBadge = findConfidentialBadge();
+ expect(confidentialBadge.exists()).toBe(confidential);
+
+ if (confidential && !hidden) {
+ expect(confidentialBadge.props()).toMatchObject({
+ workspaceType: 'project',
+ issuableType: 'issue',
+ });
+ }
+ });
+
+ it(`${renderTestMessage(locked)} the locked badge`, () => {
+ expect(findLockedBadge().exists()).toBe(locked);
+ });
+
+ it(`${renderTestMessage(hidden)} the hidden badge`, () => {
+ expect(findHiddenBadge().exists()).toBe(hidden);
+ });
+ },
+ );
+});
diff --git a/spec/frontend/ml/experiment_tracking/routes/candidates/show/components/candidate_detail_row_spec.js b/spec/frontend/ml/experiment_tracking/routes/candidates/show/components/candidate_detail_row_spec.js
index 53dbd796d85..cd252560590 100644
--- a/spec/frontend/ml/experiment_tracking/routes/candidates/show/components/candidate_detail_row_spec.js
+++ b/spec/frontend/ml/experiment_tracking/routes/candidates/show/components/candidate_detail_row_spec.js
@@ -2,15 +2,14 @@ import { shallowMount } from '@vue/test-utils';
import DetailRow from '~/ml/experiment_tracking/routes/candidates/show/components/candidate_detail_row.vue';
describe('CandidateDetailRow', () => {
- const SECTION_LABEL_CELL = 0;
- const ROW_LABEL_CELL = 1;
- const ROW_VALUE_CELL = 2;
+ const ROW_LABEL_CELL = 0;
+ const ROW_VALUE_CELL = 1;
let wrapper;
const createWrapper = ({ slots = {} } = {}) => {
wrapper = shallowMount(DetailRow, {
- propsData: { sectionLabel: 'Section', label: 'Item' },
+ propsData: { label: 'Item' },
slots,
});
};
@@ -19,10 +18,6 @@ describe('CandidateDetailRow', () => {
beforeEach(() => createWrapper());
- it('renders section label', () => {
- expect(findCellAt(SECTION_LABEL_CELL).text()).toBe('Section');
- });
-
it('renders row label', () => {
expect(findCellAt(ROW_LABEL_CELL).text()).toBe('Item');
});
diff --git a/spec/frontend/ml/experiment_tracking/routes/candidates/show/ml_candidates_show_spec.js b/spec/frontend/ml/experiment_tracking/routes/candidates/show/ml_candidates_show_spec.js
index 0b3b780cb3f..296728af46a 100644
--- a/spec/frontend/ml/experiment_tracking/routes/candidates/show/ml_candidates_show_spec.js
+++ b/spec/frontend/ml/experiment_tracking/routes/candidates/show/ml_candidates_show_spec.js
@@ -1,32 +1,51 @@
-import { shallowMount } from '@vue/test-utils';
-import { GlAvatarLabeled, GlLink } from '@gitlab/ui';
+import { GlAvatarLabeled, GlLink, GlTableLite } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import MlCandidatesShow from '~/ml/experiment_tracking/routes/candidates/show';
import DetailRow from '~/ml/experiment_tracking/routes/candidates/show/components/candidate_detail_row.vue';
-import { TITLE_LABEL } from '~/ml/experiment_tracking/routes/candidates/show/translations';
+import {
+ TITLE_LABEL,
+ NO_PARAMETERS_MESSAGE,
+ NO_METRICS_MESSAGE,
+ NO_METADATA_MESSAGE,
+ NO_CI_MESSAGE,
+} from '~/ml/experiment_tracking/routes/candidates/show/translations';
import DeleteButton from '~/ml/experiment_tracking/components/delete_button.vue';
import ModelExperimentsHeader from '~/ml/experiment_tracking/components/model_experiments_header.vue';
+import { stubComponent } from 'helpers/stub_component';
import { newCandidate } from './mock_data';
describe('MlCandidatesShow', () => {
let wrapper;
const CANDIDATE = newCandidate();
- const USER_ROW = 6;
+ const USER_ROW = 1;
+
+ const INFO_SECTION = 0;
+ const CI_SECTION = 1;
+ const PARAMETER_SECTION = 2;
+ const METADATA_SECTION = 3;
const createWrapper = (createCandidate = () => CANDIDATE) => {
- wrapper = shallowMount(MlCandidatesShow, {
+ wrapper = shallowMountExtended(MlCandidatesShow, {
propsData: { candidate: createCandidate() },
+ stubs: {
+ GlTableLite: { ...stubComponent(GlTableLite), props: ['items', 'fields'] },
+ },
});
};
const findDeleteButton = () => wrapper.findComponent(DeleteButton);
const findHeader = () => wrapper.findComponent(ModelExperimentsHeader);
- const findNthDetailRow = (index) => wrapper.findAllComponents(DetailRow).at(index);
- const findLinkInNthDetailRow = (index) => findNthDetailRow(index).findComponent(GlLink);
- const findSectionLabel = (label) => wrapper.find(`[sectionLabel='${label}']`);
+ const findSection = (section) => wrapper.findAll('section').at(section);
+ const findRowInSection = (section, row) =>
+ findSection(section).findAllComponents(DetailRow).at(row);
+ const findLinkAtRow = (section, rowIndex) =>
+ findRowInSection(section, rowIndex).findComponent(GlLink);
+ const findNoDataMessage = (label) => wrapper.findByText(label);
const findLabel = (label) => wrapper.find(`[label='${label}']`);
- const findCiUserDetailRow = () => findNthDetailRow(USER_ROW);
+ const findCiUserDetailRow = () => findRowInSection(CI_SECTION, USER_ROW);
const findCiUserAvatar = () => findCiUserDetailRow().findComponent(GlAvatarLabeled);
const findCiUserAvatarNameLink = () => findCiUserAvatar().findComponent(GlLink);
+ const findMetricsTable = () => wrapper.findComponent(GlTableLite);
describe('Header', () => {
beforeEach(() => createWrapper());
@@ -50,42 +69,57 @@ describe('MlCandidatesShow', () => {
const mrText = `!${CANDIDATE.info.ci_job.merge_request.iid} ${CANDIDATE.info.ci_job.merge_request.title}`;
const expectedTable = [
- ['Info', 'ID', CANDIDATE.info.iid],
- ['', 'MLflow run ID', CANDIDATE.info.eid],
- ['', 'Status', CANDIDATE.info.status],
- ['', 'Experiment', CANDIDATE.info.experiment_name],
- ['', 'Artifacts', 'Artifacts'],
- ['CI', 'Job', CANDIDATE.info.ci_job.name],
- ['', 'Triggered by', 'CI User'],
- ['', 'Merge request', mrText],
- ['Parameters', CANDIDATE.params[0].name, CANDIDATE.params[0].value],
- ['', CANDIDATE.params[1].name, CANDIDATE.params[1].value],
- ['Metrics', CANDIDATE.metrics[0].name, CANDIDATE.metrics[0].value],
- ['', CANDIDATE.metrics[1].name, CANDIDATE.metrics[1].value],
- ['Metadata', CANDIDATE.metadata[0].name, CANDIDATE.metadata[0].value],
- ['', CANDIDATE.metadata[1].name, CANDIDATE.metadata[1].value],
- ].map((row, index) => [index, ...row]);
-
- it.each(expectedTable)(
- 'row %s is created correctly',
- (rowIndex, sectionLabel, label, text) => {
- const row = findNthDetailRow(rowIndex);
-
- expect(row.props()).toMatchObject({ sectionLabel, label });
- expect(row.text()).toBe(text);
- },
- );
+ [INFO_SECTION, 0, 'ID', CANDIDATE.info.iid],
+ [INFO_SECTION, 1, 'MLflow run ID', CANDIDATE.info.eid],
+ [INFO_SECTION, 2, 'Status', CANDIDATE.info.status],
+ [INFO_SECTION, 3, 'Experiment', CANDIDATE.info.experiment_name],
+ [INFO_SECTION, 4, 'Artifacts', 'Artifacts'],
+ [CI_SECTION, 0, 'Job', CANDIDATE.info.ci_job.name],
+ [CI_SECTION, 1, 'Triggered by', 'CI User'],
+ [CI_SECTION, 2, 'Merge request', mrText],
+ [PARAMETER_SECTION, 0, CANDIDATE.params[0].name, CANDIDATE.params[0].value],
+ [PARAMETER_SECTION, 1, CANDIDATE.params[1].name, CANDIDATE.params[1].value],
+ [METADATA_SECTION, 0, CANDIDATE.metadata[0].name, CANDIDATE.metadata[0].value],
+ [METADATA_SECTION, 1, CANDIDATE.metadata[1].name, CANDIDATE.metadata[1].value],
+ ];
+
+ it.each(expectedTable)('row %s is created correctly', (section, rowIndex, label, text) => {
+ const row = findRowInSection(section, rowIndex);
+
+ expect(row.props()).toMatchObject({ label });
+ expect(row.text()).toBe(text);
+ });
describe('Table links', () => {
const linkRows = [
- [3, CANDIDATE.info.path_to_experiment],
- [4, CANDIDATE.info.path_to_artifact],
- [5, CANDIDATE.info.ci_job.path],
- [7, CANDIDATE.info.ci_job.merge_request.path],
+ [INFO_SECTION, 3, CANDIDATE.info.path_to_experiment],
+ [INFO_SECTION, 4, CANDIDATE.info.path_to_artifact],
+ [CI_SECTION, 0, CANDIDATE.info.ci_job.path],
+ [CI_SECTION, 2, CANDIDATE.info.ci_job.merge_request.path],
];
- it.each(linkRows)('row %s is created correctly', (rowIndex, href) => {
- expect(findLinkInNthDetailRow(rowIndex).attributes().href).toBe(href);
+ it.each(linkRows)('row %s is created correctly', (section, rowIndex, href) => {
+ expect(findLinkAtRow(section, rowIndex).attributes().href).toBe(href);
+ });
+ });
+
+ describe('Metrics table', () => {
+ it('computes metrics table items correctly', () => {
+ expect(findMetricsTable().props('items')).toEqual([
+ { name: 'AUC', 0: '.55' },
+ { name: 'Accuracy', 1: '.99', 2: '.98', 3: '.97' },
+ { name: 'F1', 3: '.1' },
+ ]);
+ });
+
+ it('computes metrics table fields correctly', () => {
+ expect(findMetricsTable().props('fields')).toEqual([
+ expect.objectContaining({ key: 'name', label: 'Metric' }),
+ expect.objectContaining({ key: '0', label: 'Step 0' }),
+ expect.objectContaining({ key: '1', label: 'Step 1' }),
+ expect.objectContaining({ key: '2', label: 'Step 2' }),
+ expect.objectContaining({ key: '3', label: 'Step 3' }),
+ ]);
});
});
@@ -105,22 +139,6 @@ describe('MlCandidatesShow', () => {
expect(nameLink.text()).toEqual('CI User');
});
});
-
- it('does not render params', () => {
- expect(findSectionLabel('Parameters').exists()).toBe(true);
- });
-
- it('renders all conditional rows', () => {
- // This is a bit of a duplicated test from the above table test, but having this makes sure that the
- // tests that test the negatives are implemented correctly
- expect(findLabel('Artifacts').exists()).toBe(true);
- expect(findSectionLabel('Parameters').exists()).toBe(true);
- expect(findSectionLabel('Metadata').exists()).toBe(true);
- expect(findSectionLabel('Metrics').exists()).toBe(true);
- expect(findSectionLabel('CI').exists()).toBe(true);
- expect(findLabel('Merge request').exists()).toBe(true);
- expect(findLabel('Triggered by').exists()).toBe(true);
- });
});
describe('No artifact path', () => {
@@ -150,19 +168,19 @@ describe('MlCandidatesShow', () => {
);
it('does not render params', () => {
- expect(findSectionLabel('Parameters').exists()).toBe(false);
+ expect(findNoDataMessage(NO_PARAMETERS_MESSAGE).exists()).toBe(true);
});
it('does not render metadata', () => {
- expect(findSectionLabel('Metadata').exists()).toBe(false);
+ expect(findNoDataMessage(NO_METADATA_MESSAGE).exists()).toBe(true);
});
it('does not render metrics', () => {
- expect(findSectionLabel('Metrics').exists()).toBe(false);
+ expect(findNoDataMessage(NO_METRICS_MESSAGE).exists()).toBe(true);
});
it('does not render CI info', () => {
- expect(findSectionLabel('CI').exists()).toBe(false);
+ expect(findNoDataMessage(NO_CI_MESSAGE).exists()).toBe(true);
});
});
diff --git a/spec/frontend/ml/experiment_tracking/routes/candidates/show/mock_data.js b/spec/frontend/ml/experiment_tracking/routes/candidates/show/mock_data.js
index 3fbcf122997..4ea23ed2513 100644
--- a/spec/frontend/ml/experiment_tracking/routes/candidates/show/mock_data.js
+++ b/spec/frontend/ml/experiment_tracking/routes/candidates/show/mock_data.js
@@ -4,8 +4,11 @@ export const newCandidate = () => ({
{ name: 'MaxDepth', value: '3' },
],
metrics: [
- { name: 'AUC', value: '.55' },
- { name: 'Accuracy', value: '.99' },
+ { name: 'AUC', value: '.55', step: 0 },
+ { name: 'Accuracy', value: '.99', step: 1 },
+ { name: 'Accuracy', value: '.98', step: 2 },
+ { name: 'Accuracy', value: '.97', step: 3 },
+ { name: 'F1', value: '.1', step: 3 },
],
metadata: [
{ name: 'FileName', value: 'test.py' },
diff --git a/spec/frontend/ml/model_registry/apps/show_ml_model_spec.js b/spec/frontend/ml/model_registry/apps/show_ml_model_spec.js
new file mode 100644
index 00000000000..57a5a5f003f
--- /dev/null
+++ b/spec/frontend/ml/model_registry/apps/show_ml_model_spec.js
@@ -0,0 +1,15 @@
+import { shallowMount } from '@vue/test-utils';
+import { ShowMlModel } from '~/ml/model_registry/apps';
+import { MODEL } from '../mock_data';
+
+let wrapper;
+const createWrapper = () => {
+ wrapper = shallowMount(ShowMlModel, { propsData: { model: MODEL } });
+};
+
+describe('ShowMlModel', () => {
+ beforeEach(() => createWrapper());
+ it('renders the app', () => {
+ expect(wrapper.text()).toContain(MODEL.name);
+ });
+});
diff --git a/spec/frontend/ml/model_registry/mock_data.js b/spec/frontend/ml/model_registry/mock_data.js
new file mode 100644
index 00000000000..18b2b32e069
--- /dev/null
+++ b/spec/frontend/ml/model_registry/mock_data.js
@@ -0,0 +1 @@
+export const MODEL = { name: 'blah' };
diff --git a/spec/frontend/ml/model_registry/routes/models/index/components/ml_models_index_spec.js b/spec/frontend/ml/model_registry/routes/models/index/components/ml_models_index_spec.js
index d1715ccd8f1..c1b9aef9634 100644
--- a/spec/frontend/ml/model_registry/routes/models/index/components/ml_models_index_spec.js
+++ b/spec/frontend/ml/model_registry/routes/models/index/components/ml_models_index_spec.js
@@ -1,39 +1,63 @@
-import { GlLink } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import MlModelsIndexApp from '~/ml/model_registry/routes/models/index';
-import { TITLE_LABEL } from '~/ml/model_registry/routes/models/index/translations';
-import { mockModels } from './mock_data';
+import ModelRow from '~/ml/model_registry/routes/models/index/components/model_row.vue';
+import { TITLE_LABEL, NO_MODELS_LABEL } from '~/ml/model_registry/routes/models/index/translations';
+import Pagination from '~/vue_shared/components/incubation/pagination.vue';
+import { mockModels, startCursor, defaultPageInfo } from './mock_data';
let wrapper;
-const createWrapper = (models = mockModels) => {
- wrapper = shallowMountExtended(MlModelsIndexApp, {
- propsData: { models },
- });
+const createWrapper = (propsData = { models: mockModels, pageInfo: defaultPageInfo }) => {
+ wrapper = shallowMountExtended(MlModelsIndexApp, { propsData });
};
-const findModelLink = (index) => wrapper.findAllComponents(GlLink).at(index);
-const modelLinkText = (index) => findModelLink(index).text();
-const modelLinkHref = (index) => findModelLink(index).attributes('href');
+const findModelRow = (index) => wrapper.findAllComponents(ModelRow).at(index);
+const findPagination = () => wrapper.findComponent(Pagination);
const findTitle = () => wrapper.findByText(TITLE_LABEL);
+const findEmptyLabel = () => wrapper.findByText(NO_MODELS_LABEL);
describe('MlModelsIndex', () => {
- beforeEach(() => {
- createWrapper();
- });
+ describe('empty state', () => {
+ beforeEach(() => createWrapper({ models: [], pageInfo: defaultPageInfo }));
+
+ it('displays empty state when no experiment', () => {
+ expect(findEmptyLabel().exists()).toBe(true);
+ });
- describe('header', () => {
- it('displays the title', () => {
- expect(findTitle().exists()).toBe(true);
+ it('does not show pagination', () => {
+ expect(findPagination().exists()).toBe(false);
});
});
- describe('model list', () => {
- it('displays the models', () => {
- expect(modelLinkHref(0)).toBe(mockModels[0].path);
- expect(modelLinkText(0)).toBe(`${mockModels[0].name} / ${mockModels[0].version}`);
+ describe('with data', () => {
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ it('does not show empty state', () => {
+ expect(findEmptyLabel().exists()).toBe(false);
+ });
+
+ describe('header', () => {
+ it('displays the title', () => {
+ expect(findTitle().exists()).toBe(true);
+ });
+ });
+
+ describe('model list', () => {
+ it('displays the models', () => {
+ expect(findModelRow(0).props('model')).toMatchObject(mockModels[0]);
+ expect(findModelRow(1).props('model')).toMatchObject(mockModels[1]);
+ });
+ });
+
+ describe('pagination', () => {
+ it('should show', () => {
+ expect(findPagination().exists()).toBe(true);
+ });
- expect(modelLinkHref(1)).toBe(mockModels[1].path);
- expect(modelLinkText(1)).toBe(`${mockModels[1].name} / ${mockModels[1].version}`);
+ it('passes pagination to pagination component', () => {
+ expect(findPagination().props('startCursor')).toBe(startCursor);
+ });
});
});
});
diff --git a/spec/frontend/ml/model_registry/routes/models/index/components/mock_data.js b/spec/frontend/ml/model_registry/routes/models/index/components/mock_data.js
index b8a999abbbd..841a543606f 100644
--- a/spec/frontend/ml/model_registry/routes/models/index/components/mock_data.js
+++ b/spec/frontend/ml/model_registry/routes/models/index/components/mock_data.js
@@ -3,10 +3,27 @@ export const mockModels = [
name: 'model_1',
version: '1.0',
path: 'path/to/model_1',
+ versionCount: 3,
},
{
name: 'model_2',
- version: '1.0',
+ version: '1.1',
path: 'path/to/model_2',
+ versionCount: 1,
},
];
+
+export const modelWithoutVersion = {
+ name: 'model_without_version',
+ path: 'path/to/model_without_version',
+ versionCount: 0,
+};
+
+export const startCursor = 'eyJpZCI6IjE2In0';
+
+export const defaultPageInfo = Object.freeze({
+ startCursor,
+ endCursor: 'eyJpZCI6IjIifQ',
+ hasNextPage: true,
+ hasPreviousPage: true,
+});
diff --git a/spec/frontend/ml/model_registry/routes/models/index/components/model_row_spec.js b/spec/frontend/ml/model_registry/routes/models/index/components/model_row_spec.js
new file mode 100644
index 00000000000..7600288f560
--- /dev/null
+++ b/spec/frontend/ml/model_registry/routes/models/index/components/model_row_spec.js
@@ -0,0 +1,42 @@
+import { GlLink } from '@gitlab/ui';
+import {
+ mockModels,
+ modelWithoutVersion,
+} from 'jest/ml/model_registry/routes/models/index/components/mock_data';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import ModelRow from '~/ml/model_registry/routes/models/index/components/model_row.vue';
+
+let wrapper;
+const createWrapper = (model = mockModels[0]) => {
+ wrapper = shallowMountExtended(ModelRow, { propsData: { model } });
+};
+
+const findLink = () => wrapper.findComponent(GlLink);
+const findMessage = (message) => wrapper.findByText(message);
+
+describe('ModelRow', () => {
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ it('Has a link to the model', () => {
+ expect(findLink().text()).toBe(mockModels[0].name);
+ expect(findLink().attributes('href')).toBe(mockModels[0].path);
+ });
+
+ it('Shows the latest version and the version count', () => {
+ expect(findMessage('1.0 · 3 versions').exists()).toBe(true);
+ });
+
+ it('Shows the latest version and no version count if it has only 1 version', () => {
+ createWrapper(mockModels[1]);
+
+ expect(findMessage('1.1 · No other versions').exists()).toBe(true);
+ });
+
+ it('Shows no version message if model has no versions', () => {
+ createWrapper(modelWithoutVersion);
+
+ expect(findMessage('No registered versions').exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/notes/components/comment_form_spec.js b/spec/frontend/notes/components/comment_form_spec.js
index 9b1678c0a8a..1309fd79c14 100644
--- a/spec/frontend/notes/components/comment_form_spec.js
+++ b/spec/frontend/notes/components/comment_form_spec.js
@@ -5,6 +5,7 @@ import MockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
+import waitForPromises from 'helpers/wait_for_promises';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import batchComments from '~/batch_comments/stores/modules/batch_comments';
@@ -34,7 +35,6 @@ describe('issue_comment_form component', () => {
useLocalStorageSpy();
let trackingSpy;
- let store;
let wrapper;
let axiosMock;
@@ -48,21 +48,7 @@ describe('issue_comment_form component', () => {
const findCommentButton = () => findCommentTypeDropdown().find('button');
const findErrorAlerts = () => wrapper.findAllComponents(GlAlert).wrappers;
- async function clickCommentButton({ waitForComponent = true, waitForNetwork = true } = {}) {
- findCommentButton().trigger('click');
-
- if (waitForComponent || waitForNetwork) {
- // Wait for the click to bubble out and trigger the handler
- await nextTick();
-
- if (waitForNetwork) {
- // Wait for the network request promise to resolve
- await nextTick();
- }
- }
- }
-
- function createStore({ actions = {} } = {}) {
+ const createStore = ({ actions = {}, state = {} } = {}) => {
const baseModule = notesModule();
return new Vuex.Store({
@@ -71,8 +57,12 @@ describe('issue_comment_form component', () => {
...baseModule.actions,
...actions,
},
+ state: {
+ ...baseModule.state,
+ ...state,
+ },
});
- }
+ };
const createNotableDataMock = (data = {}) => {
return {
@@ -105,6 +95,7 @@ describe('issue_comment_form component', () => {
userData = userDataMock,
features = {},
mountFunction = shallowMount,
+ store = createStore(),
} = {}) => {
store.dispatch('setNoteableData', noteableData);
store.dispatch('setNotesData', notesData);
@@ -139,7 +130,6 @@ describe('issue_comment_form component', () => {
beforeEach(() => {
axiosMock = new MockAdapter(axios);
- store = createStore();
trackingSpy = mockTracking(undefined, null, jest.spyOn);
});
@@ -149,25 +139,32 @@ describe('issue_comment_form component', () => {
describe('user is logged in', () => {
describe('handleSave', () => {
- it('should request to save note when note is entered', () => {
- mountComponent({ mountFunction: mount, initialData: { note: 'hello world' } });
-
- jest.spyOn(wrapper.vm, 'saveNote').mockResolvedValue();
-
- findCloseReopenButton().trigger('click');
+ const note = 'hello world';
- expect(wrapper.vm.isSubmitting).toBe(true);
- expect(wrapper.vm.note).toBe('');
- expect(wrapper.vm.saveNote).toHaveBeenCalled();
+ it('should request to save note when note is entered', async () => {
+ const saveNoteSpy = jest.fn();
+ const store = createStore({
+ actions: {
+ saveNote: saveNoteSpy,
+ },
+ });
+ mountComponent({ mountFunction: mount, initialData: { note }, store });
+ expect(findCloseReopenButton().props('disabled')).toBe(false);
+ expect(findMarkdownEditor().props('value')).toBe(note);
+ await findCloseReopenButton().trigger('click');
+ expect(findCloseReopenButton().props('disabled')).toBe(true);
+ expect(findMarkdownEditor().props('value')).toBe('');
+ expect(saveNoteSpy).toHaveBeenCalled();
});
- it('tracks event', () => {
- mountComponent({ mountFunction: mount, initialData: { note: 'hello world' } });
-
- jest.spyOn(wrapper.vm, 'saveNote').mockResolvedValue();
-
- findCloseReopenButton().trigger('click');
-
+ it('tracks event', async () => {
+ const store = createStore({
+ actions: {
+ saveNote: jest.fn().mockResolvedValue(),
+ },
+ });
+ mountComponent({ mountFunction: mount, initialData: { note }, store });
+ await findCloseReopenButton().trigger('click');
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'save_markdown', {
label: 'markdown_editor',
property: 'Issue_comment',
@@ -175,12 +172,13 @@ describe('issue_comment_form component', () => {
});
it('does not report errors in the UI when the save succeeds', async () => {
- mountComponent({ mountFunction: mount, initialData: { note: '/label ~sdfghj' } });
-
- jest.spyOn(wrapper.vm, 'saveNote').mockResolvedValue();
-
- await clickCommentButton();
-
+ const store = createStore({
+ actions: {
+ saveNote: jest.fn().mockResolvedValue(),
+ },
+ });
+ mountComponent({ mountFunction: mount, initialData: { note: '/label ~sdfghj' }, store });
+ await findCommentButton().trigger('click');
// findErrorAlerts().exists returns false if *any* wrapper is empty,
// not necessarily that there aren't any at all.
// We want to check here that there are none found, so we use the
@@ -197,20 +195,17 @@ describe('issue_comment_form component', () => {
`(
'displays the correct errors ($errors) for a $httpStatus network response',
async ({ errors, httpStatus }) => {
- store = createStore({
+ const store = createStore({
actions: {
saveNote: jest.fn().mockRejectedValue({
response: { status: httpStatus, data: { errors: { commands_only: errors } } },
}),
},
});
-
- mountComponent({ mountFunction: mount, initialData: { note: '/label ~sdfghj' } });
-
- await clickCommentButton();
-
+ mountComponent({ mountFunction: mount, initialData: { note: '/label ~sdfghj' }, store });
+ await findCommentButton().trigger('click');
+ await waitForPromises();
const errorAlerts = findErrorAlerts();
-
expect(errorAlerts.length).toBe(errors.length);
errors.forEach((msg, index) => {
const alert = errorAlerts[index];
@@ -222,7 +217,7 @@ describe('issue_comment_form component', () => {
describe('if response contains validation errors', () => {
beforeEach(() => {
- store = createStore({
+ const store = createStore({
actions: {
saveNote: jest.fn().mockRejectedValue({
response: {
@@ -233,9 +228,9 @@ describe('issue_comment_form component', () => {
},
});
- mountComponent({ mountFunction: mount, initialData: { note: 'invalid note' } });
+ mountComponent({ mountFunction: mount, initialData: { note: 'invalid note' }, store });
- clickCommentButton();
+ findCommentButton().trigger('click');
});
it('renders an error message', () => {
@@ -251,7 +246,7 @@ describe('issue_comment_form component', () => {
it('should remove the correct error from the list when it is dismissed', async () => {
const commandErrors = ['1', '2', '3'];
- store = createStore({
+ const store = createStore({
actions: {
saveNote: jest.fn().mockRejectedValue({
response: {
@@ -261,10 +256,9 @@ describe('issue_comment_form component', () => {
}),
},
});
-
- mountComponent({ mountFunction: mount, initialData: { note: '/label ~sdfghj' } });
-
- await clickCommentButton();
+ mountComponent({ mountFunction: mount, initialData: { note: '/label ~sdfghj' }, store });
+ await findCommentButton().trigger('click');
+ await waitForPromises();
let errorAlerts = findErrorAlerts();
@@ -314,15 +308,8 @@ describe('issue_comment_form component', () => {
});
});
- it('hides content editor switcher if feature flag content_editor_on_issues is off', () => {
- mountComponent({ mountFunction: mount, features: { contentEditorOnIssues: false } });
-
- expect(wrapper.text()).not.toContain('Switch to rich text editing');
- });
-
- it('shows content editor switcher if feature flag content_editor_on_issues is on', () => {
- mountComponent({ mountFunction: mount, features: { contentEditorOnIssues: true } });
-
+ it('shows content editor switcher', () => {
+ mountComponent({ mountFunction: mount });
expect(wrapper.text()).toContain('Switch to rich text editing');
});
@@ -335,11 +322,8 @@ describe('issue_comment_form component', () => {
`(
'should render textarea with placeholder for $noteType',
async ({ noteIsInternal, placeholder }) => {
- mountComponent();
-
- wrapper.vm.noteIsInternal = noteIsInternal;
- await nextTick();
-
+ await mountComponent();
+ await findConfidentialNoteCheckbox().vm.$emit('input', noteIsInternal);
expect(findMarkdownEditor().props('formFieldProps').placeholder).toBe(placeholder);
},
);
@@ -371,25 +355,20 @@ describe('issue_comment_form component', () => {
expect(wrapper.find(`[href="${markdownDocsPath}"]`).exists()).toBe(true);
});
- it('should resize textarea after note discarded', async () => {
- mountComponent({ mountFunction: mount, initialData: { note: 'foo' } });
-
- jest.spyOn(wrapper.vm, 'discard');
-
- wrapper.vm.discard();
-
- await nextTick();
-
+ it('should resize textarea after note is saved', async () => {
+ const store = createStore();
+ store.registerModule('batchComments', batchComments());
+ store.state.batchComments.drafts = [{ note: 'A' }];
+ await mountComponent({ mountFunction: mount, initialData: { note: 'foo' }, store });
+ await findAddCommentNowButton().trigger('click');
+ await waitForPromises();
expect(Autosize.update).toHaveBeenCalled();
});
});
describe('edit mode', () => {
- beforeEach(() => {
- mountComponent({ mountFunction: mount });
- });
-
it('should enter edit mode when arrow up is pressed', () => {
+ mountComponent({ mountFunction: mount });
jest.spyOn(wrapper.vm, 'editCurrentUserLastNote');
findMarkdownEditorTextarea().trigger('keydown.up');
@@ -400,6 +379,7 @@ describe('issue_comment_form component', () => {
describe('event enter', () => {
describe('when no draft exists', () => {
it('should save note when cmd+enter is pressed', () => {
+ mountComponent({ mountFunction: mount });
jest.spyOn(wrapper.vm, 'handleSave');
findMarkdownEditorTextarea().trigger('keydown.enter', { metaKey: true });
@@ -408,6 +388,7 @@ describe('issue_comment_form component', () => {
});
it('should save note when ctrl+enter is pressed', () => {
+ mountComponent({ mountFunction: mount });
jest.spyOn(wrapper.vm, 'handleSave');
findMarkdownEditorTextarea().trigger('keydown.enter', { ctrlKey: true });
@@ -417,24 +398,25 @@ describe('issue_comment_form component', () => {
});
describe('when a draft exists', () => {
+ let store;
+
beforeEach(() => {
+ store = createStore();
store.registerModule('batchComments', batchComments());
store.state.batchComments.drafts = [{ note: 'A' }];
});
- it('should save note draft when cmd+enter is pressed', () => {
+ it('should save note draft when cmd+enter is pressed', async () => {
+ mountComponent({ mountFunction: mount, store });
jest.spyOn(wrapper.vm, 'handleSaveDraft');
-
- findMarkdownEditorTextarea().trigger('keydown.enter', { metaKey: true });
-
+ await findMarkdownEditorTextarea().trigger('keydown.enter', { metaKey: true });
expect(wrapper.vm.handleSaveDraft).toHaveBeenCalledWith();
});
- it('should save note draft when ctrl+enter is pressed', () => {
+ it('should save note draft when ctrl+enter is pressed', async () => {
+ mountComponent({ mountFunction: mount, store });
jest.spyOn(wrapper.vm, 'handleSaveDraft');
-
- findMarkdownEditorTextarea().trigger('keydown.enter', { ctrlKey: true });
-
+ await findMarkdownEditorTextarea().trigger('keydown.enter', { ctrlKey: true });
expect(wrapper.vm.handleSaveDraft).toHaveBeenCalledWith();
});
});
@@ -706,7 +688,7 @@ describe('issue_comment_form component', () => {
jest.spyOn(wrapper.vm, 'saveNote').mockResolvedValue();
- clickCommentButton();
+ findCommentButton().trigger('click');
expect(wrapper.vm.saveNote).not.toHaveBeenCalled();
});
@@ -719,7 +701,7 @@ describe('issue_comment_form component', () => {
jest.spyOn(wrapper.vm, 'saveNote').mockResolvedValue();
- clickCommentButton();
+ findCommentButton().trigger('click');
expect(wrapper.vm.saveNote).toHaveBeenCalled();
});
@@ -740,14 +722,16 @@ describe('issue_comment_form component', () => {
});
describe('with batchComments in store', () => {
- beforeEach(() => {
- store.registerModule('batchComments', batchComments());
- });
-
describe('add to review and comment now buttons', () => {
- it('when no drafts exist, should not render', () => {
- mountComponent();
+ let store;
+
+ beforeEach(() => {
+ store = createStore();
+ store.registerModule('batchComments', batchComments());
+ });
+ it('when no drafts exist, should not render', () => {
+ mountComponent({ store });
expect(findCommentTypeDropdown().exists()).toBe(true);
expect(findAddToReviewButton().exists()).toBe(false);
expect(findAddCommentNowButton().exists()).toBe(false);
@@ -758,20 +742,17 @@ describe('issue_comment_form component', () => {
store.state.batchComments.drafts = [{ note: 'A' }];
});
- it('should render', () => {
- mountComponent();
-
+ it('should render', async () => {
+ await mountComponent({ store });
expect(findCommentTypeDropdown().exists()).toBe(false);
expect(findAddToReviewButton().exists()).toBe(true);
expect(findAddCommentNowButton().exists()).toBe(true);
});
- it('clicking `add to review`, should call draft endpoint, set `isDraft` true', () => {
- mountComponent({ mountFunction: mount, initialData: { note: 'a draft note' } });
-
+ it('clicking `add to review`, should call draft endpoint, set `isDraft` true', async () => {
+ mountComponent({ mountFunction: mount, initialData: { note: 'a draft note' }, store });
jest.spyOn(store, 'dispatch').mockResolvedValue();
- findAddToReviewButton().trigger('click');
-
+ await findAddToReviewButton().trigger('click');
expect(store.dispatch).toHaveBeenCalledWith(
'saveNote',
expect.objectContaining({
@@ -781,12 +762,10 @@ describe('issue_comment_form component', () => {
);
});
- it('clicking `add comment now`, should call note endpoint, set `isDraft` false', () => {
- mountComponent({ mountFunction: mount, initialData: { note: 'a comment' } });
-
+ it('clicking `add comment now`, should call note endpoint, set `isDraft` false', async () => {
+ await mountComponent({ mountFunction: mount, initialData: { note: 'a comment' }, store });
jest.spyOn(store, 'dispatch').mockResolvedValue();
- findAddCommentNowButton().trigger('click');
-
+ await findAddCommentNowButton().trigger('click');
expect(store.dispatch).toHaveBeenCalledWith(
'saveNote',
expect.objectContaining({
diff --git a/spec/frontend/notes/components/email_participants_warning_spec.js b/spec/frontend/notes/components/email_participants_warning_spec.js
index 34b7524d8fb..620c753e3c5 100644
--- a/spec/frontend/notes/components/email_participants_warning_spec.js
+++ b/spec/frontend/notes/components/email_participants_warning_spec.js
@@ -1,10 +1,12 @@
import { mount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
+
import EmailParticipantsWarning from '~/notes/components/email_participants_warning.vue';
describe('Email Participants Warning Component', () => {
let wrapper;
- const findMoreButton = () => wrapper.find('button');
+ const findMoreButton = () => wrapper.findComponent(GlButton);
const createWrapper = (emails) => {
wrapper = mount(EmailParticipantsWarning, {
@@ -48,7 +50,7 @@ describe('Email Participants Warning Component', () => {
describe('when more button clicked', () => {
beforeEach(() => {
- findMoreButton().trigger('click');
+ findMoreButton().vm.$emit('click');
});
it('more button no longer exists', () => {
diff --git a/spec/frontend/notes/components/note_form_spec.js b/spec/frontend/notes/components/note_form_spec.js
index 3c461f2b382..e2072ebd04d 100644
--- a/spec/frontend/notes/components/note_form_spec.js
+++ b/spec/frontend/notes/components/note_form_spec.js
@@ -4,6 +4,7 @@ import batchComments from '~/batch_comments/stores/modules/batch_comments';
import NoteForm from '~/notes/components/note_form.vue';
import createStore from '~/notes/stores';
import MarkdownField from '~/vue_shared/components/markdown/field.vue';
+import CommentFieldLayout from '~/notes/components/comment_field_layout.vue';
import { AT_WHO_ACTIVE_CLASS } from '~/gfm_auto_complete';
import eventHub from '~/environments/event_hub';
import { mountExtended } from 'helpers/vue_test_utils_helper';
@@ -75,14 +76,8 @@ describe('issue_note_form component', () => {
});
});
- it('hides content editor switcher if feature flag content_editor_on_issues is off', () => {
- createComponentWrapper({}, { contentEditorOnIssues: false });
-
- expect(wrapper.text()).not.toContain('Switch to rich text editing');
- });
-
- it('shows content editor switcher if feature flag content_editor_on_issues is on', () => {
- createComponentWrapper({}, { contentEditorOnIssues: true });
+ it('shows content editor switcher', () => {
+ createComponentWrapper();
expect(wrapper.text()).toContain('Switch to rich text editing');
});
@@ -239,6 +234,21 @@ describe('issue_note_form component', () => {
property: 'Issue_note',
});
});
+
+ describe('when discussion is confidential', () => {
+ beforeEach(() => {
+ createComponentWrapper({
+ discussion: {
+ ...discussionMock,
+ confidential: true,
+ },
+ });
+ });
+
+ it('passes correct confidentiality to CommentFieldLayout', () => {
+ expect(wrapper.findComponent(CommentFieldLayout).props('isInternalNote')).toBe(true);
+ });
+ });
});
});
diff --git a/spec/frontend/notes/mock_data.js b/spec/frontend/notes/mock_data.js
index b291eba61f5..67c0ba90d40 100644
--- a/spec/frontend/notes/mock_data.js
+++ b/spec/frontend/notes/mock_data.js
@@ -321,6 +321,7 @@ export const discussionMock = {
individual_note: false,
resolvable: true,
active: true,
+ confidential: false,
};
export const loggedOutnoteableData = {
diff --git a/spec/frontend/notes/stores/actions_spec.js b/spec/frontend/notes/stores/actions_spec.js
index 104c297b44e..f07ba1e032f 100644
--- a/spec/frontend/notes/stores/actions_spec.js
+++ b/spec/frontend/notes/stores/actions_spec.js
@@ -1343,8 +1343,6 @@ describe('Actions Notes Store', () => {
});
it('dispatches `fetchDiscussionsBatch` action with notes_filter 0 for merge request', () => {
- window.gon = { features: { mrActivityFilters: true } };
-
return testAction(
actions.fetchDiscussions,
{ path: 'test-path', filter: 'test-filter', persistFilter: 'test-persist-filter' },
@@ -1397,7 +1395,7 @@ describe('Actions Notes Store', () => {
type: 'fetchDiscussionsBatch',
payload: {
config: {
- params: { notes_filter: 'test-filter', persist_filter: 'test-persist-filter' },
+ params: { notes_filter: 0, persist_filter: false },
},
path: 'test-path',
perPage: 20,
diff --git a/spec/frontend/observability/client_spec.js b/spec/frontend/observability/client_spec.js
index 056175eac07..68a53131539 100644
--- a/spec/frontend/observability/client_spec.js
+++ b/spec/frontend/observability/client_spec.js
@@ -12,7 +12,8 @@ describe('buildClient', () => {
const tracingUrl = 'https://example.com/tracing';
const provisioningUrl = 'https://example.com/provisioning';
-
+ const servicesUrl = 'https://example.com/services';
+ const operationsUrl = 'https://example.com/services/$SERVICE_NAME$/operations';
const FETCHING_TRACES_ERROR = 'traces are missing/invalid in the response';
beforeEach(() => {
@@ -22,6 +23,8 @@ describe('buildClient', () => {
client = buildClient({
tracingUrl,
provisioningUrl,
+ servicesUrl,
+ operationsUrl,
});
});
@@ -29,6 +32,27 @@ describe('buildClient', () => {
axiosMock.restore();
});
+ describe('buildClient', () => {
+ it('rejects if params are missing', () => {
+ const e = new Error(
+ 'missing required params. provisioningUrl, tracingUrl, servicesUrl, operationsUrl are required',
+ );
+ expect(() =>
+ buildClient({ tracingUrl: 'test', servicesUrl: 'test', operationsUrl: 'test' }),
+ ).toThrow(e);
+ expect(() =>
+ buildClient({ provisioningUrl: 'test', servicesUrl: 'test', operationsUrl: 'test' }),
+ ).toThrow(e);
+ expect(() =>
+ buildClient({ provisioningUrl: 'test', tracingUrl: 'test', operationsUrl: 'test' }),
+ ).toThrow(e);
+ expect(() =>
+ buildClient({ provisioningUrl: 'test', tracingUrl: 'test', servicesUrl: 'test' }),
+ ).toThrow(e);
+ expect(() => buildClient({})).toThrow(e);
+ });
+ });
+
describe('isTracingEnabled', () => {
it('returns true if requests succeedes', async () => {
axiosMock.onGet(provisioningUrl).reply(200, {
@@ -145,18 +169,18 @@ describe('buildClient', () => {
describe('fetchTraces', () => {
it('fetches traces from the tracing URL', async () => {
- const mockTraces = [
- {
- trace_id: 'trace-1',
- duration_nano: 3000,
- spans: [{ duration_nano: 1000 }, { duration_nano: 2000 }],
- },
- { trace_id: 'trace-2', duration_nano: 3000, spans: [{ duration_nano: 2000 }] },
- ];
-
- axiosMock.onGet(tracingUrl).reply(200, {
- traces: mockTraces,
- });
+ const mockResponse = {
+ traces: [
+ {
+ trace_id: 'trace-1',
+ duration_nano: 3000,
+ spans: [{ duration_nano: 1000 }, { duration_nano: 2000 }],
+ },
+ { trace_id: 'trace-2', duration_nano: 3000, spans: [{ duration_nano: 2000 }] },
+ ],
+ };
+
+ axiosMock.onGet(tracingUrl).reply(200, mockResponse);
const result = await client.fetchTraces();
@@ -165,7 +189,7 @@ describe('buildClient', () => {
withCredentials: true,
params: new URLSearchParams(),
});
- expect(result).toEqual(mockTraces);
+ expect(result).toEqual(mockResponse);
});
it('rejects if traces are missing', async () => {
@@ -197,28 +221,42 @@ describe('buildClient', () => {
expect(getQueryParam()).toBe('');
});
+ it('appends page_token if specified', async () => {
+ await client.fetchTraces({ pageToken: 'page-token' });
+
+ expect(getQueryParam()).toBe('page_token=page-token');
+ });
+
+ it('appends page_size if specified', async () => {
+ await client.fetchTraces({ pageSize: 10 });
+
+ expect(getQueryParam()).toBe('page_size=10');
+ });
+
it('converts filter to proper query params', async () => {
await client.fetchTraces({
- durationMs: [
- { operator: '>', value: '100' },
- { operator: '<', value: '1000' },
- ],
- operation: [
- { operator: '=', value: 'op' },
- { operator: '!=', value: 'not-op' },
- ],
- serviceName: [
- { operator: '=', value: 'service' },
- { operator: '!=', value: 'not-service' },
- ],
- period: [{ operator: '=', value: '5m' }],
- traceId: [
- { operator: '=', value: 'trace-id' },
- { operator: '!=', value: 'not-trace-id' },
- ],
+ filters: {
+ durationMs: [
+ { operator: '>', value: '100' },
+ { operator: '<', value: '1000' },
+ ],
+ operation: [
+ { operator: '=', value: 'op' },
+ { operator: '!=', value: 'not-op' },
+ ],
+ serviceName: [
+ { operator: '=', value: 'service' },
+ { operator: '!=', value: 'not-service' },
+ ],
+ period: [{ operator: '=', value: '5m' }],
+ traceId: [
+ { operator: '=', value: 'trace-id' },
+ { operator: '!=', value: 'not-trace-id' },
+ ],
+ },
});
expect(getQueryParam()).toBe(
- 'gt[duration_nano]=100000&lt[duration_nano]=1000000' +
+ 'gt[duration_nano]=100000000&lt[duration_nano]=1000000000' +
'&operation=op&not[operation]=not-op' +
'&service_name=service&not[service_name]=not-service' +
'&period=5m' +
@@ -228,17 +266,21 @@ describe('buildClient', () => {
it('handles repeated params', async () => {
await client.fetchTraces({
- operation: [
- { operator: '=', value: 'op' },
- { operator: '=', value: 'op2' },
- ],
+ filters: {
+ operation: [
+ { operator: '=', value: 'op' },
+ { operator: '=', value: 'op2' },
+ ],
+ },
});
expect(getQueryParam()).toBe('operation=op&operation=op2');
});
it('ignores unsupported filters', async () => {
await client.fetchTraces({
- unsupportedFilter: [{ operator: '=', value: 'foo' }],
+ filters: {
+ unsupportedFilter: [{ operator: '=', value: 'foo' }],
+ },
});
expect(getQueryParam()).toBe('');
@@ -246,8 +288,10 @@ describe('buildClient', () => {
it('ignores empty filters', async () => {
await client.fetchTraces({
- durationMs: null,
- traceId: undefined,
+ filters: {
+ durationMs: null,
+ traceId: undefined,
+ },
});
expect(getQueryParam()).toBe('');
@@ -255,28 +299,103 @@ describe('buildClient', () => {
it('ignores unsupported operators', async () => {
await client.fetchTraces({
- durationMs: [
- { operator: '*', value: 'foo' },
- { operator: '=', value: 'foo' },
- { operator: '!=', value: 'foo' },
- ],
- operation: [
- { operator: '>', value: 'foo' },
- { operator: '<', value: 'foo' },
- ],
- serviceName: [
- { operator: '>', value: 'foo' },
- { operator: '<', value: 'foo' },
- ],
- period: [{ operator: '!=', value: 'foo' }],
- traceId: [
- { operator: '>', value: 'foo' },
- { operator: '<', value: 'foo' },
- ],
+ filters: {
+ durationMs: [
+ { operator: '*', value: 'foo' },
+ { operator: '=', value: 'foo' },
+ { operator: '!=', value: 'foo' },
+ ],
+ operation: [
+ { operator: '>', value: 'foo' },
+ { operator: '<', value: 'foo' },
+ ],
+ serviceName: [
+ { operator: '>', value: 'foo' },
+ { operator: '<', value: 'foo' },
+ ],
+ period: [{ operator: '!=', value: 'foo' }],
+ traceId: [
+ { operator: '>', value: 'foo' },
+ { operator: '<', value: 'foo' },
+ ],
+ },
});
expect(getQueryParam()).toBe('');
});
});
});
+
+ describe('fetchServices', () => {
+ it('fetches services from the services URL', async () => {
+ const mockResponse = {
+ services: [{ name: 'service-1' }, { name: 'service-2' }],
+ };
+
+ axiosMock.onGet(servicesUrl).reply(200, mockResponse);
+
+ const result = await client.fetchServices();
+
+ expect(axios.get).toHaveBeenCalledTimes(1);
+ expect(axios.get).toHaveBeenCalledWith(servicesUrl, {
+ withCredentials: true,
+ });
+ expect(result).toEqual(mockResponse.services);
+ });
+
+ it('rejects if services are missing', async () => {
+ axiosMock.onGet(servicesUrl).reply(200, {});
+
+ const e = 'failed to fetch services. invalid response';
+ await expect(client.fetchServices()).rejects.toThrow(e);
+ expect(Sentry.captureException).toHaveBeenCalledWith(new Error(e));
+ });
+ });
+
+ describe('fetchOperations', () => {
+ const serviceName = 'test-service';
+ const parsedOperationsUrl = `https://example.com/services/${serviceName}/operations`;
+
+ it('fetches operations from the operations URL', async () => {
+ const mockResponse = {
+ operations: [{ name: 'operation-1' }, { name: 'operation-2' }],
+ };
+
+ axiosMock.onGet(parsedOperationsUrl).reply(200, mockResponse);
+
+ const result = await client.fetchOperations(serviceName);
+
+ expect(axios.get).toHaveBeenCalledTimes(1);
+ expect(axios.get).toHaveBeenCalledWith(parsedOperationsUrl, {
+ withCredentials: true,
+ });
+ expect(result).toEqual(mockResponse.operations);
+ });
+
+ it('rejects if serviceName is missing', async () => {
+ const e = 'fetchOperations() - serviceName is required.';
+ await expect(client.fetchOperations()).rejects.toThrow(e);
+ expect(Sentry.captureException).toHaveBeenCalledWith(new Error(e));
+ });
+
+ it('rejects if operationUrl does not contain $SERVICE_NAME$', async () => {
+ client = buildClient({
+ tracingUrl,
+ provisioningUrl,
+ servicesUrl,
+ operationsUrl: 'something',
+ });
+ const e = 'fetchOperations() - operationsUrl must contain $SERVICE_NAME$';
+ await expect(client.fetchOperations(serviceName)).rejects.toThrow(e);
+ expect(Sentry.captureException).toHaveBeenCalledWith(new Error(e));
+ });
+
+ it('rejects if operations are missing', async () => {
+ axiosMock.onGet(parsedOperationsUrl).reply(200, {});
+
+ const e = 'failed to fetch operations. invalid response';
+ await expect(client.fetchOperations(serviceName)).rejects.toThrow(e);
+ expect(Sentry.captureException).toHaveBeenCalledWith(new Error(e));
+ });
+ });
});
diff --git a/spec/frontend/observability/index_spec.js b/spec/frontend/observability/index_spec.js
deleted file mode 100644
index 25eb048c62b..00000000000
--- a/spec/frontend/observability/index_spec.js
+++ /dev/null
@@ -1,64 +0,0 @@
-import { createWrapper } from '@vue/test-utils';
-import Vue, { nextTick } from 'vue';
-import renderObservability from '~/observability/index';
-import ObservabilityApp from '~/observability/components/observability_app.vue';
-import { SKELETON_VARIANTS_BY_ROUTE } from '~/observability/constants';
-
-describe('renderObservability', () => {
- let element;
- let vueInstance;
- let component;
-
- const OBSERVABILITY_ROUTES = Object.keys(SKELETON_VARIANTS_BY_ROUTE);
- const SKELETON_VARIANTS = Object.values(SKELETON_VARIANTS_BY_ROUTE);
-
- beforeEach(() => {
- element = document.createElement('div');
- element.setAttribute('id', 'js-observability-app');
- element.dataset.observabilityIframeSrc = 'https://observe.gitlab.com/';
- document.body.appendChild(element);
-
- vueInstance = renderObservability();
- component = createWrapper(vueInstance).findComponent(ObservabilityApp);
- });
-
- afterEach(() => {
- element.remove();
- });
-
- it('should return a Vue instance', () => {
- expect(vueInstance).toEqual(expect.any(Vue));
- });
-
- it('should render the ObservabilityApp component', () => {
- expect(component.props('observabilityIframeSrc')).toBe('https://observe.gitlab.com/');
- });
-
- describe('skeleton variant', () => {
- it.each`
- pathDescription | path | variant
- ${'dashboards'} | ${OBSERVABILITY_ROUTES[0]} | ${SKELETON_VARIANTS[0]}
- ${'explore'} | ${OBSERVABILITY_ROUTES[1]} | ${SKELETON_VARIANTS[1]}
- ${'manage dashboards'} | ${OBSERVABILITY_ROUTES[2]} | ${SKELETON_VARIANTS[2]}
- ${'any other'} | ${'unknown/route'} | ${SKELETON_VARIANTS[0]}
- `(
- 'renders the $variant skeleton variant for $pathDescription path',
- async ({ path, variant }) => {
- component.vm.$router.push(path);
- await nextTick();
-
- expect(component.props('skeletonVariant')).toBe(variant);
- },
- );
- });
-
- it('handle route-update events', () => {
- component.vm.$router.push('/something?foo=bar');
- component.vm.$emit('route-update', { url: '/some_path' });
- expect(component.vm.$router.currentRoute.path).toBe('/something');
- expect(component.vm.$router.currentRoute.query).toEqual({
- foo: 'bar',
- observability_path: '/some_path',
- });
- });
-});
diff --git a/spec/frontend/observability/observability_app_spec.js b/spec/frontend/observability/observability_app_spec.js
deleted file mode 100644
index 392992a5962..00000000000
--- a/spec/frontend/observability/observability_app_spec.js
+++ /dev/null
@@ -1,201 +0,0 @@
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import { stubComponent } from 'helpers/stub_component';
-import ObservabilityApp from '~/observability/components/observability_app.vue';
-import ObservabilitySkeleton from '~/observability/components/skeleton/index.vue';
-import {
- MESSAGE_EVENT_TYPE,
- INLINE_EMBED_DIMENSIONS,
- FULL_APP_DIMENSIONS,
- SKELETON_VARIANT_EMBED,
-} from '~/observability/constants';
-
-import { darkModeEnabled } from '~/lib/utils/color_utils';
-
-jest.mock('~/lib/utils/color_utils');
-
-describe('ObservabilityApp', () => {
- let wrapper;
-
- const $route = {
- pathname: 'https://gitlab.com/gitlab-org/',
- path: 'https://gitlab.com/gitlab-org/-/observability/dashboards',
- query: { otherQuery: 100 },
- };
-
- const mockSkeletonOnContentLoaded = jest.fn();
-
- const findIframe = () => wrapper.findByTestId('observability-ui-iframe');
-
- const TEST_IFRAME_SRC = 'https://observe.gitlab.com/9970/?groupId=14485840';
-
- const TEST_USERNAME = 'test-user';
-
- const mountComponent = (props) => {
- wrapper = shallowMountExtended(ObservabilityApp, {
- propsData: {
- observabilityIframeSrc: TEST_IFRAME_SRC,
- ...props,
- },
- stubs: {
- ObservabilitySkeleton: stubComponent(ObservabilitySkeleton, {
- methods: { onContentLoaded: mockSkeletonOnContentLoaded },
- }),
- },
- mocks: {
- $route,
- },
- });
- };
-
- const dispatchMessageEvent = (message) =>
- window.dispatchEvent(new MessageEvent('message', message));
-
- beforeEach(() => {
- gon.current_username = TEST_USERNAME;
- });
-
- describe('iframe src', () => {
- it('should render an iframe with observabilityIframeSrc, decorated with light theme and username', () => {
- darkModeEnabled.mockReturnValueOnce(false);
- mountComponent();
- const iframe = findIframe();
-
- expect(iframe.exists()).toBe(true);
- expect(iframe.attributes('src')).toBe(
- `${TEST_IFRAME_SRC}&theme=light&username=${TEST_USERNAME}`,
- );
- });
-
- it('should render an iframe with observabilityIframeSrc decorated with dark theme and username', () => {
- darkModeEnabled.mockReturnValueOnce(true);
- mountComponent();
- const iframe = findIframe();
-
- expect(iframe.exists()).toBe(true);
- expect(iframe.attributes('src')).toBe(
- `${TEST_IFRAME_SRC}&theme=dark&username=${TEST_USERNAME}`,
- );
- });
- });
-
- describe('iframe sandbox', () => {
- it('should render an iframe with sandbox attributes', () => {
- mountComponent();
- const iframe = findIframe();
-
- expect(iframe.exists()).toBe(true);
- expect(iframe.attributes('sandbox')).toBe('allow-same-origin allow-forms allow-scripts');
- });
- });
-
- describe('iframe kiosk query param', () => {
- it('when inlineEmbed, it should set the proper kiosk query parameter', () => {
- mountComponent({
- inlineEmbed: true,
- });
-
- const iframe = findIframe();
-
- expect(iframe.attributes('src')).toBe(
- `${TEST_IFRAME_SRC}&theme=light&username=${TEST_USERNAME}&kiosk=inline-embed`,
- );
- });
- });
-
- describe('iframe size', () => {
- it('should set the specified size', () => {
- mountComponent({
- height: INLINE_EMBED_DIMENSIONS.HEIGHT,
- width: INLINE_EMBED_DIMENSIONS.WIDTH,
- });
-
- const iframe = findIframe();
-
- expect(iframe.attributes('width')).toBe(INLINE_EMBED_DIMENSIONS.WIDTH);
- expect(iframe.attributes('height')).toBe(INLINE_EMBED_DIMENSIONS.HEIGHT);
- });
-
- it('should fallback to default size', () => {
- mountComponent({});
-
- const iframe = findIframe();
-
- expect(iframe.attributes('width')).toBe(FULL_APP_DIMENSIONS.WIDTH);
- expect(iframe.attributes('height')).toBe(FULL_APP_DIMENSIONS.HEIGHT);
- });
- });
-
- describe('skeleton variant', () => {
- it('sets the specified skeleton variant', () => {
- mountComponent({ skeletonVariant: SKELETON_VARIANT_EMBED });
- const props = wrapper.findComponent(ObservabilitySkeleton).props();
-
- expect(props.variant).toBe(SKELETON_VARIANT_EMBED);
- });
-
- it('should have a default skeleton variant', () => {
- mountComponent();
- const props = wrapper.findComponent(ObservabilitySkeleton).props();
-
- expect(props.variant).toBe('dashboards');
- });
- });
-
- describe('on GOUI_ROUTE_UPDATE', () => {
- it('should emit a route-update event', () => {
- mountComponent();
-
- const payload = { url: '/explore' };
- dispatchMessageEvent({
- data: { type: MESSAGE_EVENT_TYPE.GOUI_ROUTE_UPDATE, payload },
- origin: 'https://observe.gitlab.com',
- });
-
- expect(wrapper.emitted('route-update')[0]).toEqual([payload]);
- });
- });
-
- describe('on GOUI_LOADED', () => {
- beforeEach(() => {
- mountComponent();
- });
-
- it('should call onContentLoaded method', () => {
- dispatchMessageEvent({
- data: { type: MESSAGE_EVENT_TYPE.GOUI_LOADED },
- origin: 'https://observe.gitlab.com',
- });
- expect(mockSkeletonOnContentLoaded).toHaveBeenCalled();
- });
-
- it('should not call onContentLoaded method if origin is different', () => {
- dispatchMessageEvent({
- data: { type: MESSAGE_EVENT_TYPE.GOUI_LOADED },
- origin: 'https://example.com',
- });
- expect(mockSkeletonOnContentLoaded).not.toHaveBeenCalled();
- });
-
- it('should not call onContentLoaded method if event type is different', () => {
- dispatchMessageEvent({
- data: { type: 'UNKNOWN_EVENT' },
- origin: 'https://observe.gitlab.com',
- });
- expect(mockSkeletonOnContentLoaded).not.toHaveBeenCalled();
- });
- });
-
- describe('on unmount', () => {
- it('should not emit any even on route update', () => {
- mountComponent();
- wrapper.destroy();
-
- dispatchMessageEvent({
- data: { type: MESSAGE_EVENT_TYPE.GOUI_ROUTE_UPDATE, payload: { url: '/explore' } },
- origin: 'https://observe.gitlab.com',
- });
-
- expect(wrapper.emitted('route-update')).toBeUndefined();
- });
- });
-});
diff --git a/spec/frontend/observability/observability_container_spec.js b/spec/frontend/observability/observability_container_spec.js
index 1152df072d4..5d838756308 100644
--- a/spec/frontend/observability/observability_container_spec.js
+++ b/spec/frontend/observability/observability_container_spec.js
@@ -16,6 +16,8 @@ describe('ObservabilityContainer', () => {
const OAUTH_URL = 'https://example.com/oauth';
const TRACING_URL = 'https://example.com/tracing';
const PROVISIONING_URL = 'https://example.com/provisioning';
+ const SERVICES_URL = 'https://example.com/services';
+ const OPERATIONS_URL = 'https://example.com/operations';
beforeEach(() => {
jest.spyOn(console, 'error').mockImplementation();
@@ -27,6 +29,8 @@ describe('ObservabilityContainer', () => {
oauthUrl: OAUTH_URL,
tracingUrl: TRACING_URL,
provisioningUrl: PROVISIONING_URL,
+ servicesUrl: SERVICES_URL,
+ operationsUrl: OPERATIONS_URL,
},
stubs: {
ObservabilitySkeleton: stubComponent(ObservabilitySkeleton, {
@@ -93,6 +97,8 @@ describe('ObservabilityContainer', () => {
expect(buildClient).toHaveBeenCalledWith({
provisioningUrl: PROVISIONING_URL,
tracingUrl: TRACING_URL,
+ servicesUrl: SERVICES_URL,
+ operationsUrl: OPERATIONS_URL,
});
expect(findIframe().exists()).toBe(false);
});
diff --git a/spec/frontend/observability/skeleton_spec.js b/spec/frontend/observability/skeleton_spec.js
index 979070cfb12..5501fa117e0 100644
--- a/spec/frontend/observability/skeleton_spec.js
+++ b/spec/frontend/observability/skeleton_spec.js
@@ -3,32 +3,16 @@ import { GlSkeletonLoader, GlAlert, GlLoadingIcon } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import Skeleton from '~/observability/components/skeleton/index.vue';
-import DashboardsSkeleton from '~/observability/components/skeleton/dashboards.vue';
-import ExploreSkeleton from '~/observability/components/skeleton/explore.vue';
-import ManageSkeleton from '~/observability/components/skeleton/manage.vue';
-import EmbedSkeleton from '~/observability/components/skeleton/embed.vue';
-import {
- SKELETON_VARIANTS_BY_ROUTE,
- DEFAULT_TIMERS,
- SKELETON_VARIANT_EMBED,
-} from '~/observability/constants';
+import { DEFAULT_TIMERS } from '~/observability/constants';
describe('Skeleton component', () => {
let wrapper;
- const SKELETON_VARIANTS = [...Object.values(SKELETON_VARIANTS_BY_ROUTE), 'spinner'];
+ const findSpinner = () => wrapper.findComponent(GlLoadingIcon);
const findContentWrapper = () => wrapper.findByTestId('content-wrapper');
- const findExploreSkeleton = () => wrapper.findComponent(ExploreSkeleton);
-
- const findDashboardsSkeleton = () => wrapper.findComponent(DashboardsSkeleton);
-
- const findManageSkeleton = () => wrapper.findComponent(ManageSkeleton);
-
- const findEmbedSkeleton = () => wrapper.findComponent(EmbedSkeleton);
-
const findAlert = () => wrapper.findComponent(GlAlert);
const mountComponent = ({ ...props } = {}) => {
@@ -39,39 +23,39 @@ describe('Skeleton component', () => {
describe('on mount', () => {
beforeEach(() => {
- mountComponent({ variant: 'explore' });
+ mountComponent({ variant: 'spinner' });
});
describe('showing content', () => {
it('shows the skeleton if content is not loaded within CONTENT_WAIT_MS', async () => {
- expect(findExploreSkeleton().exists()).toBe(false);
- expect(findContentWrapper().isVisible()).toBe(false);
+ expect(findSpinner().exists()).toBe(false);
+ expect(findContentWrapper().exists()).toBe(false);
jest.advanceTimersByTime(DEFAULT_TIMERS.CONTENT_WAIT_MS);
await nextTick();
- expect(findExploreSkeleton().exists()).toBe(true);
- expect(findContentWrapper().isVisible()).toBe(false);
+ expect(findSpinner().exists()).toBe(true);
+ expect(findContentWrapper().exists()).toBe(false);
});
it('does not show the skeleton if content loads within CONTENT_WAIT_MS', async () => {
- expect(findExploreSkeleton().exists()).toBe(false);
- expect(findContentWrapper().isVisible()).toBe(false);
+ expect(findSpinner().exists()).toBe(false);
+ expect(findContentWrapper().exists()).toBe(false);
wrapper.vm.onContentLoaded();
await nextTick();
- expect(findContentWrapper().isVisible()).toBe(true);
- expect(findExploreSkeleton().exists()).toBe(false);
+ expect(findContentWrapper().exists()).toBe(true);
+ expect(findSpinner().exists()).toBe(false);
jest.advanceTimersByTime(DEFAULT_TIMERS.CONTENT_WAIT_MS);
await nextTick();
- expect(findContentWrapper().isVisible()).toBe(true);
- expect(findExploreSkeleton().exists()).toBe(false);
+ expect(findContentWrapper().exists()).toBe(true);
+ expect(findSpinner().exists()).toBe(false);
});
it('hides the skeleton after content loads', async () => {
@@ -79,15 +63,15 @@ describe('Skeleton component', () => {
await nextTick();
- expect(findExploreSkeleton().exists()).toBe(true);
- expect(findContentWrapper().isVisible()).toBe(false);
+ expect(findSpinner().exists()).toBe(true);
+ expect(findContentWrapper().exists()).toBe(false);
wrapper.vm.onContentLoaded();
await nextTick();
- expect(findContentWrapper().isVisible()).toBe(true);
- expect(findExploreSkeleton().exists()).toBe(false);
+ expect(findContentWrapper().exists()).toBe(true);
+ expect(findSpinner().exists()).toBe(false);
});
});
@@ -99,7 +83,7 @@ describe('Skeleton component', () => {
await nextTick();
expect(findAlert().exists()).toBe(true);
- expect(findContentWrapper().isVisible()).toBe(false);
+ expect(findContentWrapper().exists()).toBe(false);
});
it('shows the error dialog if content fails to load', async () => {
@@ -110,7 +94,7 @@ describe('Skeleton component', () => {
await nextTick();
expect(findAlert().exists()).toBe(true);
- expect(findContentWrapper().isVisible()).toBe(false);
+ expect(findContentWrapper().exists()).toBe(false);
});
it('does not show the error dialog if content has loaded within TIMEOUT_MS', async () => {
@@ -120,36 +104,28 @@ describe('Skeleton component', () => {
await nextTick();
expect(findAlert().exists()).toBe(false);
- expect(findContentWrapper().isVisible()).toBe(true);
+ expect(findContentWrapper().exists()).toBe(true);
});
});
});
describe('skeleton variant', () => {
- it.each`
- skeletonType | condition | variant
- ${'dashboards'} | ${'variant is dashboards'} | ${SKELETON_VARIANTS[0]}
- ${'explore'} | ${'variant is explore'} | ${SKELETON_VARIANTS[1]}
- ${'manage'} | ${'variant is manage'} | ${SKELETON_VARIANTS[2]}
- ${'embed'} | ${'variant is embed'} | ${SKELETON_VARIANT_EMBED}
- ${'spinner'} | ${'variant is spinner'} | ${'spinner'}
- ${'default'} | ${'variant is not manage, dashboards or explore'} | ${'unknown'}
- `('should render $skeletonType skeleton if $condition', async ({ skeletonType, variant }) => {
- mountComponent({ variant });
+ it('shows only the spinner variant when variant is spinner', async () => {
+ mountComponent({ variant: 'spinner' });
jest.advanceTimersByTime(DEFAULT_TIMERS.CONTENT_WAIT_MS);
await nextTick();
- const showsDefaultSkeleton = ![...SKELETON_VARIANTS, SKELETON_VARIANT_EMBED].includes(
- variant,
- );
- expect(findDashboardsSkeleton().exists()).toBe(skeletonType === SKELETON_VARIANTS[0]);
- expect(findExploreSkeleton().exists()).toBe(skeletonType === SKELETON_VARIANTS[1]);
- expect(findManageSkeleton().exists()).toBe(skeletonType === SKELETON_VARIANTS[2]);
- expect(findEmbedSkeleton().exists()).toBe(skeletonType === SKELETON_VARIANT_EMBED);
+ expect(findSpinner().exists()).toBe(true);
+ expect(wrapper.findComponent(GlSkeletonLoader).exists()).toBe(false);
+ });
- expect(wrapper.findComponent(GlSkeletonLoader).exists()).toBe(showsDefaultSkeleton);
+ it('shows only the default variant when variant is not spinner', async () => {
+ mountComponent({ variant: 'unknown' });
+ jest.advanceTimersByTime(DEFAULT_TIMERS.CONTENT_WAIT_MS);
+ await nextTick();
- expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(variant === 'spinner');
+ expect(findSpinner().exists()).toBe(false);
+ expect(wrapper.findComponent(GlSkeletonLoader).exists()).toBe(true);
});
});
diff --git a/spec/frontend/organizations/index/components/app_spec.js b/spec/frontend/organizations/index/components/app_spec.js
new file mode 100644
index 00000000000..175b1e1c552
--- /dev/null
+++ b/spec/frontend/organizations/index/components/app_spec.js
@@ -0,0 +1,87 @@
+import { GlButton } from '@gitlab/ui';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { createAlert } from '~/alert';
+import { organizations } from '~/organizations/mock_data';
+import resolvers from '~/organizations/shared/graphql/resolvers';
+import organizationsQuery from '~/organizations/index/graphql/organizations.query.graphql';
+import OrganizationsIndexApp from '~/organizations/index/components/app.vue';
+import OrganizationsView from '~/organizations/index/components/organizations_view.vue';
+import { MOCK_NEW_ORG_URL } from '../mock_data';
+
+jest.mock('~/alert');
+
+Vue.use(VueApollo);
+
+describe('OrganizationsIndexApp', () => {
+ let wrapper;
+ let mockApollo;
+
+ const createComponent = (mockResolvers = resolvers) => {
+ mockApollo = createMockApollo([[organizationsQuery, mockResolvers]]);
+
+ wrapper = shallowMountExtended(OrganizationsIndexApp, {
+ apolloProvider: mockApollo,
+ provide: {
+ newOrganizationUrl: MOCK_NEW_ORG_URL,
+ },
+ });
+ };
+
+ afterEach(() => {
+ mockApollo = null;
+ });
+
+ const findOrganizationHeaderText = () => wrapper.findByText('Organizations');
+ const findNewOrganizationButton = () => wrapper.findComponent(GlButton);
+ const findOrganizationsView = () => wrapper.findComponent(OrganizationsView);
+
+ const loadingResolver = jest.fn().mockReturnValue(new Promise(() => {}));
+ const successfulResolver = (nodes) =>
+ jest.fn().mockResolvedValue({
+ data: { currentUser: { id: 1, organizations: { nodes } } },
+ });
+ const errorResolver = jest.fn().mockRejectedValue('error');
+
+ describe.each`
+ description | mockResolver | headerText | newOrgLink | loading | orgsData | error
+ ${'when API call is loading'} | ${loadingResolver} | ${true} | ${MOCK_NEW_ORG_URL} | ${true} | ${[]} | ${false}
+ ${'when API returns successful with results'} | ${successfulResolver(organizations)} | ${true} | ${MOCK_NEW_ORG_URL} | ${false} | ${organizations} | ${false}
+ ${'when API returns successful without results'} | ${successfulResolver([])} | ${false} | ${false} | ${false} | ${[]} | ${false}
+ ${'when API returns error'} | ${errorResolver} | ${false} | ${false} | ${false} | ${[]} | ${true}
+ `('$description', ({ mockResolver, headerText, newOrgLink, loading, orgsData, error }) => {
+ beforeEach(async () => {
+ createComponent(mockResolver);
+ await waitForPromises();
+ });
+
+ it(`does ${headerText ? '' : 'not '}render the header text`, () => {
+ expect(findOrganizationHeaderText().exists()).toBe(headerText);
+ });
+
+ it(`does ${newOrgLink ? '' : 'not '}render new organization button with correct link`, () => {
+ expect(
+ findNewOrganizationButton().exists() && findNewOrganizationButton().attributes('href'),
+ ).toBe(newOrgLink);
+ });
+
+ it(`renders the organizations view with ${loading} loading prop`, () => {
+ expect(findOrganizationsView().props('loading')).toBe(loading);
+ });
+
+ it(`renders the organizations view with ${
+ orgsData ? 'correct' : 'empty'
+ } organizations array prop`, () => {
+ expect(findOrganizationsView().props('organizations')).toStrictEqual(orgsData);
+ });
+
+ it(`does ${error ? '' : 'not '}render an error message`, () => {
+ return error
+ ? expect(createAlert).toHaveBeenCalled()
+ : expect(createAlert).not.toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/organizations/index/components/organizations_list_item_spec.js b/spec/frontend/organizations/index/components/organizations_list_item_spec.js
new file mode 100644
index 00000000000..b3bff5ed517
--- /dev/null
+++ b/spec/frontend/organizations/index/components/organizations_list_item_spec.js
@@ -0,0 +1,70 @@
+import { GlAvatarLabeled } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
+import OrganizationsListItem from '~/organizations/index/components/organizations_list_item.vue';
+import { organizations } from '~/organizations/mock_data';
+
+const MOCK_ORGANIZATION = organizations[0];
+
+describe('OrganizationsListItem', () => {
+ let wrapper;
+
+ const defaultProps = {
+ organization: MOCK_ORGANIZATION,
+ };
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMountExtended(OrganizationsListItem, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ });
+ };
+
+ const findGlAvatarLabeled = () => wrapper.findComponent(GlAvatarLabeled);
+ const findHTMLOrganizationDescription = () =>
+ wrapper.findByTestId('organization-description-html');
+
+ describe('template', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders GlAvatarLabeled with correct data', () => {
+ expect(findGlAvatarLabeled().attributes()).toMatchObject({
+ 'entity-id': getIdFromGraphQLId(MOCK_ORGANIZATION.id).toString(),
+ 'entity-name': MOCK_ORGANIZATION.name,
+ src: MOCK_ORGANIZATION.avatarUrl,
+ label: MOCK_ORGANIZATION.name,
+ labellink: MOCK_ORGANIZATION.webUrl,
+ });
+ });
+ });
+
+ describe('organization description', () => {
+ const descriptionHtml = '<p>Foo bar</p>';
+
+ describe('is a HTML description', () => {
+ beforeEach(() => {
+ createComponent({ organization: { ...MOCK_ORGANIZATION, descriptionHtml } });
+ });
+
+ it('renders HTML description', () => {
+ expect(findHTMLOrganizationDescription().html()).toContain(descriptionHtml);
+ });
+ });
+
+ describe('is not a HTML description', () => {
+ beforeEach(() => {
+ createComponent({
+ organization: { ...MOCK_ORGANIZATION, descriptionHtml: null },
+ });
+ });
+
+ it('does not render HTML description', () => {
+ expect(findHTMLOrganizationDescription().exists()).toBe(false);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/organizations/index/components/organizations_list_spec.js b/spec/frontend/organizations/index/components/organizations_list_spec.js
new file mode 100644
index 00000000000..0b59c212314
--- /dev/null
+++ b/spec/frontend/organizations/index/components/organizations_list_spec.js
@@ -0,0 +1,28 @@
+import { shallowMount } from '@vue/test-utils';
+import OrganizationsList from '~/organizations/index/components/organizations_list.vue';
+import OrganizationsListItem from '~/organizations/index/components/organizations_list_item.vue';
+import { organizations } from '~/organizations/mock_data';
+
+describe('OrganizationsList', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(OrganizationsList, {
+ propsData: {
+ organizations,
+ },
+ });
+ };
+
+ const findAllOrganizationsListItem = () => wrapper.findAllComponents(OrganizationsListItem);
+
+ describe('template', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders a list item for each organization', () => {
+ expect(findAllOrganizationsListItem()).toHaveLength(organizations.length);
+ });
+ });
+});
diff --git a/spec/frontend/organizations/index/components/organizations_view_spec.js b/spec/frontend/organizations/index/components/organizations_view_spec.js
new file mode 100644
index 00000000000..85a1c11a2b1
--- /dev/null
+++ b/spec/frontend/organizations/index/components/organizations_view_spec.js
@@ -0,0 +1,57 @@
+import { GlLoadingIcon, GlEmptyState } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { organizations } from '~/organizations/mock_data';
+import OrganizationsView from '~/organizations/index/components/organizations_view.vue';
+import OrganizationsList from '~/organizations/index/components/organizations_list.vue';
+import { MOCK_NEW_ORG_URL, MOCK_ORG_EMPTY_STATE_SVG } from '../mock_data';
+
+describe('OrganizationsView', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(OrganizationsView, {
+ propsData: {
+ ...props,
+ },
+ provide: {
+ newOrganizationUrl: MOCK_NEW_ORG_URL,
+ organizationsEmptyStateSvgPath: MOCK_ORG_EMPTY_STATE_SVG,
+ },
+ });
+ };
+
+ const findGlLoading = () => wrapper.findComponent(GlLoadingIcon);
+ const findOrganizationsList = () => wrapper.findComponent(OrganizationsList);
+ const findGlEmptyState = () => wrapper.findComponent(GlEmptyState);
+
+ describe.each`
+ description | loading | orgsData | emptyStateSvg | emptyStateUrl
+ ${'when loading'} | ${true} | ${[]} | ${false} | ${false}
+ ${'when not loading and has organizations'} | ${false} | ${organizations} | ${false} | ${false}
+ ${'when not loading and has no organizations'} | ${false} | ${[]} | ${MOCK_ORG_EMPTY_STATE_SVG} | ${MOCK_NEW_ORG_URL}
+ `('$description', ({ loading, orgsData, emptyStateSvg, emptyStateUrl }) => {
+ beforeEach(() => {
+ createComponent({ loading, organizations: orgsData });
+ });
+
+ it(`does ${loading ? '' : 'not '}render loading icon`, () => {
+ expect(findGlLoading().exists()).toBe(loading);
+ });
+
+ it(`does ${orgsData.length ? '' : 'not '}render organizations list`, () => {
+ expect(findOrganizationsList().exists()).toBe(Boolean(orgsData.length));
+ });
+
+ it(`does ${emptyStateSvg ? '' : 'not '}render empty state with SVG`, () => {
+ expect(findGlEmptyState().exists() && findGlEmptyState().attributes('svgpath')).toBe(
+ emptyStateSvg,
+ );
+ });
+
+ it(`does ${emptyStateUrl ? '' : 'not '}render empty state with URL`, () => {
+ expect(
+ findGlEmptyState().exists() && findGlEmptyState().attributes('primarybuttonlink'),
+ ).toBe(emptyStateUrl);
+ });
+ });
+});
diff --git a/spec/frontend/organizations/index/mock_data.js b/spec/frontend/organizations/index/mock_data.js
new file mode 100644
index 00000000000..50b20b4f79c
--- /dev/null
+++ b/spec/frontend/organizations/index/mock_data.js
@@ -0,0 +1,3 @@
+export const MOCK_NEW_ORG_URL = 'gitlab.com/organizations/new';
+
+export const MOCK_ORG_EMPTY_STATE_SVG = 'illustrations/empty-state/empty-organizations-md.svg';
diff --git a/spec/frontend/organizations/new/components/app_spec.js b/spec/frontend/organizations/new/components/app_spec.js
new file mode 100644
index 00000000000..06d30ad6b12
--- /dev/null
+++ b/spec/frontend/organizations/new/components/app_spec.js
@@ -0,0 +1,113 @@
+import VueApollo from 'vue-apollo';
+import Vue, { nextTick } from 'vue';
+
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import App from '~/organizations/new/components/app.vue';
+import resolvers from '~/organizations/shared/graphql/resolvers';
+import NewEditForm from '~/organizations/shared/components/new_edit_form.vue';
+import { visitUrlWithAlerts } from '~/lib/utils/url_utility';
+import { createOrganizationResponse } from '~/organizations/mock_data';
+import { createAlert } from '~/alert';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+
+Vue.use(VueApollo);
+jest.useFakeTimers();
+
+jest.mock('~/lib/utils/url_utility');
+jest.mock('~/alert');
+
+describe('OrganizationNewApp', () => {
+ let wrapper;
+ let mockApollo;
+
+ const createComponent = ({ mockResolvers = resolvers } = {}) => {
+ mockApollo = createMockApollo([], mockResolvers);
+
+ wrapper = shallowMountExtended(App, { apolloProvider: mockApollo });
+ };
+
+ const findForm = () => wrapper.findComponent(NewEditForm);
+ const submitForm = async () => {
+ findForm().vm.$emit('submit', { name: 'Foo bar', path: 'foo-bar' });
+ await nextTick();
+ };
+
+ afterEach(() => {
+ mockApollo = null;
+ });
+
+ it('renders form', () => {
+ createComponent();
+
+ expect(findForm().exists()).toBe(true);
+ });
+
+ describe('when form is submitted', () => {
+ describe('when API is loading', () => {
+ beforeEach(async () => {
+ const mockResolvers = {
+ Mutation: {
+ createOrganization: jest.fn().mockReturnValueOnce(new Promise(() => {})),
+ },
+ };
+
+ createComponent({ mockResolvers });
+
+ await submitForm();
+ });
+
+ it('sets `NewEditForm` `loading` prop to `true`', () => {
+ expect(findForm().props('loading')).toBe(true);
+ });
+ });
+
+ describe('when API request is successful', () => {
+ beforeEach(async () => {
+ createComponent();
+ await submitForm();
+ jest.runAllTimers();
+ await waitForPromises();
+ });
+
+ it('redirects user to organization path', () => {
+ expect(visitUrlWithAlerts).toHaveBeenCalledWith(
+ createOrganizationResponse.organization.path,
+ [
+ {
+ id: 'organization-successfully-created',
+ title: 'Organization successfully created.',
+ message: 'You can now start using your new organization.',
+ variant: 'success',
+ },
+ ],
+ );
+ });
+ });
+
+ describe('when API request is not successful', () => {
+ const error = new Error();
+
+ beforeEach(async () => {
+ const mockResolvers = {
+ Mutation: {
+ createOrganization: jest.fn().mockRejectedValueOnce(error),
+ },
+ };
+
+ createComponent({ mockResolvers });
+ await submitForm();
+ jest.runAllTimers();
+ await waitForPromises();
+ });
+
+ it('displays error alert', () => {
+ expect(createAlert).toHaveBeenCalledWith({
+ message: 'An error occurred creating an organization. Please try again.',
+ error,
+ captureError: true,
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/organizations/shared/components/new_edit_form_spec.js b/spec/frontend/organizations/shared/components/new_edit_form_spec.js
new file mode 100644
index 00000000000..43c099fbb1c
--- /dev/null
+++ b/spec/frontend/organizations/shared/components/new_edit_form_spec.js
@@ -0,0 +1,112 @@
+import { GlButton, GlInputGroupText, GlTruncate } from '@gitlab/ui';
+
+import NewEditForm from '~/organizations/shared/components/new_edit_form.vue';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+
+describe('NewEditForm', () => {
+ let wrapper;
+
+ const defaultProvide = {
+ organizationsPath: '/-/organizations',
+ rootUrl: 'http://127.0.0.1:3000/',
+ };
+
+ const defaultPropsData = {
+ loading: false,
+ };
+
+ const createComponent = ({ propsData = {} } = {}) => {
+ wrapper = mountExtended(NewEditForm, {
+ attachTo: document.body,
+ provide: defaultProvide,
+ propsData: {
+ ...defaultPropsData,
+ ...propsData,
+ },
+ });
+ };
+
+ const findNameField = () => wrapper.findByLabelText('Organization name');
+ const findUrlField = () => wrapper.findByLabelText('Organization URL');
+ const submitForm = async () => {
+ await wrapper.findByRole('button', { name: 'Create organization' }).trigger('click');
+ };
+
+ it('renders `Organization name` field', () => {
+ createComponent();
+
+ expect(findNameField().exists()).toBe(true);
+ });
+
+ it('renders `Organization URL` field', () => {
+ createComponent();
+
+ expect(wrapper.findComponent(GlInputGroupText).findComponent(GlTruncate).props('text')).toBe(
+ 'http://127.0.0.1:3000/-/organizations/',
+ );
+ expect(findUrlField().exists()).toBe(true);
+ });
+
+ describe('when form is submitted without filling in required fields', () => {
+ beforeEach(async () => {
+ createComponent();
+ await submitForm();
+ });
+
+ it('shows error messages', () => {
+ expect(wrapper.findByText('Organization name is required.').exists()).toBe(true);
+ expect(wrapper.findByText('Organization URL is required.').exists()).toBe(true);
+ });
+ });
+
+ describe('when form is submitted successfully', () => {
+ beforeEach(async () => {
+ createComponent();
+
+ await findNameField().setValue('Foo bar');
+ await findUrlField().setValue('foo-bar');
+ await submitForm();
+ });
+
+ it('emits `submit` event with form values', () => {
+ expect(wrapper.emitted('submit')).toEqual([[{ name: 'Foo bar', path: 'foo-bar' }]]);
+ });
+ });
+
+ describe('when `Organization URL` has not been manually set', () => {
+ beforeEach(async () => {
+ createComponent();
+
+ await findNameField().setValue('Foo bar');
+ await submitForm();
+ });
+
+ it('sets `Organization URL` when typing in `Organization name`', () => {
+ expect(findUrlField().element.value).toBe('foo-bar');
+ });
+ });
+
+ describe('when `Organization URL` has been manually set', () => {
+ beforeEach(async () => {
+ createComponent();
+
+ await findUrlField().setValue('foo-bar-baz');
+ await findNameField().setValue('Foo bar');
+ await submitForm();
+ });
+
+ it('does not modify `Organization URL` when typing in `Organization name`', () => {
+ expect(findUrlField().element.value).toBe('foo-bar-baz');
+ });
+ });
+
+ describe('when `loading` prop is `true`', () => {
+ beforeEach(() => {
+ createComponent({ propsData: { loading: true } });
+ });
+
+ it('shows button with loading icon', () => {
+ expect(wrapper.findComponent(GlButton).props('loading')).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/__snapshots__/packages_list_app_spec.js.snap b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/__snapshots__/packages_list_app_spec.js.snap
index 7f26ed778a5..6af9e38192e 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/__snapshots__/packages_list_app_spec.js.snap
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/__snapshots__/packages_list_app_spec.js.snap
@@ -9,51 +9,44 @@ exports[`packages_list_app renders 1`] = `
<infrastructure-search-stub />
<div>
<section
- class="empty-state gl-display-flex gl-flex-direction-column gl-text-center"
+ class="gl-display-flex gl-empty-state gl-flex-direction-column gl-text-center"
>
<div
class="gl-max-w-full"
>
- <div
- class="svg-250 svg-content"
- >
- <img
- alt=""
- class="gl-dark-invert-keep-hue gl-max-w-full"
- role="img"
- src="helpSvg"
- />
- </div>
+ <img
+ alt=""
+ class="gl-dark-invert-keep-hue gl-max-w-full"
+ height="144"
+ role="img"
+ src="helpSvg"
+ />
</div>
<div
- class="gl-m-auto gl-max-w-full"
+ class="gl-empty-state-content gl-m-auto gl-mx-auto gl-my-0 gl-p-5"
data-testid="gl-empty-state-content"
>
- <div
- class="gl-mx-auto gl-my-0 gl-p-5"
+ <h1
+ class="gl-font-size-h-display gl-line-height-36 gl-mb-0 gl-mt-0 h4"
>
- <h1
- class="gl-font-size-h-display gl-line-height-36 h4"
- >
- There are no packages yet
- </h1>
- <p
- class="gl-mt-3"
+ There are no packages yet
+ </h1>
+ <p
+ class="gl-mb-0 gl-mt-4"
+ >
+ Learn how to
+ <b-link-stub
+ class="gl-link"
+ href="helpUrl"
+ target="_blank"
>
- Learn how to
- <b-link-stub
- class="gl-link"
- href="helpUrl"
- target="_blank"
- >
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
- <div
- class="gl-display-flex gl-flex-wrap gl-justify-content-center"
- />
- </div>
+ publish and share your packages
+ </b-link-stub>
+ with GitLab.
+ </p>
+ <div
+ class="gl-display-flex gl-flex-wrap gl-justify-content-center gl-mt-5"
+ />
</div>
</section>
</div>
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap
index 05a5a718e52..17acf7381c0 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap
@@ -78,7 +78,7 @@ exports[`PypiInstallation renders all the messages 1`] = `
tabindex="-1"
>
<span
- class="gl-bg-gray-50! gl-new-dropdown-item-content"
+ class="gl-new-dropdown-item-content"
>
<svg
aria-hidden="true"
diff --git a/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js b/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js
index 0037934cbc5..be50858bc88 100644
--- a/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js
+++ b/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js
@@ -29,6 +29,7 @@ describe('BulkImportsHistoryApp', () => {
source_full_path: 'top-level-group-12',
destination_full_path: 'h5bp/top-level-group-12',
destination_name: 'top-level-group-12',
+ destination_slug: 'top-level-group-12',
destination_namespace: 'h5bp',
created_at: '2021-07-08T10:03:44.743Z',
failures: [],
@@ -40,6 +41,7 @@ describe('BulkImportsHistoryApp', () => {
entity_type: 'project',
source_full_path: 'autodevops-demo',
destination_name: 'autodevops-demo',
+ destination_slug: 'autodevops-demo',
destination_full_path: 'some-group/autodevops-demo',
destination_namespace: 'flightjs',
parent_id: null,
@@ -141,6 +143,25 @@ describe('BulkImportsHistoryApp', () => {
);
});
+ it('resets page to 1 when page size is changed', async () => {
+ const NEW_PAGE_SIZE = 4;
+
+ mock.onGet(API_URL).reply(200, DUMMY_RESPONSE, DEFAULT_HEADERS);
+ createComponent();
+ await axios.waitForAll();
+ wrapper.findComponent(PaginationBar).vm.$emit('set-page', 2);
+ await axios.waitForAll();
+ mock.resetHistory();
+
+ wrapper.findComponent(PaginationBar).vm.$emit('set-page-size', NEW_PAGE_SIZE);
+ await axios.waitForAll();
+
+ expect(mock.history.get.length).toBe(1);
+ expect(mock.history.get[0].params).toStrictEqual(
+ expect.objectContaining({ per_page: NEW_PAGE_SIZE, page: 1 }),
+ );
+ });
+
it('sets up the local storage sync correctly', async () => {
const NEW_PAGE_SIZE = 4;
@@ -154,7 +175,7 @@ describe('BulkImportsHistoryApp', () => {
expect(findLocalStorageSync().props('value')).toBe(NEW_PAGE_SIZE);
});
- it('renders correct url for destination group when relative_url is empty', async () => {
+ it('renders link to destination_full_path for destination group', async () => {
createComponent({ shallow: false });
await axios.waitForAll();
@@ -163,14 +184,17 @@ describe('BulkImportsHistoryApp', () => {
);
});
- it('renders loading icon when destination namespace is not defined', async () => {
+ it('renders destination as text when destination_full_path is not defined', async () => {
const RESPONSE = [{ ...DUMMY_RESPONSE[0], destination_full_path: null }];
mock.onGet(API_URL).reply(HTTP_STATUS_OK, RESPONSE, DEFAULT_HEADERS);
createComponent({ shallow: false });
await axios.waitForAll();
- expect(wrapper.find('tbody tr').findComponent(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.find('tbody tr a').exists()).toBe(false);
+ expect(wrapper.find('tbody tr span').text()).toBe(
+ `${DUMMY_RESPONSE[0].destination_namespace}/${DUMMY_RESPONSE[0].destination_slug}/`,
+ );
});
it('adds slash to group urls', async () => {
diff --git a/spec/frontend/pages/projects/find_file/ref_switcher/ref_switcher_utils_spec.js b/spec/frontend/pages/projects/find_file/ref_switcher/ref_switcher_utils_spec.js
index ef2e5d779d8..62eae19ce4c 100644
--- a/spec/frontend/pages/projects/find_file/ref_switcher/ref_switcher_utils_spec.js
+++ b/spec/frontend/pages/projects/find_file/ref_switcher/ref_switcher_utils_spec.js
@@ -10,7 +10,7 @@ describe('generateRefDestinationPath', () => {
${`${projectRootPath}/-/find_file/flightjs/Flight`} | ${`http://test.host/${projectRootPath}/-/find_file/${selectedRef}`}
${`${projectRootPath}/-/find_file/test/test1?test=something`} | ${`http://test.host/${projectRootPath}/-/find_file/${selectedRef}?test=something`}
${`${projectRootPath}/-/find_file/simpletest?test=something&test=it`} | ${`http://test.host/${projectRootPath}/-/find_file/${selectedRef}?test=something&test=it`}
- ${`${projectRootPath}/-/find_file/some_random_char?test=something&test[]=it&test[]=is`} | ${`http://test.host/${projectRootPath}/-/find_file/${selectedRef}?test=something&test[]=it&test[]=is`}
+ ${`${projectRootPath}/-/find_file/some_random_char?test=something&test[]=it&test[]=is`} | ${`http://test.host/${projectRootPath}/-/find_file/${selectedRef}?test=something&test%5B%5D=it&test%5B%5D=is`}
`('generates the correct destination path for $currentPath', ({ currentPath, result }) => {
setWindowLocation(currentPath);
expect(generateRefDestinationPath(selectedRef, '/-/find_file')).toBe(result);
@@ -36,4 +36,11 @@ describe('generateRefDestinationPath', () => {
`http://test.host/${projectRootPath}/-/find_file/flightjs/Flight`,
);
});
+
+ it('removes ref_type from the destination url if ref is neither a branch or tag', () => {
+ setWindowLocation(`${projectRootPath}/-/find_file/somebranch?ref_type=heads`);
+ expect(generateRefDestinationPath('8e90e533', '/-/find_file')).toBe(
+ `http://test.host/${projectRootPath}/-/find_file/8e90e533`,
+ );
+ });
});
diff --git a/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js b/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
index f5a7dfe6d11..50d09481b93 100644
--- a/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
+++ b/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
@@ -1,6 +1,5 @@
-import { GlIcon } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import { trimText } from 'helpers/text_helper';
import IntervalPatternInput from '~/pages/projects/pipeline_schedules/shared/components/interval_pattern_input.vue';
@@ -21,15 +20,15 @@ describe('Interval Pattern Input Component', () => {
const everyDayKey = 'everyDay';
const cronIntervalNotInPreset = `0 12 * * *`;
- const findEveryDayRadio = () => wrapper.find(`[data-testid=${everyDayKey}]`);
- const findEveryWeekRadio = () => wrapper.find('[data-testid="everyWeek"]');
- const findEveryMonthRadio = () => wrapper.find('[data-testid="everyMonth"]');
- const findCustomRadio = () => wrapper.find(`[data-testid="${customKey}"]`);
+ const findEveryDayRadio = () => wrapper.findByTestId(everyDayKey);
+ const findEveryWeekRadio = () => wrapper.findByTestId('everyWeek');
+ const findEveryMonthRadio = () => wrapper.findByTestId('everyMonth');
+ const findCustomRadio = () => wrapper.findByTestId(customKey);
const findCustomInput = () => wrapper.find('#schedule_cron');
const findAllLabels = () => wrapper.findAll('label');
const findSelectedRadio = () =>
wrapper.findAll('input[type="radio"]').wrappers.find((x) => x.element.checked);
- const findIcon = () => wrapper.findComponent(GlIcon);
+ const findIcon = () => wrapper.findByTestId('daily-limit');
const findSelectedRadioKey = () => findSelectedRadio()?.attributes('data-testid');
const selectEveryDayRadio = () => findEveryDayRadio().setChecked(true);
const selectEveryWeekRadio = () => findEveryWeekRadio().setChecked(true);
@@ -37,7 +36,7 @@ describe('Interval Pattern Input Component', () => {
const selectCustomRadio = () => findCustomRadio().setChecked(true);
const createWrapper = (props = {}, data = {}) => {
- wrapper = mount(IntervalPatternInput, {
+ wrapper = mountExtended(IntervalPatternInput, {
propsData: { ...props },
data() {
return {
@@ -132,7 +131,7 @@ describe('Interval Pattern Input Component', () => {
'Every day (at 4:00am)',
'Every week (Monday at 4:00am)',
'Every month (Day 1 at 4:00am)',
- 'Custom (Learn more.)',
+ 'Custom',
]);
});
});
diff --git a/spec/frontend/performance_bar/components/request_warning_spec.js b/spec/frontend/performance_bar/components/request_warning_spec.js
index 7b6d8ff695d..a4f0d388e33 100644
--- a/spec/frontend/performance_bar/components/request_warning_spec.js
+++ b/spec/frontend/performance_bar/components/request_warning_spec.js
@@ -1,6 +1,9 @@
+import Vue from 'vue';
import { shallowMount } from '@vue/test-utils';
import RequestWarning from '~/performance_bar/components/request_warning.vue';
+Vue.config.ignoredElements = ['gl-emoji'];
+
describe('request warning', () => {
let wrapper;
const htmlId = 'request-123';
@@ -16,8 +19,8 @@ describe('request warning', () => {
});
it('adds a warning emoji with the correct ID', () => {
- expect(wrapper.find('span[id]').attributes('id')).toEqual(htmlId);
- expect(wrapper.find('span[id] gl-emoji').element.dataset.name).toEqual('warning');
+ expect(wrapper.find('span gl-emoji[id]').attributes('id')).toEqual(htmlId);
+ expect(wrapper.find('span gl-emoji[id]').element.dataset.name).toEqual('warning');
});
});
diff --git a/spec/frontend/performance_bar/index_spec.js b/spec/frontend/performance_bar/index_spec.js
index 1849c373326..cfc752655bd 100644
--- a/spec/frontend/performance_bar/index_spec.js
+++ b/spec/frontend/performance_bar/index_spec.js
@@ -1,3 +1,4 @@
+import Vue from 'vue';
import MockAdapter from 'axios-mock-adapter';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import axios from '~/lib/utils/axios_utils';
@@ -6,6 +7,8 @@ import '~/performance_bar/components/performance_bar_app.vue';
import performanceBar from '~/performance_bar';
import PerformanceBarService from '~/performance_bar/services/performance_bar_service';
+Vue.config.ignoredElements = ['gl-emoji'];
+
jest.mock('~/performance_bar/performance_bar_log');
describe('performance bar wrapper', () => {
diff --git a/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap b/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
index 479530c1d38..b39644c51eb 100644
--- a/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
+++ b/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
@@ -24,7 +24,7 @@ exports[`Project remove modal initialized matches the snapshot 1`] = `
<gl-button-stub
buttontextclasses=""
category="primary"
- data-qa-selector="delete_button"
+ data-testid="delete-button"
icon=""
size="medium"
variant="danger"
diff --git a/spec/frontend/projects/project_find_file_spec.js b/spec/frontend/projects/project_find_file_spec.js
index efc9d411a98..9dae2bdc5bb 100644
--- a/spec/frontend/projects/project_find_file_spec.js
+++ b/spec/frontend/projects/project_find_file_spec.js
@@ -30,12 +30,13 @@ describe('ProjectFindFile', () => {
let element;
let mock;
- const getProjectFindFileInstance = () =>
- new ProjectFindFile(element, {
- url: FILE_FIND_URL,
+ const getProjectFindFileInstance = (extraOptions) => {
+ return new ProjectFindFile(element, {
treeUrl: FIND_TREE_URL,
blobUrlTemplate: BLOB_URL_TEMPLATE,
+ ...extraOptions,
});
+ };
const findFiles = () =>
element
@@ -64,9 +65,6 @@ describe('ProjectFindFile', () => {
HTTP_STATUS_OK,
files.map((x) => x.path),
);
- getProjectFindFileInstance(); // This triggers a load / axios call + subsequent render in the constructor
-
- return waitForPromises();
});
afterEach(() => {
@@ -75,19 +73,44 @@ describe('ProjectFindFile', () => {
sanitize.mockClear();
});
- it('loads and renders elements from remote server', () => {
- expect(findFiles()).toEqual(
- files.map(({ path, escaped }) => ({
- text: path,
- href: `${BLOB_URL_TEMPLATE}/${escaped}`,
- })),
- );
+ describe('rendering without refType', () => {
+ beforeEach(() => {
+ const instance = getProjectFindFileInstance();
+ instance.load(FILE_FIND_URL); // axios call + subsequent render
+ return waitForPromises();
+ });
+
+ it('loads and renders elements from remote server', () => {
+ expect(findFiles()).toEqual(
+ files.map(({ path, escaped }) => ({
+ text: path,
+ href: `${BLOB_URL_TEMPLATE}/${escaped}`,
+ })),
+ );
+ });
+
+ it('sanitizes search text', () => {
+ const searchText = element.find('.file-finder-input').val();
+
+ expect(sanitize).toHaveBeenCalledTimes(1);
+ expect(sanitize).toHaveBeenCalledWith(searchText);
+ });
});
- it('sanitizes search text', () => {
- const searchText = element.find('.file-finder-input').val();
+ describe('with refType option', () => {
+ beforeEach(() => {
+ const instance = getProjectFindFileInstance({ refType: 'heads' });
+ instance.load(FILE_FIND_URL); // axios call + subsequent render
+ return waitForPromises();
+ });
- expect(sanitize).toHaveBeenCalledTimes(1);
- expect(sanitize).toHaveBeenCalledWith(searchText);
+ it('loads and renders elements from remote server', () => {
+ expect(findFiles()).toEqual(
+ files.map(({ path, escaped }) => ({
+ text: path,
+ href: `${BLOB_URL_TEMPLATE}/${escaped}?ref_type=heads`,
+ })),
+ );
+ });
});
});
diff --git a/spec/frontend/projects/settings/components/new_access_dropdown_spec.js b/spec/frontend/projects/settings/components/new_access_dropdown_spec.js
index 0ed2e51e8c3..7c8cc1bb38d 100644
--- a/spec/frontend/projects/settings/components/new_access_dropdown_spec.js
+++ b/spec/frontend/projects/settings/components/new_access_dropdown_spec.js
@@ -14,11 +14,13 @@ import AccessDropdown, { i18n } from '~/projects/settings/components/access_drop
import { ACCESS_LEVELS, LEVEL_TYPES } from '~/projects/settings/constants';
jest.mock('~/projects/settings/api/access_dropdown_api', () => ({
- getGroups: jest.fn().mockResolvedValue([
- { id: 4, name: 'group4' },
- { id: 5, name: 'group5' },
- { id: 6, name: 'group6' },
- ]),
+ getGroups: jest.fn().mockResolvedValue({
+ data: [
+ { id: 4, name: 'group4' },
+ { id: 5, name: 'group5' },
+ { id: 6, name: 'group6' },
+ ],
+ }),
getUsers: jest.fn().mockResolvedValue({
data: [
{ id: 7, name: 'user7' },
diff --git a/spec/frontend/ref/components/ambiguous_ref_modal_spec.js b/spec/frontend/ref/components/ambiguous_ref_modal_spec.js
new file mode 100644
index 00000000000..bb3fd0fa1f0
--- /dev/null
+++ b/spec/frontend/ref/components/ambiguous_ref_modal_spec.js
@@ -0,0 +1,64 @@
+import { GlModal, GlSprintf } from '@gitlab/ui';
+import AmbiguousRefModal from '~/ref/components/ambiguous_ref_modal.vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { stubComponent, RENDER_ALL_SLOTS_TEMPLATE } from 'helpers/stub_component';
+import { visitUrl } from '~/lib/utils/url_utility';
+import { TEST_HOST } from 'spec/test_constants';
+
+jest.mock('~/lib/utils/url_utility');
+
+describe('AmbiguousRefModal component', () => {
+ let wrapper;
+ const showModalSpy = jest.fn();
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(AmbiguousRefModal, {
+ propsData: { refName: 'main' },
+ stubs: {
+ GlModal: stubComponent(GlModal, {
+ methods: {
+ show: showModalSpy,
+ },
+ template: RENDER_ALL_SLOTS_TEMPLATE,
+ }),
+ GlSprintf,
+ },
+ });
+ };
+
+ beforeEach(() => createComponent());
+
+ const findModal = () => wrapper.findComponent(GlModal);
+ const findByText = (text) => wrapper.findByText(text);
+ const findViewTagButton = () => findByText('View tag');
+ const findViewBranchButton = () => findByText('View branch');
+
+ it('renders a GlModal component with the correct props', () => {
+ expect(showModalSpy).toHaveBeenCalled();
+ expect(findModal().props('title')).toBe('Which reference do you want to view?');
+ });
+
+ it('renders a description', () => {
+ expect(wrapper.text()).toContain('There is a branch and a tag with the same name of main.');
+ expect(wrapper.text()).toContain('Which reference would you like to view?');
+ });
+
+ it('renders action buttons', () => {
+ expect(findViewTagButton().exists()).toBe(true);
+ expect(findViewBranchButton().exists()).toBe(true);
+ });
+
+ describe('when clicking the action buttons', () => {
+ it('redirects to the tag ref when tag button is clicked', () => {
+ findViewTagButton().vm.$emit('click');
+
+ expect(visitUrl).toHaveBeenCalledWith(`${TEST_HOST}/?ref_type=tags`);
+ });
+
+ it('redirects to the branch ref when branch button is clicked', () => {
+ findViewBranchButton().vm.$emit('click');
+
+ expect(visitUrl).toHaveBeenCalledWith(`${TEST_HOST}/?ref_type=heads`);
+ });
+ });
+});
diff --git a/spec/frontend/ref/components/ref_selector_spec.js b/spec/frontend/ref/components/ref_selector_spec.js
index 12ca0d053e9..26010a1cfa6 100644
--- a/spec/frontend/ref/components/ref_selector_spec.js
+++ b/spec/frontend/ref/components/ref_selector_spec.js
@@ -23,13 +23,17 @@ import {
REF_TYPE_BRANCHES,
REF_TYPE_TAGS,
REF_TYPE_COMMITS,
+ BRANCH_REF_TYPE_ICON,
+ TAG_REF_TYPE_ICON,
} from '~/ref/constants';
import createStore from '~/ref/stores/';
Vue.use(Vuex);
describe('Ref selector component', () => {
- const fixtures = { branches, tags, commit };
+ const branchRefTypeMock = { name: 'refs/heads/test_branch' };
+ const tagRefTypeMock = { name: 'refs/tags/test_tag' };
+ const fixtures = { branches: [branchRefTypeMock, tagRefTypeMock, ...branches], tags, commit };
const projectId = '8';
const totalBranchesCount = 123;
@@ -614,6 +618,19 @@ describe('Ref selector component', () => {
});
it.each`
+ selectedBranch | icon
+ ${branchRefTypeMock.name} | ${BRANCH_REF_TYPE_ICON}
+ ${tagRefTypeMock.name} | ${TAG_REF_TYPE_ICON}
+ ${branches[0].name} | ${''}
+ `('renders the correct icon for the selected ref', async ({ selectedBranch, icon }) => {
+ createComponent();
+ findListbox().vm.$emit('select', selectedBranch);
+ await nextTick();
+
+ expect(findListbox().props('icon')).toBe(icon);
+ });
+
+ it.each`
enabledRefType | findVisibleSection | findHiddenSections
${REF_TYPE_BRANCHES} | ${findBranchesSection} | ${[findTagsSection, findCommitsSection]}
${REF_TYPE_TAGS} | ${findTagsSection} | ${[findBranchesSection, findCommitsSection]}
diff --git a/spec/frontend/ref/init_ambiguous_ref_modal_spec.js b/spec/frontend/ref/init_ambiguous_ref_modal_spec.js
new file mode 100644
index 00000000000..322978f598f
--- /dev/null
+++ b/spec/frontend/ref/init_ambiguous_ref_modal_spec.js
@@ -0,0 +1,48 @@
+import Vue from 'vue';
+import initAmbiguousRefModal from '~/ref/init_ambiguous_ref_modal';
+import AmbiguousRefModal from '~/ref/components/ambiguous_ref_modal.vue';
+import { setHTMLFixture } from 'helpers/fixtures';
+import setWindowLocation from 'helpers/set_window_location_helper';
+
+const generateFixture = (isAmbiguous) => {
+ return `<div id="js-ambiguous-ref-modal" data-ambiguous="${isAmbiguous}" data-ref="main"></div>`;
+};
+
+const init = ({ isAmbiguous, htmlFixture = generateFixture(isAmbiguous) }) => {
+ setHTMLFixture(htmlFixture);
+ initAmbiguousRefModal();
+};
+
+beforeEach(() => jest.spyOn(Vue, 'extend'));
+
+describe('initAmbiguousRefModal', () => {
+ it('inits a new AmbiguousRefModal Vue component', () => {
+ init({ isAmbiguous: true });
+ expect(Vue.extend).toHaveBeenCalledWith(AmbiguousRefModal);
+ });
+
+ it.each(['<div></div>', '', null])(
+ 'does not render a new AmbiguousRefModal Vue component when root element is %s',
+ (htmlFixture) => {
+ init({ isAmbiguous: true, htmlFixture });
+
+ expect(Vue.extend).not.toHaveBeenCalledWith(AmbiguousRefModal);
+ },
+ );
+
+  it('does not render a new AmbiguousRefModal Vue component when "ambiguous" data attribute is "false"', () => {
+ init({ isAmbiguous: false });
+
+ expect(Vue.extend).not.toHaveBeenCalledWith(AmbiguousRefModal);
+ });
+
+ it.each(['tags', 'heads'])(
+ 'does not render a new AmbiguousRefModal Vue component when "ref_type" param is set to %s',
+ (refType) => {
+ setWindowLocation(`?ref_type=${refType}`);
+ init({ isAmbiguous: true });
+
+ expect(Vue.extend).not.toHaveBeenCalledWith(AmbiguousRefModal);
+ },
+ );
+});
diff --git a/spec/frontend/releases/components/tag_field_new_spec.js b/spec/frontend/releases/components/tag_field_new_spec.js
index 3468338b8a7..e155cdbbd3c 100644
--- a/spec/frontend/releases/components/tag_field_new_spec.js
+++ b/spec/frontend/releases/components/tag_field_new_spec.js
@@ -1,4 +1,4 @@
-import { GlFormGroup, GlDropdown, GlPopover } from '@gitlab/ui';
+import { GlFormGroup, GlTruncate, GlPopover } from '@gitlab/ui';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
@@ -60,7 +60,7 @@ describe('releases/components/tag_field_new', () => {
afterEach(() => mock.restore());
const findTagNameFormGroup = () => wrapper.findComponent(GlFormGroup);
- const findTagNameInput = () => wrapper.findComponent(GlDropdown);
+ const findTagNameInputText = () => wrapper.findComponent(GlTruncate);
const findTagNamePopover = () => wrapper.findComponent(GlPopover);
const findTagNameSearch = () => wrapper.findComponent(TagSearch);
const findTagNameCreate = () => wrapper.findComponent(TagCreate);
@@ -99,9 +99,10 @@ describe('releases/components/tag_field_new', () => {
it("updates the store's release.tagName property", async () => {
findTagNameCreate().vm.$emit('change', NONEXISTENT_TAG_NAME);
await findTagNameCreate().vm.$emit('create');
-
expect(store.state.editNew.release.tagName).toBe(NONEXISTENT_TAG_NAME);
- expect(findTagNameInput().props('text')).toBe(NONEXISTENT_TAG_NAME);
+
+ const text = findTagNameInputText();
+ expect(text.props('text')).toBe(NONEXISTENT_TAG_NAME);
});
});
@@ -114,8 +115,10 @@ describe('releases/components/tag_field_new', () => {
});
it("updates the store's release.tagName property", () => {
+ const buttonText = findTagNameInputText();
expect(store.state.editNew.release.tagName).toBe(updatedTagName);
- expect(findTagNameInput().props('text')).toBe(updatedTagName);
+
+ expect(buttonText.props('text')).toBe(updatedTagName);
});
it('hides the "Create from" field', () => {
diff --git a/spec/frontend/releases/stores/modules/detail/actions_spec.js b/spec/frontend/releases/stores/modules/detail/actions_spec.js
index 1d164b9f5c1..d18437ccec3 100644
--- a/spec/frontend/releases/stores/modules/detail/actions_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/actions_spec.js
@@ -1,9 +1,11 @@
import { cloneDeep } from 'lodash';
import originalOneReleaseForEditingQueryResponse from 'test_fixtures/graphql/releases/graphql/queries/one_release_for_editing.query.graphql.json';
import testAction from 'helpers/vuex_action_helper';
+import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import { getTag } from '~/api/tags_api';
import { createAlert } from '~/alert';
import { redirectTo } from '~/lib/utils/url_utility'; // eslint-disable-line import/no-deprecated
+import AccessorUtilities from '~/lib/utils/accessor';
import { s__ } from '~/locale';
import { ASSET_LINK_TYPE } from '~/releases/constants';
import createReleaseAssetLinkMutation from '~/releases/graphql/mutations/create_release_link.mutation.graphql';
@@ -20,6 +22,7 @@ jest.mock('~/api/tags_api');
jest.mock('~/alert');
+jest.mock('~/lib/utils/accessor');
jest.mock('~/lib/utils/url_utility', () => ({
redirectTo: jest.fn(),
joinPaths: jest.requireActual('~/lib/utils/url_utility').joinPaths,
@@ -34,78 +37,203 @@ jest.mock('~/releases/util', () => ({
}));
describe('Release edit/new actions', () => {
+ useLocalStorageSpy();
+
let state;
let releaseResponse;
let error;
const projectPath = 'test/project-path';
+ const draftActions = [{ type: 'saveDraftRelease' }, { type: 'saveDraftCreateFrom' }];
const setupState = (updates = {}) => {
state = {
...createState({
projectPath,
projectId: '18',
- isExistingRelease: true,
+ isExistingRelease: false,
tagName: releaseResponse.tag_name,
releasesPagePath: 'path/to/releases/page',
markdownDocsPath: 'path/to/markdown/docs',
markdownPreviewPath: 'path/to/markdown/preview',
}),
+ localStorageKey: `${projectPath}/release/new`,
+ localStorageCreateFromKey: `${projectPath}/release/new/createFrom`,
...updates,
};
};
beforeEach(() => {
+ AccessorUtilities.canUseLocalStorage.mockReturnValue(true);
releaseResponse = cloneDeep(originalOneReleaseForEditingQueryResponse);
gon.api_version = 'v4';
error = new Error('Yikes!');
createAlert.mockClear();
});
+ afterEach(() => {
+ jest.clearAllMocks();
+ });
+
describe('when creating a new release', () => {
beforeEach(() => {
setupState({ isExistingRelease: false });
});
describe('initializeRelease', () => {
- it(`commits ${types.INITIALIZE_EMPTY_RELEASE}`, () => {
- testAction(actions.initializeRelease, undefined, state, [
- { type: types.INITIALIZE_EMPTY_RELEASE },
- ]);
+ it('dispatches loadDraftRelease', () => {
+ return testAction({
+ action: actions.initializeRelease,
+ state,
+ expectedMutations: [],
+ expectedActions: [{ type: 'loadDraftRelease' }],
+ });
+ });
+ });
+
+ describe('loadDraftRelease', () => {
+ it(`with no saved release, it commits ${types.INITIALIZE_EMPTY_RELEASE}`, () => {
+ testAction({
+ action: actions.loadDraftRelease,
+ state,
+ expectedMutations: [{ type: types.INITIALIZE_EMPTY_RELEASE }],
+ });
+ });
+
+ it('with saved release, loads the release from local storage', () => {
+ const release = {
+ tagName: 'v1.3',
+ tagMessage: 'hello',
+ name: '',
+ description: '',
+ milestones: [],
+ groupMilestones: [],
+ releasedAt: new Date(),
+ assets: {
+ links: [],
+ },
+ };
+ const createFrom = 'main';
+
+ window.localStorage.setItem(`${state.projectPath}/release/new`, JSON.stringify(release));
+ window.localStorage.setItem(
+ `${state.projectPath}/release/new/createFrom`,
+ JSON.stringify(createFrom),
+ );
+
+ return testAction({
+ action: actions.loadDraftRelease,
+ state,
+ expectedMutations: [
+ { type: types.INITIALIZE_RELEASE, payload: release },
+ { type: types.UPDATE_CREATE_FROM, payload: createFrom },
+ ],
+ });
+ });
+ });
+
+ describe('clearDraftRelease', () => {
+ it('calls window.localStorage.clear', () => {
+ return testAction({ action: actions.clearDraftRelease, state }).then(() => {
+ expect(window.localStorage.removeItem).toHaveBeenCalledTimes(2);
+ expect(window.localStorage.removeItem).toHaveBeenCalledWith(state.localStorageKey);
+ expect(window.localStorage.removeItem).toHaveBeenCalledWith(
+ state.localStorageCreateFromKey,
+ );
+ });
+ });
+ });
+
+ describe('saveDraftCreateFrom', () => {
+ it('saves the create from to local storage', () => {
+ const createFrom = 'main';
+ setupState({ createFrom });
+ return testAction({ action: actions.saveDraftCreateFrom, state }).then(() => {
+ expect(window.localStorage.setItem).toHaveBeenCalledTimes(1);
+ expect(window.localStorage.setItem).toHaveBeenCalledWith(
+ state.localStorageCreateFromKey,
+ JSON.stringify(createFrom),
+ );
+ });
+ });
+ });
+
+ describe('saveDraftRelease', () => {
+ let release;
+
+ beforeEach(() => {
+ release = {
+ tagName: 'v1.3',
+ tagMessage: 'hello',
+ name: '',
+ description: '',
+ milestones: [],
+ groupMilestones: [],
+ releasedAt: new Date(),
+ assets: {
+ links: [],
+ },
+ };
+ });
+
+ it('saves the draft release to local storage', () => {
+ setupState({ release, releasedAtChanged: true });
+
+ return testAction({ action: actions.saveDraftRelease, state }).then(() => {
+ expect(window.localStorage.setItem).toHaveBeenCalledTimes(1);
+ expect(window.localStorage.setItem).toHaveBeenCalledWith(
+ state.localStorageKey,
+ JSON.stringify(state.release),
+ );
+ });
+ });
+
+ it('ignores the released at date if it has not been changed', () => {
+ setupState({ release, releasedAtChanged: false });
+
+ return testAction({ action: actions.saveDraftRelease, state }).then(() => {
+ expect(window.localStorage.setItem).toHaveBeenCalledTimes(1);
+ expect(window.localStorage.setItem).toHaveBeenCalledWith(
+ state.localStorageKey,
+ JSON.stringify({ ...state.release, releasedAt: undefined }),
+ );
+ });
});
});
describe('saveRelease', () => {
it(`commits ${types.REQUEST_SAVE_RELEASE} and then dispatched "createRelease"`, () => {
- testAction(
- actions.saveRelease,
- undefined,
+ testAction({
+ action: actions.saveRelease,
state,
- [{ type: types.REQUEST_SAVE_RELEASE }],
- [{ type: 'createRelease' }],
- );
+ expectedMutations: [{ type: types.REQUEST_SAVE_RELEASE }],
+ expectedActions: [{ type: 'createRelease' }],
+ });
});
});
});
describe('when editing an existing release', () => {
- beforeEach(setupState);
+ beforeEach(() => setupState({ isExistingRelease: true }));
describe('initializeRelease', () => {
it('dispatches "fetchRelease"', () => {
- testAction(actions.initializeRelease, undefined, state, [], [{ type: 'fetchRelease' }]);
+ testAction({
+ action: actions.initializeRelease,
+ state,
+ expectedActions: [{ type: 'fetchRelease' }],
+ });
});
});
describe('saveRelease', () => {
it(`commits ${types.REQUEST_SAVE_RELEASE} and then dispatched "updateRelease"`, () => {
- testAction(
- actions.saveRelease,
- undefined,
+ testAction({
+ action: actions.saveRelease,
state,
- [{ type: types.REQUEST_SAVE_RELEASE }],
- [{ type: 'updateRelease' }],
- );
+ expectedMutations: [{ type: types.REQUEST_SAVE_RELEASE }],
+ expectedActions: [{ type: 'updateRelease' }],
+ });
});
});
});
@@ -120,15 +248,19 @@ describe('Release edit/new actions', () => {
});
it(`commits ${types.REQUEST_RELEASE} and then commits ${types.RECEIVE_RELEASE_SUCCESS} with the converted release object`, () => {
- return testAction(actions.fetchRelease, undefined, state, [
- {
- type: types.REQUEST_RELEASE,
- },
- {
- type: types.RECEIVE_RELEASE_SUCCESS,
- payload: convertOneReleaseGraphQLResponse(releaseResponse).data,
- },
- ]);
+ return testAction({
+ action: actions.fetchRelease,
+ state,
+ expectedMutations: [
+ {
+ type: types.REQUEST_RELEASE,
+ },
+ {
+ type: types.RECEIVE_RELEASE_SUCCESS,
+ payload: convertOneReleaseGraphQLResponse(releaseResponse).data,
+ },
+ ],
+ });
});
});
@@ -138,15 +270,19 @@ describe('Release edit/new actions', () => {
});
it(`commits ${types.REQUEST_RELEASE} and then commits ${types.RECEIVE_RELEASE_ERROR} with an error object`, () => {
- return testAction(actions.fetchRelease, undefined, state, [
- {
- type: types.REQUEST_RELEASE,
- },
- {
- type: types.RECEIVE_RELEASE_ERROR,
- payload: expect.any(Error),
- },
- ]);
+ return testAction({
+ action: actions.fetchRelease,
+ state,
+ expectedMutations: [
+ {
+ type: types.REQUEST_RELEASE,
+ },
+ {
+ type: types.RECEIVE_RELEASE_ERROR,
+ payload: expect.any(Error),
+ },
+ ],
+ });
});
it(`shows an alert message`, () => {
@@ -163,89 +299,140 @@ describe('Release edit/new actions', () => {
describe('updateReleaseTagName', () => {
it(`commits ${types.UPDATE_RELEASE_TAG_NAME} with the updated tag name`, () => {
const newTag = 'updated-tag-name';
- return testAction(actions.updateReleaseTagName, newTag, state, [
- { type: types.UPDATE_RELEASE_TAG_NAME, payload: newTag },
- ]);
+ return testAction({
+ action: actions.updateReleaseTagName,
+ payload: newTag,
+ state,
+ expectedMutations: [{ type: types.UPDATE_RELEASE_TAG_NAME, payload: newTag }],
+ expectedActions: draftActions,
+ });
+ });
+ it('does not save drafts when editing', () => {
+ const newTag = 'updated-tag-name';
+ return testAction({
+ action: actions.updateReleaseTagName,
+ payload: newTag,
+ state: { ...state, isExistingRelease: true },
+ expectedMutations: [{ type: types.UPDATE_RELEASE_TAG_NAME, payload: newTag }],
+ });
});
});
describe('updateReleaseTagMessage', () => {
it(`commits ${types.UPDATE_RELEASE_TAG_MESSAGE} with the updated tag name`, () => {
const newMessage = 'updated-tag-message';
- return testAction(actions.updateReleaseTagMessage, newMessage, state, [
- { type: types.UPDATE_RELEASE_TAG_MESSAGE, payload: newMessage },
- ]);
+ return testAction({
+ action: actions.updateReleaseTagMessage,
+ payload: newMessage,
+ state,
+ expectedMutations: [{ type: types.UPDATE_RELEASE_TAG_MESSAGE, payload: newMessage }],
+ expectedActions: draftActions,
+ });
});
});
describe('updateReleasedAt', () => {
it(`commits ${types.UPDATE_RELEASED_AT} with the updated date`, () => {
const newDate = new Date();
- return testAction(actions.updateReleasedAt, newDate, state, [
- { type: types.UPDATE_RELEASED_AT, payload: newDate },
- ]);
+ return testAction({
+ action: actions.updateReleasedAt,
+ payload: newDate,
+ state,
+ expectedMutations: [{ type: types.UPDATE_RELEASED_AT, payload: newDate }],
+ expectedActions: draftActions,
+ });
});
});
describe('updateCreateFrom', () => {
it(`commits ${types.UPDATE_CREATE_FROM} with the updated ref`, () => {
const newRef = 'my-feature-branch';
- return testAction(actions.updateCreateFrom, newRef, state, [
- { type: types.UPDATE_CREATE_FROM, payload: newRef },
- ]);
+ return testAction({
+ action: actions.updateCreateFrom,
+ payload: newRef,
+ state,
+ expectedMutations: [{ type: types.UPDATE_CREATE_FROM, payload: newRef }],
+ expectedActions: draftActions,
+ });
});
});
describe('updateShowCreateFrom', () => {
it(`commits ${types.UPDATE_SHOW_CREATE_FROM} with the updated ref`, () => {
const newRef = 'my-feature-branch';
- return testAction(actions.updateShowCreateFrom, newRef, state, [
- { type: types.UPDATE_SHOW_CREATE_FROM, payload: newRef },
- ]);
+ return testAction({
+ action: actions.updateShowCreateFrom,
+ payload: newRef,
+ state,
+ expectedMutations: [{ type: types.UPDATE_SHOW_CREATE_FROM, payload: newRef }],
+ });
});
});
describe('updateReleaseTitle', () => {
it(`commits ${types.UPDATE_RELEASE_TITLE} with the updated release title`, () => {
const newTitle = 'The new release title';
- return testAction(actions.updateReleaseTitle, newTitle, state, [
- { type: types.UPDATE_RELEASE_TITLE, payload: newTitle },
- ]);
+ return testAction({
+ action: actions.updateReleaseTitle,
+ payload: newTitle,
+ state,
+ expectedMutations: [{ type: types.UPDATE_RELEASE_TITLE, payload: newTitle }],
+ expectedActions: draftActions,
+ });
});
});
describe('updateReleaseNotes', () => {
it(`commits ${types.UPDATE_RELEASE_NOTES} with the updated release notes`, () => {
const newReleaseNotes = 'The new release notes';
- return testAction(actions.updateReleaseNotes, newReleaseNotes, state, [
- { type: types.UPDATE_RELEASE_NOTES, payload: newReleaseNotes },
- ]);
+ return testAction({
+ action: actions.updateReleaseNotes,
+ payload: newReleaseNotes,
+ state,
+ expectedMutations: [{ type: types.UPDATE_RELEASE_NOTES, payload: newReleaseNotes }],
+ expectedActions: draftActions,
+ });
});
});
describe('updateReleaseMilestones', () => {
it(`commits ${types.UPDATE_RELEASE_MILESTONES} with the updated release milestones`, () => {
const newReleaseMilestones = ['v0.0', 'v0.1'];
- return testAction(actions.updateReleaseMilestones, newReleaseMilestones, state, [
- { type: types.UPDATE_RELEASE_MILESTONES, payload: newReleaseMilestones },
- ]);
+ return testAction({
+ action: actions.updateReleaseMilestones,
+ payload: newReleaseMilestones,
+ state,
+ expectedMutations: [
+ { type: types.UPDATE_RELEASE_MILESTONES, payload: newReleaseMilestones },
+ ],
+ expectedActions: draftActions,
+ });
});
});
describe('updateReleaseGroupMilestones', () => {
it(`commits ${types.UPDATE_RELEASE_GROUP_MILESTONES} with the updated release group milestones`, () => {
const newReleaseGroupMilestones = ['v0.0', 'v0.1'];
- return testAction(actions.updateReleaseGroupMilestones, newReleaseGroupMilestones, state, [
- { type: types.UPDATE_RELEASE_GROUP_MILESTONES, payload: newReleaseGroupMilestones },
- ]);
+ return testAction({
+ action: actions.updateReleaseGroupMilestones,
+ payload: newReleaseGroupMilestones,
+ state,
+ expectedMutations: [
+ { type: types.UPDATE_RELEASE_GROUP_MILESTONES, payload: newReleaseGroupMilestones },
+ ],
+ expectedActions: draftActions,
+ });
});
});
describe('addEmptyAssetLink', () => {
it(`commits ${types.ADD_EMPTY_ASSET_LINK}`, () => {
- return testAction(actions.addEmptyAssetLink, undefined, state, [
- { type: types.ADD_EMPTY_ASSET_LINK },
- ]);
+ return testAction({
+ action: actions.addEmptyAssetLink,
+ state,
+ expectedMutations: [{ type: types.ADD_EMPTY_ASSET_LINK }],
+ expectedActions: draftActions,
+ });
});
});
@@ -256,9 +443,13 @@ describe('Release edit/new actions', () => {
newUrl: 'https://example.com/updated',
};
- return testAction(actions.updateAssetLinkUrl, params, state, [
- { type: types.UPDATE_ASSET_LINK_URL, payload: params },
- ]);
+ return testAction({
+ action: actions.updateAssetLinkUrl,
+ payload: params,
+ state,
+ expectedMutations: [{ type: types.UPDATE_ASSET_LINK_URL, payload: params }],
+ expectedActions: draftActions,
+ });
});
});
@@ -269,9 +460,13 @@ describe('Release edit/new actions', () => {
newName: 'Updated link name',
};
- return testAction(actions.updateAssetLinkName, params, state, [
- { type: types.UPDATE_ASSET_LINK_NAME, payload: params },
- ]);
+ return testAction({
+ action: actions.updateAssetLinkName,
+ payload: params,
+ state,
+ expectedMutations: [{ type: types.UPDATE_ASSET_LINK_NAME, payload: params }],
+ expectedActions: draftActions,
+ });
});
});
@@ -282,30 +477,45 @@ describe('Release edit/new actions', () => {
newType: ASSET_LINK_TYPE.RUNBOOK,
};
- return testAction(actions.updateAssetLinkType, params, state, [
- { type: types.UPDATE_ASSET_LINK_TYPE, payload: params },
- ]);
+ return testAction({
+ action: actions.updateAssetLinkType,
+ payload: params,
+ state,
+ expectedMutations: [{ type: types.UPDATE_ASSET_LINK_TYPE, payload: params }],
+ expectedActions: draftActions,
+ });
});
});
describe('removeAssetLink', () => {
it(`commits ${types.REMOVE_ASSET_LINK} with the ID of the asset link to remove`, () => {
const idToRemove = 2;
- return testAction(actions.removeAssetLink, idToRemove, state, [
- { type: types.REMOVE_ASSET_LINK, payload: idToRemove },
- ]);
+ return testAction({
+ action: actions.removeAssetLink,
+ payload: idToRemove,
+ state,
+ expectedMutations: [{ type: types.REMOVE_ASSET_LINK, payload: idToRemove }],
+ expectedActions: draftActions,
+ });
});
});
describe('receiveSaveReleaseSuccess', () => {
- it(`commits ${types.RECEIVE_SAVE_RELEASE_SUCCESS}`, () =>
- testAction(actions.receiveSaveReleaseSuccess, releaseResponse, state, [
- { type: types.RECEIVE_SAVE_RELEASE_SUCCESS },
- ]));
+ it(`commits ${types.RECEIVE_SAVE_RELEASE_SUCCESS} and dispatches clearDraftRelease`, () =>
+ testAction({
+ action: actions.receiveSaveReleaseSuccess,
+ payload: releaseResponse,
+ state,
+ expectedMutations: [{ type: types.RECEIVE_SAVE_RELEASE_SUCCESS }],
+ expectedActions: [{ type: 'clearDraftRelease' }],
+ }));
it("redirects to the release's dedicated page", () => {
const { selfUrl } = releaseResponse.data.project.release.links;
- actions.receiveSaveReleaseSuccess({ commit: jest.fn(), state }, selfUrl);
+ actions.receiveSaveReleaseSuccess(
+ { commit: jest.fn(), state, dispatch: jest.fn() },
+ selfUrl,
+ );
expect(redirectTo).toHaveBeenCalledTimes(1); // eslint-disable-line import/no-deprecated
expect(redirectTo).toHaveBeenCalledWith(selfUrl); // eslint-disable-line import/no-deprecated
});
@@ -346,18 +556,16 @@ describe('Release edit/new actions', () => {
});
it(`dispatches "receiveSaveReleaseSuccess" with the converted release object`, () => {
- return testAction(
- actions.createRelease,
- undefined,
+ return testAction({
+ action: actions.createRelease,
state,
- [],
- [
+ expectedActions: [
{
type: 'receiveSaveReleaseSuccess',
payload: selfUrl,
},
],
- );
+ });
});
});
@@ -367,12 +575,16 @@ describe('Release edit/new actions', () => {
});
it(`commits ${types.RECEIVE_SAVE_RELEASE_ERROR} with an error object`, () => {
- return testAction(actions.createRelease, undefined, state, [
- {
- type: types.RECEIVE_SAVE_RELEASE_ERROR,
- payload: expect.any(Error),
- },
- ]);
+ return testAction({
+ action: actions.createRelease,
+ state,
+ expectedMutations: [
+ {
+ type: types.RECEIVE_SAVE_RELEASE_ERROR,
+ payload: expect.any(Error),
+ },
+ ],
+ });
});
it(`shows an alert message`, () => {
@@ -393,12 +605,16 @@ describe('Release edit/new actions', () => {
});
it(`commits ${types.RECEIVE_SAVE_RELEASE_ERROR} with an error object`, () => {
- return testAction(actions.createRelease, undefined, state, [
- {
- type: types.RECEIVE_SAVE_RELEASE_ERROR,
- payload: expect.any(Error),
- },
- ]);
+ return testAction({
+ action: actions.createRelease,
+ state,
+ expectedMutations: [
+ {
+ type: types.RECEIVE_SAVE_RELEASE_ERROR,
+ payload: expect.any(Error),
+ },
+ ],
+ });
});
it(`shows an alert message`, () => {
@@ -760,16 +976,15 @@ describe('Release edit/new actions', () => {
const tag = { message: 'this is a tag' };
getTag.mockResolvedValue({ data: tag });
- await testAction(
- actions.fetchTagNotes,
- tagName,
+ await testAction({
+ action: actions.fetchTagNotes,
+ payload: tagName,
state,
- [
+ expectedMutations: [
{ type: types.REQUEST_TAG_NOTES },
{ type: types.RECEIVE_TAG_NOTES_SUCCESS, payload: tag },
],
- [],
- );
+ });
expect(getTag).toHaveBeenCalledWith(state.projectId, tagName);
});
@@ -777,16 +992,15 @@ describe('Release edit/new actions', () => {
error = new Error();
getTag.mockRejectedValue(error);
- await testAction(
- actions.fetchTagNotes,
- tagName,
+ await testAction({
+ action: actions.fetchTagNotes,
+ payload: tagName,
state,
- [
+ expectedMutations: [
{ type: types.REQUEST_TAG_NOTES },
{ type: types.RECEIVE_TAG_NOTES_ERROR, payload: error },
],
- [],
- );
+ });
expect(createAlert).toHaveBeenCalledWith({
message: s__('Release|Unable to fetch the tag notes.'),
diff --git a/spec/frontend/releases/stores/modules/detail/getters_spec.js b/spec/frontend/releases/stores/modules/detail/getters_spec.js
index 736eae13fb3..24490e19296 100644
--- a/spec/frontend/releases/stores/modules/detail/getters_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/getters_spec.js
@@ -470,4 +470,20 @@ describe('Release edit/new getters', () => {
expect(getters.releasedAtChanged({ originalReleasedAt, release: { releasedAt } })).toBe(true);
});
});
+
+ describe('localStorageKey', () => {
+ it('returns a string key with the project path for local storage', () => {
+ const projectPath = 'test/project';
+ expect(getters.localStorageKey({ projectPath })).toBe('test/project/release/new');
+ });
+ });
+
+ describe('localStorageCreateFromKey', () => {
+ it('returns a string key with the project path for local storage', () => {
+ const projectPath = 'test/project';
+ expect(getters.localStorageCreateFromKey({ projectPath })).toBe(
+ 'test/project/release/new/createFrom',
+ );
+ });
+ });
});
diff --git a/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap b/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
index 3f901dc61b8..1a5301c5525 100644
--- a/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
+++ b/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
@@ -1,97 +1,50 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Repository last commit component renders commit widget 1`] = `
-<div
- class="commit gl-display-flex gl-p-5 gl-w-full well-segment"
+<commit-info-stub
+ commit="[object Object]"
>
- <user-avatar-link-stub
- class="gl-mr-4 gl-my-2"
- imgalt=""
- imgcssclasses=""
- imgcsswrapperclasses=""
- imgsize="32"
- imgsrc="https://test.com"
- linkhref="/test"
- popoveruserid=""
- popoverusername=""
- tooltipplacement="top"
- tooltiptext=""
- username=""
- />
<div
- class="commit-detail flex-list gl-align-items-center gl-display-flex gl-flex-grow-1 gl-justify-content-space-between gl-min-w-0"
+ class="commit-actions gl-align-items-center gl-display-flex gl-flex-align gl-flex-direction-row"
>
<div
- class="commit-content"
- data-qa-selector="commit_content"
+ class="ci-status-link"
>
- <gl-link-stub
- class="commit-row-message item-title"
- href="/commit/123"
- >
- Commit title
- </gl-link-stub>
- <div
- class="committer"
- >
- <gl-link-stub
- class="commit-author-link js-user-link"
- href="/test"
- >
- Test
- </gl-link-stub>
- authored
- <timeago-tooltip-stub
- cssclass=""
- datetimeformat="DATE_WITH_TIME_FORMAT"
- time="2019-01-01"
- tooltipplacement="bottom"
- />
- </div>
+ <ci-badge-link-stub
+ aria-label="Pipeline: failed"
+ class="js-commit-pipeline"
+ details-path="https://test.com/pipeline"
+ showtooltip="true"
+ size="md"
+ status="[object Object]"
+ uselink="true"
+ />
</div>
- <div
- class="gl-flex-grow-1"
- />
- <div
- class="commit-actions gl-align-items-center gl-display-flex gl-flex-align gl-flex-direction-row"
+ <gl-button-group-stub
+ class="gl-ml-4 js-commit-sha-group"
>
- <div
- class="ci-status-link"
- >
- <ci-badge-link-stub
- aria-label="Pipeline: failed"
- class="js-commit-pipeline"
- details-path="https://test.com/pipeline"
- size="lg"
- status="[object Object]"
- />
- </div>
- <gl-button-group-stub
- class="gl-ml-4 js-commit-sha-group"
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
+ class="gl-font-monospace"
+ data-testid="last-commit-id-label"
+ icon=""
+ label="true"
+ size="medium"
+ variant="default"
>
- <gl-button-stub
- buttontextclasses=""
- category="primary"
- class="gl-font-monospace"
- data-testid="last-commit-id-label"
- icon=""
- label="true"
- size="medium"
- variant="default"
- >
- 12345678
- </gl-button-stub>
- <clipboard-button-stub
- category="secondary"
- class="input-group-text"
- size="medium"
- text="123456789"
- title="Copy commit SHA"
- tooltipplacement="top"
- variant="default"
- />
- </gl-button-group-stub>
- </div>
+ 12345678
+ </gl-button-stub>
+ <clipboard-button-stub
+ category="secondary"
+ class="input-group-text"
+ size="medium"
+ text="123456789"
+ title="Copy commit SHA"
+ tooltipplacement="top"
+ variant="default"
+ />
+ </gl-button-group-stub>
</div>
-</div>
+</commit-info-stub>
`;
diff --git a/spec/frontend/repository/components/commit_info_spec.js b/spec/frontend/repository/components/commit_info_spec.js
new file mode 100644
index 00000000000..34e941aa858
--- /dev/null
+++ b/spec/frontend/repository/components/commit_info_spec.js
@@ -0,0 +1,87 @@
+import { nextTick } from 'vue';
+import { GlButton } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import CommitInfo from '~/repository/components/commit_info.vue';
+import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue';
+
+let wrapper;
+const commit = {
+ title: 'Commit title',
+ titleHtml: 'Commit title html',
+ message: 'Commit message',
+ authoredDate: '2019-01-01',
+ authorName: 'Test authorName',
+ author: { name: 'Test name', avatarUrl: 'https://test.com', webPath: '/test' },
+};
+
+const findTextExpander = () => wrapper.findComponent(GlButton);
+const findUserLink = () => wrapper.findByText(commit.author.name);
+const findUserAvatarLink = () => wrapper.findComponent(UserAvatarLink);
+const findAuthorName = () => wrapper.findByText(`${commit.authorName} authored`);
+const findCommitRowDescription = () => wrapper.find('pre');
+const findTitleHtml = () => wrapper.findByText(commit.titleHtml);
+
+const createComponent = async ({ commitMock = {} } = {}) => {
+ wrapper = shallowMountExtended(CommitInfo, {
+ propsData: { commit: { ...commit, ...commitMock } },
+ });
+
+ await nextTick();
+};
+
+describe('Repository last commit component', () => {
+ it('renders author info', () => {
+ createComponent();
+
+ expect(findUserLink().exists()).toBe(true);
+ expect(findUserAvatarLink().exists()).toBe(true);
+ });
+
+ it('hides author component when author does not exist', () => {
+ createComponent({ commitMock: { author: null } });
+
+ expect(findUserLink().exists()).toBe(false);
+ expect(findUserAvatarLink().exists()).toBe(false);
+ expect(findAuthorName().exists()).toBe(true);
+ });
+
+ it('does not render description expander when description is null', () => {
+ createComponent();
+
+ expect(findTextExpander().exists()).toBe(false);
+ expect(findCommitRowDescription().exists()).toBe(false);
+ });
+
+ describe('when the description is present', () => {
+ beforeEach(() => {
+ createComponent({ commitMock: { descriptionHtml: '&#x000A;Update ADOPTERS.md' } });
+ });
+
+ it('strips the first newline of the description', () => {
+ expect(findCommitRowDescription().html()).toBe(
+ '<pre class="commit-row-description gl-mb-3 gl-white-space-pre-line">Update ADOPTERS.md</pre>',
+ );
+ });
+
+ it('renders commit description collapsed by default', () => {
+ expect(findCommitRowDescription().classes('gl-display-block!')).toBe(false);
+ expect(findTextExpander().classes('open')).toBe(false);
+ expect(findTextExpander().props('selected')).toBe(false);
+ });
+
+ it('expands commit description when clicking expander', async () => {
+ findTextExpander().vm.$emit('click');
+ await nextTick();
+
+ expect(findCommitRowDescription().classes('gl-display-block!')).toBe(true);
+ expect(findTextExpander().classes('open')).toBe(true);
+ expect(findTextExpander().props('selected')).toBe(true);
+ });
+ });
+
+ it('sets correct CSS class if the commit message is empty', () => {
+ createComponent({ commitMock: { message: '' } });
+
+ expect(findTitleHtml().classes()).toContain('gl-font-style-italic');
+ });
+});
diff --git a/spec/frontend/repository/components/last_commit_spec.js b/spec/frontend/repository/components/last_commit_spec.js
index c207d32d61d..d5ec34b1f6d 100644
--- a/spec/frontend/repository/components/last_commit_spec.js
+++ b/spec/frontend/repository/components/last_commit_spec.js
@@ -1,29 +1,26 @@
import Vue, { nextTick } from 'vue';
-import VueApollo from 'vue-apollo';
import { GlLoadingIcon } from '@gitlab/ui';
+import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import LastCommit from '~/repository/components/last_commit.vue';
+import CommitInfo from '~/repository/components/commit_info.vue';
import SignatureBadge from '~/commit/components/signature_badge.vue';
-import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue';
import eventHub from '~/repository/event_hub';
import pathLastCommitQuery from 'shared_queries/repository/path_last_commit.query.graphql';
import { FORK_UPDATED_EVENT } from '~/repository/constants';
import { refMock } from '../mock_data';
let wrapper;
+let commitData;
let mockResolver;
const findPipeline = () => wrapper.find('.js-commit-pipeline');
-const findTextExpander = () => wrapper.find('.text-expander');
-const findUserLink = () => wrapper.find('.js-user-link');
-const findUserAvatarLink = () => wrapper.findComponent(UserAvatarLink);
const findLastCommitLabel = () => wrapper.findByTestId('last-commit-id-label');
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
-const findCommitRowDescription = () => wrapper.find('.commit-row-description');
const findStatusBox = () => wrapper.findComponent(SignatureBadge);
-const findItemTitle = () => wrapper.find('.item-title');
+const findCommitInfo = () => wrapper.findComponent(CommitInfo);
const defaultPipelineEdges = [
{
@@ -44,23 +41,7 @@ const defaultPipelineEdges = [
},
];
-const defaultAuthor = {
- __typename: 'UserCore',
- id: 'gid://gitlab/User/1',
- name: 'Test',
- avatarUrl: 'https://test.com',
- webPath: '/test',
-};
-
-const defaultMessage = 'Commit title';
-
-const createCommitData = ({
- pipelineEdges = defaultPipelineEdges,
- author = defaultAuthor,
- descriptionHtml = '',
- signature = null,
- message = defaultMessage,
-}) => {
+const createCommitData = ({ pipelineEdges = defaultPipelineEdges, signature = null }) => {
return {
data: {
project: {
@@ -79,13 +60,19 @@ const createCommitData = ({
sha: '123456789',
title: 'Commit title',
titleHtml: 'Commit title',
- descriptionHtml,
- message,
+ descriptionHtml: '',
+ message: '',
webPath: '/commit/123',
authoredDate: '2019-01-01',
authorName: 'Test',
authorGravatar: 'https://test.com',
- author,
+ author: {
+ __typename: 'UserCore',
+ id: 'gid://gitlab/User/1',
+ name: 'Test',
+ avatarUrl: 'https://test.com',
+ webPath: '/test',
+ },
signature,
pipelines: {
__typename: 'PipelineConnection',
@@ -101,12 +88,13 @@ const createCommitData = ({
};
};
-const createComponent = (data = {}) => {
+const createComponent = async (data = {}) => {
Vue.use(VueApollo);
const currentPath = 'path';
- mockResolver = jest.fn().mockResolvedValue(createCommitData(data));
+ commitData = createCommitData(data);
+ mockResolver = jest.fn().mockResolvedValue(commitData);
wrapper = shallowMountExtended(LastCommit, {
apolloProvider: createMockApollo([[pathLastCommitQuery, mockResolver]]),
@@ -116,8 +104,13 @@ const createComponent = (data = {}) => {
SignatureBadge,
},
});
+
+ await waitForPromises();
+ await nextTick();
};
+beforeEach(() => createComponent());
+
afterEach(() => {
mockResolver = null;
});
@@ -137,17 +130,17 @@ describe('Repository last commit component', () => {
expect(findLoadingIcon().exists()).toBe(loading);
});
- it('renders commit widget', async () => {
- createComponent();
- await waitForPromises();
+ it('renders a CommitInfo component', () => {
+ const commit = { ...commitData.project?.repository.paginatedTree.nodes[0].lastCommit };
- expect(wrapper.element).toMatchSnapshot();
+ expect(findCommitInfo().props().commit).toMatchObject(commit);
});
- it('renders short commit ID', async () => {
- createComponent();
- await waitForPromises();
+ it('renders commit widget', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+ it('renders short commit ID', () => {
expect(findLastCommitLabel().text()).toBe('12345678');
});
@@ -158,29 +151,10 @@ describe('Repository last commit component', () => {
expect(findPipeline().exists()).toBe(false);
});
- it('renders pipeline components when pipeline exists', async () => {
- createComponent();
- await waitForPromises();
-
+ it('renders pipeline components when pipeline exists', () => {
expect(findPipeline().exists()).toBe(true);
});
- it('hides author component when author does not exist', async () => {
- createComponent({ author: null });
- await waitForPromises();
-
- expect(findUserLink().exists()).toBe(false);
- expect(findUserAvatarLink().exists()).toBe(false);
- });
-
- it('does not render description expander when description is null', async () => {
- createComponent();
- await waitForPromises();
-
- expect(findTextExpander().exists()).toBe(false);
- expect(findCommitRowDescription().exists()).toBe(false);
- });
-
describe('created', () => {
it('binds `epicsListScrolled` event listener via eventHub', () => {
jest.spyOn(eventHub, '$on').mockImplementation(() => {});
@@ -200,32 +174,6 @@ describe('Repository last commit component', () => {
});
});
- describe('when the description is present', () => {
- beforeEach(async () => {
- createComponent({ descriptionHtml: '&#x000A;Update ADOPTERS.md' });
- await waitForPromises();
- });
-
- it('strips the first newline of the description', () => {
- expect(findCommitRowDescription().html()).toBe(
- '<pre class="commit-row-description gl-mb-3 gl-white-space-pre-line">Update ADOPTERS.md</pre>',
- );
- });
-
- it('expands commit description when clicking expander', async () => {
- expect(findCommitRowDescription().classes('d-block')).toBe(false);
- expect(findTextExpander().classes('open')).toBe(false);
- expect(findTextExpander().props('selected')).toBe(false);
-
- findTextExpander().vm.$emit('click');
- await nextTick();
-
- expect(findCommitRowDescription().classes('d-block')).toBe(true);
- expect(findTextExpander().classes('open')).toBe(true);
- expect(findTextExpander().props('selected')).toBe(true);
- });
- });
-
it('renders the signature HTML as returned by the backend', async () => {
const signatureResponse = {
__typename: 'GpgSignature',
@@ -241,11 +189,4 @@ describe('Repository last commit component', () => {
expect(findStatusBox().props()).toMatchObject({ signature: signatureResponse });
});
-
- it('sets correct CSS class if the commit message is empty', async () => {
- createComponent({ message: '' });
- await waitForPromises();
-
- expect(findItemTitle().classes()).toContain('font-italic');
- });
});
diff --git a/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap b/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
index 17ebdf8725d..af7eca6a52d 100644
--- a/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
+++ b/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
@@ -9,7 +9,7 @@ exports[`Repository table row component renders a symlink table row 1`] = `
>
<a
class="str-truncated tree-item-link"
- data-qa-selector="file_name_link"
+ data-testid="file-name-link"
href="https://test.com"
title="test"
>
@@ -65,7 +65,7 @@ exports[`Repository table row component renders table row 1`] = `
>
<a
class="str-truncated tree-item-link"
- data-qa-selector="file_name_link"
+ data-testid="file-name-link"
href="https://test.com"
title="test"
>
@@ -121,7 +121,7 @@ exports[`Repository table row component renders table row for path with special
>
<a
class="str-truncated tree-item-link"
- data-qa-selector="file_name_link"
+ data-testid="file-name-link"
href="https://test.com"
title="test"
>
diff --git a/spec/frontend/search/sidebar/components/app_spec.js b/spec/frontend/search/sidebar/components/app_spec.js
index 8e23f9c1680..d8d2492209e 100644
--- a/spec/frontend/search/sidebar/components/app_spec.js
+++ b/spec/frontend/search/sidebar/components/app_spec.js
@@ -16,6 +16,7 @@ import BlobsFilters from '~/search/sidebar/components/blobs_filters.vue';
import ProjectsFilters from '~/search/sidebar/components/projects_filters.vue';
import NotesFilters from '~/search/sidebar/components/notes_filters.vue';
import CommitsFilters from '~/search/sidebar/components/commits_filters.vue';
+import MilestonesFilters from '~/search/sidebar/components/milestones_filters.vue';
import ScopeLegacyNavigation from '~/search/sidebar/components/scope_legacy_navigation.vue';
import SmallScreenDrawerNavigation from '~/search/sidebar/components/small_screen_drawer_navigation.vue';
import ScopeSidebarNavigation from '~/search/sidebar/components/scope_sidebar_navigation.vue';
@@ -47,6 +48,7 @@ describe('GlobalSearchSidebar', () => {
glFeatures: {
searchNotesHideArchivedProjects: true,
searchCommitsHideArchivedProjects: true,
+ searchMilestonesHideArchivedProjects: true,
},
},
});
@@ -59,6 +61,7 @@ describe('GlobalSearchSidebar', () => {
const findProjectsFilters = () => wrapper.findComponent(ProjectsFilters);
const findNotesFilters = () => wrapper.findComponent(NotesFilters);
const findCommitsFilters = () => wrapper.findComponent(CommitsFilters);
+ const findMilestonesFilters = () => wrapper.findComponent(MilestonesFilters);
const findScopeLegacyNavigation = () => wrapper.findComponent(ScopeLegacyNavigation);
const findSmallScreenDrawerNavigation = () => wrapper.findComponent(SmallScreenDrawerNavigation);
const findScopeSidebarNavigation = () => wrapper.findComponent(ScopeSidebarNavigation);
@@ -83,10 +86,12 @@ describe('GlobalSearchSidebar', () => {
${'blobs'} | ${findBlobsFilters} | ${SEARCH_TYPE_BASIC} | ${false}
${'blobs'} | ${findBlobsFilters} | ${SEARCH_TYPE_ADVANCED} | ${true}
${'blobs'} | ${findBlobsFilters} | ${SEARCH_TYPE_ZOEKT} | ${false}
- ${'notes'} | ${findNotesFilters} | ${SEARCH_TYPE_BASIC} | ${false}
+ ${'notes'} | ${findNotesFilters} | ${SEARCH_TYPE_BASIC} | ${true}
${'notes'} | ${findNotesFilters} | ${SEARCH_TYPE_ADVANCED} | ${true}
- ${'commits'} | ${findCommitsFilters} | ${SEARCH_TYPE_BASIC} | ${false}
+ ${'commits'} | ${findCommitsFilters} | ${SEARCH_TYPE_BASIC} | ${true}
${'commits'} | ${findCommitsFilters} | ${SEARCH_TYPE_ADVANCED} | ${true}
+ ${'milestones'} | ${findMilestonesFilters} | ${SEARCH_TYPE_BASIC} | ${true}
+ ${'milestones'} | ${findMilestonesFilters} | ${SEARCH_TYPE_ADVANCED} | ${true}
`('with sidebar $scope scope:', ({ scope, filter, searchType, isShown }) => {
beforeEach(() => {
getterSpies.currentScope = jest.fn(() => scope);
diff --git a/spec/frontend/search/sidebar/components/archived_filter_spec.js b/spec/frontend/search/sidebar/components/archived_filter_spec.js
index 69bf2ebd72e..9ed677ca297 100644
--- a/spec/frontend/search/sidebar/components/archived_filter_spec.js
+++ b/spec/frontend/search/sidebar/components/archived_filter_spec.js
@@ -1,8 +1,9 @@
-import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { GlFormCheckboxGroup } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import ArchivedFilter from '~/search/sidebar/components/archived_filter/index.vue';
import { archivedFilterData } from '~/search/sidebar/components/archived_filter/data';
@@ -12,17 +13,26 @@ Vue.use(Vuex);
describe('ArchivedFilter', () => {
let wrapper;
+ const defaultActions = {
+ setQuery: jest.fn(),
+ };
+
const createComponent = (state) => {
const store = new Vuex.Store({
state,
+ actions: defaultActions,
});
- wrapper = shallowMount(ArchivedFilter, {
+ wrapper = shallowMountExtended(ArchivedFilter, {
store,
+ directives: {
+ GlTooltip: createMockDirective('gl-tooltip'),
+ },
});
};
const findCheckboxFilter = () => wrapper.findComponent(GlFormCheckboxGroup);
+ const findCheckboxFilterLabel = () => wrapper.findByTestId('label');
const findH5 = () => wrapper.findComponent('h5');
describe('old sidebar', () => {
@@ -38,6 +48,12 @@ describe('ArchivedFilter', () => {
expect(findH5().exists()).toBe(true);
expect(findH5().text()).toBe(archivedFilterData.headerLabel);
});
+
+ it('wraps the label element with a tooltip', () => {
+ const tooltip = getBinding(findCheckboxFilterLabel().element, 'gl-tooltip');
+ expect(tooltip).toBeDefined();
+ expect(tooltip.value).toBe('Include search results from archived projects');
+ });
});
describe('new sidebar', () => {
@@ -53,6 +69,12 @@ describe('ArchivedFilter', () => {
expect(findH5().exists()).toBe(true);
expect(findH5().text()).toBe(archivedFilterData.headerLabel);
});
+
+ it('wraps the label element with a tooltip', () => {
+ const tooltip = getBinding(findCheckboxFilterLabel().element, 'gl-tooltip');
+ expect(tooltip).toBeDefined();
+ expect(tooltip.value).toBe('Include search results from archived projects');
+ });
});
describe.each`
@@ -70,4 +92,20 @@ describe('ArchivedFilter', () => {
expect(findCheckboxFilter().attributes('checked')).toBe(checkboxState);
});
});
+
+ describe('selectedFilter logic', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('correctly executes setQuery without mutating the input', () => {
+ const selectedFilter = [false];
+ findCheckboxFilter().vm.$emit('input', selectedFilter);
+ expect(defaultActions.setQuery).toHaveBeenCalledWith(expect.any(Object), {
+ key: 'include_archived',
+ value: 'false',
+ });
+ expect(selectedFilter).toEqual([false]);
+ });
+ });
});
diff --git a/spec/frontend/search/sidebar/components/issues_filters_spec.js b/spec/frontend/search/sidebar/components/issues_filters_spec.js
index 39d10cbb8b4..c3b3a93e362 100644
--- a/spec/frontend/search/sidebar/components/issues_filters_spec.js
+++ b/spec/frontend/search/sidebar/components/issues_filters_spec.js
@@ -111,11 +111,11 @@ describe('GlobalSearch IssuesFilters', () => {
});
it("doesn't render ArchivedFilter", () => {
- expect(findArchivedFilter().exists()).toBe(false);
+ expect(findArchivedFilter().exists()).toBe(true);
});
it('renders 1 divider', () => {
- expect(findDividers()).toHaveLength(1);
+ expect(findDividers()).toHaveLength(2);
});
});
diff --git a/spec/frontend/search/sidebar/components/merge_requests_filters_spec.js b/spec/frontend/search/sidebar/components/merge_requests_filters_spec.js
index b50f348be69..278249c2660 100644
--- a/spec/frontend/search/sidebar/components/merge_requests_filters_spec.js
+++ b/spec/frontend/search/sidebar/components/merge_requests_filters_spec.js
@@ -79,12 +79,12 @@ describe('GlobalSearch MergeRequestsFilters', () => {
expect(findStatusFilter().exists()).toBe(true);
});
- it("doesn't render ArchivedFilter", () => {
- expect(findArchivedFilter().exists()).toBe(false);
+ it('renders ArchivedFilter', () => {
+ expect(findArchivedFilter().exists()).toBe(true);
});
it('renders 1 divider', () => {
- expect(findDividers()).toHaveLength(0);
+ expect(findDividers()).toHaveLength(1);
});
});
diff --git a/spec/frontend/search/sidebar/components/milestones_filters_spec.js b/spec/frontend/search/sidebar/components/milestones_filters_spec.js
new file mode 100644
index 00000000000..e7fcfb030f4
--- /dev/null
+++ b/spec/frontend/search/sidebar/components/milestones_filters_spec.js
@@ -0,0 +1,28 @@
+import { shallowMount } from '@vue/test-utils';
+import MilestonesFilters from '~/search/sidebar/components/milestones_filters.vue';
+import ArchivedFilter from '~/search/sidebar/components/archived_filter/index.vue';
+import FiltersTemplate from '~/search/sidebar/components/filters_template.vue';
+
+describe('GlobalSearch MilestonesFilters', () => {
+ let wrapper;
+
+ const findArchivedFilter = () => wrapper.findComponent(ArchivedFilter);
+ const findFiltersTemplate = () => wrapper.findComponent(FiltersTemplate);
+
+ const createComponent = () => {
+ wrapper = shallowMount(MilestonesFilters);
+ };
+
+ describe('Renders correctly', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+ it('renders ArchivedFilter', () => {
+ expect(findArchivedFilter().exists()).toBe(true);
+ });
+
+ it('renders FiltersTemplate', () => {
+ expect(findFiltersTemplate().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/search/topbar/components/app_spec.js b/spec/frontend/search/topbar/components/app_spec.js
index 62d0e377d74..9704277c86b 100644
--- a/spec/frontend/search/topbar/components/app_spec.js
+++ b/spec/frontend/search/topbar/components/app_spec.js
@@ -9,7 +9,10 @@ import GlobalSearchTopbar from '~/search/topbar/components/app.vue';
import GroupFilter from '~/search/topbar/components/group_filter.vue';
import ProjectFilter from '~/search/topbar/components/project_filter.vue';
import MarkdownDrawer from '~/vue_shared/components/markdown_drawer/markdown_drawer.vue';
-import { SYNTAX_OPTIONS_DOCUMENT } from '~/search/topbar/constants';
+import {
+ SYNTAX_OPTIONS_ADVANCED_DOCUMENT,
+ SYNTAX_OPTIONS_ZOEKT_DOCUMENT,
+} from '~/search/topbar/constants';
Vue.use(Vuex);
@@ -22,7 +25,7 @@ describe('GlobalSearchTopbar', () => {
preloadStoredFrequentItems: jest.fn(),
};
- const createComponent = (initialState, props, stubs) => {
+ const createComponent = (initialState = {}, defaultBranchName = '', stubs = {}) => {
const store = new Vuex.Store({
state: {
query: MOCK_QUERY,
@@ -33,7 +36,7 @@ describe('GlobalSearchTopbar', () => {
wrapper = shallowMount(GlobalSearchTopbar, {
store,
- propsData: props,
+ propsData: { defaultBranchName },
stubs,
});
};
@@ -76,80 +79,82 @@ describe('GlobalSearchTopbar', () => {
});
});
- describe('syntax option feature', () => {
- describe('template', () => {
- beforeEach(() => {
- createComponent(
- { query: { repository_ref: '' } },
- { elasticsearchEnabled: true, defaultBranchName: '' },
- );
- });
+ describe.each`
+ searchType | showSyntaxOptions
+ ${'basic'} | ${false}
+ ${'advanced'} | ${true}
+ ${'zoekt'} | ${true}
+ `('syntax options drawer with searchType: $searchType', ({ searchType, showSyntaxOptions }) => {
+ beforeEach(() => {
+ createComponent({ query: { repository_ref: '' }, searchType });
+ });
- it('renders button correctly', () => {
- expect(findSyntaxOptionButton().exists()).toBe(true);
- });
+ it('renders button correctly', () => {
+ expect(findSyntaxOptionButton().exists()).toBe(showSyntaxOptions);
+ });
- it('renders drawer correctly', () => {
- expect(findSyntaxOptionDrawer().exists()).toBe(true);
- expect(findSyntaxOptionDrawer().attributes('documentpath')).toBe(SYNTAX_OPTIONS_DOCUMENT);
- });
+ it('renders drawer correctly', () => {
+ expect(findSyntaxOptionDrawer().exists()).toBe(showSyntaxOptions);
+ });
+ });
+
+ describe.each`
+ searchType | documentPath
+ ${'advanced'} | ${SYNTAX_OPTIONS_ADVANCED_DOCUMENT}
+ ${'zoekt'} | ${SYNTAX_OPTIONS_ZOEKT_DOCUMENT}
+ `('syntax options drawer with searchType: $searchType', ({ searchType, documentPath }) => {
+ beforeEach(() => {
+ createComponent({ query: { repository_ref: '' }, searchType });
+ });
- it('dispatched correct click action', () => {
- const drawerToggleSpy = jest.fn();
-
- createComponent(
- { query: { repository_ref: '' } },
- { elasticsearchEnabled: true, defaultBranchName: '' },
- {
- MarkdownDrawer: stubComponent(MarkdownDrawer, {
- methods: { toggleDrawer: drawerToggleSpy },
- }),
- },
- );
-
- findSyntaxOptionButton().vm.$emit('click');
- expect(drawerToggleSpy).toHaveBeenCalled();
+ it('renders drawer with correct document', () => {
+ expect(findSyntaxOptionDrawer()?.attributes('documentpath')).toBe(documentPath);
+ });
+ });
+
+ describe('actions', () => {
+ it('dispatches correct click action', () => {
+ const drawerToggleSpy = jest.fn();
+
+ createComponent({ query: { repository_ref: '' }, searchType: 'advanced' }, '', {
+ MarkdownDrawer: stubComponent(MarkdownDrawer, {
+ methods: { toggleDrawer: drawerToggleSpy },
+ }),
});
+
+ findSyntaxOptionButton().vm.$emit('click');
+ expect(drawerToggleSpy).toHaveBeenCalled();
});
+ });
- describe.each`
- query | propsData | hasSyntaxOptions
- ${null} | ${{ elasticsearchEnabled: false, defaultBranchName: '' }} | ${false}
- ${{ query: { repository_ref: '' } }} | ${{ elasticsearchEnabled: false, defaultBranchName: '' }} | ${false}
- ${{ query: { repository_ref: 'master' } }} | ${{ elasticsearchEnabled: false, defaultBranchName: 'master' }} | ${false}
- ${{ query: { repository_ref: 'master' } }} | ${{ elasticsearchEnabled: true, defaultBranchName: '' }} | ${false}
- ${{ query: { repository_ref: '' } }} | ${{ elasticsearchEnabled: true, defaultBranchName: 'master' }} | ${true}
- ${{ query: { repository_ref: '' } }} | ${{ elasticsearchEnabled: true, defaultBranchName: '' }} | ${true}
- ${{ query: { repository_ref: 'master' } }} | ${{ elasticsearchEnabled: true, defaultBranchName: 'master' }} | ${true}
- `(
- 'renders the syntax option based on component state',
- ({ query, propsData, hasSyntaxOptions }) => {
- beforeEach(() => {
- createComponent(query, { ...propsData });
- });
+ describe.each`
+ state | defaultBranchName | hasSyntaxOptions
+ ${{ query: { repository_ref: '' }, searchType: 'basic' }} | ${'master'} | ${false}
+ ${{ query: { repository_ref: 'v0.1' }, searchType: 'basic' }} | ${''} | ${false}
+ ${{ query: { repository_ref: 'master' }, searchType: 'basic' }} | ${'master'} | ${false}
+ ${{ query: { repository_ref: 'master' }, searchType: 'advanced' }} | ${''} | ${false}
+ ${{ query: { repository_ref: '' }, searchType: 'advanced' }} | ${'master'} | ${true}
+ ${{ query: { repository_ref: 'v0.1' }, searchType: 'advanced' }} | ${''} | ${false}
+ ${{ query: { repository_ref: 'master' }, searchType: 'advanced' }} | ${'master'} | ${true}
+ ${{ query: { repository_ref: 'master' }, searchType: 'zoekt' }} | ${'master'} | ${true}
+ `(
+ `the syntax option based on component state`,
+ ({ state, defaultBranchName, hasSyntaxOptions }) => {
+ beforeEach(() => {
+ createComponent({ ...state }, defaultBranchName);
+ });
- it(`does${
- hasSyntaxOptions ? '' : ' not'
- } have syntax option button when repository_ref: '${
- query?.query?.repository_ref
- }', elasticsearchEnabled: ${propsData.elasticsearchEnabled}, defaultBranchName: '${
- propsData.defaultBranchName
- }'`, () => {
+ describe(`repository: ${state.query.repository_ref}, searchType: ${state.searchType}`, () => {
+ it(`renders button correctly`, () => {
expect(findSyntaxOptionButton().exists()).toBe(hasSyntaxOptions);
});
- it(`does${
- hasSyntaxOptions ? '' : ' not'
- } have syntax option drawer when repository_ref: '${
- query?.query?.repository_ref
- }', elasticsearchEnabled: ${propsData.elasticsearchEnabled}, defaultBranchName: '${
- propsData.defaultBranchName
- }'`, () => {
+ it(`renders correctly drawer when branch name is ${state.query.repository_ref}`, () => {
expect(findSyntaxOptionDrawer().exists()).toBe(hasSyntaxOptions);
});
- },
- );
- });
+ });
+ },
+ );
});
describe('actions', () => {
diff --git a/spec/frontend/sentry/init_sentry_spec.js b/spec/frontend/sentry/init_sentry_spec.js
index e31068b935b..fb0dba35759 100644
--- a/spec/frontend/sentry/init_sentry_spec.js
+++ b/spec/frontend/sentry/init_sentry_spec.js
@@ -3,11 +3,10 @@ import {
defaultStackParser,
makeFetchTransport,
defaultIntegrations,
+ BrowserTracing,
// exports
captureException,
- captureMessage,
- withScope,
SDK_VERSION,
} from 'sentrybrowser';
import * as Sentry from 'sentrybrowser';
@@ -96,11 +95,17 @@ describe('SentryConfig', () => {
transport: makeFetchTransport,
stackParser: defaultStackParser,
- integrations: defaultIntegrations,
+ integrations: [...defaultIntegrations, expect.any(BrowserTracing)],
}),
);
});
+ it('Uses data-page to set BrowserTracing transaction name', () => {
+ const context = BrowserTracing.mock.calls[0][0].beforeNavigate();
+
+ expect(context).toMatchObject({ name: mockPage });
+ });
+
it('binds the BrowserClient to the hub', () => {
expect(mockBindClient).toHaveBeenCalledTimes(1);
expect(mockBindClient).toHaveBeenCalledWith(expect.any(BrowserClient));
@@ -126,8 +131,6 @@ describe('SentryConfig', () => {
// eslint-disable-next-line no-underscore-dangle
expect(window._Sentry).toEqual({
captureException,
- captureMessage,
- withScope,
SDK_VERSION,
});
});
@@ -173,5 +176,27 @@ describe('SentryConfig', () => {
expect(window._Sentry).toBe(undefined);
});
});
+
+ describe('when data-page is not defined in the body', () => {
+ beforeEach(() => {
+ delete document.body.dataset.page;
+ initSentry();
+ });
+
+ it('calls Sentry.setTags with gon values', () => {
+ expect(mockSetTags).toHaveBeenCalledTimes(1);
+ expect(mockSetTags).toHaveBeenCalledWith(
+ expect.objectContaining({
+ page: undefined,
+ }),
+ );
+ });
+
+ it('Uses location.path to set BrowserTracing transaction name', () => {
+ const context = BrowserTracing.mock.calls[0][0].beforeNavigate({ op: 'pageload' });
+
+ expect(context).toEqual({ op: 'pageload', name: window.location.pathname });
+ });
+ });
});
});
diff --git a/spec/frontend/sentry/sentry_browser_wrapper_spec.js b/spec/frontend/sentry/sentry_browser_wrapper_spec.js
index 55354eceb8d..d98286e1371 100644
--- a/spec/frontend/sentry/sentry_browser_wrapper_spec.js
+++ b/spec/frontend/sentry/sentry_browser_wrapper_spec.js
@@ -1,8 +1,6 @@
import * as Sentry from '~/sentry/sentry_browser_wrapper';
const mockError = new Error('error!');
-const mockMsg = 'msg!';
-const mockFn = () => {};
describe('SentryBrowserWrapper', () => {
afterEach(() => {
@@ -14,27 +12,19 @@ describe('SentryBrowserWrapper', () => {
it('methods fail silently', () => {
expect(() => {
Sentry.captureException(mockError);
- Sentry.captureMessage(mockMsg);
- Sentry.withScope(mockFn);
}).not.toThrow();
});
});
describe('when _Sentry is defined', () => {
let mockCaptureException;
- let mockCaptureMessage;
- let mockWithScope;
beforeEach(() => {
mockCaptureException = jest.fn();
- mockCaptureMessage = jest.fn();
- mockWithScope = jest.fn();
// eslint-disable-next-line no-underscore-dangle
window._Sentry = {
captureException: mockCaptureException,
- captureMessage: mockCaptureMessage,
- withScope: mockWithScope,
};
});
@@ -43,17 +33,5 @@ describe('SentryBrowserWrapper', () => {
expect(mockCaptureException).toHaveBeenCalledWith(mockError);
});
-
- it('captureMessage is called', () => {
- Sentry.captureMessage(mockMsg);
-
- expect(mockCaptureMessage).toHaveBeenCalledWith(mockMsg);
- });
-
- it('withScope is called', () => {
- Sentry.withScope(mockFn);
-
- expect(mockWithScope).toHaveBeenCalledWith(mockFn);
- });
});
});
diff --git a/spec/frontend/sidebar/components/todo_toggle/sidebar_todo_widget_spec.js b/spec/frontend/sidebar/components/todo_toggle/sidebar_todo_widget_spec.js
index 39b480b295c..b2477e9b41c 100644
--- a/spec/frontend/sidebar/components/todo_toggle/sidebar_todo_widget_spec.js
+++ b/spec/frontend/sidebar/components/todo_toggle/sidebar_todo_widget_spec.js
@@ -22,6 +22,7 @@ describe('Sidebar Todo Widget', () => {
const createComponent = ({
todosQueryHandler = jest.fn().mockResolvedValue(noTodosResponse),
+ provide = {},
} = {}) => {
fakeApollo = createMockApollo([[epicTodoQuery, todosQueryHandler]]);
@@ -30,6 +31,7 @@ describe('Sidebar Todo Widget', () => {
provide: {
canUpdate: true,
isClassicSidebar: true,
+ ...provide,
},
propsData: {
fullPath: 'group',
@@ -122,4 +124,23 @@ describe('Sidebar Todo Widget', () => {
expect(wrapper.emitted('todoUpdated')).toEqual([[false]]);
});
});
+
+ describe('when the query is pending', () => {
+ it('is in the loading state', () => {
+ createComponent();
+
+ expect(findTodoButton().attributes('loading')).toBe('true');
+ });
+
+ it('is not in the loading state if notificationsTodosButtons and movedMrSidebar feature flags are enabled', () => {
+ createComponent({
+ provide: {
+ glFeatures: { notificationsTodosButtons: true, movedMrSidebar: true },
+ },
+ });
+
+ expect(findTodoButton().attributes('loading')).toBeUndefined();
+ expect(findTodoButton().attributes('disabled')).toBe('true');
+ });
+ });
});
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
index 1c60c3af310..6414ab6dfba 100644
--- a/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
@@ -3,11 +3,11 @@
exports[`Snippet Blob Edit component with loaded blob matches snapshot 1`] = `
<div
class="file-holder snippet"
- data-qa-selector="file_holder_container"
+ data-testid="file-holder-container"
>
<blob-header-edit-stub
candelete="true"
- data-qa-selector="file_name_field"
+ data-testid="file-name-field"
id="reference-0"
showdelete="true"
value="foo/bar/test.md"
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
index 5ed3b520b70..92511acc4f8 100644
--- a/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
@@ -17,7 +17,7 @@ exports[`Snippet Description Edit component rendering matches the snapshot 1`] =
>
<gl-form-input-stub
class="form-control"
- data-qa-selector="description_placeholder"
+ data-testid="description-placeholder"
placeholder="Describe what your snippet does or how to use it…"
/>
</div>
@@ -46,8 +46,8 @@ exports[`Snippet Description Edit component rendering matches the snapshot 1`] =
<textarea
aria-label="Description"
class="js-autosize js-gfm-input js-gfm-input-initialized markdown-area note-textarea"
- data-qa-selector="snippet_description_field"
data-supports-quick-actions="false"
+ data-testid="snippet-description-field"
dir="auto"
id="reference-0"
placeholder="Write a comment or drag your files here…"
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_description_view_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_description_view_spec.js.snap
index 2b2335036f6..7c5fbf4cfb7 100644
--- a/spec/frontend/snippets/components/__snapshots__/snippet_description_view_spec.js.snap
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_description_view_spec.js.snap
@@ -3,7 +3,7 @@
exports[`Snippet Description component matches the snapshot 1`] = `
<markdown-field-view-stub
class="snippet-description"
- data-qa-selector="snippet_description_content"
+ data-testid="snippet-description-content"
>
<div
class="js-snippet-description md"
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_visibility_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_visibility_edit_spec.js.snap
index 3274f41e4af..ab96d1a3653 100644
--- a/spec/frontend/snippets/components/__snapshots__/snippet_visibility_edit_spec.js.snap
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_visibility_edit_spec.js.snap
@@ -44,8 +44,8 @@ exports[`Snippet Visibility Edit component rendering matches the snapshot 1`] =
/>
<span
class="font-weight-bold js-visibility-option ml-1"
- data-qa-selector="visibility_content"
data-qa-visibility="Private"
+ data-testid="visibility-content"
>
Private
</span>
@@ -64,8 +64,8 @@ exports[`Snippet Visibility Edit component rendering matches the snapshot 1`] =
/>
<span
class="font-weight-bold js-visibility-option ml-1"
- data-qa-selector="visibility_content"
data-qa-visibility="Internal"
+ data-testid="visibility-content"
>
Internal
</span>
@@ -84,8 +84,8 @@ exports[`Snippet Visibility Edit component rendering matches the snapshot 1`] =
/>
<span
class="font-weight-bold js-visibility-option ml-1"
- data-qa-selector="visibility_content"
data-qa-visibility="Public"
+ data-testid="visibility-content"
>
Public
</span>
diff --git a/spec/frontend/snippets/components/edit_spec.js b/spec/frontend/snippets/components/edit_spec.js
index 17862953920..5fbc16ff430 100644
--- a/spec/frontend/snippets/components/edit_spec.js
+++ b/spec/frontend/snippets/components/edit_spec.js
@@ -117,7 +117,8 @@ describe('Snippet Edit app', () => {
.map((path) => `<input name="files[]" value="${path}">`)
.join('');
};
- const setTitle = (val) => wrapper.findByTestId('snippet-title-input').vm.$emit('input', val);
+ const setTitle = (val) =>
+ wrapper.findByTestId('snippet-title-input-field').vm.$emit('input', val);
const setDescription = (val) =>
wrapper.findComponent(SnippetDescriptionEdit).vm.$emit('input', val);
diff --git a/spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js b/spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js
index cb11e98cd35..fab65434c3a 100644
--- a/spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js
+++ b/spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js
@@ -40,7 +40,7 @@ describe('snippets/components/snippet_blob_actions_edit', () => {
classes: x.classes(),
}));
const findFirstBlobEdit = () => findBlobEdits().at(0);
- const findAddButton = () => wrapper.find('[data-testid="add_button"]');
+ const findAddButton = () => wrapper.find('[data-testid="add-button"]');
const findLimitationsText = () => wrapper.find('[data-testid="limitations_text"]');
const getLastActions = () => {
const events = wrapper.emitted().actions;
diff --git a/spec/frontend/snippets/components/snippet_header_spec.js b/spec/frontend/snippets/components/snippet_header_spec.js
index 4bf64bfd3cd..3932675aa52 100644
--- a/spec/frontend/snippets/components/snippet_header_spec.js
+++ b/spec/frontend/snippets/components/snippet_header_spec.js
@@ -331,7 +331,7 @@ describe('Snippet header component', () => {
expect(findDeleteModal().props().visible).toBe(true);
// Click delete button in delete modal
- document.querySelector('[data-testid="delete-snippet"').click();
+ document.querySelector('[data-testid="delete-snippet-button"').click();
await waitForPromises();
};
diff --git a/spec/frontend/super_sidebar/components/create_menu_spec.js b/spec/frontend/super_sidebar/components/create_menu_spec.js
index b967fb18a39..ffbc789d220 100644
--- a/spec/frontend/super_sidebar/components/create_menu_spec.js
+++ b/spec/frontend/super_sidebar/components/create_menu_spec.js
@@ -47,7 +47,7 @@ describe('CreateMenu component', () => {
createWrapper();
expect(findGlDisclosureDropdown().props('dropdownOffset')).toEqual({
- crossAxis: -179,
+ crossAxis: -177,
mainAxis: 4,
});
});
@@ -98,7 +98,7 @@ describe('CreateMenu component', () => {
createWrapper({ provide: { isImpersonating: true } });
expect(findGlDisclosureDropdown().props('dropdownOffset')).toEqual({
- crossAxis: -147,
+ crossAxis: -143,
mainAxis: 4,
});
});
diff --git a/spec/frontend/super_sidebar/components/help_center_spec.js b/spec/frontend/super_sidebar/components/help_center_spec.js
index c92f8a68678..39537b65fa5 100644
--- a/spec/frontend/super_sidebar/components/help_center_spec.js
+++ b/spec/frontend/super_sidebar/components/help_center_spec.js
@@ -104,7 +104,7 @@ describe('HelpCenter component', () => {
createWrapper({ ...sidebarData, show_tanuki_bot: true });
});
- it('shows Ask GitLab Duo with the help items', () => {
+ it('shows GitLab Duo Chat with the help items', () => {
expect(findDropdownGroup(0).props('group').items).toEqual([
expect.objectContaining({
icon: 'tanuki-ai',
@@ -115,9 +115,9 @@ describe('HelpCenter component', () => {
]);
});
- describe('when Ask GitLab Duo button is clicked', () => {
+ describe('when GitLab Duo Chat button is clicked', () => {
beforeEach(() => {
- findButton('Ask GitLab Duo').click();
+ findButton('GitLab Duo Chat').click();
});
it('sets helpCenterState.showTanukiBotChatDrawer to true', () => {
diff --git a/spec/frontend/super_sidebar/components/nav_item_spec.js b/spec/frontend/super_sidebar/components/nav_item_spec.js
index 89d774c4b43..e6de9b1de22 100644
--- a/spec/frontend/super_sidebar/components/nav_item_spec.js
+++ b/spec/frontend/super_sidebar/components/nav_item_spec.js
@@ -10,6 +10,7 @@ import {
TRACKING_UNKNOWN_ID,
TRACKING_UNKNOWN_PANEL,
} from '~/super_sidebar/constants';
+import eventHub from '~/super_sidebar/event_hub';
describe('NavItem component', () => {
let wrapper;
@@ -49,7 +50,7 @@ describe('NavItem component', () => {
it.each([0, 5, 3.4, 'foo', '10%'])('item with pill_data `%p` renders a pill', (pillCount) => {
createWrapper({ item: { title: 'Foo', pill_count: pillCount } });
- expect(findPill().text()).toEqual(pillCount.toString());
+ expect(findPill().text()).toBe(pillCount.toString());
});
it.each([null, undefined, false, true, '', NaN, Number.POSITIVE_INFINITY])(
@@ -57,9 +58,49 @@ describe('NavItem component', () => {
(pillCount) => {
createWrapper({ item: { title: 'Foo', pill_count: pillCount } });
- expect(findPill().exists()).toEqual(false);
+ expect(findPill().exists()).toBe(false);
},
);
+
+ describe('updating pill value', () => {
+ const initialPillValue = '20%';
+ const updatedPillValue = '50%';
+ const itemIdForUpdate = '_some_item_id_';
+ const triggerPillValueUpdate = async ({
+ value = updatedPillValue,
+ itemId = itemIdForUpdate,
+ } = {}) => {
+ eventHub.$emit('updatePillValue', { value, itemId });
+ await nextTick();
+ };
+
+ it('updates the pill count', async () => {
+ createWrapper({ item: { id: itemIdForUpdate, pill_count: initialPillValue } });
+
+ await triggerPillValueUpdate();
+
+ expect(findPill().text()).toBe(updatedPillValue);
+ });
+
+ it('does not update the pill count for non matching item id', async () => {
+ createWrapper({ item: { id: '_non_matching_id_', pill_count: initialPillValue } });
+
+ await triggerPillValueUpdate();
+
+ expect(findPill().text()).toBe(initialPillValue);
+ });
+ });
+ });
+
+ describe('destroyed', () => {
+ it('should unbind event listeners on eventHub', async () => {
+ jest.spyOn(eventHub, '$off');
+
+ createWrapper({ item: {} });
+ await wrapper.destroy();
+
+ expect(eventHub.$off).toHaveBeenCalledWith('updatePillValue', expect.any(Function));
+ });
});
describe('pins', () => {
diff --git a/spec/frontend/super_sidebar/components/super_sidebar_spec.js b/spec/frontend/super_sidebar/components/super_sidebar_spec.js
index 1371f8f00a7..92736b99e14 100644
--- a/spec/frontend/super_sidebar/components/super_sidebar_spec.js
+++ b/spec/frontend/super_sidebar/components/super_sidebar_spec.js
@@ -45,6 +45,7 @@ const peekHintClass = 'super-sidebar-peek-hint';
describe('SuperSidebar component', () => {
let wrapper;
+ const findSkipToLink = () => wrapper.findByTestId('super-sidebar-skip-to');
const findSidebar = () => wrapper.findByTestId('super-sidebar');
const findUserBar = () => wrapper.findComponent(UserBar);
const findNavContainer = () => wrapper.findByTestId('nav-container');
@@ -89,6 +90,24 @@ describe('SuperSidebar component', () => {
});
describe('default', () => {
+ it('renders skip to main content link when logged in', () => {
+ createWrapper();
+ expect(findSkipToLink().attributes('href')).toBe('#content-body');
+ });
+
+ it('does not render skip to main content link when logged out', () => {
+ createWrapper({ sidebarData: { is_logged_in: false } });
+ expect(findSkipToLink().exists()).toBe(false);
+ });
+
+ it('has accessible role and name', () => {
+ createWrapper();
+ const nav = wrapper.findByRole('navigation');
+ const heading = wrapper.findByText('Primary navigation');
+ expect(nav.attributes('aria-labelledby')).toBe('super-sidebar-heading');
+ expect(heading.attributes('id')).toBe('super-sidebar-heading');
+ });
+
it('adds inert attribute when collapsed', () => {
createWrapper({ sidebarState: { isCollapsed: true } });
expect(findSidebar().attributes('inert')).toBe('inert');
@@ -295,11 +314,4 @@ describe('SuperSidebar component', () => {
expect(findTrialStatusPopover().exists()).toBe(true);
});
});
-
- describe('ARIA attributes', () => {
- it('adds aria-label attribute to nav element', () => {
- createWrapper();
- expect(wrapper.find('nav').attributes('aria-label')).toBe('Primary');
- });
- });
});
diff --git a/spec/frontend/super_sidebar/components/super_sidebar_toggle_spec.js b/spec/frontend/super_sidebar/components/super_sidebar_toggle_spec.js
index 1f2e5602d10..974eb529113 100644
--- a/spec/frontend/super_sidebar/components/super_sidebar_toggle_spec.js
+++ b/spec/frontend/super_sidebar/components/super_sidebar_toggle_spec.js
@@ -18,13 +18,8 @@ describe('SuperSidebarToggle component', () => {
const findButton = () => wrapper.findComponent(GlButton);
const getTooltip = () => getBinding(wrapper.element, 'gl-tooltip').value;
- const createWrapper = ({ props = {}, sidebarState = {} } = {}) => {
+ const createWrapper = (props = {}) => {
wrapper = shallowMountExtended(SuperSidebarToggle, {
- data() {
- return {
- ...sidebarState,
- };
- },
directives: {
GlTooltip: createMockDirective('gl-tooltip'),
},
@@ -40,18 +35,15 @@ describe('SuperSidebarToggle component', () => {
expect(findButton().attributes('aria-controls')).toBe('super-sidebar');
});
- it('has aria-expanded as true when expanded', () => {
- createWrapper();
+ it('has aria-expanded as true when type is collapse', () => {
+ createWrapper({ type: 'collapse' });
expect(findButton().attributes('aria-expanded')).toBe('true');
});
- it.each(['isCollapsed', 'isPeek', 'isHoverPeek'])(
- 'has aria-expanded as false when %s is `true`',
- (stateProp) => {
- createWrapper({ sidebarState: { [stateProp]: true } });
- expect(findButton().attributes('aria-expanded')).toBe('false');
- },
- );
+ it('has aria-expanded as false when type is expand', () => {
+ createWrapper();
+ expect(findButton().attributes('aria-expanded')).toBe('false');
+ });
it('has aria-label attribute', () => {
createWrapper();
@@ -60,13 +52,13 @@ describe('SuperSidebarToggle component', () => {
});
describe('tooltip', () => {
- it('displays collapse when expanded', () => {
- createWrapper();
+ it('displays "Hide sidebar" when type is collapse', () => {
+ createWrapper({ type: 'collapse' });
expect(getTooltip().title).toBe('Hide sidebar');
});
- it('displays expand when collapsed', () => {
- createWrapper({ sidebarState: { isCollapsed: true } });
+ it('displays "Keep sidebar visible" when type is expand', () => {
+ createWrapper();
expect(getTooltip().title).toBe('Keep sidebar visible');
});
});
@@ -88,13 +80,11 @@ describe('SuperSidebarToggle component', () => {
});
it('collapses the sidebar and focuses the other toggle', async () => {
- createWrapper();
+ createWrapper({ type: 'collapse' });
findButton().vm.$emit('click');
await nextTick();
expect(toggleSuperSidebarCollapsed).toHaveBeenCalledWith(true, true);
- expect(document.activeElement).toEqual(
- document.querySelector(`.${JS_TOGGLE_COLLAPSE_CLASS}`),
- );
+ expect(document.activeElement).toEqual(document.querySelector(`.${JS_TOGGLE_EXPAND_CLASS}`));
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'nav_hide', {
label: 'nav_toggle',
property: 'nav_sidebar',
@@ -102,11 +92,13 @@ describe('SuperSidebarToggle component', () => {
});
it('expands the sidebar and focuses the other toggle', async () => {
- createWrapper({ sidebarState: { isCollapsed: true } });
+ createWrapper();
findButton().vm.$emit('click');
await nextTick();
expect(toggleSuperSidebarCollapsed).toHaveBeenCalledWith(false, true);
- expect(document.activeElement).toEqual(document.querySelector(`.${JS_TOGGLE_EXPAND_CLASS}`));
+ expect(document.activeElement).toEqual(
+ document.querySelector(`.${JS_TOGGLE_COLLAPSE_CLASS}`),
+ );
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'nav_show', {
label: 'nav_toggle',
property: 'nav_sidebar',
diff --git a/spec/frontend/super_sidebar/components/user_name_group_spec.js b/spec/frontend/super_sidebar/components/user_menu_profile_item_spec.js
index a31ad93d143..9cf55154a59 100644
--- a/spec/frontend/super_sidebar/components/user_name_group_spec.js
+++ b/spec/frontend/super_sidebar/components/user_menu_profile_item_spec.js
@@ -1,12 +1,11 @@
-import { GlDisclosureDropdownGroup, GlDisclosureDropdownItem, GlTooltip } from '@gitlab/ui';
+import { GlDisclosureDropdownItem, GlTooltip } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import UserNameGroup from '~/super_sidebar/components/user_name_group.vue';
+import UserMenuProfileItem from '~/super_sidebar/components/user_menu_profile_item.vue';
import { userMenuMockData, userMenuMockStatus } from '../mock_data';
-describe('UserNameGroup component', () => {
+describe('UserMenuProfileItem component', () => {
let wrapper;
- const findGlDisclosureDropdownGroup = () => wrapper.findComponent(GlDisclosureDropdownGroup);
const findGlDisclosureDropdownItem = () => wrapper.findComponent(GlDisclosureDropdownItem);
const findGlTooltip = () => wrapper.findComponent(GlTooltip);
const findUserStatus = () => wrapper.findByTestId('user-menu-status');
@@ -14,7 +13,7 @@ describe('UserNameGroup component', () => {
const GlEmoji = { template: '<img/>' };
const createWrapper = (userDataChanges = {}) => {
- wrapper = shallowMountExtended(UserNameGroup, {
+ wrapper = shallowMountExtended(UserMenuProfileItem, {
propsData: {
user: {
...userMenuMockData,
@@ -32,10 +31,6 @@ describe('UserNameGroup component', () => {
createWrapper();
});
- it('renders the menu item in a separate group', () => {
- expect(findGlDisclosureDropdownGroup().exists()).toBe(true);
- });
-
it('renders menu item', () => {
expect(findGlDisclosureDropdownItem().exists()).toBe(true);
});
diff --git a/spec/frontend/super_sidebar/components/user_menu_spec.js b/spec/frontend/super_sidebar/components/user_menu_spec.js
index bcc3383bcd4..79a31492f3f 100644
--- a/spec/frontend/super_sidebar/components/user_menu_spec.js
+++ b/spec/frontend/super_sidebar/components/user_menu_spec.js
@@ -2,7 +2,7 @@ import { GlAvatar, GlDisclosureDropdown } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import { stubComponent } from 'helpers/stub_component';
import UserMenu from '~/super_sidebar/components/user_menu.vue';
-import UserNameGroup from '~/super_sidebar/components/user_name_group.vue';
+import UserMenuProfileItem from '~/super_sidebar/components/user_menu_profile_item.vue';
import NewNavToggle from '~/nav/components/new_nav_toggle.vue';
import invalidUrl from '~/lib/utils/invalid_url';
import { mockTracking } from 'helpers/tracking_helper';
@@ -56,7 +56,7 @@ describe('UserMenu component', () => {
createWrapper(null, null, { isImpersonating: true });
expect(findDropdown().props('dropdownOffset')).toEqual({
- crossAxis: -179,
+ crossAxis: -177,
mainAxis: 4,
});
});
@@ -86,9 +86,9 @@ describe('UserMenu component', () => {
describe('User Menu Group', () => {
it('renders and passes data to it', () => {
createWrapper();
- const userNameGroup = wrapper.findComponent(UserNameGroup);
- expect(userNameGroup.exists()).toBe(true);
- expect(userNameGroup.props('user')).toEqual(userMenuMockData);
+ const userMenuProfileItem = wrapper.findComponent(UserMenuProfileItem);
+ expect(userMenuProfileItem.exists()).toBe(true);
+ expect(userMenuProfileItem.props('user')).toEqual(userMenuMockData);
});
});
diff --git a/spec/frontend/super_sidebar/utils_spec.js b/spec/frontend/super_sidebar/utils_spec.js
index 85f45de06ba..85c13a4c892 100644
--- a/spec/frontend/super_sidebar/utils_spec.js
+++ b/spec/frontend/super_sidebar/utils_spec.js
@@ -11,7 +11,7 @@ import axios from '~/lib/utils/axios_utils';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import AccessorUtilities from '~/lib/utils/accessor';
import { FREQUENT_ITEMS, FIFTEEN_MINUTES_IN_MS } from '~/frequent_items/constants';
-import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
+import { HTTP_STATUS_OK, HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
import waitForPromises from 'helpers/wait_for_promises';
import { unsortedFrequentItems, sortedFrequentItems } from '../frequent_items/mock_data';
import { cachedFrequentProjects } from './mock_data';
@@ -58,7 +58,6 @@ describe('Super sidebar utils spec', () => {
const storageKey = `${username}/frequent-${context.namespace}`;
beforeEach(() => {
- gon.features = { serverSideFrecentNamespaces: true };
axiosMock = new MockAdapter(axios);
axiosMock.onPost(trackVisitsPath).reply(HTTP_STATUS_OK);
});
@@ -99,12 +98,12 @@ describe('Super sidebar utils spec', () => {
expect(axiosMock.history.post[0].url).toBe(trackVisitsPath);
});
- it('does not send a POST request when the serverSideFrecentNamespaces feature flag is disabled', async () => {
- gon.features = { serverSideFrecentNamespaces: false };
+ it('logs an error to Sentry when the request fails', async () => {
+ axiosMock.onPost(trackVisitsPath).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
trackContextAccess(username, context, trackVisitsPath);
await waitForPromises();
- expect(axiosMock.history.post).toHaveLength(0);
+ expect(Sentry.captureException).toHaveBeenCalled();
});
it('updates existing item frequency/access time if it was persisted to the local storage over 15 minutes ago', () => {
diff --git a/spec/frontend/tags/components/sort_dropdown_spec.js b/spec/frontend/tags/components/sort_dropdown_spec.js
index ebf79c93f9b..a0ba263e832 100644
--- a/spec/frontend/tags/components/sort_dropdown_spec.js
+++ b/spec/frontend/tags/components/sort_dropdown_spec.js
@@ -3,6 +3,7 @@ import { mount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import * as urlUtils from '~/lib/utils/url_utility';
import SortDropdown from '~/tags/components/sort_dropdown.vue';
+import setWindowLocation from 'helpers/set_window_location_helper';
describe('Tags sort dropdown', () => {
let wrapper;
@@ -45,20 +46,33 @@ describe('Tags sort dropdown', () => {
});
});
+ describe('when url contains a search param', () => {
+ const branchName = 'branch-1';
+
+ beforeEach(() => {
+ setWindowLocation(`/root/ci-cd-project-demo/-/branches?search=${branchName}`);
+ wrapper = createWrapper();
+ });
+
+ it('should set the default the input value to search param', () => {
+ expect(findSearchBox().props('value')).toBe(branchName);
+ });
+ });
+
describe('when submitting a search term', () => {
beforeEach(() => {
urlUtils.visitUrl = jest.fn();
-
wrapper = createWrapper();
});
it('should call visitUrl', () => {
+ const searchTerm = 'branch-1';
const searchBox = findSearchBox();
-
+ searchBox.vm.$emit('input', searchTerm);
searchBox.vm.$emit('submit');
expect(urlUtils.visitUrl).toHaveBeenCalledWith(
- '/root/ci-cd-project-demo/-/tags?sort=updated_desc',
+ '/root/ci-cd-project-demo/-/tags?search=branch-1&sort=updated_desc',
);
});
diff --git a/spec/frontend/tracking/internal_events_spec.js b/spec/frontend/tracking/internal_events_spec.js
index 6e773fde4db..44a048a4b5f 100644
--- a/spec/frontend/tracking/internal_events_spec.js
+++ b/spec/frontend/tracking/internal_events_spec.js
@@ -6,7 +6,6 @@ import {
GITLAB_INTERNAL_EVENT_CATEGORY,
SERVICE_PING_SCHEMA,
LOAD_INTERNAL_EVENTS_SELECTOR,
- USER_CONTEXT_SCHEMA,
} from '~/tracking/constants';
import * as utils from '~/tracking/utils';
import { Tracker } from '~/tracking/tracker';
@@ -26,18 +25,27 @@ Tracker.enabled = jest.fn();
const event = 'TestEvent';
describe('InternalEvents', () => {
- describe('track_event', () => {
- it('track_event calls API.trackInternalEvent with correct arguments', () => {
- InternalEvents.track_event(event);
+ describe('trackEvent', () => {
+ it('trackEvent calls API.trackInternalEvent with correct arguments', () => {
+ InternalEvents.trackEvent(event);
expect(API.trackInternalEvent).toHaveBeenCalledTimes(1);
expect(API.trackInternalEvent).toHaveBeenCalledWith(event);
});
- it('track_event calls tracking.event functions with correct arguments', () => {
+ it('trackEvent calls trackBrowserSDK with correct arguments', () => {
+ jest.spyOn(InternalEvents, 'trackBrowserSDK').mockImplementation(() => {});
+
+ InternalEvents.trackEvent(event);
+
+ expect(InternalEvents.trackBrowserSDK).toHaveBeenCalledTimes(1);
+ expect(InternalEvents.trackBrowserSDK).toHaveBeenCalledWith(event);
+ });
+
+ it('trackEvent calls tracking.event functions with correct arguments', () => {
const trackingSpy = mockTracking(GITLAB_INTERNAL_EVENT_CATEGORY, undefined, jest.spyOn);
- InternalEvents.track_event(event, { context: extraContext });
+ InternalEvents.trackEvent(event, { context: extraContext });
expect(trackingSpy).toHaveBeenCalledTimes(1);
expect(trackingSpy).toHaveBeenCalledWith(GITLAB_INTERNAL_EVENT_CATEGORY, event, {
@@ -66,10 +74,10 @@ describe('InternalEvents', () => {
`,
methods: {
handleButton1Click() {
- this.track_event(event);
+ this.trackEvent(event);
},
handleButton2Click() {
- this.track_event(event, extraContext);
+ this.trackEvent(event, extraContext);
},
},
mixins: [InternalEvents.mixin()],
@@ -79,8 +87,8 @@ describe('InternalEvents', () => {
wrapper = shallowMountExtended(Component);
});
- it('this.track_event function calls InternalEvent`s track function with an event', async () => {
- const trackEventSpy = jest.spyOn(InternalEvents, 'track_event');
+ it('this.trackEvent function calls InternalEvent`s track function with an event', async () => {
+ const trackEventSpy = jest.spyOn(InternalEvents, 'trackEvent');
await wrapper.findByTestId('button1').trigger('click');
@@ -88,9 +96,9 @@ describe('InternalEvents', () => {
expect(trackEventSpy).toHaveBeenCalledWith(event, {});
});
- it("this.track_event function calls InternalEvent's track function with an event and data", async () => {
+ it("this.trackEvent function calls InternalEvent's track function with an event and data", async () => {
const data = extraContext;
- const trackEventSpy = jest.spyOn(InternalEvents, 'track_event');
+ const trackEventSpy = jest.spyOn(InternalEvents, 'trackEvent');
await wrapper.findByTestId('button2').trigger('click');
@@ -147,7 +155,7 @@ describe('InternalEvents', () => {
describe('tracking', () => {
let trackEventSpy;
beforeEach(() => {
- trackEventSpy = jest.spyOn(InternalEvents, 'track_event');
+ trackEventSpy = jest.spyOn(InternalEvents, 'trackEvent');
});
it('should track event if action exists', () => {
@@ -181,16 +189,6 @@ describe('InternalEvents', () => {
environment: 'testing',
key: 'value',
};
- window.gl.snowplowStandardContext = {
- schema: 'iglu:com.gitlab/gitlab_standard',
- data: {
- environment: 'testing',
- key: 'value',
- google_analytics_id: '',
- source: 'gitlab-javascript',
- extra: {},
- },
- };
});
it('should not call setDocumentTitle or page methods when window.glClient is undefined', () => {
@@ -203,55 +201,48 @@ describe('InternalEvents', () => {
});
it('should call setDocumentTitle and page methods on window.glClient when it is defined', () => {
- const mockStandardContext = window.gl.snowplowStandardContext;
- const userContext = {
- schema: USER_CONTEXT_SCHEMA,
- data: mockStandardContext?.data,
- };
-
InternalEvents.initBrowserSDK();
expect(window.glClient.setDocumentTitle).toHaveBeenCalledWith('GitLab');
expect(window.glClient.page).toHaveBeenCalledWith({
title: 'GitLab',
- context: [userContext],
});
});
- it('should call page method with combined standard and experiment contexts', () => {
- const mockStandardContext = window.gl.snowplowStandardContext;
- const userContext = {
- schema: USER_CONTEXT_SCHEMA,
- data: mockStandardContext?.data,
- };
+ it('should call setDocumentTitle and page methods with default data when window.gl is undefined', () => {
+ window.gl = undefined;
InternalEvents.initBrowserSDK();
+ expect(window.glClient.setDocumentTitle).toHaveBeenCalledWith('GitLab');
expect(window.glClient.page).toHaveBeenCalledWith({
title: 'GitLab',
- context: [userContext],
});
});
+ });
- it('should call setDocumentTitle and page methods with default data when window.gl is undefined', () => {
- window.gl = undefined;
+ describe('trackBrowserSDK', () => {
+ beforeEach(() => {
+ window.glClient = {
+ track: jest.fn(),
+ };
+ });
- InternalEvents.initBrowserSDK();
+ it('should not call glClient.track if Tracker is not enabled', () => {
+ Tracker.enabled.mockReturnValue(false);
- expect(window.glClient.setDocumentTitle).toHaveBeenCalledWith('GitLab');
- expect(window.glClient.page).toHaveBeenCalledWith({
- title: 'GitLab',
- context: [
- {
- schema: USER_CONTEXT_SCHEMA,
- data: {
- google_analytics_id: '',
- source: 'gitlab-javascript',
- extra: {},
- },
- },
- ],
- });
+ InternalEvents.trackBrowserSDK(event);
+
+ expect(window.glClient.track).not.toHaveBeenCalled();
+ });
+
+ it('should call glClient.track with correct arguments if Tracker is enabled', () => {
+ Tracker.enabled.mockReturnValue(true);
+
+ InternalEvents.trackBrowserSDK(event);
+
+ expect(window.glClient.track).toHaveBeenCalledTimes(1);
+ expect(window.glClient.track).toHaveBeenCalledWith(event);
});
});
});
diff --git a/spec/frontend/users_select/test_helper.js b/spec/frontend/users_select/test_helper.js
index 5aae922fec2..0d8e3275aa5 100644
--- a/spec/frontend/users_select/test_helper.js
+++ b/spec/frontend/users_select/test_helper.js
@@ -147,6 +147,7 @@ export const createInputsModelExpectation = (users) =>
name: user.name,
show_status: user.show_status.toString(),
state: user.state,
+ locked: user.locked.toString(),
username: user.username,
web_url: user.web_url,
},
diff --git a/spec/frontend/vue_alerts_spec.js b/spec/frontend/vue_alerts_spec.js
index de2faa09438..be4a45639cf 100644
--- a/spec/frontend/vue_alerts_spec.js
+++ b/spec/frontend/vue_alerts_spec.js
@@ -1,4 +1,5 @@
import { nextTick } from 'vue';
+import { alertVariantOptions } from '@gitlab/ui/dist/utils/constants';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import { TEST_HOST } from 'helpers/test_constants';
import initVueAlerts from '~/vue_alerts';
@@ -55,7 +56,11 @@ describe('VueAlerts', () => {
primaryButtonText: alert.querySelector('.gl-alert-action').textContent.trim(),
primaryButtonLink: alert.querySelector('.gl-alert-action').href,
variant: [...alert.classList]
- .find((x) => x.match(/gl-alert-(?!not-dismissible)/))
+ .find((cssClass) => {
+ return Object.values(alertVariantOptions).some(
+ (variant) => cssClass === `gl-alert-${variant}`,
+ );
+ })
.replace('gl-alert-', ''),
});
diff --git a/spec/frontend/vue_merge_request_widget/components/checks/conflicts_spec.js b/spec/frontend/vue_merge_request_widget/components/checks/conflicts_spec.js
new file mode 100644
index 00000000000..57dcd2fd819
--- /dev/null
+++ b/spec/frontend/vue_merge_request_widget/components/checks/conflicts_spec.js
@@ -0,0 +1,90 @@
+import VueApollo from 'vue-apollo';
+import Vue from 'vue';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import ConflictsComponent from '~/vue_merge_request_widget/components/checks/conflicts.vue';
+import conflictsStateQuery from '~/vue_merge_request_widget/queries/states/conflicts.query.graphql';
+
+Vue.use(VueApollo);
+
+let wrapper;
+let apolloProvider;
+
+function factory({
+ result = 'passed',
+ canMerge = true,
+ pushToSourceBranch = true,
+ shouldBeRebased = false,
+ sourceBranchProtected = false,
+ mr = {},
+} = {}) {
+ apolloProvider = createMockApollo([
+ [
+ conflictsStateQuery,
+ jest.fn().mockResolvedValue({
+ data: {
+ project: {
+ id: 1,
+ mergeRequest: {
+ id: 1,
+ shouldBeRebased,
+ sourceBranchProtected,
+ userPermissions: { canMerge, pushToSourceBranch },
+ },
+ },
+ },
+ }),
+ ],
+ ]);
+
+ wrapper = mountExtended(ConflictsComponent, {
+ apolloProvider,
+ propsData: {
+ mr,
+ check: { result, failureReason: 'Conflicts message' },
+ },
+ });
+}
+
+describe('Merge request merge checks conflicts component', () => {
+ afterEach(() => {
+ apolloProvider = null;
+ });
+
+ it('renders failure reason text', () => {
+ factory();
+
+ expect(wrapper.text()).toEqual('Conflicts message');
+ });
+
+ it.each`
+ conflictResolutionPath | pushToSourceBranch | sourceBranchProtected | rendersConflictButton | rendersConflictButtonText
+ ${'https://gitlab.com'} | ${true} | ${false} | ${true} | ${'renders'}
+ ${undefined} | ${true} | ${false} | ${false} | ${'does not render'}
+ ${'https://gitlab.com'} | ${false} | ${false} | ${false} | ${'does not render'}
+ ${'https://gitlab.com'} | ${true} | ${true} | ${false} | ${'does not render'}
+ ${'https://gitlab.com'} | ${false} | ${false} | ${false} | ${'does not render'}
+ ${undefined} | ${false} | ${false} | ${false} | ${'does not render'}
+ `(
+ '$rendersConflictButtonText the conflict button for $conflictResolutionPath $pushToSourceBranch $sourceBranchProtected $rendersConflictButton',
+ async ({
+ conflictResolutionPath,
+ pushToSourceBranch,
+ sourceBranchProtected,
+ rendersConflictButton,
+ }) => {
+ factory({ mr: { conflictResolutionPath }, pushToSourceBranch, sourceBranchProtected });
+
+ await waitForPromises();
+
+ expect(wrapper.findAllByTestId('extension-actions-button').length).toBe(
+ rendersConflictButton ? 2 : 1,
+ );
+
+ expect(wrapper.findAllByTestId('extension-actions-button').at(-1).text()).toBe(
+ rendersConflictButton ? 'Resolve conflicts' : 'Resolve locally',
+ );
+ },
+ );
+});
diff --git a/spec/frontend/vue_merge_request_widget/components/checks/message_spec.js b/spec/frontend/vue_merge_request_widget/components/checks/message_spec.js
new file mode 100644
index 00000000000..4446eb7324b
--- /dev/null
+++ b/spec/frontend/vue_merge_request_widget/components/checks/message_spec.js
@@ -0,0 +1,30 @@
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import MessageComponent from '~/vue_merge_request_widget/components/checks/message.vue';
+import StatusIcon from '~/vue_merge_request_widget/components/widget/status_icon.vue';
+
+let wrapper;
+
+function factory(propsData = {}) {
+ wrapper = mountExtended(MessageComponent, {
+ propsData,
+ });
+}
+
+describe('Merge request merge checks message component', () => {
+ it('renders failure reason text', () => {
+ factory({ check: { result: 'passed', failureReason: 'Failed message' } });
+
+ expect(wrapper.text()).toEqual('Failed message');
+ });
+
+ it.each`
+ result | icon
+ ${'passed'} | ${'success'}
+ ${'failed'} | ${'failed'}
+ ${'allowed_to_fail'} | ${'neutral'}
+ `('renders $icon icon for $result result', ({ result, icon }) => {
+ factory({ check: { result, failureReason: 'Failed message' } });
+
+ expect(wrapper.findComponent(StatusIcon).props('iconName')).toBe(icon);
+ });
+});
diff --git a/spec/frontend/vue_merge_request_widget/components/merge_checks_spec.js b/spec/frontend/vue_merge_request_widget/components/merge_checks_spec.js
new file mode 100644
index 00000000000..c86fe6d0a10
--- /dev/null
+++ b/spec/frontend/vue_merge_request_widget/components/merge_checks_spec.js
@@ -0,0 +1,92 @@
+import VueApollo from 'vue-apollo';
+import Vue from 'vue';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import MergeChecksComponent from '~/vue_merge_request_widget/components/merge_checks.vue';
+import mergeChecksQuery from '~/vue_merge_request_widget/queries/merge_checks.query.graphql';
+import StatusIcon from '~/vue_merge_request_widget/components/extensions/status_icon.vue';
+
+Vue.use(VueApollo);
+
+let wrapper;
+let apolloProvider;
+
+function factory({ canMerge = true, mergeChecks = [] } = {}) {
+ apolloProvider = createMockApollo([
+ [
+ mergeChecksQuery,
+ jest.fn().mockResolvedValue({
+ data: {
+ project: {
+ id: 1,
+ mergeRequest: { id: 1, userPermissions: { canMerge }, mergeChecks },
+ },
+ },
+ }),
+ ],
+ ]);
+
+ wrapper = mountExtended(MergeChecksComponent, {
+ apolloProvider,
+ propsData: {
+ mr: {},
+ },
+ });
+}
+
+describe('Merge request merge checks component', () => {
+ afterEach(() => {
+ apolloProvider = null;
+ });
+
+ it('renders ready to merge text if user can merge', async () => {
+ factory({ canMerge: true });
+
+ await waitForPromises();
+
+ expect(wrapper.text()).toBe('Ready to merge!');
+ });
+
+ it('renders ready to merge by members text if user can not merge', async () => {
+ factory({ canMerge: false });
+
+ await waitForPromises();
+
+ expect(wrapper.text()).toBe('Ready to merge by members who can write to the target branch.');
+ });
+
+ it.each`
+ mergeChecks | text
+ ${[{ identifier: 'discussions', result: 'failed' }]} | ${'Merge blocked: 1 check failed'}
+ ${[{ identifier: 'discussions', result: 'failed' }, { identifier: 'rebase', result: 'failed' }]} | ${'Merge blocked: 2 checks failed'}
+ `('renders $text for $mergeChecks', async ({ mergeChecks, text }) => {
+ factory({ mergeChecks });
+
+ await waitForPromises();
+
+ expect(wrapper.text()).toBe(text);
+ });
+
+ it.each`
+ result | statusIcon
+ ${'failed'} | ${'failed'}
+ ${'passed'} | ${'success'}
+ `('renders $statusIcon for $result result', async ({ result, statusIcon }) => {
+ factory({ mergeChecks: [{ result, identifier: 'discussions' }] });
+
+ await waitForPromises();
+
+ expect(wrapper.findComponent(StatusIcon).props('iconName')).toBe(statusIcon);
+ });
+
+ it('expands collapsed area', async () => {
+ factory();
+
+ await waitForPromises();
+
+ await wrapper.findByTestId('widget-toggle').trigger('click');
+
+ expect(wrapper.findByTestId('merge-checks-full').exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/vue_merge_request_widget/components/widget/action_buttons_spec.js b/spec/frontend/vue_merge_request_widget/components/widget/action_buttons_spec.js
index adefce9060c..86e3922ec8b 100644
--- a/spec/frontend/vue_merge_request_widget/components/widget/action_buttons_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/widget/action_buttons_spec.js
@@ -1,4 +1,4 @@
-import { GlButton, GlDropdownItem } from '@gitlab/ui';
+import { GlButton, GlDisclosureDropdown } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Actions from '~/vue_merge_request_widget/components/widget/action_buttons.vue';
@@ -37,7 +37,7 @@ describe('~/vue_merge_request_widget/components/widget/action_buttons.vue', () =
tertiaryButtons: [{ text: 'hello world', href: 'https://gitlab.com', target: '_blank' }],
});
- expect(wrapper.findAllComponents(GlDropdownItem)).toHaveLength(1);
+ expect(wrapper.findAllComponents(GlDisclosureDropdown)).toHaveLength(1);
});
});
});
diff --git a/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js b/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js
index 09f58f17fd9..eb3d624dc04 100644
--- a/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js
+++ b/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js
@@ -21,7 +21,7 @@ import {
registerExtension,
registeredExtensions,
} from '~/vue_merge_request_widget/components/extensions';
-import { STATUS_CLOSED, STATUS_OPEN } from '~/issues/constants';
+import { STATUS_CLOSED, STATUS_OPEN, STATUS_MERGED } from '~/issues/constants';
import { STATE_QUERY_POLLING_INTERVAL_BACKOFF } from '~/vue_merge_request_widget/constants';
import { SUCCESS } from '~/vue_merge_request_widget/components/deployment/constants';
import eventHub from '~/vue_merge_request_widget/event_hub';
@@ -30,6 +30,7 @@ import Approvals from '~/vue_merge_request_widget/components/approvals/approvals
import ConflictsState from '~/vue_merge_request_widget/components/states/mr_widget_conflicts.vue';
import Preparing from '~/vue_merge_request_widget/components/states/mr_widget_preparing.vue';
import ShaMismatch from '~/vue_merge_request_widget/components/states/sha_mismatch.vue';
+import MergedState from '~/vue_merge_request_widget/components/states/mr_widget_merged.vue';
import WidgetContainer from '~/vue_merge_request_widget/components/widget/app.vue';
import WidgetSuggestPipeline from '~/vue_merge_request_widget/components/mr_widget_suggest_pipeline.vue';
import MrWidgetAlertMessage from '~/vue_merge_request_widget/components/mr_widget_alert_message.vue';
@@ -78,23 +79,13 @@ describe('MrWidgetOptions', () => {
const COLLABORATION_MESSAGE = 'Members who can merge are allowed to add commits';
- const setInitialData = (data) => {
- gl.mrWidgetData = { ...mockData, ...data };
- mock
- .onGet(mockData.merge_request_widget_path)
- .reply(() => [HTTP_STATUS_OK, { ...mockData, ...data }]);
- mock
- .onGet(mockData.merge_request_cached_widget_path)
- .reply(() => [HTTP_STATUS_OK, { ...mockData, ...data }]);
- };
-
const createComponent = ({
updatedMrData = {},
options = {},
data = {},
mountFn = shallowMountExtended,
} = {}) => {
- setInitialData(updatedMrData);
+ gl.mrWidgetData = { ...mockData, ...updatedMrData };
const mrData = { ...mockData, ...updatedMrData };
const mockedApprovalsSubscription = createMockApolloSubscription();
queryResponse = {
@@ -172,8 +163,10 @@ describe('MrWidgetOptions', () => {
const findWidgetContainer = () => wrapper.findComponent(WidgetContainer);
beforeEach(() => {
- gon.features = { asyncMrWidget: true };
+ gon.features = {};
mock = new MockAdapter(axios);
+ mock.onGet(mockData.merge_request_widget_path).reply(HTTP_STATUS_OK, {});
+ mock.onGet(mockData.merge_request_cached_widget_path).reply(HTTP_STATUS_OK, {});
});
afterEach(() => {
@@ -186,25 +179,13 @@ describe('MrWidgetOptions', () => {
describe('default', () => {
describe('computed', () => {
describe('componentName', () => {
- beforeEach(async () => {
- await createComponent();
- });
-
- // quarantine: https://gitlab.com/gitlab-org/gitlab/-/issues/409365
- // eslint-disable-next-line jest/no-disabled-tests
- it.skip.each`
- ${'merged'} | ${'mr-widget-merged'}
- `('should translate $state into $componentName', ({ state, componentName }) => {
- wrapper.vm.mr.state = state;
-
- expect(wrapper.vm.componentName).toEqual(componentName);
- });
-
it.each`
state | componentName | component
+ ${STATUS_MERGED} | ${'MergedState'} | ${MergedState}
${'conflicts'} | ${'ConflictsState'} | ${ConflictsState}
${'shaMismatch'} | ${'ShaMismatch'} | ${ShaMismatch}
`('should translate $state into $componentName component', async ({ state, component }) => {
+ await createComponent();
Vue.set(wrapper.vm.mr, 'state', state);
await nextTick();
expect(wrapper.findComponent(component).exists()).toBe(true);
@@ -336,13 +317,23 @@ describe('MrWidgetOptions', () => {
describe('methods', () => {
describe('checkStatus', () => {
+ const updatedMrData = { foo: 1 };
+ beforeEach(() => {
+ mock
+ .onGet(mockData.merge_request_widget_path)
+ .reply(HTTP_STATUS_OK, { ...mockData, ...updatedMrData });
+ mock
+ .onGet(mockData.merge_request_cached_widget_path)
+ .reply(HTTP_STATUS_OK, { ...mockData, ...updatedMrData });
+ });
+
it('checks the status of the pipelines', async () => {
const callback = jest.fn();
- await createComponent({ updatedMrData: { foo: 1 } });
+ await createComponent({ updatedMrData });
await waitForPromises();
eventHub.$emit('MRWidgetUpdateRequested', callback);
await waitForPromises();
- expect(callback).toHaveBeenCalledWith(expect.objectContaining({ foo: 1 }));
+ expect(callback).toHaveBeenCalledWith(expect.objectContaining(updatedMrData));
});
it('notifies the user of the pipeline status', async () => {
@@ -515,29 +506,42 @@ describe('MrWidgetOptions', () => {
});
describe('handleNotification', () => {
+ const updatedMrData = { gitlabLogo: 'logo.png' };
beforeEach(() => {
jest.spyOn(notify, 'notifyMe').mockImplementation(() => {});
});
- it('should call notifyMe', async () => {
- const logoFilename = 'logo.png';
- await createComponent({ updatedMrData: { gitlabLogo: logoFilename } });
- expect(notify.notifyMe).toHaveBeenCalledWith(
- `Pipeline passed`,
- `Pipeline passed for "${mockData.title}"`,
- logoFilename,
- );
- });
+ describe('when pipeline has passed', () => {
+ beforeEach(() => {
+ mock
+ .onGet(mockData.merge_request_widget_path)
+ .reply(HTTP_STATUS_OK, { ...mockData, ...updatedMrData });
+ mock
+ .onGet(mockData.merge_request_cached_widget_path)
+ .reply(HTTP_STATUS_OK, { ...mockData, ...updatedMrData });
+ });
- it('should not call notifyMe if the status has not changed', async () => {
- await createComponent({ updatedMrData: { ci_status: undefined } });
- await eventHub.$emit('MRWidgetUpdateRequested');
- expect(notify.notifyMe).not.toHaveBeenCalled();
+ it('should call notifyMe', async () => {
+ await createComponent({ updatedMrData });
+ expect(notify.notifyMe).toHaveBeenCalledWith(
+ `Pipeline passed`,
+ `Pipeline passed for "${mockData.title}"`,
+ updatedMrData.gitlabLogo,
+ );
+ });
});
- it('should not notify if no pipeline provided', async () => {
- await createComponent({ updatedMrData: { pipeline: undefined } });
- expect(notify.notifyMe).not.toHaveBeenCalled();
+ describe('when pipeline has not passed', () => {
+ it('should not call notifyMe if the status has not changed', async () => {
+ await createComponent({ updatedMrData: { ci_status: undefined } });
+ await eventHub.$emit('MRWidgetUpdateRequested');
+ expect(notify.notifyMe).not.toHaveBeenCalled();
+ });
+
+ it('should not notify if no pipeline provided', async () => {
+ await createComponent({ updatedMrData: { pipeline: undefined } });
+ expect(notify.notifyMe).not.toHaveBeenCalled();
+ });
});
});
diff --git a/spec/frontend/vue_shared/alert_details/alert_status_spec.js b/spec/frontend/vue_shared/alert_details/alert_status_spec.js
index 90d29f0bfd4..478df81a966 100644
--- a/spec/frontend/vue_shared/alert_details/alert_status_spec.js
+++ b/spec/frontend/vue_shared/alert_details/alert_status_spec.js
@@ -1,7 +1,7 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { GlCollapsibleListbox, GlListboxItem } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
import updateAlertStatusMutation from '~/graphql_shared//mutations/alert_status_update.mutation.graphql';
@@ -34,13 +34,13 @@ describe('AlertManagementStatus', () => {
},
});
- const findStatusDropdown = () => wrapper.findComponent(GlDropdown);
- const findFirstStatusOption = () => findStatusDropdown().findComponent(GlDropdownItem);
- const findAllStatusOptions = () => findStatusDropdown().findAllComponents(GlDropdownItem);
- const findStatusDropdownHeader = () => wrapper.findByTestId('dropdown-header');
+ const findStatusDropdown = () => wrapper.findComponent(GlCollapsibleListbox);
+ const findFirstStatusOption = () => findStatusDropdown().findComponent(GlListboxItem);
+ const findAllStatusOptions = () => findStatusDropdown().findAllComponents(GlListboxItem);
+ const findStatusDropdownHeader = () => wrapper.findByTestId('listbox-header-text');
const selectFirstStatusOption = () => {
- findFirstStatusOption().vm.$emit('click');
+ findFirstStatusOption().vm.$emit('select', new Event('click'));
return waitForPromises();
};
@@ -57,7 +57,7 @@ describe('AlertManagementStatus', () => {
provide = {},
handler = mockUpdatedMutationResult(),
} = {}) {
- wrapper = shallowMountExtended(AlertManagementStatus, {
+ wrapper = mountExtended(AlertManagementStatus, {
apolloProvider: createMockApolloProvider(handler),
propsData: {
alert: { ...mockAlert },
@@ -82,7 +82,7 @@ describe('AlertManagementStatus', () => {
it('shows the dropdown', () => {
mountComponent({ props: { isSidebar: true, isDropdownShowing: true } });
- expect(wrapper.classes()).toContain('show');
+ expect(wrapper.classes()).not.toContain('gl-display-none');
});
});
@@ -92,8 +92,7 @@ describe('AlertManagementStatus', () => {
});
it('calls `$apollo.mutate` with `updateAlertStatus` mutation and variables containing `iid`, `status`, & `projectPath`', async () => {
- findFirstStatusOption().vm.$emit('click');
- await waitForPromises();
+ await selectFirstStatusOption();
expect(requestHandler).toHaveBeenCalledWith({
iid,
@@ -194,9 +193,7 @@ describe('AlertManagementStatus', () => {
handler: mockUpdatedMutationResult({ nodes: mockAlerts }),
});
Tracking.event.mockClear();
- findFirstStatusOption().vm.$emit('click');
-
- await waitForPromises();
+ await selectFirstStatusOption();
const status = findFirstStatusOption().text();
const { category, action, label } = trackAlertStatusUpdateOptions;
diff --git a/spec/frontend/vue_shared/components/badges/__snapshots__/beta_badge_spec.js.snap b/spec/frontend/vue_shared/components/badges/__snapshots__/beta_badge_spec.js.snap
index 359aaacde0b..499a971d791 100644
--- a/spec/frontend/vue_shared/components/badges/__snapshots__/beta_badge_spec.js.snap
+++ b/spec/frontend/vue_shared/components/badges/__snapshots__/beta_badge_spec.js.snap
@@ -2,22 +2,15 @@
exports[`Beta badge component renders the badge 1`] = `
<div>
- <gl-badge-stub
- class="gl-cursor-pointer"
+ <a
+ class="badge badge-neutral badge-pill gl-badge gl-cursor-pointer md"
href="#"
- iconsize="md"
- size="md"
- variant="neutral"
+ target="_self"
>
Beta
- </gl-badge-stub>
- <gl-popover-stub
- cssclasses=""
- data-testid="beta-badge"
- showclosebutton="true"
- target="[Function]"
- title="What's Beta?"
- triggers="hover focus click"
+ </a>
+ <div
+ class="gl-popover"
>
<p>
A Beta feature is not production-ready, but is unlikely to change drastically before it's released. We encourage users to try Beta features and provide feedback.
@@ -43,6 +36,6 @@ exports[`Beta badge component renders the badge 1`] = `
Is complete or near completion.
</li>
</ul>
- </gl-popover-stub>
+ </div>
</div>
`;
diff --git a/spec/frontend/vue_shared/components/badges/__snapshots__/experiment_badge_spec.js.snap b/spec/frontend/vue_shared/components/badges/__snapshots__/experiment_badge_spec.js.snap
new file mode 100644
index 00000000000..4ad70338f3c
--- /dev/null
+++ b/spec/frontend/vue_shared/components/badges/__snapshots__/experiment_badge_spec.js.snap
@@ -0,0 +1,41 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Experiment badge component renders the badge 1`] = `
+<div>
+ <a
+ class="badge badge-neutral badge-pill gl-badge gl-cursor-pointer md"
+ href="#"
+ target="_self"
+ >
+ Experiment
+ </a>
+ <div
+ class="gl-popover"
+ >
+ <p>
+ An Experiment is a feature that's in the process of being developed. It's not production-ready. We encourage users to try Experimental features and provide feedback.
+ </p>
+ <p
+ class="gl-mb-0"
+ >
+ An Experiment:
+ </p>
+ <ul
+ class="gl-pl-4"
+ >
+ <li>
+ May be unstable.
+ </li>
+ <li>
+ Can cause data loss.
+ </li>
+ <li>
+ Has no support and might not be documented.
+ </li>
+ <li>
+ Can be removed at any time.
+ </li>
+ </ul>
+ </div>
+</div>
+`;
diff --git a/spec/frontend/vue_shared/components/badges/beta_badge_spec.js b/spec/frontend/vue_shared/components/badges/beta_badge_spec.js
index c930c6d5708..d826ca5c7c0 100644
--- a/spec/frontend/vue_shared/components/badges/beta_badge_spec.js
+++ b/spec/frontend/vue_shared/components/badges/beta_badge_spec.js
@@ -1,4 +1,4 @@
-import { shallowMount } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import { GlBadge } from '@gitlab/ui';
import BetaBadge from '~/vue_shared/components/badges/beta_badge.vue';
@@ -7,7 +7,7 @@ describe('Beta badge component', () => {
const findBadge = () => wrapper.findComponent(GlBadge);
const createWrapper = (props = {}) => {
- wrapper = shallowMount(BetaBadge, {
+ wrapper = mount(BetaBadge, {
propsData: { ...props },
});
};
diff --git a/spec/frontend/vue_shared/components/badges/experiment_badge_spec.js b/spec/frontend/vue_shared/components/badges/experiment_badge_spec.js
new file mode 100644
index 00000000000..3239578a173
--- /dev/null
+++ b/spec/frontend/vue_shared/components/badges/experiment_badge_spec.js
@@ -0,0 +1,32 @@
+import { mount } from '@vue/test-utils';
+import { GlBadge } from '@gitlab/ui';
+import ExperimentBadge from '~/vue_shared/components/badges/experiment_badge.vue';
+
+describe('Experiment badge component', () => {
+ let wrapper;
+
+ const findBadge = () => wrapper.findComponent(GlBadge);
+ const createWrapper = (props = {}) => {
+ wrapper = mount(ExperimentBadge, {
+ propsData: { ...props },
+ });
+ };
+
+ it('renders the badge', () => {
+ createWrapper();
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('passes default size to badge', () => {
+ createWrapper();
+
+ expect(findBadge().props('size')).toBe('md');
+ });
+
+ it('passes given size to badge', () => {
+ createWrapper({ size: 'sm' });
+
+ expect(findBadge().props('size')).toBe('sm');
+ });
+});
diff --git a/spec/frontend/vue_shared/components/badges/hover_badge_spec.js b/spec/frontend/vue_shared/components/badges/hover_badge_spec.js
new file mode 100644
index 00000000000..68f368215c0
--- /dev/null
+++ b/spec/frontend/vue_shared/components/badges/hover_badge_spec.js
@@ -0,0 +1,50 @@
+import { mount } from '@vue/test-utils';
+import { GlBadge, GlPopover } from '@gitlab/ui';
+import HoverBadge from '~/vue_shared/components/badges/hover_badge.vue';
+
+describe('Hover badge component', () => {
+ let wrapper;
+
+ const findBadge = () => wrapper.findComponent(GlBadge);
+ const findPopover = () => wrapper.findComponent(GlPopover);
+ const createWrapper = ({ props = {}, slots } = {}) => {
+ wrapper = mount(HoverBadge, {
+ propsData: {
+ label: 'Label',
+ title: 'Title',
+ ...props,
+ },
+ slots,
+ });
+ };
+
+ it('passes label to popover', () => {
+ createWrapper();
+
+ expect(findBadge().text()).toBe('Label');
+ });
+
+ it('passes title to popover', () => {
+ createWrapper();
+
+ expect(findPopover().props('title')).toBe('Title');
+ });
+
+ it('renders the default slot', () => {
+ createWrapper({ slots: { default: '<p>This is an awesome content</p>' } });
+
+ expect(findPopover().text()).toContain('This is an awesome content');
+ });
+
+ it('passes default size to badge', () => {
+ createWrapper();
+
+ expect(findBadge().props('size')).toBe('md');
+ });
+
+ it('passes given size to badge', () => {
+ createWrapper({ props: { size: 'sm' } });
+
+ expect(findBadge().props('size')).toBe('sm');
+ });
+});
diff --git a/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js b/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js
index eadcd452929..c1109f21b47 100644
--- a/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js
@@ -60,6 +60,7 @@ describe('Blob Rich Viewer component', () => {
expect(wrapper.text()).toContain('Line: 10');
expect(wrapper.text()).toContain('Line: 50');
expect(wrapper.emitted(CONTENT_LOADED_EVENT)).toHaveLength(1);
+ expect(handleLocationHash).toHaveBeenCalled();
expect(findMarkdownFieldView().props('isLoading')).toBe(false);
});
diff --git a/spec/frontend/vue_shared/components/ci_badge_link_spec.js b/spec/frontend/vue_shared/components/ci_badge_link_spec.js
index c74964c13f5..e1660225a5c 100644
--- a/spec/frontend/vue_shared/components/ci_badge_link_spec.js
+++ b/spec/frontend/vue_shared/components/ci_badge_link_spec.js
@@ -149,4 +149,10 @@ describe('CI Badge Link Component', () => {
expect(findBadge().props('size')).toBe('lg');
});
+
+ it('should have class `gl-px-2` when `showText` is false', () => {
+ createComponent({ status: statuses.success, size: 'md', showText: false });
+
+ expect(findBadge().classes()).toContain('gl-px-2');
+ });
});
diff --git a/spec/frontend/vue_shared/components/clone_dropdown/clone_dropdown_item_spec.js b/spec/frontend/vue_shared/components/clone_dropdown/clone_dropdown_item_spec.js
index e0dfa084f3e..341afa03f80 100644
--- a/spec/frontend/vue_shared/components/clone_dropdown/clone_dropdown_item_spec.js
+++ b/spec/frontend/vue_shared/components/clone_dropdown/clone_dropdown_item_spec.js
@@ -6,11 +6,11 @@ describe('Clone Dropdown Button', () => {
let wrapper;
const link = 'ssh://foo.bar';
const label = 'SSH';
- const qaSelector = 'some-selector';
+ const testId = 'some-selector';
const defaultPropsData = {
link,
label,
- qaSelector,
+ testId,
};
const findCopyButton = () => wrapper.findComponent(GlButton);
@@ -46,7 +46,7 @@ describe('Clone Dropdown Button', () => {
});
it('sets the qa selector', () => {
- expect(findCopyButton().attributes('data-qa-selector')).toBe(qaSelector);
+ expect(findCopyButton().attributes('data-testid')).toBe(testId);
});
});
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
index a22ad4c450e..7c9f3a3546a 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
@@ -97,6 +97,19 @@ export const projectMilestonesResponse = {
},
};
+export const projectUsersResponse = {
+ data: {
+ project: {
+ id: 'gid://gitlab/Project/1',
+ attributes: {
+ nodes: mockUsers,
+ __typename: 'UserConnection',
+ },
+ __typename: 'Project',
+ },
+ },
+};
+
export const mockCrmContacts = [
{
__typename: 'CustomerRelationsContact',
@@ -247,8 +260,8 @@ export const mockAuthorToken = {
symbol: '@',
token: UserToken,
operators: OPERATORS_IS,
- fetchPath: 'gitlab-org/gitlab-test',
- fetchUsers: Api.projectUsers.bind(Api),
+ fullPath: 'gitlab-org/gitlab-test',
+ isProject: true,
};
export const mockLabelToken = {
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
index 63eacaabd0c..72e3475df75 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
@@ -420,6 +420,12 @@ describe('BaseToken', () => {
expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
+ it('renders `footer` slot when present', () => {
+ wrapper = createComponent({ slots: { footer: "<div class='custom-footer' />" } });
+
+ expect(wrapper.find('.custom-footer').exists()).toBe(true);
+ });
+
describe('events', () => {
describe('when activeToken has been selected', () => {
beforeEach(() => {
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/user_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/user_token_spec.js
index e4ca7dcb19a..0229d00eb91 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/user_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/user_token_spec.js
@@ -6,16 +6,21 @@ import {
} from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
-import { nextTick } from 'vue';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
import axios from '~/lib/utils/axios_utils';
+import usersAutocompleteQuery from '~/graphql_shared/queries/users_autocomplete.query.graphql';
import { OPTIONS_NONE_ANY } from '~/vue_shared/components/filtered_search_bar/constants';
import UserToken from '~/vue_shared/components/filtered_search_bar/tokens/user_token.vue';
import BaseToken from '~/vue_shared/components/filtered_search_bar/tokens/base_token.vue';
-import { mockAuthorToken, mockUsers } from '../mock_data';
+import { mockAuthorToken, mockUsers, projectUsersResponse } from '../mock_data';
+
+Vue.use(VueApollo);
jest.mock('~/alert');
const defaultStubs = {
@@ -37,6 +42,9 @@ const mockPreloadedUsers = [
},
];
+const usersQueryHandler = jest.fn().mockResolvedValue(projectUsersResponse);
+const mockApollo = createMockApollo([[usersAutocompleteQuery, usersQueryHandler]]);
+
function createComponent(options = {}) {
const {
config = mockAuthorToken,
@@ -47,6 +55,7 @@ function createComponent(options = {}) {
listeners = {},
} = options;
return mount(UserToken, {
+ apolloProvider: mockApollo,
propsData: {
config,
value,
@@ -145,6 +154,33 @@ describe('UserToken', () => {
expect(findBaseToken().props('suggestionsLoading')).toBe(false);
});
});
+
+  describe('default - when fetchUsers function is not provided in config', () => {
+ beforeEach(() => {
+ wrapper = createComponent({});
+ return triggerFetchUsers();
+ });
+
+    it('calls usersAutocompleteQuery to fetch users', () => {
+ expect(usersQueryHandler).toHaveBeenCalledWith({
+ fullPath: mockAuthorToken.fullPath,
+ isProject: mockAuthorToken.isProject,
+ search: null,
+ });
+ });
+
+    it('calls usersAutocompleteQuery with search parameter when provided', async () => {
+ const searchTerm = 'foo';
+
+ await triggerFetchUsers(searchTerm);
+
+ expect(usersQueryHandler).toHaveBeenCalledWith({
+ fullPath: mockAuthorToken.fullPath,
+ isProject: mockAuthorToken.isProject,
+ search: searchTerm,
+ });
+ });
+ });
});
});
diff --git a/spec/frontend/vue_shared/components/form/input_copy_toggle_visibility_spec.js b/spec/frontend/vue_shared/components/form/input_copy_toggle_visibility_spec.js
index eee85ce4fd3..72a0eb98a07 100644
--- a/spec/frontend/vue_shared/components/form/input_copy_toggle_visibility_spec.js
+++ b/spec/frontend/vue_shared/components/form/input_copy_toggle_visibility_spec.js
@@ -363,13 +363,13 @@ describe('InputCopyToggleVisibility', () => {
it('passes no `size` prop', () => {
createComponent();
- expect(findFormInput().props('size')).toBe(null);
+ expect(findFormInput().props('width')).toBe(null);
});
it('passes `size` prop to the input', () => {
createComponent({ props: { size: 'md' } });
- expect(findFormInput().props('size')).toBe('md');
+ expect(findFormInput().props('width')).toBe('md');
});
});
diff --git a/spec/frontend/vue_shared/components/markdown/editor_mode_switcher_spec.js b/spec/frontend/vue_shared/components/markdown/editor_mode_switcher_spec.js
index 712e78458c6..57f54f7e7d3 100644
--- a/spec/frontend/vue_shared/components/markdown/editor_mode_switcher_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/editor_mode_switcher_spec.js
@@ -1,41 +1,22 @@
import { nextTick } from 'vue';
-import { GlButton, GlLink, GlPopover } from '@gitlab/ui';
+import { GlButton } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import EditorModeSwitcher from '~/vue_shared/components/markdown/editor_mode_switcher.vue';
-import { counter } from '~/vue_shared/components/markdown/utils';
-import UserCalloutDismisser from '~/vue_shared/components/user_callout_dismisser.vue';
-import { stubComponent } from 'helpers/stub_component';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
-jest.mock('~/vue_shared/components/markdown/utils', () => ({
- counter: jest.fn().mockReturnValue(0),
-}));
-
describe('vue_shared/component/markdown/editor_mode_switcher', () => {
let wrapper;
useLocalStorageSpy();
- const createComponent = ({
- value,
- userCalloutDismisserSlotProps = { dismiss: jest.fn() },
- } = {}) => {
+ const createComponent = ({ value } = {}) => {
wrapper = mount(EditorModeSwitcher, {
propsData: {
value,
},
- stubs: {
- UserCalloutDismisser: stubComponent(UserCalloutDismisser, {
- render() {
- return this.$scopedSlots.default(userCalloutDismisserSlotProps);
- },
- }),
- },
});
};
const findSwitcherButton = () => wrapper.findComponent(GlButton);
- const findUserCalloutDismisser = () => wrapper.findComponent(UserCalloutDismisser);
- const findCalloutPopover = () => wrapper.findComponent(GlPopover);
describe.each`
value | buttonText
@@ -54,62 +35,7 @@ describe('vue_shared/component/markdown/editor_mode_switcher', () => {
await nextTick();
findSwitcherButton().vm.$emit('click');
- expect(wrapper.emitted().switch).toEqual([[false]]);
- });
- });
-
- describe('rich text editor callout', () => {
- let dismiss;
-
- beforeEach(() => {
- dismiss = jest.fn();
- createComponent({ value: 'markdown', userCalloutDismisserSlotProps: { dismiss } });
- });
-
- it('does not skip the user_callout_dismisser query', () => {
- expect(findUserCalloutDismisser().props()).toMatchObject({
- skipQuery: false,
- featureName: 'rich_text_editor',
- });
- });
-
- it('mounts new rich text editor popover', () => {
- expect(findCalloutPopover().props()).toMatchObject({
- showCloseButton: '',
- triggers: 'manual',
- target: 'switch-to-rich-text-editor',
- });
- });
-
- it('dismisses the callout and emits "switch" event when popover close button is clicked', async () => {
- await findCalloutPopover().findComponent(GlLink).vm.$emit('click');
-
- expect(wrapper.emitted().switch).toEqual([[true]]);
- expect(dismiss).toHaveBeenCalled();
- });
-
- it('dismisses the callout when action button is clicked', () => {
- findSwitcherButton().vm.$emit('click');
-
- expect(dismiss).toHaveBeenCalled();
- });
-
- it('does not show the callout if rich text is already enabled', async () => {
- await wrapper.setProps({ value: 'richText' });
-
- expect(findCalloutPopover().props()).toMatchObject({
- show: false,
- });
- });
-
- it('does not show the callout if already displayed once on the page', () => {
- counter.mockReturnValue(1);
-
- createComponent({ value: 'markdown' });
-
- expect(findCalloutPopover().props()).toMatchObject({
- show: false,
- });
+ expect(wrapper.emitted().switch).toEqual([[]]);
});
});
});
diff --git a/spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js b/spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js
index c69b18bca88..b4c90fe49d1 100644
--- a/spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js
@@ -7,6 +7,8 @@ import {
EDITING_MODE_MARKDOWN_FIELD,
EDITING_MODE_CONTENT_EDITOR,
CLEAR_AUTOSAVE_ENTRY_EVENT,
+ CONTENT_EDITOR_READY_EVENT,
+ MARKDOWN_EDITOR_READY_EVENT,
} from '~/vue_shared/constants';
import markdownEditorEventHub from '~/vue_shared/components/markdown/eventhub';
import MarkdownEditor from '~/vue_shared/components/markdown/markdown_editor.vue';
@@ -83,22 +85,23 @@ describe('vue_shared/component/markdown/markdown_editor', () => {
const findLocalStorageSync = () => wrapper.findComponent(LocalStorageSync);
const findContentEditor = () => {
const result = wrapper.findComponent(ContentEditor);
-
// In Vue.js 3 there are nuances stubbing component with custom stub on mount
// So we try to search for stub also
return result.exists() ? result : wrapper.findComponent(ContentEditorStub);
};
- const enableContentEditor = async () => {
- findMarkdownField().vm.$emit('enableContentEditor');
- await nextTick();
- await waitForPromises();
+ const enableContentEditor = () => {
+ return new Promise((resolve) => {
+ markdownEditorEventHub.$once(CONTENT_EDITOR_READY_EVENT, resolve);
+ findMarkdownField().vm.$emit('enableContentEditor');
+ });
};
- const enableMarkdownEditor = async () => {
- findContentEditor().vm.$emit('enableMarkdownEditor');
- await nextTick();
- await waitForPromises();
+ const enableMarkdownEditor = () => {
+ return new Promise((resolve) => {
+ markdownEditorEventHub.$once(MARKDOWN_EDITOR_READY_EVENT, resolve);
+ findContentEditor().vm.$emit('enableMarkdownEditor');
+ });
};
beforeEach(() => {
@@ -128,9 +131,7 @@ describe('vue_shared/component/markdown/markdown_editor', () => {
});
});
- // quarantine: https://gitlab.com/gitlab-org/gitlab/-/issues/412618
- // eslint-disable-next-line jest/no-disabled-tests
- it.skip('passes render_quick_actions param to renderMarkdownPath if quick actions are enabled', async () => {
+ it('passes render_quick_actions param to renderMarkdownPath if quick actions are enabled', async () => {
buildWrapper({ propsData: { supportsQuickActions: true } });
await enableContentEditor();
@@ -139,9 +140,7 @@ describe('vue_shared/component/markdown/markdown_editor', () => {
expect(mock.history.post[0].url).toContain(`render_quick_actions=true`);
});
- // quarantine: https://gitlab.com/gitlab-org/gitlab/-/issues/411565
- // eslint-disable-next-line jest/no-disabled-tests
- it.skip('does not pass render_quick_actions param to renderMarkdownPath if quick actions are disabled', async () => {
+ it('does not pass render_quick_actions param to renderMarkdownPath if quick actions are disabled', async () => {
buildWrapper({ propsData: { supportsQuickActions: false } });
await enableContentEditor();
@@ -213,9 +212,7 @@ describe('vue_shared/component/markdown/markdown_editor', () => {
expect(findMarkdownField().find('textarea').attributes('disabled')).toBe(undefined);
});
- // quarantine: https://gitlab.com/gitlab-org/gitlab/-/issues/404734
- // eslint-disable-next-line jest/no-disabled-tests
- it.skip('disables content editor when disabled prop is true', async () => {
+ it('disables content editor when disabled prop is true', async () => {
buildWrapper({ propsData: { disabled: true } });
await enableContentEditor();
@@ -358,9 +355,7 @@ describe('vue_shared/component/markdown/markdown_editor', () => {
});
it(`emits ${EDITING_MODE_MARKDOWN_FIELD} event when enableMarkdownEditor emitted from content editor`, async () => {
- buildWrapper({
- stubs: { ContentEditor: ContentEditorStub },
- });
+ buildWrapper();
await enableContentEditor();
await enableMarkdownEditor();
@@ -494,12 +489,62 @@ describe('vue_shared/component/markdown/markdown_editor', () => {
expect(findContentEditor().props().autofocus).toBe(false);
});
- it('bubbles up keydown event', () => {
- const event = new Event('keydown');
+ describe('when keydown event is fired', () => {
+ let event;
+ beforeEach(() => {
+ event = new Event('keydown');
+ window.getSelection = jest.fn(() => ({
+ toString: jest.fn(() => 'test'),
+ removeAllRanges: jest.fn(),
+ }));
+ Object.assign(event, { preventDefault: jest.fn() });
+ });
+ it('bubbles up keydown event', () => {
+ findContentEditor().vm.$emit('keydown', event);
+
+ expect(wrapper.emitted('keydown')).toEqual([[event]]);
+ });
+
+ it('bubbles up keydown event for meta key with default behaviour intact', () => {
+ event.metaKey = true;
+ findContentEditor().vm.$emit('keydown', event);
- findContentEditor().vm.$emit('keydown', event);
+ expect(wrapper.emitted('keydown')).toEqual([[event]]);
+ expect(event.preventDefault).toHaveBeenCalledTimes(0);
+ });
+
+ it('bubbles up keydown event for meta + k key on selected text with default behaviour prevented', () => {
+ event.metaKey = true;
+ event.key = 'k';
+ findContentEditor().vm.$emit('keydown', event);
+
+ expect(wrapper.emitted('keydown')).toEqual([[event]]);
+ expect(event.preventDefault).toHaveBeenCalledTimes(1);
+ });
+
+ it('bubbles up keydown event for meta + k key without text selection with default behaviour prevented', () => {
+ event.metaKey = true;
+ event.key = 'k';
+ window.getSelection = jest.fn(() => ({
+ toString: jest.fn(() => ''),
+ removeAllRanges: jest.fn(),
+ }));
+
+ findContentEditor().vm.$emit('keydown', event);
- expect(wrapper.emitted('keydown')).toEqual([[event]]);
+ expect(wrapper.emitted('keydown')).toEqual([[event]]);
+ expect(event.preventDefault).toHaveBeenCalledTimes(1);
+ });
+
+ it('bubbles up keydown event for meta + non-k key with default behaviour intact', () => {
+ event.metaKey = true;
+ event.key = 'l';
+
+ findContentEditor().vm.$emit('keydown', event);
+
+ expect(wrapper.emitted('keydown')).toEqual([[event]]);
+ expect(event.preventDefault).toHaveBeenCalledTimes(0);
+ });
});
describe(`when richText editor triggers enableMarkdownEditor event`, () => {
diff --git a/spec/frontend/vue_shared/components/markdown/toolbar_spec.js b/spec/frontend/vue_shared/components/markdown/toolbar_spec.js
index 90d8ce3b500..59f01b7ff7f 100644
--- a/spec/frontend/vue_shared/components/markdown/toolbar_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/toolbar_spec.js
@@ -3,7 +3,6 @@ import Toolbar from '~/vue_shared/components/markdown/toolbar.vue';
import EditorModeSwitcher from '~/vue_shared/components/markdown/editor_mode_switcher.vue';
import { updateText } from '~/lib/utils/text_markdown';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import { PROMO_URL } from 'jh_else_ce/lib/utils/url_utility';
jest.mock('~/lib/utils/text_markdown');
@@ -83,28 +82,5 @@ describe('toolbar', () => {
expect(wrapper.emitted('enableContentEditor')).toEqual([[]]);
expect(updateText).not.toHaveBeenCalled();
});
-
- it('does not insert a template text if textarea has some value', () => {
- wrapper.findComponent(EditorModeSwitcher).vm.$emit('switch', true);
-
- expect(updateText).not.toHaveBeenCalled();
- });
-
- it('inserts a "getting started with rich text" template when switched for the first time', () => {
- document.querySelector('textarea').value = '';
-
- wrapper.findComponent(EditorModeSwitcher).vm.$emit('switch', true);
-
- expect(updateText).toHaveBeenCalledWith(
- expect.objectContaining({
- tag: `### Rich text editor
-
-Try out **styling** _your_ content right here or read the [direction](${PROMO_URL}/direction/plan/knowledge/content_editor/).`,
- textArea: document.querySelector('textarea'),
- cursorOffset: 0,
- wrap: false,
- }),
- );
- });
});
});
diff --git a/spec/frontend/vue_shared/components/runner_instructions/instructions/runner_cli_instructions_spec.js b/spec/frontend/vue_shared/components/runner_instructions/instructions/runner_cli_instructions_spec.js
index c6cd963fc33..67aa57a019b 100644
--- a/spec/frontend/vue_shared/components/runner_instructions/instructions/runner_cli_instructions_spec.js
+++ b/spec/frontend/vue_shared/components/runner_instructions/instructions/runner_cli_instructions_spec.js
@@ -1,5 +1,5 @@
-import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { GlAlert, GlListboxItem, GlLoadingIcon } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
@@ -32,7 +32,7 @@ describe('RunnerCliInstructions component', () => {
const findGlLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findAlert = () => wrapper.findComponent(GlAlert);
- const findArchitectureDropdownItems = () => wrapper.findAllByTestId('architecture-dropdown-item');
+ const findArchitectureDropdownItems = () => wrapper.findAllComponents(GlListboxItem);
const findBinaryDownloadButton = () => wrapper.findByTestId('binary-download-button');
const findBinaryInstructions = () => wrapper.findByTestId('binary-instructions');
const findRegisterCommand = () => wrapper.findByTestId('register-command');
@@ -43,7 +43,7 @@ describe('RunnerCliInstructions component', () => {
fakeApollo = createMockApollo(requestHandlers);
wrapper = extendedWrapper(
- shallowMount(RunnerCliInstructions, {
+ mount(RunnerCliInstructions, {
propsData: {
platform: mockPlatform,
registrationToken: 'MY_TOKEN',
diff --git a/spec/frontend/vue_shared/components/segmented_control_button_group_spec.js b/spec/frontend/vue_shared/components/segmented_control_button_group_spec.js
index c1feb64dacb..623a8739907 100644
--- a/spec/frontend/vue_shared/components/segmented_control_button_group_spec.js
+++ b/spec/frontend/vue_shared/components/segmented_control_button_group_spec.js
@@ -10,6 +10,7 @@ const DEFAULT_OPTIONS = [
];
describe('~/vue_shared/components/segmented_control_button_group.vue', () => {
+ let consoleSpy;
let wrapper;
const createComponent = (props = {}, scopedSlots = {}) => {
@@ -97,4 +98,34 @@ describe('~/vue_shared/components/segmented_control_button_group.vue', () => {
);
});
});
+
+ describe('options prop validation', () => {
+ beforeEach(() => {
+ consoleSpy = jest.spyOn(console, 'error').mockImplementation();
+ });
+
+ it.each([
+ [[{ disabled: true }]],
+ [[{ value: '1', disabled: 'false' }]],
+ [[{ value: null, disabled: 'true' }]],
+ [[[{ value: true }, null]]],
+ ])('with options=%j, fails validation', (options) => {
+ createComponent({ options });
+
+ expect(consoleSpy).toHaveBeenCalledTimes(1);
+ expect(consoleSpy).toHaveBeenCalledWith(
+ expect.stringContaining('Invalid prop: custom validator check failed for prop "options"'),
+ );
+ });
+
+ it.each([
+ [[{ value: '1' }]],
+ [[{ value: 1, disabled: true }]],
+ [[{ value: true, disabled: false }]],
+ ])('with options=%j, passes validation', (options) => {
+ createComponent({ options });
+
+ expect(consoleSpy).not.toHaveBeenCalled();
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/source_viewer/__snapshots__/utils_spec.js.snap b/spec/frontend/vue_shared/components/source_viewer/__snapshots__/utils_spec.js.snap
new file mode 100644
index 00000000000..e75b07dcf71
--- /dev/null
+++ b/spec/frontend/vue_shared/components/source_viewer/__snapshots__/utils_spec.js.snap
@@ -0,0 +1,88 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`SourceViewer utils toggleBlameClasses adds classes 1`] = `
+<div
+ class="content"
+>
+ <div
+ class="gl-border-gray-500 gl-border-t gl-pt-3!"
+ >
+ <div
+ id="reference-0"
+ >
+ 1
+ </div>
+ <div
+ id="reference-1"
+ >
+ 2
+ </div>
+ <div
+ id="reference-2"
+ >
+ 3
+ </div>
+ </div>
+ <div>
+ <div
+ class="gl-border-gray-500 gl-border-t gl-pt-3!"
+ id="reference-3"
+ >
+ Content 1
+ </div>
+ <div
+ class="gl-border-gray-500 gl-border-t gl-pt-3!"
+ id="reference-4"
+ >
+ Content 2
+ </div>
+ <div
+ class="gl-border-gray-500 gl-border-t gl-pt-3!"
+ id="reference-5"
+ >
+ Content 3
+ </div>
+ </div>
+</div>
+`;
+
+exports[`SourceViewer utils toggleBlameClasses removes classes 1`] = `
+<div
+ class="content"
+>
+ <div>
+ <div
+ id="reference-0"
+ >
+ 1
+ </div>
+ <div
+ id="reference-1"
+ >
+ 2
+ </div>
+ <div
+ id="reference-2"
+ >
+ 3
+ </div>
+ </div>
+ <div>
+ <div
+ id="reference-3"
+ >
+ Content 1
+ </div>
+ <div
+ id="reference-4"
+ >
+ Content 2
+ </div>
+ <div
+ id="reference-5"
+ >
+ Content 3
+ </div>
+ </div>
+</div>
+`;
diff --git a/spec/frontend/vue_shared/components/source_viewer/components/blame_info_spec.js b/spec/frontend/vue_shared/components/source_viewer/components/blame_info_spec.js
new file mode 100644
index 00000000000..ff8b2be9634
--- /dev/null
+++ b/spec/frontend/vue_shared/components/source_viewer/components/blame_info_spec.js
@@ -0,0 +1,63 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { setHTMLFixture } from 'helpers/fixtures';
+import CommitInfo from '~/repository/components/commit_info.vue';
+import BlameInfo from '~/vue_shared/components/source_viewer/components/blame_info.vue';
+import * as utils from '~/vue_shared/components/source_viewer/utils';
+import { SOURCE_CODE_CONTENT_MOCK, BLAME_DATA_MOCK } from '../mock_data';
+
+describe('BlameInfo component', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(BlameInfo, {
+ propsData: { blameData: BLAME_DATA_MOCK },
+ });
+ };
+
+ beforeEach(() => {
+ setHTMLFixture(SOURCE_CODE_CONTENT_MOCK);
+ jest.spyOn(utils, 'toggleBlameClasses');
+ createComponent();
+ });
+
+ const findCommitInfoComponents = () => wrapper.findAllComponents(CommitInfo);
+
+ it('adds the necessary classes to the DOM', () => {
+ expect(utils.toggleBlameClasses).toHaveBeenCalledWith(BLAME_DATA_MOCK, true);
+ });
+
+ it('renders a CommitInfo component for each blame entry', () => {
+ expect(findCommitInfoComponents().length).toBe(BLAME_DATA_MOCK.length);
+ });
+
+ it.each(BLAME_DATA_MOCK)(
+ 'sets the correct data and positioning for the commitInfo',
+ ({ lineno, commit, index }) => {
+ const commitInfoComponent = findCommitInfoComponents().at(index);
+
+ expect(commitInfoComponent.props('commit')).toEqual(commit);
+ expect(commitInfoComponent.element.style.top).toBe(utils.calculateBlameOffset(lineno));
+ },
+ );
+
+ describe('commitInfo component styling', () => {
+ const borderTopClassName = 'gl-border-t';
+
+ it('does not add a top border for the first entry', () => {
+ expect(findCommitInfoComponents().at(0).element.classList).not.toContain(borderTopClassName);
+ });
+
+ it('add a top border for the rest of the entries', () => {
+ expect(findCommitInfoComponents().at(1).element.classList).toContain(borderTopClassName);
+ expect(findCommitInfoComponents().at(2).element.classList).toContain(borderTopClassName);
+ });
+ });
+
+ describe('when component is destroyed', () => {
+ beforeEach(() => wrapper.destroy());
+
+ it('resets the DOM to its original state', () => {
+ expect(utils.toggleBlameClasses).toHaveBeenCalledWith(BLAME_DATA_MOCK, false);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/source_viewer/mock_data.js b/spec/frontend/vue_shared/components/source_viewer/mock_data.js
index f35e9607d5c..b3516f7ed72 100644
--- a/spec/frontend/vue_shared/components/source_viewer/mock_data.js
+++ b/spec/frontend/vue_shared/components/source_viewer/mock_data.js
@@ -22,3 +22,24 @@ export const CHUNK_2 = {
startingFrom: 70,
blamePath,
};
+
+export const SOURCE_CODE_CONTENT_MOCK = `
+<div class="content">
+ <div>
+ <div id="L1">1</div>
+ <div id="L2">2</div>
+ <div id="L3">3</div>
+ </div>
+
+ <div>
+ <div id="LC1">Content 1</div>
+ <div id="LC2">Content 2</div>
+ <div id="LC3">Content 3</div>
+ </div>
+</div>`;
+
+export const BLAME_DATA_MOCK = [
+ { lineno: 1, commit: { author: 'Peter' }, index: 0 },
+ { lineno: 2, commit: { author: 'Sarah' }, index: 1 },
+ { lineno: 3, commit: { author: 'Peter' }, index: 2 },
+];
diff --git a/spec/frontend/vue_shared/components/source_viewer/utils_spec.js b/spec/frontend/vue_shared/components/source_viewer/utils_spec.js
new file mode 100644
index 00000000000..0ac72aa9afb
--- /dev/null
+++ b/spec/frontend/vue_shared/components/source_viewer/utils_spec.js
@@ -0,0 +1,35 @@
+import { setHTMLFixture } from 'helpers/fixtures';
+import {
+ calculateBlameOffset,
+ toggleBlameClasses,
+} from '~/vue_shared/components/source_viewer/utils';
+import { SOURCE_CODE_CONTENT_MOCK, BLAME_DATA_MOCK } from './mock_data';
+
+describe('SourceViewer utils', () => {
+ beforeEach(() => setHTMLFixture(SOURCE_CODE_CONTENT_MOCK));
+
+ const findContent = () => document.querySelector('.content');
+
+ describe('calculateBlameOffset', () => {
+ it('returns an offset of zero if line number === 1', () => {
+ expect(calculateBlameOffset(1)).toBe('0px');
+ });
+
+ it('calculates an offset for the blame component', () => {
+ const { offsetTop } = document.querySelector('#LC3');
+ expect(calculateBlameOffset(3)).toBe(`${offsetTop}px`);
+ });
+ });
+
+ describe('toggleBlameClasses', () => {
+ it('adds classes', () => {
+ toggleBlameClasses(BLAME_DATA_MOCK, true);
+ expect(findContent()).toMatchSnapshot();
+ });
+
+ it('removes classes', () => {
+ toggleBlameClasses(BLAME_DATA_MOCK, false);
+ expect(findContent()).toMatchSnapshot();
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js b/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js
index 17a363ad8b1..41cf1d2b2e8 100644
--- a/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js
+++ b/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js
@@ -1,4 +1,5 @@
import { shallowMount } from '@vue/test-utils';
+import { GlTruncate } from '@gitlab/ui';
import timezoneMock from 'timezone-mock';
import { formatDate, getTimeago } from '~/lib/utils/datetime_utility';
@@ -36,6 +37,14 @@ describe('Time ago with tooltip component', () => {
expect(vm.text()).toEqual(timeAgoTimestamp);
});
+ it('should render truncated value with gl-truncate as true', () => {
+ buildVm({
+ enableTruncation: true,
+ });
+
+ expect(vm.findComponent(GlTruncate).exists()).toBe(true);
+ });
+
it('should render provided html class', () => {
buildVm({
cssClass: 'foo',
diff --git a/spec/frontend/vue_shared/components/toggle_labels_spec.js b/spec/frontend/vue_shared/components/toggle_labels_spec.js
new file mode 100644
index 00000000000..e4b4b7f9e0c
--- /dev/null
+++ b/spec/frontend/vue_shared/components/toggle_labels_spec.js
@@ -0,0 +1,56 @@
+import { GlToggle } from '@gitlab/ui';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+
+import ToggleLabels from '~/vue_shared/components/toggle_labels.vue';
+import isShowingLabelsQuery from '~/graphql_shared/client/is_showing_labels.query.graphql';
+
+Vue.use(VueApollo);
+
+describe('ToggleLabels', () => {
+ let wrapper;
+
+ const findToggle = () => wrapper.findComponent(GlToggle);
+
+ const mockSetIsShowingLabelsResolver = jest.fn();
+ const mockApollo = createMockApollo([], {
+ Mutation: {
+ setIsShowingLabels: mockSetIsShowingLabelsResolver,
+ },
+ });
+
+ const createComponent = () => {
+ mockApollo.clients.defaultClient.cache.writeQuery({
+ query: isShowingLabelsQuery,
+ data: {
+ isShowingLabels: true,
+ },
+ });
+ wrapper = shallowMountExtended(ToggleLabels, {
+ apolloProvider: mockApollo,
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('calls setIsShowingLabelsMutation on toggle', async () => {
+ expect(findToggle().props('value')).toBe(true);
+ findToggle().vm.$emit('change', false);
+
+ await waitForPromises();
+
+ expect(mockSetIsShowingLabelsResolver).toHaveBeenCalledWith(
+ {},
+ {
+ isShowingLabels: false,
+ },
+ expect.anything(),
+ expect.anything(),
+ );
+ });
+});
diff --git a/spec/frontend/vue_shared/components/vuex_module_provider_spec.js b/spec/frontend/vue_shared/components/vuex_module_provider_spec.js
index e24c5a4609d..95f557b10c1 100644
--- a/spec/frontend/vue_shared/components/vuex_module_provider_spec.js
+++ b/spec/frontend/vue_shared/components/vuex_module_provider_spec.js
@@ -1,6 +1,4 @@
import { mount } from '@vue/test-utils';
-import Vue from 'vue';
-import VueApollo from 'vue-apollo';
import VuexModuleProvider from '~/vue_shared/components/vuex_module_provider.vue';
const TestComponent = {
@@ -38,12 +36,4 @@ describe('~/vue_shared/components/vuex_module_provider', () => {
});
expect(findProvidedVuexModule()).toBe(TEST_VUEX_MODULE);
});
-
- it('does not blow up when used with vue-apollo', () => {
- // See https://github.com/vuejs/vue-apollo/pull/1153 for details
- Vue.use(VueApollo);
-
- createComponent();
- expect(findProvidedVuexModule()).toBe(TEST_VUEX_MODULE);
- });
});
diff --git a/spec/frontend/vue_shared/issuable/create/components/issuable_create_root_spec.js b/spec/frontend/vue_shared/issuable/create/components/issuable_create_root_spec.js
index 03f509a3fa3..35e3564c599 100644
--- a/spec/frontend/vue_shared/issuable/create/components/issuable_create_root_spec.js
+++ b/spec/frontend/vue_shared/issuable/create/components/issuable_create_root_spec.js
@@ -5,6 +5,7 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import IssuableCreateRoot from '~/vue_shared/issuable/create/components/issuable_create_root.vue';
import IssuableForm from '~/vue_shared/issuable/create/components/issuable_form.vue';
+import { TYPE_TEST_CASE } from '~/issues/constants';
Vue.use(VueApollo);
@@ -13,6 +14,7 @@ const createComponent = ({
descriptionHelpPath = '/help/user/markdown',
labelsFetchPath = '/gitlab-org/gitlab-shell/-/labels.json',
labelsManagePath = '/gitlab-org/gitlab-shell/-/labels',
+ issuableType = TYPE_TEST_CASE,
} = {}) => {
return mount(IssuableCreateRoot, {
propsData: {
@@ -20,6 +22,7 @@ const createComponent = ({
descriptionHelpPath,
labelsFetchPath,
labelsManagePath,
+ issuableType,
},
apolloProvider: createMockApollo(),
slots: {
diff --git a/spec/frontend/vue_shared/issuable/create/components/issuable_form_spec.js b/spec/frontend/vue_shared/issuable/create/components/issuable_form_spec.js
index 62361705843..61185f913d9 100644
--- a/spec/frontend/vue_shared/issuable/create/components/issuable_form_spec.js
+++ b/spec/frontend/vue_shared/issuable/create/components/issuable_form_spec.js
@@ -1,9 +1,10 @@
-import { GlFormInput } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { GlFormInput, GlFormGroup, GlFormCheckbox } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import IssuableForm from '~/vue_shared/issuable/create/components/issuable_form.vue';
import MarkdownEditor from '~/vue_shared/components/markdown/markdown_editor.vue';
import LabelsSelect from '~/sidebar/components/labels/labels_select_vue/labels_select_root.vue';
+import { TYPE_TEST_CASE } from '~/issues/constants';
import { __ } from '~/locale';
const createComponent = ({
@@ -11,13 +12,15 @@ const createComponent = ({
descriptionHelpPath = '/help/user/markdown',
labelsFetchPath = '/gitlab-org/gitlab-shell/-/labels.json',
labelsManagePath = '/gitlab-org/gitlab-shell/-/labels',
+ issuableType = TYPE_TEST_CASE,
} = {}) => {
- return shallowMount(IssuableForm, {
+ return shallowMountExtended(IssuableForm, {
propsData: {
descriptionPreviewPath,
descriptionHelpPath,
labelsFetchPath,
labelsManagePath,
+ issuableType,
},
slots: {
actions: `
@@ -58,7 +61,7 @@ describe('IssuableForm', () => {
describe('template', () => {
it('renders issuable title input field', () => {
- const titleFieldEl = wrapper.find('[data-testid="issuable-title"]');
+ const titleFieldEl = wrapper.findByTestId('issuable-title');
expect(titleFieldEl.exists()).toBe(true);
expect(titleFieldEl.find('label').text()).toBe('Title');
@@ -68,7 +71,7 @@ describe('IssuableForm', () => {
});
it('renders issuable description input field', () => {
- const descriptionFieldEl = wrapper.find('[data-testid="issuable-description"]');
+ const descriptionFieldEl = wrapper.findByTestId('issuable-description');
expect(descriptionFieldEl.exists()).toBe(true);
expect(descriptionFieldEl.find('label').text()).toBe('Description');
@@ -88,8 +91,23 @@ describe('IssuableForm', () => {
});
});
+ it('renders issuable confidential checkbox', () => {
+ const confidentialCheckboxEl = wrapper.findByTestId('issuable-confidential');
+ expect(confidentialCheckboxEl.exists()).toBe(true);
+
+ expect(confidentialCheckboxEl.findComponent(GlFormGroup).exists()).toBe(true);
+ expect(confidentialCheckboxEl.findComponent(GlFormGroup).attributes('label')).toBe(
+ 'Confidentiality',
+ );
+
+ expect(confidentialCheckboxEl.findComponent(GlFormCheckbox).exists()).toBe(true);
+ expect(confidentialCheckboxEl.findComponent(GlFormCheckbox).text()).toBe(
+ 'This test case is confidential and should only be visible to team members with at least Reporter access.',
+ );
+ });
+
it('renders labels select field', () => {
- const labelsSelectEl = wrapper.find('[data-testid="issuable-labels"]');
+ const labelsSelectEl = wrapper.findByTestId('issuable-labels');
expect(labelsSelectEl.exists()).toBe(true);
expect(labelsSelectEl.find('label').text()).toBe('Labels');
@@ -111,7 +129,7 @@ describe('IssuableForm', () => {
it('renders contents for slot "actions"', () => {
const buttonEl = wrapper
- .find('[data-testid="issuable-create-actions"]')
+ .findByTestId('issuable-create-actions')
.find('button.js-issuable-save');
expect(buttonEl.exists()).toBe(true);
diff --git a/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js b/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
index 9f7254ba0e6..47da111b604 100644
--- a/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
+++ b/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
@@ -1,6 +1,5 @@
import { GlLink, GlLabel, GlIcon, GlFormCheckbox, GlSprintf } from '@gitlab/ui';
import { nextTick } from 'vue';
-import { escape } from 'lodash';
import { useFakeDate } from 'helpers/fake_date';
import { shallowMountExtended as shallowMount } from 'helpers/vue_test_utils_helper';
import IssuableItem from '~/vue_shared/issuable/list/components/issuable_item.vue';
@@ -288,23 +287,10 @@ describe('IssuableItem', () => {
expect(titleEl.exists()).toBe(true);
expect(titleEl.findComponent(GlLink).attributes('href')).toBe(expectedHref);
expect(titleEl.findComponent(GlLink).attributes('target')).toBe(expectedTarget);
- expect(titleEl.findComponent(GlLink).html()).toContain(mockIssuable.titleHtml);
+ expect(titleEl.findComponent(GlLink).text()).toBe(mockIssuable.title);
},
);
- it('renders issuable title with escaped markup when issue tracker is external', () => {
- const mockTitle = '<script>foobar</script>';
- wrapper = createComponent({
- issuable: {
- ...mockIssuable,
- title: mockTitle,
- externalTracker: 'jira',
- },
- });
-
- expect(wrapper.findByTestId('issuable-title').html()).toContain(escape(mockTitle));
- });
-
it('renders checkbox when `showCheckbox` prop is true', async () => {
wrapper = createComponent({
showCheckbox: true,
@@ -366,7 +352,7 @@ describe('IssuableItem', () => {
expect(hiddenIcon.props('name')).toBe('spam');
expect(hiddenIcon.attributes()).toMatchObject({
- title: 'This issue is hidden because its author has been banned',
+ title: 'This issue is hidden because its author has been banned.',
arialabel: 'Hidden',
});
});
diff --git a/spec/frontend/vue_shared/issuable/list/mock_data.js b/spec/frontend/vue_shared/issuable/list/mock_data.js
index b39d177f292..f8cf3ba5271 100644
--- a/spec/frontend/vue_shared/issuable/list/mock_data.js
+++ b/spec/frontend/vue_shared/issuable/list/mock_data.js
@@ -42,7 +42,7 @@ export const mockCurrentUserTodo = {
export const mockIssuable = {
iid: '30',
title: 'Dismiss Cipher with no integrity',
- titleHtml: '<gl-emoji title="party-parrot"></gl-emoji>Dismiss Cipher with no integrity',
+ titleHtml: 'Dismiss Cipher with no integrity',
description: 'fortitudinis _fomentis_ dolor mitigari solet.',
descriptionHtml: 'fortitudinis <i>fomentis</i> dolor mitigari solet.',
state: 'opened',
diff --git a/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js
index 3b6f06d835b..03395e5dfc0 100644
--- a/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js
+++ b/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js
@@ -2,6 +2,8 @@ import { GlBadge, GlButton, GlIcon, GlLink, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { resetHTMLFixture, setHTMLFixture } from 'helpers/fixtures';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import HiddenBadge from '~/issuable/components/hidden_badge.vue';
+import LockedBadge from '~/issuable/components/locked_badge.vue';
import { STATUS_CLOSED, STATUS_OPEN, STATUS_REOPENED, TYPE_ISSUE } from '~/issues/constants';
import { __ } from '~/locale';
import ConfidentialityBadge from '~/vue_shared/components/confidentiality_badge.vue';
@@ -23,8 +25,8 @@ describe('IssuableHeader component', () => {
wrapper.findAllComponents(GlIcon).filter((component) => component.props('name') === name);
const findIcon = (name) =>
findGlIconWithName(name).exists() ? findGlIconWithName(name).at(0) : undefined;
- const findBlockedIcon = () => findIcon('lock');
- const findHiddenIcon = () => findIcon('spam');
+ const findBlockedBadge = () => wrapper.findComponent(LockedBadge);
+ const findHiddenBadge = () => wrapper.findComponent(HiddenBadge);
const findExternalLinkIcon = () => findIcon('external-link');
const findFirstContributionIcon = () => findIcon('first-contribution');
const findComponentTooltip = (component) => getBinding(component.element, 'gl-tooltip');
@@ -111,49 +113,31 @@ describe('IssuableHeader component', () => {
});
});
- describe('blocked icon', () => {
+ describe('blocked badge', () => {
it('renders when issuable is blocked', () => {
createComponent({ blocked: true });
- expect(findBlockedIcon().props('ariaLabel')).toBe('Blocked');
- });
-
- it('has tooltip', () => {
- createComponent({ blocked: true });
-
- expect(findComponentTooltip(findBlockedIcon())).toBeDefined();
- expect(findBlockedIcon().attributes('title')).toBe(
- 'This issue is locked. Only project members can comment.',
- );
+ expect(findBlockedBadge().props('issuableType')).toBe('issue');
});
it('does not render when issuable is not blocked', () => {
createComponent({ blocked: false });
- expect(findBlockedIcon()).toBeUndefined();
+ expect(findBlockedBadge().exists()).toBe(false);
});
});
- describe('hidden icon', () => {
+ describe('hidden badge', () => {
it('renders when issuable is hidden', () => {
createComponent({ isHidden: true });
- expect(findHiddenIcon().props('ariaLabel')).toBe('Hidden');
- });
-
- it('has tooltip', () => {
- createComponent({ isHidden: true });
-
- expect(findComponentTooltip(findHiddenIcon())).toBeDefined();
- expect(findHiddenIcon().attributes('title')).toBe(
- 'This issue is hidden because its author has been banned',
- );
+ expect(findHiddenBadge().props('issuableType')).toBe('issue');
});
it('does not render when issuable is not hidden', () => {
createComponent({ isHidden: false });
- expect(findHiddenIcon()).toBeUndefined();
+ expect(findHiddenBadge().exists()).toBe(false);
});
});
diff --git a/spec/frontend/work_items/components/notes/work_item_add_note_spec.js b/spec/frontend/work_items/components/notes/work_item_add_note_spec.js
index 826fc2b2230..b2b372d9d0d 100644
--- a/spec/frontend/work_items/components/notes/work_item_add_note_spec.js
+++ b/spec/frontend/work_items/components/notes/work_item_add_note_spec.js
@@ -10,9 +10,11 @@ import WorkItemCommentLocked from '~/work_items/components/notes/work_item_comme
import WorkItemCommentForm from '~/work_items/components/notes/work_item_comment_form.vue';
import createNoteMutation from '~/work_items/graphql/notes/create_work_item_note.mutation.graphql';
import { TRACKING_CATEGORY_SHOW } from '~/work_items/constants';
+import groupWorkItemByIidQuery from '~/work_items/graphql/group_work_item_by_iid.query.graphql';
import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
import {
createWorkItemNoteResponse,
+ groupWorkItemByIidResponseFactory,
workItemByIidResponseFactory,
workItemQueryResponse,
} from '../../mock_data';
@@ -29,6 +31,7 @@ describe('Work item add note', () => {
const mutationSuccessHandler = jest.fn().mockResolvedValue(createWorkItemNoteResponse);
let workItemResponseHandler;
+ let groupWorkItemResponseHandler;
const findCommentForm = () => wrapper.findComponent(WorkItemCommentForm);
const findTextarea = () => wrapper.findByTestId('note-reply-textarea');
@@ -40,29 +43,32 @@ describe('Work item add note', () => {
canCreateNote = true,
workItemIid = '1',
workItemResponse = workItemByIidResponseFactory({ canUpdate, canCreateNote }),
+ groupWorkItemResponse = groupWorkItemByIidResponseFactory({ canUpdate, canCreateNote }),
signedIn = true,
isEditing = true,
+ isGroup = false,
workItemType = 'Task',
isInternalThread = false,
} = {}) => {
workItemResponseHandler = jest.fn().mockResolvedValue(workItemResponse);
+ groupWorkItemResponseHandler = jest.fn().mockResolvedValue(groupWorkItemResponse);
if (signedIn) {
window.gon.current_user_id = '1';
window.gon.current_user_avatar_url = 'avatar.png';
}
- const apolloProvider = createMockApollo([
- [workItemByIidQuery, workItemResponseHandler],
- [createNoteMutation, mutationHandler],
- ]);
-
const { id } = workItemQueryResponse.data.workItem;
wrapper = shallowMountExtended(WorkItemAddNote, {
- apolloProvider,
+ apolloProvider: createMockApollo([
+ [workItemByIidQuery, workItemResponseHandler],
+ [groupWorkItemByIidQuery, groupWorkItemResponseHandler],
+ [createNoteMutation, mutationHandler],
+ ]),
provide: {
- fullPath: 'test-project-path',
+ isGroup,
},
propsData: {
+ fullPath: 'test-project-path',
workItemId: id,
workItemIid,
workItemType,
@@ -272,16 +278,44 @@ describe('Work item add note', () => {
});
});
- it('calls the work item query', async () => {
- await createComponent();
+ describe('when project context', () => {
+ it('calls the project work item query', async () => {
+ await createComponent();
+
+ expect(workItemResponseHandler).toHaveBeenCalled();
+ });
+
+ it('skips calling the group work item query', async () => {
+ await createComponent();
+
+ expect(groupWorkItemResponseHandler).not.toHaveBeenCalled();
+ });
+
+ it('skips calling the project work item query when missing workItemIid', async () => {
+ await createComponent({ workItemIid: '', isEditing: false });
- expect(workItemResponseHandler).toHaveBeenCalled();
+ expect(workItemResponseHandler).not.toHaveBeenCalled();
+ });
});
- it('skips calling the work item query when missing workItemIid', async () => {
- await createComponent({ workItemIid: '', isEditing: false });
+ describe('when group context', () => {
+ it('skips calling the project work item query', async () => {
+ await createComponent({ isGroup: true });
+
+ expect(workItemResponseHandler).not.toHaveBeenCalled();
+ });
+
+ it('calls the group work item query', async () => {
+ await createComponent({ isGroup: true });
- expect(workItemResponseHandler).not.toHaveBeenCalled();
+ expect(groupWorkItemResponseHandler).toHaveBeenCalled();
+ });
+
+ it('skips calling the group work item query when missing workItemIid', async () => {
+ await createComponent({ isGroup: true, workItemIid: '', isEditing: false });
+
+ expect(groupWorkItemResponseHandler).not.toHaveBeenCalled();
+ });
});
it('wrapper adds `internal-note` class when internal thread', async () => {
diff --git a/spec/frontend/work_items/components/notes/work_item_comment_form_spec.js b/spec/frontend/work_items/components/notes/work_item_comment_form_spec.js
index dd88f34ae4f..ee2b434bd75 100644
--- a/spec/frontend/work_items/components/notes/work_item_comment_form_spec.js
+++ b/spec/frontend/work_items/components/notes/work_item_comment_form_spec.js
@@ -48,6 +48,7 @@ describe('Work item comment form component', () => {
} = {}) => {
wrapper = shallowMount(WorkItemCommentForm, {
propsData: {
+ fullPath: 'test-project-path',
workItemState,
workItemId,
workItemType,
@@ -59,9 +60,6 @@ describe('Work item comment form component', () => {
autocompleteDataSources: {},
isNewDiscussion,
},
- provide: {
- fullPath: 'test-project-path',
- },
directives: {
GlTooltip: createMockDirective('gl-tooltip'),
},
diff --git a/spec/frontend/work_items/components/notes/work_item_discussion_spec.js b/spec/frontend/work_items/components/notes/work_item_discussion_spec.js
index 9d22a64f2cb..fa53ba54faa 100644
--- a/spec/frontend/work_items/components/notes/work_item_discussion_spec.js
+++ b/spec/frontend/work_items/components/notes/work_item_discussion_spec.js
@@ -31,10 +31,8 @@ describe('Work Item Discussion', () => {
workItemType = 'Task',
} = {}) => {
wrapper = shallowMount(WorkItemDiscussion, {
- provide: {
- fullPath: 'gitlab-org',
- },
propsData: {
+ fullPath: 'gitlab-org',
discussion,
workItemId,
workItemIid: '1',
diff --git a/spec/frontend/work_items/components/notes/work_item_note_actions_spec.js b/spec/frontend/work_items/components/notes/work_item_note_actions_spec.js
index e4180b2d178..6a24987b737 100644
--- a/spec/frontend/work_items/components/notes/work_item_note_actions_spec.js
+++ b/spec/frontend/work_items/components/notes/work_item_note_actions_spec.js
@@ -48,6 +48,7 @@ describe('Work Item Note Actions', () => {
} = {}) => {
wrapper = shallowMountExtended(WorkItemNoteActions, {
propsData: {
+ fullPath: 'gitlab-org',
showReply,
showEdit,
workItemIid: '1',
@@ -63,7 +64,6 @@ describe('Work Item Note Actions', () => {
projectName,
},
provide: {
- fullPath: 'gitlab-org',
glFeatures: {
workItemsMvc2: true,
},
diff --git a/spec/frontend/work_items/components/notes/work_item_note_awards_list_spec.js b/spec/frontend/work_items/components/notes/work_item_note_awards_list_spec.js
index d425f1e50dc..ce915635946 100644
--- a/spec/frontend/work_items/components/notes/work_item_note_awards_list_spec.js
+++ b/spec/frontend/work_items/components/notes/work_item_note_awards_list_spec.js
@@ -61,10 +61,8 @@ describe('Work Item Note Awards List', () => {
});
wrapper = shallowMount(WorkItemNoteAwardsList, {
- provide: {
- fullPath,
- },
propsData: {
+ fullPath,
workItemIid,
note,
isModal: false,
diff --git a/spec/frontend/work_items/components/notes/work_item_note_spec.js b/spec/frontend/work_items/components/notes/work_item_note_spec.js
index 9049a69656a..2b4c9604382 100644
--- a/spec/frontend/work_items/components/notes/work_item_note_spec.js
+++ b/spec/frontend/work_items/components/notes/work_item_note_spec.js
@@ -15,8 +15,10 @@ import NoteActions from '~/work_items/components/notes/work_item_note_actions.vu
import WorkItemCommentForm from '~/work_items/components/notes/work_item_comment_form.vue';
import updateWorkItemNoteMutation from '~/work_items/graphql/notes/update_work_item_note.mutation.graphql';
import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
+import groupWorkItemByIidQuery from '~/work_items/graphql/group_work_item_by_iid.query.graphql';
import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
import {
+ groupWorkItemByIidResponseFactory,
mockAssignees,
mockWorkItemCommentNote,
updateWorkItemMutationResponse,
@@ -68,6 +70,9 @@ describe('Work Item Note', () => {
});
const workItemResponseHandler = jest.fn().mockResolvedValue(workItemByIidResponseFactory());
+ const groupWorkItemResponseHandler = jest
+ .fn()
+ .mockResolvedValue(groupWorkItemByIidResponseFactory());
const workItemByAuthoredByDifferentUser = jest
.fn()
.mockResolvedValue(mockWorkItemByDifferentUser);
@@ -90,6 +95,7 @@ describe('Work Item Note', () => {
const createComponent = ({
note = mockWorkItemCommentNote,
isFirstNote = false,
+ isGroup = false,
updateNoteMutationHandler = successHandler,
workItemId = mockWorkItemId,
updateWorkItemMutationHandler = updateWorkItemMutationSuccessHandler,
@@ -98,9 +104,10 @@ describe('Work Item Note', () => {
} = {}) => {
wrapper = shallowMount(WorkItemNote, {
provide: {
- fullPath: 'test-project-path',
+ isGroup,
},
propsData: {
+ fullPath: 'test-project-path',
workItemId,
workItemIid: '1',
note,
@@ -112,6 +119,7 @@ describe('Work Item Note', () => {
},
apolloProvider: mockApollo([
[workItemByIidQuery, workItemByIidResponseHandler],
+ [groupWorkItemByIidQuery, groupWorkItemResponseHandler],
[updateWorkItemNoteMutation, updateNoteMutationHandler],
[updateWorkItemMutation, updateWorkItemMutationHandler],
]),
@@ -442,4 +450,32 @@ describe('Work Item Note', () => {
expect(findAwardsList().props('workItemIid')).toBe('1');
});
});
+
+ describe('when project context', () => {
+ it('calls the project work item query', () => {
+ createComponent();
+
+ expect(workItemResponseHandler).toHaveBeenCalled();
+ });
+
+ it('skips calling the group work item query', () => {
+ createComponent();
+
+ expect(groupWorkItemResponseHandler).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when group context', () => {
+ it('skips calling the project work item query', () => {
+ createComponent({ isGroup: true });
+
+ expect(workItemResponseHandler).not.toHaveBeenCalled();
+ });
+
+ it('calls the group work item query', () => {
+ createComponent({ isGroup: true });
+
+ expect(groupWorkItemResponseHandler).toHaveBeenCalled();
+ });
+ });
});
diff --git a/spec/frontend/work_items/components/shared/work_item_link_child_contents_spec.js b/spec/frontend/work_items/components/shared/work_item_link_child_contents_spec.js
index b86f9ff34ae..2e1a7983dec 100644
--- a/spec/frontend/work_items/components/shared/work_item_link_child_contents_spec.js
+++ b/spec/frontend/work_items/components/shared/work_item_link_child_contents_spec.js
@@ -1,4 +1,4 @@
-import { GlLabel, GlIcon } from '@gitlab/ui';
+import { GlLabel, GlIcon, GlLink } from '@gitlab/ui';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
@@ -33,7 +33,7 @@ describe('WorkItemLinkChildContents', () => {
const findStatusIconComponent = () =>
wrapper.findByTestId('item-status-icon').findComponent(GlIcon);
const findConfidentialIconComponent = () => wrapper.findByTestId('confidential-icon');
- const findTitleEl = () => wrapper.findByTestId('item-title');
+ const findTitleEl = () => wrapper.findComponent(GlLink);
const findStatusTooltipComponent = () => wrapper.findComponent(RichTimestampTooltip);
const findMetadataComponent = () => wrapper.findComponent(WorkItemLinkChildMetadata);
const findAllLabels = () => wrapper.findAllComponents(GlLabel);
@@ -46,7 +46,6 @@ describe('WorkItemLinkChildContents', () => {
propsData: {
canUpdate,
childItem,
- childPath: '/gitlab-org/gitlab-test/-/work_items/4',
},
});
};
diff --git a/spec/frontend/work_items/components/work_item_actions_spec.js b/spec/frontend/work_items/components/work_item_actions_spec.js
index 0098a2e0864..15c33bf5b1e 100644
--- a/spec/frontend/work_items/components/work_item_actions_spec.js
+++ b/spec/frontend/work_items/components/work_item_actions_spec.js
@@ -22,13 +22,12 @@ import {
import updateWorkItemNotificationsMutation from '~/work_items/graphql/update_work_item_notifications.mutation.graphql';
import projectWorkItemTypesQuery from '~/work_items/graphql/project_work_item_types.query.graphql';
import convertWorkItemMutation from '~/work_items/graphql/work_item_convert.mutation.graphql';
-import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
import {
convertWorkItemMutationResponse,
projectWorkItemTypesQueryResponse,
convertWorkItemMutationErrorResponse,
- workItemByIidResponseFactory,
+ updateWorkItemNotificationsMutationResponse,
} from '../mock_data';
jest.mock('~/lib/utils/common_utils');
@@ -38,10 +37,7 @@ describe('WorkItemActions component', () => {
Vue.use(VueApollo);
let wrapper;
- let mockApollo;
const mockWorkItemReference = 'gitlab-org/gitlab-test#1';
- const mockWorkItemIid = '1';
- const mockFullPath = 'gitlab-org/gitlab-test';
const mockWorkItemCreateNoteEmail =
'gitlab-incoming+gitlab-org-gitlab-test-2-ddpzuq0zd2wefzofcpcdr3dg7-issue-1@gmail.com';
@@ -75,14 +71,22 @@ describe('WorkItemActions component', () => {
hide: jest.fn(),
};
+ const typesQuerySuccessHandler = jest.fn().mockResolvedValue(projectWorkItemTypesQueryResponse);
const convertWorkItemMutationSuccessHandler = jest
.fn()
.mockResolvedValue(convertWorkItemMutationResponse);
-
const convertWorkItemMutationErrorHandler = jest
.fn()
.mockResolvedValue(convertWorkItemMutationErrorResponse);
- const typesQuerySuccessHandler = jest.fn().mockResolvedValue(projectWorkItemTypesQueryResponse);
+ const toggleNotificationsOffHandler = jest
+ .fn()
+ .mockResolvedValue(updateWorkItemNotificationsMutationResponse(false));
+ const toggleNotificationsOnHandler = jest
+ .fn()
+ .mockResolvedValue(updateWorkItemNotificationsMutationResponse(true));
+ const toggleNotificationsFailureHandler = jest
+ .fn()
+ .mockRejectedValue(new Error('Failed to subscribe'));
const createComponent = ({
canUpdate = true,
@@ -90,35 +94,21 @@ describe('WorkItemActions component', () => {
isConfidential = false,
subscribed = false,
isParentConfidential = false,
- notificationsMock = [updateWorkItemNotificationsMutation, jest.fn()],
convertWorkItemMutationHandler = convertWorkItemMutationSuccessHandler,
+ notificationsMutationHandler,
workItemType = 'Task',
workItemReference = mockWorkItemReference,
workItemCreateNoteEmail = mockWorkItemCreateNoteEmail,
- writeQueryCache = false,
} = {}) => {
- const handlers = [notificationsMock];
- mockApollo = createMockApollo([
- ...handlers,
- [convertWorkItemMutation, convertWorkItemMutationHandler],
- [projectWorkItemTypesQuery, typesQuerySuccessHandler],
- ]);
-
- // Write the query cache only when required e.g., notification widget mutation is called
- if (writeQueryCache) {
- const workItemQueryResponse = workItemByIidResponseFactory({ canUpdate: true });
-
- mockApollo.clients.defaultClient.cache.writeQuery({
- query: workItemByIidQuery,
- variables: { fullPath: mockFullPath, iid: mockWorkItemIid },
- data: workItemQueryResponse.data,
- });
- }
-
wrapper = shallowMountExtended(WorkItemActions, {
isLoggedIn: isLoggedIn(),
- apolloProvider: mockApollo,
+ apolloProvider: createMockApollo([
+ [projectWorkItemTypesQuery, typesQuerySuccessHandler],
+ [convertWorkItemMutation, convertWorkItemMutationHandler],
+ [updateWorkItemNotificationsMutation, notificationsMutationHandler],
+ ]),
propsData: {
+ fullPath: 'gitlab-org/gitlab-test',
workItemId: 'gid://gitlab/WorkItem/1',
canUpdate,
canDelete,
@@ -128,10 +118,9 @@ describe('WorkItemActions component', () => {
workItemType,
workItemReference,
workItemCreateNoteEmail,
- workItemIid: '1',
},
provide: {
- fullPath: mockFullPath,
+ isGroup: false,
glFeatures: { workItemsMvc2: true },
},
mocks: {
@@ -159,7 +148,6 @@ describe('WorkItemActions component', () => {
it('renders modal', () => {
createComponent();
- expect(findModal().exists()).toBe(true);
expect(findModal().props('visible')).toBe(false);
});
@@ -247,59 +235,15 @@ describe('WorkItemActions component', () => {
});
it('does not render when canDelete is false', () => {
- createComponent({
- canDelete: false,
- });
+ createComponent({ canDelete: false });
expect(findDeleteButton().exists()).toBe(false);
});
});
describe('notifications action', () => {
- const errorMessage = 'Failed to subscribe';
- const notificationToggledOffMessage = 'Notifications turned off.';
- const notificationToggledOnMessage = 'Notifications turned on.';
-
- const toggleNotificationsOffHandler = jest.fn().mockResolvedValue({
- data: {
- updateWorkItemNotificationsSubscription: {
- issue: {
- id: 'gid://gitlab/WorkItem/1',
- subscribed: false,
- },
- errors: [],
- },
- },
- });
-
- const toggleNotificationsOnHandler = jest.fn().mockResolvedValue({
- data: {
- updateWorkItemNotificationsSubscription: {
- issue: {
- id: 'gid://gitlab/WorkItem/1',
- subscribed: true,
- },
- errors: [],
- },
- },
- });
-
- const toggleNotificationsFailureHandler = jest.fn().mockRejectedValue(new Error(errorMessage));
-
- const notificationsOffMock = [
- updateWorkItemNotificationsMutation,
- toggleNotificationsOffHandler,
- ];
-
- const notificationsOnMock = [updateWorkItemNotificationsMutation, toggleNotificationsOnHandler];
-
- const notificationsFailureMock = [
- updateWorkItemNotificationsMutation,
- toggleNotificationsFailureHandler,
- ];
-
beforeEach(() => {
- createComponent({ writeQueryCache: true });
+ createComponent();
isLoggedIn.mockReturnValue(true);
});
@@ -308,25 +252,26 @@ describe('WorkItemActions component', () => {
});
it.each`
- scenario | subscribedToNotifications | notificationsMock | subscribedState | toastMessage
- ${'turned off'} | ${false} | ${notificationsOffMock} | ${false} | ${notificationToggledOffMessage}
- ${'turned on'} | ${true} | ${notificationsOnMock} | ${true} | ${notificationToggledOnMessage}
+ scenario | subscribedToNotifications | notificationsMutationHandler | subscribed | toastMessage
+ ${'turned off'} | ${false} | ${toggleNotificationsOffHandler} | ${false} | ${'Notifications turned off.'}
+ ${'turned on'} | ${true} | ${toggleNotificationsOnHandler} | ${true} | ${'Notifications turned on.'}
`(
'calls mutation and displays toast when notification toggle is $scenario',
- async ({ subscribedToNotifications, notificationsMock, subscribedState, toastMessage }) => {
- createComponent({ notificationsMock, writeQueryCache: true });
-
- await waitForPromises();
+ async ({
+ subscribedToNotifications,
+ notificationsMutationHandler,
+ subscribed,
+ toastMessage,
+ }) => {
+ createComponent({ notificationsMutationHandler });
findNotificationsToggle().vm.$emit('change', subscribedToNotifications);
-
await waitForPromises();
- expect(notificationsMock[1]).toHaveBeenCalledWith({
+ expect(notificationsMutationHandler).toHaveBeenCalledWith({
input: {
- projectPath: mockFullPath,
- iid: mockWorkItemIid,
- subscribedState,
+ id: 'gid://gitlab/WorkItem/1',
+ subscribed,
},
});
expect(toast).toHaveBeenCalledWith(toastMessage);
@@ -334,15 +279,12 @@ describe('WorkItemActions component', () => {
);
it('emits error when the update notification mutation fails', async () => {
- createComponent({ notificationsMock: notificationsFailureMock, writeQueryCache: true });
-
- await waitForPromises();
+ createComponent({ notificationsMutationHandler: toggleNotificationsFailureHandler });
findNotificationsToggle().vm.$emit('change', false);
-
await waitForPromises();
- expect(wrapper.emitted('error')).toEqual([[errorMessage]]);
+ expect(wrapper.emitted('error')).toEqual([['Failed to subscribe']]);
});
});
@@ -359,13 +301,11 @@ describe('WorkItemActions component', () => {
it('promote key result to objective', async () => {
createComponent({ workItemType: 'Key Result' });
-
- // wait for work item types
await waitForPromises();
expect(findPromoteButton().exists()).toBe(true);
- findPromoteButton().vm.$emit('action');
+ findPromoteButton().vm.$emit('action');
await waitForPromises();
expect(convertWorkItemMutationSuccessHandler).toHaveBeenCalled();
@@ -378,13 +318,11 @@ describe('WorkItemActions component', () => {
workItemType: 'Key Result',
convertWorkItemMutationHandler: convertWorkItemMutationErrorHandler,
});
-
- // wait for work item types
await waitForPromises();
expect(findPromoteButton().exists()).toBe(true);
- findPromoteButton().vm.$emit('action');
+ findPromoteButton().vm.$emit('action');
await waitForPromises();
expect(convertWorkItemMutationErrorHandler).toHaveBeenCalled();
@@ -399,6 +337,7 @@ describe('WorkItemActions component', () => {
createComponent();
expect(findCopyReferenceButton().exists()).toBe(true);
+
findCopyReferenceButton().vm.$emit('action');
expect(toast).toHaveBeenCalledWith('Reference copied');
@@ -421,6 +360,7 @@ describe('WorkItemActions component', () => {
createComponent();
expect(findCopyCreateNoteEmailButton().exists()).toBe(true);
+
findCopyCreateNoteEmailButton().vm.$emit('action');
expect(toast).toHaveBeenCalledWith('Email address copied');
diff --git a/spec/frontend/work_items/components/work_item_assignees_spec.js b/spec/frontend/work_items/components/work_item_assignees_spec.js
index 50a8847032e..196e19791df 100644
--- a/spec/frontend/work_items/components/work_item_assignees_spec.js
+++ b/spec/frontend/work_items/components/work_item_assignees_spec.js
@@ -6,7 +6,8 @@ import waitForPromises from 'helpers/wait_for_promises';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import { mockTracking } from 'helpers/tracking_helper';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
-import userSearchQuery from '~/graphql_shared/queries/users_search.query.graphql';
+import groupUsersSearchQuery from '~/graphql_shared/queries/group_users_search.query.graphql';
+import usersSearchQuery from '~/graphql_shared/queries/users_search.query.graphql';
import currentUserQuery from '~/graphql_shared/queries/current_user.query.graphql';
import InviteMembersTrigger from '~/invite_members/components/invite_members_trigger.vue';
import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
@@ -53,6 +54,9 @@ describe('WorkItemAssignees component', () => {
const successSearchQueryHandler = jest
.fn()
.mockResolvedValue(projectMembersResponseWithCurrentUser);
+ const successGroupSearchQueryHandler = jest
+ .fn()
+ .mockResolvedValue(projectMembersResponseWithCurrentUser);
const successSearchQueryHandlerWithMoreAssignees = jest
.fn()
.mockResolvedValue(projectMembersResponseWithCurrentUserWithNextPage);
@@ -75,19 +79,22 @@ describe('WorkItemAssignees component', () => {
allowsMultipleAssignees = true,
canInviteMembers = false,
canUpdate = true,
+ isGroup = false,
} = {}) => {
const apolloProvider = createMockApollo([
- [userSearchQuery, searchQueryHandler],
+ [usersSearchQuery, searchQueryHandler],
+ [groupUsersSearchQuery, successGroupSearchQueryHandler],
[currentUserQuery, currentUserQueryHandler],
[updateWorkItemMutation, updateWorkItemMutationHandler],
]);
wrapper = mountExtended(WorkItemAssignees, {
provide: {
- fullPath: 'test-project-path',
+ isGroup,
},
propsData: {
assignees,
+ fullPath: 'test-project-path',
workItemId,
allowsMultipleAssignees,
workItemType: TASK_TYPE_NAME,
@@ -540,4 +547,36 @@ describe('WorkItemAssignees component', () => {
expect(findTokenSelector().props('dropdownItems')).toHaveLength(2);
});
+
+ describe('when project context', () => {
+ beforeEach(() => {
+ createComponent();
+ findTokenSelector().vm.$emit('focus');
+ findTokenSelector().vm.$emit('text-input', 'jane');
+ });
+
+ it('calls the project users search query', () => {
+ expect(successSearchQueryHandler).toHaveBeenCalled();
+ });
+
+ it('does not call the group users search query', () => {
+ expect(successGroupSearchQueryHandler).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when group context', () => {
+ beforeEach(() => {
+ createComponent({ isGroup: true });
+ findTokenSelector().vm.$emit('focus');
+ findTokenSelector().vm.$emit('text-input', 'jane');
+ });
+
+ it('does not call the project users search query', () => {
+ expect(successSearchQueryHandler).not.toHaveBeenCalled();
+ });
+
+ it('calls the group users search query', () => {
+ expect(successGroupSearchQueryHandler).toHaveBeenCalled();
+ });
+ });
});
diff --git a/spec/frontend/work_items/components/work_item_attributes_wrapper_spec.js b/spec/frontend/work_items/components/work_item_attributes_wrapper_spec.js
index 8b7e04854af..123cf647674 100644
--- a/spec/frontend/work_items/components/work_item_attributes_wrapper_spec.js
+++ b/spec/frontend/work_items/components/work_item_attributes_wrapper_spec.js
@@ -20,6 +20,7 @@ describe('WorkItemAttributesWrapper component', () => {
const createComponent = ({ workItem = workItemQueryResponse.data.workItem } = {}) => {
wrapper = shallowMount(WorkItemAttributesWrapper, {
propsData: {
+ fullPath: 'group/project',
workItem,
},
provide: {
@@ -28,7 +29,6 @@ describe('WorkItemAttributesWrapper component', () => {
hasOkrsFeature: true,
hasIssuableHealthStatusFeature: true,
projectNamespace: 'namespace',
- fullPath: 'group/project',
},
stubs: {
WorkItemWeight: true,
diff --git a/spec/frontend/work_items/components/work_item_created_updated_spec.js b/spec/frontend/work_items/components/work_item_created_updated_spec.js
index f77c5481906..3f14615e173 100644
--- a/spec/frontend/work_items/components/work_item_created_updated_spec.js
+++ b/spec/frontend/work_items/components/work_item_created_updated_spec.js
@@ -7,12 +7,18 @@ import waitForPromises from 'helpers/wait_for_promises';
import WorkItemCreatedUpdated from '~/work_items/components/work_item_created_updated.vue';
import ConfidentialityBadge from '~/vue_shared/components/confidentiality_badge.vue';
import WorkItemTypeIcon from '~/work_items/components/work_item_type_icon.vue';
+import groupWorkItemByIidQuery from '~/work_items/graphql/group_work_item_by_iid.query.graphql';
import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
-import { workItemByIidResponseFactory, mockAssignees } from '../mock_data';
+import {
+ groupWorkItemByIidResponseFactory,
+ mockAssignees,
+ workItemByIidResponseFactory,
+} from '../mock_data';
describe('WorkItemCreatedUpdated component', () => {
let wrapper;
let successHandler;
+ let groupSuccessHandler;
Vue.use(VueApollo);
@@ -30,21 +36,31 @@ describe('WorkItemCreatedUpdated component', () => {
updatedAt,
confidential = false,
updateInProgress = false,
+ isGroup = false,
} = {}) => {
- const workItemQueryResponse = workItemByIidResponseFactory({
+ const workItemQueryResponse = workItemByIidResponseFactory({ author, updatedAt, confidential });
+ const groupWorkItemQueryResponse = groupWorkItemByIidResponseFactory({
author,
updatedAt,
confidential,
});
successHandler = jest.fn().mockResolvedValue(workItemQueryResponse);
+ groupSuccessHandler = jest.fn().mockResolvedValue(groupWorkItemQueryResponse);
wrapper = shallowMount(WorkItemCreatedUpdated, {
- apolloProvider: createMockApollo([[workItemByIidQuery, successHandler]]),
+ apolloProvider: createMockApollo([
+ [workItemByIidQuery, successHandler],
+ [groupWorkItemByIidQuery, groupSuccessHandler],
+ ]),
provide: {
+ isGroup,
+ },
+ propsData: {
fullPath: '/some/project',
+ workItemIid,
+ updateInProgress,
},
- propsData: { workItemIid, updateInProgress },
stubs: {
GlAvatarLink,
GlSprintf,
@@ -54,10 +70,44 @@ describe('WorkItemCreatedUpdated component', () => {
await waitForPromises();
};
- it('skips the work item query when workItemIid is not defined', async () => {
- await createComponent({ workItemIid: null });
+ describe('when project context', () => {
+ it('calls the project work item query', async () => {
+ await createComponent();
+
+ expect(successHandler).toHaveBeenCalled();
+ });
+
+ it('skips calling the group work item query', async () => {
+ await createComponent();
+
+ expect(groupSuccessHandler).not.toHaveBeenCalled();
+ });
+
+ it('skips calling the project work item query when workItemIid is not defined', async () => {
+ await createComponent({ workItemIid: null });
+
+ expect(successHandler).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when group context', () => {
+ it('skips calling the project work item query', async () => {
+ await createComponent({ isGroup: true });
+
+ expect(successHandler).not.toHaveBeenCalled();
+ });
+
+ it('calls the group work item query', async () => {
+ await createComponent({ isGroup: true });
- expect(successHandler).not.toHaveBeenCalled();
+ expect(groupSuccessHandler).toHaveBeenCalled();
+ });
+
+ it('skips calling the group work item query when workItemIid is not defined', async () => {
+ await createComponent({ isGroup: true, workItemIid: null });
+
+ expect(groupSuccessHandler).not.toHaveBeenCalled();
+ });
});
it('shows work item type metadata with type and icon', async () => {
diff --git a/spec/frontend/work_items/components/work_item_description_spec.js b/spec/frontend/work_items/components/work_item_description_spec.js
index 8b9963b2476..de2895591dd 100644
--- a/spec/frontend/work_items/components/work_item_description_spec.js
+++ b/spec/frontend/work_items/components/work_item_description_spec.js
@@ -13,9 +13,11 @@ import WorkItemDescription from '~/work_items/components/work_item_description.v
import WorkItemDescriptionRendered from '~/work_items/components/work_item_description_rendered.vue';
import { TRACKING_CATEGORY_SHOW } from '~/work_items/constants';
import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
+import groupWorkItemByIidQuery from '~/work_items/graphql/group_work_item_by_iid.query.graphql';
import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
import { autocompleteDataSources, markdownPreviewPath } from '~/work_items/utils';
import {
+ groupWorkItemByIidResponseFactory,
updateWorkItemMutationResponse,
workItemByIidResponseFactory,
workItemQueryResponse,
@@ -33,6 +35,7 @@ describe('WorkItemDescription', () => {
const mutationSuccessHandler = jest.fn().mockResolvedValue(updateWorkItemMutationResponse);
let workItemResponseHandler;
+ let groupWorkItemResponseHandler;
const findForm = () => wrapper.findComponent(GlForm);
const findMarkdownEditor = () => wrapper.findComponent(MarkdownEditor);
@@ -51,22 +54,28 @@ describe('WorkItemDescription', () => {
canUpdate = true,
workItemResponse = workItemByIidResponseFactory({ canUpdate }),
isEditing = false,
+ isGroup = false,
workItemIid = '1',
} = {}) => {
workItemResponseHandler = jest.fn().mockResolvedValue(workItemResponse);
+ groupWorkItemResponseHandler = jest
+ .fn()
+ .mockResolvedValue(groupWorkItemByIidResponseFactory({ canUpdate }));
const { id } = workItemQueryResponse.data.workItem;
wrapper = shallowMount(WorkItemDescription, {
apolloProvider: createMockApollo([
[workItemByIidQuery, workItemResponseHandler],
+ [groupWorkItemByIidQuery, groupWorkItemResponseHandler],
[updateWorkItemMutation, mutationHandler],
]),
propsData: {
+ fullPath: 'test-project-path',
workItemId: id,
workItemIid,
},
provide: {
- fullPath: 'test-project-path',
+ isGroup,
},
});
@@ -247,9 +256,31 @@ describe('WorkItemDescription', () => {
});
});
- it('calls the work item query', async () => {
- await createComponent();
+ describe('when project context', () => {
+ it('calls the project work item query', () => {
+ createComponent();
+
+ expect(workItemResponseHandler).toHaveBeenCalled();
+ });
- expect(workItemResponseHandler).toHaveBeenCalled();
+ it('skips calling the group work item query', () => {
+ createComponent();
+
+ expect(groupWorkItemResponseHandler).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when group context', () => {
+ it('skips calling the project work item query', () => {
+ createComponent({ isGroup: true });
+
+ expect(workItemResponseHandler).not.toHaveBeenCalled();
+ });
+
+ it('calls the group work item query', () => {
+ createComponent({ isGroup: true });
+
+ expect(groupWorkItemResponseHandler).toHaveBeenCalled();
+ });
});
});
diff --git a/spec/frontend/work_items/components/work_item_detail_spec.js b/spec/frontend/work_items/components/work_item_detail_spec.js
index fec6d0673c6..28826748cb0 100644
--- a/spec/frontend/work_items/components/work_item_detail_spec.js
+++ b/spec/frontend/work_items/components/work_item_detail_spec.js
@@ -28,12 +28,14 @@ import WorkItemStateToggleButton from '~/work_items/components/work_item_state_t
import AbuseCategorySelector from '~/abuse_reports/components/abuse_category_selector.vue';
import WorkItemTodos from '~/work_items/components/work_item_todos.vue';
import { i18n } from '~/work_items/constants';
+import groupWorkItemByIidQuery from '~/work_items/graphql/group_work_item_by_iid.query.graphql';
import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
import updateWorkItemTaskMutation from '~/work_items/graphql/update_work_item_task.mutation.graphql';
import workItemUpdatedSubscription from '~/work_items/graphql/work_item_updated.subscription.graphql';
import {
+ groupWorkItemByIidResponseFactory,
mockParent,
workItemByIidResponseFactory,
objectiveType,
@@ -49,6 +51,10 @@ describe('WorkItemDetail component', () => {
Vue.use(VueApollo);
const workItemQueryResponse = workItemByIidResponseFactory({ canUpdate: true, canDelete: true });
+ const groupWorkItemQueryResponse = groupWorkItemByIidResponseFactory({
+ canUpdate: true,
+ canDelete: true,
+ });
const workItemQueryResponseWithCannotUpdate = workItemByIidResponseFactory({
canUpdate: false,
canDelete: false,
@@ -59,6 +65,7 @@ describe('WorkItemDetail component', () => {
canDelete: true,
});
const successHandler = jest.fn().mockResolvedValue(workItemQueryResponse);
+ const groupSuccessHandler = jest.fn().mockResolvedValue(groupWorkItemQueryResponse);
const showModalHandler = jest.fn();
const { id } = workItemQueryResponse.data.workspace.workItems.nodes[0];
const workItemUpdatedSubscriptionHandler = jest
@@ -92,6 +99,7 @@ describe('WorkItemDetail component', () => {
const findWorkItemTypeIcon = () => wrapper.findComponent(WorkItemTypeIcon);
const createComponent = ({
+ isGroup = false,
isModal = false,
updateInProgress = false,
workItemIid = '1',
@@ -101,14 +109,13 @@ describe('WorkItemDetail component', () => {
workItemsMvc2Enabled = false,
linkedWorkItemsEnabled = false,
} = {}) => {
- const handlers = [
- [workItemByIidQuery, handler],
- [workItemUpdatedSubscription, workItemUpdatedSubscriptionHandler],
- confidentialityMock,
- ];
-
wrapper = shallowMountExtended(WorkItemDetail, {
- apolloProvider: createMockApollo(handlers),
+ apolloProvider: createMockApollo([
+ [workItemByIidQuery, handler],
+ [groupWorkItemByIidQuery, groupSuccessHandler],
+ [workItemUpdatedSubscription, workItemUpdatedSubscriptionHandler],
+ confidentialityMock,
+ ]),
isLoggedIn: isLoggedIn(),
propsData: {
isModal,
@@ -131,6 +138,7 @@ describe('WorkItemDetail component', () => {
hasIssuableHealthStatusFeature: true,
projectNamespace: 'namespace',
fullPath: 'group/project',
+ isGroup,
reportAbusePath: '/report/abuse/path',
},
stubs: {
@@ -484,25 +492,64 @@ describe('WorkItemDetail component', () => {
expect(findAlert().text()).toBe(updateError);
});
- it('calls the work item query', async () => {
- createComponent();
- await waitForPromises();
+ describe('when project context', () => {
+ it('calls the project work item query', async () => {
+ createComponent();
+ await waitForPromises();
- expect(successHandler).toHaveBeenCalledWith({ fullPath: 'group/project', iid: '1' });
- });
+ expect(successHandler).toHaveBeenCalledWith({ fullPath: 'group/project', iid: '1' });
+ });
- it('skips the work item query when there is no workItemIid', async () => {
- createComponent({ workItemIid: null });
- await waitForPromises();
+ it('skips calling the group work item query', async () => {
+ createComponent();
+ await waitForPromises();
+
+ expect(groupSuccessHandler).not.toHaveBeenCalled();
+ });
- expect(successHandler).not.toHaveBeenCalled();
+ it('skips calling the project work item query when there is no workItemIid', async () => {
+ createComponent({ workItemIid: null });
+ await waitForPromises();
+
+ expect(successHandler).not.toHaveBeenCalled();
+ });
+
+ it('calls the project work item query when isModal=true', async () => {
+ createComponent({ isModal: true });
+ await waitForPromises();
+
+ expect(successHandler).toHaveBeenCalledWith({ fullPath: 'group/project', iid: '1' });
+ });
});
- it('calls the work item query when isModal=true', async () => {
- createComponent({ isModal: true });
- await waitForPromises();
+ describe('when group context', () => {
+ it('skips calling the project work item query', async () => {
+ createComponent({ isGroup: true });
+ await waitForPromises();
+
+ expect(successHandler).not.toHaveBeenCalled();
+ });
+
+ it('calls the group work item query', async () => {
+ createComponent({ isGroup: true });
+ await waitForPromises();
+
+ expect(groupSuccessHandler).toHaveBeenCalledWith({ fullPath: 'group/project', iid: '1' });
+ });
+
+ it('skips calling the group work item query when there is no workItemIid', async () => {
+ createComponent({ isGroup: true, workItemIid: null });
+ await waitForPromises();
- expect(successHandler).toHaveBeenCalledWith({ fullPath: 'group/project', iid: '1' });
+ expect(groupSuccessHandler).not.toHaveBeenCalled();
+ });
+
+ it('calls the group work item query when isModal=true', async () => {
+ createComponent({ isGroup: true, isModal: true });
+ await waitForPromises();
+
+ expect(groupSuccessHandler).toHaveBeenCalledWith({ fullPath: 'group/project', iid: '1' });
+ });
});
describe('hierarchy widget', () => {
diff --git a/spec/frontend/work_items/components/work_item_labels_spec.js b/spec/frontend/work_items/components/work_item_labels_spec.js
index 4a20e654060..28aa7ffa1be 100644
--- a/spec/frontend/work_items/components/work_item_labels_spec.js
+++ b/spec/frontend/work_items/components/work_item_labels_spec.js
@@ -7,10 +7,12 @@ import { mountExtended } from 'helpers/vue_test_utils_helper';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import labelSearchQuery from '~/sidebar/components/labels/labels_select_widget/graphql/project_labels.query.graphql';
import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
+import groupWorkItemByIidQuery from '~/work_items/graphql/group_work_item_by_iid.query.graphql';
import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
import WorkItemLabels from '~/work_items/components/work_item_labels.vue';
import { i18n, I18N_WORK_ITEM_ERROR_FETCHING_LABELS } from '~/work_items/constants';
import {
+ groupWorkItemByIidResponseFactory,
projectLabelsResponse,
mockLabels,
workItemByIidResponseFactory,
@@ -32,6 +34,9 @@ describe('WorkItemLabels component', () => {
const workItemQuerySuccess = jest
.fn()
.mockResolvedValue(workItemByIidResponseFactory({ labels: null }));
+ const groupWorkItemQuerySuccess = jest
+ .fn()
+ .mockResolvedValue(groupWorkItemByIidResponseFactory({ labels: null }));
const successSearchQueryHandler = jest.fn().mockResolvedValue(projectLabelsResponse);
const successUpdateWorkItemMutationHandler = jest
.fn()
@@ -40,6 +45,7 @@ describe('WorkItemLabels component', () => {
const createComponent = ({
canUpdate = true,
+ isGroup = false,
workItemQueryHandler = workItemQuerySuccess,
searchQueryHandler = successSearchQueryHandler,
updateWorkItemMutationHandler = successUpdateWorkItemMutationHandler,
@@ -48,13 +54,15 @@ describe('WorkItemLabels component', () => {
wrapper = mountExtended(WorkItemLabels, {
apolloProvider: createMockApollo([
[workItemByIidQuery, workItemQueryHandler],
+ [groupWorkItemByIidQuery, groupWorkItemQuerySuccess],
[labelSearchQuery, searchQueryHandler],
[updateWorkItemMutation, updateWorkItemMutationHandler],
]),
provide: {
- fullPath: 'test-project-path',
+ isGroup,
},
propsData: {
+ fullPath: 'test-project-path',
workItemId,
workItemIid,
canUpdate,
@@ -244,17 +252,49 @@ describe('WorkItemLabels component', () => {
});
});
- it('calls the work item query', async () => {
- createComponent();
- await waitForPromises();
+ describe('when project context', () => {
+ it('calls the project work item query', async () => {
+ createComponent();
+ await waitForPromises();
+
+ expect(workItemQuerySuccess).toHaveBeenCalled();
+ });
+
+ it('skips calling the group work item query', async () => {
+ createComponent();
+ await waitForPromises();
+
+ expect(groupWorkItemQuerySuccess).not.toHaveBeenCalled();
+ });
- expect(workItemQuerySuccess).toHaveBeenCalled();
+ it('skips calling the project work item query when missing workItemIid', async () => {
+ createComponent({ workItemIid: '' });
+ await waitForPromises();
+
+ expect(workItemQuerySuccess).not.toHaveBeenCalled();
+ });
});
- it('skips calling the work item query when missing workItemIid', async () => {
- createComponent({ workItemIid: '' });
- await waitForPromises();
+ describe('when group context', () => {
+ it('skips calling the project work item query', async () => {
+ createComponent({ isGroup: true });
+ await waitForPromises();
+
+ expect(workItemQuerySuccess).not.toHaveBeenCalled();
+ });
- expect(workItemQuerySuccess).not.toHaveBeenCalled();
+ it('calls the group work item query', async () => {
+ createComponent({ isGroup: true });
+ await waitForPromises();
+
+ expect(groupWorkItemQuerySuccess).toHaveBeenCalled();
+ });
+
+ it('skips calling the group work item query when missing workItemIid', async () => {
+ createComponent({ isGroup: true, workItemIid: '' });
+ await waitForPromises();
+
+ expect(groupWorkItemQuerySuccess).not.toHaveBeenCalled();
+ });
});
});
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_children_wrapper_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_children_wrapper_spec.js
index cd077fbf705..0147b199040 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_children_wrapper_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_children_wrapper_spec.js
@@ -53,9 +53,10 @@ describe('WorkItemChildrenWrapper', () => {
wrapper = shallowMountExtended(WorkItemChildrenWrapper, {
apolloProvider: mockApollo,
provide: {
- fullPath: 'test/project',
+ isGroup: false,
},
propsData: {
+ fullPath: 'test/project',
workItemType,
workItemId: 'gid://gitlab/WorkItem/515',
workItemIid: '1',
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js
index a624bbe8567..9addf6c3450 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js
@@ -62,9 +62,6 @@ describe('WorkItemLinkChild', () => {
[getWorkItemTreeQuery, getWorkItemTreeQueryHandler],
[updateWorkItemMutation, mutationChangeParentHandler],
]),
- provide: {
- fullPath: 'gitlab-org/gitlab-test',
- },
propsData: {
canUpdate,
issuableGid,
@@ -93,23 +90,7 @@ describe('WorkItemLinkChild', () => {
expect(findWorkItemLinkChildContents().props()).toEqual({
childItem: workItemObjectiveWithChild,
canUpdate: true,
- childPath: '/gitlab-org/gitlab-test/-/work_items/12',
- });
- });
-
- describe('with relative instance', () => {
- beforeEach(() => {
- window.gon = { relative_url_root: '/test' };
- createComponent({
- childItem: workItemObjectiveWithChild,
- workItemType: WORK_ITEM_TYPE_VALUE_OBJECTIVE,
- });
- });
-
- it('adds the relative url to child path value', () => {
- expect(findWorkItemLinkChildContents().props('childPath')).toBe(
- '/test/gitlab-org/gitlab-test/-/work_items/12',
- );
+ showTaskIcon: false,
});
});
});
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js
index aaab22fd18d..0a9da17d284 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js
@@ -54,6 +54,7 @@ describe('WorkItemLinksForm', () => {
[createWorkItemMutation, createMutationResolver],
]),
propsData: {
+ fullPath: 'project/path',
issuableGid: 'gid://gitlab/WorkItem/1',
parentConfidential,
parentIteration,
@@ -62,8 +63,8 @@ describe('WorkItemLinksForm', () => {
formType,
},
provide: {
- fullPath: 'project/path',
hasIterationsFeature,
+ isGroup: false,
},
});
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js
index e24cfe27616..0b88b3ff5b4 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js
@@ -13,9 +13,11 @@ import WorkItemChildrenWrapper from '~/work_items/components/work_item_links/wor
import WorkItemDetailModal from '~/work_items/components/work_item_detail_modal.vue';
import AbuseCategorySelector from '~/abuse_reports/components/abuse_category_selector.vue';
import { FORM_TYPES } from '~/work_items/constants';
+import groupWorkItemByIidQuery from '~/work_items/graphql/group_work_item_by_iid.query.graphql';
import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
import {
getIssueDetailsResponse,
+ groupWorkItemByIidResponseFactory,
workItemHierarchyResponse,
workItemHierarchyEmptyResponse,
workItemHierarchyNoUpdatePermissionResponse,
@@ -32,6 +34,9 @@ describe('WorkItemLinks', () => {
let mockApollo;
const responseWithAddChildPermission = jest.fn().mockResolvedValue(workItemHierarchyResponse);
+ const groupResponseWithAddChildPermission = jest
+ .fn()
+ .mockResolvedValue(groupWorkItemByIidResponseFactory());
const responseWithoutAddChildPermission = jest
.fn()
.mockResolvedValue(workItemByIidResponseFactory({ adminParentLink: false }));
@@ -40,20 +45,22 @@ describe('WorkItemLinks', () => {
fetchHandler = responseWithAddChildPermission,
issueDetailsQueryHandler = jest.fn().mockResolvedValue(getIssueDetailsResponse()),
hasIterationsFeature = false,
+ isGroup = false,
} = {}) => {
mockApollo = createMockApollo(
[
[workItemByIidQuery, fetchHandler],
+ [groupWorkItemByIidQuery, groupResponseWithAddChildPermission],
[issueDetailsQuery, issueDetailsQueryHandler],
],
resolvers,
- { addTypename: true },
);
wrapper = shallowMountExtended(WorkItemLinks, {
provide: {
fullPath: 'project/path',
hasIterationsFeature,
+ isGroup,
reportAbusePath: '/report/abuse/path',
},
propsData: {
@@ -243,4 +250,32 @@ describe('WorkItemLinks', () => {
expect(findAbuseCategorySelector().exists()).toBe(false);
});
});
+
+ describe('when project context', () => {
+ it('calls the project work item query', () => {
+ createComponent();
+
+ expect(responseWithAddChildPermission).toHaveBeenCalled();
+ });
+
+ it('skips calling the group work item query', () => {
+ createComponent();
+
+ expect(groupResponseWithAddChildPermission).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when group context', () => {
+ it('skips calling the project work item query', () => {
+ createComponent({ isGroup: true });
+
+ expect(responseWithAddChildPermission).not.toHaveBeenCalled();
+ });
+
+ it('calls the group work item query', () => {
+ createComponent({ isGroup: true });
+
+ expect(groupResponseWithAddChildPermission).toHaveBeenCalled();
+ });
+ });
});
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js
index 01fa4591cde..f30fded0b45 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js
@@ -29,10 +29,8 @@ describe('WorkItemTree', () => {
canUpdate = true,
} = {}) => {
wrapper = shallowMountExtended(WorkItemTree, {
- provide: {
- fullPath: 'test/project',
- },
propsData: {
+ fullPath: 'test/project',
workItemType,
parentWorkItemType,
workItemId: 'gid://gitlab/WorkItem/515',
diff --git a/spec/frontend/work_items/components/work_item_milestone_spec.js b/spec/frontend/work_items/components/work_item_milestone_spec.js
index c42c9a573e5..e303ad4b481 100644
--- a/spec/frontend/work_items/components/work_item_milestone_spec.js
+++ b/spec/frontend/work_items/components/work_item_milestone_spec.js
@@ -66,10 +66,8 @@ describe('WorkItemMilestone component', () => {
[projectMilestonesQuery, searchQueryHandler],
[updateWorkItemMutation, mutationHandler],
]),
- provide: {
- fullPath: 'full-path',
- },
propsData: {
+ fullPath: 'full-path',
canUpdate,
workItemMilestone: milestone,
workItemId,
diff --git a/spec/frontend/work_items/components/work_item_notes_spec.js b/spec/frontend/work_items/components/work_item_notes_spec.js
index 35f01c85ec8..9e02e0708d4 100644
--- a/spec/frontend/work_items/components/work_item_notes_spec.js
+++ b/spec/frontend/work_items/components/work_item_notes_spec.js
@@ -98,10 +98,8 @@ describe('WorkItemNotes component', () => {
[workItemNoteUpdatedSubscription, notesUpdateSubscriptionHandler],
[workItemNoteDeletedSubscription, notesDeleteSubscriptionHandler],
]),
- provide: {
- fullPath: 'test-path',
- },
propsData: {
+ fullPath: 'test-path',
workItemId,
workItemIid,
workItemType: 'task',
diff --git a/spec/frontend/work_items/components/work_item_parent_spec.js b/spec/frontend/work_items/components/work_item_parent_spec.js
new file mode 100644
index 00000000000..a72eeabc43c
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_parent_spec.js
@@ -0,0 +1,236 @@
+import * as Sentry from '@sentry/browser';
+import { GlCollapsibleListbox, GlFormGroup } from '@gitlab/ui';
+
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import waitForPromises from 'helpers/wait_for_promises';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+
+import WorkItemParent from '~/work_items/components/work_item_parent.vue';
+import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
+import projectWorkItemsQuery from '~/work_items/graphql/project_work_items.query.graphql';
+import { WORK_ITEM_TYPE_ENUM_OBJECTIVE } from '~/work_items/constants';
+
+import {
+ availableObjectivesResponse,
+ mockParentWidgetResponse,
+ updateWorkItemMutationResponseFactory,
+ searchedObjectiveResponse,
+ updateWorkItemMutationErrorResponse,
+} from '../mock_data';
+
+jest.mock('@sentry/browser');
+
+describe('WorkItemParent component', () => {
+ Vue.use(VueApollo);
+
+ let wrapper;
+
+ const workItemId = 'gid://gitlab/WorkItem/1';
+ const workItemType = 'Objective';
+
+ const availableWorkItemsSuccessHandler = jest.fn().mockResolvedValue(availableObjectivesResponse);
+ const availableWorkItemsFailureHandler = jest.fn().mockRejectedValue(new Error());
+
+ const successUpdateWorkItemMutationHandler = jest
+ .fn()
+ .mockResolvedValue(updateWorkItemMutationResponseFactory({ parent: mockParentWidgetResponse }));
+
+ const createComponent = ({
+ canUpdate = true,
+ parent = null,
+ searchQueryHandler = availableWorkItemsSuccessHandler,
+ mutationHandler = successUpdateWorkItemMutationHandler,
+ } = {}) => {
+ wrapper = shallowMountExtended(WorkItemParent, {
+ apolloProvider: createMockApollo([
+ [projectWorkItemsQuery, searchQueryHandler],
+ [updateWorkItemMutation, mutationHandler],
+ ]),
+ provide: {
+ fullPath: 'full-path',
+ },
+ propsData: {
+ canUpdate,
+ parent,
+ workItemId,
+ workItemType,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ const findInputGroup = () => wrapper.findComponent(GlFormGroup);
+ const findParentText = () => wrapper.findByTestId('disabled-text');
+ const findCollapsibleListbox = () => wrapper.findComponent(GlCollapsibleListbox);
+
+ describe('template', () => {
+ it('shows field label as Parent', () => {
+ expect(findInputGroup().exists()).toBe(true);
+ expect(findInputGroup().attributes('label')).toBe('Parent');
+ });
+
+ it('renders the collapsible listbox with required props', () => {
+ expect(findCollapsibleListbox().exists()).toBe(true);
+ expect(findCollapsibleListbox().props()).toMatchObject({
+ items: [],
+ headerText: 'Assign parent',
+ category: 'tertiary',
+ loading: false,
+ noCaret: true,
+ isCheckCentered: true,
+ searchable: true,
+ searching: false,
+ infiniteScroll: false,
+ noResultsText: 'No matching results',
+ toggleText: 'None',
+ searchPlaceholder: 'Search',
+ resetButtonLabel: 'Unassign',
+ block: true,
+ });
+ });
+
+ it('displays parent text instead of listbox if canUpdate is false', () => {
+ createComponent({ canUpdate: false, parent: mockParentWidgetResponse });
+
+ expect(findCollapsibleListbox().exists()).toBe(false);
+ expect(findParentText().exists()).toBe(true);
+ expect(findParentText().text()).toBe('Objective 101');
+ });
+
+ it('shows loading while searching', async () => {
+ await findCollapsibleListbox().vm.$emit('shown');
+ expect(findCollapsibleListbox().props('searching')).toBe(true);
+ expect(findCollapsibleListbox().props('no-caret')).toBeUndefined();
+ });
+ });
+
+ describe('work items query', () => {
+ it('loads work items in the listbox', async () => {
+ await findCollapsibleListbox().vm.$emit('shown');
+
+ await waitForPromises();
+
+ expect(findCollapsibleListbox().props('searching')).toBe(false);
+ expect(findCollapsibleListbox().props('items')).toStrictEqual([
+ { text: 'Objective 101', value: 'gid://gitlab/WorkItem/716' },
+ { text: 'Objective 103', value: 'gid://gitlab/WorkItem/712' },
+ { text: 'Objective 102', value: 'gid://gitlab/WorkItem/711' },
+ ]);
+ expect(availableWorkItemsSuccessHandler).toHaveBeenCalled();
+ });
+
+ it('emits error when the query fails', async () => {
+ createComponent({ searchQueryHandler: availableWorkItemsFailureHandler });
+
+ await findCollapsibleListbox().vm.$emit('shown');
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([
+ ['Something went wrong while fetching items. Please try again.'],
+ ]);
+ });
+
+ it('searches item when input data is entered', async () => {
+ const searchedItemQueryHandler = jest.fn().mockResolvedValue(searchedObjectiveResponse);
+ createComponent({
+ searchQueryHandler: searchedItemQueryHandler,
+ });
+
+ await findCollapsibleListbox().vm.$emit('shown');
+ await findCollapsibleListbox().vm.$emit('search', 'Objective 101');
+
+ await waitForPromises();
+
+ expect(searchedItemQueryHandler).toHaveBeenCalledWith({
+ fullPath: 'full-path',
+ searchTerm: 'Objective 101',
+ types: [WORK_ITEM_TYPE_ENUM_OBJECTIVE],
+ in: 'TITLE',
+ });
+
+ await nextTick();
+
+ expect(findCollapsibleListbox().props('items')).toStrictEqual([
+ { text: 'Objective 101', value: 'gid://gitlab/WorkItem/716' },
+ ]);
+ });
+ });
+
+ describe('listbox', () => {
+ const selectWorkItem = async (workItem) => {
+ await findCollapsibleListbox().vm.$emit('shown');
+ await findCollapsibleListbox().vm.$emit('select', workItem);
+ };
+
+ it('calls mutation when item is selected', async () => {
+ selectWorkItem('gid://gitlab/WorkItem/716');
+
+ await waitForPromises();
+
+ expect(successUpdateWorkItemMutationHandler).toHaveBeenCalledWith({
+ input: {
+ id: 'gid://gitlab/WorkItem/1',
+ hierarchyWidget: {
+ parentId: 'gid://gitlab/WorkItem/716',
+ },
+ },
+ });
+ });
+
+ it('calls mutation when item is unassigned', async () => {
+ const unAssignParentWorkItemMutationHandler = jest
+ .fn()
+ .mockResolvedValue(updateWorkItemMutationResponseFactory({ parent: null }));
+ createComponent({
+ mutationHandler: unAssignParentWorkItemMutationHandler,
+ });
+
+ await findCollapsibleListbox().vm.$emit('reset');
+
+ await waitForPromises();
+
+ expect(unAssignParentWorkItemMutationHandler).toHaveBeenCalledWith({
+ input: {
+ id: 'gid://gitlab/WorkItem/1',
+ hierarchyWidget: {
+ parentId: null,
+ },
+ },
+ });
+ });
+
+ it('emits error when mutation fails', async () => {
+ createComponent({
+ mutationHandler: jest.fn().mockResolvedValue(updateWorkItemMutationErrorResponse),
+ });
+
+ selectWorkItem('gid://gitlab/WorkItem/716');
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([['Error!']]);
+ });
+
+ it('emits error and captures exception in sentry when network request fails', async () => {
+ const error = new Error('error');
+ createComponent({
+ mutationHandler: jest.fn().mockRejectedValue(error),
+ });
+
+ selectWorkItem('gid://gitlab/WorkItem/716');
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([
+ ['Something went wrong while updating the objective. Please try again.'],
+ ]);
+ expect(Sentry.captureException).toHaveBeenCalledWith(error);
+ });
+ });
+});
diff --git a/spec/frontend/work_items/components/work_item_relationships/__snapshots__/work_item_relationship_list_spec.js.snap b/spec/frontend/work_items/components/work_item_relationships/__snapshots__/work_item_relationship_list_spec.js.snap
index 9105e4de5e0..bbc19a011a5 100644
--- a/spec/frontend/work_items/components/work_item_relationships/__snapshots__/work_item_relationship_list_spec.js.snap
+++ b/spec/frontend/work_items/components/work_item_relationships/__snapshots__/work_item_relationship_list_spec.js.snap
@@ -1,7 +1,9 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`WorkItemRelationshipList renders linked item list 1`] = `
-<div>
+<div
+ data-testid="work-item-linked-items-list"
+>
<h4
class="gl-font-sm gl-font-weight-semibold gl-mb-2 gl-mt-3 gl-mx-2 gl-text-gray-700"
data-testid="work-items-list-heading"
@@ -20,7 +22,7 @@ exports[`WorkItemRelationshipList renders linked item list 1`] = `
<work-item-link-child-contents-stub
canupdate="true"
childitem="[object Object]"
- childpath="/test-project-path/-/work_items/83"
+ showtaskicon="true"
/>
</li>
</ul>
diff --git a/spec/frontend/work_items/components/work_item_relationships/work_item_add_relationship_form_spec.js b/spec/frontend/work_items/components/work_item_relationships/work_item_add_relationship_form_spec.js
new file mode 100644
index 00000000000..d7b3ced2ff9
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_relationships/work_item_add_relationship_form_spec.js
@@ -0,0 +1,156 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlForm, GlFormRadioGroup, GlAlert } from '@gitlab/ui';
+
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+
+import WorkItemAddRelationshipForm from '~/work_items/components/work_item_relationships/work_item_add_relationship_form.vue';
+import WorkItemTokenInput from '~/work_items/components/shared/work_item_token_input.vue';
+import addLinkedItemsMutation from '~/work_items/graphql/add_linked_items.mutation.graphql';
+import { LINKED_ITEM_TYPE_VALUE, MAX_WORK_ITEMS } from '~/work_items/constants';
+
+import { linkedWorkItemResponse, generateWorkItemsListWithId } from '../../mock_data';
+
+describe('WorkItemAddRelationshipForm', () => {
+ Vue.use(VueApollo);
+
+ let wrapper;
+ const linkedWorkItemsSuccessMutationHandler = jest
+ .fn()
+ .mockResolvedValue(linkedWorkItemResponse());
+
+ const createComponent = async ({
+ workItemId = 'gid://gitlab/WorkItem/1',
+ workItemIid = '1',
+ workItemType = 'Objective',
+ childrenIds = [],
+ linkedWorkItemsMutationHandler = linkedWorkItemsSuccessMutationHandler,
+ } = {}) => {
+ const mockApolloProvider = createMockApollo([
+ [addLinkedItemsMutation, linkedWorkItemsMutationHandler],
+ ]);
+
+ wrapper = shallowMountExtended(WorkItemAddRelationshipForm, {
+ apolloProvider: mockApolloProvider,
+ propsData: {
+ workItemId,
+ workItemIid,
+ workItemFullPath: 'test-project-path',
+ workItemType,
+ childrenIds,
+ },
+ });
+
+ await waitForPromises();
+ };
+
+ const findLinkWorkItemForm = () => wrapper.findComponent(GlForm);
+ const findLinkWorkItemButton = () => wrapper.findByTestId('link-work-item-button');
+ const findMaxWorkItemNote = () => wrapper.findByTestId('max-work-item-note');
+ const findRadioGroup = () => wrapper.findComponent(GlFormRadioGroup);
+ const findWorkItemTokenInput = () => wrapper.findComponent(WorkItemTokenInput);
+ const findGlAlert = () => wrapper.findComponent(GlAlert);
+
+ beforeEach(async () => {
+ await createComponent();
+ });
+
+ it('renders link work item form with default values', () => {
+ expect(findLinkWorkItemForm().exists()).toBe(true);
+ expect(findRadioGroup().props('options')).toEqual([
+ { text: 'relates to', value: LINKED_ITEM_TYPE_VALUE.RELATED },
+ { text: 'blocks', value: LINKED_ITEM_TYPE_VALUE.BLOCKS },
+ { text: 'is blocked by', value: LINKED_ITEM_TYPE_VALUE.BLOCKED_BY },
+ ]);
+ expect(findLinkWorkItemButton().attributes('disabled')).toBe('true');
+ expect(findMaxWorkItemNote().text()).toBe('Add a maximum of 10 items at a time.');
+ });
+
+ it('renders work item token input with default props', () => {
+ expect(findWorkItemTokenInput().props()).toMatchObject({
+ value: [],
+ fullPath: 'test-project-path',
+ childrenIds: [],
+ parentWorkItemId: 'gid://gitlab/WorkItem/1',
+ areWorkItemsToAddValid: true,
+ });
+ });
+
+ describe('linking a work item', () => {
+ const selectWorkItemTokens = (workItems) => {
+ findWorkItemTokenInput().vm.$emit('input', workItems);
+ };
+
+ it('enables add button when work item is selected', async () => {
+ await selectWorkItemTokens([
+ {
+ id: 'gid://gitlab/WorkItem/644',
+ },
+ ]);
+ expect(findLinkWorkItemButton().attributes('disabled')).toBeUndefined();
+ });
+
+ it('disables button when more than 10 work items are selected', async () => {
+ await selectWorkItemTokens(generateWorkItemsListWithId(MAX_WORK_ITEMS + 1));
+
+ expect(findWorkItemTokenInput().props('areWorkItemsToAddValid')).toBe(false);
+ expect(findLinkWorkItemButton().attributes('disabled')).toBe('true');
+ });
+
+ it.each`
+ assertionName | linkTypeInput
+ ${'related'} | ${LINKED_ITEM_TYPE_VALUE.RELATED}
+ ${'blocking'} | ${LINKED_ITEM_TYPE_VALUE.BLOCKED_BY}
+ `('selects and links $assertionName work item', async ({ linkTypeInput }) => {
+ findRadioGroup().vm.$emit('input', linkTypeInput);
+ await selectWorkItemTokens([
+ {
+ id: 'gid://gitlab/WorkItem/641',
+ },
+ {
+ id: 'gid://gitlab/WorkItem/642',
+ },
+ ]);
+
+ expect(findWorkItemTokenInput().props('areWorkItemsToAddValid')).toBe(true);
+
+ findLinkWorkItemForm().vm.$emit('submit', {
+ preventDefault: jest.fn(),
+ stopPropagation: jest.fn(),
+ });
+ await waitForPromises();
+
+ expect(linkedWorkItemsSuccessMutationHandler).toHaveBeenCalledWith({
+ input: {
+ id: 'gid://gitlab/WorkItem/1',
+ linkType: linkTypeInput,
+ workItemsIds: ['gid://gitlab/WorkItem/641', 'gid://gitlab/WorkItem/642'],
+ },
+ });
+ });
+
+ it.each`
+ errorType | mutationMock | errorMessage
+ ${'an error in the mutation response'} | ${jest.fn().mockResolvedValue(linkedWorkItemResponse({}, ['Linked Item failed']))} | ${'Linked Item failed'}
+ ${'a network error'} | ${jest.fn().mockRejectedValue(new Error('Network Error'))} | ${'Something went wrong when trying to link a item. Please try again.'}
+ `('shows an error message when there is $errorType', async ({ mutationMock, errorMessage }) => {
+ createComponent({ linkedWorkItemsMutationHandler: mutationMock });
+ await selectWorkItemTokens([
+ {
+ id: 'gid://gitlab/WorkItem/641',
+ },
+ ]);
+
+ findLinkWorkItemForm().vm.$emit('submit', {
+ preventDefault: jest.fn(),
+ stopPropagation: jest.fn(),
+ });
+ await waitForPromises();
+
+ expect(findGlAlert().exists()).toBe(true);
+ expect(findGlAlert().text()).toBe(errorMessage);
+ });
+ });
+});
diff --git a/spec/frontend/work_items/components/work_item_relationships/work_item_relationship_list_spec.js b/spec/frontend/work_items/components/work_item_relationships/work_item_relationship_list_spec.js
index 759ab7e14da..e26bea46ab1 100644
--- a/spec/frontend/work_items/components/work_item_relationships/work_item_relationship_list_spec.js
+++ b/spec/frontend/work_items/components/work_item_relationships/work_item_relationship_list_spec.js
@@ -14,7 +14,6 @@ describe('WorkItemRelationshipList', () => {
linkedItems,
heading,
canUpdate,
- workItemFullPath: 'test-project-path',
},
});
};
@@ -35,7 +34,7 @@ describe('WorkItemRelationshipList', () => {
expect(findWorkItemLinkChildContents().props()).toMatchObject({
childItem: mockLinkedItems[0].workItem,
canUpdate: true,
- childPath: '/test-project-path/-/work_items/83',
+ showTaskIcon: true,
});
});
});
diff --git a/spec/frontend/work_items/components/work_item_relationships/work_item_relationships_spec.js b/spec/frontend/work_items/components/work_item_relationships/work_item_relationships_spec.js
index c9a2499b127..7178fa1aae7 100644
--- a/spec/frontend/work_items/components/work_item_relationships/work_item_relationships_spec.js
+++ b/spec/frontend/work_items/components/work_item_relationships/work_item_relationships_spec.js
@@ -9,12 +9,17 @@ import waitForPromises from 'helpers/wait_for_promises';
import WidgetWrapper from '~/work_items/components/widget_wrapper.vue';
import WorkItemRelationships from '~/work_items/components/work_item_relationships/work_item_relationships.vue';
import WorkItemRelationshipList from '~/work_items/components/work_item_relationships/work_item_relationship_list.vue';
+import WorkItemAddRelationshipForm from '~/work_items/components/work_item_relationships/work_item_add_relationship_form.vue';
+import groupWorkItemByIidQuery from '~/work_items/graphql/group_work_item_by_iid.query.graphql';
import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
+import removeLinkedItemsMutation from '~/work_items/graphql/remove_linked_items.mutation.graphql';
import {
+ groupWorkItemByIidResponseFactory,
workItemByIidResponseFactory,
mockLinkedItems,
mockBlockingLinkedItem,
+ removeLinkedWorkItemResponse,
} from '../../mock_data';
describe('WorkItemRelationships', () => {
@@ -24,23 +29,44 @@ describe('WorkItemRelationships', () => {
const emptyLinkedWorkItemsQueryHandler = jest
.fn()
.mockResolvedValue(workItemByIidResponseFactory());
- const linkedWorkItemsQueryHandler = jest
+ const groupWorkItemsQueryHandler = jest
.fn()
- .mockResolvedValue(workItemByIidResponseFactory({ linkedItems: mockLinkedItems }));
- const blockingLinkedWorkItemQueryHandler = jest
+ .mockResolvedValue(groupWorkItemByIidResponseFactory());
+ const removeLinkedWorkItemSuccessMutationHandler = jest
.fn()
- .mockResolvedValue(workItemByIidResponseFactory({ linkedItems: mockBlockingLinkedItem }));
+ .mockResolvedValue(removeLinkedWorkItemResponse('Successfully unlinked IDs: 2.'));
+ const removeLinkedWorkItemErrorMutationHandler = jest
+ .fn()
+ .mockResolvedValue(removeLinkedWorkItemResponse(null, ['Linked item removal failed']));
+ const $toast = {
+ show: jest.fn(),
+ };
const createComponent = async ({
workItemQueryHandler = emptyLinkedWorkItemsQueryHandler,
+ workItemType = 'Task',
+ isGroup = false,
+ removeLinkedWorkItemMutationHandler = removeLinkedWorkItemSuccessMutationHandler,
} = {}) => {
- const mockApollo = createMockApollo([[workItemByIidQuery, workItemQueryHandler]]);
+ const mockApollo = createMockApollo([
+ [workItemByIidQuery, workItemQueryHandler],
+ [removeLinkedItemsMutation, removeLinkedWorkItemMutationHandler],
+ [groupWorkItemByIidQuery, groupWorkItemsQueryHandler],
+ ]);
wrapper = shallowMountExtended(WorkItemRelationships, {
apolloProvider: mockApollo,
propsData: {
+ workItemId: 'gid://gitlab/WorkItem/1',
workItemIid: '1',
workItemFullPath: 'test-project-path',
+ workItemType,
+ },
+ provide: {
+ isGroup,
+ },
+ mocks: {
+ $toast,
},
});
@@ -51,8 +77,11 @@ describe('WorkItemRelationships', () => {
const findWidgetWrapper = () => wrapper.findComponent(WidgetWrapper);
const findEmptyRelatedMessageContainer = () => wrapper.findByTestId('links-empty');
const findLinkedItemsCountContainer = () => wrapper.findByTestId('linked-items-count');
+ const findLinkedItemsHelpLink = () => wrapper.findByTestId('help-link');
const findAllWorkItemRelationshipListComponents = () =>
wrapper.findAllComponents(WorkItemRelationshipList);
+ const findAddButton = () => wrapper.findByTestId('link-item-add-button');
+ const findWorkItemRelationshipForm = () => wrapper.findComponent(WorkItemAddRelationshipForm);
it('shows loading icon when query is not processed', () => {
createComponent();
@@ -60,22 +89,35 @@ describe('WorkItemRelationships', () => {
expect(findLoadingIcon().exists()).toBe(true);
});
- it('renders the component with empty message when there are no items', async () => {
+ it('renders the component with with defaults', async () => {
await createComponent();
expect(wrapper.find('.work-item-relationships').exists()).toBe(true);
expect(findEmptyRelatedMessageContainer().exists()).toBe(true);
+ expect(findAddButton().exists()).toBe(true);
+ expect(findWorkItemRelationshipForm().exists()).toBe(false);
+ expect(findLinkedItemsHelpLink().attributes('href')).toBe(
+ '/help/user/okrs.md#linked-items-in-okrs',
+ );
});
it('renders blocking linked item lists', async () => {
- await createComponent({ workItemQueryHandler: blockingLinkedWorkItemQueryHandler });
+ await createComponent({
+ workItemQueryHandler: jest
+ .fn()
+ .mockResolvedValue(workItemByIidResponseFactory({ linkedItems: mockBlockingLinkedItem })),
+ });
expect(findAllWorkItemRelationshipListComponents().length).toBe(1);
expect(findLinkedItemsCountContainer().text()).toBe('1');
});
it('renders blocking, blocked by and related to linked item lists with proper count', async () => {
- await createComponent({ workItemQueryHandler: linkedWorkItemsQueryHandler });
+ await createComponent({
+ workItemQueryHandler: jest
+ .fn()
+ .mockResolvedValue(workItemByIidResponseFactory({ linkedItems: mockLinkedItems })),
+ });
// renders all 3 lists: blocking, blocked by and related to
expect(findAllWorkItemRelationshipListComponents().length).toBe(3);
@@ -90,4 +132,103 @@ describe('WorkItemRelationships', () => {
expect(findWidgetWrapper().props('error')).toBe(errorMessage);
});
+
+ it('does not render add button when there is no permission', async () => {
+ await createComponent({
+ workItemQueryHandler: jest
+ .fn()
+ .mockResolvedValue(workItemByIidResponseFactory({ canAdminWorkItemLink: false })),
+ });
+
+ expect(findAddButton().exists()).toBe(false);
+ });
+
+ it('shows form on add button and hides when cancel button is clicked', async () => {
+ await createComponent();
+
+ await findAddButton().vm.$emit('click');
+ expect(findWorkItemRelationshipForm().exists()).toBe(true);
+
+ await findWorkItemRelationshipForm().vm.$emit('cancel');
+ expect(findWorkItemRelationshipForm().exists()).toBe(false);
+ });
+
+ describe('when project context', () => {
+ it('calls the project work item query', () => {
+ createComponent();
+
+ expect(emptyLinkedWorkItemsQueryHandler).toHaveBeenCalled();
+ });
+
+ it('skips calling the group work item query', () => {
+ createComponent();
+
+ expect(groupWorkItemsQueryHandler).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when group context', () => {
+ it('skips calling the project work item query', () => {
+ createComponent({ isGroup: true });
+
+ expect(emptyLinkedWorkItemsQueryHandler).not.toHaveBeenCalled();
+ });
+
+ it('calls the group work item query', () => {
+ createComponent({ isGroup: true });
+
+ expect(groupWorkItemsQueryHandler).toHaveBeenCalled();
+ });
+ });
+
+ it('removes linked item and shows toast message when removeLinkedItem event is emitted', async () => {
+ await createComponent({
+ workItemQueryHandler: jest
+ .fn()
+ .mockResolvedValue(workItemByIidResponseFactory({ linkedItems: mockLinkedItems })),
+ });
+
+ expect(findLinkedItemsCountContainer().text()).toBe('3');
+
+ await findAllWorkItemRelationshipListComponents()
+ .at(0)
+ .vm.$emit('removeLinkedItem', { id: 'gid://gitlab/WorkItem/2' });
+
+ await waitForPromises();
+
+ expect(removeLinkedWorkItemSuccessMutationHandler).toHaveBeenCalledWith({
+ input: {
+ id: 'gid://gitlab/WorkItem/1',
+ workItemsIds: ['gid://gitlab/WorkItem/2'],
+ },
+ });
+
+ expect($toast.show).toHaveBeenCalledWith('Linked item removed');
+
+ expect(findLinkedItemsCountContainer().text()).toBe('2');
+ });
+
+ it.each`
+ errorType | mutationMock | errorMessage
+ ${'an error in the mutation response'} | ${removeLinkedWorkItemErrorMutationHandler} | ${'Linked item removal failed'}
+ ${'a network error'} | ${jest.fn().mockRejectedValue(new Error('Network Error'))} | ${'Something went wrong when removing item. Please refresh this page.'}
+ `(
+ 'shows an error message when there is $errorType while removing items',
+ async ({ mutationMock, errorMessage }) => {
+ await createComponent({
+ workItemQueryHandler: jest
+ .fn()
+ .mockResolvedValue(workItemByIidResponseFactory({ linkedItems: mockLinkedItems })),
+ removeLinkedWorkItemMutationHandler: mutationMock,
+ });
+
+ await findAllWorkItemRelationshipListComponents()
+ .at(0)
+ .vm.$emit('removeLinkedItem', { id: 'gid://gitlab/WorkItem/2' });
+
+ await waitForPromises();
+
+ expect(findWidgetWrapper().props('error')).toBe(errorMessage);
+ },
+ );
});
diff --git a/spec/frontend/work_items/components/work_item_todos_spec.js b/spec/frontend/work_items/components/work_item_todos_spec.js
index 454bd97bbee..c76cdbcee53 100644
--- a/spec/frontend/work_items/components/work_item_todos_spec.js
+++ b/spec/frontend/work_items/components/work_item_todos_spec.js
@@ -86,6 +86,9 @@ describe('WorkItemTodo component', () => {
workItemFullpath: mockWorkItemFullpath,
currentUserTodos,
},
+ provide: {
+ isGroup: false,
+ },
});
};
diff --git a/spec/frontend/work_items/graphql/cache_utils_spec.js b/spec/frontend/work_items/graphql/cache_utils_spec.js
index 6d0083790d1..64ef1bdbb88 100644
--- a/spec/frontend/work_items/graphql/cache_utils_spec.js
+++ b/spec/frontend/work_items/graphql/cache_utils_spec.js
@@ -43,7 +43,7 @@ describe('work items graphql cache utils', () => {
title: 'New child',
};
- addHierarchyChild(mockCache, fullPath, iid, child);
+ addHierarchyChild({ cache: mockCache, fullPath, iid, workItem: child });
expect(mockCache.writeQuery).toHaveBeenCalledWith({
query: workItemByIidQuery,
@@ -88,7 +88,7 @@ describe('work items graphql cache utils', () => {
title: 'New child',
};
- addHierarchyChild(mockCache, fullPath, iid, child);
+ addHierarchyChild({ cache: mockCache, fullPath, iid, workItem: child });
expect(mockCache.writeQuery).not.toHaveBeenCalled();
});
@@ -106,7 +106,7 @@ describe('work items graphql cache utils', () => {
title: 'Child',
};
- removeHierarchyChild(mockCache, fullPath, iid, childToRemove);
+ removeHierarchyChild({ cache: mockCache, fullPath, iid, workItem: childToRemove });
expect(mockCache.writeQuery).toHaveBeenCalledWith({
query: workItemByIidQuery,
@@ -145,7 +145,7 @@ describe('work items graphql cache utils', () => {
title: 'Child',
};
- removeHierarchyChild(mockCache, fullPath, iid, childToRemove);
+ removeHierarchyChild({ cache: mockCache, fullPath, iid, workItem: childToRemove });
expect(mockCache.writeQuery).not.toHaveBeenCalled();
});
diff --git a/spec/frontend/work_items/mock_data.js b/spec/frontend/work_items/mock_data.js
index ba244b19eb5..9eb604c81cb 100644
--- a/spec/frontend/work_items/mock_data.js
+++ b/spec/frontend/work_items/mock_data.js
@@ -146,6 +146,7 @@ export const workItemQueryResponse = {
setWorkItemMetadata: false,
adminParentLink: false,
createNote: false,
+ adminWorkItemLink: true,
__typename: 'WorkItemPermissions',
},
widgets: [
@@ -193,6 +194,7 @@ export const workItemQueryResponse = {
confidential: false,
title: '123',
state: 'OPEN',
+ webUrl: '/gitlab-org/gitlab-test/-/work_items/4',
workItemType: {
id: '1',
name: 'Task',
@@ -251,6 +253,7 @@ export const updateWorkItemMutationResponse = {
setWorkItemMetadata: false,
adminParentLink: false,
createNote: false,
+ adminWorkItemLink: true,
__typename: 'WorkItemPermissions',
},
reference: 'test-project-path#1',
@@ -269,6 +272,7 @@ export const updateWorkItemMutationResponse = {
confidential: false,
title: '123',
state: 'OPEN',
+ webUrl: '/gitlab-org/gitlab-test/-/work_items/4',
workItemType: {
id: '1',
name: 'Task',
@@ -360,6 +364,7 @@ export const convertWorkItemMutationResponse = {
setWorkItemMetadata: false,
adminParentLink: false,
createNote: false,
+ adminWorkItemLink: true,
__typename: 'WorkItemPermissions',
},
reference: 'gitlab-org/gitlab-test#1',
@@ -378,6 +383,7 @@ export const convertWorkItemMutationResponse = {
confidential: false,
title: '123',
state: 'OPEN',
+ webUrl: '/gitlab-org/gitlab-test/-/work_items/4',
workItemType: {
id: '1',
name: 'Task',
@@ -486,6 +492,7 @@ export const mockBlockingLinkedItem = {
state: 'OPEN',
createdAt: '2023-03-28T10:50:16Z',
closedAt: null,
+ webUrl: '/gitlab-org/gitlab-test/-/work_items/83',
widgets: [],
__typename: 'WorkItem',
},
@@ -518,6 +525,7 @@ export const mockLinkedItems = {
state: 'OPEN',
createdAt: '2023-03-28T10:50:16Z',
closedAt: null,
+ webUrl: '/gitlab-org/gitlab-test/-/work_items/83',
widgets: [],
__typename: 'WorkItem',
},
@@ -540,6 +548,7 @@ export const mockLinkedItems = {
state: 'OPEN',
createdAt: '2023-03-28T10:50:16Z',
closedAt: null,
+ webUrl: '/gitlab-org/gitlab-test/-/work_items/55',
widgets: [],
__typename: 'WorkItem',
},
@@ -562,6 +571,7 @@ export const mockLinkedItems = {
state: 'OPEN',
createdAt: '2023-03-28T10:50:16Z',
closedAt: null,
+ webUrl: '/gitlab-org/gitlab-test/-/work_items/56',
widgets: [],
__typename: 'WorkItem',
},
@@ -579,6 +589,7 @@ export const workItemResponseFactory = ({
canDelete = false,
canCreateNote = false,
adminParentLink = false,
+ canAdminWorkItemLink = true,
notificationsWidgetPresent = true,
currentUserTodosWidgetPresent = true,
awardEmojiWidgetPresent = true,
@@ -636,6 +647,7 @@ export const workItemResponseFactory = ({
updateWorkItem: canUpdate,
setWorkItemMetadata: canUpdate,
adminParentLink,
+ adminWorkItemLink: canAdminWorkItemLink,
createNote: canCreateNote,
__typename: 'WorkItemPermissions',
},
@@ -756,6 +768,7 @@ export const workItemResponseFactory = ({
confidential: false,
title: '123',
state: 'OPEN',
+ webUrl: '/gitlab-org/gitlab-test/-/work_items/5',
workItemType: {
id: '1',
name: 'Task',
@@ -828,13 +841,16 @@ export const workItemByIidResponseFactory = (options) => {
};
};
-export const updateWorkItemMutationResponseFactory = (options) => {
+export const groupWorkItemByIidResponseFactory = (options) => {
const response = workItemResponseFactory(options);
return {
data: {
- workItemUpdate: {
- workItem: response.data.workItem,
- errors: [],
+ workspace: {
+ __typename: 'Group',
+ id: 'gid://gitlab/Group/1',
+ workItems: {
+ nodes: [response.data.workItem],
+ },
},
},
};
@@ -914,6 +930,7 @@ export const createWorkItemMutationResponse = {
setWorkItemMetadata: false,
adminParentLink: false,
createNote: false,
+ adminWorkItemLink: true,
__typename: 'WorkItemPermissions',
},
reference: 'test-project-path#1',
@@ -996,6 +1013,7 @@ export const workItemHierarchyEmptyResponse = {
setWorkItemMetadata: false,
adminParentLink: false,
createNote: false,
+ adminWorkItemLink: true,
__typename: 'WorkItemPermissions',
},
confidential: false,
@@ -1046,6 +1064,7 @@ export const workItemHierarchyNoUpdatePermissionResponse = {
setWorkItemMetadata: false,
adminParentLink: false,
createNote: false,
+ adminWorkItemLink: true,
__typename: 'WorkItemPermissions',
},
project: {
@@ -1077,6 +1096,7 @@ export const workItemHierarchyNoUpdatePermissionResponse = {
confidential: false,
createdAt: '2022-08-03T12:41:54Z',
closedAt: null,
+ webUrl: '/gitlab-org/gitlab-test/-/work_items/2',
widgets: [
{
type: 'HIERARCHY',
@@ -1110,6 +1130,7 @@ export const workItemTask = {
confidential: false,
createdAt: '2022-08-03T12:41:54Z',
closedAt: null,
+ webUrl: '/gitlab-org/gitlab-test/-/work_items/4',
widgets: [],
__typename: 'WorkItem',
};
@@ -1128,6 +1149,7 @@ export const confidentialWorkItemTask = {
confidential: true,
createdAt: '2022-08-03T12:41:54Z',
closedAt: null,
+ webUrl: '/gitlab-org/gitlab-test/-/work_items/2',
widgets: [],
__typename: 'WorkItem',
};
@@ -1146,6 +1168,7 @@ export const closedWorkItemTask = {
confidential: false,
createdAt: '2022-08-03T12:41:54Z',
closedAt: '2022-08-12T13:07:52Z',
+ webUrl: '/gitlab-org/gitlab-test/-/work_items/3',
widgets: [],
__typename: 'WorkItem',
};
@@ -1168,6 +1191,7 @@ export const childrenWorkItems = [
confidential: false,
createdAt: '2022-08-03T12:41:54Z',
closedAt: null,
+ webUrl: '/gitlab-org/gitlab-test/-/work_items/5',
widgets: [],
__typename: 'WorkItem',
},
@@ -1196,6 +1220,7 @@ export const workItemHierarchyResponse = {
setWorkItemMetadata: true,
adminParentLink: true,
createNote: true,
+ adminWorkItemLink: true,
__typename: 'WorkItemPermissions',
},
author: {
@@ -1297,6 +1322,7 @@ export const workItemObjectiveWithChild = {
setWorkItemMetadata: true,
adminParentLink: true,
createNote: true,
+ adminWorkItemLink: true,
__typename: 'WorkItemPermissions',
},
author: {
@@ -1368,6 +1394,7 @@ export const workItemHierarchyTreeResponse = {
setWorkItemMetadata: true,
adminParentLink: true,
createNote: true,
+ adminWorkItemLink: true,
__typename: 'WorkItemPermissions',
},
confidential: false,
@@ -1403,6 +1430,7 @@ export const workItemHierarchyTreeResponse = {
confidential: false,
createdAt: '2022-08-03T12:41:54Z',
closedAt: null,
+ webUrl: '/gitlab-org/gitlab-test/-/work_items/13',
widgets: [
{
type: 'HIERARCHY',
@@ -1449,6 +1477,7 @@ export const changeIndirectWorkItemParentMutationResponse = {
setWorkItemMetadata: true,
adminParentLink: true,
createNote: true,
+ adminWorkItemLink: true,
__typename: 'WorkItemPermissions',
},
description: null,
@@ -1517,6 +1546,7 @@ export const changeWorkItemParentMutationResponse = {
setWorkItemMetadata: true,
adminParentLink: true,
createNote: true,
+ adminWorkItemLink: true,
__typename: 'WorkItemPermissions',
},
description: null,
@@ -1568,6 +1598,7 @@ export const availableWorkItemsResponse = {
nodes: [
{
id: 'gid://gitlab/WorkItem/458',
+ iid: '2',
title: 'Task 1',
state: 'OPEN',
createdAt: '2022-08-03T12:41:54Z',
@@ -1576,6 +1607,7 @@ export const availableWorkItemsResponse = {
},
{
id: 'gid://gitlab/WorkItem/459',
+ iid: '3',
title: 'Task 2',
state: 'OPEN',
createdAt: '2022-08-03T12:41:54Z',
@@ -1584,6 +1616,7 @@ export const availableWorkItemsResponse = {
},
{
id: 'gid://gitlab/WorkItem/460',
+ iid: '4',
title: 'Task 3',
state: 'OPEN',
createdAt: '2022-08-03T12:41:54Z',
@@ -1596,6 +1629,64 @@ export const availableWorkItemsResponse = {
},
};
+export const availableObjectivesResponse = {
+ data: {
+ workspace: {
+ __typename: 'Project',
+ id: 'gid://gitlab/Project/2',
+ workItems: {
+ nodes: [
+ {
+ id: 'gid://gitlab/WorkItem/716',
+ iid: '122',
+ title: 'Objective 101',
+ state: 'OPEN',
+ confidential: false,
+ __typename: 'WorkItem',
+ },
+ {
+ id: 'gid://gitlab/WorkItem/712',
+ iid: '118',
+ title: 'Objective 103',
+ state: 'OPEN',
+ confidential: false,
+ __typename: 'WorkItem',
+ },
+ {
+ id: 'gid://gitlab/WorkItem/711',
+ iid: '117',
+ title: 'Objective 102',
+ state: 'OPEN',
+ confidential: false,
+ __typename: 'WorkItem',
+ },
+ ],
+ },
+ },
+ },
+};
+
+export const searchedObjectiveResponse = {
+ data: {
+ workspace: {
+ __typename: 'Project',
+ id: 'gid://gitlab/Project/2',
+ workItems: {
+ nodes: [
+ {
+ id: 'gid://gitlab/WorkItem/716',
+ iid: '122',
+ title: 'Objective 101',
+ state: 'OPEN',
+ confidential: false,
+ __typename: 'WorkItem',
+ },
+ ],
+ },
+ },
+ },
+};
+
export const searchedWorkItemsResponse = {
data: {
workspace: {
@@ -1605,6 +1696,7 @@ export const searchedWorkItemsResponse = {
nodes: [
{
id: 'gid://gitlab/WorkItem/459',
+ iid: '3',
title: 'Task 2',
state: 'OPEN',
createdAt: '2022-08-03T12:41:54Z',
@@ -1931,6 +2023,21 @@ export const mockMilestoneWidgetResponse = {
title: 'v4.0',
};
+export const mockParentWidgetResponse = {
+ id: 'gid://gitlab/WorkItem/716',
+ iid: '122',
+ title: 'Objective 101',
+ confidential: false,
+ webUrl: 'http://127.0.0.1:3000/gitlab-org/gitlab-test/-/work_items/122',
+ workItemType: {
+ id: 'gid://gitlab/WorkItems::Type/6',
+ name: 'Objective',
+ iconName: 'issue-type-objective',
+ __typename: 'WorkItemType',
+ },
+ __typename: 'WorkItem',
+};
+
export const projectMilestonesResponse = {
data: {
workspace: {
@@ -3439,6 +3546,31 @@ export const getTodosMutationResponse = (state) => {
};
};
+export const linkedWorkItemResponse = (options, errors = []) => {
+ const response = workItemResponseFactory(options);
+ return {
+ data: {
+ workItemAddLinkedItems: {
+ workItem: response.data.workItem,
+ errors,
+ __typename: 'WorkItemAddLinkedItemsPayload',
+ },
+ },
+ };
+};
+
+export const removeLinkedWorkItemResponse = (message, errors = []) => {
+ return {
+ data: {
+ workItemRemoveLinkedItems: {
+ errors,
+ message,
+ __typename: 'WorkItemRemoveLinkedItemsPayload',
+ },
+ },
+ };
+};
+
export const groupWorkItemsQueryResponse = {
data: {
group: {
@@ -3498,3 +3630,36 @@ export const groupWorkItemsQueryResponse = {
},
},
};
+
+export const updateWorkItemMutationResponseFactory = (options) => {
+ const response = workItemResponseFactory(options);
+ return {
+ data: {
+ workItemUpdate: {
+ workItem: response.data.workItem,
+ errors: [],
+ },
+ },
+ };
+};
+
+export const updateWorkItemNotificationsMutationResponse = (subscribed) => ({
+ data: {
+ workItemSubscribe: {
+ workItem: {
+ id: 'gid://gitlab/WorkItem/1',
+ widgets: [
+ {
+ __typename: 'WorkItemWidgetNotifications',
+ type: 'NOTIFICATIONS',
+ subscribed,
+ },
+ ],
+ },
+ errors: [],
+ },
+ },
+});
+
+export const generateWorkItemsListWithId = (count) =>
+ Array.from({ length: count }, (_, i) => ({ id: `gid://gitlab/WorkItem/${i + 1}` }));
diff --git a/spec/frontend/work_items/pages/create_work_item_spec.js b/spec/frontend/work_items/pages/create_work_item_spec.js
index c369a454286..527f5890338 100644
--- a/spec/frontend/work_items/pages/create_work_item_spec.js
+++ b/spec/frontend/work_items/pages/create_work_item_spec.js
@@ -65,6 +65,7 @@ describe('Create work item component', () => {
},
provide: {
fullPath: 'full-path',
+ isGroup: false,
},
});
};
@@ -199,8 +200,6 @@ describe('Create work item component', () => {
wrapper.find('form').trigger('submit');
await waitForPromises();
- expect(findAlert().text()).toBe(
- 'Something went wrong when creating work item. Please try again.',
- );
+ expect(findAlert().text()).toBe('Something went wrong when creating item. Please try again.');
});
});
diff --git a/spec/frontend/work_items/router_spec.js b/spec/frontend/work_items/router_spec.js
index 79ba31e7012..d4efcf78189 100644
--- a/spec/frontend/work_items/router_spec.js
+++ b/spec/frontend/work_items/router_spec.js
@@ -41,6 +41,7 @@ describe('Work items router', () => {
router,
provide: {
fullPath: 'full-path',
+ isGroup: false,
issuesListPath: 'full-path/-/issues',
hasIssueWeightsFeature: false,
hasIterationsFeature: false,
diff --git a/spec/frontend/work_items/utils_spec.js b/spec/frontend/work_items/utils_spec.js
index 8a49140119d..aa24b80cf08 100644
--- a/spec/frontend/work_items/utils_spec.js
+++ b/spec/frontend/work_items/utils_spec.js
@@ -1,4 +1,4 @@
-import { autocompleteDataSources, markdownPreviewPath, workItemPath } from '~/work_items/utils';
+import { autocompleteDataSources, markdownPreviewPath } from '~/work_items/utils';
describe('autocompleteDataSources', () => {
beforeEach(() => {
@@ -25,14 +25,3 @@ describe('markdownPreviewPath', () => {
);
});
});
-
-describe('workItemPath', () => {
- it('returns corrrect data sources', () => {
- expect(workItemPath('project/group', '2')).toEqual('/project/group/-/work_items/2');
- });
-
- it('returns corrrect data sources with relative url root', () => {
- gon.relative_url_root = '/foobar';
- expect(workItemPath('project/group', '2')).toEqual('/foobar/project/group/-/work_items/2');
- });
-});
diff --git a/spec/graphql/features/authorization_spec.rb b/spec/graphql/features/authorization_spec.rb
index 4f8ae92ff99..d3a7df8cb00 100644
--- a/spec/graphql/features/authorization_spec.rb
+++ b/spec/graphql/features/authorization_spec.rb
@@ -386,7 +386,7 @@ RSpec.describe 'DeclarativePolicy authorization in GraphQL ' do
issues = Issue.where(project: [visible_project, other_project]).order(id: :asc)
type_factory do |type|
type.graphql_name 'FakeProjectType'
- type.field :test_issues, issue_type.connection_type,
+ type.field :test_issues, field_type,
null: false,
resolver: new_resolver(issues)
end
@@ -398,32 +398,52 @@ RSpec.describe 'DeclarativePolicy authorization in GraphQL ' do
end
end
- let(:query_string) do
- <<~QRY
- { testProject { testIssues(first: 3) { edges { node { id } } } } }
- QRY
- end
-
before do
allow(Ability).to receive(:allowed?).and_call_original
end
- it 'renders the issues the user has access to' do
- issue_edges = result.dig('data', 'testProject', 'testIssues', 'edges')
- issue_ids = issue_edges.map { |issue_edge| issue_edge['node']&.fetch('id') }
+ context 'for connection field type' do
+ let(:field_type) { issue_type.connection_type }
+
+ let(:query_string) do
+ <<~QRY
+ { testProject { testIssues(first: 3) { edges { node { id } } } } }
+ QRY
+ end
+
+ it 'renders the issues the user has access to' do
+ issue_edges = result.dig('data', 'testProject', 'testIssues', 'edges')
+ issue_ids = issue_edges.map { |issue_edge| issue_edge['node']&.fetch('id') }
+
+ expect(issue_edges.size).to eq(visible_issues.size)
+ expect(issue_ids).to eq(visible_issues.map { |i| i.to_global_id.to_s })
+ end
+
+ it 'does not check access on fields that will not be rendered' do
+ expect(Ability).not_to receive(:allowed?).with(user, :read_issue, other_issues.last)
- expect(issue_edges.size).to eq(visible_issues.size)
- expect(issue_ids).to eq(visible_issues.map { |i| i.to_global_id.to_s })
+ result
+ end
end
- it 'does not check access on fields that will not be rendered' do
- expect(Ability).not_to receive(:allowed?).with(user, :read_issue, other_issues.last)
+ context 'for list field type' do
+ let(:field_type) { [issue_type] }
- result
+ let(:query_string) do
+ <<~QRY
+ { testProject { testIssues { id } } }
+ QRY
+ end
+
+ it 'renders the issues the user has access to' do
+ issue_ids = result.dig('data', 'testProject', 'testIssues').pluck('id')
+
+ expect(issue_ids).to eq(visible_issues.map { |i| i.to_global_id.to_s })
+ end
end
end
- describe 'Authorization on GraphQL::Execution::Execute::SKIP' do
+ describe 'Authorization on GraphQL::Execution::SKIP' do
let(:type) do
type_factory do |type|
type.authorize permission_single
@@ -432,7 +452,7 @@ RSpec.describe 'DeclarativePolicy authorization in GraphQL ' do
let(:query_type) do
query_factory do |query|
- query.field :item, [type], null: true, resolver: new_resolver(GraphQL::Execution::Execute::SKIP)
+ query.field :item, [type], null: true, resolver: new_resolver(GraphQL::Execution::SKIP)
end
end
diff --git a/spec/graphql/mutations/alert_management/update_alert_status_spec.rb b/spec/graphql/mutations/alert_management/update_alert_status_spec.rb
index fb11ec7065b..8e1b2d90117 100644
--- a/spec/graphql/mutations/alert_management/update_alert_status_spec.rb
+++ b/spec/graphql/mutations/alert_management/update_alert_status_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe Mutations::AlertManagement::UpdateAlertStatus do
allow(alert).to receive(:save).and_return(false)
allow(alert).to receive(:errors).and_return(
- double(full_messages: %w(foo bar), :[] => nil)
+ double(full_messages: %w[foo bar], :[] => nil)
)
expect(resolve).to eq(
alert: alert,
diff --git a/spec/graphql/mutations/ci/runner/update_spec.rb b/spec/graphql/mutations/ci/runner/update_spec.rb
index 50351321be8..02bb7ee2170 100644
--- a/spec/graphql/mutations/ci/runner/update_spec.rb
+++ b/spec/graphql/mutations/ci/runner/update_spec.rb
@@ -69,7 +69,7 @@ RSpec.describe Mutations::Ci::Runner::Update, feature_category: :runner_fleet do
active: false,
locked: true,
run_untagged: false,
- tag_list: %w(tag1 tag2)
+ tag_list: %w[tag1 tag2]
}
end
diff --git a/spec/graphql/mutations/commits/create_spec.rb b/spec/graphql/mutations/commits/create_spec.rb
index 2c452410cca..8cfd27485e6 100644
--- a/spec/graphql/mutations/commits/create_spec.rb
+++ b/spec/graphql/mutations/commits/create_spec.rb
@@ -67,7 +67,7 @@ RSpec.describe Mutations::Commits::Create do
context 'when service successfully creates a new commit' do
it "returns the ETag path for the commit's pipeline" do
commit_pipeline_path = subject[:commit_pipeline_path]
- expect(commit_pipeline_path).to match(%r(pipelines/sha/\w+))
+ expect(commit_pipeline_path).to match(%r{pipelines/sha/\w+})
end
it 'returns the content of the commit' do
diff --git a/spec/graphql/mutations/container_repositories/destroy_spec.rb b/spec/graphql/mutations/container_repositories/destroy_spec.rb
index 85e0ac96e55..b49751985ec 100644
--- a/spec/graphql/mutations/container_repositories/destroy_spec.rb
+++ b/spec/graphql/mutations/container_repositories/destroy_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Mutations::ContainerRepositories::Destroy do
+RSpec.describe Mutations::ContainerRepositories::Destroy, feature_category: :container_registry do
using RSpec::Parameterized::TableSyntax
let_it_be_with_reload(:container_repository) { create(:container_repository) }
@@ -23,7 +23,6 @@ RSpec.describe Mutations::ContainerRepositories::Destroy do
it 'marks the repository as delete_scheduled' do
expect(::Packages::CreateEventService)
.to receive(:new).with(nil, user, event_name: :delete_repository, scope: :container).and_call_original
- expect(DeleteContainerRepositoryWorker).not_to receive(:perform_async)
subject
expect(container_repository.reload.delete_scheduled?).to be true
@@ -32,9 +31,6 @@ RSpec.describe Mutations::ContainerRepositories::Destroy do
shared_examples 'denying access to container respository' do
it 'raises an error' do
- expect(DeleteContainerRepositoryWorker)
- .not_to receive(:perform_async).with(user.id, container_repository.id)
-
expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
end
end
diff --git a/spec/graphql/mutations/design_management/delete_spec.rb b/spec/graphql/mutations/design_management/delete_spec.rb
index 9a2efb61e55..1b78529fbc7 100644
--- a/spec/graphql/mutations/design_management/delete_spec.rb
+++ b/spec/graphql/mutations/design_management/delete_spec.rb
@@ -90,7 +90,7 @@ RSpec.describe Mutations::DesignManagement::Delete do
allow(Gitlab::Tracking).to receive(:event) # rubocop:disable RSpec/ExpectGitlabTracking
filenames.each(&:present?) # ignore setup
- # Queries: as of 2022-08-30
+ # Queries: as of 2022-09-08
# -------------
# 01. routing query
# 02. policy query: find namespace by type and id
@@ -101,30 +101,31 @@ RSpec.describe Mutations::DesignManagement::Delete do
# 08. find project by id
# 09. find namespace by id
# 10. find group namespace by id
- # 11. project.authorizations for user (same query as 5)
- # 12. find user by id
- # 13. project.project_features (same query as 3)
- # 14. project.authorizations for user (same query as 5)
- # 15. current designs by filename and issue
- # 16, 17 project.authorizations for user (same query as 5)
- # 18. find design_management_repository for project
- # 19. find route by id and source_type
+ # 11. policy query: find namespace by id (same query as 3)
+ # 12. project.authorizations for user (same query as 5)
+ # 13. find user by id
+ # 14. project.project_features (same query as 3)
+ # 15. project.authorizations for user (same query as 5)
+ # 16. current designs by filename and issue
+ # 17, 18 project.authorizations for user (same query as 5)
+ # 19. find design_management_repository for project
+ # 20. find route by id and source_type
# ------------- our queries are below:
- # 20. start transaction
- # 21. create version with sha and issue
- # 22. create design-version links
- # 23. validate version.actions.present?
- # 24. validate version.sha is unique
- # 25. validate version.issue.present?
- # 26. leave transaction
- # 27. find project by id (same query as 8)
- # 28. find namespace by id (same query as 9)
- # 29. find project by id (same query as 8)
+ # 21. start transaction
+ # 22. create version with sha and issue
+ # 23. create design-version links
+ # 24. validate version.actions.present?
+ # 25. validate version.sha is unique
+ # 26. validate version.issue.present?
+ # 27. leave transaction
+ # 28. find project by id (same query as 8)
+ # 29. find namespace by id (same query as 9)
# 30. find project by id (same query as 8)
- # 31. create event
- # 32. find plan for standard context
+ # 31. find project by id (same query as 8)
+ # 32. create event
+ # 33. find plan for standard context
#
- expect { run_mutation }.not_to exceed_query_limit(32)
+ expect { run_mutation }.not_to exceed_query_limit(33)
end
end
diff --git a/spec/graphql/mutations/issues/set_assignees_spec.rb b/spec/graphql/mutations/issues/set_assignees_spec.rb
index 4cc49e76bc6..9dc152872a6 100644
--- a/spec/graphql/mutations/issues/set_assignees_spec.rb
+++ b/spec/graphql/mutations/issues/set_assignees_spec.rb
@@ -12,10 +12,12 @@ RSpec.describe Mutations::Issues::SetAssignees do
describe '#resolve' do
subject do
- mutation.resolve(project_path: issue.project.full_path,
- iid: issue.iid,
- operation_mode: Types::MutationOperationModeEnum.default_mode,
- assignee_usernames: [assignee.username])
+ mutation.resolve(
+ project_path: issue.project.full_path,
+ iid: issue.iid,
+ operation_mode: Types::MutationOperationModeEnum.default_mode,
+ assignee_usernames: [assignee.username]
+ )
end
it_behaves_like 'permission level for issue mutation is correctly verified'
diff --git a/spec/graphql/mutations/merge_requests/accept_spec.rb b/spec/graphql/mutations/merge_requests/accept_spec.rb
index c99b1d988c5..b0d2a9e9c22 100644
--- a/spec/graphql/mutations/merge_requests/accept_spec.rb
+++ b/spec/graphql/mutations/merge_requests/accept_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Mutations::MergeRequests::Accept do
project.add_maintainer(user)
end
- def common_args(merge_request)
+ let(:common_args) do
{
project_path: project.full_path,
iid: merge_request.iid.to_s,
@@ -37,136 +37,69 @@ RSpec.describe Mutations::MergeRequests::Accept do
}
end
- it 'merges the merge request' do
- merge_request = create(:merge_request, source_project: project)
+ let(:args) { common_args.merge(additional_args) }
+ let(:additional_args) { {} }
+ let(:result) { mutation.resolve(**args) }
+ let!(:merge_request) { create(:merge_request, source_project: project) }
- result = mutation.resolve(**common_args(merge_request))
-
- expect(result).to include(errors: be_empty, merge_request: be_merged)
- end
-
- it 'rejects the mutation if the SHA is a mismatch' do
- merge_request = create(:merge_request, source_project: project)
- args = common_args(merge_request).merge(sha: 'not a good sha')
-
- result = mutation.resolve(**args)
-
- expect(result).not_to include(merge_request: be_merged)
- expect(result).to include(errors: [described_class::SHA_MISMATCH])
- end
-
- it 'respects the merge commit message' do
- merge_request = create(:merge_request, source_project: project)
- args = common_args(merge_request).merge(commit_message: 'my super custom message')
-
- result = mutation.resolve(**args)
-
- expect(result).to include(merge_request: be_merged)
- expect(project.repository.commit(merge_request.target_branch)).to have_attributes(
- message: args[:commit_message]
- )
+ it 'merges the merge request asynchronously' do
+ expect_next_found_instance_of(MergeRequest) do |instance|
+ expect(instance).to receive(:merge_async).with(user.id, args.except(:project_path, :iid))
+ end
+ expect(result).to include(errors: be_empty)
end
- it 'respects the squash flag' do
- merge_request = create(:merge_request, source_project: project)
- args = common_args(merge_request).merge(squash: true)
+ context 'when the squash flag is specified' do
+ let(:additional_args) { { squash: true } }
- result = mutation.resolve(**args)
-
- expect(result).to include(merge_request: be_merged)
- expect(result[:merge_request].squash_commit_sha).to be_present
+ it 'sets squash on the merge request' do
+ expect { result }.to change { merge_request.reload.squash }.from(false).to(true)
+ end
end
- it 'respects the squash_commit_message argument' do
- merge_request = create(:merge_request, source_project: project)
- args = common_args(merge_request).merge(squash: true, squash_commit_message: 'squish')
-
- result = mutation.resolve(**args)
- sha = result[:merge_request].squash_commit_sha
+ context 'when the sha is a mismatch' do
+ let(:additional_args) { { sha: 'not a good sha' } }
- expect(result).to include(merge_request: be_merged)
- expect(project.repository.commit(sha)).to have_attributes(message: "squish\n")
+ it 'rejects the mutation' do
+ expect_next_found_instance_of(MergeRequest) do |instance|
+ expect(instance).not_to receive(:merge_async)
+ end
+ expect(result).to include(errors: [described_class::SHA_MISMATCH])
+ end
end
- it 'respects the should_remove_source_branch argument when true' do
- b = project.repository.add_branch(user, generate(:branch), 'master')
- merge_request = create(:merge_request, source_branch: b.name, source_project: project)
- args = common_args(merge_request).merge(should_remove_source_branch: true)
-
- expect(::MergeRequests::DeleteSourceBranchWorker).to receive(:perform_async)
-
- result = mutation.resolve(**args)
-
- expect(result).to include(merge_request: be_merged)
- end
-
- it 'respects the should_remove_source_branch argument when false' do
- b = project.repository.add_branch(user, generate(:branch), 'master')
- merge_request = create(:merge_request, source_branch: b.name, source_project: project)
- args = common_args(merge_request).merge(should_remove_source_branch: false)
-
- expect(::MergeRequests::DeleteSourceBranchWorker).not_to receive(:perform_async)
-
- result = mutation.resolve(**args)
+ context 'when MR is unmergeable' do
+ let(:merge_request) { create(:merge_request, :closed, source_project: project) }
- expect(result).to include(merge_request: be_merged)
- end
-
- it 'rejects unmergeable MRs' do
- merge_request = create(:merge_request, :closed, source_project: project)
- args = common_args(merge_request)
-
- result = mutation.resolve(**args)
-
- expect(result).not_to include(merge_request: be_merged)
- expect(result).to include(errors: [described_class::NOT_MERGEABLE])
+ it 'rejects the MRs' do
+ expect_next_found_instance_of(MergeRequest) do |instance|
+ expect(instance).not_to receive(:merge_async)
+ end
+ expect(result).to include(errors: [described_class::NOT_MERGEABLE])
+ end
end
it 'rejects merges when we cannot validate the hooks' do
- merge_request = create(:merge_request, source_project: project)
- args = common_args(merge_request)
expect_next(::MergeRequests::MergeService)
.to receive(:hooks_validation_pass?).with(merge_request).and_return(false)
- result = mutation.resolve(**args)
-
- expect(result).not_to include(merge_request: be_merged)
+ expect_next_found_instance_of(MergeRequest) do |instance|
+ expect(instance).not_to receive(:merge_async)
+ end
expect(result).to include(errors: [described_class::HOOKS_VALIDATION_ERROR])
end
- it 'rejects merges when the merge service returns an error' do
- merge_request = create(:merge_request, source_project: project)
- args = common_args(merge_request)
- expect_next(::MergeRequests::MergeService)
- .to receive(:execute).with(merge_request).and_return(:failed)
-
- result = mutation.resolve(**args)
-
- expect(result).not_to include(merge_request: be_merged)
- expect(result).to include(errors: [described_class::MERGE_FAILED])
- end
-
- it 'rejects merges when the merge service raises merge error' do
- merge_request = create(:merge_request, source_project: project)
- args = common_args(merge_request)
- expect_next(::MergeRequests::MergeService)
- .to receive(:execute).and_raise(::MergeRequests::MergeBaseService::MergeError, 'boom')
-
- result = mutation.resolve(**args)
-
- expect(result).not_to include(merge_request: be_merged)
- expect(result).to include(errors: ['boom'])
- end
-
- it "can use the MERGE_WHEN_PIPELINE_SUCCEEDS strategy" do
- enum = ::Types::MergeStrategyEnum.values['MERGE_WHEN_PIPELINE_SUCCEEDS']
- merge_request = create(:merge_request, :with_head_pipeline, source_project: project)
- args = common_args(merge_request).merge(auto_merge_strategy: enum.value)
-
- result = mutation.resolve(**args)
-
- expect(result).not_to include(merge_request: be_merged)
- expect(result).to include(errors: be_empty, merge_request: be_auto_merge_enabled)
+ context 'when MR has head pipeline' do
+ let(:merge_request) { create(:merge_request, :with_head_pipeline, source_project: project) }
+ let(:strategy) { ::Types::MergeStrategyEnum.values['MERGE_WHEN_PIPELINE_SUCCEEDS'].value }
+ let(:additional_args) { { auto_merge_strategy: strategy } }
+
+ it "can use the MERGE_WHEN_PIPELINE_SUCCEEDS strategy" do
+ expect_next_found_instance_of(MergeRequest) do |instance|
+ expect(instance).not_to receive(:merge_async)
+ end
+ expect(result).to include(errors: be_empty, merge_request: be_auto_merge_enabled)
+ end
end
end
end
diff --git a/spec/graphql/mutations/merge_requests/create_spec.rb b/spec/graphql/mutations/merge_requests/create_spec.rb
index 6e593a5f4be..b8ea913b727 100644
--- a/spec/graphql/mutations/merge_requests/create_spec.rb
+++ b/spec/graphql/mutations/merge_requests/create_spec.rb
@@ -112,11 +112,13 @@ RSpec.describe Mutations::MergeRequests::Create do
context 'when project is public with private merge requests' do
let_it_be(:project) do
- create(:project,
- :public,
- :repository,
- group: group,
- merge_requests_access_level: ProjectFeature::DISABLED)
+ create(
+ :project,
+ :public,
+ :repository,
+ group: group,
+ merge_requests_access_level: ProjectFeature::DISABLED
+ )
end
context 'and user is a guest' do
diff --git a/spec/graphql/mutations/merge_requests/set_assignees_spec.rb b/spec/graphql/mutations/merge_requests/set_assignees_spec.rb
index 9b0460bc709..42315af75d5 100644
--- a/spec/graphql/mutations/merge_requests/set_assignees_spec.rb
+++ b/spec/graphql/mutations/merge_requests/set_assignees_spec.rb
@@ -12,10 +12,12 @@ RSpec.describe Mutations::MergeRequests::SetAssignees do
describe '#resolve' do
subject do
- mutation.resolve(project_path: merge_request.project.full_path,
- iid: merge_request.iid,
- operation_mode: described_class.arguments['operationMode'].default_value,
- assignee_usernames: [assignee.username])
+ mutation.resolve(
+ project_path: merge_request.project.full_path,
+ iid: merge_request.iid,
+ operation_mode: described_class.arguments['operationMode'].default_value,
+ assignee_usernames: [assignee.username]
+ )
end
it_behaves_like 'permission level for merge request mutation is correctly verified'
diff --git a/spec/graphql/mutations/merge_requests/set_reviewers_spec.rb b/spec/graphql/mutations/merge_requests/set_reviewers_spec.rb
index df4aa885bbf..59d3fcbfeda 100644
--- a/spec/graphql/mutations/merge_requests/set_reviewers_spec.rb
+++ b/spec/graphql/mutations/merge_requests/set_reviewers_spec.rb
@@ -16,10 +16,12 @@ RSpec.describe Mutations::MergeRequests::SetReviewers do
let(:mode) { described_class.arguments['operationMode'].default_value }
subject do
- mutation.resolve(project_path: merge_request.project.full_path,
- iid: merge_request.iid,
- operation_mode: mode,
- reviewer_usernames: reviewer_usernames)
+ mutation.resolve(
+ project_path: merge_request.project.full_path,
+ iid: merge_request.iid,
+ operation_mode: mode,
+ reviewer_usernames: reviewer_usernames
+ )
end
it 'does not change reviewers if the merge_request is not accessible to the reviewers' do
diff --git a/spec/graphql/mutations/release_asset_links/update_spec.rb b/spec/graphql/mutations/release_asset_links/update_spec.rb
index abb091fc68d..2242387cb1a 100644
--- a/spec/graphql/mutations/release_asset_links/update_spec.rb
+++ b/spec/graphql/mutations/release_asset_links/update_spec.rb
@@ -16,12 +16,14 @@ RSpec.describe Mutations::ReleaseAssetLinks::Update, feature_category: :release_
let_it_be(:link_type) { 'package' }
let_it_be(:release_link) do
- create(:release_link,
- release: release,
- name: name,
- url: url,
- filepath: filepath,
- link_type: link_type)
+ create(
+ :release_link,
+ release: release,
+ name: name,
+ url: url,
+ filepath: filepath,
+ link_type: link_type
+ )
end
let(:current_user) { developer }
diff --git a/spec/graphql/mutations/releases/update_spec.rb b/spec/graphql/mutations/releases/update_spec.rb
index 0cf10e03fb1..fc323c5a146 100644
--- a/spec/graphql/mutations/releases/update_spec.rb
+++ b/spec/graphql/mutations/releases/update_spec.rb
@@ -17,9 +17,16 @@ RSpec.describe Mutations::Releases::Update do
let_it_be(:milestones) { [milestone_12_3.title, milestone_12_4.title] }
let_it_be(:release) do
- create(:release, project: project, tag: tag, name: name,
- description: description, released_at: released_at,
- created_at: created_at, milestones: [milestone_12_3, milestone_12_4])
+ create(
+ :release,
+ project: project,
+ tag: tag,
+ name: name,
+ description: description,
+ released_at: released_at,
+ created_at: created_at,
+ milestones: [milestone_12_3, milestone_12_4]
+ )
end
let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
diff --git a/spec/graphql/mutations/users/set_namespace_commit_email_spec.rb b/spec/graphql/mutations/users/set_namespace_commit_email_spec.rb
index 6d8e15ac791..93456ec7b93 100644
--- a/spec/graphql/mutations/users/set_namespace_commit_email_spec.rb
+++ b/spec/graphql/mutations/users/set_namespace_commit_email_spec.rb
@@ -71,5 +71,5 @@ RSpec.describe Mutations::Users::SetNamespaceCommitEmail, feature_category: :use
end
end
- specify { expect(described_class).to require_graphql_authorizations(:read_namespace) }
+ specify { expect(described_class).to require_graphql_authorizations(:read_namespace_via_membership) }
end
diff --git a/spec/graphql/resolvers/admin/analytics/usage_trends/measurements_resolver_spec.rb b/spec/graphql/resolvers/admin/analytics/usage_trends/measurements_resolver_spec.rb
index 6b5e351a610..6b39f59444f 100644
--- a/spec/graphql/resolvers/admin/analytics/usage_trends/measurements_resolver_spec.rb
+++ b/spec/graphql/resolvers/admin/analytics/usage_trends/measurements_resolver_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe Resolvers::Admin::Analytics::UsageTrends::MeasurementsResolver do
let(:current_user) { admin_user }
it 'returns the records, latest first' do
- expect(subject).to eq([project_measurement_new, project_measurement_old])
+ expect(subject.items).to eq([project_measurement_new, project_measurement_old])
end
end
@@ -54,7 +54,7 @@ RSpec.describe Resolvers::Admin::Analytics::UsageTrends::MeasurementsResolver do
arguments[:recorded_before] = 1.day.ago
end
- it { is_expected.to match_array([project_measurement_new]) }
+ it { expect(subject.items).to match_array([project_measurement_new]) }
context 'when "incorrect" values are passed' do
before do
@@ -62,7 +62,7 @@ RSpec.describe Resolvers::Admin::Analytics::UsageTrends::MeasurementsResolver do
arguments[:recorded_before] = 4.days.ago
end
- it { is_expected.to be_empty }
+ it { expect(subject.items).to be_empty }
end
end
end
@@ -71,7 +71,7 @@ RSpec.describe Resolvers::Admin::Analytics::UsageTrends::MeasurementsResolver do
let_it_be(:pipelines_succeeded_measurement) { create(:usage_trends_measurement, :pipelines_succeeded_count, recorded_at: 2.days.ago) }
let_it_be(:pipelines_skipped_measurement) { create(:usage_trends_measurement, :pipelines_skipped_count, recorded_at: 2.days.ago) }
- subject { resolve_measurements({ identifier: identifier }, { current_user: current_user }) }
+ subject { resolve_measurements({ identifier: identifier }, { current_user: current_user }).items }
context 'filter for pipelines_succeeded' do
let(:identifier) { 'pipelines_succeeded' }
diff --git a/spec/graphql/resolvers/base_resolver_spec.rb b/spec/graphql/resolvers/base_resolver_spec.rb
index 27c62da31c3..16b6212a833 100644
--- a/spec/graphql/resolvers/base_resolver_spec.rb
+++ b/spec/graphql/resolvers/base_resolver_spec.rb
@@ -118,18 +118,16 @@ RSpec.describe Resolvers::BaseResolver, feature_category: :api do
end
it 'does not apply the block to the resolver' do
- expect(resolver.field_options).to include(
- arguments: be_empty
- )
+ expect(resolver.arguments).to be_empty
+
result = resolve(resolver)
expect(result).to eq([1])
end
it 'applies the block to the single version of the resolver' do
- expect(resolver.single.field_options).to include(
- arguments: match('foo' => an_instance_of(::Types::BaseArgument))
- )
+ expect(resolver.single.arguments).to match('foo' => an_instance_of(::Types::BaseArgument))
+
result = resolve(resolver.single, args: { foo: 7 })
expect(result).to eq(49)
@@ -155,9 +153,8 @@ RSpec.describe Resolvers::BaseResolver, feature_category: :api do
end
it 'applies both blocks to the single version of the resolver' do
- expect(resolver.single.field_options).to include(
- arguments: match('foo' => ::Types::BaseArgument, 'bar' => ::Types::BaseArgument)
- )
+ expect(resolver.single.arguments).to match('foo' => ::Types::BaseArgument, 'bar' => ::Types::BaseArgument)
+
result = resolve(resolver.single, args: { foo: 7, bar: 5 })
expect(result).to eq(35)
@@ -178,12 +175,9 @@ RSpec.describe Resolvers::BaseResolver, feature_category: :api do
end
it 'applies both blocks to the single version of the resolver' do
- expect(resolver.single.field_options).to include(
- arguments: match('foo' => ::Types::BaseArgument)
- )
- expect(subclass.single.field_options).to include(
- arguments: match('foo' => ::Types::BaseArgument, 'inc' => ::Types::BaseArgument)
- )
+ expect(resolver.single.arguments).to match('foo' => ::Types::BaseArgument)
+ expect(subclass.single.arguments).to match('foo' => ::Types::BaseArgument, 'inc' => ::Types::BaseArgument)
+
result = resolve(subclass.single, args: { foo: 7, inc: 1 })
expect(result).to eq(64)
diff --git a/spec/graphql/resolvers/board_lists_resolver_spec.rb b/spec/graphql/resolvers/board_lists_resolver_spec.rb
index 1de59c5f507..5344e70d082 100644
--- a/spec/graphql/resolvers/board_lists_resolver_spec.rb
+++ b/spec/graphql/resolvers/board_lists_resolver_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe Resolvers::BoardListsResolver do
lists = resolve_board_lists
expect(lists.count).to eq 3
- expect(lists.map(&:list_type)).to eq %w(backlog label closed)
+ expect(lists.map(&:list_type)).to eq %w[backlog label closed]
end
context 'when another user has list preferences' do
@@ -100,8 +100,12 @@ RSpec.describe Resolvers::BoardListsResolver do
end
def resolve_board_lists(args: {}, current_user: user)
- resolve(described_class, obj: board, args: args, ctx: { current_user: current_user },
- arg_style: :internal
+ resolve(
+ described_class,
+ obj: board,
+ args: args,
+ ctx: { current_user: current_user },
+ arg_style: :internal
)
end
end
diff --git a/spec/graphql/resolvers/ci/config_resolver_spec.rb b/spec/graphql/resolvers/ci/config_resolver_spec.rb
index 16a2286cb7e..3f21c1f505f 100644
--- a/spec/graphql/resolvers/ci/config_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/config_resolver_spec.rb
@@ -15,9 +15,11 @@ RSpec.describe Resolvers::Ci::ConfigResolver, feature_category: :continuous_inte
end
subject(:response) do
- resolve(described_class,
- args: { project_path: project.full_path, content: content, sha: sha },
- ctx: { current_user: user })
+ resolve(
+ described_class,
+ args: { project_path: project.full_path, content: content, sha: sha },
+ ctx: { current_user: user }
+ )
end
shared_examples 'a valid config file' do
@@ -36,7 +38,8 @@ RSpec.describe Resolvers::Ci::ConfigResolver, feature_category: :continuous_inte
expect(response[:merged_yaml]).to eq(content)
expect(response[:includes]).to eq([])
expect(response[:errors]).to be_empty
- expect(::Gitlab::Ci::Lint).to have_received(:new).with(current_user: user, project: project, sha: sha)
+ expect(::Gitlab::Ci::Lint).to have_received(:new)
+ .with(current_user: user, project: project, sha: sha, verify_project_sha: true)
end
end
diff --git a/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb b/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb
index fedae5c86a8..e0fc3b96b93 100644
--- a/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb
@@ -7,8 +7,13 @@ RSpec.describe Resolvers::Ci::GroupRunnersResolver, feature_category: :runner_fl
describe '#resolve' do
subject(:resolve_scope) do
- resolve(described_class, obj: obj, ctx: { current_user: user }, args: args,
- arg_style: :internal)
+ resolve(
+ described_class,
+ obj: obj,
+ ctx: { current_user: user },
+ args: args,
+ arg_style: :internal
+ )
end
include_context 'runners resolver setup'
diff --git a/spec/graphql/resolvers/ci/jobs_resolver_spec.rb b/spec/graphql/resolvers/ci/jobs_resolver_spec.rb
index b99eb56d6ab..8138ac18938 100644
--- a/spec/graphql/resolvers/ci/jobs_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/jobs_resolver_spec.rb
@@ -39,8 +39,12 @@ RSpec.describe Resolvers::Ci::JobsResolver, feature_category: :continuous_integr
::Types::Security::ReportTypeEnum.values['SAST'].value,
::Types::Security::ReportTypeEnum.values['DAST'].value
]
- jobs = resolve(described_class, obj: pipeline, args: { security_report_types: report_types },
- arg_style: :internal)
+ jobs = resolve(
+ described_class,
+ obj: pipeline,
+ args: { security_report_types: report_types },
+ arg_style: :internal
+ )
expect(jobs).to contain_exactly(
have_attributes(name: 'DAST job'),
diff --git a/spec/graphql/resolvers/ci/project_runners_resolver_spec.rb b/spec/graphql/resolvers/ci/project_runners_resolver_spec.rb
index 55a98106baf..9d9f0fee04a 100644
--- a/spec/graphql/resolvers/ci/project_runners_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/project_runners_resolver_spec.rb
@@ -7,8 +7,13 @@ RSpec.describe Resolvers::Ci::ProjectRunnersResolver, feature_category: :runner_
describe '#resolve' do
subject(:resolve_scope) do
- resolve(described_class, obj: obj, ctx: { current_user: user }, args: args,
- arg_style: :internal)
+ resolve(
+ described_class,
+ obj: obj,
+ ctx: { current_user: user },
+ args: args,
+ arg_style: :internal
+ )
end
include_context 'runners resolver setup'
@@ -77,8 +82,10 @@ RSpec.describe Resolvers::Ci::ProjectRunnersResolver, feature_category: :runner_
end
it 'calls RunnersFinder with expected arguments' do
- allow(::Ci::RunnersFinder).to receive(:new).with(current_user: user,
- params: expected_params).once.and_return(finder)
+ allow(::Ci::RunnersFinder).to receive(:new).with(
+ current_user: user, params: expected_params
+ ).once.and_return(finder)
+
allow(finder).to receive(:execute).once.and_return([:execute_return_value])
expect(resolve_scope.items.to_a).to contain_exactly(:execute_return_value)
diff --git a/spec/graphql/resolvers/ci/runners_resolver_spec.rb b/spec/graphql/resolvers/ci/runners_resolver_spec.rb
index 35831579799..c164393d605 100644
--- a/spec/graphql/resolvers/ci/runners_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/runners_resolver_spec.rb
@@ -10,8 +10,13 @@ RSpec.describe Resolvers::Ci::RunnersResolver, feature_category: :runner_fleet d
let(:args) { {} }
subject(:resolve_scope) do
- resolve(described_class, obj: obj, ctx: { current_user: user }, args: args,
- arg_style: :internal)
+ resolve(
+ described_class,
+ obj: obj,
+ ctx: { current_user: user },
+ args: args,
+ arg_style: :internal
+ )
end
include_context 'runners resolver setup'
diff --git a/spec/graphql/resolvers/clusters/agent_tokens_resolver_spec.rb b/spec/graphql/resolvers/clusters/agent_tokens_resolver_spec.rb
index dfd1addff71..afb10b26907 100644
--- a/spec/graphql/resolvers/clusters/agent_tokens_resolver_spec.rb
+++ b/spec/graphql/resolvers/clusters/agent_tokens_resolver_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Resolvers::Clusters::AgentTokensResolver do
include GraphqlHelpers
- it { expect(described_class.type).to eq(Types::Clusters::AgentTokenType) }
+ it { expect(described_class.type).to eq(Types::Clusters::AgentTokenType.connection_type) }
it { expect(described_class.null).to be_truthy }
it { expect(described_class.arguments.keys).to be_empty }
@@ -22,7 +22,7 @@ RSpec.describe Resolvers::Clusters::AgentTokensResolver do
subject { resolve(described_class, obj: agent, ctx: ctx) }
it 'returns active tokens associated with the agent, ordered by last_used_at' do
- expect(subject).to eq([matching_token2, matching_token1])
+ expect(subject.items).to eq([matching_token2, matching_token1])
end
context 'user does not have permission' do
diff --git a/spec/graphql/resolvers/clusters/agents_resolver_spec.rb b/spec/graphql/resolvers/clusters/agents_resolver_spec.rb
index 152d7fa22c4..f451a337848 100644
--- a/spec/graphql/resolvers/clusters/agents_resolver_spec.rb
+++ b/spec/graphql/resolvers/clusters/agents_resolver_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Resolvers::Clusters::AgentsResolver do
end
specify do
- expect(described_class.field_options).to include(extras: include(:lookahead))
+ expect(described_class.extras).to include(:lookahead)
end
describe '#resolve' do
@@ -52,17 +52,9 @@ end
RSpec.describe Resolvers::Clusters::AgentsResolver.single do
it { expect(described_class).to be < Resolvers::Clusters::AgentsResolver }
- describe '.field_options' do
- subject { described_class.field_options }
-
- specify do
- expect(subject).to include(
- type: ::Types::Clusters::AgentType,
- null: true,
- extras: [:lookahead]
- )
- end
- end
+ it { expect(described_class.type).to eq(::Types::Clusters::AgentType) }
+ it { expect(described_class.null).to eq(true) }
+ it { expect(described_class.extras).to include(:lookahead) }
describe 'arguments' do
subject { described_class.arguments[argument] }
diff --git a/spec/graphql/resolvers/concerns/caching_array_resolver_spec.rb b/spec/graphql/resolvers/concerns/caching_array_resolver_spec.rb
index 892ab53a53e..d3cda7d9c8f 100644
--- a/spec/graphql/resolvers/concerns/caching_array_resolver_spec.rb
+++ b/spec/graphql/resolvers/concerns/caching_array_resolver_spec.rb
@@ -206,8 +206,8 @@ RSpec.describe ::CachingArrayResolver do
def resolve_users(admin:, resolver: caching_resolver)
args = { is_admin: admin }
- opts = resolver.field_options
- allow(resolver).to receive(:field_options).and_return(opts.merge(max_page_size: max_page_size))
+ allow(resolver).to receive(:has_max_page_size?).and_return(true)
+ allow(resolver).to receive(:max_page_size).and_return(max_page_size)
resolve(resolver, args: args, ctx: query_context, schema: schema, arg_style: :internal)
end
end
diff --git a/spec/graphql/resolvers/concerns/looks_ahead_spec.rb b/spec/graphql/resolvers/concerns/looks_ahead_spec.rb
index 4c244da5c62..532eff87307 100644
--- a/spec/graphql/resolvers/concerns/looks_ahead_spec.rb
+++ b/spec/graphql/resolvers/concerns/looks_ahead_spec.rb
@@ -57,10 +57,12 @@ RSpec.describe LooksAhead do
end
def query(doc = document)
- GraphQL::Query.new(schema,
- document: doc,
- context: { user_db: [the_user] },
- variables: { username: the_user.username })
+ GraphQL::Query.new(
+ schema,
+ document: doc,
+ context: { user_db: [the_user] },
+ variables: { username: the_user.username }
+ )
end
let(:document) do
diff --git a/spec/graphql/resolvers/concerns/resolves_groups_spec.rb b/spec/graphql/resolvers/concerns/resolves_groups_spec.rb
index d15c8f2ee42..79f3708da22 100644
--- a/spec/graphql/resolvers/concerns/resolves_groups_spec.rb
+++ b/spec/graphql/resolvers/concerns/resolves_groups_spec.rb
@@ -18,10 +18,7 @@ RSpec.describe ResolvesGroups do
let_it_be(:query_type) do
query_factory do |query|
- query.field :groups,
- Types::GroupType.connection_type,
- null: true,
- resolver: resolver
+ query.field :groups, Types::GroupType.connection_type, null: true, resolver: resolver
end
end
diff --git a/spec/graphql/resolvers/container_repositories_resolver_spec.rb b/spec/graphql/resolvers/container_repositories_resolver_spec.rb
index df0a98b1536..d2d1d622cf4 100644
--- a/spec/graphql/resolvers/container_repositories_resolver_spec.rb
+++ b/spec/graphql/resolvers/container_repositories_resolver_spec.rb
@@ -16,8 +16,13 @@ RSpec.describe Resolvers::ContainerRepositoriesResolver do
let(:object) { project }
subject do
- resolve(described_class, ctx: { current_user: user }, args: args, obj: object,
- arg_style: :internal)
+ resolve(
+ described_class,
+ ctx: { current_user: user },
+ args: args,
+ obj: object,
+ arg_style: :internal
+ )
end
shared_examples 'returning container repositories' do
diff --git a/spec/graphql/resolvers/container_repository_tags_resolver_spec.rb b/spec/graphql/resolvers/container_repository_tags_resolver_spec.rb
index 3ed3fe76267..0408357e8f2 100644
--- a/spec/graphql/resolvers/container_repository_tags_resolver_spec.rb
+++ b/spec/graphql/resolvers/container_repository_tags_resolver_spec.rb
@@ -13,8 +13,13 @@ RSpec.describe Resolvers::ContainerRepositoryTagsResolver do
describe '#resolve' do
let(:resolver) do
- resolve(described_class, ctx: { current_user: user }, obj: repository, args: args,
- arg_style: :internal)
+ resolve(
+ described_class,
+ ctx: { current_user: user },
+ obj: repository,
+ args: args,
+ arg_style: :internal
+ )
end
before do
@@ -25,7 +30,7 @@ RSpec.describe Resolvers::ContainerRepositoryTagsResolver do
subject { resolver.map(&:name) }
before do
- stub_container_registry_tags(repository: repository.path, tags: %w(aaa bab bbb ccc 123), with_manifest: false)
+ stub_container_registry_tags(repository: repository.path, tags: %w[aaa bab bbb ccc 123], with_manifest: false)
end
context 'without sort' do
@@ -37,19 +42,19 @@ RSpec.describe Resolvers::ContainerRepositoryTagsResolver do
context "name_asc" do
let(:args) { { sort: :name_asc } }
- it { is_expected.to eq(%w(123 aaa bab bbb ccc)) }
+ it { is_expected.to eq(%w[123 aaa bab bbb ccc]) }
end
context "name_desc" do
let(:args) { { sort: :name_desc } }
- it { is_expected.to eq(%w(ccc bbb bab aaa 123)) }
+ it { is_expected.to eq(%w[ccc bbb bab aaa 123]) }
end
context 'filter by name' do
let(:args) { { sort: :name_desc, name: 'b' } }
- it { is_expected.to eq(%w(bbb bab)) }
+ it { is_expected.to eq(%w[bbb bab]) }
end
end
end
diff --git a/spec/graphql/resolvers/environments_resolver_spec.rb b/spec/graphql/resolvers/environments_resolver_spec.rb
index b1f7dc1673e..419d0a12462 100644
--- a/spec/graphql/resolvers/environments_resolver_spec.rb
+++ b/spec/graphql/resolvers/environments_resolver_spec.rb
@@ -23,12 +23,14 @@ RSpec.describe Resolvers::EnvironmentsResolver do
describe '#resolve' do
it 'finds all environments' do
- expect(resolve_environments).to contain_exactly(environment1,
- environment2,
- environment3,
- environment4,
- environment5,
- environment6)
+ expect(resolve_environments).to contain_exactly(
+ environment1,
+ environment2,
+ environment3,
+ environment4,
+ environment5,
+ environment6
+ )
end
context 'with name' do
@@ -51,11 +53,13 @@ RSpec.describe Resolvers::EnvironmentsResolver do
context 'with states' do
it 'searches environments by state' do
- expect(resolve_environments(states: ['available'])).to contain_exactly(environment1,
- environment3,
- environment4,
- environment5,
- environment6)
+ expect(resolve_environments(states: ['available'])).to contain_exactly(
+ environment1,
+ environment3,
+ environment4,
+ environment5,
+ environment6
+ )
end
it 'generates an error if requested state is invalid' do
diff --git a/spec/graphql/resolvers/error_tracking/sentry_errors_resolver_spec.rb b/spec/graphql/resolvers/error_tracking/sentry_errors_resolver_spec.rb
index 65b6c551dde..784d1142bf2 100644
--- a/spec/graphql/resolvers/error_tracking/sentry_errors_resolver_spec.rb
+++ b/spec/graphql/resolvers/error_tracking/sentry_errors_resolver_spec.rb
@@ -99,6 +99,12 @@ RSpec.describe Resolvers::ErrorTracking::SentryErrorsResolver do
private
def resolve_errors(args = {}, context = { current_user: current_user })
- resolve(described_class, obj: error_collection, args: args, ctx: context)
+ field = ::Types::BaseField.from_options(
+ 'dummy_field',
+ owner: resolver_parent,
+ resolver: described_class,
+ connection_extension: Gitlab::Graphql::Extensions::ExternallyPaginatedArrayExtension
+ )
+ resolve_field(field, error_collection, args: args, ctx: context, object_type: resolver_parent)
end
end
diff --git a/spec/graphql/resolvers/kas/agent_configurations_resolver_spec.rb b/spec/graphql/resolvers/kas/agent_configurations_resolver_spec.rb
index e4cf62b0361..dc3721d2e83 100644
--- a/spec/graphql/resolvers/kas/agent_configurations_resolver_spec.rb
+++ b/spec/graphql/resolvers/kas/agent_configurations_resolver_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
RSpec.describe Resolvers::Kas::AgentConfigurationsResolver do
include GraphqlHelpers
- it { expect(described_class.type).to eq(Types::Kas::AgentConfigurationType) }
+ it { expect(described_class.type).to eq(Types::Kas::AgentConfigurationType.connection_type) }
it { expect(described_class.null).to be_truthy }
- it { expect(described_class.field_options).to include(calls_gitaly: true) }
+ it { expect(described_class.calls_gitaly?).to eq(true) }
describe '#resolve' do
let_it_be(:project) { create(:project) }
@@ -26,7 +26,7 @@ RSpec.describe Resolvers::Kas::AgentConfigurationsResolver do
end
it 'returns agents configured for the project' do
- expect(subject).to contain_exactly(agent1, agent2)
+ expect(subject.items).to contain_exactly(agent1, agent2)
end
context 'an error is returned from the KAS client' do
@@ -44,7 +44,7 @@ RSpec.describe Resolvers::Kas::AgentConfigurationsResolver do
context 'user does not have permission' do
let(:user) { create(:user) }
- it { is_expected.to be_empty }
+ it { expect(subject.items).to be_empty }
end
end
end
diff --git a/spec/graphql/resolvers/package_pipelines_resolver_spec.rb b/spec/graphql/resolvers/package_pipelines_resolver_spec.rb
index a52dee59bc6..9d5f610c7db 100644
--- a/spec/graphql/resolvers/package_pipelines_resolver_spec.rb
+++ b/spec/graphql/resolvers/package_pipelines_resolver_spec.rb
@@ -10,6 +10,8 @@ RSpec.describe Resolvers::PackagePipelinesResolver do
let(:user) { package.project.first_owner }
+ it { expect(described_class.extras).to include(:lookahead) }
+
describe '#resolve' do
let(:returned_pipelines) { graphql_dig_at(subject, 'data', 'package', 'pipelines', 'nodes') }
let(:returned_errors) { graphql_dig_at(subject, 'errors', 'message') }
@@ -156,19 +158,4 @@ RSpec.describe Resolvers::PackagePipelinesResolver do
expect(returned_pipelines).to match_array(entities)
end
end
-
- describe '.field options' do
- let(:field) do
- field_options = described_class.field_options.merge(
- owner: resolver_parent,
- name: 'dummy_field'
- )
- ::Types::BaseField.new(**field_options)
- end
-
- it 'sets them properly' do
- expect(field).not_to be_connection
- expect(field.extras).to match_array([:lookahead])
- end
- end
end
diff --git a/spec/graphql/resolvers/paginated_tree_resolver_spec.rb b/spec/graphql/resolvers/paginated_tree_resolver_spec.rb
index 931d4ba132c..7e0e55e8d2a 100644
--- a/spec/graphql/resolvers/paginated_tree_resolver_spec.rb
+++ b/spec/graphql/resolvers/paginated_tree_resolver_spec.rb
@@ -120,12 +120,13 @@ RSpec.describe Resolvers::PaginatedTreeResolver, feature_category: :source_code_
end
def resolve_repository(args, opts = {})
- field_options = described_class.field_options.merge(
+ field_options = {
owner: resolver_parent,
- name: 'field_value'
- ).merge(opts)
+ resolver: described_class,
+ connection_extension: Gitlab::Graphql::Extensions::ExternallyPaginatedArrayExtension
+ }.merge(opts)
- field = ::Types::BaseField.new(**field_options)
+ field = ::Types::BaseField.from_options('field_value', **field_options)
resolve_field(field, repository, args: args, object_type: resolver_parent)
end
end
diff --git a/spec/graphql/resolvers/project_issues_resolver_spec.rb b/spec/graphql/resolvers/project_issues_resolver_spec.rb
index faafbc465e3..593489290e0 100644
--- a/spec/graphql/resolvers/project_issues_resolver_spec.rb
+++ b/spec/graphql/resolvers/project_issues_resolver_spec.rb
@@ -351,9 +351,14 @@ RSpec.describe Resolvers::ProjectIssuesResolver do
context 'when filtering by negated author' do
let_it_be(:issue_by_reporter) { create(:issue, author: reporter, project: project, state: :opened) }
+ let_it_be(:other_user) { build_stubbed(:user) }
it 'returns issues without the specified author_username' do
- expect(resolve_issues(not: { author_username: issue1.author.username })).to contain_exactly(issue_by_reporter)
+ expect(resolve_issues(not: { author_username: [issue1.author.username] })).to contain_exactly(issue_by_reporter)
+ end
+
+ it 'returns issues without the specified author_usernames' do
+ expect(resolve_issues(not: { author_username: [issue1.author.username, other_user.username] })).to contain_exactly(issue_by_reporter)
end
end
end
diff --git a/spec/graphql/resolvers/project_merge_requests_resolver_spec.rb b/spec/graphql/resolvers/project_merge_requests_resolver_spec.rb
index 45777aa96e1..67bb6411d08 100644
--- a/spec/graphql/resolvers/project_merge_requests_resolver_spec.rb
+++ b/spec/graphql/resolvers/project_merge_requests_resolver_spec.rb
@@ -11,13 +11,15 @@ RSpec.describe Resolvers::ProjectMergeRequestsResolver do
let_it_be(:reviewer) { create(:user) }
let_it_be(:merge_request) do
- create(:merge_request,
- :unique_branches,
- source_project: project,
- target_project: project,
- author: other_user,
- assignee: other_user,
- reviewers: [reviewer])
+ create(
+ :merge_request,
+ :unique_branches,
+ source_project: project,
+ target_project: project,
+ author: other_user,
+ assignee: other_user,
+ reviewers: [reviewer]
+ )
end
before do
diff --git a/spec/graphql/resolvers/project_milestones_resolver_spec.rb b/spec/graphql/resolvers/project_milestones_resolver_spec.rb
index af6b16804b0..c7ece114373 100644
--- a/spec/graphql/resolvers/project_milestones_resolver_spec.rb
+++ b/spec/graphql/resolvers/project_milestones_resolver_spec.rb
@@ -44,10 +44,14 @@ RSpec.describe 'Resolvers::ProjectMilestonesResolver' do
end
it 'calls MilestonesFinder with correct parameters' do
- expect(MilestonesFinder).to receive(:new)
- .with(args(project_ids: project.id, group_ids: contain_exactly(group, parent_group),
- state: 'all', sort: :due_date_asc))
- .and_call_original
+ expect(MilestonesFinder).to receive(:new).with(
+ args(
+ project_ids: project.id,
+ group_ids: contain_exactly(group, parent_group),
+ state: 'all',
+ sort: :due_date_asc
+ )
+ ).and_call_original
resolve_project_milestones(include_ancestors: true)
end
@@ -99,10 +103,15 @@ RSpec.describe 'Resolvers::ProjectMilestonesResolver' do
start_date = now_date
end_date = now_date + 5.days
- expect(MilestonesFinder).to receive(:new)
- .with(args(project_ids: project.id, state: 'all',
- sort: :due_date_asc, start_date: start_date, end_date: end_date))
- .and_call_original
+ expect(MilestonesFinder).to receive(:new).with(
+ args(
+ project_ids: project.id,
+ state: 'all',
+ sort: :due_date_asc,
+ start_date: start_date,
+ end_date: end_date
+ )
+ ).and_call_original
resolve_project_milestones(timeframe: { start: start_date, end: end_date })
end
diff --git a/spec/graphql/resolvers/projects/fork_targets_resolver_spec.rb b/spec/graphql/resolvers/projects/fork_targets_resolver_spec.rb
index ef1b18f0a11..07415077d1f 100644
--- a/spec/graphql/resolvers/projects/fork_targets_resolver_spec.rb
+++ b/spec/graphql/resolvers/projects/fork_targets_resolver_spec.rb
@@ -38,12 +38,8 @@ RSpec.describe Resolvers::Projects::ForkTargetsResolver do
end
def resolve_targets(args, opts = {})
- field_options = described_class.field_options.merge(
- owner: resolver_parent,
- name: 'field_value'
- ).merge(opts)
-
- field = ::Types::BaseField.new(**field_options)
+ field_options = { owner: resolver_parent, resolver: described_class }.merge(opts)
+ field = ::Types::BaseField.from_options('field_value', **field_options)
resolve_field(field, project, args: args, ctx: { current_user: user }, object_type: resolver_parent)
end
end
diff --git a/spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb b/spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb
index 6af2f56cef4..b2d5e0b7405 100644
--- a/spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb
+++ b/spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb
@@ -60,9 +60,9 @@ RSpec.describe Resolvers::Projects::JiraProjectsResolver, feature_category: :int
project_ids = jira_projects.map(&:id)
expect(jira_projects.size).to eq 2
- expect(project_keys).to eq(%w(EX ABC))
- expect(project_names).to eq(%w(Example Alphabetical))
- expect(project_ids).to eq(%w(10000 10001))
+ expect(project_keys).to eq(%w[EX ABC])
+ expect(project_names).to eq(%w[Example Alphabetical])
+ expect(project_ids).to eq(%w[10000 10001])
expect(resolver.max_page_size).to eq(2)
end
@@ -75,9 +75,9 @@ RSpec.describe Resolvers::Projects::JiraProjectsResolver, feature_category: :int
project_ids = jira_projects.map(&:id)
expect(jira_projects.size).to eq 1
- expect(project_keys).to eq(%w(ABC))
- expect(project_names).to eq(%w(Alphabetical))
- expect(project_ids).to eq(%w(10001))
+ expect(project_keys).to eq(%w[ABC])
+ expect(project_names).to eq(%w[Alphabetical])
+ expect(project_ids).to eq(%w[10001])
expect(resolver.max_page_size).to eq(1)
end
end
diff --git a/spec/graphql/resolvers/projects_resolver_spec.rb b/spec/graphql/resolvers/projects_resolver_spec.rb
index 77507474170..058d46a5e86 100644
--- a/spec/graphql/resolvers/projects_resolver_spec.rb
+++ b/spec/graphql/resolvers/projects_resolver_spec.rb
@@ -6,11 +6,11 @@ RSpec.describe Resolvers::ProjectsResolver do
include GraphqlHelpers
describe '#resolve' do
- subject { resolve(described_class, obj: nil, args: filters, ctx: { current_user: current_user }) }
+ subject { resolve(described_class, obj: nil, args: filters, ctx: { current_user: current_user }).items }
let_it_be(:group) { create(:group, name: 'public-group') }
let_it_be(:private_group) { create(:group, name: 'private-group') }
- let_it_be(:project) { create(:project, :public, topic_list: %w(ruby javascript)) }
+ let_it_be(:project) { create(:project, :public, topic_list: %w[ruby javascript]) }
let_it_be(:other_project) { create(:project, :public) }
let_it_be(:group_project) { create(:project, :public, group: group) }
let_it_be(:private_project) { create(:project, :private) }
@@ -68,7 +68,7 @@ RSpec.describe Resolvers::ProjectsResolver do
end
context 'when topics filter is provided' do
- let(:filters) { { topics: %w(ruby) } }
+ let(:filters) { { topics: %w[ruby] } }
it 'returns matching project' do
is_expected.to contain_exactly(project)
@@ -148,7 +148,7 @@ RSpec.describe Resolvers::ProjectsResolver do
end
context 'when topics filter is provided' do
- let(:filters) { { topics: %w(ruby) } }
+ let(:filters) { { topics: %w[ruby] } }
it 'returns matching project' do
is_expected.to contain_exactly(project)
diff --git a/spec/graphql/resolvers/user_notes_count_resolver_spec.rb b/spec/graphql/resolvers/user_notes_count_resolver_spec.rb
index b3368d532b2..810dfc9c324 100644
--- a/spec/graphql/resolvers/user_notes_count_resolver_spec.rb
+++ b/spec/graphql/resolvers/user_notes_count_resolver_spec.rb
@@ -27,6 +27,14 @@ RSpec.describe Resolvers::UserNotesCountResolver do
it 'returns the number of non-system notes for the issue' do
expect(subject).to eq(2)
end
+
+ context 'when not logged in' do
+ let(:user) { nil }
+
+ it 'returns the number of non-system notes for the issue' do
+ expect(subject).to eq(2)
+ end
+ end
end
context 'when a user has permission to view notes' do
@@ -65,6 +73,14 @@ RSpec.describe Resolvers::UserNotesCountResolver do
it 'returns the number of non-system notes for the merge request' do
expect(subject).to eq(2)
end
+
+ context 'when not logged in' do
+ let(:user) { nil }
+
+ it 'returns the number of non-system notes for the merge request' do
+ expect(subject).to eq(2)
+ end
+ end
end
context 'when a user has permission to view notes' do
diff --git a/spec/graphql/resolvers/users/participants_resolver_spec.rb b/spec/graphql/resolvers/users/participants_resolver_spec.rb
index 22111626c5b..ae23eabaeb0 100644
--- a/spec/graphql/resolvers/users/participants_resolver_spec.rb
+++ b/spec/graphql/resolvers/users/participants_resolver_spec.rb
@@ -138,7 +138,8 @@ RSpec.describe Resolvers::Users::ParticipantsResolver do
# 1 extra query per source (3 emojis + 2 notes) to fetch participables collection
# 2 extra queries to load work item widgets collection
# 1 extra query to load the project creator to check if they are banned
- expect { query.call }.not_to exceed_query_limit(control_count).with_threshold(8)
+ # 1 extra query to load the invited groups to see if the user is banned from any of them
+ expect { query.call }.not_to exceed_query_limit(control_count).with_threshold(9)
end
it 'does not execute N+1 for system note metadata relation' do
diff --git a/spec/graphql/resolvers/work_items_resolver_spec.rb b/spec/graphql/resolvers/work_items_resolver_spec.rb
index c856f990e7a..d030f543958 100644
--- a/spec/graphql/resolvers/work_items_resolver_spec.rb
+++ b/spec/graphql/resolvers/work_items_resolver_spec.rb
@@ -13,20 +13,37 @@ RSpec.describe Resolvers::WorkItemsResolver do
let_it_be(:other_project) { create(:project, group: group) }
let_it_be(:item1) do
- create(:work_item, project: project, state: :opened, created_at:
- 3.hours.ago, updated_at: 3.hours.ago)
+ create(
+ :work_item,
+ project: project,
+ state: :opened,
+ created_at: 3.hours.ago,
+ updated_at: 3.hours.ago
+ )
end
let_it_be(:item2) do
- create(:work_item, project: project, state: :closed, title: 'foo',
- created_at: 1.hour.ago, updated_at: 1.hour.ago, closed_at:
- 1.hour.ago)
+ create(
+ :work_item,
+ project: project,
+ state: :closed,
+ title: 'foo',
+ created_at: 1.hour.ago,
+ updated_at: 1.hour.ago,
+ closed_at: 1.hour.ago
+ )
end
let_it_be(:item3) do
- create(:work_item, project: other_project, state: :closed, title: 'foo',
- created_at: 1.hour.ago, updated_at: 1.hour.ago, closed_at:
- 1.hour.ago)
+ create(
+ :work_item,
+ project: other_project,
+ state: :closed,
+ title: 'foo',
+ created_at: 1.hour.ago,
+ updated_at: 1.hour.ago,
+ closed_at: 1.hour.ago
+ )
end
let_it_be(:item4) { create(:work_item) }
diff --git a/spec/graphql/subscriptions/issuable_updated_spec.rb b/spec/graphql/subscriptions/issuable_updated_spec.rb
index bc640e9e3c4..906b7e1f11a 100644
--- a/spec/graphql/subscriptions/issuable_updated_spec.rb
+++ b/spec/graphql/subscriptions/issuable_updated_spec.rb
@@ -52,8 +52,8 @@ RSpec.describe Subscriptions::IssuableUpdated do
let(:current_user) { unauthorized_user }
it 'unsubscribes the user' do
- # GraphQL::Execution::Execute::Skip is returned when unsubscribed
- expect(subject).to be_an(GraphQL::Execution::Execute::Skip)
+ # GraphQL::Execution::Skip is returned when unsubscribed
+ expect(subject).to be_an(GraphQL::Execution::Skip)
end
end
end
diff --git a/spec/graphql/types/achievements/user_achievement_type_spec.rb b/spec/graphql/types/achievements/user_achievement_type_spec.rb
index b7fe4d815f7..bb332c26894 100644
--- a/spec/graphql/types/achievements/user_achievement_type_spec.rb
+++ b/spec/graphql/types/achievements/user_achievement_type_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe GitlabSchema.types['UserAchievement'], feature_category: :user_pr
created_at
updated_at
revoked_at
+ priority
]
end
diff --git a/spec/graphql/types/base_edge_spec.rb b/spec/graphql/types/base_edge_spec.rb
index 0cc0c838fac..8aad77a6400 100644
--- a/spec/graphql/types/base_edge_spec.rb
+++ b/spec/graphql/types/base_edge_spec.rb
@@ -7,8 +7,7 @@ RSpec.describe Types::BaseEdge, feature_category: :api do
let_it_be(:test_schema) do
project_edge_type = Class.new(described_class) do
- field :proof_of_admin_rights, String,
- null: true, authorize: :admin_project
+ field :proof_of_admin_rights, String, null: true, authorize: :admin_project
def proof_of_admin_rights
'ok'
diff --git a/spec/graphql/types/base_field_spec.rb b/spec/graphql/types/base_field_spec.rb
index 831d36950db..b52d5514368 100644
--- a/spec/graphql/types/base_field_spec.rb
+++ b/spec/graphql/types/base_field_spec.rb
@@ -31,23 +31,25 @@ RSpec.describe Types::BaseField, feature_category: :api do
end
it 'only tests the resolver authorization if it authorizes_object?' do
- resolver = Class.new
+ resolver = Class.new(Resolvers::BaseResolver)
- field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true,
- resolver_class: resolver)
+ field = described_class.new(
+ name: 'test', type: GraphQL::Types::String, null: true, resolver_class: resolver
+ )
expect(field).to be_authorized(object, nil, ctx)
end
it 'tests the resolver authorization, if provided' do
- resolver = Class.new do
+ resolver = Class.new(Resolvers::BaseResolver) do
include Gitlab::Graphql::Authorize::AuthorizeResource
authorizes_object!
end
- field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true,
- resolver_class: resolver)
+ field = described_class.new(
+ name: 'test', type: GraphQL::Types::String, null: true, resolver_class: resolver
+ )
expect(resolver).to receive(:authorized?).with(object, ctx).and_return(false)
@@ -55,15 +57,19 @@ RSpec.describe Types::BaseField, feature_category: :api do
end
it 'tests field authorization before resolver authorization, when field auth fails' do
- resolver = Class.new do
+ resolver = Class.new(Resolvers::BaseResolver) do
include Gitlab::Graphql::Authorize::AuthorizeResource
authorizes_object!
end
- field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true,
- authorize: :foo,
- resolver_class: resolver)
+ field = described_class.new(
+ name: 'test',
+ type: GraphQL::Types::String,
+ null: true,
+ authorize: :foo,
+ resolver_class: resolver
+ )
expect(Ability).to receive(:allowed?).with(current_user, :foo, object).and_return(false)
expect(resolver).not_to receive(:authorized?)
@@ -72,15 +78,19 @@ RSpec.describe Types::BaseField, feature_category: :api do
end
it 'tests field authorization before resolver authorization, when field auth succeeds' do
- resolver = Class.new do
+ resolver = Class.new(Resolvers::BaseResolver) do
include Gitlab::Graphql::Authorize::AuthorizeResource
authorizes_object!
end
- field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true,
- authorize: :foo,
- resolver_class: resolver)
+ field = described_class.new(
+ name: 'test',
+ type: GraphQL::Types::String,
+ null: true,
+ authorize: :foo,
+ resolver_class: resolver
+ )
expect(Ability).to receive(:allowed?).with(current_user, :foo, object).and_return(true)
expect(resolver).to receive(:authorized?).with(object, ctx).and_return(false)
@@ -91,7 +101,7 @@ RSpec.describe Types::BaseField, feature_category: :api do
context 'when considering complexity' do
let(:resolver) do
- Class.new(described_class) do
+ Class.new(Resolvers::BaseResolver) do
def self.resolver_complexity(args, child_complexity:)
2 if args[:foo]
end
diff --git a/spec/graphql/types/boards/board_issue_input_type_spec.rb b/spec/graphql/types/boards/board_issue_input_type_spec.rb
index ed2872c3598..dccd9f1c16c 100644
--- a/spec/graphql/types/boards/board_issue_input_type_spec.rb
+++ b/spec/graphql/types/boards/board_issue_input_type_spec.rb
@@ -6,8 +6,8 @@ RSpec.describe GitlabSchema.types['BoardIssueInput'] do
it { expect(described_class.graphql_name).to eq('BoardIssueInput') }
it 'has specific fields' do
- allowed_args = %w(labelName milestoneTitle assigneeUsername authorUsername
- releaseTag myReactionEmoji not search assigneeWildcardId confidential)
+ allowed_args = %w[labelName milestoneTitle assigneeUsername authorUsername
+ releaseTag myReactionEmoji not search assigneeWildcardId confidential]
expect(described_class.arguments.keys).to include(*allowed_args)
expect(described_class.arguments['not'].type).to eq(Types::Boards::NegatedBoardIssueInputType)
diff --git a/spec/graphql/types/ci/ci_cd_setting_type_spec.rb b/spec/graphql/types/ci/ci_cd_setting_type_spec.rb
new file mode 100644
index 00000000000..5fdfb405e23
--- /dev/null
+++ b/spec/graphql/types/ci/ci_cd_setting_type_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::CiCdSettingType, feature_category: :continuous_integration do
+ specify { expect(described_class.graphql_name).to eq('ProjectCiCdSetting') }
+
+ it 'exposes the expected fields' do
+ expected_fields = %w[
+ inbound_job_token_scope_enabled job_token_scope_enabled
+ keep_latest_artifact merge_pipelines_enabled project
+ ]
+
+ if Gitlab.ee?
+ expected_fields += %w[
+ merge_trains_skip_train_allowed merge_trains_enabled
+ ]
+ end
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/ci/detailed_status_type_spec.rb b/spec/graphql/types/ci/detailed_status_type_spec.rb
index 81ab1b52552..c069098e5b0 100644
--- a/spec/graphql/types/ci/detailed_status_type_spec.rb
+++ b/spec/graphql/types/ci/detailed_status_type_spec.rb
@@ -10,9 +10,12 @@ RSpec.describe Types::Ci::DetailedStatusType do
specify { expect(described_class.graphql_name).to eq('DetailedStatus') }
it 'has all fields' do
- expect(described_class).to have_graphql_fields(:id, :group, :icon, :favicon,
- :details_path, :has_details,
- :label, :text, :tooltip, :action)
+ expect(described_class).to have_graphql_fields(
+ :id, :group, :icon, :favicon,
+ :details_path, :has_details,
+ :label, :name, :text, :tooltip,
+ :action
+ )
end
describe 'id field' do
@@ -29,12 +32,12 @@ RSpec.describe Types::Ci::DetailedStatusType do
status = stage.detailed_status(stage.pipeline.user)
expected_status = {
- button_title: status.action_button_title,
- icon: status.action_icon,
- method: status.action_method,
- path: status.action_path,
- title: status.action_title
- }
+ button_title: status.action_button_title,
+ icon: status.action_icon,
+ method: status.action_method,
+ path: status.action_path,
+ title: status.action_title
+ }
expect(resolve_field('action', status, arg_style: :internal)).to eq(expected_status)
end
diff --git a/spec/graphql/types/ci/job_base_field_spec.rb b/spec/graphql/types/ci/job_base_field_spec.rb
index 2d283ce854d..ec7d2a7d33a 100644
--- a/spec/graphql/types/ci/job_base_field_spec.rb
+++ b/spec/graphql/types/ci/job_base_field_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe Types::Ci::JobBaseField, feature_category: :runner_fleet do
end
context 'with field resolver' do
- let(:resolver) { Class.new }
+ let(:resolver) { Class.new(Resolvers::BaseResolver) }
let(:args) { { resolver_class: resolver } }
it 'only tests the resolver authorization if it authorizes_object?' do
@@ -86,7 +86,7 @@ RSpec.describe Types::Ci::JobBaseField, feature_category: :runner_fleet do
context 'when resolver authorizes object' do
let(:resolver) do
- Class.new do
+ Class.new(Resolvers::BaseResolver) do
include Gitlab::Graphql::Authorize::AuthorizeResource
authorizes_object!
diff --git a/spec/graphql/types/ci/job_trace_type_spec.rb b/spec/graphql/types/ci/job_trace_type_spec.rb
index 69123445b8b..6d1214eafe6 100644
--- a/spec/graphql/types/ci/job_trace_type_spec.rb
+++ b/spec/graphql/types/ci/job_trace_type_spec.rb
@@ -123,22 +123,6 @@ RSpec.describe GitlabSchema.types['CiJobTrace'], feature_category: :continuous_i
end
end
- context 'when FF graphql_job_trace_html_summary_max_size is disabled' do
- before do
- stub_feature_flags(graphql_job_trace_html_summary_max_size: false)
- end
-
- let(:args) { { last_lines: 20 } }
-
- it 'does not limit the read size from the raw trace' do
- expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
- expect(trace).to receive(:html).with(last_lines: 20, max_size: nil).and_call_original
- end
-
- is_expected.to eq "<span>#{(1..20).map { (1..1024).map { 'a' }.join('') }.join('<br/>')}</span>"
- end
- end
-
context 'when trace is cut in middle of a line' do
let(:args) { {} }
diff --git a/spec/graphql/types/ci/pipeline_type_spec.rb b/spec/graphql/types/ci/pipeline_type_spec.rb
index 67209874b54..26dfc0b10c6 100644
--- a/spec/graphql/types/ci/pipeline_type_spec.rb
+++ b/spec/graphql/types/ci/pipeline_type_spec.rb
@@ -9,18 +9,20 @@ RSpec.describe Types::Ci::PipelineType do
it 'contains attributes related to a pipeline' do
expected_fields = %w[
- id iid sha before_sha complete status detailed_status config_source
+ id iid sha before_sha complete status detailed_status config_source name
duration queued_duration
coverage created_at updated_at started_at finished_at committed_at
stages user retryable cancelable jobs source_job job job_artifacts downstream
upstream path project active user_permissions warnings commit commit_path uses_needs
test_report_summary test_suite ref ref_path warning_messages merge_request_event_type
+ name total_jobs triggered_by_path child source stuck
+ latest merge_request ref_text failure_reason
]
if Gitlab.ee?
expected_fields += %w[
security_report_summary security_report_findings security_report_finding
- code_quality_reports dast_profile code_quality_report_summary
+ code_quality_reports dast_profile code_quality_report_summary compute_minutes
]
end
diff --git a/spec/graphql/types/current_user_todos_type_spec.rb b/spec/graphql/types/current_user_todos_type_spec.rb
index 4ce97e1c006..febbe868f33 100644
--- a/spec/graphql/types/current_user_todos_type_spec.rb
+++ b/spec/graphql/types/current_user_todos_type_spec.rb
@@ -173,9 +173,7 @@ RSpec.describe GitlabSchema.types['CurrentUserTodos'] do
end
it 'returns correct data' do
- result = execute_query(query_type,
- graphql: query_without_state_arguments,
- raise_on_error: true).to_h
+ result = execute_query(query_type, graphql: query_without_state_arguments, raise_on_error: true).to_h
expect(result.dig('data', 'a', 'todos', 'nodes')).to contain_exactly(
a_graphql_entity_for(todo_a),
@@ -198,9 +196,7 @@ RSpec.describe GitlabSchema.types['CurrentUserTodos'] do
end
it 'returns correct data, when state arguments are supplied' do
- result = execute_query(query_type,
- raise_on_error: true,
- graphql: with_state_arguments).to_h
+ result = execute_query(query_type, raise_on_error: true, graphql: with_state_arguments).to_h
expect(result.dig('data', 'a', 'todos', 'nodes')).to contain_exactly(
a_graphql_entity_for(todo_a)
diff --git a/spec/graphql/types/design_management/design_collection_copy_state_enum_spec.rb b/spec/graphql/types/design_management/design_collection_copy_state_enum_spec.rb
index f536d91aeda..15960407a6f 100644
--- a/spec/graphql/types/design_management/design_collection_copy_state_enum_spec.rb
+++ b/spec/graphql/types/design_management/design_collection_copy_state_enum_spec.rb
@@ -6,6 +6,6 @@ RSpec.describe GitlabSchema.types['DesignCollectionCopyState'] do
it { expect(described_class.graphql_name).to eq('DesignCollectionCopyState') }
it 'exposes the correct event states' do
- expect(described_class.values.keys).to match_array(%w(READY IN_PROGRESS ERROR))
+ expect(described_class.values.keys).to match_array(%w[READY IN_PROGRESS ERROR])
end
end
diff --git a/spec/graphql/types/issue_type_spec.rb b/spec/graphql/types/issue_type_spec.rb
index d4d0eff9adb..f6d461d6a5f 100644
--- a/spec/graphql/types/issue_type_spec.rb
+++ b/spec/graphql/types/issue_type_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe GitlabSchema.types['Issue'] do
let_it_be(:now) { Time.now.change(usec: 0) }
let_it_be(:issues) { create_list(:issue, 10, project: project, created_at: now) }
- let(:count_path) { %w(data project issues count) }
+ let(:count_path) { %w[data project issues count] }
let(:page_size) { 3 }
let(:query) do
<<~GRAPHQL
@@ -81,8 +81,8 @@ RSpec.describe GitlabSchema.types['Issue'] do
end
context 'count' do
- let(:end_cursor) { %w(data project issues pageInfo endCursor) }
- let(:issues_edges) { %w(data project issues edges) }
+ let(:end_cursor) { %w[data project issues pageInfo endCursor] }
+ let(:issues_edges) { %w[data project issues edges] }
it 'returns total count' do
expect(subject.dig(*count_path)).to eq(issues.count)
diff --git a/spec/graphql/types/merge_request_type_spec.rb b/spec/graphql/types/merge_request_type_spec.rb
index 9742908edf9..be25c5177f6 100644
--- a/spec/graphql/types/merge_request_type_spec.rb
+++ b/spec/graphql/types/merge_request_type_spec.rb
@@ -37,6 +37,7 @@ RSpec.describe GitlabSchema.types['MergeRequest'], feature_category: :code_revie
squash_on_merge available_auto_merge_strategies
has_ci mergeable commits committers commits_without_merge_commits squash security_auto_fix default_squash_commit_message
auto_merge_strategy merge_user award_emoji prepared_at codequality_reports_comparer supports_lock_on_merge
+ mergeability_checks
]
expect(described_class).to have_graphql_fields(*expected_fields).at_least
diff --git a/spec/graphql/types/merge_requests/mergeability_check_identifier_enum_spec.rb b/spec/graphql/types/merge_requests/mergeability_check_identifier_enum_spec.rb
new file mode 100644
index 00000000000..093cd670418
--- /dev/null
+++ b/spec/graphql/types/merge_requests/mergeability_check_identifier_enum_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::MergeRequests::MergeabilityCheckIdentifierEnum, feature_category: :code_review_workflow do
+ specify { expect(described_class.graphql_name).to eq('MergeabilityCheckIdentifier') }
+
+ it 'exposes all the existing mergeability check identifiers' do
+ expect(described_class.values.keys).to contain_exactly(
+ *MergeRequest.all_mergeability_checks.map { |check_class| check_class.identifier.to_s.upcase }
+ )
+ end
+end
diff --git a/spec/graphql/types/merge_requests/mergeability_check_status_enum_spec.rb b/spec/graphql/types/merge_requests/mergeability_check_status_enum_spec.rb
new file mode 100644
index 00000000000..0a15d83f560
--- /dev/null
+++ b/spec/graphql/types/merge_requests/mergeability_check_status_enum_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::MergeRequests::MergeabilityCheckStatusEnum, feature_category: :code_review_workflow do
+ specify { expect(described_class.graphql_name).to eq('MergeabilityCheckStatus') }
+
+ it 'exposes all the existing mergeability check statuses' do
+ expect(described_class.values.keys).to contain_exactly(
+ *%w[SUCCESS FAILED INACTIVE]
+ )
+ end
+end
diff --git a/spec/graphql/types/merge_requests/mergeability_check_type_spec.rb b/spec/graphql/types/merge_requests/mergeability_check_type_spec.rb
new file mode 100644
index 00000000000..fb6ac0ad271
--- /dev/null
+++ b/spec/graphql/types/merge_requests/mergeability_check_type_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::MergeRequests::MergeabilityCheckType, feature_category: :code_review_workflow do
+ let(:fields) { %i[identifier status] }
+
+ specify { expect(described_class.graphql_name).to eq('MergeRequestMergeabilityCheck') }
+ specify { expect(described_class).to have_graphql_fields(fields) }
+end
diff --git a/spec/graphql/types/namespace_type_spec.rb b/spec/graphql/types/namespace_type_spec.rb
index d80235023ef..9e1a2bfd466 100644
--- a/spec/graphql/types/namespace_type_spec.rb
+++ b/spec/graphql/types/namespace_type_spec.rb
@@ -15,5 +15,5 @@ RSpec.describe GitlabSchema.types['Namespace'] do
expect(described_class).to include_graphql_fields(*expected_fields)
end
- specify { expect(described_class).to require_graphql_authorizations(:read_namespace) }
+ specify { expect(described_class).to require_graphql_authorizations(:read_namespace_via_membership) }
end
diff --git a/spec/graphql/types/packages/package_base_type_spec.rb b/spec/graphql/types/packages/package_base_type_spec.rb
index 7156f22c513..ebe29da0539 100644
--- a/spec/graphql/types/packages/package_base_type_spec.rb
+++ b/spec/graphql/types/packages/package_base_type_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GitlabSchema.types['PackageBase'] do
+RSpec.describe GitlabSchema.types['PackageBase'], feature_category: :package_registry do
specify { expect(described_class.description).to eq('Represents a package in the Package Registry') }
specify { expect(described_class).to require_graphql_authorizations(:read_package) }
@@ -13,7 +13,7 @@ RSpec.describe GitlabSchema.types['PackageBase'] do
created_at updated_at
project
tags metadata
- status can_destroy
+ status status_message can_destroy
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/packages/protection/rule_access_level_enum_spec.rb b/spec/graphql/types/packages/protection/rule_access_level_enum_spec.rb
new file mode 100644
index 00000000000..421b5fb0f39
--- /dev/null
+++ b/spec/graphql/types/packages/protection/rule_access_level_enum_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['PackagesProtectionRuleAccessLevel'], feature_category: :package_registry do
+ it 'exposes all options' do
+ expect(described_class.values.keys).to match_array(%w[DEVELOPER MAINTAINER OWNER])
+ end
+end
diff --git a/spec/graphql/types/packages/protection/rule_package_type_enum_spec.rb b/spec/graphql/types/packages/protection/rule_package_type_enum_spec.rb
new file mode 100644
index 00000000000..b0d9772f285
--- /dev/null
+++ b/spec/graphql/types/packages/protection/rule_package_type_enum_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['PackagesProtectionRulePackageType'], feature_category: :package_registry do
+ it 'exposes all options' do
+ expect(described_class.values.keys).to contain_exactly('NPM')
+ end
+end
diff --git a/spec/graphql/types/packages/protection/rule_type_spec.rb b/spec/graphql/types/packages/protection/rule_type_spec.rb
new file mode 100644
index 00000000000..a4a458d3568
--- /dev/null
+++ b/spec/graphql/types/packages/protection/rule_type_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['PackagesProtectionRule'], feature_category: :package_registry do
+ specify { expect(described_class.graphql_name).to eq('PackagesProtectionRule') }
+
+ specify { expect(described_class.description).to be_present }
+
+ specify { expect(described_class).to require_graphql_authorizations(:admin_package) }
+
+ describe 'package_name_pattern' do
+ subject { described_class.fields['packageNamePattern'] }
+
+ it { is_expected.to have_non_null_graphql_type(GraphQL::Types::String) }
+ end
+
+ describe 'package_type' do
+ subject { described_class.fields['packageType'] }
+
+ it { is_expected.to have_non_null_graphql_type(Types::Packages::Protection::RulePackageTypeEnum) }
+ end
+
+ describe 'push_protected_up_to_access_level' do
+ subject { described_class.fields['pushProtectedUpToAccessLevel'] }
+
+ it { is_expected.to have_non_null_graphql_type(Types::Packages::Protection::RuleAccessLevelEnum) }
+ end
+end
diff --git a/spec/graphql/types/project_statistics_type_spec.rb b/spec/graphql/types/project_statistics_type_spec.rb
index 558ff41f6f4..f639d042efe 100644
--- a/spec/graphql/types/project_statistics_type_spec.rb
+++ b/spec/graphql/types/project_statistics_type_spec.rb
@@ -4,9 +4,11 @@ require 'spec_helper'
RSpec.describe GitlabSchema.types['ProjectStatistics'] do
it 'has the expected fields' do
- expect(described_class).to include_graphql_fields(:storage_size, :repository_size, :lfs_objects_size,
- :build_artifacts_size, :packages_size, :commit_count,
- :wiki_size, :snippets_size, :pipeline_artifacts_size,
- :uploads_size, :container_registry_size)
+ expect(described_class).to include_graphql_fields(
+ :storage_size, :repository_size, :lfs_objects_size,
+ :build_artifacts_size, :packages_size, :commit_count,
+ :wiki_size, :snippets_size, :pipeline_artifacts_size,
+ :uploads_size, :container_registry_size
+ )
end
end
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index a20a4767bb5..e295014a2a6 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -39,6 +39,7 @@ RSpec.describe GitlabSchema.types['Project'] do
recent_issue_boards ci_config_path_or_default packages_cleanup_policy ci_variables
timelog_categories fork_targets branch_rules ci_config_variables pipeline_schedules languages
incident_management_timeline_event_tags visible_forks inherited_ci_variables autocomplete_users
+ ci_cd_settings
]
expect(described_class).to include_graphql_fields(*expected_fields)
@@ -328,30 +329,31 @@ RSpec.describe GitlabSchema.types['Project'] do
it { is_expected.to have_graphql_resolver(Resolvers::ProjectMergeRequestsResolver) }
it do
- is_expected.to have_graphql_arguments(:iids,
- :source_branches,
- :target_branches,
- :state,
- :draft,
- :approved,
- :labels,
- :before,
- :after,
- :first,
- :last,
- :merged_after,
- :merged_before,
- :created_after,
- :created_before,
- :updated_after,
- :updated_before,
- :author_username,
- :assignee_username,
- :reviewer_username,
- :milestone_title,
- :not,
- :sort
- )
+ is_expected.to have_graphql_arguments(
+ :iids,
+ :source_branches,
+ :target_branches,
+ :state,
+ :draft,
+ :approved,
+ :labels,
+ :before,
+ :after,
+ :first,
+ :last,
+ :merged_after,
+ :merged_before,
+ :created_after,
+ :created_before,
+ :updated_after,
+ :updated_before,
+ :author_username,
+ :assignee_username,
+ :reviewer_username,
+ :milestone_title,
+ :not,
+ :sort
+ )
end
end
@@ -755,23 +757,29 @@ RSpec.describe GitlabSchema.types['Project'] do
describe 'timeline_event_tags' do
let_it_be(:user) { create(:user) }
let_it_be(:project) do
- create(:project,
- :private,
- :repository,
- creator_id: user.id,
- namespace: user.namespace)
+ create(
+ :project,
+ :private,
+ :repository,
+ creator_id: user.id,
+ namespace: user.namespace
+ )
end
let_it_be(:tag1) do
- create(:incident_management_timeline_event_tag,
- project: project,
- name: 'Tag 1')
+ create(
+ :incident_management_timeline_event_tag,
+ project: project,
+ name: 'Tag 1'
+ )
end
let_it_be(:tag2) do
- create(:incident_management_timeline_event_tag,
- project: project,
- name: 'Tag 2')
+ create(
+ :incident_management_timeline_event_tag,
+ project: project,
+ name: 'Tag 2'
+ )
end
let(:query) do
@@ -809,11 +817,13 @@ RSpec.describe GitlabSchema.types['Project'] do
describe 'languages' do
let_it_be(:user) { create(:user) }
let_it_be(:project) do
- create(:project,
- :private,
- :repository,
- creator_id: user.id,
- namespace: user.namespace)
+ create(
+ :project,
+ :private,
+ :repository,
+ creator_id: user.id,
+ namespace: user.namespace
+ )
end
let(:query) do
diff --git a/spec/graphql/types/snippet_type_spec.rb b/spec/graphql/types/snippet_type_spec.rb
index a46c51e0a27..1fc9bc8b429 100644
--- a/spec/graphql/types/snippet_type_spec.rb
+++ b/spec/graphql/types/snippet_type_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe GitlabSchema.types['Snippet'] do
let_it_be(:user) { create(:user) }
it 'has the correct fields' do
- expected_fields = [:id, :title, :project, :author,
+ expected_fields = [:id, :title, :project, :author, :hidden,
:file_name, :description,
:visibility_level, :created_at, :updated_at,
:web_url, :raw_url, :ssh_url_to_repo, :http_url_to_repo,
diff --git a/spec/graphql/types/todo_type_spec.rb b/spec/graphql/types/todo_type_spec.rb
index 2118a777a45..af1981e804c 100644
--- a/spec/graphql/types/todo_type_spec.rb
+++ b/spec/graphql/types/todo_type_spec.rb
@@ -32,13 +32,15 @@ RSpec.describe GitlabSchema.types['Todo'] do
describe 'project field' do
let(:todo) do
- create(:todo,
- user: current_user,
- project: project,
- state: :done,
- action: Todo::ASSIGNED,
- author: author,
- target: issue)
+ create(
+ :todo,
+ user: current_user,
+ project: project,
+ state: :done,
+ action: Todo::ASSIGNED,
+ author: author,
+ target: issue
+ )
end
let(:query) do
@@ -86,13 +88,15 @@ RSpec.describe GitlabSchema.types['Todo'] do
describe 'group field' do
let(:todo) do
- create(:todo,
- user: current_user,
- group: group,
- state: :done,
- action: Todo::MENTIONED,
- author: author,
- target: issue)
+ create(
+ :todo,
+ user: current_user,
+ group: group,
+ state: :done,
+ action: Todo::MENTIONED,
+ author: author,
+ target: issue
+ )
end
let(:query) do
diff --git a/spec/graphql/types/work_item_type_spec.rb b/spec/graphql/types/work_item_type_spec.rb
index 328450084c2..e57f7aa77ce 100644
--- a/spec/graphql/types/work_item_type_spec.rb
+++ b/spec/graphql/types/work_item_type_spec.rb
@@ -31,6 +31,7 @@ RSpec.describe GitlabSchema.types['WorkItem'], feature_category: :team_planning
web_url
create_note_email
reference
+ archived
]
expect(described_class).to have_graphql_fields(*fields)
diff --git a/spec/graphql/types/work_items/widgets/hierarchy_type_spec.rb b/spec/graphql/types/work_items/widgets/hierarchy_type_spec.rb
index 20413a35c58..a46ffc3db50 100644
--- a/spec/graphql/types/work_items/widgets/hierarchy_type_spec.rb
+++ b/spec/graphql/types/work_items/widgets/hierarchy_type_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Types::WorkItems::Widgets::HierarchyType, feature_category: :team_planning do
it 'exposes the expected fields' do
- expected_fields = %i[parent children has_children type]
+ expected_fields = %i[parent children has_children ancestors type]
expect(described_class).to have_graphql_fields(*expected_fields)
end
diff --git a/spec/haml_lint/linter/documentation_links_spec.rb b/spec/haml_lint/linter/documentation_links_spec.rb
index d47127d9661..ee34751de72 100644
--- a/spec/haml_lint/linter/documentation_links_spec.rb
+++ b/spec/haml_lint/linter/documentation_links_spec.rb
@@ -11,49 +11,49 @@ RSpec.describe HamlLint::Linter::DocumentationLinks, feature_category: :tooling
shared_examples 'link validation rules' do |link_pattern|
context 'when link_to points to the existing file path' do
- let(:haml) { "= link_to 'Description', #{link_pattern}('index.md')" }
+ let(:haml) { "= link_to 'Description', #{link_pattern}('index')" }
it { is_expected.not_to report_lint }
end
context 'when link_to points to the existing file with valid anchor' do
- let(:haml) { "= link_to 'Description', #{link_pattern}('index.md', anchor: 'user-account'), target: '_blank'" }
+ let(:haml) { "= link_to 'Description', #{link_pattern}('index', anchor: 'user-account'), target: '_blank'" }
it { is_expected.not_to report_lint }
end
- context 'when link_to points to the existing file path without .md extension' do
- let(:haml) { "= link_to 'Description', #{link_pattern}('index')" }
+ context 'when link_to points to the existing file path with .md extension' do
+ let(:haml) { "= link_to 'Description', #{link_pattern}('index.md')" }
- it { is_expected.not_to report_lint }
+ it { is_expected.to report_lint }
end
context 'when anchor is not correct' do
- let(:haml) { "= link_to 'Description', #{link_pattern}('index.md', anchor: 'wrong')" }
+ let(:haml) { "= link_to 'Description', #{link_pattern}('index', anchor: 'wrong')" }
it { is_expected.to report_lint }
context "when #{link_pattern} has multiple options" do
- let(:haml) { "= link_to 'Description', #{link_pattern}('index.md', key: :value, anchor: 'wrong')" }
+ let(:haml) { "= link_to 'Description', #{link_pattern}('index', key: :value, anchor: 'wrong')" }
it { is_expected.to report_lint }
end
end
context 'when file path is wrong' do
- let(:haml) { "= link_to 'Description', #{link_pattern}('wrong.md'), target: '_blank'" }
+ let(:haml) { "= link_to 'Description', #{link_pattern}('wrong'), target: '_blank'" }
it { is_expected.to report_lint }
context 'when haml ends with block definition' do
- let(:haml) { "= link_to 'Description', #{link_pattern}('wrong.md') do" }
+ let(:haml) { "= link_to 'Description', #{link_pattern}('wrong') do" }
it { is_expected.to report_lint }
end
end
context 'when link with wrong file path is assigned to a variable' do
- let(:haml) { "- my_link = link_to 'Description', #{link_pattern}('wrong.md')" }
+ let(:haml) { "- my_link = link_to 'Description', #{link_pattern}('wrong')" }
it { is_expected.to report_lint }
end
@@ -65,13 +65,13 @@ RSpec.describe HamlLint::Linter::DocumentationLinks, feature_category: :tooling
end
context 'when anchor belongs to a different element' do
- let(:haml) { "= link_to 'Description', #{link_pattern}('index.md'), target: (anchor: 'blank')" }
+ let(:haml) { "= link_to 'Description', #{link_pattern}('index'), target: (anchor: 'blank')" }
it { is_expected.not_to report_lint }
end
context "when a simple #{link_pattern}" do
- let(:haml) { "- url = #{link_pattern}('wrong.md')" }
+ let(:haml) { "- url = #{link_pattern}('wrong')" }
it { is_expected.to report_lint }
end
@@ -83,13 +83,13 @@ RSpec.describe HamlLint::Linter::DocumentationLinks, feature_category: :tooling
end
context 'when link is a part of the tag' do
- let(:haml) { ".data-form{ data: { url: #{link_pattern}('wrong.md') } }" }
+ let(:haml) { ".data-form{ data: { url: #{link_pattern}('wrong') } }" }
it { is_expected.to report_lint }
end
context 'when the second link is invalid' do
- let(:haml) { ".data-form{ data: { url: #{link_pattern}('index.md'), wrong_url: #{link_pattern}('wrong.md') } }" }
+ let(:haml) { ".data-form{ data: { url: #{link_pattern}('index'), wrong_url: #{link_pattern}('wrong') } }" }
it { is_expected.to report_lint }
end
diff --git a/spec/helpers/access_tokens_helper_spec.rb b/spec/helpers/access_tokens_helper_spec.rb
index a466b2a0d3b..9b47f23f514 100644
--- a/spec/helpers/access_tokens_helper_spec.rb
+++ b/spec/helpers/access_tokens_helper_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe AccessTokensHelper do
where(:prefix, :description_location) do
:personal_access_token | [:doorkeeper, :scope_desc]
:project_access_token | [:doorkeeper, :project_access_token_scope_desc]
+ :group_access_token | [:doorkeeper, :group_access_token_scope_desc]
end
with_them do
diff --git a/spec/helpers/appearances_helper_spec.rb b/spec/helpers/appearances_helper_spec.rb
index 4a32c586315..376db6412e8 100644
--- a/spec/helpers/appearances_helper_spec.rb
+++ b/spec/helpers/appearances_helper_spec.rb
@@ -152,7 +152,7 @@ RSpec.describe AppearancesHelper do
let!(:appearance) { create(:appearance, :with_logo) }
it 'returns a path' do
- expect(helper.brand_image).to match(%r(img .* data-src="/uploads/-/system/appearance/.*png))
+ expect(helper.brand_image).to match(%r{img .* data-src="/uploads/-/system/appearance/.*png})
end
context 'when there is no associated upload' do
@@ -163,14 +163,14 @@ RSpec.describe AppearancesHelper do
end
it 'falls back to using the original path' do
- expect(helper.brand_image).to match(%r(img .* data-src="/uploads/-/system/appearance/.*png))
+ expect(helper.brand_image).to match(%r{img .* data-src="/uploads/-/system/appearance/.*png})
end
end
end
context 'when there is no logo' do
it 'returns path of GitLab logo' do
- expect(helper.brand_image).to match(%r(img .* data-src="#{gitlab_logo}))
+ expect(helper.brand_image).to match(%r{img .* data-src="#{gitlab_logo}})
end
end
@@ -178,13 +178,13 @@ RSpec.describe AppearancesHelper do
let!(:appearance) { create(:appearance, title: 'My title') }
it 'returns the title' do
- expect(helper.brand_image).to match(%r(img alt="My title"))
+ expect(helper.brand_image).to match(%r{img alt="My title"})
end
end
context 'when there is no title' do
it 'returns the default title' do
- expect(helper.brand_image).to match(%r(img alt="GitLab))
+ expect(helper.brand_image).to match(%r{img alt="GitLab})
end
end
end
@@ -194,7 +194,7 @@ RSpec.describe AppearancesHelper do
let!(:appearance) { create(:appearance, :with_logo) }
it 'returns path of custom logo' do
- expect(helper.brand_image_path).to match(%r(/uploads/-/system/appearance/.*/dk.png))
+ expect(helper.brand_image_path).to match(%r{/uploads/-/system/appearance/.*/dk.png})
end
end
@@ -240,7 +240,7 @@ RSpec.describe AppearancesHelper do
let!(:appearance) { create(:appearance, :with_header_logo) }
it 'renders image tag' do
- expect(helper).to receive(:image_tag).with(appearance.header_logo_path, class: 'brand-header-logo')
+ expect(helper).to receive(:image_tag).with(appearance.header_logo_path, class: 'brand-header-logo', alt: '')
subject
end
diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb
index 757f832faa4..7cf64c6e049 100644
--- a/spec/helpers/application_helper_spec.rb
+++ b/spec/helpers/application_helper_spec.rb
@@ -922,14 +922,14 @@ RSpec.describe ApplicationHelper do
context 'when resource is an issue' do
let_it_be(:resource) { build(:issue) }
- let(:expected_title) { 'This issue is hidden because its author has been banned' }
+ let(:expected_title) { 'This issue is hidden because its author has been banned.' }
it_behaves_like 'returns icon with tooltip'
end
context 'when resource is a merge request' do
let_it_be(:resource) { build(:merge_request) }
- let(:expected_title) { 'This merge request is hidden because its author has been banned' }
+ let(:expected_title) { 'This merge request is hidden because its author has been banned.' }
it_behaves_like 'returns icon with tooltip'
end
diff --git a/spec/helpers/application_settings_helper_spec.rb b/spec/helpers/application_settings_helper_spec.rb
index 9d591164547..5dc75a60a6e 100644
--- a/spec/helpers/application_settings_helper_spec.rb
+++ b/spec/helpers/application_settings_helper_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe ApplicationSettingsHelper do
describe '.visible_attributes' do
it 'contains tracking parameters' do
- expect(helper.visible_attributes).to include(*%i(snowplow_collector_hostname snowplow_cookie_domain snowplow_enabled snowplow_app_id))
+ expect(helper.visible_attributes).to include(*%i[snowplow_collector_hostname snowplow_cookie_domain snowplow_enabled snowplow_app_id])
end
it 'contains :deactivate_dormant_users' do
@@ -60,16 +60,16 @@ RSpec.describe ApplicationSettingsHelper do
it 'contains rate limit parameters' do
expect(helper.visible_attributes).to include(
- *%i(
+ *%i[
issues_create_limit notes_create_limit project_export_limit
project_download_export_limit project_export_limit project_import_limit
raw_blob_request_limit group_export_limit group_download_export_limit
group_import_limit users_get_by_id_limit search_rate_limit search_rate_limit_unauthenticated
- ))
+ ])
end
it 'contains GitLab for Slack app parameters' do
- params = %i(slack_app_enabled slack_app_id slack_app_secret slack_app_signing_secret slack_app_verification_token)
+ params = %i[slack_app_enabled slack_app_id slack_app_secret slack_app_signing_secret slack_app_verification_token]
expect(helper.visible_attributes).to include(*params)
end
@@ -306,7 +306,7 @@ RSpec.describe ApplicationSettingsHelper do
describe '#sidekiq_job_limiter_modes_for_select' do
subject { helper.sidekiq_job_limiter_modes_for_select }
- it { is_expected.to eq([%w(Track track), %w(Compress compress)]) }
+ it { is_expected.to eq([%w[Track track], %w[Compress compress]]) }
end
describe '#instance_clusters_enabled?', :request_store do
diff --git a/spec/helpers/auth_helper_spec.rb b/spec/helpers/auth_helper_spec.rb
index 4b0b44d1325..40798b4c038 100644
--- a/spec/helpers/auth_helper_spec.rb
+++ b/spec/helpers/auth_helper_spec.rb
@@ -35,12 +35,12 @@ RSpec.describe AuthHelper do
describe "form_based_providers" do
it 'includes LDAP providers' do
allow(helper).to receive(:auth_providers) { [:twitter, :ldapmain] }
- expect(helper.form_based_providers).to eq %i(ldapmain)
+ expect(helper.form_based_providers).to eq %i[ldapmain]
end
it 'includes crowd provider' do
allow(helper).to receive(:auth_providers) { [:twitter, :crowd] }
- expect(helper.form_based_providers).to eq %i(crowd)
+ expect(helper.form_based_providers).to eq %i[crowd]
end
end
@@ -101,15 +101,15 @@ RSpec.describe AuthHelper do
describe 'popular_enabled_button_based_providers' do
it 'returns the intersection set of popular & enabled providers', :aggregate_failures do
- allow(helper).to receive(:enabled_button_based_providers) { %w(twitter github google_oauth2) }
+ allow(helper).to receive(:enabled_button_based_providers) { %w[twitter github google_oauth2] }
- expect(helper.popular_enabled_button_based_providers).to eq(%w(github google_oauth2))
+ expect(helper.popular_enabled_button_based_providers).to eq(%w[github google_oauth2])
- allow(helper).to receive(:enabled_button_based_providers) { %w(google_oauth2 bitbucket) }
+ allow(helper).to receive(:enabled_button_based_providers) { %w[google_oauth2 bitbucket] }
- expect(helper.popular_enabled_button_based_providers).to eq(%w(google_oauth2))
+ expect(helper.popular_enabled_button_based_providers).to eq(%w[google_oauth2])
- allow(helper).to receive(:enabled_button_based_providers) { %w(bitbucket) }
+ allow(helper).to receive(:enabled_button_based_providers) { %w[bitbucket] }
expect(helper.popular_enabled_button_based_providers).to be_empty
end
@@ -129,7 +129,7 @@ RSpec.describe AuthHelper do
context 'all the button based providers are disabled via application_setting' do
it 'returns false' do
stub_application_setting(
- disabled_oauth_sign_in_sources: %w(github twitter)
+ disabled_oauth_sign_in_sources: %w[github twitter]
)
expect(helper.button_based_providers_enabled?).to be false
@@ -277,86 +277,6 @@ RSpec.describe AuthHelper do
end
end
- describe '#google_tag_manager_enabled?' do
- let(:is_gitlab_com) { true }
- let(:user) { nil }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
- allow(helper).to receive(:current_user).and_return(user)
- end
-
- subject(:google_tag_manager_enabled) { helper.google_tag_manager_enabled? }
-
- context 'when not on gitlab.com' do
- let(:is_gitlab_com) { false }
-
- it { is_expected.to eq(false) }
- end
-
- context 'regular and nonce versions' do
- using RSpec::Parameterized::TableSyntax
-
- where(:gtm_nonce_enabled, :gtm_key) do
- false | 'google_tag_manager_id'
- true | 'google_tag_manager_nonce_id'
- end
-
- with_them do
- before do
- stub_feature_flags(gtm_nonce: gtm_nonce_enabled)
- stub_config(extra: { gtm_key => 'key' })
- end
-
- context 'on gitlab.com and a key set without a current user' do
- it { is_expected.to be_truthy }
- end
-
- context 'when no key is set' do
- before do
- stub_config(extra: {})
- end
-
- it { is_expected.to eq(false) }
- end
- end
- end
- end
-
- describe '#google_tag_manager_id' do
- subject(:google_tag_manager_id) { helper.google_tag_manager_id }
-
- before do
- stub_config(extra: { 'google_tag_manager_nonce_id': 'nonce', 'google_tag_manager_id': 'gtm' })
- end
-
- context 'when google tag manager is disabled' do
- before do
- allow(helper).to receive(:google_tag_manager_enabled?).and_return(false)
- end
-
- it { is_expected.to be_falsey }
- end
-
- context 'when google tag manager is enabled' do
- before do
- allow(helper).to receive(:google_tag_manager_enabled?).and_return(true)
- end
-
- context 'when nonce feature flag is enabled' do
- it { is_expected.to eq('nonce') }
- end
-
- context 'when nonce feature flag is disabled' do
- before do
- stub_feature_flags(gtm_nonce: false)
- end
-
- it { is_expected.to eq('gtm') }
- end
- end
- end
-
describe '#auth_app_owner_text' do
shared_examples 'generates text with the correct info' do
it 'includes the name of the application owner' do
diff --git a/spec/helpers/blob_helper_spec.rb b/spec/helpers/blob_helper_spec.rb
index 6d97afd4c78..e832fa2718a 100644
--- a/spec/helpers/blob_helper_spec.rb
+++ b/spec/helpers/blob_helper_spec.rb
@@ -469,61 +469,6 @@ RSpec.describe BlobHelper do
end
end
- describe '#editing_ci_config?' do
- let(:project) { build(:project) }
-
- subject { helper.editing_ci_config? }
-
- before do
- assign(:project, project)
- assign(:path, path)
- end
-
- context 'when path is nil' do
- let(:path) { nil }
-
- it { is_expected.to be_falsey }
- end
-
- context 'when path is not a ci file' do
- let(:path) { 'some-file.txt' }
-
- it { is_expected.to be_falsey }
- end
-
- context 'when path ends is gitlab-ci.yml' do
- let(:path) { '.gitlab-ci.yml' }
-
- it { is_expected.to be_truthy }
- end
-
- context 'when path ends with gitlab-ci.yml' do
- let(:path) { 'template.gitlab-ci.yml' }
-
- it { is_expected.to be_truthy }
- end
-
- context 'with custom ci paths' do
- let(:path) { 'path/to/ci.yaml' }
-
- before do
- project.ci_config_path = 'path/to/ci.yaml'
- end
-
- it { is_expected.to be_truthy }
- end
-
- context 'with custom ci config and path' do
- let(:path) { 'path/to/template.gitlab-ci.yml' }
-
- before do
- project.ci_config_path = 'ci/path/.gitlab-ci.yml@another-group/another-project'
- end
-
- it { is_expected.to be_truthy }
- end
- end
-
describe '#vue_blob_app_data' do
let(:blob) { fake_blob(path: 'file.md', size: 2.megabytes) }
let(:project) { build_stubbed(:project) }
diff --git a/spec/helpers/breadcrumbs_helper_spec.rb b/spec/helpers/breadcrumbs_helper_spec.rb
index 8e2a684656b..43a30f59e50 100644
--- a/spec/helpers/breadcrumbs_helper_spec.rb
+++ b/spec/helpers/breadcrumbs_helper_spec.rb
@@ -54,8 +54,8 @@ RSpec.describe BreadcrumbsHelper do
describe '#schema_breadcrumb_json' do
let(:elements) do
[
- %w(element1 http://test.host/link1),
- %w(element2 http://test.host/link2)
+ %w[element1 http://test.host/link1],
+ %w[element2 http://test.host/link2]
]
end
@@ -89,8 +89,8 @@ RSpec.describe BreadcrumbsHelper do
context 'when extra breadcrumb element is added' do
let(:extra_elements) do
[
- %w(extra_element1 http://test.host/extra_link1),
- %w(extra_element2 http://test.host/extra_link2)
+ %w[extra_element1 http://test.host/extra_link1],
+ %w[extra_element2 http://test.host/extra_link2]
]
end
diff --git a/spec/helpers/ci/builds_helper_spec.rb b/spec/helpers/ci/builds_helper_spec.rb
index eabd40f3dd4..dcb775fb16d 100644
--- a/spec/helpers/ci/builds_helper_spec.rb
+++ b/spec/helpers/ci/builds_helper_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::BuildsHelper do
+RSpec.describe Ci::BuildsHelper, feature_category: :continuous_integration do
describe '#sidebar_build_class' do
using RSpec::Parameterized::TableSyntax
@@ -25,22 +25,6 @@ RSpec.describe Ci::BuildsHelper do
end
end
- describe '#javascript_build_options' do
- subject { helper.javascript_build_options }
-
- it 'returns build options' do
- project = assign_project
- ci_build = assign_build
-
- expect(subject).to eq({
- page_path: project_job_path(project, ci_build),
- build_status: ci_build.status,
- build_stage: ci_build.stage_name,
- log_state: ''
- })
- end
- end
-
describe '#build_failed_issue_options' do
subject { helper.build_failed_issue_options }
diff --git a/spec/helpers/ci/jobs_helper_spec.rb b/spec/helpers/ci/jobs_helper_spec.rb
index 30cad66af04..884fe7a018e 100644
--- a/spec/helpers/ci/jobs_helper_spec.rb
+++ b/spec/helpers/ci/jobs_helper_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::JobsHelper do
+RSpec.describe Ci::JobsHelper, feature_category: :continuous_integration do
describe 'job helper functions' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:job) { create(:ci_build, project: project) }
@@ -20,20 +20,13 @@ RSpec.describe Ci::JobsHelper do
it 'returns jobs data' do
expect(helper.jobs_data(project, job)).to include({
"endpoint" => "/#{project.full_path}/-/jobs/#{job.id}.json",
+ "page_path" => "/#{project.full_path}/-/jobs/#{job.id}",
"project_path" => project.full_path,
"artifact_help_url" => "/help/user/gitlab_com/index.md#gitlab-cicd",
"deployment_help_url" => "/help/user/project/clusters/deploy_to_cluster.md#troubleshooting",
"runner_settings_url" => "/#{project.full_path}/-/runners#js-runners-settings",
- "page_path" => "/#{project.full_path}/-/jobs/#{job.id}",
"build_status" => "pending",
"build_stage" => "test",
- "log_state" => "",
- "build_options" => {
- build_stage: "test",
- build_status: "pending",
- log_state: "",
- page_path: "/#{project.full_path}/-/jobs/#{job.id}"
- },
"retry_outdated_job_docs_url" => "/help/ci/pipelines/settings#retry-outdated-jobs"
})
end
diff --git a/spec/helpers/ci/pipelines_helper_spec.rb b/spec/helpers/ci/pipelines_helper_spec.rb
index 00bc38dbd94..477c07bf3e3 100644
--- a/spec/helpers/ci/pipelines_helper_spec.rb
+++ b/spec/helpers/ci/pipelines_helper_spec.rb
@@ -88,10 +88,7 @@ RSpec.describe Ci::PipelinesHelper do
:params,
:artifacts_endpoint,
:artifacts_endpoint_placeholder,
- :pipeline_schedule_url,
- :empty_state_svg_path,
- :error_state_svg_path,
- :no_pipelines_svg_path,
+ :pipeline_schedules_path,
:can_create_pipeline,
:new_pipeline_path,
:ci_lint_path,
@@ -110,7 +107,7 @@ RSpec.describe Ci::PipelinesHelper do
before do
allow(helper).to receive(:current_user).and_return(user)
project.add_developer(user)
- create(:project_setting, project: project, target_platforms: %w(ios))
+ create(:project_setting, project: project, target_platforms: %w[ios])
end
describe 'the `registration_token` attribute' do
diff --git a/spec/helpers/ci/status_helper_spec.rb b/spec/helpers/ci/status_helper_spec.rb
index 66c821df8f1..17fe474b360 100644
--- a/spec/helpers/ci/status_helper_spec.rb
+++ b/spec/helpers/ci/status_helper_spec.rb
@@ -55,10 +55,6 @@ RSpec.describe Ci::StatusHelper do
is_expected.to include("href=\"/commit-path\"")
end
- it "does not contain a span element" do
- is_expected.not_to include("<span")
- end
-
it "has 'Pipeline' as the status type in the title" do
is_expected.to include("title=\"Pipeline: passed\"")
end
@@ -88,7 +84,8 @@ RSpec.describe Ci::StatusHelper do
subject { helper.render_status_with_link("success", cssclass: "extra-class") }
it "has appended extra class to icon classes" do
- is_expected.to include("class=\"ci-status-link ci-status-icon-success d-inline-flex extra-class\"")
+ is_expected.to include('class="ci-status-link ci-status-icon-success d-inline-flex ' \
+ 'gl-line-height-1 extra-class"')
end
end
@@ -107,5 +104,44 @@ RSpec.describe Ci::StatusHelper do
is_expected.to include("<svg class=\"s24\"")
end
end
+
+ context "when status is success-with-warnings" do
+ subject { helper.render_status_with_link("success-with-warnings") }
+
+ it "renders warning variant of gl-badge" do
+ is_expected.to include('gl-badge badge badge-pill badge-warning')
+ end
+ end
+
+ context "when status is manual" do
+ subject { helper.render_status_with_link("manual") }
+
+ it "renders neutral variant of gl-badge" do
+ is_expected.to include('gl-badge badge badge-pill badge-neutral')
+ end
+ end
+ end
+
+ describe '#badge_variant' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:status, :expected_badge_variant_class) do
+ 'success' | 'badge-success'
+ 'success-with-warnings' | 'badge-warning'
+ 'pending' | 'badge-warning'
+ 'failed' | 'badge-danger'
+ 'running' | 'badge-info'
+ 'canceled' | 'badge-neutral'
+ 'manual' | 'badge-neutral'
+ 'other-status' | 'badge-muted'
+ end
+
+ with_them do
+ subject { helper.render_status_with_link(status) }
+
+ it 'uses the correct badge variant classes for gl-badge' do
+ is_expected.to include("gl-badge badge badge-pill #{expected_badge_variant_class}")
+ end
+ end
end
end
diff --git a/spec/helpers/ci/triggers_helper_spec.rb b/spec/helpers/ci/triggers_helper_spec.rb
index 5e43dbfdd5c..63755257215 100644
--- a/spec/helpers/ci/triggers_helper_spec.rb
+++ b/spec/helpers/ci/triggers_helper_spec.rb
@@ -21,11 +21,11 @@ RSpec.describe Ci::TriggersHelper do
end
end
- describe '.service_trigger_url' do
- subject { helper.service_trigger_url(service) }
+ describe '.integration_trigger_url' do
+ subject { helper.integration_trigger_url(integration) }
- let(:service) { double(project_id: 1, to_param: 'param') }
+ let(:integration) { double(project_id: 1, to_param: 'param') }
- specify { expect(subject).to eq "#{Settings.gitlab.url}/api/v4/projects/1/services/param/trigger" }
+ specify { expect(subject).to eq "#{Settings.gitlab.url}/api/v4/projects/1/integrations/param/trigger" }
end
end
diff --git a/spec/helpers/clusters_helper_spec.rb b/spec/helpers/clusters_helper_spec.rb
index a9fbdfbe3ca..f3d6b5bdda6 100644
--- a/spec/helpers/clusters_helper_spec.rb
+++ b/spec/helpers/clusters_helper_spec.rb
@@ -48,9 +48,9 @@ RSpec.describe ClustersHelper do
end
it 'generates svg image data', :aggregate_failures do
- expect(subject.dig(:img_tags, :aws, :path)).to match(%r(/illustrations/logos/amazon_eks|svg))
- expect(subject.dig(:img_tags, :default, :path)).to match(%r(/illustrations/logos/kubernetes|svg))
- expect(subject.dig(:img_tags, :gcp, :path)).to match(%r(/illustrations/logos/google_gke|svg))
+ expect(subject.dig(:img_tags, :aws, :path)).to match(%r{/illustrations/logos/amazon_eks|svg})
+ expect(subject.dig(:img_tags, :default, :path)).to match(%r{/illustrations/logos/kubernetes|svg})
+ expect(subject.dig(:img_tags, :gcp, :path)).to match(%r{/illustrations/logos/google_gke|svg})
expect(subject.dig(:img_tags, :aws, :text)).to eq('Amazon EKS')
expect(subject.dig(:img_tags, :default, :text)).to eq('Kubernetes Cluster')
@@ -62,8 +62,8 @@ RSpec.describe ClustersHelper do
end
it 'displays empty image path' do
- expect(subject[:clusters_empty_state_image]).to match(%r(/illustrations/empty-state/empty-state-clusters|svg))
- expect(subject[:empty_state_image]).to match(%r(/illustrations/empty-state/empty-state-agents|svg))
+ expect(subject[:clusters_empty_state_image]).to match(%r{/illustrations/empty-state/empty-state-clusters|svg})
+ expect(subject[:empty_state_image]).to match(%r{/illustrations/empty-state/empty-state-agents|svg})
end
it 'displays add cluster using certificate path' do
diff --git a/spec/helpers/diff_helper_spec.rb b/spec/helpers/diff_helper_spec.rb
index 2318bbf861a..a6db9e77c61 100644
--- a/spec/helpers/diff_helper_spec.rb
+++ b/spec/helpers/diff_helper_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe DiffHelper do
+RSpec.describe DiffHelper, feature_category: :code_review_workflow do
include RepoHelpers
let(:project) { create(:project, :repository) }
@@ -196,26 +196,26 @@ RSpec.describe DiffHelper do
end
describe "#mark_inline_diffs" do
- let(:old_line) { %{abc 'def'} }
- let(:new_line) { %{abc "def"} }
+ let(:old_line) { %(abc 'def') }
+ let(:new_line) { %(abc "def") }
it "returns strings with marked inline diffs" do
marked_old_line, marked_new_line = mark_inline_diffs(old_line, new_line)
- expect(marked_old_line).to eq(%q{abc <span class="idiff left deletion">&#39;</span>def<span class="idiff right deletion">&#39;</span>})
+ expect(marked_old_line).to eq(%q(abc <span class="idiff left deletion">&#39;</span>def<span class="idiff right deletion">&#39;</span>))
expect(marked_old_line).to be_html_safe
- expect(marked_new_line).to eq(%q{abc <span class="idiff left addition">&quot;</span>def<span class="idiff right addition">&quot;</span>})
+ expect(marked_new_line).to eq(%q(abc <span class="idiff left addition">&quot;</span>def<span class="idiff right addition">&quot;</span>))
expect(marked_new_line).to be_html_safe
end
context 'when given HTML' do
it 'sanitizes it' do
- old_line = %{test.txt}
+ old_line = %(test.txt)
new_line = %{<img src=x onerror=alert(document.domain)>}
marked_old_line, marked_new_line = mark_inline_diffs(old_line, new_line)
- expect(marked_old_line).to eq(%q{<span class="idiff left right deletion">test.txt</span>})
+ expect(marked_old_line).to eq(%q(<span class="idiff left right deletion">test.txt</span>))
expect(marked_old_line).to be_html_safe
expect(marked_new_line).to eq(%q{<span class="idiff left right addition">&lt;img src=x onerror=alert(document.domain)&gt;</span>})
expect(marked_new_line).to be_html_safe
@@ -637,4 +637,27 @@ RSpec.describe DiffHelper do
end
end
end
+
+ describe '#submodule_diff_compare_link' do
+ context 'when the diff includes submodule changes' do
+ it 'generates a link to compare a diff for a submodule' do
+ allow(helper).to receive(:submodule_links).and_return(
+ Gitlab::SubmoduleLinks::Urls.new(nil, nil, '/comparison-path')
+ )
+
+ output = helper.submodule_diff_compare_link(diff_file)
+ expect(output).to match(%r{href="/comparison-path"})
+ expect(output).to match(
+ %r{Compare <span class="commit-sha">5b812ff1</span>...<span class="commit-sha">7e3e39eb</span>}
+ )
+ end
+ end
+
+ context 'when the diff does not include submodule changes' do
+ it 'returns an empty string' do
+ output = helper.submodule_diff_compare_link(diff_file)
+ expect(output).to eq('')
+ end
+ end
+ end
end
diff --git a/spec/helpers/emails_helper_spec.rb b/spec/helpers/emails_helper_spec.rb
index dbc6bd2eb28..ee623cea664 100644
--- a/spec/helpers/emails_helper_spec.rb
+++ b/spec/helpers/emails_helper_spec.rb
@@ -237,7 +237,7 @@ RSpec.describe EmailsHelper do
it 'returns the brand header logo' do
expect(header_logo).to eq(
- %{<img style="height: 50px" src="/uploads/-/system/appearance/header_logo/#{appearance.id}/dk.png" />}
+ %(<img style="height: 50px" src="/uploads/-/system/appearance/header_logo/#{appearance.id}/dk.png" />)
)
end
@@ -326,8 +326,8 @@ RSpec.describe EmailsHelper do
create :appearance, header_message: 'Foo', footer_message: 'Bar', email_header_and_footer_enabled: true
aggregate_failures do
- expect(html_header_message).to eq(%{<div class="header-message" style=""><p>Foo</p></div>})
- expect(html_footer_message).to eq(%{<div class="footer-message" style=""><p>Bar</p></div>})
+ expect(html_header_message).to eq(%(<div class="header-message" style=""><p>Foo</p></div>))
+ expect(html_footer_message).to eq(%(<div class="footer-message" style=""><p>Bar</p></div>))
expect(text_header_message).to eq('Foo')
expect(text_footer_message).to eq('Bar')
end
diff --git a/spec/helpers/form_helper_spec.rb b/spec/helpers/form_helper_spec.rb
index 83b08e5fcec..0db48dfc28e 100644
--- a/spec/helpers/form_helper_spec.rb
+++ b/spec/helpers/form_helper_spec.rb
@@ -67,11 +67,10 @@ RSpec.describe FormHelper do
it 'renders an appropriately styled alert div' do
model = double(errors: errors_stub('Error 1'))
+ alert_classes = "gl-alert gl-mb-5 gl-alert-danger gl-alert-not-dismissible gl-alert-has-title"
expect(helper.form_errors(model))
- .to include(
- '<div class="gl-alert gl-mb-5 gl-alert-danger gl-alert-not-dismissible" id="error_explanation" role="alert">'
- )
+ .to include("<div class=\"#{alert_classes}\" id=\"error_explanation\" role=\"alert\">")
end
it 'contains a summary message' do
diff --git a/spec/helpers/groups/observability_helper_spec.rb b/spec/helpers/groups/observability_helper_spec.rb
deleted file mode 100644
index f0e6aa0998a..00000000000
--- a/spec/helpers/groups/observability_helper_spec.rb
+++ /dev/null
@@ -1,76 +0,0 @@
-# frozen_string_literal: true
-
-require "spec_helper"
-
-RSpec.describe Groups::ObservabilityHelper do
- let(:group) { build_stubbed(:group) }
-
- describe '#observability_iframe_src' do
- before do
- allow(Gitlab::Observability).to receive(:build_full_url).and_return('full-url')
- end
-
- it 'returns the iframe src for action: dashboards' do
- allow(helper).to receive(:params).and_return({ action: 'dashboards', observability_path: '/foo?bar=foobar' })
- expect(helper.observability_iframe_src(group)).to eq('full-url')
- expect(Gitlab::Observability).to have_received(:build_full_url).with(group, '/foo?bar=foobar', '/')
- end
-
- it 'returns the iframe src for action: manage' do
- allow(helper).to receive(:params).and_return({ action: 'manage', observability_path: '/foo?bar=foobar' })
- expect(helper.observability_iframe_src(group)).to eq('full-url')
- expect(Gitlab::Observability).to have_received(:build_full_url).with(group, '/foo?bar=foobar', '/dashboards')
- end
-
- it 'returns the iframe src for action: explore' do
- allow(helper).to receive(:params).and_return({ action: 'explore', observability_path: '/foo?bar=foobar' })
- expect(helper.observability_iframe_src(group)).to eq('full-url')
- expect(Gitlab::Observability).to have_received(:build_full_url).with(group, '/foo?bar=foobar', '/explore')
- end
-
- it 'returns the iframe src for action: datasources' do
- allow(helper).to receive(:params).and_return({ action: 'datasources', observability_path: '/foo?bar=foobar' })
- expect(helper.observability_iframe_src(group)).to eq('full-url')
- expect(Gitlab::Observability).to have_received(:build_full_url).with(group, '/foo?bar=foobar', '/datasources')
- end
-
- it 'returns the iframe src when action is not recognised' do
- allow(helper).to receive(:params).and_return({ action: 'unrecognised', observability_path: '/foo?bar=foobar' })
- expect(helper.observability_iframe_src(group)).to eq('full-url')
- expect(Gitlab::Observability).to have_received(:build_full_url).with(group, '/foo?bar=foobar', '/')
- end
-
- it 'returns the iframe src when observability_path is missing' do
- allow(helper).to receive(:params).and_return({ action: 'dashboards' })
- expect(helper.observability_iframe_src(group)).to eq('full-url')
- expect(Gitlab::Observability).to have_received(:build_full_url).with(group, nil, '/')
- end
- end
-
- describe '#observability_page_title' do
- it 'returns the title for action: dashboards' do
- allow(helper).to receive(:params).and_return({ action: 'dashboards' })
- expect(helper.observability_page_title).to eq("Dashboards")
- end
-
- it 'returns the title for action: manage' do
- allow(helper).to receive(:params).and_return({ action: 'manage' })
- expect(helper.observability_page_title).to eq("Manage dashboards")
- end
-
- it 'returns the title for action: explore' do
- allow(helper).to receive(:params).and_return({ action: 'explore' })
- expect(helper.observability_page_title).to eq("Explore telemetry data")
- end
-
- it 'returns the title for action: datasources' do
- allow(helper).to receive(:params).and_return({ action: 'datasources' })
- expect(helper.observability_page_title).to eq("Data sources")
- end
-
- it 'returns the default title for unknown action' do
- allow(helper).to receive(:params).and_return({ action: 'unknown' })
- expect(helper.observability_page_title).to eq("Dashboards")
- end
- end
-end
diff --git a/spec/helpers/groups_helper_spec.rb b/spec/helpers/groups_helper_spec.rb
index 1b5f23a5e8e..0db15541b99 100644
--- a/spec/helpers/groups_helper_spec.rb
+++ b/spec/helpers/groups_helper_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GroupsHelper do
+RSpec.describe GroupsHelper, feature_category: :groups_and_projects do
include ApplicationHelper
include AvatarsHelper
@@ -97,23 +97,11 @@ RSpec.describe GroupsHelper do
end
end
- context 'recursive' do
- before do
- stub_feature_flags(use_traversal_ids: false)
- end
-
- include_examples 'correct ancestor order'
+ before do
+ very_deep_nested_group.reload # make sure traversal_ids are reloaded
end
- context 'linear' do
- before do
- stub_feature_flags(use_traversal_ids: true)
-
- very_deep_nested_group.reload # make sure traversal_ids are reloaded
- end
-
- include_examples 'correct ancestor order'
- end
+ include_examples 'correct ancestor order'
end
it 'enqueues the elements in the breadcrumb schema list' do
@@ -269,21 +257,7 @@ RSpec.describe GroupsHelper do
end
end
- context 'recursive' do
- before do
- stub_feature_flags(use_traversal_ids: false)
- end
-
- include_examples 'correct ancestor order'
- end
-
- context 'linear' do
- before do
- stub_feature_flags(use_traversal_ids: true)
- end
-
- include_examples 'correct ancestor order'
- end
+ include_examples 'correct ancestor order'
end
end
@@ -524,23 +498,58 @@ RSpec.describe GroupsHelper do
end
end
+ describe '#show_group_readme?' do
+ let_it_be_with_refind(:group) { create(:group, :public) }
+ let_it_be(:current_user) { nil }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(current_user)
+ end
+
+ context 'when project is public' do
+ let_it_be(:project) { create(:project, :public, :readme, group: group, path: 'gitlab-profile') }
+
+ it { expect(helper.show_group_readme?(group)).to be(true) }
+ end
+
+ context 'when project is private' do
+ let_it_be(:project) { create(:project, :private, :readme, group: group, path: 'gitlab-profile') }
+
+ context 'when user can see the project' do
+ let_it_be(:current_user) { create(:user) }
+
+ before do
+ project.add_developer(current_user)
+ end
+
+ it { expect(helper.show_group_readme?(group)).to be(true) }
+ end
+
+ it 'when user can not see the project' do
+ expect(helper.show_group_readme?(group)).to be(false)
+ end
+ end
+ end
+
describe "#enabled_git_access_protocol_options_for_group" do
- subject { helper.enabled_git_access_protocol_options_for_group }
+ let_it_be(:group) { create(:group) }
+
+ subject { helper.enabled_git_access_protocol_options_for_group(group) }
before do
- expect(::Gitlab::CurrentSettings).to receive(:enabled_git_access_protocol).and_return(instance_setting)
+ allow(::Gitlab::CurrentSettings).to receive(:enabled_git_access_protocol).and_return(instance_setting)
end
context "instance setting is nil" do
let(:instance_setting) { nil }
- it { is_expected.to contain_exactly([_("Both SSH and HTTP(S)"), "all"], [_("Only SSH"), "ssh"], [_("Only HTTP(S)"), "http"]) }
+ it { is_expected.to include([_("Both SSH and HTTP(S)"), "all"], [_("Only SSH"), "ssh"], [_("Only HTTP(S)"), "http"]) }
end
context "instance setting is blank" do
- let(:instance_setting) { nil }
+ let(:instance_setting) { '' }
- it { is_expected.to contain_exactly([_("Both SSH and HTTP(S)"), "all"], [_("Only SSH"), "ssh"], [_("Only HTTP(S)"), "http"]) }
+ it { is_expected.to include([_("Both SSH and HTTP(S)"), "all"], [_("Only SSH"), "ssh"], [_("Only HTTP(S)"), "http"]) }
end
context "instance setting is ssh" do
@@ -555,4 +564,44 @@ RSpec.describe GroupsHelper do
it { is_expected.to contain_exactly([_("Only HTTP(S)"), "http"]) }
end
end
+
+ describe '#new_custom_emoji_path' do
+ subject { helper.new_custom_emoji_path(group) }
+
+ let_it_be(:group) { create(:group) }
+
+ context 'with feature flag disabled' do
+ before do
+ stub_feature_flags(custom_emoji: false)
+ end
+
+ it { is_expected.to eq(nil) }
+ end
+
+ context 'with feature flag enabled' do
+ context 'with nil group' do
+ let(:group) { nil }
+
+ it { is_expected.to eq(nil) }
+ end
+
+ context 'with current_user who has no permissions' do
+ before do
+ allow(helper).to receive(:current_user).and_return(create(:user))
+ end
+
+ it { is_expected.to eq(nil) }
+ end
+
+ context 'with current_user who has permissions' do
+ before do
+ user = create(:user)
+ group.add_owner(user)
+ allow(helper).to receive(:current_user).and_return(user)
+ end
+
+ it { is_expected.to eq(new_group_custom_emoji_path(group)) }
+ end
+ end
+ end
end
diff --git a/spec/helpers/ide_helper_spec.rb b/spec/helpers/ide_helper_spec.rb
index 7f657caa986..47500b8e21e 100644
--- a/spec/helpers/ide_helper_spec.rb
+++ b/spec/helpers/ide_helper_spec.rb
@@ -103,10 +103,7 @@ RSpec.describe IdeHelper, feature_category: :web_ide do
'new-web-ide-help-page-path' =>
help_page_path('user/project/web_ide/index.md', anchor: 'vscode-reimplementation'),
'csp-nonce' => 'test-csp-nonce',
- 'ide-remote-path' => ide_remote_path(remote_host: ':remote_host', remote_path: ':remote_path'),
- 'editor-font-family' => 'GitLab Mono',
- 'editor-font-format' => 'woff2',
- 'editor-font-src-url' => a_string_matching(%r{gitlab-mono/GitLabMono})
+ 'ide-remote-path' => ide_remote_path(remote_host: ':remote_host', remote_path: ':remote_path')
}
end
@@ -119,6 +116,34 @@ RSpec.describe IdeHelper, feature_category: :web_ide do
.to include(base_data)
end
+ it 'includes editor font configuration' do
+ ide_data = helper.ide_data(project: nil, fork_info: fork_info, params: params)
+ editor_font = ::Gitlab::Json.parse(ide_data.fetch('editor-font'), symbolize_names: true)
+
+ expect(editor_font).to include({
+ fallback_font_family: 'monospace',
+ font_faces: [
+ {
+ family: 'GitLab Mono',
+ display: 'block',
+ src: [{
+ url: a_string_matching(%r{gitlab-mono/GitLabMono-[^I]}),
+ format: 'woff2'
+ }]
+ },
+ {
+ family: 'GitLab Mono',
+ display: 'block',
+ style: 'italic',
+ src: [{
+ url: a_string_matching(%r{gitlab-mono/GitLabMono-Italic}),
+ format: 'woff2'
+ }]
+ }
+ ]
+ })
+ end
+
it 'does not use new web ide if feature flag is disabled' do
stub_feature_flags(vscode_web_ide: false)
diff --git a/spec/helpers/issuables_description_templates_helper_spec.rb b/spec/helpers/issuables_description_templates_helper_spec.rb
index b32a99fe989..4634ff83469 100644
--- a/spec/helpers/issuables_description_templates_helper_spec.rb
+++ b/spec/helpers/issuables_description_templates_helper_spec.rb
@@ -85,7 +85,7 @@ RSpec.describe IssuablesDescriptionTemplatesHelper, :clean_gitlab_redis_cache do
end
describe '#selected_template_name' do
- let(:template_names) { %w(another_issue_template custom_issue_template) }
+ let(:template_names) { %w[another_issue_template custom_issue_template] }
context 'when no issuable_template parameter is provided' do
it 'does not select a template' do
@@ -118,7 +118,7 @@ RSpec.describe IssuablesDescriptionTemplatesHelper, :clean_gitlab_redis_cache do
describe '#default_template_name' do
context 'when a default template is available' do
- let(:template_names) { %w(another_issue_template deFault) }
+ let(:template_names) { %w[another_issue_template deFault] }
it 'returns the default template' do
issue = build(:issue)
@@ -140,7 +140,7 @@ RSpec.describe IssuablesDescriptionTemplatesHelper, :clean_gitlab_redis_cache do
end
context 'when there is no default template' do
- let(:template_names) { %w(another_issue_template) }
+ let(:template_names) { %w[another_issue_template] }
it 'returns nil' do
expect(helper.default_template_name(template_names, build(:issue))).to be_nil
diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb
index 9fe820ccae9..0faea5629e8 100644
--- a/spec/helpers/issuables_helper_spec.rb
+++ b/spec/helpers/issuables_helper_spec.rb
@@ -268,7 +268,6 @@ RSpec.describe IssuablesHelper, feature_category: :team_planning do
markdownPreviewPath: "/#{@project.full_path}/preview_markdown?target_id=#{issue.iid}&target_type=Issue",
markdownDocsPath: '/help/user/markdown',
lockVersion: issue.lock_version,
- state: issue.state,
issuableTemplateNamesPath: template_names_path(@project, issue),
initialTitleHtml: issue.title,
initialTitleText: issue.title,
@@ -284,7 +283,6 @@ RSpec.describe IssuablesHelper, feature_category: :team_planning do
issuableId: issue.id,
issueType: 'issue',
isHidden: false,
- sentryIssueIdentifier: nil,
zoomMeetingUrl: nil
}
@@ -384,26 +382,6 @@ RSpec.describe IssuablesHelper, feature_category: :team_planning do
end
end
- describe '#sentryIssueIdentifier' do
- let(:issue) { create(:issue, author: user) }
-
- before do
- assign(:project, issue.project)
- end
-
- it 'sets sentryIssueIdentifier to nil with no sentry issue' do
- expect(helper.issuable_initial_data(issue)[:sentryIssueIdentifier])
- .to be_nil
- end
-
- it 'sets sentryIssueIdentifier to sentry_issue_identifier' do
- sentry_issue = create(:sentry_issue, issue: issue)
-
- expect(helper.issuable_initial_data(issue)[:sentryIssueIdentifier])
- .to eq(sentry_issue.sentry_issue_identifier)
- end
- end
-
describe '#zoomMeetingUrl in issue' do
let(:issue) { create(:issue, author: user) }
@@ -568,41 +546,6 @@ RSpec.describe IssuablesHelper, feature_category: :team_planning do
end
end
- describe '#state_name_with_icon' do
- let_it_be(:project) { create(:project, :repository) }
-
- context 'for an issue' do
- let_it_be(:issue) { create(:issue, project: project) }
- let_it_be(:issue_closed) { create(:issue, :closed, project: project) }
-
- it 'returns the correct state name and icon when issue is open' do
- expect(helper.state_name_with_icon(issue)).to match_array([_('Open'), 'issues'])
- end
-
- it 'returns the correct state name and icon when issue is closed' do
- expect(helper.state_name_with_icon(issue_closed)).to match_array([_('Closed'), 'issue-closed'])
- end
- end
-
- context 'for a merge request' do
- let_it_be(:merge_request) { create(:merge_request, source_project: project) }
- let_it_be(:merge_request_merged) { create(:merge_request, :merged, source_project: project) }
- let_it_be(:merge_request_closed) { create(:merge_request, :closed, source_project: project) }
-
- it 'returns the correct state name and icon when merge request is open' do
- expect(helper.state_name_with_icon(merge_request)).to match_array([_('Open'), 'merge-request-open'])
- end
-
- it 'returns the correct state name and icon when merge request is merged' do
- expect(helper.state_name_with_icon(merge_request_merged)).to match_array([_('Merged'), 'merge'])
- end
-
- it 'returns the correct state name and icon when merge request is closed' do
- expect(helper.state_name_with_icon(merge_request_closed)).to match_array([_('Closed'), 'merge-request-close'])
- end
- end
- end
-
describe '#issuable_type_selector_data' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb
index 72fa264698d..62d94b59c2a 100644
--- a/spec/helpers/issues_helper_spec.rb
+++ b/spec/helpers/issues_helper_spec.rb
@@ -78,7 +78,7 @@ RSpec.describe IssuesHelper, feature_category: :team_planning do
describe 'awards_sort' do
it 'sorts a hash so thumbsup and thumbsdown are always on top' do
data = { 'thumbsdown' => 'some value', 'lifter' => 'some value', 'thumbsup' => 'some value' }
- expect(awards_sort(data).keys).to eq(%w(thumbsup thumbsdown lifter))
+ expect(awards_sort(data).keys).to eq(%w[thumbsup thumbsdown lifter])
end
end
diff --git a/spec/helpers/nav_helper_spec.rb b/spec/helpers/nav_helper_spec.rb
index 950d8b77d01..12ab7ca93c0 100644
--- a/spec/helpers/nav_helper_spec.rb
+++ b/spec/helpers/nav_helper_spec.rb
@@ -115,13 +115,13 @@ RSpec.describe NavHelper, feature_category: :navigation do
describe '#page_has_markdown?' do
using RSpec::Parameterized::TableSyntax
- where path: %w(
+ where path: %w[
projects/merge_requests#show
projects/merge_requests/conflicts#show
issues#show
milestones#show
issues#designs
- )
+ ]
with_them do
before do
diff --git a/spec/helpers/organizations/organization_helper_spec.rb b/spec/helpers/organizations/organization_helper_spec.rb
index ec99d928059..cf8ae358e49 100644
--- a/spec/helpers/organizations/organization_helper_spec.rb
+++ b/spec/helpers/organizations/organization_helper_spec.rb
@@ -6,12 +6,19 @@ RSpec.describe Organizations::OrganizationHelper, feature_category: :cell do
let_it_be(:organization) { build_stubbed(:organization) }
let_it_be(:new_group_path) { '/groups/new' }
let_it_be(:new_project_path) { '/projects/new' }
+ let_it_be(:organizations_empty_state_svg_path) { 'illustrations/empty-state/empty-organizations-md.svg' }
+ let_it_be(:organizations_path) { '/-/organizations/' }
+ let_it_be(:root_url) { 'http://127.0.0.1:3000/' }
let_it_be(:groups_empty_state_svg_path) { 'illustrations/empty-state/empty-groups-md.svg' }
let_it_be(:projects_empty_state_svg_path) { 'illustrations/empty-state/empty-projects-md.svg' }
before do
allow(helper).to receive(:new_group_path).and_return(new_group_path)
allow(helper).to receive(:new_project_path).and_return(new_project_path)
+ allow(helper).to receive(:image_path).with(organizations_empty_state_svg_path)
+ .and_return(organizations_empty_state_svg_path)
+ allow(helper).to receive(:organizations_path).and_return(organizations_path)
+ allow(helper).to receive(:root_url).and_return(root_url)
allow(helper).to receive(:image_path).with(groups_empty_state_svg_path).and_return(groups_empty_state_svg_path)
allow(helper).to receive(:image_path).with(projects_empty_state_svg_path).and_return(projects_empty_state_svg_path)
end
@@ -62,4 +69,26 @@ RSpec.describe Organizations::OrganizationHelper, feature_category: :cell do
)
end
end
+
+ describe '#organization_index_app_data' do
+ it 'returns expected data object' do
+ expect(helper.organization_index_app_data).to eq(
+ {
+ new_organization_url: new_organization_path,
+ organizations_empty_state_svg_path: organizations_empty_state_svg_path
+ }
+ )
+ end
+ end
+
+ describe '#organization_new_app_data' do
+ it 'returns expected json' do
+ expect(Gitlab::Json.parse(helper.organization_new_app_data)).to eq(
+ {
+ 'organizations_path' => organizations_path,
+ 'root_url' => root_url
+ }
+ )
+ end
+ end
end
diff --git a/spec/helpers/page_layout_helper_spec.rb b/spec/helpers/page_layout_helper_spec.rb
index 43500d98591..a4f10933be6 100644
--- a/spec/helpers/page_layout_helper_spec.rb
+++ b/spec/helpers/page_layout_helper_spec.rb
@@ -55,7 +55,7 @@ RSpec.describe PageLayoutHelper do
expect(helper.page_image).to match_asset_path 'assets/twitter_card.jpg'
end
- %w(project user group).each do |type|
+ %w[project user group].each do |type|
context "with @#{type} assigned" do
let(:object) { build(type, trait) }
let(:trait) { :with_avatar }
@@ -116,11 +116,11 @@ RSpec.describe PageLayoutHelper do
it 'escapes content' do
allow(helper).to receive(:page_card_attributes)
- .and_return(foo: %q{foo" http-equiv="refresh}.html_safe)
+ .and_return(foo: %q(foo" http-equiv="refresh).html_safe)
tags = helper.page_card_meta_tags
- expect(tags).to include(%q{content="foo&quot; http-equiv=&quot;refresh"})
+ expect(tags).to include(%q(content="foo&quot; http-equiv=&quot;refresh"))
end
end
diff --git a/spec/helpers/profiles_helper_spec.rb b/spec/helpers/profiles_helper_spec.rb
index 15ca5f61b51..ece6574ee38 100644
--- a/spec/helpers/profiles_helper_spec.rb
+++ b/spec/helpers/profiles_helper_spec.rb
@@ -106,9 +106,9 @@ RSpec.describe ProfilesHelper do
using RSpec::Parameterized::TableSyntax
where(:stacking, :breakpoint, :expected) do
- nil | nil | %w(gl-mb-3 gl-display-inline-block middle-dot-divider)
- true | nil | %w(gl-mb-3 middle-dot-divider-sm gl-display-block gl-sm-display-inline-block)
- nil | :sm | %w(gl-mb-3 gl-display-inline-block middle-dot-divider-sm)
+ nil | nil | %w[gl-mb-3 gl-display-inline-block middle-dot-divider]
+ true | nil | %w[gl-mb-3 middle-dot-divider-sm gl-display-block gl-sm-display-inline-block]
+ nil | :sm | %w[gl-mb-3 gl-display-inline-block middle-dot-divider-sm]
end
with_them do
diff --git a/spec/helpers/projects/ml/experiments_helper_spec.rb b/spec/helpers/projects/ml/experiments_helper_spec.rb
index 569fd0f9ec5..9ac518f664d 100644
--- a/spec/helpers/projects/ml/experiments_helper_spec.rb
+++ b/spec/helpers/projects/ml/experiments_helper_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Projects::Ml::ExperimentsHelper, feature_category: :mlops do
let_it_be(:project) { create(:project, :private) }
let_it_be(:experiment) { create(:ml_experiments, user_id: project.creator, project: project) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
- let_it_be(:build) { create(:ci_build, pipeline: pipeline) }
+ let_it_be(:build) { create(:ci_build, user: project.creator, pipeline: pipeline) }
let_it_be(:candidate0) do
create(:ml_candidates,
:with_artifact,
@@ -46,7 +46,7 @@ RSpec.describe Projects::Ml::ExperimentsHelper, feature_category: :mlops do
'ci_job' => { 'path' => "/#{project.full_path}/-/jobs/#{build.id}", 'name' => 'test' },
'name' => candidate0.name,
'created_at' => candidate0.created_at.strftime('%Y-%m-%dT%H:%M:%S.%LZ'),
- 'user' => { 'username' => candidate0.user.username, 'path' => "/#{candidate0.user.username}" } },
+ 'user' => { 'username' => build.user.username, 'path' => "/#{build.user.username}" } },
{ 'param2' => 'p3', 'param3' => 'p4', 'metric3' => '0.4000',
'artifact' => nil, 'details' => "/#{project.full_path}/-/ml/candidates/#{candidate1.iid}",
'ci_job' => nil,
@@ -66,6 +66,7 @@ RSpec.describe Projects::Ml::ExperimentsHelper, feature_category: :mlops do
before do
allow(candidate0).to receive(:user).and_return(nil)
+ allow(candidate0.ci_build).to receive(:user).and_return(nil)
end
it 'has the user property, but is nil' do
@@ -80,8 +81,9 @@ RSpec.describe Projects::Ml::ExperimentsHelper, feature_category: :mlops do
.and_return(false)
end
- it 'does not include ci info' do
+ it 'does not include ci info and user for candidate created through CI' do
expect(subject[0]['ci_job']).to be_nil
+ expect(subject[0]['user']).to be_nil
end
end
end
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index 9f9372f94cc..90d998e17c3 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -77,14 +77,6 @@ RSpec.describe ProjectsHelper, feature_category: :source_code_management do
end
end
- describe "#project_status_css_class" do
- it "returns appropriate class" do
- expect(project_status_css_class("started")).to eq("table-active")
- expect(project_status_css_class("failed")).to eq("table-danger")
- expect(project_status_css_class("finished")).to eq("table-success")
- end
- end
-
describe "can_change_visibility_level?" do
let_it_be(:user) { create(:project_member, :reporter, user: create(:user), project: project).user }
@@ -126,82 +118,6 @@ RSpec.describe ProjectsHelper, feature_category: :source_code_management do
end
end
- describe "readme_cache_key" do
- let(:project) { project_with_repo }
-
- it "returns a valid cach key" do
- expect(helper.send(:readme_cache_key)).to eq("#{project.full_path}-#{project.commit.id}-readme")
- end
-
- it "returns a valid cache key if HEAD does not exist" do
- allow(project).to receive(:commit) { nil }
-
- expect(helper.send(:readme_cache_key)).to eq("#{project.full_path}-nil-readme")
- end
- end
-
- describe "#project_list_cache_key", :clean_gitlab_redis_cache do
- let(:project) { project_with_repo }
-
- before do
- allow(helper).to receive(:can?).with(user, :read_cross_project) { true }
- allow(user).to receive(:max_member_access_for_project).and_return(40)
- allow(Gitlab::I18n).to receive(:locale).and_return('es')
- end
-
- it "includes the route" do
- expect(helper.project_list_cache_key(project)).to include(project.route.cache_key)
- end
-
- it "includes the project" do
- expect(helper.project_list_cache_key(project)).to include(project.cache_key)
- end
-
- it "includes the last activity date" do
- expect(helper.project_list_cache_key(project)).to include(project.last_activity_date)
- end
-
- it "includes the controller name" do
- expect(helper.controller).to receive(:controller_name).and_return("testcontroller")
-
- expect(helper.project_list_cache_key(project)).to include("testcontroller")
- end
-
- it "includes the controller action" do
- expect(helper.controller).to receive(:action_name).and_return("testaction")
-
- expect(helper.project_list_cache_key(project)).to include("testaction")
- end
-
- it "includes the application settings" do
- settings = Gitlab::CurrentSettings.current_application_settings
-
- expect(helper.project_list_cache_key(project)).to include(settings.cache_key)
- end
-
- it "includes a version" do
- expect(helper.project_list_cache_key(project).last).to start_with('v')
- end
-
- it 'includes whether or not the user can read cross project' do
- expect(helper.project_list_cache_key(project)).to include('cross-project:true')
- end
-
- it "includes the pipeline status when there is a status" do
- create(:ci_pipeline, :success, project: project, sha: project.commit.sha)
-
- expect(helper.project_list_cache_key(project)).to include("pipeline-status/#{project.commit.sha}-success")
- end
-
- it "includes the user locale" do
- expect(helper.project_list_cache_key(project)).to include('es')
- end
-
- it "includes the user max member access" do
- expect(helper.project_list_cache_key(project)).to include('access:40')
- end
- end
-
describe '#load_pipeline_status' do
it 'loads the pipeline status in batch' do
helper.load_pipeline_status([project])
@@ -753,26 +669,21 @@ RSpec.describe ProjectsHelper, feature_category: :source_code_management do
describe '#show_mobile_devops_project_promo?' do
using RSpec::Parameterized::TableSyntax
- where(:hide_cookie, :feature_flag_enabled, :mobile_target_platform, :result) do
- false | true | true | true
- false | false | true | false
- false | false | false | false
- false | true | false | false
- true | false | false | false
- true | true | false | false
- true | true | true | false
- true | false | true | false
+ where(:hide_cookie, :mobile_target_platform, :result) do
+ false | true | true
+ false | false | false
+ true | false | false
+ true | true | false
end
with_them do
before do
allow(Gitlab).to receive(:com?) { gitlab_com }
- Feature.enable(:mobile_devops_projects_promo, feature_flag_enabled)
project.project_setting.target_platforms << 'ios' if mobile_target_platform
helper.request.cookies["hide_mobile_devops_promo_#{project.id}"] = true if hide_cookie
end
- it 'resolves if the user can import members' do
+ it 'resolves if mobile devops promo banner should be displayed' do
expect(helper.show_mobile_devops_project_promo?(project)).to eq result
end
end
@@ -809,42 +720,6 @@ RSpec.describe ProjectsHelper, feature_category: :source_code_management do
end
end
- describe '#grafana_integration_url' do
- subject { helper.grafana_integration_url }
-
- it { is_expected.to eq(nil) }
-
- context 'grafana integration exists' do
- let!(:grafana_integration) { create(:grafana_integration, project: project) }
-
- it { is_expected.to eq(grafana_integration.grafana_url) }
- end
- end
-
- describe '#grafana_integration_token' do
- subject { helper.grafana_integration_masked_token }
-
- it { is_expected.to eq(nil) }
-
- context 'grafana integration exists' do
- let!(:grafana_integration) { create(:grafana_integration, project: project) }
-
- it { is_expected.to eq(grafana_integration.masked_token) }
- end
- end
-
- describe '#grafana_integration_enabled?' do
- subject { helper.grafana_integration_enabled? }
-
- it { is_expected.to eq(nil) }
-
- context 'grafana integration exists' do
- let!(:grafana_integration) { create(:grafana_integration, project: project) }
-
- it { is_expected.to eq(grafana_integration.enabled) }
- end
- end
-
describe '#project_license_name(project)', :request_store do
let_it_be(:repository) { project.repository }
@@ -1200,14 +1075,6 @@ RSpec.describe ProjectsHelper, feature_category: :source_code_management do
it_behaves_like 'configure import method modal'
end
- describe '#import_from_gitlab_message' do
- let(:import_method) { 'GitLab.com' }
-
- subject { helper.import_from_gitlab_message }
-
- it_behaves_like 'configure import method modal'
- end
-
describe '#show_inactive_project_deletion_banner?' do
shared_examples 'does not show the banner' do |pass_project: true|
it { expect(helper.show_inactive_project_deletion_banner?(pass_project ? project : nil)).to be(false) }
diff --git a/spec/helpers/releases_helper_spec.rb b/spec/helpers/releases_helper_spec.rb
index 5a9deb5c63b..daf034edbb1 100644
--- a/spec/helpers/releases_helper_spec.rb
+++ b/spec/helpers/releases_helper_spec.rb
@@ -59,7 +59,7 @@ RSpec.describe ReleasesHelper do
describe '#data_for_edit_release_page' do
it 'has the needed data to display the "edit release" page' do
- keys = %i(project_id
+ keys = %i[project_id
group_id
group_milestones_available
project_path
@@ -72,7 +72,7 @@ RSpec.describe ReleasesHelper do
new_milestone_path
upcoming_release_docs_path
edit_release_docs_path
- delete_release_docs_path)
+ delete_release_docs_path]
expect(helper.data_for_edit_release_page.keys).to match_array(keys)
end
@@ -80,7 +80,7 @@ RSpec.describe ReleasesHelper do
describe '#data_for_new_release_page' do
it 'has the needed data to display the "new release" page' do
- keys = %i(project_id
+ keys = %i[project_id
group_id
group_milestones_available
project_path
@@ -93,7 +93,7 @@ RSpec.describe ReleasesHelper do
new_milestone_path
default_branch
upcoming_release_docs_path
- edit_release_docs_path)
+ edit_release_docs_path]
expect(helper.data_for_new_release_page.keys).to match_array(keys)
end
@@ -101,9 +101,9 @@ RSpec.describe ReleasesHelper do
describe '#data_for_show_page' do
it 'has the needed data to display the individual "release" page' do
- keys = %i(project_id
+ keys = %i[project_id
project_path
- tag_name)
+ tag_name]
expect(helper.data_for_show_page.keys).to match_array(keys)
end
diff --git a/spec/helpers/sidekiq_helper_spec.rb b/spec/helpers/sidekiq_helper_spec.rb
deleted file mode 100644
index 594996bac95..00000000000
--- a/spec/helpers/sidekiq_helper_spec.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe SidekiqHelper, feature_category: :shared do
- describe 'parse_sidekiq_ps' do
- it 'parses line with time' do
- line = '55137 10,0 2,1 S+ 2:30pm sidekiq 4.1.4 gitlab [0 of 25 busy] '
- parts = helper.parse_sidekiq_ps(line)
-
- expect(parts).to eq(['55137', '10,0', '2,1', 'S+', '2:30pm', 'sidekiq 4.1.4 gitlab [0 of 25 busy]'])
- end
-
- it 'parses line with date' do
- line = '55137 10,0 2,1 S+ Aug 4 sidekiq 4.1.4 gitlab [0 of 25 busy] '
- parts = helper.parse_sidekiq_ps(line)
-
- expect(parts).to eq(['55137', '10,0', '2,1', 'S+', 'Aug 4', 'sidekiq 4.1.4 gitlab [0 of 25 busy]'])
- end
-
- it 'parses line with two digit date' do
- line = '55137 10,0 2,1 S+ Aug 04 sidekiq 4.1.4 gitlab [0 of 25 busy] '
- parts = helper.parse_sidekiq_ps(line)
-
- expect(parts).to eq(['55137', '10,0', '2,1', 'S+', 'Aug 04', 'sidekiq 4.1.4 gitlab [0 of 25 busy]'])
- end
-
- it 'parses line with dot as float separator' do
- line = '55137 10.0 2.1 S+ 2:30pm sidekiq 4.1.4 gitlab [0 of 25 busy] '
- parts = helper.parse_sidekiq_ps(line)
-
- expect(parts).to eq(['55137', '10.0', '2.1', 'S+', '2:30pm', 'sidekiq 4.1.4 gitlab [0 of 25 busy]'])
- end
-
- it 'parses OSX output' do
- line = ' 1641 1.5 3.8 S+ 4:04PM sidekiq 4.2.1 gitlab [0 of 25 busy]'
- parts = helper.parse_sidekiq_ps(line)
-
- expect(parts).to eq(['1641', '1.5', '3.8', 'S+', '4:04PM', 'sidekiq 4.2.1 gitlab [0 of 25 busy]'])
- end
-
- it 'parses Ubuntu output' do
- # Ubuntu Linux 16.04 LTS / procps-3.3.10-4ubuntu2
- line = ' 938 1.4 2.5 Sl+ 21:23:21 sidekiq 4.2.1 gitlab [0 of 25 busy] '
- parts = helper.parse_sidekiq_ps(line)
-
- expect(parts).to eq(['938', '1.4', '2.5', 'Sl+', '21:23:21', 'sidekiq 4.2.1 gitlab [0 of 25 busy]'])
- end
-
- it 'parses Debian output' do
- # Debian Linux Wheezy/Jessie
- line = '17725 1.0 12.1 Ssl 19:20:15 sidekiq 4.2.1 gitlab-rails [0 of 25 busy] '
- parts = helper.parse_sidekiq_ps(line)
-
- expect(parts).to eq(['17725', '1.0', '12.1', 'Ssl', '19:20:15', 'sidekiq 4.2.1 gitlab-rails [0 of 25 busy]'])
- end
-
- it 'parses OpenBSD output' do
- # OpenBSD 6.1
- line = '49258 0.5 2.3 R/0 Fri10PM ruby23: sidekiq 4.2.7 gitlab [0 of 25 busy] (ruby23)'
- parts = helper.parse_sidekiq_ps(line)
-
- expect(parts).to eq(['49258', '0.5', '2.3', 'R/0', 'Fri10PM', 'ruby23: sidekiq 4.2.7 gitlab [0 of 25 busy] (ruby23)'])
- end
-
- it 'does fail gracefully on line not matching the format' do
- line = '55137 10.0 2.1 S+ 2:30pm something'
- parts = helper.parse_sidekiq_ps(line)
-
- expect(parts).to eq(['?', '?', '?', '?', '?', '?'])
- end
- end
-end
diff --git a/spec/helpers/sorting_helper_spec.rb b/spec/helpers/sorting_helper_spec.rb
index d625b46e286..0f53cc98415 100644
--- a/spec/helpers/sorting_helper_spec.rb
+++ b/spec/helpers/sorting_helper_spec.rb
@@ -76,20 +76,6 @@ RSpec.describe SortingHelper do
end
end
- describe '#issuable_sort_option_title' do
- it 'returns correct title for issuable_sort_option_overrides key' do
- expect(issuable_sort_option_title('created_asc')).to eq('Created date')
- end
-
- it 'returns correct title for a valid sort value' do
- expect(issuable_sort_option_title('priority')).to eq('Priority')
- end
-
- it 'returns nil for invalid sort value' do
- expect(issuable_sort_option_title('invalid_key')).to eq(nil)
- end
- end
-
describe '#issuable_sort_direction_button' do
before do
set_sorting_url 'test_label'
@@ -156,6 +142,23 @@ RSpec.describe SortingHelper do
end
end
+ describe '#groups_sort_options_hash' do
+ let(:expected_options) do
+ {
+ sort_value_name => sort_title_name,
+ sort_value_name_desc => sort_title_name_desc,
+ sort_value_recently_created => sort_title_recently_created,
+ sort_value_oldest_created => sort_title_oldest_created,
+ sort_value_latest_activity => sort_title_recently_updated,
+ sort_value_oldest_activity => sort_title_oldest_updated
+ }
+ end
+
+ it 'returns a hash of available sorting options for the groups' do
+ expect(groups_sort_options_hash).to eq(expected_options)
+ end
+ end
+
describe 'with `projects` controller' do
before do
stub_controller_path 'projects'
@@ -192,17 +195,6 @@ RSpec.describe SortingHelper do
stub_controller_path 'forks'
end
- describe '#forks_sort_options_hash' do
- it 'returns a hash of available sorting options' do
- expect(forks_sort_options_hash).to include({
- sort_value_recently_created => sort_title_created_date,
- sort_value_oldest_created => sort_title_created_date,
- sort_value_latest_activity => sort_title_latest_activity,
- sort_value_oldest_activity => sort_title_latest_activity
- })
- end
- end
-
describe '#forks_reverse_sort_options_hash' do
context 'for each sort option' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/helpers/todos_helper_spec.rb b/spec/helpers/todos_helper_spec.rb
index dfb5cb995bc..4680a43058d 100644
--- a/spec/helpers/todos_helper_spec.rb
+++ b/spec/helpers/todos_helper_spec.rb
@@ -401,6 +401,15 @@ RSpec.describe TodosHelper do
end
end
end
+
+ context 'okr checkin reminder' do
+ it 'returns okr checkin reminder message' do
+ alert_todo.action = Todo::OKR_CHECKIN_REQUESTED
+ expect(helper.todo_action_name(alert_todo)).to eq(
+ format(s_("Todos|requested an OKR update for %{what}"), what: alert_todo.target.title)
+ )
+ end
+ end
end
describe '#todo_due_date' do
diff --git a/spec/helpers/tracking_helper_spec.rb b/spec/helpers/tracking_helper_spec.rb
index 81121275c92..9d3f2d07cbe 100644
--- a/spec/helpers/tracking_helper_spec.rb
+++ b/spec/helpers/tracking_helper_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe TrackingHelper do
describe '#tracking_attrs' do
using RSpec::Parameterized::TableSyntax
- let(:input) { %w(a b c) }
+ let(:input) { %w[a b c] }
let(:result) { { data: { track_label: 'a', track_action: 'b', track_property: 'c' } } }
before do
diff --git a/spec/helpers/users_helper_spec.rb b/spec/helpers/users_helper_spec.rb
index ad8aef276bb..20b5452d2d4 100644
--- a/spec/helpers/users_helper_spec.rb
+++ b/spec/helpers/users_helper_spec.rb
@@ -646,7 +646,7 @@ RSpec.describe UsersHelper do
expect(preload_queries).not_to exceed_query_limit(2)
expect(helper_queries).not_to exceed_query_limit(0)
- expect(access_queries).not_to exceed_query_limit(0)
+ expect(access_queries).not_to exceed_query_limit(1)
end
end
end
diff --git a/spec/helpers/wiki_helper_spec.rb b/spec/helpers/wiki_helper_spec.rb
index 497cd5d1e7f..6eaa603a43d 100644
--- a/spec/helpers/wiki_helper_spec.rb
+++ b/spec/helpers/wiki_helper_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe WikiHelper do
+RSpec.describe WikiHelper, feature_category: :wiki do
describe '#wiki_page_title' do
let_it_be(:page) { create(:wiki_page) }
@@ -75,38 +75,42 @@ RSpec.describe WikiHelper do
describe '#wiki_sort_controls' do
let(:wiki) { create(:project_wiki) }
- let(:wiki_link) { helper.wiki_sort_controls(wiki, direction) }
- let(:classes) { "gl-button btn btn-default btn-icon has-tooltip reverse-sort-btn rspec-reverse-sort" }
- def expected_link(direction, icon_class)
+ before do
+ allow(Pajamas::ButtonComponent).to receive(:new).and_call_original
+ end
+
+ def expected_link_args(direction, icon_class)
path = "/#{wiki.project.full_path}/-/wikis/pages?direction=#{direction}"
title = direction == 'desc' ? _('Sort direction: Ascending') : _('Sort direction: Descending')
- helper.link_to(path, type: 'button', class: classes, title: title) do
- helper.sprite_icon("sort-#{icon_class}")
- end
+ {
+ href: path,
+ icon: "sort-#{icon_class}",
+ button_options: hash_including(title: title)
+ }
end
- context 'initial call' do
- let(:direction) { nil }
+ context 'when initially rendering' do
+ it 'uses default values' do
+ helper.wiki_sort_controls(wiki, nil)
- it 'renders with default values' do
- expect(wiki_link).to eq(expected_link('desc', 'lowest'))
+ expect(Pajamas::ButtonComponent).to have_received(:new).with(expected_link_args('desc', 'lowest'))
end
end
- context 'sort by asc order' do
- let(:direction) { 'asc' }
-
+ context 'when the current sort order is ascending' do
it 'renders a link with opposite direction' do
- expect(wiki_link).to eq(expected_link('desc', 'lowest'))
+ helper.wiki_sort_controls(wiki, 'asc')
+
+ expect(Pajamas::ButtonComponent).to have_received(:new).with(expected_link_args('desc', 'lowest'))
end
end
- context 'sort by desc order' do
- let(:direction) { 'desc' }
-
+ context 'when the current sort order is descending' do
it 'renders a link with opposite direction' do
- expect(wiki_link).to eq(expected_link('asc', 'highest'))
+ helper.wiki_sort_controls(wiki, 'desc')
+
+ expect(Pajamas::ButtonComponent).to have_received(:new).with(expected_link_args('asc', 'highest'))
end
end
end
diff --git a/spec/initializers/direct_upload_support_spec.rb b/spec/initializers/direct_upload_support_spec.rb
index 68dd12fdb6e..29d19c8397c 100644
--- a/spec/initializers/direct_upload_support_spec.rb
+++ b/spec/initializers/direct_upload_support_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'Direct upload support' do
end
where(:config_name) do
- %w(artifacts lfs uploads)
+ %w[artifacts lfs uploads]
end
with_them do
diff --git a/spec/initializers/enumerator_next_patch_spec.rb b/spec/initializers/enumerator_next_patch_spec.rb
index 99e73af5e86..bf8ab823e53 100644
--- a/spec/initializers/enumerator_next_patch_spec.rb
+++ b/spec/initializers/enumerator_next_patch_spec.rb
@@ -33,15 +33,15 @@ RSpec.describe 'Enumerator#next patch fix' do
end
def have_been_raised_by_next_and_not_fixed_up
- contain_unique_method_calls_in_order %w(call_enum_method)
+ contain_unique_method_calls_in_order %w[call_enum_method]
end
def have_been_raised_by_enum_object_and_fixed_up
- contain_unique_method_calls_in_order %w(make_error call_enum_method)
+ contain_unique_method_calls_in_order %w[make_error call_enum_method]
end
def have_been_raised_by_nested_next_and_fixed_up
- contain_unique_method_calls_in_order %w(call_nested_next call_enum_method)
+ contain_unique_method_calls_in_order %w[call_nested_next call_enum_method]
end
methods = [
diff --git a/spec/initializers/gitlab_http_spec.rb b/spec/initializers/gitlab_http_spec.rb
new file mode 100644
index 00000000000..7715112abf4
--- /dev/null
+++ b/spec/initializers/gitlab_http_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::HTTP_V2, feature_category: :shared do
+ it 'handles log_exception_proc' do
+ expect(Gitlab::HTTP_V2::Client).to receive(:httparty_perform_request)
+ .and_raise(Net::ReadTimeout)
+
+ expect(Gitlab::ErrorTracking).to receive(:log_exception)
+ .with(Net::ReadTimeout, {})
+
+ expect { described_class.get('http://example.org') }.to raise_error(Net::ReadTimeout)
+ end
+
+ context 'when silent_mode_enabled is true' do
+ before do
+ stub_application_setting(silent_mode_enabled: true)
+ end
+
+ context 'when sending a POST request' do
+ it 'handles silent_mode_log_info_proc' do
+ expect(::Gitlab::AppJsonLogger).to receive(:info).with(
+ message: "Outbound HTTP request blocked",
+ outbound_http_request_method: 'Net::HTTP::Post',
+ silent_mode_enabled: true
+ )
+
+ expect { described_class.post('http://example.org', silent_mode_enabled: true) }.to raise_error(
+ Gitlab::HTTP_V2::SilentModeBlockedError
+ )
+ end
+ end
+
+ context 'when sending a GET request' do
+ before do
+ stub_request(:get, 'http://example.org').to_return(body: 'hello')
+ end
+
+ it 'does not raise an error' do
+ expect(::Gitlab::AppJsonLogger).not_to receive(:info)
+
+ expect(described_class.get('http://example.org', silent_mode_enabled: true).body).to eq('hello')
+ end
+ end
+ end
+end
diff --git a/spec/initializers/hangouts_chat_http_override_spec.rb b/spec/initializers/hangouts_chat_http_override_spec.rb
deleted file mode 100644
index 42236c8c853..00000000000
--- a/spec/initializers/hangouts_chat_http_override_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'HangoutsChat::Sender Gitlab::HTTP override' do
- describe 'HangoutsChat::Sender::HTTP#post' do
- it 'calls Gitlab::HTTP.post with default protection settings' do
- webhook_url = 'https://example.gitlab.com'
- payload = { key: 'value' }
- http = HangoutsChat::Sender::HTTP.new(webhook_url)
- mock_response = double(response: 'the response')
-
- expect(Gitlab::HTTP).to receive(:post)
- .with(
- URI.parse(webhook_url),
- body: payload.to_json,
- headers: { 'Content-Type' => 'application/json' },
- parse: nil
- )
- .and_return(mock_response)
-
- expect(http.post(payload)).to eq(mock_response.response)
- end
-
- it_behaves_like 'a request using Gitlab::UrlBlocker' do
- let(:http_method) { :post }
- let(:url_blocked_error_class) { Gitlab::HTTP::BlockedUrlError }
-
- def make_request(uri)
- HangoutsChat::Sender::HTTP.new(uri).post({})
- end
- end
- end
-end
diff --git a/spec/initializers/net_http_patch_spec.rb b/spec/initializers/net_http_patch_spec.rb
index b9f5299b58c..959eae954c4 100644
--- a/spec/initializers/net_http_patch_spec.rb
+++ b/spec/initializers/net_http_patch_spec.rb
@@ -1,11 +1,11 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
-require 'net/http'
+# TODO: This spec file can be removed after fully migration to the gitlab-http gem.
+# It's already covered in gems/gitlab-http/spec/gitlab/http_v2/net_http_patch_spec.rb
-require_relative '../../config/initializers/net_http_patch'
+require 'spec_helper'
-RSpec.describe 'Net::HTTP patch proxy user and password encoding' do
+RSpec.describe 'Net::HTTP patch proxy user and password encoding', feature_category: :shared do
let(:net_http) { Net::HTTP.new('hostname.example') }
before do
diff --git a/spec/initializers/net_http_response_patch_spec.rb b/spec/initializers/net_http_response_patch_spec.rb
index cd261d7b997..8074047d6aa 100644
--- a/spec/initializers/net_http_response_patch_spec.rb
+++ b/spec/initializers/net_http_response_patch_spec.rb
@@ -1,5 +1,8 @@
# frozen_string_literal: true
+# TODO: This spec file can be removed after fully migration to the gitlab-http gem.
+# It's already covered in gems/gitlab-http/spec/gitlab/http_v2/net_http_response_patch_spec.rb
+
require 'spec_helper'
RSpec.describe 'Net::HTTPResponse patch header read timeout', feature_category: :shared do
diff --git a/spec/initializers/rack_multipart_patch_spec.rb b/spec/initializers/rack_multipart_patch_spec.rb
index 862fdc7901b..4b46da73236 100644
--- a/spec/initializers/rack_multipart_patch_spec.rb
+++ b/spec/initializers/rack_multipart_patch_spec.rb
@@ -37,7 +37,7 @@ EOF
expect(described_class).not_to receive(:log_multipart_warning)
params = described_class.parse_multipart(env)
- expect(params.keys).to include(*%w(reply fileupload))
+ expect(params.keys).to include(*%w[reply fileupload])
end
end
@@ -56,7 +56,7 @@ EOF
})
params = described_class.parse_multipart(env)
- expect(params.keys).to include(*%w(reply fileupload))
+ expect(params.keys).to include(*%w[reply fileupload])
end
end
diff --git a/spec/initializers/validate_database_config_spec.rb b/spec/initializers/validate_database_config_spec.rb
index 23a3d9a2950..ffafe937ba9 100644
--- a/spec/initializers/validate_database_config_spec.rb
+++ b/spec/initializers/validate_database_config_spec.rb
@@ -3,7 +3,6 @@
require 'spec_helper'
RSpec.describe 'validate database config' do
- include RakeHelpers
include StubENV
let(:rails_configuration) { Rails::Application::Configuration.new(Rails.root) }
@@ -50,9 +49,7 @@ RSpec.describe 'validate database config' do
end
it 'validates configuration without errors and warnings' do
- expect(main_object).not_to receive(:warn)
-
- expect { subject }.not_to raise_error
+ expect { subject }.not_to output.to_stderr
end
end
diff --git a/spec/initializers/validate_puma_spec.rb b/spec/initializers/validate_puma_spec.rb
index 9ff0ef2c319..c46effd6525 100644
--- a/spec/initializers/validate_puma_spec.rb
+++ b/spec/initializers/validate_puma_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'validate puma' do
- include RakeHelpers
-
subject do
load Rails.root.join('config/initializers/validate_puma.rb')
end
@@ -36,8 +34,6 @@ RSpec.describe 'validate puma' do
context 'for other environments' do
before do
allow(Gitlab).to receive(:com?).and_return(false)
-
- allow(main_object).to receive(:warn)
end
context 'when worker count is 0' do
@@ -46,21 +42,17 @@ RSpec.describe 'validate puma' do
specify { expect { subject }.not_to raise_error }
it 'warns about running Puma in a Single mode' do
- expect(main_object).to receive(:warn) do |warning|
- expect(warning).to include('https://gitlab.com/groups/gitlab-org/-/epics/5303')
- end
-
- subject
+ expect { subject }
+ .to output(%r{https://gitlab.com/groups/gitlab-org/-/epics/5303})
+ .to_stderr
end
end
context 'when worker count is > 0' do
let(:workers) { 2 }
- specify { expect { subject }.not_to raise_error }
-
it 'does not issue a warning' do
- expect(main_object).not_to receive(:warn)
+ expect { subject }.not_to output.to_stderr
end
end
end
diff --git a/spec/lib/api/ci/helpers/runner_helpers_spec.rb b/spec/lib/api/ci/helpers/runner_helpers_spec.rb
index c36c8d23e88..e05be65bf19 100644
--- a/spec/lib/api/ci/helpers/runner_helpers_spec.rb
+++ b/spec/lib/api/ci/helpers/runner_helpers_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe API::Ci::Helpers::Runner, feature_category: :runner do
it 'extracts the runner details', :aggregate_failures do
expect(details.keys).to match_array(
- %w(system_id name version revision platform architecture executor config ip_address)
+ %w[system_id name version revision platform architecture executor config ip_address]
)
expect(details['system_id']).to eq(system_id)
expect(details['name']).to eq(name)
diff --git a/spec/lib/api/ci/helpers/runner_spec.rb b/spec/lib/api/ci/helpers/runner_spec.rb
index 70504a58af3..b74f5bf2de8 100644
--- a/spec/lib/api/ci/helpers/runner_spec.rb
+++ b/spec/lib/api/ci/helpers/runner_spec.rb
@@ -111,7 +111,7 @@ RSpec.describe API::Ci::Helpers::Runner do
expect(current_runner_manager).not_to be_nil
expect(current_runner_manager.system_xid).to eq('new_system_id')
- expect(current_runner_manager.contacted_at).to eq(Time.current)
+ expect(current_runner_manager.contacted_at).to be_nil
expect(current_runner_manager.runner).to eq(runner)
end
diff --git a/spec/lib/api/entities/basic_project_details_spec.rb b/spec/lib/api/entities/basic_project_details_spec.rb
index 425252ea315..6e75f08d937 100644
--- a/spec/lib/api/entities/basic_project_details_spec.rb
+++ b/spec/lib/api/entities/basic_project_details_spec.rb
@@ -61,4 +61,32 @@ RSpec.describe API::Entities::BasicProjectDetails, feature_category: :api do
end
end
end
+
+ describe '#repository_storage' do
+ let_it_be(:project) { build(:project, :public) }
+
+ context 'with anonymous user' do
+ let_it_be(:current_user) { nil }
+
+ it 'is not included' do
+ expect(output).not_to include(:repository_storage)
+ end
+ end
+
+ context 'with normal user' do
+ let_it_be(:current_user) { create(:user) }
+
+ it 'is not included' do
+ expect(output).not_to include(:repository_storage)
+ end
+ end
+
+ context 'with admin user' do
+ let_it_be(:current_user) { create(:user, :admin) }
+
+ it 'is included', :enable_admin_mode do
+ expect(output).to include repository_storage: project.repository_storage
+ end
+ end
+ end
end
diff --git a/spec/lib/api/entities/bulk_import_spec.rb b/spec/lib/api/entities/bulk_import_spec.rb
index 2db6862b079..cfa293463ad 100644
--- a/spec/lib/api/entities/bulk_import_spec.rb
+++ b/spec/lib/api/entities/bulk_import_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Entities::BulkImport do
+RSpec.describe API::Entities::BulkImport, feature_category: :importers do
let_it_be(:import) { create(:bulk_import) }
subject { described_class.new(import).as_json }
@@ -13,7 +13,8 @@ RSpec.describe API::Entities::BulkImport do
:status,
:source_type,
:created_at,
- :updated_at
+ :updated_at,
+ :has_failures
)
end
end
diff --git a/spec/lib/api/entities/bulk_imports/entity_spec.rb b/spec/lib/api/entities/bulk_imports/entity_spec.rb
index ba8a2ddffcb..791cd3a20e2 100644
--- a/spec/lib/api/entities/bulk_imports/entity_spec.rb
+++ b/spec/lib/api/entities/bulk_imports/entity_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Entities::BulkImports::Entity do
+RSpec.describe API::Entities::BulkImports::Entity, feature_category: :importers do
let_it_be(:entity) { create(:bulk_import_entity) }
subject { described_class.new(entity).as_json }
@@ -22,7 +22,8 @@ RSpec.describe API::Entities::BulkImports::Entity do
:created_at,
:updated_at,
:failures,
- :migrate_projects
+ :migrate_projects,
+ :has_failures
)
end
end
diff --git a/spec/lib/api/entities/diff_spec.rb b/spec/lib/api/entities/diff_spec.rb
new file mode 100644
index 00000000000..27d9ed44c98
--- /dev/null
+++ b/spec/lib/api/entities/diff_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::API::Entities::Diff, feature_category: :source_code_management do
+ subject(:json) { entity.as_json }
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:repository) { project.repository }
+ let_it_be(:diff) { repository.diff('HEAD~1', 'HEAD').first }
+
+ let(:entity) { described_class.new(diff, options) }
+ let(:options) { {} }
+
+ it 'returns expected data' do
+ expect(entity.as_json).to eq(
+ {
+ diff: diff.diff,
+ new_path: diff.new_path,
+ old_path: diff.old_path,
+ a_mode: diff.a_mode,
+ b_mode: diff.b_mode,
+ new_file: diff.new_file?,
+ renamed_file: diff.renamed_file?,
+ deleted_file: diff.deleted_file?
+ }
+ )
+ end
+
+ context 'when enable_unidiff option is set' do
+ let(:options) { { enable_unidiff: true } }
+
+ it 'returns expected data' do
+ expect(entity.as_json).to include(diff: diff.unidiff)
+ end
+ end
+
+ context 'when enable_unidiff option is false' do
+ let(:options) { { enable_unidiff: false } }
+
+ it 'returns expected data' do
+ expect(entity.as_json).to include(diff: diff.diff)
+ end
+ end
+end
diff --git a/spec/lib/api/entities/namespace_basic_spec.rb b/spec/lib/api/entities/namespace_basic_spec.rb
new file mode 100644
index 00000000000..9a0352991c8
--- /dev/null
+++ b/spec/lib/api/entities/namespace_basic_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::API::Entities::NamespaceBasic, feature_category: :groups_and_projects do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:namespace) { create(:namespace) }
+
+ let(:options) { { current_user: current_user } }
+
+ let(:entity) do
+ described_class.new(namespace, options)
+ end
+
+ subject(:json) { entity.as_json }
+
+ shared_examples 'returns a response' do
+ it 'returns required fields' do
+ expect(json[:id]).to be_present
+ expect(json[:name]).to be_present
+ expect(json[:path]).to be_present
+ expect(json[:kind]).to be_present
+ expect(json[:full_path]).to be_present
+ expect(json[:web_url]).to be_present
+ end
+ end
+
+ include_examples 'returns a response'
+
+ context 'for a user namespace' do
+ let_it_be(:namespace) { create(:user_namespace) }
+
+ include_examples 'returns a response'
+
+ context 'when user namespace owner is missing' do
+ before do
+ namespace.update_column(:owner_id, non_existing_record_id)
+ end
+
+ include_examples 'returns a response'
+
+ it 'returns correct web_url' do
+ expect(json[:web_url]).to include(namespace.path)
+ end
+ end
+ end
+end
diff --git a/spec/lib/api/entities/user_spec.rb b/spec/lib/api/entities/user_spec.rb
index 1d80aad2127..c5a98e6e42e 100644
--- a/spec/lib/api/entities/user_spec.rb
+++ b/spec/lib/api/entities/user_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe API::Entities::User do
# UserSafe
:id, :username, :name,
# UserBasic
- :state, :avatar_url, :web_url,
+ :state, :locked, :avatar_url, :web_url,
# User
:created_at, :bio, :location, :public_email, :skype, :linkedin, :twitter, :discord,
:website_url, :organization, :job_title, :pronouns, :bot, :work_information,
@@ -29,7 +29,7 @@ RSpec.describe API::Entities::User do
allow(Ability).to receive(:allowed?).with(current_user, :read_user_profile, user).and_return(can_read_user_profile)
end
- %i(followers following is_followed).each do |relationship|
+ %i[followers following is_followed].each do |relationship|
shared_examples 'does not expose relationship' do
it "does not expose #{relationship}" do
expect(subject).not_to include(relationship)
diff --git a/spec/lib/api/entities/wiki_page_spec.rb b/spec/lib/api/entities/wiki_page_spec.rb
index c75bba12484..a3566293c5c 100644
--- a/spec/lib/api/entities/wiki_page_spec.rb
+++ b/spec/lib/api/entities/wiki_page_spec.rb
@@ -22,6 +22,19 @@ RSpec.describe API::Entities::WikiPage do
expect(subject[:content]).to eq wiki_page.content
end
+ context "with front matter content" do
+ let(:wiki_page) { create(:wiki_page) }
+ let(:content_with_front_matter) { "---\nxxx: abc\n---\nHome Page" }
+
+ before do
+ wiki_page.update(content: content_with_front_matter) # rubocop:disable Rails/SaveBang
+ end
+
+ it 'returns the raw wiki page content' do
+ expect(subject[:content]).to eq content_with_front_matter
+ end
+ end
+
context 'when render_html param is passed' do
context 'when it is true' do
let(:params) { { render_html: true } }
diff --git a/spec/lib/api/helpers/common_helpers_spec.rb b/spec/lib/api/helpers/common_helpers_spec.rb
index 5162d2f1000..04a1837dec9 100644
--- a/spec/lib/api/helpers/common_helpers_spec.rb
+++ b/spec/lib/api/helpers/common_helpers_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe API::Helpers::CommonHelpers do
get '/test?array=&array_of_strings=test,me&array_of_ints=1,2'
expect(json_response['array']).to eq([])
- expect(json_response['array_of_strings']).to eq(%w(test me))
+ expect(json_response['array_of_strings']).to eq(%w[test me])
expect(json_response['array_of_ints']).to eq([1, 2])
end
end
diff --git a/spec/lib/api/helpers/import_github_helpers_spec.rb b/spec/lib/api/helpers/import_github_helpers_spec.rb
new file mode 100644
index 00000000000..3324e38660c
--- /dev/null
+++ b/spec/lib/api/helpers/import_github_helpers_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Helpers::ImportGithubHelpers, feature_category: :importers do
+ subject do
+ helper = Class.new.include(described_class).new
+ def helper.params = {
+ personal_access_token: 'foo',
+ additional_access_tokens: 'bar',
+ github_hostname: 'github.example.com'
+ }
+ helper
+ end
+
+ describe '#client' do
+ it 'returns the new github client' do
+ expect(subject.client).to be_a(Gitlab::GithubImport::Client)
+ end
+ end
+
+ describe '#access_params' do
+ it 'makes the passed in personal access token and extra tokens accessible' do
+ expect(subject.access_params).to eq({ github_access_token: 'foo', additional_access_tokens: 'bar' })
+ end
+ end
+
+ describe '#provider' do
+ it 'is GitHub' do
+ expect(subject.provider).to eq(:github)
+ end
+ end
+
+ describe '#provider_unauthorized' do
+ it 'raises an error' do
+ expect(subject).to receive(:error!).with('Access denied to your GitHub account.', 401)
+ subject.provider_unauthorized
+ end
+ end
+
+ describe '#too_many_requests' do
+ it 'raises an error' do
+ expect(subject).to receive(:error!).with('Too Many Requests', 429)
+ subject.too_many_requests
+ end
+ end
+end
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
index dd62343890e..5d343ec2777 100644
--- a/spec/lib/api/helpers_spec.rb
+++ b/spec/lib/api/helpers_spec.rb
@@ -773,21 +773,43 @@ RSpec.describe API::Helpers, feature_category: :shared do
end
describe '#track_event' do
- let(:user_id) { 345 }
- let(:namespace_id) { 12 }
- let(:project_id) { 56 }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:namespace) { create(:namespace) }
+ let_it_be(:project) { create(:project) }
let(:event_name) { 'i_compliance_dashboard' }
let(:unknown_event) { 'unknown' }
it 'tracks internal event' do
expect(Gitlab::InternalEvents).to receive(:track_event).with(
event_name,
- user_id: user_id,
- namespace_id: namespace_id,
- project_id: project_id
+ send_snowplow_event: true,
+ user: user,
+ namespace: namespace,
+ project: project
)
- helper.track_event(event_name, user_id: user_id, namespace_id: namespace_id, project_id: project_id)
+ helper.track_event(event_name,
+ user: user,
+ namespace_id: namespace.id,
+ project_id: project.id
+ )
+ end
+
+ it 'passes send_snowplow_event on to InternalEvents.track_event' do
+ expect(Gitlab::InternalEvents).to receive(:track_event).with(
+ event_name,
+ send_snowplow_event: false,
+ user: user,
+ namespace: namespace,
+ project: project
+ )
+
+ helper.track_event(event_name,
+ send_snowplow_event: false,
+ user: user,
+ namespace_id: namespace.id,
+ project_id: project.id
+ )
end
it 'logs an exception for unknown event' do
@@ -797,18 +819,29 @@ RSpec.describe API::Helpers, feature_category: :shared do
instance_of(Gitlab::InternalEvents::UnknownEventError),
event_name: unknown_event
)
- helper.track_event(unknown_event, user_id: user_id, namespace_id: namespace_id, project_id: project_id)
+
+ helper.track_event(unknown_event,
+ user: user,
+ namespace_id: namespace.id,
+ project_id: project.id
+ )
end
- it 'does not track event for nil user_id' do
+ it 'does not track event for nil user' do
expect(Gitlab::InternalEvents).not_to receive(:track_event)
- helper.track_event(unknown_event, user_id: nil, namespace_id: namespace_id, project_id: project_id)
+ helper.track_event(unknown_event,
+ user: nil,
+ namespace_id: namespace.id,
+ project_id: project.id
+ )
end
end
shared_examples '#order_options_with_tie_breaker' do
- subject { Class.new.include(described_class).new.order_options_with_tie_breaker }
+ subject { Class.new.include(described_class).new.order_options_with_tie_breaker(**reorder_params) }
+
+ let(:reorder_params) { {} }
before do
allow_any_instance_of(described_class).to receive(:params).and_return(params)
@@ -852,11 +885,25 @@ RSpec.describe API::Helpers, feature_category: :shared do
describe '#order_options_with_tie_breaker' do
include_examples '#order_options_with_tie_breaker'
- context 'with created_at order given' do
- let(:params) { { order_by: 'created_at', sort: 'asc' } }
+ context 'by default' do
+ context 'with created_at order given' do
+ let(:params) { { order_by: 'created_at', sort: 'asc' } }
- it 'converts to id' do
- is_expected.to eq({ 'id' => 'asc' })
+ it 'converts to id' do
+ is_expected.to eq({ 'id' => 'asc' })
+ end
+ end
+ end
+
+ context 'when override_created_at is false' do
+ let(:reorder_params) { { override_created_at: false } }
+
+ context 'with created_at order given' do
+ let(:params) { { order_by: 'created_at', sort: 'asc' } }
+
+ it 'does not convert to id' do
+ is_expected.to eq({ "created_at" => "asc", "id" => "asc" })
+ end
end
end
end
diff --git a/spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb b/spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb
index 5ebb5ffed3b..0ed320e863c 100644
--- a/spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb
+++ b/spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Atlassian::JiraConnect::Serializers::PullRequestEntity, feature_c
let_it_be(:project) { create_default(:project, :repository) }
let_it_be(:merge_requests) { create_list(:merge_request, 2, :unique_branches) }
let_it_be(:notes) { create_list(:note, 2, system: false, noteable: merge_requests.first) }
+ let_it_be(:merge_request_reviewers) { create_pair(:merge_request_reviewer, merge_request: merge_requests[0]) }
subject { described_class.represent(merge_requests).as_json }
@@ -13,6 +14,10 @@ RSpec.describe Atlassian::JiraConnect::Serializers::PullRequestEntity, feature_c
expect(subject.first[:commentCount]).to eq(2)
end
+ it 'exposes reviewers' do
+ expect(subject.first[:reviewers].count).to eq(2)
+ end
+
context 'with user_notes_count option' do
let(:user_notes_count) { merge_requests.to_h { |merge_request| [merge_request.id, 1] } }
@@ -25,6 +30,11 @@ RSpec.describe Atlassian::JiraConnect::Serializers::PullRequestEntity, feature_c
merge_requests << create(:merge_request, :unique_branches)
+ # In normal use of this entity, reviewer data is preloaded in JiraConnect::SyncService
+ ActiveRecord::Associations::Preloader.new(
+ records: merge_requests, associations: { merge_request_reviewers: :reviewer }
+ ).call
+
expect { subject }.not_to exceed_query_limit(control_count)
end
diff --git a/spec/lib/atlassian/jira_connect/serializers/reviewer_entity_spec.rb b/spec/lib/atlassian/jira_connect/serializers/reviewer_entity_spec.rb
new file mode 100644
index 00000000000..5a8667ec830
--- /dev/null
+++ b/spec/lib/atlassian/jira_connect/serializers/reviewer_entity_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Atlassian::JiraConnect::Serializers::ReviewerEntity, feature_category: :integrations do
+ subject { described_class.represent(merge_request_reviewer, merge_request: merge_request) }
+
+ let_it_be_with_reload(:merge_request) { create(:merge_request) }
+ let_it_be(:reviewer) { create(:user) }
+ let(:merge_request_reviewer) { build(:merge_request_reviewer, merge_request: merge_request, reviewer: reviewer) }
+
+ describe '#to_json' do
+ it { expect(subject.to_json).to be_valid_json.and match_schema('jira_connect/reviewer') }
+ end
+
+ it 'exposes all fields' do
+ expect(subject.as_json.keys).to contain_exactly(:name, :email, :approvalStatus)
+ end
+
+ it 'exposes correct user\'s data' do
+ expect(subject.as_json[:name]).to eq(reviewer.name)
+ expect(subject.as_json[:email]).to eq(reviewer.email)
+ end
+
+ it 'exposes correct approval status' do
+ expect(subject.as_json[:approvalStatus]).to eq('UNAPPROVED')
+ end
+
+ context 'when MR is reviewed, but not approved' do
+ before do
+ merge_request_reviewer.reviewed!
+ end
+
+ it 'exposes correct approval status' do
+ expect(subject.as_json[:approvalStatus]).to eq('NEEDSWORK')
+ end
+ end
+
+ context 'when MR is approved' do
+ before do
+ create(:approval, user: reviewer, merge_request: merge_request)
+ end
+
+ it 'exposes correct approval status' do
+ expect(subject.as_json[:approvalStatus]).to eq('APPROVED')
+ end
+ end
+end
diff --git a/spec/lib/aws/s3_client_spec.rb b/spec/lib/aws/s3_client_spec.rb
new file mode 100644
index 00000000000..0e50eb4d910
--- /dev/null
+++ b/spec/lib/aws/s3_client_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Aws::S3Client, feature_category: :audit_events do
+ let_it_be(:region) { 'eu-west-1' }
+ let_it_be(:access_key_id) { 'AKIARANDOM123' }
+ let_it_be(:secret_access_key) { 'TOPSECRET/XYZ' }
+
+ let(:s3_client) { described_class.new(access_key_id, secret_access_key, region) }
+
+ describe '#upload_object' do
+ let(:key) { 'file.txt' }
+ let(:bucket_name) { 'gitlab-audit-logs' }
+ let(:body) { 'content' }
+ let(:content_type) { 'Text/plain' }
+
+ it 'calls put_object with correct params' do
+ allow_next_instance_of(Aws::S3::Client) do |s3_client|
+ expect(s3_client).to receive(:put_object).with(
+ {
+ key: key,
+ bucket: bucket_name,
+ body: body,
+ content_type: 'Text/plain'
+ }
+ )
+ end
+
+ s3_client.upload_object(key, bucket_name, body, content_type)
+ end
+ end
+end
diff --git a/spec/lib/backup/database_model_spec.rb b/spec/lib/backup/database_model_spec.rb
index 5758ad2c1aa..c9d036b37f8 100644
--- a/spec/lib/backup/database_model_spec.rb
+++ b/spec/lib/backup/database_model_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature_category: :backup_restore do
+ using RSpec::Parameterized::TableSyntax
+
let(:gitlab_database_name) { 'main' }
describe '#connection' do
@@ -30,7 +32,7 @@ RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature
).to receive(:configuration_hash).and_return(application_config)
end
- context 'when no GITLAB_BACKUP_PG* variables are set' do
+ shared_examples 'no configuration is overridden' do
it 'ActiveRecord backup configuration is expected to equal application configuration' do
expect(subject[:activerecord]).to eq(application_config)
end
@@ -45,9 +47,23 @@ RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature
end
end
- context 'when GITLAB_BACKUP_PG* variables are set' do
- using RSpec::Parameterized::TableSyntax
+ shared_examples 'environment variables override application configuration' do
+ let(:active_record_key) { described_class::SUPPORTED_OVERRIDES.invert[pg_env] }
+
+ it 'ActiveRecord backup configuration overrides application configuration' do
+ expect(subject[:activerecord]).to eq(application_config.merge(active_record_key => overridden_value))
+ end
+
+ it 'PostgreSQL ENV overrides application configuration' do
+ expect(subject[:pg_env]).to include({ pg_env => overridden_value })
+ end
+ end
+ context 'when no GITLAB_BACKUP_PG* variables are set' do
+ it_behaves_like 'no configuration is overridden'
+ end
+
+ context 'when GITLAB_BACKUP_PG* variables are set' do
where(:env_variable, :overridden_value) do
'GITLAB_BACKUP_PGHOST' | 'test.invalid.'
'GITLAB_BACKUP_PGUSER' | 'some_user'
@@ -63,18 +79,76 @@ RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature
with_them do
let(:pg_env) { env_variable[/GITLAB_BACKUP_(\w+)/, 1] }
- let(:active_record_key) { described_class::SUPPORTED_OVERRIDES.invert[pg_env] }
before do
stub_env(env_variable, overridden_value)
end
- it 'ActiveRecord backup configuration overrides application configuration' do
- expect(subject[:activerecord]).to eq(application_config.merge(active_record_key => overridden_value))
+ it_behaves_like 'environment variables override application configuration'
+ end
+ end
+
+ context 'when GITLAB_BACKUP_<DBNAME>_PG* variables are set' do
+ context 'and environment variables are for the current database name' do
+ where(:env_variable, :overridden_value) do
+ 'GITLAB_BACKUP_MAIN_PGHOST' | 'test.invalid.'
+ 'GITLAB_BACKUP_MAIN_PGUSER' | 'some_user'
+ 'GITLAB_BACKUP_MAIN_PGPORT' | '1543'
+ 'GITLAB_BACKUP_MAIN_PGPASSWORD' | 'secret'
+ 'GITLAB_BACKUP_MAIN_PGSSLMODE' | 'allow'
+ 'GITLAB_BACKUP_MAIN_PGSSLKEY' | 'some_key'
+ 'GITLAB_BACKUP_MAIN_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_BACKUP_MAIN_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_BACKUP_MAIN_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_BACKUP_MAIN_PGSSLCOMPRESSION' | '1'
+ end
+
+ with_them do
+ let(:pg_env) { env_variable[/GITLAB_BACKUP_MAIN_(\w+)/, 1] }
+
+ before do
+ stub_env(env_variable, overridden_value)
+ end
+
+ it_behaves_like 'environment variables override application configuration'
+ end
+ end
+
+ context 'and environment variables are for another database' do
+ where(:env_variable, :overridden_value) do
+ 'GITLAB_BACKUP_CI_PGHOST' | 'test.invalid.'
+ 'GITLAB_BACKUP_CI_PGUSER' | 'some_user'
+ 'GITLAB_BACKUP_CI_PGPORT' | '1543'
+ 'GITLAB_BACKUP_CI_PGPASSWORD' | 'secret'
+ 'GITLAB_BACKUP_CI_PGSSLMODE' | 'allow'
+ 'GITLAB_BACKUP_CI_PGSSLKEY' | 'some_key'
+ 'GITLAB_BACKUP_CI_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_BACKUP_CI_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_BACKUP_CI_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_BACKUP_CI_PGSSLCOMPRESSION' | '1'
+ end
+
+ with_them do
+ let(:pg_env) { env_variable[/GITLAB_BACKUP_CI_(\w+)/, 1] }
+
+ before do
+ stub_env(env_variable, overridden_value)
+ end
+
+ it_behaves_like 'no configuration is overridden'
+ end
+ end
+
+ context 'when both GITLAB_BACKUP_PGUSER and GITLAB_BACKUP_MAIN_PGUSER variables are present' do
+ before do
+ stub_env('GITLAB_BACKUP_PGUSER', 'generic_user')
+ stub_env('GITLAB_BACKUP_MAIN_PGUSER', 'specific_user')
 end
- it 'PostgreSQL ENV overrides application configuration' do
- expect(subject[:pg_env]).to include({ pg_env => overridden_value })
+ it 'prefers more specific GITLAB_BACKUP_MAIN_PGUSER' do
+ config = subject
+ expect(config.dig(:activerecord, :username)).to eq('specific_user')
+ expect(config.dig(:pg_env, 'PGUSER')).to eq('specific_user')
end
end
end
diff --git a/spec/lib/backup/database_spec.rb b/spec/lib/backup/database_spec.rb
index 2f14b403576..073efbbbfcc 100644
--- a/spec/lib/backup/database_spec.rb
+++ b/spec/lib/backup/database_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Backup::Database, :reestablished_active_record_base, feature_category: :backup_restore do
let(:progress) { StringIO.new }
let(:output) { progress.string }
+ let(:backup_id) { 'some_id' }
let(:one_database_configured?) { base_models_for_backup.one? }
let(:timeout_service) do
instance_double(Gitlab::Database::TransactionTimeoutSettings, restore_timeouts: nil, disable_timeouts: nil)
@@ -17,7 +18,6 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
end
before(:all) do # rubocop:disable RSpec/BeforeAll
- Rake::Task.define_task(:environment)
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/gitlab/backup'
Rake.application.rake_require 'tasks/gitlab/shell'
@@ -26,7 +26,6 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
end
describe '#dump', :delete do
- let(:backup_id) { 'some_id' }
let(:force) { true }
subject { described_class.new(progress, force: force) }
@@ -222,7 +221,7 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke)
end
- subject.restore(backup_dir)
+ subject.restore(backup_dir, backup_id)
expect(output).to include('Removing all tables. Press `Ctrl-C` within 5 seconds to abort')
end
@@ -240,7 +239,7 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke)
end
- subject.restore(backup_dir)
+ subject.restore(backup_dir, backup_id)
expect(output).to include("Restoring PostgreSQL database")
expect(output).to include("[DONE]")
@@ -260,7 +259,7 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke)
end
- expect { subject.restore(backup_dir) }.to raise_error(Backup::Error)
+ expect { subject.restore(backup_dir, backup_id) }.to raise_error(Backup::Error)
end
end
@@ -276,7 +275,7 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke)
end
- subject.restore(backup_dir)
+ subject.restore(backup_dir, backup_id)
expect(output).to include("ERRORS")
expect(output).not_to include(noise)
@@ -305,7 +304,7 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
expect(ENV).to receive(:merge!).with(hash_including { 'PGHOST' => 'test.example.com' })
expect(ENV).not_to receive(:[]=).with('PGPASSWORD', anything)
- subject.restore(backup_dir)
+ subject.restore(backup_dir, backup_id)
expect(ENV['PGPORT']).to eq(config['port']) if config['port']
expect(ENV['PGUSER']).to eq(config['username']) if config['username']
@@ -328,14 +327,14 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
end
expect do
- subject.restore('db')
+ subject.restore('db', backup_id)
end.to raise_error(Backup::Error, /Source database file does not exist/)
end
end
context 'for ci database' do
it 'ci database tolerates missing source file' do
- expect { subject.restore(backup_dir) }.not_to raise_error
+ expect { subject.restore(backup_dir, backup_id) }.not_to raise_error
end
end
end
diff --git a/spec/lib/backup/files_spec.rb b/spec/lib/backup/files_spec.rb
index f98b5e1414f..48c89e06dfa 100644
--- a/spec/lib/backup/files_spec.rb
+++ b/spec/lib/backup/files_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Backup::Files do
+RSpec.describe Backup::Files, feature_category: :backup_restore do
let(:progress) { StringIO.new }
let!(:project) { create(:project) }
@@ -58,25 +58,25 @@ RSpec.describe Backup::Files do
it 'moves all necessary files' do
allow(subject).to receive(:backup_existing_files).and_call_original
expect(FileUtils).to receive(:mv).with(["/var/gitlab-registry/sample1"], File.join(Gitlab.config.backup.path, "tmp", "registry.#{Time.now.to_i}"))
- subject.restore('registry.tar.gz')
+ subject.restore('registry.tar.gz', 'backup_id')
end
it 'raises no errors' do
- expect { subject.restore('registry.tar.gz') }.not_to raise_error
+ expect { subject.restore('registry.tar.gz', 'backup_id') }.not_to raise_error
end
it 'calls tar command with unlink' do
expect(subject).to receive(:tar).and_return('blabla-tar')
- expect(subject).to receive(:run_pipeline!).with([%w(gzip -cd), %w(blabla-tar --unlink-first --recursive-unlink -C /var/gitlab-registry -xf -)], any_args)
+ expect(subject).to receive(:run_pipeline!).with([%w[gzip -cd], %w[blabla-tar --unlink-first --recursive-unlink -C /var/gitlab-registry -xf -]], any_args)
expect(subject).to receive(:pipeline_succeeded?).and_return(true)
- subject.restore('registry.tar.gz')
+ subject.restore('registry.tar.gz', 'backup_id')
end
it 'raises an error on failure' do
expect(subject).to receive(:pipeline_succeeded?).and_return(false)
- expect { subject.restore('registry.tar.gz') }.to raise_error(/Restore operation failed:/)
+ expect { subject.restore('registry.tar.gz', 'backup_id') }.to raise_error(/Restore operation failed:/)
end
end
@@ -89,7 +89,7 @@ RSpec.describe Backup::Files do
it 'shows error message' do
expect(subject).to receive(:access_denied_error).with("/var/gitlab-registry")
- subject.restore('registry.tar.gz')
+ subject.restore('registry.tar.gz', 'backup_id')
end
end
@@ -104,7 +104,7 @@ RSpec.describe Backup::Files do
expect(subject).to receive(:resource_busy_error).with("/var/gitlab-registry")
.and_call_original
- expect { subject.restore('registry.tar.gz') }.to raise_error(/is a mountpoint/)
+ expect { subject.restore('registry.tar.gz', 'backup_id') }.to raise_error(/is a mountpoint/)
end
end
end
@@ -124,7 +124,7 @@ RSpec.describe Backup::Files do
it 'excludes tmp dirs from archive' do
expect(subject).to receive(:tar).and_return('blabla-tar')
- expect(subject).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./@pages.tmp -C /var/gitlab-pages -cf - .), 'gzip -c -1'], any_args)
+ expect(subject).to receive(:run_pipeline!).with([%w[blabla-tar --exclude=lost+found --exclude=./@pages.tmp -C /var/gitlab-pages -cf - .], 'gzip -c -1'], any_args)
subject.dump('registry.tar.gz', 'backup_id')
end
@@ -146,7 +146,7 @@ RSpec.describe Backup::Files do
it 'excludes tmp dirs from rsync' do
expect(Gitlab::Popen).to receive(:popen)
- .with(%w(rsync -a --delete --exclude=lost+found --exclude=/gitlab-pages/@pages.tmp /var/gitlab-pages /var/gitlab-backup))
+ .with(%w[rsync -a --delete --exclude=lost+found --exclude=/gitlab-pages/@pages.tmp /var/gitlab-pages /var/gitlab-backup])
.and_return(['', 0])
subject.dump('registry.tar.gz', 'backup_id')
@@ -154,7 +154,7 @@ RSpec.describe Backup::Files do
it 'retries if rsync fails due to vanishing files' do
expect(Gitlab::Popen).to receive(:popen)
- .with(%w(rsync -a --delete --exclude=lost+found --exclude=/gitlab-pages/@pages.tmp /var/gitlab-pages /var/gitlab-backup))
+ .with(%w[rsync -a --delete --exclude=lost+found --exclude=/gitlab-pages/@pages.tmp /var/gitlab-pages /var/gitlab-backup])
.and_return(['rsync failed', 24], ['', 0])
expect do
@@ -164,7 +164,7 @@ RSpec.describe Backup::Files do
it 'raises an error and outputs an error message if rsync failed' do
allow(Gitlab::Popen).to receive(:popen)
- .with(%w(rsync -a --delete --exclude=lost+found --exclude=/gitlab-pages/@pages.tmp /var/gitlab-pages /var/gitlab-backup))
+ .with(%w[rsync -a --delete --exclude=lost+found --exclude=/gitlab-pages/@pages.tmp /var/gitlab-pages /var/gitlab-backup])
.and_return(['rsync failed', 1])
expect do
diff --git a/spec/lib/backup/manager_spec.rb b/spec/lib/backup/manager_spec.rb
index 1733d21c23f..8f85cd9d8b3 100644
--- a/spec/lib/backup/manager_spec.rb
+++ b/spec/lib/backup/manager_spec.rb
@@ -69,7 +69,7 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
let(:pre_restore_warning) { nil }
let(:post_restore_warning) { nil }
let(:definitions) { { 'my_task' => Backup::Manager::TaskDefinition.new(task: task, enabled: enabled, human_name: 'my task', destination_path: 'my_task.tar.gz') } }
- let(:backup_information) { {} }
+ let(:backup_information) { { backup_created_at: Time.zone.parse('2019-01-01'), gitlab_version: '12.3' } }
let(:task) do
instance_double(Backup::Task,
pre_restore_warning: pre_restore_warning,
@@ -156,7 +156,7 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
describe '#create' do
let(:incremental_env) { 'false' }
- let(:expected_backup_contents) { %w{backup_information.yml task1.tar.gz task2.tar.gz} }
+ let(:expected_backup_contents) { %w[backup_information.yml task1.tar.gz task2.tar.gz] }
let(:backup_time) { Time.zone.parse('2019-1-1') }
let(:backup_id) { "1546300800_2019_01_01_#{Gitlab::VERSION}" }
let(:full_backup_id) { backup_id }
@@ -179,8 +179,8 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
allow(Gitlab::BackupLogger).to receive(:info)
allow(Kernel).to receive(:system).and_return(true)
- allow(task1).to receive(:dump).with(File.join(Gitlab.config.backup.path, 'task1.tar.gz'), full_backup_id)
- allow(task2).to receive(:dump).with(File.join(Gitlab.config.backup.path, 'task2.tar.gz'), full_backup_id)
+ allow(task1).to receive(:dump).with(File.join(Gitlab.config.backup.path, 'task1.tar.gz'), backup_id)
+ allow(task2).to receive(:dump).with(File.join(Gitlab.config.backup.path, 'task2.tar.gz'), backup_id)
end
it 'creates a backup tar' do
@@ -223,7 +223,7 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
end
context 'when SKIP env is set' do
- let(:expected_backup_contents) { %w{backup_information.yml task1.tar.gz} }
+ let(:expected_backup_contents) { %w[backup_information.yml task1.tar.gz] }
before do
stub_env('SKIP', 'task2')
@@ -237,7 +237,7 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
end
context 'when the destination is optional' do
- let(:expected_backup_contents) { %w{backup_information.yml task1.tar.gz} }
+ let(:expected_backup_contents) { %w[backup_information.yml task1.tar.gz] }
let(:definitions) do
{
'task1' => Backup::Manager::TaskDefinition.new(task: task1, destination_path: 'task1.tar.gz'),
@@ -936,6 +936,8 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
end
let(:gitlab_version) { Gitlab::VERSION }
+ let(:backup_id) { "1546300800_2019_01_01_#{gitlab_version}" }
+
let(:backup_information) do
{
backup_created_at: Time.zone.parse('2019-01-01'),
@@ -948,8 +950,8 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
Rake.application.rake_require 'tasks/cache'
allow(Gitlab::BackupLogger).to receive(:info)
- allow(task1).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'task1.tar.gz'))
- allow(task2).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'task2.tar.gz'))
+ allow(task1).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'task1.tar.gz'), backup_id)
+ allow(task2).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'task2.tar.gz'), backup_id)
allow(YAML).to receive(:safe_load_file).with(File.join(Gitlab.config.backup.path, 'backup_information.yml'),
permitted_classes: described_class::YAML_PERMITTED_CLASSES)
.and_return(backup_information)
@@ -1013,7 +1015,8 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
end
context 'when BACKUP variable is set to a correct file' do
- let(:tar_cmdline) { %w{tar -xf 1451606400_2016_01_01_1.2.3_gitlab_backup.tar} }
+ let(:tar_cmdline) { %w[tar -xf 1451606400_2016_01_01_1.2.3_gitlab_backup.tar] }
+ let(:backup_id) { "1451606400_2016_01_01_1.2.3" }
before do
allow(Gitlab::BackupLogger).to receive(:info)
diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/repositories_spec.rb
index 1f3818de4a0..ad5fb8ea84e 100644
--- a/spec/lib/backup/repositories_spec.rb
+++ b/spec/lib/backup/repositories_spec.rb
@@ -85,7 +85,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
end
describe 'storages' do
- let(:storages) { %w{default} }
+ let(:storages) { %w[default] }
let_it_be(:project) { create(:project_with_design, :repository) }
@@ -215,9 +215,9 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
let_it_be(:project_snippet) { create(:project_snippet, :repository, project: project, author: project.first_owner) }
it 'calls enqueue for each repository type', :aggregate_failures do
- subject.restore(destination)
+ subject.restore(destination, backup_id)
- expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: %w[default])
+ expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: %w[default], backup_id: backup_id)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
@@ -231,7 +231,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
pool_repository = create(:pool_repository, :failed)
pool_repository.delete_object_pool
- subject.restore(destination)
+ subject.restore(destination, backup_id)
pool_repository.reload
expect(pool_repository).not_to be_failed
@@ -242,7 +242,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
pool_repository = create(:pool_repository, state: :obsolete)
pool_repository.update_column(:source_project_id, nil)
- subject.restore(destination)
+ subject.restore(destination, backup_id)
pool_repository.reload
expect(pool_repository).to be_obsolete
@@ -256,14 +256,14 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
end
it 'shows the appropriate error' do
- subject.restore(destination)
+ subject.restore(destination, backup_id)
expect(progress).to have_received(:puts).with("Snippet #{personal_snippet.full_path} can't be restored: Repository has more than one branch")
expect(progress).to have_received(:puts).with("Snippet #{project_snippet.full_path} can't be restored: Repository has more than one branch")
end
it 'removes the snippets from the DB' do
- expect { subject.restore(destination) }.to change(PersonalSnippet, :count).by(-1)
+ expect { subject.restore(destination, backup_id) }.to change(PersonalSnippet, :count).by(-1)
.and change(ProjectSnippet, :count).by(-1)
.and change(SnippetRepository, :count).by(-2)
end
@@ -273,14 +273,14 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
shard_name = personal_snippet.repository.shard
path = personal_snippet.disk_path + '.git'
- subject.restore(destination)
+ subject.restore(destination, backup_id)
expect(gitlab_shell.repository_exists?(shard_name, path)).to eq false
end
end
context 'storages' do
- let(:storages) { %w{default} }
+ let(:storages) { %w[default] }
before do
stub_storage_settings('test_second_storage' => {
@@ -296,9 +296,9 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
excluded_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
excluded_personal_snippet.track_snippet_repository('test_second_storage')
- subject.restore(destination)
+ subject.restore(destination, backup_id)
- expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: %w[default])
+ expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: %w[default], backup_id: backup_id)
expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET)
@@ -318,9 +318,9 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
excluded_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
- subject.restore(destination)
+ subject.restore(destination, backup_id)
- expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: nil)
+ expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: nil, backup_id: backup_id)
expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET)
@@ -339,9 +339,9 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
excluded_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
- subject.restore(destination)
+ subject.restore(destination, backup_id)
- expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: nil)
+ expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: nil, backup_id: backup_id)
expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET)
@@ -363,9 +363,9 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
included_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
- subject.restore(destination)
+ subject.restore(destination, backup_id)
- expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: %w[default])
+ expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: %w[default], backup_id: backup_id)
expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(included_personal_snippet, Gitlab::GlRepository::SNIPPET)
@@ -383,9 +383,9 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
included_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
- subject.restore(destination)
+ subject.restore(destination, backup_id)
- expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: %w[default])
+ expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: %w[default], backup_id: backup_id)
expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(included_personal_snippet, Gitlab::GlRepository::SNIPPET)
diff --git a/spec/lib/backup/task_spec.rb b/spec/lib/backup/task_spec.rb
index 1de99729512..370d9e4a64f 100644
--- a/spec/lib/backup/task_spec.rb
+++ b/spec/lib/backup/task_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Backup::Task do
describe '#restore' do
it 'must be implemented by the subclass' do
- expect { subject.restore('some/path') }.to raise_error(NotImplementedError)
+ expect { subject.restore('some/path', 'backup_id') }.to raise_error(NotImplementedError)
end
end
end
diff --git a/spec/lib/banzai/filter/asset_proxy_filter_spec.rb b/spec/lib/banzai/filter/asset_proxy_filter_spec.rb
index 7a34bf13c8f..baa22e08971 100644
--- a/spec/lib/banzai/filter/asset_proxy_filter_spec.rb
+++ b/spec/lib/banzai/filter/asset_proxy_filter_spec.rb
@@ -28,14 +28,14 @@ RSpec.describe Banzai::Filter::AssetProxyFilter, feature_category: :team_plannin
stub_application_setting(asset_proxy_enabled: true)
stub_application_setting(asset_proxy_secret_key: 'shared-secret')
stub_application_setting(asset_proxy_url: 'https://assets.example.com')
- stub_application_setting(asset_proxy_allowlist: %w(gitlab.com *.mydomain.com))
+ stub_application_setting(asset_proxy_allowlist: %w[gitlab.com *.mydomain.com])
described_class.initialize_settings
expect(Gitlab.config.asset_proxy.enabled).to be_truthy
expect(Gitlab.config.asset_proxy.secret_key).to eq 'shared-secret'
expect(Gitlab.config.asset_proxy.url).to eq 'https://assets.example.com'
- expect(Gitlab.config.asset_proxy.allowlist).to eq %w(gitlab.com *.mydomain.com)
+ expect(Gitlab.config.asset_proxy.allowlist).to eq %w[gitlab.com *.mydomain.com]
expect(Gitlab.config.asset_proxy.domain_regexp).to eq(/^(gitlab\.com|.*?\.mydomain\.com)$/i)
end
@@ -52,12 +52,12 @@ RSpec.describe Banzai::Filter::AssetProxyFilter, feature_category: :team_plannin
it 'supports deprecated whitelist settings' do
stub_application_setting(asset_proxy_enabled: true)
- stub_application_setting(asset_proxy_whitelist: %w(foo.com bar.com))
+ stub_application_setting(asset_proxy_whitelist: %w[foo.com bar.com])
stub_application_setting(asset_proxy_allowlist: [])
described_class.initialize_settings
- expect(Gitlab.config.asset_proxy.allowlist).to eq %w(foo.com bar.com)
+ expect(Gitlab.config.asset_proxy.allowlist).to eq %w[foo.com bar.com]
end
end
@@ -66,7 +66,7 @@ RSpec.describe Banzai::Filter::AssetProxyFilter, feature_category: :team_plannin
stub_asset_proxy_setting(enabled: true)
stub_asset_proxy_setting(secret_key: 'shared-secret')
stub_asset_proxy_setting(url: 'https://assets.example.com')
- stub_asset_proxy_setting(allowlist: %W(gitlab.com *.mydomain.com #{Gitlab.config.gitlab.host}))
+ stub_asset_proxy_setting(allowlist: %W[gitlab.com *.mydomain.com #{Gitlab.config.gitlab.host}])
stub_asset_proxy_setting(domain_regexp: described_class.compile_allowlist(Gitlab.config.asset_proxy.allowlist))
@context = described_class.transform_context({})
end
diff --git a/spec/lib/banzai/filter/autolink_filter_spec.rb b/spec/lib/banzai/filter/autolink_filter_spec.rb
index fe642855f3b..89a9099a1f4 100644
--- a/spec/lib/banzai/filter/autolink_filter_spec.rb
+++ b/spec/lib/banzai/filter/autolink_filter_spec.rb
@@ -178,7 +178,7 @@ RSpec.describe Banzai::Filter::AutolinkFilter, feature_category: :team_planning
it 'does not double-encode HTML entities' do
encoded_link = "#{link}?foo=bar&amp;baz=quux"
- expected_encoded_link = %{<a href="#{encoded_link}">#{encoded_link}</a>}
+ expected_encoded_link = %(<a href="#{encoded_link}">#{encoded_link}</a>)
actual = unescape(filter(encoded_link).to_html)
expect(actual).to eq(Rinku.auto_link(encoded_link))
diff --git a/spec/lib/banzai/filter/broadcast_message_sanitization_filter_spec.rb b/spec/lib/banzai/filter/broadcast_message_sanitization_filter_spec.rb
index 3b054862a26..69afddf2406 100644
--- a/spec/lib/banzai/filter/broadcast_message_sanitization_filter_spec.rb
+++ b/spec/lib/banzai/filter/broadcast_message_sanitization_filter_spec.rb
@@ -14,13 +14,13 @@ RSpec.describe Banzai::Filter::BroadcastMessageSanitizationFilter, feature_categ
subject { filter(exp).to_html }
context 'allows `a` elements' do
- let(:exp) { %q{<a href="/">Link</a>} }
+ let(:exp) { %q(<a href="/">Link</a>) }
it { is_expected.to eq(exp) }
end
context 'allows `br` elements' do
- let(:exp) { %q{Hello<br>World} }
+ let(:exp) { %q(Hello<br>World) }
it { is_expected.to eq(exp) }
end
@@ -29,21 +29,21 @@ RSpec.describe Banzai::Filter::BroadcastMessageSanitizationFilter, feature_categ
let(:allowed_style) { 'color: red; border: blue; background: green; padding: 10px; margin: 10px; text-decoration: underline;' }
context 'allows specific properties' do
- let(:exp) { %{<a href="#" style="#{allowed_style}">Stylish Link</a>} }
+ let(:exp) { %(<a href="#" style="#{allowed_style}">Stylish Link</a>) }
it { is_expected.to eq(exp) }
end
it 'disallows other properties in `style` attribute on `a` elements' do
style = [allowed_style, 'position: fixed'].join(';')
- doc = filter(%{<a href="#" style="#{style}">Stylish Link</a>})
+ doc = filter(%(<a href="#" style="#{style}">Stylish Link</a>))
expect(doc.at_css('a')['style']).to eq(allowed_style)
end
end
context 'allows `class` on `a` elements' do
- let(:exp) { %q{<a href="#" class="btn">Button Link</a>} }
+ let(:exp) { %q(<a href="#" class="btn">Button Link</a>) }
it { is_expected.to eq(exp) }
end
diff --git a/spec/lib/banzai/filter/image_link_filter_spec.rb b/spec/lib/banzai/filter/image_link_filter_spec.rb
index 6c9e798790f..75a4c550ec9 100644
--- a/spec/lib/banzai/filter/image_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/image_link_filter_spec.rb
@@ -9,8 +9,8 @@ RSpec.describe Banzai::Filter::ImageLinkFilter, feature_category: :team_planning
let(:context) { {} }
def image(path, alt: nil, data_src: nil)
- alt_tag = alt ? %{alt="#{alt}"} : ""
- data_src_tag = data_src ? %{data-src="#{data_src}"} : ""
+ alt_tag = alt ? %(alt="#{alt}") : ""
+ data_src_tag = data_src ? %(data-src="#{data_src}") : ""
%(<img src="#{path}" #{alt_tag} #{data_src_tag} />)
end
diff --git a/spec/lib/banzai/filter/inline_observability_filter_spec.rb b/spec/lib/banzai/filter/inline_observability_filter_spec.rb
deleted file mode 100644
index 81896faced8..00000000000
--- a/spec/lib/banzai/filter/inline_observability_filter_spec.rb
+++ /dev/null
@@ -1,101 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Banzai::Filter::InlineObservabilityFilter, feature_category: :metrics do
- include FilterSpecHelper
-
- let(:input) { %(<a href="#{url}">example</a>) }
- let(:doc) { filter(input) }
-
- before do
- allow(Gitlab::Observability).to receive(:embeddable_url).and_return('embeddable-url')
- stub_config_setting(url: "https://www.gitlab.com")
- end
-
- describe '#filter?' do
- context 'when the document contains a valid observability link' do
- let(:url) { "https://www.gitlab.com/groups/some-group/-/observability/test" }
-
- it 'leaves the original link unchanged' do
- expect(doc.at_css('a').to_s).to eq(input)
- end
-
- it 'appends an observability charts placeholder' do
- node = doc.at_css('.js-render-observability')
-
- expect(node).to be_present
- expect(node.attribute('data-frame-url').to_s).to eq('embeddable-url')
- expect(Gitlab::Observability).to have_received(:embeddable_url).with(url).once
- end
- end
-
- context 'with duplicate URLs' do
- let(:url) { "https://www.gitlab.com/groups/some-group/-/observability/test" }
- let(:input) { %(<a href="#{url}">example1</a><a href="#{url}">example2</a>) }
-
- where(:embeddable_url) do
- [
- 'not-nil',
- nil
- ]
- end
-
- with_them do
- it 'calls Gitlab::Observability.embeddable_url only once' do
- allow(Gitlab::Observability).to receive(:embeddable_url).with(url).and_return(embeddable_url)
-
- filter(input)
-
- expect(Gitlab::Observability).to have_received(:embeddable_url).with(url).once
- end
- end
- end
-
- shared_examples 'does not embed observabilty' do
- it 'leaves the original link unchanged' do
- expect(doc.at_css('a').to_s).to eq(input)
- end
-
- it 'does not append an observability charts placeholder' do
- node = doc.at_css('.js-render-observability')
-
- expect(node).not_to be_present
- end
- end
-
- context 'when the embeddable url is nil' do
- let(:url) { "https://www.gitlab.com/groups/some-group/-/something-else/test" }
-
- before do
- allow(Gitlab::Observability).to receive(:embeddable_url).and_return(nil)
- end
-
- it_behaves_like 'does not embed observabilty'
- end
-
- context 'when the document has an unrecognised link' do
- let(:url) { "https://www.gitlab.com/groups/some-group/-/something-else/test" }
-
- it_behaves_like 'does not embed observabilty'
-
- it 'does not build the embeddable url' do
- expect(Gitlab::Observability).not_to have_received(:embeddable_url)
- end
- end
-
- context 'when feature flag is disabled' do
- let(:url) { "https://www.gitlab.com/groups/some-group/-/observability/test" }
-
- before do
- stub_feature_flags(observability_group_tab: false)
- end
-
- it_behaves_like 'does not embed observabilty'
-
- it 'does not build the embeddable url' do
- expect(Gitlab::Observability).not_to have_received(:embeddable_url)
- end
- end
- end
-end
diff --git a/spec/lib/banzai/filter/math_filter_spec.rb b/spec/lib/banzai/filter/math_filter_spec.rb
index e4ebebc0fde..3fa0f9028e8 100644
--- a/spec/lib/banzai/filter/math_filter_spec.rb
+++ b/spec/lib/banzai/filter/math_filter_spec.rb
@@ -207,12 +207,21 @@ RSpec.describe Banzai::Filter::MathFilter, feature_category: :team_planning do
expect(doc.search('[data-math-style="display"]').count).to eq(1)
end
- it 'limits how many elements can be marked as math' do
- stub_const('Banzai::Filter::MathFilter::RENDER_NODES_LIMIT', 2)
+ context 'when limiting how many elements can be marked as math' do
+ subject { pipeline_filter('$`2+2`$ + $3+3$ + $$4+4$$') }
- doc = pipeline_filter('$`2+2`$ + $3+3$ + $$4+4$$')
+ it 'enforces limits by default' do
+ stub_const('Banzai::Filter::MathFilter::RENDER_NODES_LIMIT', 2)
+
+ expect(subject.search('.js-render-math').count).to eq(2)
+ end
- expect(doc.search('.js-render-math').count).to eq(2)
+ it 'does not limit when math_rendering_limits_enabled is false' do
+ stub_application_setting(math_rendering_limits_enabled: false)
+ stub_const('Banzai::Filter::MathFilter::RENDER_NODES_LIMIT', 2)
+
+ expect(subject.search('.js-render-math').count).to eq(3)
+ end
end
it 'protects against malicious backtracking' do
diff --git a/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb b/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb
index 6ebf6c3cd1d..9723e9b39f1 100644
--- a/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Banzai::Filter::References::AlertReferenceFilter, feature_categor
expect { described_class.call('') }.to raise_error(ArgumentError, /:project/)
end
- %w(pre code a style).each do |elem|
+ %w[pre code a style].each do |elem|
it "ignores valid references contained inside '#{elem}' element" do
exp = act = "<#{elem}>Alert #{reference}</#{elem}>"
@@ -47,7 +47,7 @@ RSpec.describe Banzai::Filter::References::AlertReferenceFilter, feature_categor
end
it 'escapes the title attribute' do
- allow(alert).to receive(:title).and_return(%{"></a>whatever<a title="})
+ allow(alert).to receive(:title).and_return(%("></a>whatever<a title="))
doc = reference_filter("Alert #{reference}")
expect(doc.text).to eq "Alert #{reference}"
@@ -79,7 +79,7 @@ RSpec.describe Banzai::Filter::References::AlertReferenceFilter, feature_categor
doc = reference_filter("Alert #{reference}", only_path: true)
link = doc.css('a').first.attr('href')
- expect(link).not_to match %r(https?://)
+ expect(link).not_to match %r{https?://}
expect(link).to eq urls.details_project_alert_management_url(project, alert.iid, only_path: true)
end
end
diff --git a/spec/lib/banzai/filter/references/commit_range_reference_filter_spec.rb b/spec/lib/banzai/filter/references/commit_range_reference_filter_spec.rb
index 594a24fa279..9300945a9af 100644
--- a/spec/lib/banzai/filter/references/commit_range_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/commit_range_reference_filter_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Banzai::Filter::References::CommitRangeReferenceFilter, feature_c
expect { described_class.call('') }.to raise_error(ArgumentError, /:project/)
end
- %w(pre code a style).each do |elem|
+ %w[pre code a style].each do |elem|
it "ignores valid references contained inside '#{elem}' element" do
exp = act = "<#{elem}>Commit Range #{range.to_reference}</#{elem}>"
expect(reference_filter(act).to_html).to eq exp
@@ -96,7 +96,7 @@ RSpec.describe Banzai::Filter::References::CommitRangeReferenceFilter, feature_c
doc = reference_filter("See #{reference}", only_path: true)
link = doc.css('a').first.attr('href')
- expect(link).not_to match %r(https?://)
+ expect(link).not_to match %r{https?://}
expect(link).to eq urls.project_compare_url(project, from: commit1.id, to: commit2.id, only_path: true)
end
end
diff --git a/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb b/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb
index 73e3bf41ee9..6e0f9eda0e2 100644
--- a/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Banzai::Filter::References::CommitReferenceFilter, feature_catego
expect { described_class.call('') }.to raise_error(ArgumentError, /:project/)
end
- %w(pre code a style).each do |elem|
+ %w[pre code a style].each do |elem|
it "ignores valid references contained inside '#{elem}' element" do
exp = act = "<#{elem}>Commit #{commit.id}</#{elem}>"
expect(reference_filter(act).to_html).to eq exp
@@ -61,7 +61,7 @@ RSpec.describe Banzai::Filter::References::CommitReferenceFilter, feature_catego
it 'escapes the title attribute' do
allow_next_instance_of(Commit) do |instance|
- allow(instance).to receive(:title).and_return(%{"></a>whatever<a title="})
+ allow(instance).to receive(:title).and_return(%("></a>whatever<a title="))
end
doc = reference_filter("See #{reference}")
@@ -93,7 +93,7 @@ RSpec.describe Banzai::Filter::References::CommitReferenceFilter, feature_catego
doc = reference_filter("See #{reference}", only_path: true)
link = doc.css('a').first.attr('href')
- expect(link).not_to match %r(https?://)
+ expect(link).not_to match %r{https?://}
expect(link).to eq urls.project_commit_url(project, reference, only_path: true)
end
diff --git a/spec/lib/banzai/filter/references/design_reference_filter_spec.rb b/spec/lib/banzai/filter/references/design_reference_filter_spec.rb
index d97067de155..fd03d7c0d27 100644
--- a/spec/lib/banzai/filter/references/design_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/design_reference_filter_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe Banzai::Filter::References::DesignReferenceFilter, feature_catego
end
end
- %w(pre code a style).each do |elem|
+ %w[pre code a style].each do |elem|
context "wrapped in a <#{elem}/>" do
let(:input_text) { "<#{elem}>Design #{url_for_design(design)}</#{elem}>" }
diff --git a/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
index 86fb7d3964d..823f006c98a 100644
--- a/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Banzai::Filter::References::ExternalIssueReferenceFilter, feature
expect { described_class.call('') }.to raise_error(ArgumentError, /:project/)
end
- %w(pre code a style).each do |elem|
+ %w[pre code a style].each do |elem|
it "ignores valid references contained inside '#{elem}' element" do
exp = act = "<#{elem}>Issue #{reference}</#{elem}>"
@@ -59,7 +59,7 @@ RSpec.describe Banzai::Filter::References::ExternalIssueReferenceFilter, feature
it 'escapes the title attribute' do
allow(project.external_issue_tracker).to receive(:title)
- .and_return(%{"></a>whatever<a title="})
+ .and_return(%("></a>whatever<a title="))
doc = filter("Issue #{reference}")
expect(doc.text).to eq "Issue #{reference}"
diff --git a/spec/lib/banzai/filter/references/feature_flag_reference_filter_spec.rb b/spec/lib/banzai/filter/references/feature_flag_reference_filter_spec.rb
index c2f4bf6caa5..02be2296d5c 100644
--- a/spec/lib/banzai/filter/references/feature_flag_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/feature_flag_reference_filter_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Banzai::Filter::References::FeatureFlagReferenceFilter, feature_c
expect { described_class.call('') }.to raise_error(ArgumentError, /:project/)
end
- %w(pre code a style).each do |elem|
+ %w[pre code a style].each do |elem|
it "ignores valid references contained inside '#{elem}' element" do
exp = act = "<#{elem}>Feature Flag #{reference}</#{elem}>"
@@ -47,7 +47,7 @@ RSpec.describe Banzai::Filter::References::FeatureFlagReferenceFilter, feature_c
end
it 'escapes the title attribute' do
- allow(feature_flag).to receive(:name).and_return(%{"></a>whatever<a title="})
+ allow(feature_flag).to receive(:name).and_return(%("></a>whatever<a title="))
doc = reference_filter("Feature Flag #{reference}")
expect(doc.text).to eq "Feature Flag #{reference}"
@@ -79,7 +79,7 @@ RSpec.describe Banzai::Filter::References::FeatureFlagReferenceFilter, feature_c
doc = reference_filter("Feature Flag #{reference}", only_path: true)
link = doc.css('a').first.attr('href')
- expect(link).not_to match %r(https?://)
+ expect(link).not_to match %r{https?://}
expect(link).to eq urls.edit_project_feature_flag_url(project, feature_flag.iid, only_path: true)
end
end
diff --git a/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
index aadd726ac40..d16188e99a3 100644
--- a/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter, feature_categor
expect { described_class.call('') }.to raise_error(ArgumentError, /:project/)
end
- %w(pre code a style).each do |elem|
+ %w[pre code a style].each do |elem|
it "ignores valid references contained inside '#{elem}' element" do
exp = act = "<#{elem}>Issue #{issue.to_reference}</#{elem}>"
expect(reference_filter(act).to_html).to eq exp
@@ -77,7 +77,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter, feature_categor
end
it 'escapes the title attribute' do
- issue.update_attribute(:title, %{"></a>whatever<a title="})
+ issue.update_attribute(:title, %("></a>whatever<a title="))
doc = reference_filter("Issue #{written_reference}")
expect(doc.text).to eq "Issue #{reference}"
@@ -128,7 +128,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter, feature_categor
it 'does not escape the data-original attribute' do
inner_html = 'element <code>node</code> inside'
- doc = reference_filter(%{<a href="#{written_reference}">#{inner_html}</a>})
+ doc = reference_filter(%(<a href="#{written_reference}">#{inner_html}</a>))
expect(doc.children.first.attr('data-original')).to eq inner_html
end
@@ -163,7 +163,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter, feature_categor
doc = reference_filter("Issue #{written_reference}", only_path: true)
link = doc.css('a').first.attr('href')
- expect(link).not_to match %r(https?://)
+ expect(link).not_to match %r{https?://}
expect(link).to eq issue_path
end
@@ -381,7 +381,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter, feature_categor
end
context 'cross-project reference in link href' do
- let(:reference_link) { %{<a href="#{reference}">Reference</a>} }
+ let(:reference_link) { %(<a href="#{reference}">Reference</a>) }
let(:reference) { issue.to_reference(project) }
let(:issue) { create(:issue, project: project2) }
let(:project2) { create(:project, :public, namespace: namespace) }
@@ -412,7 +412,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter, feature_categor
end
context 'cross-project URL in link href' do
- let(:reference_link) { %{<a href="#{reference}">Reference</a>} }
+ let(:reference_link) { %(<a href="#{reference}">Reference</a>) }
let(:reference) { (issue_url + "#note_123").to_s }
let(:issue) { create(:issue, project: project2) }
let(:project2) { create(:project, :public, namespace: namespace) }
@@ -519,7 +519,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter, feature_categor
it 'links to a valid reference for cross-reference in link href' do
reference = (issue_url + "#note_123").to_s
- reference_link = %{<a href="#{reference}">Reference</a>}
+ reference_link = %(<a href="#{reference}">Reference</a>)
doc = reference_filter("See #{reference_link}", context)
@@ -530,7 +530,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter, feature_categor
it 'links to a valid reference for issue reference in the link href' do
reference = issue.to_reference(group)
- reference_link = %{<a href="#{reference}">Reference</a>}
+ reference_link = %(<a href="#{reference}">Reference</a>)
doc = reference_filter("See #{reference_link}", context)
link = doc.css('a').first
diff --git a/spec/lib/banzai/filter/references/label_reference_filter_spec.rb b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
index 91b051d71ec..a4587b70dfa 100644
--- a/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor
expect { described_class.call('') }.to raise_error(ArgumentError, /:project/)
end
- %w(pre code a style).each do |elem|
+ %w[pre code a style].each do |elem|
it "ignores valid references contained inside '#{elem}' element" do
exp = act = "<#{elem}>Label #{reference}</#{elem}>"
expect(reference_filter(act).to_html).to eq exp
@@ -64,14 +64,14 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor
doc = reference_filter("Label #{reference}")
link = doc.css('a').first.attr('href')
- expect(link).to match %r(https?://)
+ expect(link).to match %r{https?://}
end
it 'does not include protocol when :only_path true' do
doc = reference_filter("Label #{reference}", only_path: true)
link = doc.css('a').first.attr('href')
- expect(link).not_to match %r(https?://)
+ expect(link).not_to match %r{https?://}
end
it 'links to issue list when :label_url_method is not present' do
@@ -118,7 +118,7 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor
it 'links with adjacent text' do
doc = reference_filter("Label (#{reference}.)")
- expect(doc.to_html).to match(%r(\(<span.+><a.+><span.+>#{label.name}</span></a></span>\.\)))
+ expect(doc.to_html).to match(%r{\(<span.+><a.+><span.+>#{label.name}</span></a></span>\.\)})
end
it 'ignores invalid label IDs' do
@@ -142,7 +142,7 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor
it 'links with adjacent text' do
doc = reference_filter("Label (#{reference}).")
- expect(doc.to_html).to match(%r(\(<span.+><a.+><span.+>#{label.name}</span></a></span>\)\.))
+ expect(doc.to_html).to match(%r{\(<span.+><a.+><span.+>#{label.name}</span></a></span>\)\.})
end
it 'ignores invalid label names' do
@@ -166,7 +166,7 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor
it 'links with adjacent text' do
doc = reference_filter("Label (#{reference}).")
- expect(doc.to_html).to match(%r(\(<span.+><a.+><span.+>#{label.name}</span></a></span>\)\.))
+ expect(doc.to_html).to match(%r{\(<span.+><a.+><span.+>#{label.name}</span></a></span>\)\.})
end
it 'ignores invalid label names' do
@@ -191,7 +191,7 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor
it 'does not include trailing punctuation', :aggregate_failures do
['.', ', ok?', '...', '?', '!', ': is that ok?'].each do |trailing_punctuation|
doc = filter("Label #{reference}#{trailing_punctuation}")
- expect(doc.to_html).to match(%r(<span.+><a.+><span.+>\?g\.fm&amp;</span></a></span>#{Regexp.escape(trailing_punctuation)}))
+ expect(doc.to_html).to match(%r{<span.+><a.+><span.+>\?g\.fm&amp;</span></a></span>#{Regexp.escape(trailing_punctuation)}})
end
end
@@ -217,7 +217,7 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor
it 'links with adjacent text' do
doc = reference_filter("Label (#{reference}.)")
- expect(doc.to_html).to match(%r(\(<span.+><a.+><span.+>#{label.name}</span></a></span>\.\)))
+ expect(doc.to_html).to match(%r{\(<span.+><a.+><span.+>#{label.name}</span></a></span>\.\)})
end
it 'ignores invalid label names' do
@@ -241,7 +241,7 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor
it 'links with adjacent text' do
doc = reference_filter("Label (#{reference}.)")
- expect(doc.to_html).to match(%r(\(<span.+><a.+><span.+>#{label.name}</span></a></span>\.\)))
+ expect(doc.to_html).to match(%r{\(<span.+><a.+><span.+>#{label.name}</span></a></span>\.\)})
end
it 'ignores invalid label names' do
@@ -265,7 +265,7 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor
it 'links with adjacent text' do
doc = reference_filter("Label (#{reference}.)")
- expect(doc.to_html).to match(%r(\(<span.+><a.+><span.+>g\.fm &amp; references\?</span></a></span>\.\)))
+ expect(doc.to_html).to match(%r{\(<span.+><a.+><span.+>g\.fm &amp; references\?</span></a></span>\.\)})
end
it 'ignores invalid label names' do
@@ -344,7 +344,7 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor
end
describe 'referencing a label in a link href' do
- let(:reference) { %{<a href="#{label.to_reference}">Label</a>} }
+ let(:reference) { %(<a href="#{label.to_reference}">Label</a>) }
it 'links to a valid reference' do
doc = reference_filter("See #{reference}")
@@ -355,7 +355,7 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor
it 'links with adjacent text' do
doc = reference_filter("Label (#{reference}.)")
- expect(doc.to_html).to match(%r(\(<span.+><a.+>Label</a></span>\.\)))
+ expect(doc.to_html).to match(%r{\(<span.+><a.+>Label</a></span>\.\)})
end
it 'includes a data-project attribute' do
@@ -393,7 +393,7 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor
it 'links with adjacent text' do
doc = reference_filter("Label (#{reference}.)")
- expect(doc.to_html).to match(%r(\(<span.+><a.+><span.+>#{group_label.name}</span></a></span>\.\)))
+ expect(doc.to_html).to match(%r{\(<span.+><a.+><span.+>#{group_label.name}</span></a></span>\.\)})
end
it 'ignores invalid label names' do
@@ -416,7 +416,7 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor
it 'links with adjacent text' do
doc = reference_filter("Label (#{reference}.)")
- expect(doc.to_html).to match(%r(\(<span.+><a.+><span.+>#{group_label.name}</span></a></span>\.\)))
+ expect(doc.to_html).to match(%r{\(<span.+><a.+><span.+>#{group_label.name}</span></a></span>\.\)})
end
it 'ignores invalid label names' do
diff --git a/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
index 156455221cf..ccc8478c7d8 100644
--- a/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Banzai::Filter::References::MergeRequestReferenceFilter, feature_
expect { described_class.call('') }.to raise_error(ArgumentError, /:project/)
end
- %w(pre code a style).each do |elem|
+ %w[pre code a style].each do |elem|
it "ignores valid references contained inside '#{elem}' element" do
exp = act = "<#{elem}>Merge #{merge.to_reference}</#{elem}>"
expect(reference_filter(act).to_html).to eq exp
@@ -83,7 +83,7 @@ RSpec.describe Banzai::Filter::References::MergeRequestReferenceFilter, feature_
end
it 'escapes the title attribute' do
- merge.update_attribute(:title, %{"></a>whatever<a title="})
+ merge.update_attribute(:title, %("></a>whatever<a title="))
doc = reference_filter("Merge #{reference}")
expect(doc.text).to eq "Merge #{reference}"
@@ -141,7 +141,7 @@ RSpec.describe Banzai::Filter::References::MergeRequestReferenceFilter, feature_
doc = reference_filter("Merge #{reference}", only_path: true)
link = doc.css('a').first.attr('href')
- expect(link).not_to match %r(https?://)
+ expect(link).not_to match %r{https?://}
expect(link).to eq urls.project_merge_request_url(project, merge, only_path: true)
end
end
diff --git a/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb b/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb
index 7caa6efff66..1fa62d70b72 100644
--- a/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter, feature_cat
end
shared_examples 'reference parsing' do
- %w(pre code a style).each do |elem|
+ %w[pre code a style].each do |elem|
it "ignores valid references contained inside '#{elem}' element" do
exp = act = "<#{elem}>milestone #{reference}</#{elem}>"
expect(reference_filter(act).to_html).to eq exp
@@ -49,7 +49,7 @@ RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter, feature_cat
doc = reference_filter("Milestone #{reference}", only_path: true)
link = doc.css('a').first.attr('href')
- expect(link).not_to match %r(https?://)
+ expect(link).not_to match %r{https?://}
expect(link).to eq urls.milestone_path(milestone)
end
end
@@ -63,7 +63,7 @@ RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter, feature_cat
it 'links with adjacent text' do
doc = reference_filter("Milestone (#{reference}.)")
- expect(doc.to_html).to match(%r(\(<a.+>#{milestone.reference_link_text}</a>\.\)))
+ expect(doc.to_html).to match(%r{\(<a.+>#{milestone.reference_link_text}</a>\.\)})
end
it 'ignores invalid milestone IIDs' do
@@ -89,12 +89,12 @@ RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter, feature_cat
it 'links with adjacent text' do
doc = reference_filter("Milestone (#{reference}.)")
- expect(doc.to_html).to match(%r(\(<a.+>#{milestone.reference_link_text}</a>\.\)))
+ expect(doc.to_html).to match(%r{\(<a.+>#{milestone.reference_link_text}</a>\.\)})
end
it 'links with adjacent html tags' do
doc = reference_filter("Milestone <p>#{reference}</p>.")
- expect(doc.to_html).to match(%r(<p><a.+>#{milestone.reference_link_text}</a></p>))
+ expect(doc.to_html).to match(%r{<p><a.+>#{milestone.reference_link_text}</a></p>})
end
it 'ignores invalid milestone names' do
@@ -120,7 +120,7 @@ RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter, feature_cat
it 'links with adjacent text' do
doc = reference_filter("Milestone (#{reference}.)")
- expect(doc.to_html).to match(%r(\(<a.+>#{milestone.reference_link_text}</a>\.\)))
+ expect(doc.to_html).to match(%r{\(<a.+>#{milestone.reference_link_text}</a>\.\)})
end
it 'ignores invalid milestone names' do
@@ -132,7 +132,7 @@ RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter, feature_cat
shared_examples 'referencing a milestone in a link href' do
let(:unquoted_reference) { "#{Milestone.reference_prefix}#{milestone.name}" }
- let(:link_reference) { %{<a href="#{unquoted_reference}">Milestone</a>} }
+ let(:link_reference) { %(<a href="#{unquoted_reference}">Milestone</a>) }
before do
milestone.update!(name: 'gfm')
@@ -146,7 +146,7 @@ RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter, feature_cat
it 'links with adjacent text' do
doc = reference_filter("Milestone (#{link_reference}.)")
- expect(doc.to_html).to match(%r(\(<a.+>Milestone</a>\.\)))
+ expect(doc.to_html).to match(%r{\(<a.+>Milestone</a>\.\)})
end
it 'includes a data-project attribute' do
@@ -169,7 +169,7 @@ RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter, feature_cat
shared_examples 'linking to a milestone as the entire link' do
let(:unquoted_reference) { "#{Milestone.reference_prefix}#{milestone.name}" }
let(:link) { urls.milestone_url(milestone) }
- let(:link_reference) { %{<a href="#{link}">#{link}</a>} }
+ let(:link_reference) { %(<a href="#{link}">#{link}</a>) }
it 'replaces the link text with the milestone reference' do
doc = reference_filter("See #{link}")
@@ -220,7 +220,7 @@ RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter, feature_cat
it 'escapes the name attribute' do
allow_next_instance_of(Milestone) do |instance|
- allow(instance).to receive(:title).and_return(%{"></a>whatever<a title="})
+ allow(instance).to receive(:title).and_return(%("></a>whatever<a title="))
end
doc = reference_filter("See #{reference}")
@@ -257,7 +257,7 @@ RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter, feature_cat
it 'escapes the name attribute' do
allow_next_instance_of(Milestone) do |instance|
- allow(instance).to receive(:title).and_return(%{"></a>whatever<a title="})
+ allow(instance).to receive(:title).and_return(%("></a>whatever<a title="))
end
doc = reference_filter("See #{reference}")
@@ -294,7 +294,7 @@ RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter, feature_cat
it 'escapes the name attribute' do
allow_next_instance_of(Milestone) do |instance|
- allow(instance).to receive(:title).and_return(%{"></a>whatever<a title="})
+ allow(instance).to receive(:title).and_return(%("></a>whatever<a title="))
end
doc = reference_filter("See #{reference}")
diff --git a/spec/lib/banzai/filter/references/project_reference_filter_spec.rb b/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
index b6d6ff2309a..9433862ac8a 100644
--- a/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe Banzai::Filter::References::ProjectReferenceFilter, feature_categ
expect(doc.css('a').first.attr('href')).to eq urls.project_url(subject)
end
- %w(pre code a style).each do |elem|
+ %w[pre code a style].each do |elem|
it "ignores valid references contained inside '#{elem}' element" do
exp = act = "<#{elem}>Hey #{CGI.escapeHTML(reference)}</#{elem}>"
expect(reference_filter(act).to_html).to eq exp
diff --git a/spec/lib/banzai/filter/references/reference_filter_spec.rb b/spec/lib/banzai/filter/references/reference_filter_spec.rb
index b55b8fd41fa..a2aa4a33ed7 100644
--- a/spec/lib/banzai/filter/references/reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/reference_filter_spec.rb
@@ -34,6 +34,13 @@ RSpec.describe Banzai::Filter::References::ReferenceFilter, feature_category: :t
expect { |b| filter.each_node(&b) }.not_to yield_control
end
+
+ it 'skips text nodes in inline diff elements' do
+ document = Nokogiri::HTML.fragment('<span class="idiff">foo</span>')
+ filter = described_class.new(document, project: project)
+
+ expect { |b| filter.each_node(&b) }.not_to yield_control
+ end
end
describe '#nodes' do
diff --git a/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb b/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb
index 32d1cb095d3..b196d85ba8a 100644
--- a/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Banzai::Filter::References::SnippetReferenceFilter, feature_categ
expect { described_class.call('') }.to raise_error(ArgumentError, /:project/)
end
- %w(pre code a style).each do |elem|
+ %w[pre code a style].each do |elem|
it "ignores valid references contained inside '#{elem}' element" do
exp = act = "<#{elem}>Snippet #{reference}</#{elem}>"
expect(reference_filter(act).to_html).to eq exp
@@ -45,7 +45,7 @@ RSpec.describe Banzai::Filter::References::SnippetReferenceFilter, feature_categ
end
it 'escapes the title attribute' do
- snippet.update_attribute(:title, %{"></a>whatever<a title="})
+ snippet.update_attribute(:title, %("></a>whatever<a title="))
doc = reference_filter("Snippet #{reference}")
expect(doc.text).to eq "Snippet #{reference}"
@@ -76,7 +76,7 @@ RSpec.describe Banzai::Filter::References::SnippetReferenceFilter, feature_categ
doc = reference_filter("Snippet #{reference}", only_path: true)
link = doc.css('a').first.attr('href')
- expect(link).not_to match %r(https?://)
+ expect(link).not_to match %r{https?://}
expect(link).to eq urls.project_snippet_url(project, snippet, only_path: true)
end
end
diff --git a/spec/lib/banzai/filter/references/user_reference_filter_spec.rb b/spec/lib/banzai/filter/references/user_reference_filter_spec.rb
index 276701a2984..7a11ff3ac3d 100644
--- a/spec/lib/banzai/filter/references/user_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/user_reference_filter_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe Banzai::Filter::References::UserReferenceFilter, feature_category
expect(reference_filter(act).to_html).to eq(exp)
end
- %w(pre code a style).each do |elem|
+ %w[pre code a style].each do |elem|
it "ignores valid references contained inside '#{elem}' element" do
exp = act = "<#{elem}>Hey #{reference}</#{elem}>"
expect(reference_filter(act).to_html).to eq exp
diff --git a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
index 4aacebe6024..5fe9433cef3 100644
--- a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
+++ b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
@@ -87,13 +87,13 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter, feature_category: :team_pl
end
context "languages that should be passed through" do
- %w(math mermaid plantuml suggestion).each do |lang|
+ %w[math mermaid plantuml suggestion].each do |lang|
context "when #{lang} is specified" do
it "highlights as plaintext but with the correct language attribute and class" do
- result = filter(%{<pre data-canonical-lang="#{lang}"><code>This is a test</code></pre>})
+ result = filter(%(<pre data-canonical-lang="#{lang}"><code>This is a test</code></pre>))
copy_code_btn = '<copy-code></copy-code>' unless lang == 'suggestion'
- expect(result.to_html.delete("\n")).to eq(%{<div class="gl-relative markdown-code-block js-markdown-code"><pre data-canonical-lang="#{lang}" class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>#{copy_code_btn}</div>})
+ expect(result.to_html.delete("\n")).to eq(%(<div class="gl-relative markdown-code-block js-markdown-code"><pre data-canonical-lang="#{lang}" class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>#{copy_code_btn}</div>))
end
include_examples "XSS prevention", lang
diff --git a/spec/lib/banzai/filter_array_spec.rb b/spec/lib/banzai/filter_array_spec.rb
index bb457568bee..1c401fdaf8a 100644
--- a/spec/lib/banzai/filter_array_spec.rb
+++ b/spec/lib/banzai/filter_array_spec.rb
@@ -5,37 +5,37 @@ require 'fast_spec_helper'
RSpec.describe Banzai::FilterArray, feature_category: :team_planning do
describe '#insert_after' do
it 'inserts an element after a provided element' do
- filters = described_class.new(%w(a b c))
+ filters = described_class.new(%w[a b c])
filters.insert_after('b', '1')
- expect(filters).to eq %w(a b 1 c)
+ expect(filters).to eq %w[a b 1 c]
end
it 'inserts an element at the end when the provided element does not exist' do
- filters = described_class.new(%w(a b c))
+ filters = described_class.new(%w[a b c])
filters.insert_after('d', '1')
- expect(filters).to eq %w(a b c 1)
+ expect(filters).to eq %w[a b c 1]
end
end
describe '#insert_before' do
it 'inserts an element before a provided element' do
- filters = described_class.new(%w(a b c))
+ filters = described_class.new(%w[a b c])
filters.insert_before('b', '1')
- expect(filters).to eq %w(a 1 b c)
+ expect(filters).to eq %w[a 1 b c]
end
it 'inserts an element at the beginning when the provided element does not exist' do
- filters = described_class.new(%w(a b c))
+ filters = described_class.new(%w[a b c])
filters.insert_before('d', '1')
- expect(filters).to eq %w(1 a b c)
+ expect(filters).to eq %w[1 a b c]
end
end
end
diff --git a/spec/lib/banzai/pipeline/description_pipeline_spec.rb b/spec/lib/banzai/pipeline/description_pipeline_spec.rb
index fa25612a06e..2d831d7f7e0 100644
--- a/spec/lib/banzai/pipeline/description_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/description_pipeline_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe Banzai::Pipeline::DescriptionPipeline, feature_category: :team_pl
expect(doc.strip).to eq 'Description'
end
- %w(pre code img ol ul li).each do |elem|
+ %w[pre code img ol ul li].each do |elem|
it "removes '#{elem}' elements" do
act = "<#{elem}>Description</#{elem}>"
@@ -35,7 +35,7 @@ RSpec.describe Banzai::Pipeline::DescriptionPipeline, feature_category: :team_pl
end
end
- %w(b i strong em a ins del sup sub).each do |elem|
+ %w[b i strong em a ins del sup sub].each do |elem|
it "still allows '#{elem}' elements" do
exp = act = "<#{elem}>Description</#{elem}>"
diff --git a/spec/lib/banzai/pipeline/full_pipeline_spec.rb b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
index 6ef03b58f67..e00d0b6c279 100644
--- a/spec/lib/banzai/pipeline/full_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe Banzai::Pipeline::FullPipeline, feature_category: :team_planning
it 'escapes the data-original attribute on a reference' do
markdown = %{[">bad things](#{issue.to_reference})}
result = described_class.to_html(markdown, project: project)
- expect(result).to include(%{data-original='\"&amp;gt;bad things'})
+ expect(result).to include(%(data-original='\"&amp;gt;bad things'))
end
end
diff --git a/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb b/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
index 58d6b9b9a2c..a845e4fa7f4 100644
--- a/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
@@ -177,7 +177,7 @@ RSpec.describe Banzai::Pipeline::GfmPipeline, feature_category: :team_planning d
stub_asset_proxy_setting(enabled: true)
stub_asset_proxy_setting(secret_key: 'shared-secret')
stub_asset_proxy_setting(url: 'https://assets.example.com')
- stub_asset_proxy_setting(allowlist: %W(gitlab.com *.mydomain.com #{Gitlab.config.gitlab.host}))
+ stub_asset_proxy_setting(allowlist: %W[gitlab.com *.mydomain.com #{Gitlab.config.gitlab.host}])
stub_asset_proxy_setting(domain_regexp: Banzai::Filter::AssetProxyFilter.compile_allowlist(Gitlab.config.asset_proxy.allowlist))
end
diff --git a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb
index ae01939605e..3ed4a70d160 100644
--- a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb
@@ -85,7 +85,7 @@ RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline, feature_category: :team_
where(:markdown, :expected) do
%q![foo](/bar\@ "\@title")! | %q(<a href="/bar@" title="@title">foo</a>)
- %![foo]\n\n[foo]: /bar\\@ "\\@title"! | %q(<a href="/bar@" title="@title">foo</a>)
+ %([foo]\n\n[foo]: /bar\\@ "\\@title") | %q(<a href="/bar@" title="@title">foo</a>)
end
with_them do
diff --git a/spec/lib/banzai/reference_parser/base_parser_spec.rb b/spec/lib/banzai/reference_parser/base_parser_spec.rb
index bc7a93a7cde..b55ee01cceb 100644
--- a/spec/lib/banzai/reference_parser/base_parser_spec.rb
+++ b/spec/lib/banzai/reference_parser/base_parser_spec.rb
@@ -116,7 +116,7 @@ RSpec.describe Banzai::ReferenceParser::BaseParser, feature_category: :team_plan
links = Nokogiri::HTML.fragment("<a data-foo='1'></a><a data-foo='2'></a>").children
expect(subject).not_to receive(:references_relation)
- expect(subject.referenced_by(links, ids_only: true)).to eq(%w(1 2))
+ expect(subject.referenced_by(links, ids_only: true)).to eq(%w[1 2])
end
context 'and the html fragment does not contain any attributes' do
diff --git a/spec/lib/banzai/reference_parser/issue_parser_spec.rb b/spec/lib/banzai/reference_parser/issue_parser_spec.rb
index 072df6a23aa..e1abb9082f3 100644
--- a/spec/lib/banzai/reference_parser/issue_parser_spec.rb
+++ b/spec/lib/banzai/reference_parser/issue_parser_spec.rb
@@ -136,7 +136,7 @@ RSpec.describe Banzai::ReferenceParser::IssueParser, feature_category: :team_pla
end
def issue_link(issue)
- Nokogiri::HTML.fragment(%{<a data-issue="#{issue.id}"></a>}).children[0]
+ Nokogiri::HTML.fragment(%(<a data-issue="#{issue.id}"></a>)).children[0]
end
before do
diff --git a/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb b/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb
index bab535b67bf..b9340812676 100644
--- a/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb
+++ b/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb
@@ -61,7 +61,7 @@ RSpec.describe Banzai::ReferenceParser::MergeRequestParser, feature_category: :c
end
def merge_request_link(merge_request)
- Nokogiri::HTML.fragment(%{<a data-project="#{merge_request.project_id}" data-merge-request="#{merge_request.id}"></a>}).children[0]
+ Nokogiri::HTML.fragment(%(<a data-project="#{merge_request.project_id}" data-merge-request="#{merge_request.id}"></a>)).children[0]
end
before do
diff --git a/spec/lib/bitbucket/collection_spec.rb b/spec/lib/bitbucket/collection_spec.rb
index 715b78c95eb..9e384859149 100644
--- a/spec/lib/bitbucket/collection_spec.rb
+++ b/spec/lib/bitbucket/collection_spec.rb
@@ -21,6 +21,6 @@ RSpec.describe Bitbucket::Collection do
it "iterates paginator" do
collection = described_class.new(TestPaginator.new)
- expect(collection.to_a).to match(%w(result_1_page_1 result_2_page_1 result_1_page_2 result_2_page_2))
+ expect(collection.to_a).to match(%w[result_1_page_1 result_2_page_1 result_1_page_2 result_2_page_2])
end
end
diff --git a/spec/lib/bitbucket/representation/issue_spec.rb b/spec/lib/bitbucket/representation/issue_spec.rb
index a40bbcb7bf8..fb1f85cb184 100644
--- a/spec/lib/bitbucket/representation/issue_spec.rb
+++ b/spec/lib/bitbucket/representation/issue_spec.rb
@@ -2,7 +2,7 @@
require 'fast_spec_helper'
-RSpec.describe Bitbucket::Representation::Issue do
+RSpec.describe Bitbucket::Representation::Issue, feature_category: :importers do
describe '#iid' do
it { expect(described_class.new('id' => 1).iid).to eq(1) }
end
@@ -46,4 +46,32 @@ RSpec.describe Bitbucket::Representation::Issue do
describe '#updated_at' do
it { expect(described_class.new('edited_on' => Date.today).updated_at).to eq(Date.today) }
end
+
+ describe '#to_hash' do
+ it do
+ raw = {
+ 'id' => 111,
+ 'title' => 'title',
+ 'content' => { 'raw' => 'description' },
+ 'state' => 'resolved',
+ 'reporter' => { 'nickname' => 'User1' },
+ 'milestone' => { 'name' => 1 },
+ 'created_on' => 'created_at',
+ 'edited_on' => 'updated_at'
+ }
+
+ expected_hash = {
+ iid: 111,
+ title: 'title',
+ description: 'description',
+ state: 'closed',
+ author: 'User1',
+ milestone: 1,
+ created_at: 'created_at',
+ updated_at: 'updated_at'
+ }
+
+ expect(described_class.new(raw).to_hash).to eq(expected_hash)
+ end
+ end
end
diff --git a/spec/lib/bitbucket/representation/repo_spec.rb b/spec/lib/bitbucket/representation/repo_spec.rb
index b5b9f45f3d4..ba5a3306d07 100644
--- a/spec/lib/bitbucket/representation/repo_spec.rb
+++ b/spec/lib/bitbucket/representation/repo_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Bitbucket::Representation::Repo do
end
describe '#owner_and_slug' do
- it { expect(described_class.new({ 'full_name' => 'ben/test' }).owner_and_slug).to eq(%w(ben test)) }
+ it { expect(described_class.new({ 'full_name' => 'ben/test' }).owner_and_slug).to eq(%w[ben test]) }
end
describe '#owner' do
diff --git a/spec/lib/bitbucket_server/representation/pull_request_spec.rb b/spec/lib/bitbucket_server/representation/pull_request_spec.rb
index 4d8bb3a4407..2d67dd88b24 100644
--- a/spec/lib/bitbucket_server/representation/pull_request_spec.rb
+++ b/spec/lib/bitbucket_server/representation/pull_request_spec.rb
@@ -82,6 +82,18 @@ RSpec.describe BitbucketServer::Representation::PullRequest, feature_category: :
it { expect(subject.merged?).to be_truthy }
end
+ describe '#closed?' do
+ it { expect(subject.closed?).to be_falsey }
+
+ context 'for declined pull requests' do
+ before do
+ sample_data['state'] = 'DECLINED'
+ end
+
+ it { expect(subject.closed?).to be_truthy }
+ end
+ end
+
describe '#created_at' do
it { expect(subject.created_at.to_i).to eq(sample_data['createdDate'] / 1000) }
end
diff --git a/spec/lib/bulk_imports/clients/http_spec.rb b/spec/lib/bulk_imports/clients/http_spec.rb
index bf1bfb77b26..08d0509b54f 100644
--- a/spec/lib/bulk_imports/clients/http_spec.rb
+++ b/spec/lib/bulk_imports/clients/http_spec.rb
@@ -72,9 +72,6 @@ RSpec.describe BulkImports::Clients::HTTP, feature_category: :importers do
per_page: described_class::DEFAULT_PER_PAGE,
private_token: token
},
- headers: {
- 'Content-Type' => 'application/json'
- },
follow_redirects: true,
resend_on_redirect: false,
limit: 2
@@ -119,7 +116,6 @@ RSpec.describe BulkImports::Clients::HTTP, feature_category: :importers do
def stub_http_get(path, query, response)
uri = "http://gitlab.example/api/v4/#{path}"
params = {
- headers: { "Content-Type" => "application/json" },
query: { private_token: token },
follow_redirects: true,
resend_on_redirect: false,
@@ -141,9 +137,6 @@ RSpec.describe BulkImports::Clients::HTTP, feature_category: :importers do
per_page: described_class::DEFAULT_PER_PAGE,
private_token: token
},
- headers: {
- 'Content-Type' => 'application/json'
- },
follow_redirects: true,
resend_on_redirect: false,
limit: 2,
@@ -171,9 +164,6 @@ RSpec.describe BulkImports::Clients::HTTP, feature_category: :importers do
'http://gitlab.example/api/v4/resource',
hash_including(
body: {},
- headers: {
- 'Content-Type' => 'application/json'
- },
query: {
page: described_class::DEFAULT_PAGE,
per_page: described_class::DEFAULT_PER_PAGE,
@@ -196,9 +186,6 @@ RSpec.describe BulkImports::Clients::HTTP, feature_category: :importers do
[
'http://gitlab.example/api/v4/resource',
hash_including(
- headers: {
- 'Content-Type' => 'application/json'
- },
query: {
page: described_class::DEFAULT_PAGE,
per_page: described_class::DEFAULT_PER_PAGE,
@@ -219,9 +206,6 @@ RSpec.describe BulkImports::Clients::HTTP, feature_category: :importers do
'http://gitlab.example/api/v4/resource',
hash_including(
stream_body: true,
- headers: {
- 'Content-Type' => 'application/json'
- },
query: {
page: described_class::DEFAULT_PAGE,
per_page: described_class::DEFAULT_PER_PAGE,
diff --git a/spec/lib/bulk_imports/common/pipelines/badges_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/badges_pipeline_spec.rb
index 6c5465c8a66..a18d26bedf3 100644
--- a/spec/lib/bulk_imports/common/pipelines/badges_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/badges_pipeline_spec.rb
@@ -33,6 +33,24 @@ RSpec.describe BulkImports::Common::Pipelines::BadgesPipeline do
expect(badge.image_url).to eq(badge_data['image_url'])
end
+ it 'skips already imported records' do
+ expect { pipeline.run }.to change(Badge, :count).by(2)
+
+ expect { pipeline.run }.to not_change(Badge, :count)
+ end
+
+ context 'with FF bulk_import_idempotent_workers disabled' do
+ before do
+ stub_feature_flags(bulk_import_idempotent_workers: false)
+ end
+
+ it 'creates duplicated badges' do
+ expect { pipeline.run }.to change(Badge, :count).by(2)
+
+ expect { pipeline.run }.to change(Badge, :count)
+ end
+ end
+
context 'when project entity' do
let(:first_page) { extracted_data(has_next_page: true) }
let(:last_page) { extracted_data(name: 'badge2', kind: 'project') }
diff --git a/spec/lib/bulk_imports/common/pipelines/boards_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/boards_pipeline_spec.rb
index 43da0131dd2..cc1f88d9613 100644
--- a/spec/lib/bulk_imports/common/pipelines/boards_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/boards_pipeline_spec.rb
@@ -65,7 +65,7 @@ RSpec.describe BulkImports::Common::Pipelines::BoardsPipeline, feature_category:
expect(board).to be_present
expect(board.project.id).to eq(project.id)
expect(board.lists.count).to eq(3)
- expect(board.lists.map(&:list_type).sort).to match_array(%w(backlog closed label))
+ expect(board.lists.map(&:list_type).sort).to match_array(%w[backlog closed label])
expect(board.lists.find_by(list_type: "label").label.title).to eq("test")
end
end
@@ -90,7 +90,7 @@ RSpec.describe BulkImports::Common::Pipelines::BoardsPipeline, feature_category:
expect(board).to be_present
expect(board.group.id).to eq(group.id)
expect(board.lists.count).to eq(3)
- expect(board.lists.map(&:list_type).sort).to match_array(%w(backlog closed label))
+ expect(board.lists.map(&:list_type).sort).to match_array(%w[backlog closed label])
expect(board.lists.find_by(list_type: "label").label.title).to eq("test")
end
end
diff --git a/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb
index c91b031de30..5ba9bd08009 100644
--- a/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb
@@ -41,6 +41,17 @@ RSpec.describe BulkImports::Common::Pipelines::LfsObjectsPipeline, feature_categ
expect(portable.lfs_objects_projects.count).to eq(4)
expect(Dir.exist?(tmpdir)).to eq(false)
end
+
+ it 'does not call load on duplicates' do
+ allow(pipeline)
+ .to receive(:extract)
+ .and_return(BulkImports::Pipeline::ExtractedData.new(data: [lfs_json_file_path, lfs_file_path]))
+
+ pipeline.run
+
+ expect(pipeline).not_to receive(:load)
+ pipeline.run
+ end
end
describe '#extract' do
diff --git a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
index 675ae935c1c..3c2322c4a4f 100644
--- a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
@@ -40,6 +40,14 @@ RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline, feature_category
expect(Dir.exist?(tmpdir)).to eq(false)
end
+ it 'skips loads on duplicates' do
+ pipeline.run
+
+ expect(pipeline).not_to receive(:load)
+
+ pipeline.run
+ end
+
context 'when importing avatar' do
let(:uploads_dir_path) { File.join(tmpdir, 'avatar') }
diff --git a/spec/lib/bulk_imports/common/transformers/prohibited_attributes_transformer_spec.rb b/spec/lib/bulk_imports/common/transformers/prohibited_attributes_transformer_spec.rb
index 08a82bc84ed..435c7d5d6e4 100644
--- a/spec/lib/bulk_imports/common/transformers/prohibited_attributes_transformer_spec.rb
+++ b/spec/lib/bulk_imports/common/transformers/prohibited_attributes_transformer_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe BulkImports::Common::Transformers::ProhibitedAttributesTransforme
'note_ids' => [1, 2, 3],
'remote_attachment_url' => 'http://something.dodgy',
'remote_attachment_request_header' => 'bad value',
- 'remote_attachment_urls' => %w(http://something.dodgy http://something.okay),
+ 'remote_attachment_urls' => %w[http://something.dodgy http://something.okay],
'attributes' => {
'issue_ids' => [1, 2, 3],
'merge_request_ids' => [1, 2, 3],
diff --git a/spec/lib/bulk_imports/common/transformers/user_reference_transformer_spec.rb b/spec/lib/bulk_imports/common/transformers/user_reference_transformer_spec.rb
deleted file mode 100644
index ba74c173794..00000000000
--- a/spec/lib/bulk_imports/common/transformers/user_reference_transformer_spec.rb
+++ /dev/null
@@ -1,77 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Common::Transformers::UserReferenceTransformer do
- describe '#transform' do
- let_it_be(:user) { create(:user) }
- let_it_be(:group) { create(:group) }
- let_it_be(:bulk_import) { create(:bulk_import) }
- let_it_be(:entity) { create(:bulk_import_entity, bulk_import: bulk_import, group: group) }
- let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
- let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
-
- let(:hash) do
- {
- 'user' => {
- 'public_email' => email
- }
- }
- end
-
- before do
- group.add_developer(user)
- end
-
- shared_examples 'sets user_id and removes user key' do
- it 'sets found user_id and removes user key' do
- transformed_hash = subject.transform(context, hash)
-
- expect(transformed_hash['user']).to be_nil
- expect(transformed_hash['user_id']).to eq(user.id)
- end
- end
-
- context 'when user can be found by email' do
- let(:email) { user.email }
-
- include_examples 'sets user_id and removes user key'
- end
-
- context 'when user cannot be found by email' do
- let(:user) { bulk_import.user }
- let(:email) { nil }
-
- include_examples 'sets user_id and removes user key'
- end
-
- context 'when there is no data to transform' do
- it 'returns' do
- expect(subject.transform(nil, nil)).to be_nil
- end
- end
-
- context 'when custom reference is provided' do
- shared_examples 'updates provided reference' do |reference|
- let(:hash) do
- {
- 'author' => {
- 'public_email' => user.email
- }
- }
- end
-
- it 'updates provided reference' do
- transformer = described_class.new(reference: reference)
- result = transformer.transform(context, hash)
-
- expect(result['author']).to be_nil
- expect(result['author_id']).to eq(user.id)
- end
- end
-
- include_examples 'updates provided reference', 'author'
- include_examples 'updates provided reference', :author
- end
- end
-end
diff --git a/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb
index 36b425f4f12..b470edae2c2 100644
--- a/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline do
- describe '#run' do
+ describe '#run', :clean_gitlab_redis_cache do
let_it_be(:user) { create(:user) }
let_it_be(:parent) { create(:group) }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
@@ -63,6 +63,11 @@ RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline do
expect(imported_group.emails_disabled?).to eq(group_data['emails_disabled'])
expect(imported_group.mentions_disabled?).to eq(group_data['mentions_disabled'])
end
+
+ it 'skips duplicates on pipeline rerun' do
+ expect { subject.run }.to change { Group.count }.by(1)
+ expect { subject.run }.not_to change { Group.count }
+ end
end
describe 'pipeline parts' do
diff --git a/spec/lib/bulk_imports/groups/pipelines/project_entities_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/project_entities_pipeline_spec.rb
index 0155dc8053e..f7076341f8f 100644
--- a/spec/lib/bulk_imports/groups/pipelines/project_entities_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/project_entities_pipeline_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe BulkImports::Groups::Pipelines::ProjectEntitiesPipeline, feature_
subject { described_class.new(context) }
- describe '#run' do
+ describe '#run', :clean_gitlab_redis_cache do
let(:extracted_data) do
BulkImports::Pipeline::ExtractedData.new(data: {
'id' => 'gid://gitlab/Project/1234567',
@@ -49,6 +49,11 @@ RSpec.describe BulkImports::Groups::Pipelines::ProjectEntitiesPipeline, feature_
expect(project_entity.destination_namespace).to eq(destination_group.full_path)
expect(project_entity.source_xid).to eq(1234567)
end
+
+ it 'does not create duplicate entities on rerun' do
+ expect { subject.run }.to change(BulkImports::Entity, :count).by(1)
+ expect { subject.run }.not_to change(BulkImports::Entity, :count)
+ end
end
describe 'pipeline parts' do
diff --git a/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb
index 6949ac59948..a50fe7ecd4c 100644
--- a/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline do
})
end
- describe '#run' do
+ describe '#run', :clean_gitlab_redis_cache do
before do
allow_next_instance_of(BulkImports::Groups::Extractors::SubgroupsExtractor) do |extractor|
allow(extractor).to receive(:extract).and_return(extracted_data)
@@ -38,6 +38,11 @@ RSpec.describe BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline do
expect(subgroup_entity.destination_name).to eq 'sub-group'
expect(subgroup_entity.parent_id).to eq parent_entity.id
end
+
+ it 'does not create duplicate entities on rerun' do
+ expect { subject.run }.to change(BulkImports::Entity, :count).by(1)
+ expect { subject.run }.not_to change(BulkImports::Entity, :count)
+ end
end
describe '#load' do
diff --git a/spec/lib/bulk_imports/ndjson_pipeline_spec.rb b/spec/lib/bulk_imports/ndjson_pipeline_spec.rb
index 29f42ab3366..5611879868d 100644
--- a/spec/lib/bulk_imports/ndjson_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/ndjson_pipeline_spec.rb
@@ -6,6 +6,8 @@ RSpec.describe BulkImports::NdjsonPipeline, feature_category: :importers do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
+ let(:tracker) { instance_double(BulkImports::Tracker, bulk_import_entity_id: 1) }
+ let(:context) { instance_double(BulkImports::Pipeline::Context, tracker: tracker, extra: { batch_number: 1 }) }
let(:klass) do
Class.new do
@@ -13,11 +15,12 @@ RSpec.describe BulkImports::NdjsonPipeline, feature_category: :importers do
relation_name 'test'
- attr_reader :portable, :current_user
+ attr_reader :portable, :current_user, :context
- def initialize(portable, user)
+ def initialize(portable, user, context)
@portable = portable
@current_user = user
+ @context = context
end
end
end
@@ -26,12 +29,29 @@ RSpec.describe BulkImports::NdjsonPipeline, feature_category: :importers do
stub_const('NdjsonPipelineClass', klass)
end
- subject { NdjsonPipelineClass.new(group, user) }
+ subject { NdjsonPipelineClass.new(group, user, context) }
it 'marks pipeline as ndjson' do
expect(NdjsonPipelineClass.file_extraction_pipeline?).to eq(true)
end
+ describe 'caching' do
+ it 'saves completed entry in cache' do
+ subject.save_processed_entry("entry", 10)
+
+ expected_cache_key = "ndjson_pipeline_class/1/1"
+ expect(Gitlab::Cache::Import::Caching.read(expected_cache_key)).to eq("10")
+ end
+
+ it 'identifies completed entries' do
+ subject.save_processed_entry("entry", 10)
+
+ expect(subject.already_processed?("entry", 11)).to be_falsy
+ expect(subject.already_processed?("entry", 10)).to be_truthy
+ expect(subject.already_processed?("entry", 9)).to be_truthy
+ end
+ end
+
describe '#deep_transform_relation!' do
it 'transforms relation hash' do
transformed = subject.deep_transform_relation!({}, 'test', {}) do |key, hash|
@@ -238,7 +258,7 @@ RSpec.describe BulkImports::NdjsonPipeline, feature_category: :importers do
end
context 'when portable is project' do
- subject { NdjsonPipelineClass.new(project, user) }
+ subject { NdjsonPipelineClass.new(project, user, context) }
it 'returns group relation name override' do
expect(subject.relation_key_override('labels')).to eq('project_labels')
@@ -254,7 +274,7 @@ RSpec.describe BulkImports::NdjsonPipeline, feature_category: :importers do
end
context 'when portable is project' do
- subject { NdjsonPipelineClass.new(project, user) }
+ subject { NdjsonPipelineClass.new(project, user, context) }
it 'returns project relation factory' do
expect(subject.relation_factory).to eq(Gitlab::ImportExport::Project::RelationFactory)
diff --git a/spec/lib/bulk_imports/pipeline/extracted_data_spec.rb b/spec/lib/bulk_imports/pipeline/extracted_data_spec.rb
index 045908de5c4..108ce05cb66 100644
--- a/spec/lib/bulk_imports/pipeline/extracted_data_spec.rb
+++ b/spec/lib/bulk_imports/pipeline/extracted_data_spec.rb
@@ -50,4 +50,18 @@ RSpec.describe BulkImports::Pipeline::ExtractedData do
end
end
end
+
+ describe '#each_with_index' do
+ context 'when block is present' do
+ it 'yields each data item with index' do
+ expect { |b| subject.each_with_index(&b) }.to yield_control
+ end
+ end
+
+ context 'when block is not present' do
+ it 'returns enumerator' do
+ expect(subject.each_with_index).to be_instance_of(Enumerator)
+ end
+ end
+ end
end
diff --git a/spec/lib/bulk_imports/pipeline/runner_spec.rb b/spec/lib/bulk_imports/pipeline/runner_spec.rb
index 2f54ab111c8..8d48606633a 100644
--- a/spec/lib/bulk_imports/pipeline/runner_spec.rb
+++ b/spec/lib/bulk_imports/pipeline/runner_spec.rb
@@ -66,7 +66,7 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
'exception.message' => exception_message,
'correlation_id' => anything,
'class' => 'BulkImports::MyPipeline',
- 'message' => "Pipeline failed",
+ 'message' => 'An object of a pipeline failed to import',
'importer' => 'gitlab_migration',
'exception.backtrace' => anything,
'source_version' => entity.bulk_import.source_version_info.to_s
@@ -112,10 +112,10 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
end
context 'when pipeline is not marked to abort on failure' do
- it 'does not mark entity as failed' do
+ it 'does not mark tracker and entity as failed' do
subject.run
- expect(tracker.failed?).to eq(true)
+ expect(tracker.failed?).to eq(false)
expect(entity.failed?).to eq(false)
end
end
@@ -277,6 +277,115 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
it_behaves_like 'failed pipeline', 'StandardError', 'Error!'
end
+
+ it 'saves entry in cache for de-duplication' do
+ expect_next_instance_of(BulkImports::Extractor) do |extractor|
+ expect(extractor)
+ .to receive(:extract)
+ .with(context)
+ .and_return(extracted_data)
+ end
+
+ expect_next_instance_of(BulkImports::Transformer) do |transformer|
+ expect(transformer)
+ .to receive(:transform)
+ .with(context, extracted_data.data.first)
+ .and_return(extracted_data.data.first)
+ end
+
+ expect_next_instance_of(BulkImports::MyPipeline) do |klass|
+ expect(klass).to receive(:save_processed_entry).with(extracted_data.data.first, anything)
+ end
+
+ subject.run
+ end
+
+ context 'with FF bulk_import_idempotent_workers disabled' do
+ before do
+ stub_feature_flags(bulk_import_idempotent_workers: false)
+ end
+
+ it 'does not touch the cache' do
+ expect_next_instance_of(BulkImports::Extractor) do |extractor|
+ expect(extractor)
+ .to receive(:extract)
+ .with(context)
+ .and_return(extracted_data)
+ end
+
+ expect_next_instance_of(BulkImports::Transformer) do |transformer|
+ expect(transformer)
+ .to receive(:transform)
+ .with(context, extracted_data.data.first)
+ .and_return(extracted_data.data.first)
+ end
+
+ expect_next_instance_of(BulkImports::MyPipeline) do |klass|
+ expect(klass).not_to receive(:save_processed_entry)
+ end
+
+ subject.run
+ end
+ end
+ end
+
+ context 'when the entry is already processed' do
+ before do
+ allow_next_instance_of(BulkImports::MyPipeline) do |klass|
+ allow(klass).to receive(:already_processed?).and_return true
+ end
+ end
+
+ it 'runs pipeline extractor, but not transformer or loader' do
+ expect_next_instance_of(BulkImports::Extractor) do |extractor|
+ expect(extractor)
+ .to receive(:extract)
+ .with(context)
+ .and_return(extracted_data)
+ end
+
+ allow_next_instance_of(BulkImports::Transformer) do |transformer|
+ expect(transformer)
+ .not_to receive(:transform)
+ end
+
+ allow_next_instance_of(BulkImports::Loader) do |loader|
+ expect(loader)
+ .not_to receive(:load)
+ end
+
+ subject.run
+ end
+
+ context 'with FF bulk_import_idempotent_workers disabled' do
+ before do
+ stub_feature_flags(bulk_import_idempotent_workers: false)
+ end
+
+ it 'calls extractor, transformer, and loader' do
+ expect_next_instance_of(BulkImports::Extractor) do |extractor|
+ expect(extractor)
+ .to receive(:extract)
+ .with(context)
+ .and_return(extracted_data)
+ end
+
+ expect_next_instance_of(BulkImports::Transformer) do |transformer|
+ expect(transformer)
+ .to receive(:transform)
+ .with(context, extracted_data.data.first)
+ .and_return(extracted_data.data.first)
+ end
+
+ expect_next_instance_of(BulkImports::Loader) do |loader|
+ expect(loader)
+ .to receive(:load)
+ .with(context, extracted_data.data.first)
+ end
+
+ subject.run
+ end
+ end
end
context 'when entity is marked as failed' do
diff --git a/spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb
index 63e7cdf2e5a..0d32af27d4f 100644
--- a/spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe BulkImports::Projects::Pipelines::CiPipelinesPipeline do
subject(:pipeline) { described_class.new(context) }
- describe '#run' do
+ describe '#run', :clean_gitlab_redis_cache do
before do
group.add_owner(user)
diff --git a/spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb
index e65339ffdd0..2a55f5ffae1 100644
--- a/spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb
@@ -40,6 +40,20 @@ RSpec.describe BulkImports::Projects::Pipelines::DesignBundlePipeline, feature_c
expect(portable.design_repository.exists?).to eq(true)
end
+
+ it 'skips import if already cached' do
+ allow(pipeline)
+ .to receive(:extract)
+ .and_return(BulkImports::Pipeline::ExtractedData.new(data: [design_bundle_path]))
+
+ expect(portable.design_repository).to receive(:create_from_bundle).with(design_bundle_path).and_call_original
+
+ pipeline.run
+
+ expect(pipeline).not_to receive(:load)
+
+ pipeline.run
+ end
end
describe '#extract' do
diff --git a/spec/lib/bulk_imports/projects/pipelines/external_pull_requests_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/external_pull_requests_pipeline_spec.rb
index 8f610fcc2ae..b7197814f9c 100644
--- a/spec/lib/bulk_imports/projects/pipelines/external_pull_requests_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/external_pull_requests_pipeline_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe BulkImports::Projects::Pipelines::ExternalPullRequestsPipeline do
subject(:pipeline) { described_class.new(context) }
- describe '#run' do
+ describe '#run', :clean_gitlab_redis_cache do
before do
allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
allow(extractor).to receive(:remove_tmp_dir)
diff --git a/spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb
index fd13c10d61e..625078b1b2a 100644
--- a/spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe BulkImports::Projects::Pipelines::IssuesPipeline do
subject(:pipeline) { described_class.new(context) }
- describe '#run' do
+ describe '#run', :clean_gitlab_redis_cache do
before do
group.add_owner(user)
issue_with_index = [issue, 0]
diff --git a/spec/lib/bulk_imports/projects/pipelines/merge_requests_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/merge_requests_pipeline_spec.rb
index 5b85b3eee79..3fb7e28036e 100644
--- a/spec/lib/bulk_imports/projects/pipelines/merge_requests_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/merge_requests_pipeline_spec.rb
@@ -83,7 +83,7 @@ RSpec.describe BulkImports::Projects::Pipelines::MergeRequestsPipeline do
subject(:pipeline) { described_class.new(context) }
- describe '#run' do
+ describe '#run', :clean_gitlab_redis_cache do
before do
group.add_owner(user)
group.add_maintainer(another_user)
diff --git a/spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb
index 0bfd9410808..6ba555aa328 100644
--- a/spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::Projects::Pipelines::PipelineSchedulesPipeline do
+RSpec.describe BulkImports::Projects::Pipelines::PipelineSchedulesPipeline, :clean_gitlab_redis_cache, feature_category: :importers do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
diff --git a/spec/lib/bulk_imports/projects/pipelines/project_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/project_pipeline_spec.rb
index 82b8bb3958a..8f514a20ae6 100644
--- a/spec/lib/bulk_imports/projects/pipelines/project_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/project_pipeline_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe BulkImports::Projects::Pipelines::ProjectPipeline, feature_category: :importers do
- describe '#run' do
+ describe '#run', :clean_gitlab_redis_cache do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
@@ -50,6 +50,11 @@ RSpec.describe BulkImports::Projects::Pipelines::ProjectPipeline, feature_catego
expect(imported_project.visibility).to eq(project_data['visibility'])
expect(imported_project.created_at).to eq(project_data['created_at'])
end
+
+ it 'skips duplicate projects on pipeline re-run' do
+ expect { project_pipeline.run }.to change { Project.count }.by(1)
+ expect { project_pipeline.run }.not_to change { Project.count }
+ end
end
describe 'pipeline parts' do
diff --git a/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb
index af8bce47c3d..e2b99fe4db4 100644
--- a/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb
@@ -134,7 +134,7 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat
end
end
- describe '#transform' do
+ describe '#transform', :clean_gitlab_redis_cache do
it 'updates matching urls and usernames with new ones' do
transformed_mr = subject.transform(context, mr)
transformed_note = subject.transform(context, mr_note)
@@ -154,7 +154,8 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat
expect(transformed_system_note.note).not_to include("@old_username")
expect(transformed_username_system_note.note).not_to include("@source_username")
- expect(transformed_issue.description).to eq('http://localhost:80/namespace1/project-1/-/issues/1')
+ expect(transformed_issue.description)
+ .to eq("http://localhost:80/#{transformed_issue.namespace.full_path}/-/issues/1")
expect(transformed_mr.description).to eq("#{expected_url} @destination_username? @alice-gdk, @bob-gdk!")
expect(transformed_note.note).to eq("#{expected_url} @same_username")
expect(transformed_issue_note.note).to include("@newer_username, not_a@username, and @new_username.")
diff --git a/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb
index 339ca727b57..9e0b5af6bfe 100644
--- a/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
subject(:pipeline) { described_class.new(context) }
- describe '#run' do
+ describe '#run', :clean_gitlab_redis_cache do
before do
group.add_owner(user)
with_index = [release, 0]
diff --git a/spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb
index 2865215823a..68c47c43fe7 100644
--- a/spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb
@@ -41,6 +41,16 @@ RSpec.describe BulkImports::Projects::Pipelines::RepositoryBundlePipeline, featu
expect(Dir.exist?(tmpdir)).to eq(false)
end
+ it 'skips import if already cached' do
+ expect(portable.repository).to receive(:create_from_bundle).with(bundle_path).and_call_original
+
+ pipeline.run
+
+ expect(pipeline).not_to receive(:load)
+
+ pipeline.run
+ end
+
context 'when something goes wrong during import' do
it 'marks entity as failed' do
allow(pipeline).to receive(:load).and_raise(StandardError)
diff --git a/spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb
index 41b3ea37804..1e3cfe20bf5 100644
--- a/spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe BulkImports::Projects::Pipelines::SnippetsPipeline do
subject(:pipeline) { described_class.new(context) }
- describe '#run' do
+ describe '#run', :clean_gitlab_redis_cache do
before do
group.add_owner(user)
snippet_with_index = [exported_snippet.dup, 0]
diff --git a/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb
index 56c0f8c8807..85946c5e0f9 100644
--- a/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline do
+RSpec.describe BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline, feature_category: :importers do
let(:user) { create(:user) }
let(:project) { create(:project) }
let(:bulk_import) { create(:bulk_import, user: user) }
@@ -55,7 +55,7 @@ RSpec.describe BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline do
end
end
- describe '#run' do
+ describe '#run', :clean_gitlab_redis_cache do
let(:validation_response) { double(Hash, 'error?': false) }
before do
@@ -110,6 +110,18 @@ RSpec.describe BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline do
.to change { Gitlab::GlRepository::SNIPPET.repository_for(matched_snippet).exists? }.to true
end
+ it 'skips already cached snippets' do
+ pipeline.run
+
+ data.first.tap { |d| d['createdAt'] = matched_snippet.created_at.to_s } # Reset data to original state
+
+ expect(pipeline).not_to receive(:load)
+
+ pipeline.run
+
+ expect(Gitlab::GlRepository::SNIPPET.repository_for(matched_snippet).exists?).to be true
+ end
+
it 'updates snippets statistics' do
allow_next_instance_of(Repository) do |repository|
allow(repository).to receive(:fetch_as_mirror)
@@ -149,7 +161,6 @@ RSpec.describe BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline do
it 'logs the failure' do
pipeline.run
- expect(tracker.failed?).to eq(true)
expect(tracker.entity.failures.first).to be_present
expect(tracker.entity.failures.first.exception_message).to eq('Only allowed schemes are http, https')
end
diff --git a/spec/lib/container_registry/client_spec.rb b/spec/lib/container_registry/client_spec.rb
index a1425169dee..39409cf8d3a 100644
--- a/spec/lib/container_registry/client_spec.rb
+++ b/spec/lib/container_registry/client_spec.rb
@@ -89,13 +89,14 @@ RSpec.describe ContainerRegistry::Client do
it_behaves_like 'handling timeouts'
end
- shared_examples 'handling repository info' do
+ shared_examples 'handling registry info' do
context 'when the check is successful' do
context 'when using the GitLab container registry' do
before do
stub_registry_info(headers: {
'GitLab-Container-Registry-Version' => '2.9.1-gitlab',
- 'GitLab-Container-Registry-Features' => 'a,b,c'
+ 'GitLab-Container-Registry-Features' => 'a,b,c',
+ 'GitLab-Container-Registry-Database-Enabled' => 'true'
})
end
@@ -106,6 +107,10 @@ RSpec.describe ContainerRegistry::Client do
it 'identifies version and features' do
expect(subject).to include(version: '2.9.1-gitlab', features: %w[a b c])
end
+
+ it 'identifies the registry DB as enabled' do
+ expect(subject).to include(db_enabled: true)
+ end
end
context 'when using a third-party container registry' do
@@ -120,6 +125,10 @@ RSpec.describe ContainerRegistry::Client do
it 'does not identify version or features' do
expect(subject).to include(version: nil, features: [])
end
+
+ it 'does not identify the registry DB as enabled' do
+ expect(subject).to include(db_enabled: false)
+ end
end
end
@@ -130,6 +139,16 @@ RSpec.describe ContainerRegistry::Client do
expect(subject).to eq({})
end
end
+
+ context 'when the check returns an unexpected value in the database enabled header' do
+ it 'does not identify the registry DB as enabled' do
+ stub_registry_info(headers: {
+ 'GitLab-Container-Registry-Database-Enabled' => '123'
+ })
+
+ expect(subject).to include(db_enabled: false)
+ end
+ end
end
describe '#repository_manifest' do
@@ -360,7 +379,7 @@ RSpec.describe ContainerRegistry::Client do
describe '#registry_info' do
subject { client.registry_info }
- it_behaves_like 'handling repository info'
+ it_behaves_like 'handling registry info'
end
describe '.supports_tag_delete?' do
@@ -446,7 +465,7 @@ RSpec.describe ContainerRegistry::Client do
stub_container_registry_config(enabled: true, api_url: registry_api_url, key: 'spec/fixtures/x509_certificate_pk.key')
end
- it_behaves_like 'handling repository info'
+ it_behaves_like 'handling registry info'
end
def stub_upload(path, content, digest, status = 200)
diff --git a/spec/lib/container_registry/gitlab_api_client_spec.rb b/spec/lib/container_registry/gitlab_api_client_spec.rb
index b53b5b44c2e..86675ba27f6 100644
--- a/spec/lib/container_registry/gitlab_api_client_spec.rb
+++ b/spec/lib/container_registry/gitlab_api_client_spec.rb
@@ -455,6 +455,56 @@ RSpec.describe ContainerRegistry::GitlabApiClient, feature_category: :container_
end
end
+ describe '#rename_base_repository_path' do
+ let(:path) { 'path/repository' }
+ let(:name) { 'newname' }
+ let(:dry_run) { 'false' }
+ let(:status_code) { 204 }
+
+ subject { client.rename_base_repository_path(path, name: name, dry_run: dry_run) }
+
+ before do
+ stub_rename_base_repository(path, name: name, dry_run: dry_run, status_code: status_code)
+ end
+
+ where(:dry_run, :status_code, :expected_result) do
+ true | 202 | :accepted
+ true | 400 | :bad_request
+ true | 401 | :unauthorized
+ true | 404 | :not_found
+ true | 409 | :name_taken
+ true | 422 | :too_many_subrepositories
+
+ false | 204 | :ok
+ false | 400 | :bad_request
+ false | 401 | :unauthorized
+ false | 404 | :not_found
+ false | 409 | :name_taken
+ false | 422 | :too_many_subrepositories
+ end
+
+ with_them do
+ it { is_expected.to eq(expected_result) }
+ end
+
+ context 'with a non-successful response' do
+ before do
+ stub_rename_base_repository(path, name: name, dry_run: false, status_code: 404)
+ end
+
+ it 'logs an error' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:log_exception).with(
+ instance_of(described_class::UnsuccessfulResponseError),
+ class: described_class.name,
+ url: "/gitlab/v1/repositories/#{path}/",
+ status_code: 404
+ )
+ subject
+ end
+ end
+ end
+
describe '.supports_gitlab_api?' do
subject { described_class.supports_gitlab_api? }
@@ -658,6 +708,40 @@ RSpec.describe ContainerRegistry::GitlabApiClient, feature_category: :container_
end
end
+ describe '.rename_base_repository_path' do
+ let(:name) { 'newname' }
+ let(:expected_dry_run) { true }
+
+ before do
+ stub_container_registry_config(enabled: true, api_url: registry_api_url, key: 'spec/fixtures/x509_certificate_pk.key')
+
+ expect_next_instance_of(described_class) do |client|
+ expect(client).to receive(:rename_base_repository_path).with(path.downcase, name: name.downcase, dry_run: expected_dry_run).and_return(:ok)
+ end
+ end
+
+ it 'passes on the parameters to #rename_base_repository_path' do
+ described_class.rename_base_repository_path(path, name: name, dry_run: true)
+ end
+
+ context 'when path and/or name have non-downcased letters' do
+ let(:path) { 'pAtH/to/PROject' }
+ let(:name) { 'nEwNamE' }
+
+ it 'passes the path and name downcased to #rename_base_repository_path' do
+ described_class.rename_base_repository_path(path, name: name, dry_run: true)
+ end
+ end
+
+ context 'when dry_run parameter is not given' do
+ let(:expected_dry_run) { false }
+
+ it 'defaults to false' do
+ described_class.rename_base_repository_path(path, name: 'newname')
+ end
+ end
+ end
+
describe '#each_sub_repositories_with_tag_page' do
let(:page_size) { 100 }
let(:project_path) { 'repo/project' }
@@ -864,12 +948,9 @@ RSpec.describe ContainerRegistry::GitlabApiClient, feature_category: :container_
url = "#{registry_api_url}/gitlab/v1/repositories/#{path}/"
url += "?size=#{sizing}" if sizing
- headers = { 'Accept' => described_class::JSON_TYPE }
- headers['Authorization'] = "bearer #{token}" if token
-
stub_request(:get, url)
- .with(headers: headers)
- .to_return(status: status_code, body: respond_with.to_json, headers: { 'Content-Type' => described_class::JSON_TYPE })
+ .with(headers: request_headers)
+ .to_return(status: status_code, body: respond_with.to_json, headers: headers_with_json_content_type)
end
def stub_tags(path, page_size: nil, input: {}, previous_page_url: nil, next_page_url: nil, status_code: 200, respond_with: {})
@@ -887,17 +968,12 @@ RSpec.describe ContainerRegistry::GitlabApiClient, feature_category: :container_
url += "?#{params.map { |param, val| "#{param}=#{val}" }.join('&')}"
end
- request_headers = { 'Accept' => described_class::JSON_TYPE }
- request_headers['Authorization'] = "bearer #{token}" if token
-
- response_headers = { 'Content-Type' => described_class::JSON_TYPE }
- if next_page_url || previous_page_url
- previous_page_url = %(<#{previous_page_url}>; rel="previous") if previous_page_url
- next_page_url = %(<#{next_page_url}>; rel="next") if next_page_url
-
- link_header = [previous_page_url, next_page_url].compact.join(" ,")
- response_headers['Link'] = link_header
- end
+ response_headers =
+ add_link_to_headers_from_urls(
+ headers_with_json_content_type,
+ previous_page_url,
+ next_page_url
+ )
stub_request(:get, url)
.with(headers: request_headers)
@@ -917,13 +993,8 @@ RSpec.describe ContainerRegistry::GitlabApiClient, feature_category: :container_
url += "?#{params.map { |param, val| "#{param}=#{val}" }.join('&')}"
end
- request_headers = { 'Accept' => described_class::JSON_TYPE }
- request_headers['Authorization'] = "bearer #{token}" if token
-
- response_headers = { 'Content-Type' => described_class::JSON_TYPE }
- if next_page_url
- response_headers['Link'] = "<#{next_page_url}>; rel=\"next\""
- end
+ response_headers =
+ add_link_to_headers_from_urls(headers_with_json_content_type, nil, next_page_url)
stub_request(:get, url)
.with(headers: request_headers)
@@ -933,4 +1004,34 @@ RSpec.describe ContainerRegistry::GitlabApiClient, feature_category: :container_
headers: response_headers
)
end
+
+ def stub_rename_base_repository(path, name:, dry_run: false, status_code: 204)
+ url = "#{registry_api_url}/gitlab/v1/repositories/#{path}/?dry_run=#{dry_run}"
+
+ stub_request(:patch, url)
+ .with(headers: request_headers, body: { name: name }.to_json)
+ .to_return(status: status_code, headers: headers_with_json_content_type)
+ end
+
+ def request_headers
+ headers = { 'Accept' => described_class::JSON_TYPE }
+ headers['Authorization'] = "bearer #{token}" if token
+
+ headers
+ end
+
+ def headers_with_json_content_type
+ { 'Content-Type' => described_class::JSON_TYPE }
+ end
+
+ def add_link_to_headers_from_urls(headers, previous_page_url, next_page_url)
+ return headers unless previous_page_url || next_page_url
+
+ previous_page_url = %(<#{previous_page_url}>; rel="previous") if previous_page_url
+ next_page_url = %(<#{next_page_url}>; rel="next") if next_page_url
+
+ headers['Link'] = [previous_page_url, next_page_url].compact.join(" ,")
+
+ headers
+ end
end
diff --git a/spec/lib/expand_variables_spec.rb b/spec/lib/expand_variables_spec.rb
index ad73665326a..695e63b6db1 100644
--- a/spec/lib/expand_variables_spec.rb
+++ b/spec/lib/expand_variables_spec.rb
@@ -187,6 +187,102 @@ RSpec.describe ExpandVariables, feature_category: :secrets_management do
end
end
+ shared_examples 'masked variable expansion with fail_on_masked true' do |expander|
+ using RSpec::Parameterized::TableSyntax
+
+ subject { expander.call(value, variables, fail_on_masked: true) }
+
+ where do
+ {
+ 'simple expansion with a masked variable': {
+ value: 'key$variable',
+ variables: [
+ { key: 'variable', value: 'value', masked: true }
+ ]
+ },
+ 'complex expansion with a masked variable': {
+ value: 'key${variable}${variable2}',
+ variables: [
+ { key: 'variable', value: 'value', masked: true },
+ { key: 'variable2', value: 'result', masked: false }
+ ]
+ },
+ 'expansion using % with a masked variable': {
+ value: 'key%variable%',
+ variables: [
+ { key: 'variable', value: 'value', masked: true }
+ ]
+ }
+ }
+ end
+
+ with_them do
+ it 'raises an error' do
+ expect { subject }.to raise_error(
+ ExpandVariables::VariableExpansionError, /masked variables cannot be expanded/
+ )
+ end
+ end
+
+ context 'expansion without a masked variable' do
+ let(:value) { 'key$variable${variable2}' }
+
+ let(:variables) do
+ [
+ { key: 'variable', value: 'value', masked: false },
+ { key: 'variable2', value: 'result', masked: false }
+ ]
+ end
+
+ it { is_expected.to eq('keyvalueresult') }
+ end
+ end
+
+ shared_examples 'masked variable expansion with fail_on_masked false' do |expander|
+ using RSpec::Parameterized::TableSyntax
+
+ subject { expander.call(value, variables, fail_on_masked: false) }
+
+ where do
+ {
+ 'simple expansion with a masked variable': {
+ value: 'key$variable',
+ result: 'keyvalue',
+ variables: [
+ { key: 'variable', value: 'value', masked: true }
+ ]
+ },
+ 'complex expansion with a masked variable': {
+ value: 'key${variable}${variable2}',
+ result: 'keyvalueresult',
+ variables: [
+ { key: 'variable', value: 'value', masked: true },
+ { key: 'variable2', value: 'result', masked: false }
+ ]
+ },
+ 'expansion using % with a masked variable': {
+ value: 'key%variable%',
+ result: 'keyvalue',
+ variables: [
+ { key: 'variable', value: 'value', masked: true }
+ ]
+ },
+ 'expansion without a masked variable': {
+ value: 'key$variable${variable2}',
+ result: 'keyvalueresult',
+ variables: [
+ { key: 'variable', value: 'value', masked: false },
+ { key: 'variable2', value: 'result', masked: false }
+ ]
+ }
+ }
+ end
+
+ with_them do
+ it { is_expected.to eq(result) }
+ end
+ end
+
describe '#expand' do
context 'table tests' do
it_behaves_like 'common variable expansion', described_class.method(:expand)
@@ -195,6 +291,10 @@ RSpec.describe ExpandVariables, feature_category: :secrets_management do
it_behaves_like 'file variable expansion with expand_file_refs false', described_class.method(:expand)
+ it_behaves_like 'masked variable expansion with fail_on_masked true', described_class.method(:expand)
+
+ it_behaves_like 'masked variable expansion with fail_on_masked false', described_class.method(:expand)
+
context 'with missing variables' do
using RSpec::Parameterized::TableSyntax
@@ -265,6 +365,10 @@ RSpec.describe ExpandVariables, feature_category: :secrets_management do
it_behaves_like 'file variable expansion with expand_file_refs false', described_class.method(:expand_existing)
+ it_behaves_like 'masked variable expansion with fail_on_masked true', described_class.method(:expand)
+
+ it_behaves_like 'masked variable expansion with fail_on_masked false', described_class.method(:expand)
+
context 'with missing variables' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/lib/extracts_path_spec.rb b/spec/lib/extracts_path_spec.rb
index 5db2fbd923e..a10ff60a249 100644
--- a/spec/lib/extracts_path_spec.rb
+++ b/spec/lib/extracts_path_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ExtractsPath do
+RSpec.describe ExtractsPath, feature_category: :source_code_management do
include described_class
include RepoHelpers
include Gitlab::Routing
@@ -215,7 +215,7 @@ RSpec.describe ExtractsPath do
end
it 'raises an error if there are no matching refs' do
- expect { extract_ref_without_atom('foo.atom') }.to raise_error(ExtractsRef::InvalidPathError)
+ expect { extract_ref_without_atom('foo.atom') }.to raise_error(ExtractsPath::InvalidPathError)
end
end
end
diff --git a/spec/lib/extracts_ref/ref_extractor_spec.rb b/spec/lib/extracts_ref/ref_extractor_spec.rb
new file mode 100644
index 00000000000..23b283967ca
--- /dev/null
+++ b/spec/lib/extracts_ref/ref_extractor_spec.rb
@@ -0,0 +1,125 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ExtractsRef::RefExtractor, feature_category: :source_code_management do
+ include RepoHelpers
+
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:container) { create(:snippet, :repository, author: owner) }
+
+ let(:ref) { sample_commit[:id] }
+ let(:path) { sample_commit[:line_code_path] }
+ let(:params) { { path: path, ref: ref } }
+
+ let(:ref_extractor) { described_class.new(container, params) }
+
+ before do
+ ref_names = ['master', 'foo/bar/baz', 'v1.0.0', 'v2.0.0', 'release/app', 'release/app/v1.0.0']
+
+ allow(container.repository).to receive(:ref_names).and_return(ref_names)
+ end
+
+ describe '#extract_vars!' do
+ it_behaves_like 'extracts ref vars'
+
+ context 'when ref contains trailing space' do
+ let(:ref) { 'master ' }
+
+ it 'strips surrounding space' do
+ ref_extractor.extract!
+
+ expect(ref_extractor.ref).to eq('master')
+ end
+ end
+
+ context 'when ref and path are nil' do
+ let(:ref) { nil }
+ let(:path) { nil }
+
+ it 'does not set commit' do
+ expect(container.repository).not_to receive(:commit).with('')
+
+ ref_extractor.extract!
+
+ expect(ref_extractor.commit).to be_nil
+ end
+ end
+
+ context 'when a ref_type parameter is provided' do
+ let(:params) { { path: path, ref: ref, ref_type: 'tags' } }
+
+ it 'sets a fully_qualified_ref variable' do
+ fully_qualified_ref = "refs/tags/#{ref}"
+
+ expect(container.repository).to receive(:commit).with(fully_qualified_ref)
+
+ ref_extractor.extract!
+
+ expect(ref_extractor.fully_qualified_ref).to eq(fully_qualified_ref)
+ end
+ end
+ end
+
+ describe '#ref_type' do
+ let(:params) { { ref_type: 'heads' } }
+
+ it 'delegates to .ref_type' do
+ expect(described_class).to receive(:ref_type).with('heads')
+
+ ref_extractor.ref_type
+ end
+ end
+
+ describe '.ref_type' do
+ subject { described_class.ref_type(ref_type) }
+
+ context 'when ref_type is nil' do
+ let(:ref_type) { nil }
+
+ it { is_expected.to eq(nil) }
+ end
+
+ context 'when ref_type is heads' do
+ let(:ref_type) { 'heads' }
+
+ it { is_expected.to eq('heads') }
+ end
+
+ context 'when ref_type is tags' do
+ let(:ref_type) { 'tags' }
+
+ it { is_expected.to eq('tags') }
+ end
+
+ context 'when ref_type is invalid' do
+ let(:ref_type) { 'invalid' }
+
+ it { is_expected.to eq(nil) }
+ end
+ end
+
+ describe '.qualify_ref' do
+ subject { described_class.qualify_ref(ref, ref_type) }
+
+ context 'when ref_type is nil' do
+ let(:ref_type) { nil }
+
+ it { is_expected.to eq(ref) }
+ end
+
+ context 'when ref_type valid' do
+ let(:ref_type) { 'heads' }
+
+ it { is_expected.to eq("refs/#{ref_type}/#{ref}") }
+ end
+
+ context 'when ref_type is invalid' do
+ let(:ref_type) { 'invalid' }
+
+ it { is_expected.to eq(ref) }
+ end
+ end
+
+ it_behaves_like 'extracts ref method'
+end
diff --git a/spec/lib/extracts_ref_spec.rb b/spec/lib/extracts_ref_spec.rb
index ac403ad642a..9ff11899e89 100644
--- a/spec/lib/extracts_ref_spec.rb
+++ b/spec/lib/extracts_ref_spec.rb
@@ -87,32 +87,16 @@ RSpec.describe ExtractsRef do
it { is_expected.to eq('tags') }
end
- context 'when ref_type is invalid' do
- let(:ref_type) { 'invalid' }
+ context 'when case does not match' do
+ let(:ref_type) { 'tAgS' }
- it { is_expected.to eq(nil) }
- end
- end
-
- describe '.qualify_ref' do
- subject { described_class.qualify_ref(ref, ref_type) }
-
- context 'when ref_type is nil' do
- let(:ref_type) { nil }
-
- it { is_expected.to eq(ref) }
- end
-
- context 'when ref_type valid' do
- let(:ref_type) { 'heads' }
-
- it { is_expected.to eq("refs/#{ref_type}/#{ref}") }
+ it { is_expected.to(eq('tags')) }
end
context 'when ref_type is invalid' do
let(:ref_type) { 'invalid' }
- it { is_expected.to eq(ref) }
+ it { is_expected.to eq(nil) }
end
end
diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb
index 044415b9952..7860d85457a 100644
--- a/spec/lib/feature_spec.rb
+++ b/spec/lib/feature_spec.rb
@@ -11,6 +11,38 @@ RSpec.describe Feature, :clean_gitlab_redis_feature_flag, stub_feature_flags: fa
skip_feature_flags_yaml_validation
end
+ describe '.current_request' do
+ it 'returns a FlipperRequest with a flipper_id' do
+ flipper_request = described_class.current_request
+
+ expect(flipper_request.flipper_id).to include("FlipperRequest:")
+ end
+
+ context 'when request store is inactive' do
+ it 'does not cache flipper_id' do
+ previous_id = described_class.current_request.flipper_id
+
+ expect(described_class.current_request.flipper_id).not_to eq(previous_id)
+ end
+ end
+
+ context 'when request store is active', :request_store do
+ it 'caches flipper_id when request store is active' do
+ previous_id = described_class.current_request.flipper_id
+
+ expect(described_class.current_request.flipper_id).to eq(previous_id)
+ end
+
+ it 'returns a new flipper_id when request ends' do
+ previous_id = described_class.current_request.flipper_id
+
+ RequestStore.end!
+
+ expect(described_class.current_request.flipper_id).not_to eq(previous_id)
+ end
+ end
+ end
+
describe '.get' do
let(:feature) { double(:feature) }
let(:key) { 'my_feature' }
@@ -299,6 +331,36 @@ RSpec.describe Feature, :clean_gitlab_redis_feature_flag, stub_feature_flags: fa
end
end
+ context 'with current_request actor' do
+ context 'when request store is inactive' do
+ it 'returns the approximate percentage set' do
+ number_of_times = 1_000
+ percentage = 50
+ described_class.enable_percentage_of_actors(:enabled_feature_flag, percentage)
+
+ gate_values = Array.new(number_of_times) do
+ described_class.enabled?(:enabled_feature_flag, described_class.current_request)
+ end
+
+ margin_of_error = 0.05 * number_of_times
+ expected_size = number_of_times * percentage / 100
+ expect(gate_values.count { |v| v }).to be_within(margin_of_error).of(expected_size)
+ end
+ end
+
+ context 'when request store is active', :request_store do
+ it 'always returns the same gate value' do
+ described_class.enable_percentage_of_actors(:enabled_feature_flag, 50)
+
+ previous_gate_value = described_class.enabled?(:enabled_feature_flag, described_class.current_request)
+
+ 1_000.times do
+ expect(described_class.enabled?(:enabled_feature_flag, described_class.current_request)).to eq(previous_gate_value)
+ end
+ end
+ end
+ end
+
context 'with a group member' do
let(:key) { :awesome_feature }
let(:guinea_pigs) { create_list(:user, 3) }
diff --git a/spec/lib/generators/batched_background_migration/batched_background_migration_generator_spec.rb b/spec/lib/generators/batched_background_migration/batched_background_migration_generator_spec.rb
index d60d0c3c853..2d48b83be4c 100644
--- a/spec/lib/generators/batched_background_migration/batched_background_migration_generator_spec.rb
+++ b/spec/lib/generators/batched_background_migration/batched_background_migration_generator_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe BatchedBackgroundMigration::BatchedBackgroundMigrationGenerator,
end
assert_migration('db/post_migrate/queue_my_batched_migration.rb') do |migration_file|
- expect(migration_file).to eq(expected_migration_file)
+ expect(migration_file).to eq(expected_migration_file.gsub('<migration_version>', fetch_migration_version))
end
assert_migration('spec/migrations/queue_my_batched_migration_spec.rb') do |migration_spec_file|
@@ -54,7 +54,7 @@ RSpec.describe BatchedBackgroundMigration::BatchedBackgroundMigrationGenerator,
end
let(:expected_ee_migration_job_file) { load_expected_file('ee_my_batched_migration.txt') }
- let(:expected_migration_job_spec_file) { load_expected_file('my_batched_migration_spec_matcher.txt') }
+ let(:expected_migration_job_spec_file) { load_expected_file('my_batched_migration_spec.txt') }
include_examples "generates files common to both types of migrations",
'foss_my_batched_migration.txt',
@@ -78,7 +78,7 @@ RSpec.describe BatchedBackgroundMigration::BatchedBackgroundMigrationGenerator,
run_generator %w[my_batched_migration --table_name=projects --column_name=id --feature_category=database]
end
- let(:expected_migration_job_spec_file) { load_expected_file('my_batched_migration_spec_matcher.txt') }
+ let(:expected_migration_job_spec_file) { load_expected_file('my_batched_migration_spec.txt') }
include_examples "generates files common to both types of migrations",
'my_batched_migration.txt',
@@ -88,8 +88,7 @@ RSpec.describe BatchedBackgroundMigration::BatchedBackgroundMigrationGenerator,
it 'generates expected files' do
assert_file('spec/lib/gitlab/background_migration/my_batched_migration_spec.rb') do |migration_job_spec_file|
- # Regex is used to match the dynamic schema: <version> in the specs
- expect(migration_job_spec_file).to match(/#{expected_migration_job_spec_file}/)
+ expect(migration_job_spec_file).to eq(expected_migration_job_spec_file)
end
end
end
@@ -99,4 +98,9 @@ RSpec.describe BatchedBackgroundMigration::BatchedBackgroundMigrationGenerator,
def load_expected_file(file_name)
File.read(File.expand_path("expected_files/#{file_name}", __dir__))
end
+
+ def fetch_migration_version
+ @migration_version ||= migration_file_name('db/post_migrate/queue_my_batched_migration.rb')
+ .match(%r{post_migrate/([0-9]+)_queue_my_batched_migration.rb})[1]
+ end
end
diff --git a/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_dictionary_matcher.txt b/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_dictionary_matcher.txt
index 6280d35177e..3b166bd4c4c 100644
--- a/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_dictionary_matcher.txt
+++ b/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_dictionary_matcher.txt
@@ -4,3 +4,7 @@ description: # Please capture what MyBatchedMigration does
feature_category: database
introduced_by_url: # URL of the MR \(or issue/commit\) that introduced the migration
milestone: [0-9\.]+
+queued_migration_version: [0-9]+
+# Replace with the approximate date you think it's best to ensure the completion of this BBM.
+finalize_after: # yyyy-mm-dd
+finalized_by: # version of the migration that ensured this bbm
diff --git a/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_spec_matcher.txt b/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_spec.txt
index 185f6deeade..185f6deeade 100644
--- a/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_spec_matcher.txt
+++ b/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_spec.txt
diff --git a/spec/lib/generators/batched_background_migration/expected_files/queue_my_batched_migration.txt b/spec/lib/generators/batched_background_migration/expected_files/queue_my_batched_migration.txt
index 536e07d56aa..aa79062422b 100644
--- a/spec/lib/generators/batched_background_migration/expected_files/queue_my_batched_migration.txt
+++ b/spec/lib/generators/batched_background_migration/expected_files/queue_my_batched_migration.txt
@@ -17,6 +17,7 @@ class QueueMyBatchedMigration < Gitlab::Database::Migration[2.1]
:projects,
:id,
job_interval: DELAY_INTERVAL,
+ queued_migration_version: '<migration_version>',
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
diff --git a/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb b/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb
index b75d75107ee..c52d17d4a5b 100644
--- a/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb
+++ b/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb
@@ -59,6 +59,33 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
}
end
+ let(:key_path_all) { "count_total_#{event}" }
+ let(:metric_definition_path_all) { Dir.glob(File.join(temp_dir, "metrics/counts_all/#{key_path_all}.yml")).first }
+ let(:metric_definition_all) do
+ {
+ "key_path" => key_path_all,
+ "description" => description,
+ "product_section" => section,
+ "product_stage" => stage,
+ "product_group" => group,
+ "performance_indicator_type" => [],
+ "value_type" => "number",
+ "status" => "active",
+ "milestone" => "13.9",
+ "introduced_by_url" => mr,
+ "time_frame" => "all",
+ "data_source" => "internal_events",
+ "data_category" => "optional",
+ "instrumentation_class" => "TotalCountMetric",
+ "distribution" => %w[ce ee],
+ "tier" => %w[free premium ultimate],
+ "options" => {
+ "events" => [event]
+ },
+ "events" => [{ "name" => event }]
+ }
+ end
+
before do
stub_const("#{described_class}::TOP_LEVEL_DIR_EE", ee_temp_dir)
stub_const("#{described_class}::TOP_LEVEL_DIR", temp_dir)
@@ -165,18 +192,27 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
context 'for single time frame' do
let(:time_frames) { %w[7d] }
- it 'creates a metric definition file using the template' do
+ it 'creates a metric definition file' do
described_class.new([], options).invoke_all
expect(YAML.safe_load(File.read(metric_definition_path_7d))).to eq(metric_definition_7d)
end
+ context 'with time frame "all"' do
+ let(:time_frames) { %w[all] }
+
+ it 'creates a total count metric definition file' do
+ described_class.new([], options).invoke_all
+ expect(YAML.safe_load(File.read(metric_definition_path_all))).to eq(metric_definition_all)
+ end
+ end
+
context 'for ultimate only feature' do
let(:metric_definition_path_7d) do
Dir.glob(File.join(ee_temp_dir, temp_dir, "metrics/counts_7d/#{key_path_7d}.yml")).first
end
- it 'creates a metric definition file using the template' do
+ it 'creates a metric definition file' do
described_class.new([], options.merge(tiers: %w[ultimate])).invoke_all
expect(YAML.safe_load(File.read(metric_definition_path_7d)))
@@ -200,14 +236,6 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
end
end
- context 'with unique value passed with a dot' do
- it 'creates a metric definition file using the template' do
- described_class.new([], options.merge(unique: 'user.id')).invoke_all
-
- expect(YAML.safe_load(File.read(metric_definition_path_7d))).to eq(metric_definition_7d)
- end
- end
-
context 'without at least one tier available' do
it 'raises error' do
expect { described_class.new([], options.merge(tiers: [])).invoke_all }
@@ -253,7 +281,7 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
end
context 'for multiple time frames' do
- let(:time_frames) { %w[7d 28d] }
+ let(:time_frames) { %w[7d 28d all] }
let(:key_path_28d) { "#{key_path_without_time_frame}_28d" }
let(:metric_definition_path_28d) { Dir.glob(File.join(temp_dir, "metrics/counts_28d/#{key_path_28d}.yml")).first }
let(:metric_definition_28d) do
@@ -263,11 +291,12 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
)
end
- it 'creates a metric definition file using the template' do
+ it 'creates metric definition files' do
described_class.new([], options).invoke_all
expect(YAML.safe_load(File.read(metric_definition_path_7d))).to eq(metric_definition_7d)
expect(YAML.safe_load(File.read(metric_definition_path_28d))).to eq(metric_definition_28d)
+ expect(YAML.safe_load(File.read(metric_definition_path_all))).to eq(metric_definition_all)
end
end
@@ -282,11 +311,12 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
)
end
- it 'creates a metric definition file using the template' do
+ it 'creates metric definition files' do
described_class.new([], options.without('time_frames')).invoke_all
expect(YAML.safe_load(File.read(metric_definition_path_7d))).to eq(metric_definition_7d)
expect(YAML.safe_load(File.read(metric_definition_path_28d))).to eq(metric_definition_28d)
+ expect(YAML.safe_load(File.read(metric_definition_path_all))).to eq(metric_definition_all)
end
end
end
diff --git a/spec/lib/generators/model/mocks/migration_file.txt b/spec/lib/generators/model/mocks/migration_file.txt
index 091e086ba65..c9e51e51863 100644
--- a/spec/lib/generators/model/mocks/migration_file.txt
+++ b/spec/lib/generators/model/mocks/migration_file.txt
@@ -17,6 +17,9 @@ class CreateModelGeneratorTestFoos < Gitlab::Database::Migration[2.1]
# comments:
# disable_ddl_transaction!
+ # Add dependent 'batched_background_migrations.queued_migration_version' values.
+ # DEPENDENT_BATCHED_BACKGROUND_MIGRATIONS = []
+
def change
create_table :model_generator_test_foos do |t|
diff --git a/spec/lib/gitlab/auth/auth_finders_spec.rb b/spec/lib/gitlab/auth/auth_finders_spec.rb
index b0ec46a3a0e..95199ae18de 100644
--- a/spec/lib/gitlab/auth/auth_finders_spec.rb
+++ b/spec/lib/gitlab/auth/auth_finders_spec.rb
@@ -1149,4 +1149,75 @@ RSpec.describe Gitlab::Auth::AuthFinders, feature_category: :system_access do
end
end
end
+
+ describe '#authentication_token_present?' do
+ subject { authentication_token_present? }
+
+ context 'no auth header/param/oauth' do
+ before do
+ request.headers['Random'] = 'Something'
+ set_param(:random, 'something')
+ end
+
+ it { is_expected.to be(false) }
+ end
+
+ context 'with auth header' do
+ before do
+ request.headers[header] = 'invalid'
+ end
+
+ context 'with private-token' do
+ let(:header) { 'Private-Token' }
+
+ it { is_expected.to be(true) }
+ end
+
+ context 'with job-token' do
+ let(:header) { 'Job-Token' }
+
+ it { is_expected.to be(true) }
+ end
+
+ context 'with deploy-token' do
+ let(:header) { 'Deploy-Token' }
+
+ it { is_expected.to be(true) }
+ end
+ end
+
+ context 'with authorization bearer (oauth token)' do
+ before do
+ request.headers['Authorization'] = 'Bearer invalid'
+ end
+
+ it { is_expected.to be(true) }
+ end
+
+ context 'with auth param' do
+ context 'with private_token' do
+ it 'returns true' do
+ set_param(:private_token, 'invalid')
+
+ expect(subject).to be(true)
+ end
+ end
+
+ context 'with job_token' do
+ it 'returns true' do
+ set_param(:job_token, 'invalid')
+
+ expect(subject).to be(true)
+ end
+ end
+
+ context 'with token' do
+ it 'returns true' do
+ set_param(:token, 'invalid')
+
+ expect(subject).to be(true)
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/auth/ldap/config_spec.rb b/spec/lib/gitlab/auth/ldap/config_spec.rb
index 160fd78b2b9..48039b58216 100644
--- a/spec/lib/gitlab/auth/ldap/config_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/config_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Auth::Ldap::Config do
+RSpec.describe Gitlab::Auth::Ldap::Config, feature_category: :system_access do
include LdapHelpers
before do
@@ -362,6 +362,19 @@ AtlErSqafbECNDSwS5BX8yDpu5yRBJ4xegO/rNlmb8ICRYkuJapD1xXicFOsmfUK
expect(config.omniauth_options.keys).not_to include(:bind_dn, :password)
end
+ it 'defaults to plain encryption when not configured' do
+ stub_ldap_config(
+ options: {
+ 'host' => 'ldap.example.com',
+ 'port' => 386,
+ 'base' => 'ou=users,dc=example,dc=com',
+ 'uid' => 'uid'
+ }
+ )
+
+ expect(config.omniauth_options).to include(encryption: 'plain')
+ end
+
it 'includes authentication options when auth is configured' do
stub_ldap_config(
options: {
diff --git a/spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb b/spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb
index 8c50b2acac6..5d01f09df41 100644
--- a/spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb
+++ b/spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Auth::OAuth::AuthHash, feature_category: :user_management do
- let(:provider) { 'ldap' }
+ let(:provider) { 'openid_connect' }
let(:auth_hash) do
described_class.new(
OmniAuth::AuthHash.new(
@@ -19,7 +19,6 @@ RSpec.describe Gitlab::Auth::OAuth::AuthHash, feature_category: :user_management
)
end
- let(:provider_config) { { 'args' => { 'gitlab_username_claim' => 'first_name' } } }
let(:uid_raw) do
+"CN=Onur K\xC3\xBC\xC3\xA7\xC3\xBCk,OU=Test,DC=example,DC=net"
end
@@ -90,6 +89,22 @@ RSpec.describe Gitlab::Auth::OAuth::AuthHash, feature_category: :user_management
end
end
+ context 'when username claim is in email format' do
+ let(:info_hash) do
+ {
+ email: nil,
+ name: 'GitLab test',
+ nickname: 'GitLab@gitlabsandbox.onmicrosoft.com',
+ uid: uid_ascii
+ }
+ end
+
+ it 'creates proper email and username fields' do
+ expect(auth_hash.username).to eql 'GitLab'
+ expect(auth_hash.email).to eql 'temp-email-for-oauth-GitLab@gitlab.localhost'
+ end
+ end
+
context 'name not provided' do
before do
info_hash.delete(:name)
@@ -101,8 +116,17 @@ RSpec.describe Gitlab::Auth::OAuth::AuthHash, feature_category: :user_management
end
context 'custom username field provided' do
+ let(:provider_config) do
+ GitlabSettings::Options.build(
+ {
+ name: provider,
+ args: { 'gitlab_username_claim' => 'first_name' }
+ }
+ )
+ end
+
before do
- allow(Gitlab::Auth::OAuth::Provider).to receive(:config_for).and_return(provider_config)
+ stub_omniauth_setting(providers: [provider_config])
end
it 'uses the custom field for the username within info' do
diff --git a/spec/lib/gitlab/auth/o_auth/user_spec.rb b/spec/lib/gitlab/auth/o_auth/user_spec.rb
index 78e0df91103..8a9182f6457 100644
--- a/spec/lib/gitlab/auth/o_auth/user_spec.rb
+++ b/spec/lib/gitlab/auth/o_auth/user_spec.rb
@@ -535,6 +535,37 @@ RSpec.describe Gitlab::Auth::OAuth::User, feature_category: :system_access do
end
end
+ context "and a corresponding LDAP person with some values being nil" do
+ before do
+ allow(ldap_user).to receive(:uid) { uid }
+ allow(ldap_user).to receive(:username) { uid }
+ allow(ldap_user).to receive(:name) { nil }
+ allow(ldap_user).to receive(:email) { nil }
+ allow(ldap_user).to receive(:dn) { dn }
+
+ allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_uid).and_return(ldap_user)
+
+ oauth_user.save # rubocop:disable Rails/SaveBang
+ end
+
+ it "creates the user correctly" do
+ expect(gl_user).to be_valid
+ expect(gl_user.username).to eq(uid)
+ expect(gl_user.name).to eq(info_hash[:name])
+ expect(gl_user.email).to eq(info_hash[:email])
+ end
+
+ it "does not have the attributes not provided by LDAP set as synced" do
+ expect(gl_user.user_synced_attributes_metadata.name_synced).to be_falsey
+ expect(gl_user.user_synced_attributes_metadata.email_synced).to be_falsey
+ end
+
+ it "does not have the attributes not provided by LDAP set as read-only" do
+ expect(gl_user.read_only_attribute?(:name)).to be_falsey
+ expect(gl_user.read_only_attribute?(:email)).to be_falsey
+ end
+ end
+
context 'and a corresponding LDAP person with a non-default username' do
before do
allow(ldap_user).to receive(:uid) { uid }
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index 8da617175ca..f5b9555916c 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
end
end
- context 'available_scopes' do
+ describe 'available_scopes' do
before do
stub_container_registry_config(enabled: true)
end
@@ -43,26 +43,26 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
expect(subject.all_available_scopes).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode read_observability write_observability create_runner k8s_proxy ai_features]
end
- it 'contains for non-admin user all non-default scopes without ADMIN access and without observability scopes' do
+ it 'contains for non-admin user all non-default scopes without ADMIN access and without observability scopes and ai_features' do
user = build_stubbed(:user, admin: false)
- expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features]
+ expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy]
end
- it 'contains for admin user all non-default scopes with ADMIN access and without observability scopes' do
+ it 'contains for admin user all non-default scopes with ADMIN access and without observability scopes and ai_features' do
user = build_stubbed(:user, admin: true)
- expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner k8s_proxy ai_features]
+ expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner k8s_proxy]
end
- it 'contains for project all resource bot scopes without observability scopes' do
- expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features]
+ it 'contains for project all resource bot scopes without ai_features' do
+ expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy]
end
it 'contains for group all resource bot scopes' do
- group = build_stubbed(:group)
+ group = build_stubbed(:group).tap { |g| g.namespace_settings = build_stubbed(:namespace_settings, namespace: g) }
- expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy ai_features]
+ expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy]
end
it 'contains for unsupported type no scopes' do
@@ -73,44 +73,101 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
expect(subject.optional_scopes).to match_array %i[read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode openid profile email read_observability write_observability create_runner k8s_proxy ai_features]
end
- context 'with observability_group_tab feature flag' do
+ describe 'ai_features scope' do
+ let(:resource) { nil }
+
+ subject { described_class.available_scopes_for(resource) }
+
+ context 'when resource is user', 'and user has a group with ai features' do
+ let(:resource) { build_stubbed(:user) }
+
+ it { is_expected.not_to include(:ai_features) }
+ end
+
+ context 'when resource is project' do
+ let(:resource) { build_stubbed(:project) }
+
+ it 'does not include ai_features scope' do
+ is_expected.not_to include(:ai_features)
+ end
+ end
+
+ context 'when resource is group' do
+ let(:resource) { build_stubbed(:group) }
+
+ it 'does not include ai_features scope' do
+ is_expected.not_to include(:ai_features)
+ end
+ end
+ end
+
+ context 'with observability_tracing feature flag' do
context 'when disabled' do
before do
- stub_feature_flags(observability_group_tab: false)
+ stub_feature_flags(observability_tracing: false)
end
it 'contains for group all resource bot scopes without observability scopes' do
- group = build_stubbed(:group)
+ group = build_stubbed(:group).tap do |g|
+ g.namespace_settings = build_stubbed(:namespace_settings, namespace: g)
+ end
- expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features]
+ expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy]
+ end
+
+ it 'contains for project all resource bot scopes without observability scopes' do
+ group = build_stubbed(:group).tap do |g|
+ g.namespace_settings = build_stubbed(:namespace_settings, namespace: g)
+ end
+ project = build_stubbed(:project, namespace: group)
+
+ expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy]
end
end
- context 'when enabled for specific group' do
- let(:group) { build_stubbed(:group) }
+ context 'when enabled for specific root group' do
+ let(:parent) { build_stubbed(:group) }
+ let(:group) do
+ build_stubbed(:group, parent: parent).tap { |g| g.namespace_settings = build_stubbed(:namespace_settings, namespace: g) }
+ end
+
+ let(:project) { build_stubbed(:project, namespace: group) }
before do
- stub_feature_flags(observability_group_tab: group)
+ stub_feature_flags(observability_tracing: parent)
end
- it 'contains for other group all resource bot scopes including observability scopes' do
- expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy ai_features]
+ it 'contains for group all resource bot scopes including observability scopes' do
+ expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy]
end
it 'contains for admin user all non-default scopes with ADMIN access and without observability scopes' do
user = build_stubbed(:user, admin: true)
- expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner k8s_proxy ai_features]
+ expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner k8s_proxy]
end
- it 'contains for project all resource bot scopes without observability scopes' do
- expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features]
+ it 'contains for project all resource bot scopes including observability scopes' do
+ expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy]
end
it 'contains for other group all resource bot scopes without observability scopes' do
- other_group = build_stubbed(:group)
+ other_parent = build_stubbed(:group)
+ other_group = build_stubbed(:group, parent: other_parent).tap do |g|
+ g.namespace_settings = build_stubbed(:namespace_settings, namespace: g)
+ end
+
+ expect(subject.available_scopes_for(other_group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy]
+ end
+
+ it 'contains for other project all resource bot scopes without observability scopes' do
+ other_parent = build_stubbed(:group)
+ other_group = build_stubbed(:group, parent: other_parent).tap do |g|
+ g.namespace_settings = build_stubbed(:namespace_settings, namespace: g)
+ end
+ other_project = build_stubbed(:project, namespace: other_group)
- expect(subject.available_scopes_for(other_group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features]
+ expect(subject.available_scopes_for(other_project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy]
end
end
end
diff --git a/spec/lib/gitlab/background_migration/backfill_finding_id_in_vulnerabilities_spec.rb b/spec/lib/gitlab/background_migration/backfill_finding_id_in_vulnerabilities_spec.rb
new file mode 100644
index 00000000000..3dbb1b34726
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_finding_id_in_vulnerabilities_spec.rb
@@ -0,0 +1,133 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+RSpec.describe Gitlab::BackgroundMigration::BackfillFindingIdInVulnerabilities, schema: 20230912105945, feature_category: :vulnerability_management do # rubocop:disable Layout/LineLength
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:users) { table(:users) }
+ let(:members) { table(:members) }
+ let(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
+ let(:vulnerability_scanners) { table(:vulnerability_scanners) }
+ let(:vulnerability_findings) { table(:vulnerability_occurrences) }
+ let(:vulnerabilities) { table(:vulnerabilities) }
+ let!(:user) { create_user(email: "test1@example.com", username: "test1") }
+ let!(:namespace) { namespaces.create!(name: "test-1", path: "test-1", owner_id: user.id) }
+ let!(:project) do
+ projects.create!(
+ id: 9999, namespace_id: namespace.id,
+ project_namespace_id: namespace.id,
+ creator_id: user.id
+ )
+ end
+
+ let!(:membership) do
+ members.create!(access_level: 50, source_id: project.id, source_type: "Project", user_id: user.id, state: 0,
+ notification_level: 3, type: "ProjectMember", member_namespace_id: namespace.id)
+ end
+
+ let(:migration_attrs) do
+ {
+ start_id: vulnerabilities.first.id,
+ end_id: vulnerabilities.last.id,
+ batch_table: :vulnerabilities,
+ batch_column: :id,
+ sub_batch_size: 100,
+ pause_ms: 0,
+ connection: ApplicationRecord.connection
+ }
+ end
+
+ describe "#perform" do
+ subject(:background_migration) { described_class.new(**migration_attrs).perform }
+
+ # This scenario is what usually happens because we first create a Vulnerabilities::Finding record, then create
+ # a Vulnerability record and populate the Vulnerabilities::Finding#vulnerability_id
+ let(:vulnerabilities_finding_1) { create_finding(project, vulnerability_id: vulnerability_without_finding_id.id) }
+ let(:vulnerability_without_finding_id) { create_vulnerability }
+
+ # This scenario can occur because we have modified our Vulnerabilities ingestion pipeline to populate
+ # vulnerabilities.finding_id as soon as possible
+ let(:vulnerabilities_finding_2) { create_finding(project) }
+ let(:vulnerability_with_finding_id) { create_vulnerability(finding_id: vulnerabilities_finding_2.id) }
+
+ it 'backfills finding_id column in the vulnerabilities table' do
+ expect { background_migration }.to change { vulnerability_without_finding_id.reload.finding_id }
+ .from(nil).to(vulnerabilities_finding_1.id)
+ end
+
+ it 'does not affect rows with finding_id populated' do
+ expect { background_migration }.not_to change { vulnerability_with_finding_id.reload.finding_id }
+ end
+ end
+
+ private
+
+ def create_scanner(project, overrides = {})
+ attrs = {
+ project_id: project.id,
+ external_id: "test_vulnerability_scanner",
+ name: "Test Vulnerabilities::Scanner"
+ }.merge(overrides)
+
+ vulnerability_scanners.create!(attrs)
+ end
+
+ def create_identifier(project, overrides = {})
+ attrs = {
+ project_id: project.id,
+ external_id: "CVE-2018-1234",
+ external_type: "CVE",
+ name: "CVE-2018-1234",
+ fingerprint: SecureRandom.hex(20)
+ }.merge(overrides)
+
+ vulnerability_identifiers.create!(attrs)
+ end
+
+ def create_finding(project, overrides = {})
+ attrs = {
+ project_id: project.id,
+ scanner_id: create_scanner(project).id,
+ severity: 5, # medium
+      confidence: 2, # unknown
+ report_type: 99, # generic
+ primary_identifier_id: create_identifier(project).id,
+ project_fingerprint: SecureRandom.hex(20),
+ location_fingerprint: SecureRandom.hex(20),
+ uuid: SecureRandom.uuid,
+ name: "CVE-2018-1234",
+ raw_metadata: "{}",
+ metadata_version: "test:1.0"
+ }.merge(overrides)
+
+ vulnerability_findings.create!(attrs)
+ end
+
+ def create_vulnerability(overrides = {})
+ attrs = {
+ project_id: project.id,
+ author_id: user.id,
+ title: 'test',
+ severity: 1,
+ confidence: 1,
+ report_type: 1,
+ state: 1,
+ detected_at: Time.zone.now
+ }.merge(overrides)
+
+ vulnerabilities.create!(attrs)
+ end
+
+ def create_user(overrides = {})
+ attrs = {
+ email: "test@example.com",
+ notification_email: "test@example.com",
+ name: "test",
+ username: "test",
+ state: "active",
+ projects_limit: 10
+ }.merge(overrides)
+
+ users.create!(attrs)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_has_remediations_of_vulnerability_reads_spec.rb b/spec/lib/gitlab/background_migration/backfill_has_remediations_of_vulnerability_reads_spec.rb
new file mode 100644
index 00000000000..0e7a0210758
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_has_remediations_of_vulnerability_reads_spec.rb
@@ -0,0 +1,136 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillHasRemediationsOfVulnerabilityReads,
+ feature_category: :database do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:users) { table(:users) }
+ let(:scanners) { table(:vulnerability_scanners) }
+ let(:vulnerabilities) { table(:vulnerabilities) }
+ let(:vulnerability_reads) { table(:vulnerability_reads) }
+
+ let(:namespace) { namespaces.create!(name: 'user', path: 'user') }
+ let(:project) { projects.create!(namespace_id: namespace.id, project_namespace_id: namespace.id) }
+ let(:user) { users.create!(username: 'john_doe', email: 'johndoe@gitlab.com', projects_limit: 10) }
+ let(:scanner) { scanners.create!(project_id: project.id, external_id: 'external_id', name: 'Test Scanner') }
+
+ let(:vulnerability_1) { create_vulnerability(title: 'vulnerability 1') }
+ let(:vulnerability_2) { create_vulnerability(title: 'vulnerability 2') }
+
+ let!(:vulnerability_read_1) { create_vulnerability_read(vulnerability_id: vulnerability_1.id) }
+ let!(:vulnerability_read_2) { create_vulnerability_read(vulnerability_id: vulnerability_2.id) }
+
+ let(:vulnerability_findings) { table(:vulnerability_occurrences) }
+ let(:vulnerability_findings_remediations) { table(:vulnerability_findings_remediations) }
+ let(:vulnerability_remediations) { table(:vulnerability_remediations) }
+ let(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
+
+ subject(:perform_migration) do
+ described_class.new(
+ start_id: vulnerability_reads.first.vulnerability_id,
+ end_id: vulnerability_reads.last.vulnerability_id,
+ batch_table: :vulnerability_reads,
+ batch_column: :vulnerability_id,
+ sub_batch_size: vulnerability_reads.count,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection
+ ).perform
+ end
+
+ it 'updates vulnerability_reads records which has remediations' do
+ vuln_remediation = create_remediation
+ vuln_finding = create_finding(vulnerability_id: vulnerability_1.id)
+ vulnerability_findings_remediations.create!(
+ vulnerability_occurrence_id: vuln_finding.id,
+ vulnerability_remediation_id: vuln_remediation.id
+ )
+
+ expect { perform_migration }.to change { vulnerability_read_1.reload.has_remediations }.from(false).to(true)
+ .and not_change { vulnerability_read_2.reload.has_remediations }.from(false)
+ end
+
+ it 'does not modify has_remediations of vulnerabilities which do not have remediations' do
+ expect { perform_migration }.to not_change { vulnerability_read_1.reload.has_remediations }.from(false)
+ .and not_change { vulnerability_read_2.reload.has_remediations }.from(false)
+ end
+
+ private
+
+ def create_vulnerability(overrides = {})
+ attrs = {
+ project_id: project.id,
+ author_id: user.id,
+ title: 'test',
+ severity: 1,
+ confidence: 1,
+ report_type: 1
+ }.merge(overrides)
+
+ vulnerabilities.create!(attrs)
+ end
+
+ def create_vulnerability_read(overrides = {})
+ attrs = {
+ project_id: project.id,
+ vulnerability_id: 1,
+ scanner_id: scanner.id,
+ severity: 1,
+ report_type: 1,
+ state: 1,
+ uuid: SecureRandom.uuid
+ }.merge(overrides)
+
+ vulnerability_reads.create!(attrs)
+ end
+
+ def create_finding(overrides = {})
+ attrs = {
+ project_id: project.id,
+ scanner_id: scanner.id,
+ severity: 5, # medium
+      confidence: 2, # unknown
+ report_type: 99, # generic
+ primary_identifier_id: create_identifier.id,
+ project_fingerprint: SecureRandom.hex(20),
+ location_fingerprint: SecureRandom.hex(20),
+ uuid: SecureRandom.uuid,
+ name: "CVE-2018-1234",
+ raw_metadata: "{}",
+ metadata_version: "test:1.0"
+ }.merge(overrides)
+
+ vulnerability_findings.create!(attrs)
+ end
+
+ def create_remediation(overrides = {})
+ remediation_hash = { summary: 'summary', diff: "ZGlmZiAtLWdp" }
+
+ attrs = {
+ project_id: project.id,
+ summary: remediation_hash[:summary],
+ checksum: checksum(remediation_hash[:diff]),
+ file: Tempfile.new.path
+ }.merge(overrides)
+
+ vulnerability_remediations.create!(attrs)
+ end
+
+ def create_identifier(overrides = {})
+ attrs = {
+ project_id: project.id,
+ external_id: "CVE-2018-1234",
+ external_type: "CVE",
+ name: "CVE-2018-1234",
+ fingerprint: SecureRandom.hex(20)
+ }.merge(overrides)
+
+ vulnerability_identifiers.create!(attrs)
+ end
+
+ def checksum(value)
+ sha = Digest::SHA256.hexdigest(value)
+ Gitlab::Database::ShaAttribute.new.serialize(sha)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/delete_orphans_approval_merge_request_rules2_spec.rb b/spec/lib/gitlab/background_migration/delete_orphans_approval_merge_request_rules2_spec.rb
new file mode 100644
index 00000000000..81dd37f0fe9
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/delete_orphans_approval_merge_request_rules2_spec.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::DeleteOrphansApprovalMergeRequestRules2, feature_category: :security_policy_management do
+ describe '#perform' do
+ let(:batch_table) { :approval_merge_request_rules }
+ let(:batch_column) { :id }
+ let(:sub_batch_size) { 1 }
+ let(:pause_ms) { 0 }
+ let(:connection) { ApplicationRecord.connection }
+
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:approval_project_rules) { table(:approval_project_rules) }
+ let(:approval_merge_request_rules) { table(:approval_merge_request_rules) }
+ let(:approval_merge_request_rule_sources) { table(:approval_merge_request_rule_sources) }
+ let(:security_orchestration_policy_configurations) { table(:security_orchestration_policy_configurations) }
+ let(:namespace) { namespaces.create!(name: 'name', path: 'path') }
+ let(:project) do
+ projects
+ .create!(name: "project", path: "project", namespace_id: namespace.id, project_namespace_id: namespace.id)
+ end
+
+ let(:namespace_2) { namespaces.create!(name: 'name_2', path: 'path_2') }
+ let(:security_project) do
+ projects
+ .create!(name: "security_project", path: "security_project", namespace_id: namespace_2.id,
+ project_namespace_id: namespace_2.id)
+ end
+
+ let!(:security_orchestration_policy_configuration) do
+ security_orchestration_policy_configurations
+ .create!(project_id: project.id, security_policy_management_project_id: security_project.id)
+ end
+
+ let(:merge_request) do
+ table(:merge_requests).create!(target_project_id: project.id, target_branch: 'main', source_branch: 'feature')
+ end
+
+ let!(:approval_rule) do
+ approval_merge_request_rules.create!(
+ name: 'rule',
+ merge_request_id: merge_request.id,
+ report_type: 4,
+ security_orchestration_policy_configuration_id: security_orchestration_policy_configuration.id)
+ end
+
+ let!(:approval_rule_other_report_type) do
+ approval_merge_request_rules.create!(
+ name: 'rule 2',
+ merge_request_id: merge_request.id,
+ report_type: 1,
+ security_orchestration_policy_configuration_id: security_orchestration_policy_configuration.id)
+ end
+
+ let!(:approval_rule_license_scanning) do
+ approval_merge_request_rules.create!(
+ name: 'rule 4',
+ merge_request_id: merge_request.id,
+ report_type: 2,
+ security_orchestration_policy_configuration_id: security_orchestration_policy_configuration.id)
+ end
+
+ let!(:approval_rule_license_scanning_without_policy_id) do
+ approval_merge_request_rules.create!(name: 'rule 5', merge_request_id: merge_request.id, report_type: 2)
+ end
+
+ let!(:approval_rule_last) do
+ approval_merge_request_rules.create!(name: 'rule 3', merge_request_id: merge_request.id, report_type: 4)
+ end
+
+ subject do
+ described_class.new(
+ start_id: approval_rule.id,
+ end_id: approval_rule_last.id,
+ batch_table: batch_table,
+ batch_column: batch_column,
+ sub_batch_size: sub_batch_size,
+ pause_ms: pause_ms,
+ connection: connection
+ ).perform
+ end
+
+ it 'delete only approval rules without association with the security project and report_type equals to 4 or 2' do
+ expect { subject }.to change { approval_merge_request_rules.all }.to(
+ contain_exactly(approval_rule,
+ approval_rule_other_report_type,
+ approval_rule_license_scanning))
+ end
+
+ context 'with rule sources' do # rubocop: disable RSpec/MultipleMemoizedHelpers
+ let(:project_approval_rule_1) { approval_project_rules.create!(project_id: project.id, name: '1') }
+ let(:project_approval_rule_2) { approval_project_rules.create!(project_id: project.id, name: '2') }
+
+ let!(:rule_source_1) do
+ approval_merge_request_rule_sources.create!(
+ approval_merge_request_rule_id: approval_rule_license_scanning_without_policy_id.id,
+ approval_project_rule_id: project_approval_rule_1.id)
+ end
+
+ let!(:rule_source_2) do
+ approval_merge_request_rule_sources.create!(
+ approval_merge_request_rule_id: approval_rule_other_report_type.id,
+ approval_project_rule_id: project_approval_rule_2.id)
+ end
+
+ it 'deletes referenced sources' do
+ # rubocop: disable CodeReuse/ActiveRecord
+ expect { subject }.to change { approval_merge_request_rule_sources.exists?(rule_source_1.id) }.to(false)
+ # rubocop: enable CodeReuse/ActiveRecord
+ end
+
+ it 'does not delete unreferenced sources' do
+ # rubocop: disable CodeReuse/ActiveRecord
+ expect { subject }.not_to change { approval_merge_request_rule_sources.exists?(rule_source_2.id) }
+ # rubocop: enable CodeReuse/ActiveRecord
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/delete_orphans_approval_project_rules2_spec.rb b/spec/lib/gitlab/background_migration/delete_orphans_approval_project_rules2_spec.rb
new file mode 100644
index 00000000000..c6563efe173
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/delete_orphans_approval_project_rules2_spec.rb
@@ -0,0 +1,122 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::DeleteOrphansApprovalProjectRules2, feature_category: :security_policy_management do
+ describe '#perform' do
+ let(:batch_table) { :approval_project_rules }
+ let(:batch_column) { :id }
+ let(:sub_batch_size) { 1 }
+ let(:pause_ms) { 0 }
+ let(:connection) { ApplicationRecord.connection }
+
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:approval_project_rules) { table(:approval_project_rules) }
+ let(:approval_merge_request_rules) { table(:approval_merge_request_rules) }
+ let(:approval_merge_request_rule_sources) { table(:approval_merge_request_rule_sources) }
+ let(:security_orchestration_policy_configurations) { table(:security_orchestration_policy_configurations) }
+ let(:namespace) { namespaces.create!(name: 'name', path: 'path') }
+ let(:project) do
+ projects
+ .create!(name: "project", path: "project", namespace_id: namespace.id, project_namespace_id: namespace.id)
+ end
+
+ let(:merge_request) do
+ table(:merge_requests).create!(target_project_id: project.id, target_branch: 'main', source_branch: 'feature')
+ end
+
+ let(:namespace_2) { namespaces.create!(name: 'name_2', path: 'path_2') }
+ let(:security_project) do
+ projects
+ .create!(name: "security_project", path: "security_project", namespace_id: namespace_2.id,
+ project_namespace_id: namespace_2.id)
+ end
+
+ let!(:security_orchestration_policy_configuration) do
+ security_orchestration_policy_configurations
+ .create!(project_id: project.id, security_policy_management_project_id: security_project.id)
+ end
+
+ let!(:project_rule) do
+ approval_project_rules.create!(
+ name: 'rule',
+ project_id: project.id,
+ report_type: 4,
+ security_orchestration_policy_configuration_id: security_orchestration_policy_configuration.id)
+ end
+
+ let!(:project_rule_other_report_type) do
+ approval_project_rules.create!(
+ name: 'rule 2',
+ project_id: project.id,
+ report_type: 1,
+ security_orchestration_policy_configuration_id: security_orchestration_policy_configuration.id)
+ end
+
+ let!(:project_rule_license_scanning) do
+ approval_project_rules.create!(
+ name: 'rule 4',
+ project_id: project.id,
+ report_type: 2,
+ security_orchestration_policy_configuration_id: security_orchestration_policy_configuration.id)
+ end
+
+ let!(:project_rule_license_scanning_without_policy_id) do
+ approval_project_rules.create!(name: 'rule 5', project_id: project.id, report_type: 2)
+ end
+
+ let!(:project_rule_last) do
+ approval_project_rules.create!(name: 'rule 3', project_id: project.id, report_type: 4)
+ end
+
+ subject do
+ described_class.new(
+ start_id: project_rule.id,
+ end_id: project_rule_last.id,
+ batch_table: batch_table,
+ batch_column: batch_column,
+ sub_batch_size: sub_batch_size,
+ pause_ms: pause_ms,
+ connection: connection
+ ).perform
+ end
+
+ it 'delete only approval rules without association with the security project and report_type equals to 4' do
+ expect { subject }.to change { approval_project_rules.all }.to(
+ contain_exactly(project_rule,
+ project_rule_other_report_type,
+ project_rule_license_scanning))
+ end
+
+ context 'with rule sources' do # rubocop: disable RSpec/MultipleMemoizedHelpers
+ let(:approval_merge_request_rule_1) do
+ approval_merge_request_rules.create!(merge_request_id: merge_request.id, name: '1')
+ end
+
+ let(:approval_merge_request_rule_2) do
+ approval_merge_request_rules.create!(merge_request_id: merge_request.id, name: '2')
+ end
+
+ let!(:rule_source_1) do
+ approval_merge_request_rule_sources.create!(
+ approval_merge_request_rule_id: approval_merge_request_rule_1.id,
+ approval_project_rule_id: project_rule_license_scanning_without_policy_id.id)
+ end
+
+ let!(:rule_source_2) do
+ approval_merge_request_rule_sources.create!(
+ approval_merge_request_rule_id: approval_merge_request_rule_2.id,
+ approval_project_rule_id: project_rule_other_report_type.id)
+ end
+
+ it 'deletes referenced sources' do
+ expect { subject }.to change { approval_merge_request_rule_sources.exists?(rule_source_1.id) }.to(false)
+ end
+
+ it 'does not delete unreferenced sources' do
+ expect { subject }.not_to change { approval_merge_request_rule_sources.exists?(rule_source_2.id) }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
index 9786e7a364e..517d557d665 100644
--- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
@@ -112,6 +112,7 @@ RSpec.describe Gitlab::BitbucketImport::Importer, :clean_gitlab_redis_cache, fea
end
let(:pull_request_author) { 'other' }
+ let(:comments) { [@inline_note, @reply] }
let(:author_line) { "*Created by: someuser*\n\n" }
@@ -145,8 +146,6 @@ RSpec.describe Gitlab::BitbucketImport::Importer, :clean_gitlab_redis_cache, fea
has_parent?: true,
parent_id: 2)
- comments = [@inline_note, @reply]
-
allow(subject.client).to receive(:repo)
allow(subject.client).to receive(:pull_requests).and_return([pull_request])
allow(subject.client).to receive(:pull_request_comments).with(anything, pull_request.iid).and_return(comments)
@@ -202,6 +201,12 @@ RSpec.describe Gitlab::BitbucketImport::Importer, :clean_gitlab_redis_cache, fea
end
end
+ it 'calls RefConverter to convert Bitbucket refs to Gitlab refs' do
+ expect(subject.instance_values['ref_converter']).to receive(:convert_note).twice
+
+ subject.execute
+ end
+
context 'when importing a pull request throws an exception' do
before do
allow(pull_request).to receive(:raw).and_return({ error: "broken" })
@@ -384,6 +389,12 @@ RSpec.describe Gitlab::BitbucketImport::Importer, :clean_gitlab_redis_cache, fea
expect(label_after_import.attributes).to eq(existing_label.attributes)
end
end
+
+ it 'raises an error if a label is not valid' do
+ stub_const("#{described_class}::LABELS", [{ title: nil, color: nil }])
+
+ expect { importer.create_labels }.to raise_error(StandardError, /Failed to create label/)
+ end
end
it 'maps statuses to open or closed' do
@@ -444,26 +455,33 @@ RSpec.describe Gitlab::BitbucketImport::Importer, :clean_gitlab_redis_cache, fea
end
context 'with issue comments' do
+ let(:note) { 'Hello world' }
let(:inline_note) do
- instance_double(Bitbucket::Representation::Comment, note: 'Hello world', author: 'someuser', created_at: Time.now, updated_at: Time.now)
+ instance_double(Bitbucket::Representation::Comment, note: note, author: 'someuser', created_at: Time.now, updated_at: Time.now)
end
before do
allow_next_instance_of(Bitbucket::Client) do |instance|
allow(instance).to receive(:issue_comments).and_return([inline_note])
end
+ allow(importer).to receive(:import_wiki)
end
it 'imports issue comments' do
- allow(importer).to receive(:import_wiki)
importer.execute
comment = project.notes.first
expect(project.notes.size).to eq(7)
- expect(comment.note).to include(inline_note.note)
+ expect(comment.note).to include(note)
expect(comment.note).to include(inline_note.author)
expect(importer.errors).to be_empty
end
+
+ it 'calls RefConverter to convert Bitbucket refs to Gitlab refs' do
+ expect(importer.instance_values['ref_converter']).to receive(:convert_note).exactly(7).times
+
+ importer.execute
+ end
end
context 'when issue was already imported' do
diff --git a/spec/lib/gitlab/bitbucket_import/importers/issue_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/issue_importer_spec.rb
new file mode 100644
index 00000000000..8f79390d2d9
--- /dev/null
+++ b/spec/lib/gitlab/bitbucket_import/importers/issue_importer_spec.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::Importers::IssueImporter, :clean_gitlab_redis_cache, feature_category: :importers do
+ include AfterNextHelpers
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:bitbucket_user) { create(:user) }
+ let_it_be(:identity) { create(:identity, user: bitbucket_user, extern_uid: 'bitbucket_user', provider: :bitbucket) }
+ let_it_be(:default_work_item_type) { create(:work_item_type) }
+ let_it_be(:label) { create(:label, project: project) }
+
+ let(:hash) do
+ {
+ iid: 111,
+ title: 'title',
+ description: 'description',
+ state: 'closed',
+ author: 'bitbucket_user',
+ milestone: 'my milestone',
+ issue_type_id: default_work_item_type.id,
+ label_id: label.id,
+ created_at: Date.today,
+ updated_at: Date.today
+ }
+ end
+
+ subject(:importer) { described_class.new(project, hash) }
+
+ before do
+ allow(Gitlab::Git).to receive(:ref_name).and_return('refname')
+ end
+
+ describe '#execute' do
+ it 'creates an issue' do
+ expect { importer.execute }.to change { project.issues.count }.from(0).to(1)
+
+ issue = project.issues.first
+
+ expect(issue.description).to eq('description')
+ expect(issue.author).to eq(bitbucket_user)
+ expect(issue.closed?).to be_truthy
+ expect(issue.milestone).to eq(project.milestones.first)
+ expect(issue.work_item_type).to eq(default_work_item_type)
+ expect(issue.labels).to eq([label])
+ expect(issue.created_at).to eq(Date.today)
+ expect(issue.updated_at).to eq(Date.today)
+ end
+
+ context 'when the author does not have a bitbucket identity' do
+ before do
+ identity.update!(provider: :github)
+ end
+
+ it 'sets the author to the project creator and adds the author to the description' do
+ importer.execute
+
+ issue = project.issues.first
+
+ expect(issue.author).to eq(project.creator)
+ expect(issue.description).to eq("*Created by: bitbucket_user*\n\ndescription")
+ end
+ end
+
+ context 'when a milestone with the same title exists' do
+ let_it_be(:milestone) { create(:milestone, project: project, title: 'my milestone') }
+
+ it 'assigns the milestone and does not create a new milestone' do
+ expect { importer.execute }.not_to change { project.milestones.count }
+
+ expect(project.issues.first.milestone).to eq(milestone)
+ end
+ end
+
+ context 'when a milestone with the same title does not exist' do
+ it 'creates a new milestone and assigns it' do
+ expect { importer.execute }.to change { project.milestones.count }.from(0).to(1)
+
+ expect(project.issues.first.milestone).to eq(project.milestones.first)
+ end
+ end
+
+ context 'when an error is raised' do
+ it 'tracks the failure and does not fail' do
+ expect(Gitlab::Import::ImportFailureService).to receive(:track).once
+
+ described_class.new(project, hash.except(:title)).execute
+ end
+ end
+
+ it 'logs its progress' do
+ allow(Gitlab::Import::MergeRequestCreator).to receive_message_chain(:new, :execute)
+
+ expect(Gitlab::BitbucketImport::Logger)
+ .to receive(:info).with(include(message: 'starting', iid: anything)).and_call_original
+ expect(Gitlab::BitbucketImport::Logger)
+ .to receive(:info).with(include(message: 'finished', iid: anything)).and_call_original
+
+ importer.execute
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_import/importers/issue_notes_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/issue_notes_importer_spec.rb
new file mode 100644
index 00000000000..1a2a43d6877
--- /dev/null
+++ b/spec/lib/gitlab/bitbucket_import/importers/issue_notes_importer_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::Importers::IssueNotesImporter, :clean_gitlab_redis_cache, feature_category: :importers do
+ let_it_be(:project) do
+ create(:project, :import_started, import_source: 'namespace/repo',
+ import_data_attributes: {
+ credentials: { 'base_uri' => 'http://bitbucket.org/', 'user' => 'bitbucket', 'password' => 'password' }
+ }
+ )
+ end
+
+ let_it_be(:bitbucket_user) { create(:user) }
+ let_it_be(:identity) { create(:identity, user: bitbucket_user, extern_uid: 'bitbucket_user', provider: :bitbucket) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let(:hash) { { iid: issue.iid } }
+ let(:note_body) { 'body' }
+ let(:client) { Bitbucket::Client.new({}) }
+
+ subject(:importer) { described_class.new(project, hash) }
+
+ describe '#execute' do
+ let(:issue_comments_response) do
+ [
+ Bitbucket::Representation::Comment.new({
+ 'user' => { 'nickname' => 'bitbucket_user' },
+ 'content' => { 'raw' => note_body },
+ 'created_on' => Date.today,
+ 'updated_on' => Date.today
+ })
+ ]
+ end
+
+ before do
+ allow(Bitbucket::Client).to receive(:new).and_return(client)
+ allow(client).to receive(:issue_comments).and_return(issue_comments_response)
+ end
+
+ it 'creates a new note with the correct attributes' do
+ expect { importer.execute }.to change { issue.notes.count }.from(0).to(1)
+
+ note = issue.notes.first
+
+ expect(note.project).to eq(project)
+ expect(note.note).to eq(note_body)
+ expect(note.author).to eq(bitbucket_user)
+ expect(note.created_at).to eq(Date.today)
+ expect(note.updated_at).to eq(Date.today)
+ end
+
+ context 'when the author does not have a bitbucket identity' do
+ before do
+ identity.update!(provider: :github)
+ end
+
+ it 'sets the author to the project creator and adds the author to the note' do
+ importer.execute
+
+ note = issue.notes.first
+
+ expect(note.author).to eq(project.creator)
+ expect(note.note).to eq("*Created by: bitbucket_user*\n\nbody")
+ end
+ end
+
+ it 'calls RefConverter to convert Bitbucket refs to Gitlab refs' do
+ expect(importer.instance_values['ref_converter']).to receive(:convert_note).once
+
+ importer.execute
+ end
+
+ context 'when an error is raised' do
+ before do
+ allow(client).to receive(:issue_comments).and_raise(StandardError)
+ end
+
+ it 'tracks the failure and does not fail' do
+ expect(Gitlab::Import::ImportFailureService).to receive(:track).once
+
+ importer.execute
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_import/importers/issues_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/issues_importer_spec.rb
new file mode 100644
index 00000000000..a361a9343dd
--- /dev/null
+++ b/spec/lib/gitlab/bitbucket_import/importers/issues_importer_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::Importers::IssuesImporter, feature_category: :importers do
+ let_it_be(:project) do
+ create(:project, :import_started,
+ import_data_attributes: {
+ data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
+ credentials: { 'base_uri' => 'http://bitbucket.org/', 'user' => 'bitbucket', 'password' => 'password' }
+ }
+ )
+ end
+
+ subject(:importer) { described_class.new(project) }
+
+ describe '#execute', :clean_gitlab_redis_cache do
+ before do
+ allow_next_instance_of(Bitbucket::Client) do |client|
+ allow(client).to receive(:issues).and_return(
+ [
+ Bitbucket::Representation::Issue.new({ 'id' => 1 }),
+ Bitbucket::Representation::Issue.new({ 'id' => 2 })
+ ],
+ []
+ )
+ end
+ end
+
+ it 'imports each issue in parallel', :aggregate_failures do
+ expect(Gitlab::BitbucketImport::ImportIssueWorker).to receive(:perform_in).twice
+
+ waiter = importer.execute
+
+ expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
+ expect(waiter.jobs_remaining).to eq(2)
+ expect(Gitlab::Cache::Import::Caching.values_from_set(importer.already_enqueued_cache_key))
+ .to match_array(%w[1 2])
+ end
+
+ context 'when the client raises an error' do
+ before do
+ allow_next_instance_of(Bitbucket::Client) do |client|
+ allow(client).to receive(:issues).and_raise(StandardError)
+ end
+ end
+
+ it 'tracks the failure and does not fail' do
+ expect(Gitlab::Import::ImportFailureService).to receive(:track).once
+
+ importer.execute
+ end
+ end
+
+ context 'when issue was already enqueued' do
+ before do
+ Gitlab::Cache::Import::Caching.set_add(importer.already_enqueued_cache_key, 1)
+ end
+
+ it 'does not schedule job for enqueued issues', :aggregate_failures do
+ expect(Gitlab::BitbucketImport::ImportIssueWorker).to receive(:perform_in).once
+
+ waiter = importer.execute
+
+ expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
+ expect(waiter.jobs_remaining).to eq(2)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_import/importers/issues_notes_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/issues_notes_importer_spec.rb
new file mode 100644
index 00000000000..043cd7f17b9
--- /dev/null
+++ b/spec/lib/gitlab/bitbucket_import/importers/issues_notes_importer_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::Importers::IssuesNotesImporter, feature_category: :importers do
+ let_it_be(:project) { create(:project, :import_started) }
+ # let_it_be(:merge_request_1) { create(:merge_request, source_project: project) }
+ # let_it_be(:merge_request_2) { create(:merge_request, source_project: project, source_branch: 'other-branch') }
+ let_it_be(:issue_1) { create(:issue, project: project) }
+ let_it_be(:issue_2) { create(:issue, project: project) }
+
+ subject(:importer) { described_class.new(project) }
+
+ describe '#execute', :clean_gitlab_redis_cache do
+ it 'imports the notes from each issue in parallel', :aggregate_failures do
+ expect(Gitlab::BitbucketImport::ImportIssueNotesWorker).to receive(:perform_in).twice
+
+ waiter = importer.execute
+
+ expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
+ expect(waiter.jobs_remaining).to eq(2)
+ expect(Gitlab::Cache::Import::Caching.values_from_set(importer.already_enqueued_cache_key))
+ .to match_array(%w[1 2])
+ end
+
+ context 'when an error is raised' do
+ before do
+ allow(importer).to receive(:mark_as_enqueued).and_raise(StandardError)
+ end
+
+ it 'tracks the failure and does not fail' do
+ expect(Gitlab::Import::ImportFailureService).to receive(:track).once
+
+ importer.execute
+ end
+ end
+
+ context 'when issue was already enqueued' do
+ before do
+ Gitlab::Cache::Import::Caching.set_add(importer.already_enqueued_cache_key, 2)
+ end
+
+ it 'does not schedule job for enqueued issues', :aggregate_failures do
+ expect(Gitlab::BitbucketImport::ImportIssueNotesWorker).to receive(:perform_in).once
+
+ waiter = importer.execute
+
+ expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
+ expect(waiter.jobs_remaining).to eq(2)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_import/importers/lfs_object_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/lfs_object_importer_spec.rb
new file mode 100644
index 00000000000..4d56853032a
--- /dev/null
+++ b/spec/lib/gitlab/bitbucket_import/importers/lfs_object_importer_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::Importers::LfsObjectImporter, feature_category: :importers do
+ let_it_be(:project) { create(:project) }
+ let(:oid) { 'a' * 64 }
+
+ let(:lfs_attributes) do
+ {
+ 'oid' => oid,
+ 'size' => 1,
+ 'link' => 'http://www.gitlab.com/lfs_objects/oid',
+ 'headers' => { 'X-Some-Header' => '456' }
+ }
+ end
+
+ let(:importer) { described_class.new(project, lfs_attributes) }
+
+ describe '#execute' do
+ it 'calls the LfsDownloadService with the lfs object attributes' do
+ expect_next_instance_of(
+ Projects::LfsPointers::LfsDownloadService, project, have_attributes(lfs_attributes)
+ ) do |service|
+ expect(service).to receive(:execute).and_return(ServiceResponse.success)
+ end
+
+ importer.execute
+ end
+
+ context 'when the object is not valid' do
+ let(:oid) { 'invalid' }
+
+ it 'tracks the validation errors and does not continue' do
+ expect(Gitlab::Import::ImportFailureService).to receive(:track).once
+
+ expect(Projects::LfsPointers::LfsDownloadService).not_to receive(:new)
+
+ importer.execute
+ end
+ end
+
+ context 'when an error is raised' do
+ let(:exception) { StandardError.new('message') }
+
+ before do
+ allow_next_instance_of(Projects::LfsPointers::LfsDownloadService) do |service|
+ allow(service).to receive(:execute).and_raise(exception)
+ end
+ end
+
+ it 'rescues and logs the exception' do
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track)
+ .with(hash_including(exception: exception))
+
+ importer.execute
+ end
+ end
+
+ it 'logs its progress' do
+ allow_next_instance_of(Projects::LfsPointers::LfsDownloadService) do |service|
+ allow(service).to receive(:execute).and_return(ServiceResponse.success)
+ end
+
+ common_log_message = {
+ oid: oid,
+ import_stage: 'import_lfs_object',
+ class: described_class.name,
+ project_id: project.id,
+ project_path: project.full_path
+ }
+
+ expect(Gitlab::BitbucketImport::Logger)
+ .to receive(:info).with(common_log_message.merge(message: 'starting')).and_call_original
+ expect(Gitlab::BitbucketImport::Logger)
+ .to receive(:info).with(common_log_message.merge(message: 'finished')).and_call_original
+
+ importer.execute
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_import/importers/lfs_objects_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/lfs_objects_importer_spec.rb
new file mode 100644
index 00000000000..fbce8337264
--- /dev/null
+++ b/spec/lib/gitlab/bitbucket_import/importers/lfs_objects_importer_spec.rb
@@ -0,0 +1,116 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::Importers::LfsObjectsImporter, feature_category: :importers do
+ let_it_be(:project) do
+ create(:project, :import_started,
+ import_data_attributes: {
+ data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
+ credentials: { 'token' => 'token' }
+ }
+ )
+ end
+
+ let(:lfs_attributes) do
+ {
+ oid: 'a' * 64,
+ size: 1,
+ link: 'http://www.gitlab.com/lfs_objects/oid',
+ headers: { 'X-Some-Header' => '456' }
+ }
+ end
+
+ let(:lfs_download_object) { LfsDownloadObject.new(**lfs_attributes) }
+
+ let(:common_log_messages) do
+ {
+ import_stage: 'import_lfs_objects',
+ class: described_class.name,
+ project_id: project.id,
+ project_path: project.full_path
+ }
+ end
+
+ describe '#execute', :clean_gitlab_redis_cache do
+ context 'when lfs is enabled' do
+ before do
+ allow(project).to receive(:lfs_enabled?).and_return(true)
+ end
+
+ it 'imports each lfs object in parallel' do
+ importer = described_class.new(project)
+
+ expect_next_instance_of(Projects::LfsPointers::LfsObjectDownloadListService) do |service|
+ expect(service).to receive(:each_list_item).and_yield(lfs_download_object)
+ end
+
+ expect(Gitlab::BitbucketImport::ImportLfsObjectWorker).to receive(:perform_in)
+ .with(1.second, project.id, lfs_attributes.stringify_keys, start_with(Gitlab::JobWaiter::KEY_PREFIX))
+
+ waiter = importer.execute
+
+ expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
+ expect(waiter.jobs_remaining).to eq(1)
+ end
+
+ it 'logs its progress' do
+ importer = described_class.new(project)
+
+ expect(Gitlab::BitbucketImport::Logger)
+ .to receive(:info).with(common_log_messages.merge(message: 'starting')).and_call_original
+ expect(Gitlab::BitbucketImport::Logger)
+ .to receive(:info).with(common_log_messages.merge(message: 'finished')).and_call_original
+
+ importer.execute
+ end
+
+ context 'when LFS list download fails' do
+ let(:exception) { StandardError.new('Invalid Project URL') }
+
+ before do
+ allow_next_instance_of(Projects::LfsPointers::LfsObjectDownloadListService) do |service|
+ allow(service).to receive(:each_list_item).and_raise(exception)
+ end
+ end
+
+ it 'rescues and logs the exception' do
+ importer = described_class.new(project)
+
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track)
+ .with(
+ project_id: project.id,
+ exception: exception,
+ error_source: described_class.name
+ ).and_call_original
+
+ expect(Gitlab::BitbucketImport::ImportLfsObjectWorker).not_to receive(:perform_in)
+
+ waiter = importer.execute
+
+ expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
+ expect(waiter.jobs_remaining).to eq(0)
+ end
+ end
+ end
+
+ context 'when LFS is not enabled' do
+ before do
+ allow(project).to receive(:lfs_enabled?).and_return(false)
+ end
+
+ it 'logs progress but does nothing' do
+ importer = described_class.new(project)
+
+ expect(Gitlab::BitbucketImport::Logger).to receive(:info).twice
+ expect(Gitlab::BitbucketImport::ImportLfsObjectWorker).not_to receive(:perform_in)
+
+ waiter = importer.execute
+
+ expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
+ expect(waiter.jobs_remaining).to eq(0)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_import/importers/pull_request_notes_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/pull_request_notes_importer_spec.rb
new file mode 100644
index 00000000000..4a30f225d66
--- /dev/null
+++ b/spec/lib/gitlab/bitbucket_import/importers/pull_request_notes_importer_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::Importers::PullRequestNotesImporter, feature_category: :importers do
+ let_it_be(:project) do
+ create(:project, :import_started,
+ import_data_attributes: {
+ credentials: { 'base_uri' => 'http://bitbucket.org/', 'user' => 'bitbucket', 'password' => 'password' }
+ }
+ )
+ end
+
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+
+ let(:hash) { { iid: merge_request.iid } }
+ let(:importer_helper) { Gitlab::BitbucketImport::Importer.new(project) }
+
+ subject(:importer) { described_class.new(project, hash) }
+
+ before do
+ allow(Gitlab::BitbucketImport::Importer).to receive(:new).and_return(importer_helper)
+ end
+
+ describe '#execute' do
+ it 'calls Importer.import_pull_request_comments' do
+ expect(importer_helper).to receive(:import_pull_request_comments).once
+
+ importer.execute
+ end
+
+ context 'when the merge request does not exist' do
+ let(:hash) { { iid: 'nonexistent' } }
+
+ it 'does not call Importer.import_pull_request_comments' do
+ expect(importer_helper).not_to receive(:import_pull_request_comments)
+
+ importer.execute
+ end
+ end
+
+ context 'when the merge request exists but not for this project' do
+ let_it_be(:another_project) { create(:project) }
+
+ before do
+ merge_request.update!(source_project: another_project, target_project: another_project)
+ end
+
+ it 'does not call Importer.import_pull_request_comments' do
+ expect(importer_helper).not_to receive(:import_pull_request_comments)
+
+ importer.execute
+ end
+ end
+
+ context 'when an error is raised' do
+ before do
+ allow(importer_helper).to receive(:import_pull_request_comments).and_raise(StandardError)
+ end
+
+ it 'tracks the failure and does not fail' do
+ expect(Gitlab::Import::ImportFailureService).to receive(:track).once
+
+ importer.execute
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_import/importers/pull_requests_notes_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/pull_requests_notes_importer_spec.rb
new file mode 100644
index 00000000000..c44fc259c3b
--- /dev/null
+++ b/spec/lib/gitlab/bitbucket_import/importers/pull_requests_notes_importer_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::Importers::PullRequestsNotesImporter, feature_category: :importers do
+ let_it_be(:project) { create(:project, :import_started) }
+ let_it_be(:merge_request_1) { create(:merge_request, source_project: project) }
+ let_it_be(:merge_request_2) { create(:merge_request, source_project: project, source_branch: 'other-branch') }
+
+ subject(:importer) { described_class.new(project) }
+
+ describe '#execute', :clean_gitlab_redis_cache do
+ it 'imports the notes from each merge request in parallel', :aggregate_failures do
+ expect(Gitlab::BitbucketImport::ImportPullRequestNotesWorker).to receive(:perform_in).twice
+
+ waiter = importer.execute
+
+ expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
+ expect(waiter.jobs_remaining).to eq(2)
+ expect(Gitlab::Cache::Import::Caching.values_from_set(importer.already_enqueued_cache_key))
+ .to match_array(%w[1 2])
+ end
+
+ context 'when an error is raised' do
+ before do
+ allow(importer).to receive(:mark_as_enqueued).and_raise(StandardError)
+ end
+
+ it 'tracks the failure and does not fail' do
+ expect(Gitlab::Import::ImportFailureService).to receive(:track).once
+
+ importer.execute
+ end
+ end
+
+ context 'when merge request was already enqueued' do
+ before do
+ Gitlab::Cache::Import::Caching.set_add(importer.already_enqueued_cache_key, 2)
+ end
+
+ it 'does not schedule job for enqueued merge requests', :aggregate_failures do
+ expect(Gitlab::BitbucketImport::ImportPullRequestNotesWorker).to receive(:perform_in).once
+
+ waiter = importer.execute
+
+ expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
+ expect(waiter.jobs_remaining).to eq(2)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_import/ref_converter_spec.rb b/spec/lib/gitlab/bitbucket_import/ref_converter_spec.rb
new file mode 100644
index 00000000000..578b661d86b
--- /dev/null
+++ b/spec/lib/gitlab/bitbucket_import/ref_converter_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::RefConverter, feature_category: :importers do
+ let_it_be(:project_identifier) { 'namespace/repo' }
+ let_it_be(:project) { create(:project, import_source: project_identifier) }
+ let(:path) { project.full_path }
+
+ let(:ref_converter) { described_class.new(project) }
+
+ shared_examples 'converts the ref correctly' do
+ it 'converts the ref to a gitlab reference' do
+ actual = ref_converter.convert_note(note)
+
+ expect(actual).to eq(expected)
+ end
+ end
+
+ context 'when the note has an issue ref' do
+ let(:note) { "[https://bitbucket.org/namespace/repo/issues/1/first-issue](https://bitbucket.org/namespace/repo/issues/1/first-issue){: data-inline-card='' } " }
+ let(:expected) { "[http://localhost/#{path}/-/issues/1/](http://localhost/#{path}/-/issues/1/)" }
+
+ it_behaves_like 'converts the ref correctly'
+ end
+
+ context 'when the note has a pull request ref' do
+ let(:note) { "[https://bitbucket.org/namespace/repo/pull-requests/7](https://bitbucket.org/namespace/repo/pull-requests/7){: data-inline-card='' } " }
+ let(:expected) { "[http://localhost/#{path}/-/merge_requests/7](http://localhost/#{path}/-/merge_requests/7)" }
+
+ it_behaves_like 'converts the ref correctly'
+ end
+
+ context 'when the note has a reference to a branch' do
+ let(:note) { "[https://bitbucket.org/namespace/repo/src/master/](https://bitbucket.org/namespace/repo/src/master/){: data-inline-card='' } " }
+ let(:expected) { "[http://localhost/#{path}/-/blob/master/](http://localhost/#{path}/-/blob/master/)" }
+
+ it_behaves_like 'converts the ref correctly'
+ end
+
+ context 'when the note has a reference to a line in a file' do
+ let(:note) do
+ "[https://bitbucket.org/namespace/repo/src/0f16a22c21671421780980c9a7433eb8c986b9af/.gitignore#lines-6] \
+ (https://bitbucket.org/namespace/repo/src/0f16a22c21671421780980c9a7433eb8c986b9af/.gitignore#lines-6) \
+ {: data-inline-card='' }"
+ end
+
+ let(:expected) do
+ "[http://localhost/#{path}/-/blob/0f16a22c21671421780980c9a7433eb8c986b9af/.gitignore#L6] \
+ (http://localhost/#{path}/-/blob/0f16a22c21671421780980c9a7433eb8c986b9af/.gitignore#L6)"
+ end
+
+ it_behaves_like 'converts the ref correctly'
+ end
+
+ context 'when the note has a reference to a file' do
+ let(:note) { "[https://bitbucket.org/namespace/repo/src/master/.gitignore](https://bitbucket.org/namespace/repo/src/master/.gitignore){: data-inline-card='' } " }
+ let(:expected) { "[http://localhost/#{path}/-/blob/master/.gitignore](http://localhost/#{path}/-/blob/master/.gitignore)" }
+
+ it_behaves_like 'converts the ref correctly'
+ end
+
+ context 'when the note does not have a ref' do
+ let(:note) { 'Hello world' }
+ let(:expected) { 'Hello world' }
+
+ it_behaves_like 'converts the ref correctly'
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_importer_spec.rb
index 3c84d888c92..1ae68f9efb8 100644
--- a/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_importer_spec.rb
@@ -48,6 +48,68 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestImporter, fe
end
end
+ describe 'merge request diff head_commit_sha' do
+ before do
+ allow(pull_request).to receive(:source_branch_sha).and_return(source_branch_sha)
+ end
+
+ context 'when a commit with the source_branch_sha exists' do
+ let(:source_branch_sha) { project.repository.head_commit.sha }
+
+ it 'is equal to the source_branch_sha' do
+ importer.execute
+
+ merge_request = project.merge_requests.find_by_iid(pull_request.iid)
+
+ expect(merge_request.merge_request_diffs.first.head_commit_sha).to eq(source_branch_sha)
+ end
+ end
+
+ context 'when a commit with the source_branch_sha does not exist' do
+ let(:source_branch_sha) { 'x' * Commit::MIN_SHA_LENGTH }
+
+ it 'is nil' do
+ importer.execute
+
+ merge_request = project.merge_requests.find_by_iid(pull_request.iid)
+
+ expect(merge_request.merge_request_diffs.first.head_commit_sha).to be_nil
+ end
+
+ context 'when a commit containing the sha in the message exists' do
+ let(:source_branch_sha) { project.repository.head_commit.sha }
+
+ it 'is equal to the sha' do
+ message = "
+ Squashed commit of the following:
+
+ commit #{source_branch_sha}
+ Author: John Smith <john@smith.com>
+ Date: Mon Sep 18 15:58:38 2023 +0200
+
+ My commit message
+ "
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: message,
+ file_path: 'files/lfs/ruby.rb',
+ file_content: 'testing'
+ ).execute
+
+ importer.execute
+
+ merge_request = project.merge_requests.find_by_iid(pull_request.iid)
+
+ expect(merge_request.merge_request_diffs.first.head_commit_sha).to eq(source_branch_sha)
+ end
+ end
+ end
+ end
+
it 'logs its progress' do
expect(Gitlab::BitbucketServerImport::Logger)
.to receive(:info).with(include(message: 'starting', iid: pull_request.iid)).and_call_original
diff --git a/spec/lib/gitlab/bitbucket_server_import/importers/pull_requests_importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importers/pull_requests_importer_spec.rb
index b9a9c8dac29..af8a0202083 100644
--- a/spec/lib/gitlab/bitbucket_server_import/importers/pull_requests_importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_server_import/importers/pull_requests_importer_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestsImporter, feature_category: :importers do
let_it_be(:project) do
- create(:project, :import_started,
+ create(:project, :with_import_url, :import_started, :empty_repo,
import_data_attributes: {
data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
credentials: { 'base_uri' => 'http://bitbucket.org/', 'user' => 'bitbucket', 'password' => 'password' }
@@ -19,8 +19,30 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestsImporter, f
allow_next_instance_of(BitbucketServer::Client) do |client|
allow(client).to receive(:pull_requests).and_return(
[
- BitbucketServer::Representation::PullRequest.new({ 'id' => 1 }),
- BitbucketServer::Representation::PullRequest.new({ 'id' => 2 })
+ BitbucketServer::Representation::PullRequest.new(
+ {
+ 'id' => 1,
+ 'state' => 'MERGED',
+ 'fromRef' => { 'latestCommit' => 'aaaa1' },
+ 'toRef' => { 'latestCommit' => 'aaaa2' }
+ }
+ ),
+ BitbucketServer::Representation::PullRequest.new(
+ {
+ 'id' => 2,
+ 'state' => 'DECLINED',
+ 'fromRef' => { 'latestCommit' => 'bbbb1' },
+ 'toRef' => { 'latestCommit' => 'bbbb2' }
+ }
+ ),
+ BitbucketServer::Representation::PullRequest.new(
+ {
+ 'id' => 3,
+ 'state' => 'OPEN',
+ 'fromRef' => { 'latestCommit' => 'cccc1' },
+ 'toRef' => { 'latestCommit' => 'cccc2' }
+ }
+ )
],
[]
)
@@ -28,14 +50,14 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestsImporter, f
end
it 'imports each pull request in parallel', :aggregate_failures do
- expect(Gitlab::BitbucketServerImport::ImportPullRequestWorker).to receive(:perform_in).twice
+ expect(Gitlab::BitbucketServerImport::ImportPullRequestWorker).to receive(:perform_in).thrice
waiter = importer.execute
expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
- expect(waiter.jobs_remaining).to eq(2)
+ expect(waiter.jobs_remaining).to eq(3)
expect(Gitlab::Cache::Import::Caching.values_from_set(importer.already_processed_cache_key))
- .to match_array(%w[1 2])
+ .to match_array(%w[1 2 3])
end
context 'when pull request was already processed' do
@@ -44,12 +66,68 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestsImporter, f
end
it 'does not schedule job for processed pull requests', :aggregate_failures do
- expect(Gitlab::BitbucketServerImport::ImportPullRequestWorker).to receive(:perform_in).once
+ expect(Gitlab::BitbucketServerImport::ImportPullRequestWorker).to receive(:perform_in).twice
waiter = importer.execute
expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
- expect(waiter.jobs_remaining).to eq(2)
+ expect(waiter.jobs_remaining).to eq(3)
+ end
+ end
+
+ context 'when pull requests are in merged or declined status' do
+ it 'fetches latest commits from the remote repository' do
+ expect(project.repository).to receive(:fetch_remote).with(
+ project.import_url,
+ refmap: %w[aaaa1 aaaa2 bbbb1 bbbb2],
+ prune: false
+ )
+
+ importer.execute
+ end
+
+ context 'when feature flag "fetch_commits_for_bitbucket_server" is disabled' do
+ before do
+ stub_feature_flags(fetch_commits_for_bitbucket_server: false)
+ end
+
+ it 'does not fetch anything' do
+ expect(project.repository).not_to receive(:fetch_remote)
+ importer.execute
+ end
+ end
+
+ context 'when there are no commits to process' do
+ before do
+ Gitlab::Cache::Import::Caching.set_add(importer.already_processed_cache_key, 1)
+ Gitlab::Cache::Import::Caching.set_add(importer.already_processed_cache_key, 2)
+ end
+
+ it 'does not fetch anything' do
+ expect(project.repository).not_to receive(:fetch_remote)
+
+ importer.execute
+ end
+ end
+
+ context 'when fetch process is failed' do
+ let(:exception) { ArgumentError.new('blank or empty URL') }
+
+ before do
+ allow(project.repository).to receive(:fetch_remote).and_raise(exception)
+ end
+
+ it 'rescues and logs the exception' do
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track)
+ .with(
+ project_id: project.id,
+ exception: exception,
+ error_source: described_class.name
+ ).and_call_original
+
+ importer.execute
+ end
end
end
end
diff --git a/spec/lib/gitlab/chat_spec.rb b/spec/lib/gitlab/chat_spec.rb
deleted file mode 100644
index a9df35ace98..00000000000
--- a/spec/lib/gitlab/chat_spec.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Chat, :use_clean_rails_memory_store_caching do
- describe '.available?' do
- it 'returns true when the chatops feature is available' do
- stub_feature_flags(chatops: true)
-
- expect(described_class).to be_available
- end
-
- it 'returns false when the chatops feature is not available' do
- stub_feature_flags(chatops: false)
-
- expect(described_class).not_to be_available
- end
- end
-end
diff --git a/spec/lib/gitlab/checks/global_file_size_check_spec.rb b/spec/lib/gitlab/checks/global_file_size_check_spec.rb
index a2b3ee0f761..db615053356 100644
--- a/spec/lib/gitlab/checks/global_file_size_check_spec.rb
+++ b/spec/lib/gitlab/checks/global_file_size_check_spec.rb
@@ -34,7 +34,10 @@ RSpec.describe Gitlab::Checks::GlobalFileSizeCheck, feature_category: :source_co
end
context 'when there are oversized blobs' do
- let(:blob_double) { instance_double(Gitlab::Git::Blob, size: 10) }
+ let(:mock_blob_id) { "88acbfafb1b8fdb7c51db870babce21bd861ac4f" }
+ let(:mock_blob_size) { 300 * 1024 * 1024 } # 300 MiB
+ let(:size_msg) { "300.0" } # equals (mock_blob_size / 1024.0 / 1024.0).round(2).to_s
+ let(:blob_double) { instance_double(Gitlab::Git::Blob, size: mock_blob_size, id: mock_blob_id) }
before do
allow_next_instance_of(Gitlab::Checks::FileSizeCheck::HookEnvironmentAwareAnyOversizedBlobs,
@@ -48,8 +51,15 @@ RSpec.describe Gitlab::Checks::GlobalFileSizeCheck, feature_category: :source_co
it 'logs a message with blob size and raises an exception' do
expect(Gitlab::AppJsonLogger).to receive(:info).with('Checking for blobs over the file size limit')
- expect(Gitlab::AppJsonLogger).to receive(:info).with(message: 'Found blob over global limit', blob_sizes: [10])
- expect { subject.validate! }.to raise_exception(Gitlab::GitAccess::ForbiddenError)
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(
+ message: 'Found blob over global limit',
+ blob_sizes: [mock_blob_size],
+ blob_details: { mock_blob_id => { "size" => mock_blob_size } }
+ )
+ expect do
+ subject.validate!
+ end.to raise_exception(Gitlab::GitAccess::ForbiddenError,
+ /- #{mock_blob_id} \(#{size_msg} MiB\)/)
end
context 'when the enforce_global_file_size_limit feature flag is disabled' do
diff --git a/spec/lib/gitlab/checks/tag_check_spec.rb b/spec/lib/gitlab/checks/tag_check_spec.rb
index 60d3eb4bfb3..b5aafde006f 100644
--- a/spec/lib/gitlab/checks/tag_check_spec.rb
+++ b/spec/lib/gitlab/checks/tag_check_spec.rb
@@ -41,6 +41,36 @@ RSpec.describe Gitlab::Checks::TagCheck, feature_category: :source_code_manageme
expect { subject.validate! }.not_to raise_error
end
end
+
+ it "prohibits tag names that include characters incompatible with UTF-8" do
+ allow(subject).to receive(:tag_name).and_return("v6.0.0-\xCE.BETA")
+
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "Tag names must be valid when converted to UTF-8 encoding")
+ end
+
+ it "doesn't prohibit UTF-8 compatible characters" do
+ allow(subject).to receive(:tag_name).and_return("v6.0.0-Ü.BETA")
+
+ expect { subject.validate! }.not_to raise_error
+ end
+
+ context "when prohibited_tag_name_encoding_check feature flag is disabled" do
+ before do
+ stub_feature_flags(prohibited_tag_name_encoding_check: false)
+ end
+
+ it "doesn't prohibit tag names that include characters incompatible with UTF-8" do
+ allow(subject).to receive(:tag_name).and_return("v6.0.0-\xCE.BETA")
+
+ expect { subject.validate! }.not_to raise_error
+ end
+
+ it "doesn't prohibit UTF-8 compatible characters" do
+ allow(subject).to receive(:tag_name).and_return("v6.0.0-Ü.BETA")
+
+ expect { subject.validate! }.not_to raise_error
+ end
+ end
end
context 'with protected tag' do
diff --git a/spec/lib/gitlab/ci/build/context/build_spec.rb b/spec/lib/gitlab/ci/build/context/build_spec.rb
index 6047eb1b1e0..fae02e140f2 100644
--- a/spec/lib/gitlab/ci/build/context/build_spec.rb
+++ b/spec/lib/gitlab/ci/build/context/build_spec.rb
@@ -4,7 +4,13 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Build::Context::Build, feature_category: :pipeline_composition do
let(:pipeline) { create(:ci_pipeline) }
- let(:seed_attributes) { { 'name' => 'some-job' } }
+ let(:seed_attributes) do
+ {
+ name: 'some-job',
+ tag_list: %w[ruby docker postgresql],
+ needs_attributes: [{ name: 'setup-test-env', artifacts: true, optional: false }]
+ }
+ end
subject(:context) { described_class.new(pipeline, seed_attributes) }
@@ -23,7 +29,7 @@ RSpec.describe Gitlab::Ci::Build::Context::Build, feature_category: :pipeline_co
end
context 'when environment:name is provided' do
- let(:seed_attributes) { { 'name' => 'some-job', 'environment' => 'test' } }
+ let(:seed_attributes) { { name: 'some-job', environment: 'test' } }
it { is_expected.to include('CI_ENVIRONMENT_NAME' => 'test') }
end
@@ -35,6 +41,16 @@ RSpec.describe Gitlab::Ci::Build::Context::Build, feature_category: :pipeline_co
it { expect(context.variables).to be_instance_of(Gitlab::Ci::Variables::Collection) }
it_behaves_like 'variables collection'
+
+ context 'with FF disabled' do
+ before do
+ stub_feature_flags(reduced_build_attributes_list_for_rules: false)
+ end
+
+ it { expect(context.variables).to be_instance_of(Gitlab::Ci::Variables::Collection) }
+
+ it_behaves_like 'variables collection'
+ end
end
describe '#variables_hash' do
@@ -43,5 +59,15 @@ RSpec.describe Gitlab::Ci::Build::Context::Build, feature_category: :pipeline_co
it { expect(context.variables_hash).to be_instance_of(ActiveSupport::HashWithIndifferentAccess) }
it_behaves_like 'variables collection'
+
+ context 'with FF disabled' do
+ before do
+ stub_feature_flags(reduced_build_attributes_list_for_rules: false)
+ end
+
+ it { expect(context.variables_hash).to be_instance_of(ActiveSupport::HashWithIndifferentAccess) }
+
+ it_behaves_like 'variables collection'
+ end
end
end
diff --git a/spec/lib/gitlab/ci/build/duration_parser_spec.rb b/spec/lib/gitlab/ci/build/duration_parser_spec.rb
index bc905aa0a35..7f5ff1eb0ee 100644
--- a/spec/lib/gitlab/ci/build/duration_parser_spec.rb
+++ b/spec/lib/gitlab/ci/build/duration_parser_spec.rb
@@ -25,8 +25,8 @@ RSpec.describe Gitlab::Ci::Build::DurationParser do
it { is_expected.to be_truthy }
it 'caches data' do
- expect(ChronicDuration).to receive(:parse).with(value, use_complete_matcher: true).once.and_call_original
- expect(ChronicDuration).to receive(:parse).with(other_value, use_complete_matcher: true).once.and_call_original
+ expect(ChronicDuration).to receive(:parse).with(value).once.and_call_original
+ expect(ChronicDuration).to receive(:parse).with(other_value).once.and_call_original
2.times do
expect(described_class.validate_duration(value)).to eq(86400)
@@ -41,7 +41,7 @@ RSpec.describe Gitlab::Ci::Build::DurationParser do
it { is_expected.to be_falsy }
it 'caches data' do
- expect(ChronicDuration).to receive(:parse).with(value, use_complete_matcher: true).once.and_call_original
+ expect(ChronicDuration).to receive(:parse).with(value).once.and_call_original
2.times do
expect(described_class.validate_duration(value)).to be_falsey
diff --git a/spec/lib/gitlab/ci/components/instance_path_spec.rb b/spec/lib/gitlab/ci/components/instance_path_spec.rb
index 97843781891..0bdcfcfd546 100644
--- a/spec/lib/gitlab/ci/components/instance_path_spec.rb
+++ b/spec/lib/gitlab/ci/components/instance_path_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline_composition do
let_it_be(:user) { create(:user) }
- let(:path) { described_class.new(address: address, content_filename: 'template.yml') }
+ let(:path) { described_class.new(address: address) }
let(:settings) { GitlabSettings::Options.build({ 'component_fqdn' => current_host }) }
let(:current_host) { 'acme.com/' }
@@ -44,9 +44,10 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline
context 'when the component is simple (single file template)' do
it 'fetches the component content', :aggregate_failures do
- expect(path.fetch_content!(current_user: user)).to eq('image: alpine_1')
+ result = path.fetch_content!(current_user: user)
+ expect(result.content).to eq('image: alpine_1')
+ expect(result.path).to eq('templates/secret-detection.yml')
expect(path.host).to eq(current_host)
- expect(path.project_file_path).to eq('templates/secret-detection.yml')
expect(path.project).to eq(project)
expect(path.sha).to eq(project.commit('master').id)
end
@@ -56,9 +57,10 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline
let(:address) { "acme.com/#{project_path}/dast@#{version}" }
it 'fetches the component content', :aggregate_failures do
- expect(path.fetch_content!(current_user: user)).to eq('image: alpine_2')
+ result = path.fetch_content!(current_user: user)
+ expect(result.content).to eq('image: alpine_2')
+ expect(result.path).to eq('templates/dast/template.yml')
expect(path.host).to eq(current_host)
- expect(path.project_file_path).to eq('templates/dast/template.yml')
expect(path.project).to eq(project)
expect(path.sha).to eq(project.commit('master').id)
end
@@ -67,7 +69,8 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline
let(:address) { "acme.com/#{project_path}/dast/another-folder@#{version}" }
it 'returns nil' do
- expect(path.fetch_content!(current_user: user)).to be_nil
+ result = path.fetch_content!(current_user: user)
+ expect(result.content).to be_nil
end
end
@@ -75,7 +78,8 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline
let(:address) { "acme.com/#{project_path}/dast/another-template@#{version}" }
it 'returns nil' do
- expect(path.fetch_content!(current_user: user)).to be_nil
+ result = path.fetch_content!(current_user: user)
+ expect(result.content).to be_nil
end
end
end
@@ -110,9 +114,10 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline
end
it 'fetches the component content', :aggregate_failures do
- expect(path.fetch_content!(current_user: user)).to eq('image: alpine_2')
+ result = path.fetch_content!(current_user: user)
+ expect(result.content).to eq('image: alpine_2')
+ expect(result.path).to eq('templates/secret-detection.yml')
expect(path.host).to eq(current_host)
- expect(path.project_file_path).to eq('templates/secret-detection.yml')
expect(path.project).to eq(project)
expect(path.sha).to eq(latest_sha)
end
@@ -124,7 +129,6 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline
it 'returns nil', :aggregate_failures do
expect(path.fetch_content!(current_user: user)).to be_nil
expect(path.host).to eq(current_host)
- expect(path.project_file_path).to be_nil
expect(path.project).to eq(project)
expect(path.sha).to be_nil
end
@@ -135,9 +139,10 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline
let(:current_host) { 'acme.com/gitlab/' }
it 'fetches the component content', :aggregate_failures do
- expect(path.fetch_content!(current_user: user)).to eq('image: alpine_1')
+ result = path.fetch_content!(current_user: user)
+ expect(result.content).to eq('image: alpine_1')
+ expect(result.path).to eq('templates/secret-detection.yml')
expect(path.host).to eq(current_host)
- expect(path.project_file_path).to eq('templates/secret-detection.yml')
expect(path.project).to eq(project)
expect(path.sha).to eq(project.commit('master').id)
end
@@ -164,9 +169,10 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline
end
it 'fetches the component content', :aggregate_failures do
- expect(path.fetch_content!(current_user: user)).to eq('image: alpine')
+ result = path.fetch_content!(current_user: user)
+ expect(result.content).to eq('image: alpine')
+ expect(result.path).to eq('component/template.yml')
expect(path.host).to eq(current_host)
- expect(path.project_file_path).to eq('component/template.yml')
expect(path.project).to eq(project)
expect(path.sha).to eq(project.commit('master').id)
end
@@ -184,9 +190,10 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline
end
it 'fetches the component content', :aggregate_failures do
- expect(path.fetch_content!(current_user: user)).to eq('image: alpine')
+ result = path.fetch_content!(current_user: user)
+ expect(result.content).to eq('image: alpine')
+ expect(result.path).to eq('component/template.yml')
expect(path.host).to eq(current_host)
- expect(path.project_file_path).to eq('component/template.yml')
expect(path.project).to eq(project)
expect(path.sha).to eq(project.commit('master').id)
end
@@ -197,9 +204,10 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline
let(:current_host) { 'acme.com/gitlab/' }
it 'fetches the component content', :aggregate_failures do
- expect(path.fetch_content!(current_user: user)).to eq('image: alpine')
+ result = path.fetch_content!(current_user: user)
+ expect(result.content).to eq('image: alpine')
+ expect(result.path).to eq('component/template.yml')
expect(path.host).to eq(current_host)
- expect(path.project_file_path).to eq('component/template.yml')
expect(path.project).to eq(project)
expect(path.sha).to eq(project.commit('master').id)
end
@@ -211,7 +219,6 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline
it 'returns nil', :aggregate_failures do
expect(path.fetch_content!(current_user: user)).to be_nil
expect(path.host).to eq(current_host)
- expect(path.project_file_path).to be_nil
expect(path.project).to eq(project)
expect(path.sha).to be_nil
end
diff --git a/spec/lib/gitlab/ci/config/external/file/component_spec.rb b/spec/lib/gitlab/ci/config/external/file/component_spec.rb
index 0f7b811b5df..88e272ac3fd 100644
--- a/spec/lib/gitlab/ci/config/external/file/component_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/component_spec.rb
@@ -99,7 +99,9 @@ RSpec.describe Gitlab::Ci::Config::External::File::Component, feature_category:
let(:response) do
ServiceResponse.success(payload: {
content: content,
- path: instance_double(::Gitlab::Ci::Components::InstancePath, project: project, sha: '12345')
+ path: 'templates/component.yml',
+ project: project,
+ sha: '12345'
})
end
@@ -132,7 +134,9 @@ RSpec.describe Gitlab::Ci::Config::External::File::Component, feature_category:
let(:response) do
ServiceResponse.success(payload: {
content: content,
- path: instance_double(::Gitlab::Ci::Components::InstancePath, project: project, sha: '12345')
+ path: 'templates/component.yml',
+ project: project,
+ sha: '12345'
})
end
@@ -158,15 +162,8 @@ RSpec.describe Gitlab::Ci::Config::External::File::Component, feature_category:
describe '#metadata' do
subject(:metadata) { external_resource.metadata }
- let(:component_path) do
- instance_double(::Gitlab::Ci::Components::InstancePath,
- project: project,
- sha: '12345',
- project_file_path: 'my-component/template.yml')
- end
-
let(:response) do
- ServiceResponse.success(payload: { path: component_path })
+ ServiceResponse.success(payload: { path: 'my-component/template.yml', project: project, sha: '12345' })
end
it 'returns the metadata' do
@@ -183,14 +180,8 @@ RSpec.describe Gitlab::Ci::Config::External::File::Component, feature_category:
end
describe '#expand_context' do
- let(:component_path) do
- instance_double(::Gitlab::Ci::Components::InstancePath,
- project: project,
- sha: '12345')
- end
-
let(:response) do
- ServiceResponse.success(payload: { path: component_path })
+ ServiceResponse.success(payload: { path: 'templates/component.yml', project: project, sha: '12345' })
end
subject { external_resource.send(:expand_context_attrs) }
@@ -207,11 +198,8 @@ RSpec.describe Gitlab::Ci::Config::External::File::Component, feature_category:
describe '#to_hash' do
context 'when interpolation is being used' do
let(:response) do
- ServiceResponse.success(payload: { content: content, path: path })
- end
-
- let(:path) do
- instance_double(::Gitlab::Ci::Components::InstancePath, project: project, sha: '12345')
+ ServiceResponse.success(payload: { content: content, path: 'templates/component.yml', project: project,
+ sha: '12345' })
end
let(:content) do
diff --git a/spec/lib/gitlab/ci/config/header/input_spec.rb b/spec/lib/gitlab/ci/config/header/input_spec.rb
index b5155dff6e8..5d1fa4a8e6e 100644
--- a/spec/lib/gitlab/ci/config/header/input_spec.rb
+++ b/spec/lib/gitlab/ci/config/header/input_spec.rb
@@ -46,6 +46,12 @@ RSpec.describe Gitlab::Ci::Config::Header::Input, feature_category: :pipeline_co
it_behaves_like 'a valid input'
end
+ context 'when has a description value' do
+ let(:input_hash) { { description: 'bar' } }
+
+ it_behaves_like 'a valid input'
+ end
+
context 'when is a required input' do
let(:input_hash) { nil }
@@ -62,6 +68,12 @@ RSpec.describe Gitlab::Ci::Config::Header::Input, feature_category: :pipeline_co
end
end
+ context 'when the input has RegEx validation' do
+ let(:input_hash) { { regex: '\w+' } }
+
+ it_behaves_like 'a valid input'
+ end
+
context 'when given an invalid type' do
let(:input_hash) { { type: 'datetime' } }
let(:expected_errors) { ['foo input type unknown value: datetime'] }
@@ -84,4 +96,11 @@ RSpec.describe Gitlab::Ci::Config::Header::Input, feature_category: :pipeline_co
it_behaves_like 'an invalid input'
end
+
+ context 'when RegEx validation value is not a string' do
+ let(:input_hash) { { regex: [] } }
+ let(:expected_errors) { ['foo input regex should be a string'] }
+
+ it_behaves_like 'an invalid input'
+ end
end
diff --git a/spec/lib/gitlab/ci/config/interpolation/context_spec.rb b/spec/lib/gitlab/ci/config/interpolation/context_spec.rb
index c90866c986a..56a572312eb 100644
--- a/spec/lib/gitlab/ci/config/interpolation/context_spec.rb
+++ b/spec/lib/gitlab/ci/config/interpolation/context_spec.rb
@@ -17,6 +17,12 @@ RSpec.describe Gitlab::Ci::Config::Interpolation::Context, feature_category: :pi
end
end
+ describe '.new' do
+ it 'returns variables as a Variables::Collection object' do
+ expect(subject.variables.class).to eq(Gitlab::Ci::Variables::Collection)
+ end
+ end
+
describe '#to_h' do
it 'returns the context hash' do
expect(subject.to_h).to eq(ctx)
diff --git a/spec/lib/gitlab/ci/config/interpolation/functions/base_spec.rb b/spec/lib/gitlab/ci/config/interpolation/functions/base_spec.rb
index c193e88dbe2..a2b575afb6f 100644
--- a/spec/lib/gitlab/ci/config/interpolation/functions/base_spec.rb
+++ b/spec/lib/gitlab/ci/config/interpolation/functions/base_spec.rb
@@ -18,6 +18,6 @@ RSpec.describe Gitlab::Ci::Config::Interpolation::Functions::Base, feature_categ
it 'defines an expected interface for child classes' do
expect { described_class.function_expression_pattern }.to raise_error(NotImplementedError)
expect { described_class.name }.to raise_error(NotImplementedError)
- expect { custom_function_klass.new('test').execute('input') }.to raise_error(NotImplementedError)
+ expect { custom_function_klass.new('test', nil).execute('input') }.to raise_error(NotImplementedError)
end
end
diff --git a/spec/lib/gitlab/ci/config/interpolation/functions/expand_vars_spec.rb b/spec/lib/gitlab/ci/config/interpolation/functions/expand_vars_spec.rb
new file mode 100644
index 00000000000..2a627b435d3
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/interpolation/functions/expand_vars_spec.rb
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Interpolation::Functions::ExpandVars, feature_category: :pipeline_composition do
+ let(:variables) do
+ Gitlab::Ci::Variables::Collection.new([
+ { key: 'VAR1', value: 'value1', masked: false },
+ { key: 'VAR2', value: 'value2', masked: false },
+ { key: 'NESTED_VAR', value: '$MY_VAR', masked: false },
+ { key: 'MASKED_VAR', value: 'masked', masked: true }
+ ])
+ end
+
+ let(:function_expression) { 'expand_vars' }
+ let(:ctx) { Gitlab::Ci::Config::Interpolation::Context.new({}, variables: variables) }
+
+ subject(:function) { described_class.new(function_expression, ctx) }
+
+ describe '#execute' do
+ let(:input_value) { '$VAR1' }
+
+ subject(:execute) { function.execute(input_value) }
+
+ it 'expands the variable' do
+ expect(execute).to eq('value1')
+ expect(function).to be_valid
+ end
+
+ context 'when the variable contains another variable' do
+ let(:input_value) { '$NESTED_VAR' }
+
+ it 'does not expand the inner variable' do
+ expect(execute).to eq('$MY_VAR')
+ expect(function).to be_valid
+ end
+ end
+
+ context 'when the variable is masked' do
+ let(:input_value) { '$MASKED_VAR' }
+
+ it 'returns an error' do
+ expect(execute).to be_nil
+ expect(function).not_to be_valid
+ expect(function.errors).to contain_exactly(
+ 'error in `expand_vars` function: variable expansion error: masked variables cannot be expanded'
+ )
+ end
+ end
+
+ context 'when the variable is unknown' do
+ let(:input_value) { '$UNKNOWN_VAR' }
+
+ it 'does not expand the variable' do
+ expect(execute).to eq('$UNKNOWN_VAR')
+ expect(function).to be_valid
+ end
+ end
+
+ context 'when there are multiple variables' do
+ let(:input_value) { '${VAR1} $VAR2 %VAR1%' }
+
+ it 'expands the variables' do
+ expect(execute).to eq('value1 value2 value1')
+ expect(function).to be_valid
+ end
+ end
+
+ context 'when the input is not a string' do
+ let(:input_value) { 100 }
+
+ it 'returns an error' do
+ expect(execute).to be_nil
+ expect(function).not_to be_valid
+ expect(function.errors).to contain_exactly(
+ 'error in `expand_vars` function: invalid input type: expand_vars can only be used with string inputs'
+ )
+ end
+ end
+ end
+
+ describe '.matches?' do
+ it 'matches exactly the expand_vars function with no arguments' do
+ expect(described_class.matches?('expand_vars')).to be_truthy
+ expect(described_class.matches?('expand_vars()')).to be_falsey
+ expect(described_class.matches?('expand_vars(1)')).to be_falsey
+ expect(described_class.matches?('unknown')).to be_falsey
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/interpolation/functions/truncate_spec.rb b/spec/lib/gitlab/ci/config/interpolation/functions/truncate_spec.rb
index c521eff9811..93e5d4ef48c 100644
--- a/spec/lib/gitlab/ci/config/interpolation/functions/truncate_spec.rb
+++ b/spec/lib/gitlab/ci/config/interpolation/functions/truncate_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Gitlab::Ci::Config::Interpolation::Functions::Truncate, feature_c
end
it 'truncates the given input' do
- function = described_class.new('truncate(1,2)')
+ function = described_class.new('truncate(1,2)', nil)
output = function.execute('test')
@@ -22,7 +22,7 @@ RSpec.describe Gitlab::Ci::Config::Interpolation::Functions::Truncate, feature_c
context 'when given a non-string input' do
it 'returns an error' do
- function = described_class.new('truncate(1,2)')
+ function = described_class.new('truncate(1,2)', nil)
function.execute(100)
diff --git a/spec/lib/gitlab/ci/config/interpolation/functions_stack_spec.rb b/spec/lib/gitlab/ci/config/interpolation/functions_stack_spec.rb
index 881f092c440..9ac0ef05c61 100644
--- a/spec/lib/gitlab/ci/config/interpolation/functions_stack_spec.rb
+++ b/spec/lib/gitlab/ci/config/interpolation/functions_stack_spec.rb
@@ -1,12 +1,12 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
RSpec.describe Gitlab::Ci::Config::Interpolation::FunctionsStack, feature_category: :pipeline_composition do
let(:functions) { ['truncate(0,4)', 'truncate(1,2)'] }
let(:input_value) { 'test_input_value' }
- subject { described_class.new(functions).evaluate(input_value) }
+ subject { described_class.new(functions, nil).evaluate(input_value) }
it 'modifies the given input value according to the function expressions' do
expect(subject).to be_success
diff --git a/spec/lib/gitlab/ci/config/interpolation/inputs_spec.rb b/spec/lib/gitlab/ci/config/interpolation/inputs_spec.rb
index ea06f181fa4..b0618081207 100644
--- a/spec/lib/gitlab/ci/config/interpolation/inputs_spec.rb
+++ b/spec/lib/gitlab/ci/config/interpolation/inputs_spec.rb
@@ -7,130 +7,303 @@ RSpec.describe Gitlab::Ci::Config::Interpolation::Inputs, feature_category: :pip
let(:specs) { { foo: { default: 'bar' } } }
let(:args) { {} }
- context 'when inputs are valid' do
- where(:specs, :args, :merged) do
- [
- [
- { foo: { default: 'bar' } }, {},
- { foo: 'bar' }
- ],
- [
- { foo: { default: 'bar' } }, { foo: 'test' },
- { foo: 'test' }
- ],
- [
- { foo: nil }, { foo: 'bar' },
- { foo: 'bar' }
- ],
- [
- { foo: { type: 'string' } }, { foo: 'bar' },
- { foo: 'bar' }
- ],
- [
- { foo: { type: 'string', default: 'bar' } }, { foo: 'test' },
- { foo: 'test' }
- ],
- [
- { foo: { type: 'string', default: 'bar' } }, {},
- { foo: 'bar' }
- ],
- [
- { foo: { default: 'bar' }, baz: nil }, { baz: 'test' },
- { foo: 'bar', baz: 'test' }
- ],
- [
- { number_input: { type: 'number' } },
- { number_input: 8 },
- { number_input: 8 }
- ],
- [
- { default_number_input: { default: 9, type: 'number' } },
- {},
- { default_number_input: 9 }
- ],
- [
- { true_input: { type: 'boolean' }, false_input: { type: 'boolean' } },
- { true_input: true, false_input: false },
- { true_input: true, false_input: false }
- ],
- [
- { default_boolean_input: { default: true, type: 'boolean' } },
- {},
- { default_boolean_input: true }
- ]
- ]
- end
-
- with_them do
- it 'contains the merged inputs' do
+ context 'when given unrecognized inputs' do
+ let(:specs) { { foo: nil } }
+ let(:args) { { foo: 'bar', test: 'bar' } }
+
+ it 'is invalid' do
+ expect(inputs).not_to be_valid
+ expect(inputs.errors).to contain_exactly('unknown input arguments: test')
+ end
+ end
+
+ context 'when given unrecognized configuration keywords' do
+ let(:specs) { { foo: 123 } }
+ let(:args) { {} }
+
+ it 'is invalid' do
+ expect(inputs).not_to be_valid
+ expect(inputs.errors).to contain_exactly(
+ 'unknown input specification for `foo` (valid types: boolean, number, string)'
+ )
+ end
+ end
+
+ context 'when the inputs have multiple errors' do
+ let(:specs) { { foo: nil } }
+ let(:args) { { test: 'bar', gitlab: '1' } }
+
+ it 'reports all of them' do
+ expect(inputs).not_to be_valid
+ expect(inputs.errors).to contain_exactly(
+ 'unknown input arguments: test, gitlab',
+ '`foo` input: required value has not been provided'
+ )
+ end
+ end
+
+ describe 'required inputs' do
+ let(:specs) { { foo: nil } }
+
+ context 'when a value is given' do
+ let(:args) { { foo: 'bar' } }
+
+ it 'is valid' do
+ expect(inputs).to be_valid
+ expect(inputs.to_hash).to eq(foo: 'bar')
+ end
+ end
+
+ context 'when no value is given' do
+ let(:args) { {} }
+
+ it 'is invalid' do
+ expect(inputs).not_to be_valid
+ expect(inputs.errors).to contain_exactly('`foo` input: required value has not been provided')
+ end
+ end
+ end
+
+ describe 'inputs with a default value' do
+ let(:specs) { { foo: { default: 'bar' } } }
+
+ context 'when a value is given' do
+ let(:args) { { foo: 'test' } }
+
+ it 'uses the given value' do
+ expect(inputs).to be_valid
+ expect(inputs.to_hash).to eq(foo: 'test')
+ end
+ end
+
+ context 'when no value is given' do
+ let(:args) { {} }
+
+ it 'uses the default value' do
expect(inputs).to be_valid
- expect(inputs.to_hash).to eq(merged)
+ expect(inputs.to_hash).to eq(foo: 'bar')
end
end
end
- context 'when inputs are invalid' do
- where(:specs, :args, :errors) do
- [
- [
- { foo: nil }, { foo: 'bar', test: 'bar' },
- ['unknown input arguments: test']
- ],
- [
- { foo: nil }, { test: 'bar', gitlab: '1' },
- ['unknown input arguments: test, gitlab', '`foo` input: required value has not been provided']
- ],
- [
- { foo: 123 }, {},
- ['unknown input specification for `foo` (valid types: boolean, number, string)']
- ],
- [
- { a: nil, foo: 123 }, { a: '123' },
- ['unknown input specification for `foo` (valid types: boolean, number, string)']
- ],
- [
- { foo: nil }, {},
- ['`foo` input: required value has not been provided']
- ],
- [
- { foo: { default: 123 } }, { foo: 'test' },
- ['`foo` input: default value is not a string']
- ],
- [
- { foo: { default: 'test' } }, { foo: 123 },
- ['`foo` input: provided value is not a string']
- ],
- [
- { foo: nil }, { foo: 123 },
- ['`foo` input: provided value is not a string']
- ],
- [
- { number_input: { type: 'number' } },
- { number_input: 'NaN' },
- ['`number_input` input: provided value is not a number']
- ],
- [
- { default_number_input: { default: 'NaN', type: 'number' } },
- {},
- ['`default_number_input` input: default value is not a number']
- ],
- [
- { boolean_input: { type: 'boolean' } },
- { boolean_input: 'string' },
- ['`boolean_input` input: provided value is not a boolean']
- ],
- [
- { default_boolean_input: { default: 'string', type: 'boolean' } },
- {},
- ['`default_boolean_input` input: default value is not a boolean']
- ]
- ]
- end
-
- with_them do
- it 'contains the merged inputs', :aggregate_failures do
+ describe 'inputs with type validation' do
+ describe 'string validation' do
+ let(:specs) { { a_input: nil, b_input: { default: 'test' }, c_input: { default: 123 } } }
+ let(:args) { { a_input: 123, b_input: 123, c_input: 'test' } }
+
+ it 'is the default type' do
+ expect(inputs).not_to be_valid
+ expect(inputs.errors).to contain_exactly(
+ '`a_input` input: provided value is not a string',
+ '`b_input` input: provided value is not a string',
+ '`c_input` input: default value is not a string'
+ )
+ end
+
+ context 'when the value is a string' do
+ let(:specs) { { foo: { type: 'string' } } }
+ let(:args) { { foo: 'bar' } }
+
+ it 'is valid' do
+ expect(inputs).to be_valid
+ expect(inputs.to_hash).to eq(foo: 'bar')
+ end
+ end
+
+ context 'when the default is a string' do
+ let(:specs) { { foo: { type: 'string', default: 'bar' } } }
+ let(:args) { {} }
+
+ it 'is valid' do
+ expect(inputs).to be_valid
+ expect(inputs.to_hash).to eq(foo: 'bar')
+ end
+ end
+
+ context 'when the value is not a string' do
+ let(:specs) { { foo: { type: 'string' } } }
+ let(:args) { { foo: 123 } }
+
+ it 'is invalid' do
+ expect(inputs).not_to be_valid
+ expect(inputs.errors).to contain_exactly('`foo` input: provided value is not a string')
+ end
+ end
+
+ context 'when the default is not a string' do
+ let(:specs) { { foo: { default: 123, type: 'string' } } }
+ let(:args) { {} }
+
+ it 'is invalid' do
+ expect(inputs).not_to be_valid
+ expect(inputs.errors).to contain_exactly('`foo` input: default value is not a string')
+ end
+ end
+ end
+
+ describe 'number validation' do
+ let(:specs) { { integer: { type: 'number' }, float: { type: 'number' } } }
+
+ context 'when the value is a float or integer' do
+ let(:args) { { integer: 6, float: 6.6 } }
+
+ it 'is valid' do
+ expect(inputs).to be_valid
+ expect(inputs.to_hash).to eq(integer: 6, float: 6.6)
+ end
+ end
+
+ context 'when the default is a float or integer' do
+ let(:specs) { { integer: { default: 6, type: 'number' }, float: { default: 6.6, type: 'number' } } }
+
+ it 'is valid' do
+ expect(inputs).to be_valid
+ expect(inputs.to_hash).to eq(integer: 6, float: 6.6)
+ end
+ end
+
+ context 'when the value is not a number' do
+ let(:specs) { { number_input: { type: 'number' } } }
+ let(:args) { { number_input: 'NaN' } }
+
+ it 'is invalid' do
+ expect(inputs).not_to be_valid
+ expect(inputs.errors).to contain_exactly('`number_input` input: provided value is not a number')
+ end
+ end
+
+ context 'when the default is not a number' do
+ let(:specs) { { number_input: { default: 'NaN', type: 'number' } } }
+ let(:args) { {} }
+
+ it 'is invalid' do
+ expect(inputs).not_to be_valid
+ expect(inputs.errors).to contain_exactly('`number_input` input: default value is not a number')
+ end
+ end
+ end
+
+ describe 'boolean validation' do
+ context 'when the value is true or false' do
+ let(:specs) { { truthy: { type: 'boolean' }, falsey: { type: 'boolean' } } }
+ let(:args) { { truthy: true, falsey: false } }
+
+ it 'is valid' do
+ expect(inputs).to be_valid
+ expect(inputs.to_hash).to eq(truthy: true, falsey: false)
+ end
+ end
+
+ context 'when the default is true or false' do
+ let(:specs) { { truthy: { default: true, type: 'boolean' }, falsey: { default: false, type: 'boolean' } } }
+ let(:args) { {} }
+
+ it 'is valid' do
+ expect(inputs).to be_valid
+ expect(inputs.to_hash).to eq(truthy: true, falsey: false)
+ end
+ end
+
+ context 'when the value is not a boolean' do
+ let(:specs) { { boolean_input: { type: 'boolean' } } }
+ let(:args) { { boolean_input: 'string' } }
+
+ it 'is invalid' do
+ expect(inputs).not_to be_valid
+ expect(inputs.errors).to contain_exactly('`boolean_input` input: provided value is not a boolean')
+ end
+ end
+
+ context 'when the default is not a boolean' do
+ let(:specs) { { boolean_input: { default: 'string', type: 'boolean' } } }
+ let(:args) { {} }
+
+ it 'is invalid' do
+ expect(inputs).not_to be_valid
+ expect(inputs.errors).to contain_exactly('`boolean_input` input: default value is not a boolean')
+ end
+ end
+ end
+
+ context 'when given an unknown type' do
+ let(:specs) { { unknown: { type: 'datetime' } } }
+ let(:args) { { unknown: '2023-10-31' } }
+
+ it 'is invalid' do
+ expect(inputs).not_to be_valid
+ expect(inputs.errors).to contain_exactly(
+ 'unknown input specification for `unknown` (valid types: boolean, number, string)'
+ )
+ end
+ end
+ end
+
+ describe 'inputs with RegEx validation' do
+ context 'when given a value that matches the pattern' do
+ let(:specs) { { test_input: { regex: '^input_value$' } } }
+ let(:args) { { test_input: 'input_value' } }
+
+ it 'is valid' do
+ expect(inputs).to be_valid
+ expect(inputs.to_hash).to eq(test_input: 'input_value')
+ end
+ end
+
+ context 'when given a default that matches the pattern' do
+ let(:specs) { { test_input: { default: 'input_value', regex: '^input_value$' } } }
+ let(:args) { {} }
+
+ it 'is valid' do
+ expect(inputs).to be_valid
+ expect(inputs.to_hash).to eq(test_input: 'input_value')
+ end
+ end
+
+ context 'when given a value that does not match the pattern' do
+ let(:specs) { { test_input: { regex: '^input_value$' } } }
+ let(:args) { { test_input: 'input' } }
+
+ it 'is invalid' do
+ expect(inputs).not_to be_valid
+ expect(inputs.errors).to contain_exactly(
+ '`test_input` input: provided value does not match required RegEx pattern'
+ )
+ end
+ end
+
+ context 'when given a default that does not match the pattern' do
+ let(:specs) { { test_input: { default: 'input', regex: '^input_value$' } } }
+ let(:args) { {} }
+
+ it 'is invalid' do
+ expect(inputs).not_to be_valid
+ expect(inputs.errors).to contain_exactly(
+ '`test_input` input: default value does not match required RegEx pattern'
+ )
+ end
+ end
+
+ context 'when used with any type other than `string`' do
+ let(:specs) { { test_input: { regex: '^input_value$', type: 'number' } } }
+ let(:args) { { test_input: 999 } }
+
+ it 'is invalid' do
+ expect(inputs).not_to be_valid
+ expect(inputs.errors).to contain_exactly(
+ '`test_input` input: RegEx validation can only be used with string inputs'
+ )
+ end
+ end
+
+ context 'when the pattern is unsafe' do
+ let(:specs) { { test_input: { regex: 'a++' } } }
+ let(:args) { { test_input: 'aaaaaaaaaaaaaaaaaaaaa' } }
+
+ it 'is invalid' do
expect(inputs).not_to be_valid
- expect(inputs.errors).to contain_exactly(*errors)
+ expect(inputs.errors).to contain_exactly(
+ '`test_input` input: invalid regular expression'
+ )
end
end
end
diff --git a/spec/lib/gitlab/ci/config/interpolation/interpolator_spec.rb b/spec/lib/gitlab/ci/config/interpolation/interpolator_spec.rb
index 804164c933a..c924323837b 100644
--- a/spec/lib/gitlab/ci/config/interpolation/interpolator_spec.rb
+++ b/spec/lib/gitlab/ci/config/interpolation/interpolator_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::Ci::Config::Interpolation::Interpolator, feature_category
let(:result) { ::Gitlab::Ci::Config::Yaml::Result.new(config: [header, content]) }
- subject { described_class.new(result, arguments) }
+ subject { described_class.new(result, arguments, []) }
context 'when input data is valid' do
let(:header) do
diff --git a/spec/lib/gitlab/ci/config/yaml/loader_spec.rb b/spec/lib/gitlab/ci/config/yaml/loader_spec.rb
index 57a9a47d699..684da1df43b 100644
--- a/spec/lib/gitlab/ci/config/yaml/loader_spec.rb
+++ b/spec/lib/gitlab/ci/config/yaml/loader_spec.rb
@@ -58,4 +58,36 @@ RSpec.describe ::Gitlab::Ci::Config::Yaml::Loader, feature_category: :pipeline_c
end
end
end
+
+ describe '#load_uninterpolated_yaml' do
+ let(:yaml) do
+ <<~YAML
+ ---
+ spec:
+ inputs:
+ test_input:
+ ---
+ test_job:
+ script:
+ - echo "$[[ inputs.test_input ]]"
+ YAML
+ end
+
+ subject(:result) { described_class.new(yaml).load_uninterpolated_yaml }
+
+ it 'returns the config' do
+ expected_content = { test_job: { script: ["echo \"$[[ inputs.test_input ]]\""] } }
+ expect(result).to be_valid
+ expect(result.content).to eq(expected_content)
+ end
+
+ context 'when there is a format error in the yaml' do
+ let(:yaml) { 'invalid: yaml: all the time' }
+
+ it 'returns an error' do
+ expect(result).not_to be_valid
+ expect(result.error).to include('mapping values are not allowed in this context')
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/config/yaml/result_spec.rb b/spec/lib/gitlab/ci/config/yaml/result_spec.rb
index a66c630dfc9..5e9dee02190 100644
--- a/spec/lib/gitlab/ci/config/yaml/result_spec.rb
+++ b/spec/lib/gitlab/ci/config/yaml/result_spec.rb
@@ -3,12 +3,44 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::Yaml::Result, feature_category: :pipeline_composition do
+ it 'raises an error when reading a header when there is none' do
+ result = described_class.new(config: { b: 2 })
+
+ expect { result.header }.to raise_error(ArgumentError)
+ end
+
+ it 'stores an error / exception when initialized with it' do
+ result = described_class.new(error: ArgumentError.new('abc'))
+
+ expect(result).not_to be_valid
+ expect(result.error).to be_a ArgumentError
+ end
+
it 'does not have a header when config is a single hash' do
result = described_class.new(config: { a: 1, b: 2 })
expect(result).not_to have_header
end
+ describe '#inputs' do
+ it 'returns the value of the spec inputs' do
+ result = described_class.new(config: [{ spec: { inputs: { website: nil } } }, { b: 2 }])
+
+ expect(result).to have_header
+ expect(result.inputs).to eq({ website: nil })
+ end
+ end
+
+ describe '#interpolated?' do
+ it 'defaults to false' do
+ expect(described_class.new).not_to be_interpolated
+ end
+
+ it 'returns the value passed to the initializer' do
+ expect(described_class.new(interpolated: true)).to be_interpolated
+ end
+ end
+
context 'when config is an array of hashes' do
context 'when first document matches the header schema' do
it 'has a header' do
@@ -38,27 +70,4 @@ RSpec.describe Gitlab::Ci::Config::Yaml::Result, feature_category: :pipeline_com
expect(result.content).to be_empty
end
end
-
- it 'raises an error when reading a header when there is none' do
- result = described_class.new(config: { b: 2 })
-
- expect { result.header }.to raise_error(ArgumentError)
- end
-
- it 'stores an error / exception when initialized with it' do
- result = described_class.new(error: ArgumentError.new('abc'))
-
- expect(result).not_to be_valid
- expect(result.error).to be_a ArgumentError
- end
-
- describe '#interpolated?' do
- it 'defaults to false' do
- expect(described_class.new).not_to be_interpolated
- end
-
- it 'returns the value passed to the initializer' do
- expect(described_class.new(interpolated: true)).to be_interpolated
- end
- end
end
diff --git a/spec/lib/gitlab/ci/lint_spec.rb b/spec/lib/gitlab/ci/lint_spec.rb
index 4196aad2db4..1637d084c42 100644
--- a/spec/lib/gitlab/ci/lint_spec.rb
+++ b/spec/lib/gitlab/ci/lint_spec.rb
@@ -7,8 +7,18 @@ RSpec.describe Gitlab::Ci::Lint, feature_category: :pipeline_composition do
let_it_be(:user) { create(:user) }
let(:sha) { nil }
+ let(:verify_project_sha) { nil }
let(:ref) { project.default_branch }
- let(:lint) { described_class.new(project: project, current_user: user, sha: sha) }
+ let(:kwargs) do
+ {
+ project: project,
+ current_user: user,
+ sha: sha,
+ verify_project_sha: verify_project_sha
+ }.compact
+ end
+
+ let(:lint) { described_class.new(**kwargs) }
describe '#validate' do
subject { lint.validate(content, dry_run: dry_run, ref: ref) }
@@ -252,6 +262,19 @@ RSpec.describe Gitlab::Ci::Lint, feature_category: :pipeline_composition do
subject
end
+ shared_examples 'when sha is not provided' do
+ it 'runs YamlProcessor with verify_project_sha: false' do
+ expect(Gitlab::Ci::YamlProcessor)
+ .to receive(:new)
+ .with(content, a_hash_including(verify_project_sha: false))
+ .and_call_original
+
+ subject
+ end
+ end
+
+ it_behaves_like 'when sha is not provided'
+
context 'when sha is provided' do
let(:sha) { project.commit.sha }
@@ -288,20 +311,16 @@ RSpec.describe Gitlab::Ci::Lint, feature_category: :pipeline_composition do
context 'when a project ref does not contain the sha' do
it 'returns an error' do
expect(subject).not_to be_valid
- expect(subject.errors).to include(/Could not validate configuration/)
+ expect(subject.errors).to include(
+ /configuration originates from an external project or a commit not associated with a Git reference/)
end
end
end
- end
- context 'when sha is not provided' do
- it 'runs YamlProcessor with verify_project_sha: false' do
- expect(Gitlab::Ci::YamlProcessor)
- .to receive(:new)
- .with(content, a_hash_including(verify_project_sha: false))
- .and_call_original
+ context 'when verify_project_sha is false' do
+ let(:verify_project_sha) { false }
- subject
+ it_behaves_like 'when sha is not provided'
end
end
end
@@ -468,7 +487,7 @@ RSpec.describe Gitlab::Ci::Lint, feature_category: :pipeline_composition do
end
context 'when project is not provided' do
- let(:project) { nil }
+ let(:lint) { described_class.new(project: nil, **kwargs) }
let(:project_nil_loggable_data) do
expected_data.except('project_id')
diff --git a/spec/lib/gitlab/ci/parsers/security/common_spec.rb b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
index 9470d59f502..648b8ac2db9 100644
--- a/spec/lib/gitlab/ci/parsers/security/common_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
@@ -370,6 +370,14 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common, feature_category: :vulnera
end
end
+ describe 'setting CVSS' do
+ let(:cvss_vectors) { report.findings.filter_map(&:cvss).reject(&:empty?) }
+
+ it 'ingests the provided CVSS vectors' do
+ expect(cvss_vectors.count).to eq(1)
+ end
+ end
+
describe 'setting the uuid' do
let(:finding_uuids) { report.findings.map(&:uuid) }
let(:uuid_1) do
diff --git a/spec/lib/gitlab/ci/parsers/test/junit_spec.rb b/spec/lib/gitlab/ci/parsers/test/junit_spec.rb
index 821a5057d2e..1bab27c877d 100644
--- a/spec/lib/gitlab/ci/parsers/test/junit_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/test/junit_spec.rb
@@ -111,7 +111,21 @@ RSpec.describe Gitlab::Ci::Parsers::Test::Junit do
it_behaves_like '<testcase> XML parser',
::Gitlab::Ci::Reports::TestCase::STATUS_FAILED,
- 'Some failure'
+ "System Err:\n\nSome failure"
+ end
+
+ context 'and has failure with message, system-out and system-err' do
+ let(:testcase_content) do
+ <<-EOF.strip_heredoc
+ <failure>Some failure</failure>
+ <system-out>This is the system output</system-out>
+ <system-err>This is the system err</system-err>
+ EOF
+ end
+
+ it_behaves_like '<testcase> XML parser',
+ ::Gitlab::Ci::Reports::TestCase::STATUS_FAILED,
+ "Some failure\n\nSystem Out:\n\nThis is the system output\n\nSystem Err:\n\nThis is the system err"
end
context 'and has error' do
@@ -132,7 +146,21 @@ RSpec.describe Gitlab::Ci::Parsers::Test::Junit do
it_behaves_like '<testcase> XML parser',
::Gitlab::Ci::Reports::TestCase::STATUS_ERROR,
- 'Some error'
+ "System Err:\n\nSome error"
+ end
+
+ context 'and has error with message, system-out and system-err' do
+ let(:testcase_content) do
+ <<-EOF.strip_heredoc
+ <error>Some error</error>
+ <system-out>This is the system output</system-out>
+ <system-err>This is the system err</system-err>
+ EOF
+ end
+
+ it_behaves_like '<testcase> XML parser',
+ ::Gitlab::Ci::Reports::TestCase::STATUS_ERROR,
+ "Some error\n\nSystem Out:\n\nThis is the system output\n\nSystem Err:\n\nThis is the system err"
end
context 'and has skipped' do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb
index c3516c467d4..2a26747f65a 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb
@@ -92,7 +92,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::Abilities, feature_categor
it 'adds an error about imports' do
expect(pipeline.errors.to_a)
- .to include /Import in progress/
+ .to include /before project import is complete/
end
it 'breaks the pipeline builder chain' do
diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexeme/pattern/regular_expression_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexeme/pattern/regular_expression_spec.rb
new file mode 100644
index 00000000000..145777a9476
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/expression/lexeme/pattern/regular_expression_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Pipeline::Expression::Lexeme::Pattern::RegularExpression, feature_category: :continuous_integration do
+ describe '#initialize' do
+ it 'initializes the pattern' do
+ pattern = described_class.new('/foo/')
+
+ expect(pattern.value).to eq('/foo/')
+ end
+ end
+
+ describe '#valid?' do
+ subject { described_class.new(pattern).valid? }
+
+ context 'with valid expressions' do
+ let(:pattern) { '/foo\\/bar/' }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when the value is not a valid regular expression' do
+ let(:pattern) { 'foo' }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '#expression' do
+ subject { described_class.new(pattern).expression }
+
+ context 'with valid expressions' do
+ let(:pattern) { '/bar/' }
+
+ it { is_expected.to eq Gitlab::UntrustedRegexp.new('bar') }
+ end
+
+ context 'when the value is not a valid regular expression' do
+ let(:pattern) { 'foo' }
+
+ it { expect { subject }.to raise_error(RegexpError) }
+ end
+
+ context 'when the request store is activated', :request_store do
+ let(:pattern) { '/foo\\/bar/' }
+
+ it 'fabricates once' do
+ expect(Gitlab::UntrustedRegexp::RubySyntax).to receive(:fabricate!).once.and_call_original
+
+ 2.times do
+ expect(described_class.new(pattern).expression).to be_a(Gitlab::UntrustedRegexp)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexeme/pattern_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexeme/pattern_spec.rb
index be205395b69..09899cb9fc4 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/lexeme/pattern_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/lexeme/pattern_spec.rb
@@ -1,8 +1,8 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
-RSpec.describe Gitlab::Ci::Pipeline::Expression::Lexeme::Pattern do
+RSpec.describe Gitlab::Ci::Pipeline::Expression::Lexeme::Pattern, feature_category: :continuous_integration do
describe '#initialize' do
context 'when the value is a valid regular expression' do
it 'initializes the pattern' do
@@ -164,14 +164,5 @@ RSpec.describe Gitlab::Ci::Pipeline::Expression::Lexeme::Pattern do
expect(regexp.evaluate).to eq Gitlab::UntrustedRegexp.new('abc')
end
-
- it 'raises error if evaluated regexp is not valid' do
- allow(Gitlab::UntrustedRegexp::RubySyntax).to receive(:valid?).and_return(true)
-
- regexp = described_class.new('/invalid ( .*/')
-
- expect { regexp.evaluate }
- .to raise_error(Gitlab::Ci::Pipeline::Expression::RuntimeError)
- end
end
end
diff --git a/spec/lib/gitlab/ci/status/bridge/factory_spec.rb b/spec/lib/gitlab/ci/status/bridge/factory_spec.rb
index 040c3ec7f6e..ca1b00e2f5b 100644
--- a/spec/lib/gitlab/ci/status/bridge/factory_spec.rb
+++ b/spec/lib/gitlab/ci/status/bridge/factory_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Gitlab::Ci::Status::Bridge::Factory, feature_category: :continuou
end
it 'fabricates status with correct details' do
- expect(status.text).to eq s_('CiStatusText|created')
+ expect(status.text).to eq s_('CiStatusText|Created')
expect(status.icon).to eq 'status_created'
expect(status.favicon).to eq 'favicon_status_created'
expect(status.label).to eq 'created'
@@ -49,11 +49,11 @@ RSpec.describe Gitlab::Ci::Status::Bridge::Factory, feature_category: :continuou
end
it 'fabricates status with correct details' do
- expect(status.text).to eq s_('CiStatusText|failed')
+ expect(status.text).to eq s_('CiStatusText|Failed')
expect(status.icon).to eq 'status_failed'
expect(status.favicon).to eq 'favicon_status_failed'
expect(status.label).to eq 'failed'
- expect(status.status_tooltip).to eq "#{s_('CiStatusText|failed')} - (unknown failure)"
+ expect(status.status_tooltip).to eq "#{s_('CiStatusLabel|failed')} - (unknown failure)"
expect(status).not_to have_details
expect(status).to have_action
end
@@ -67,7 +67,7 @@ RSpec.describe Gitlab::Ci::Status::Bridge::Factory, feature_category: :continuou
it 'fabricates correct status_tooltip' do
expect(status.status_tooltip).to eq(
- "#{s_('CiStatusText|failed')} - (downstream pipeline can not be created, Pipeline will not run for the selected trigger. " \
+ "#{s_('CiStatusLabel|failed')} - (downstream pipeline can not be created, Pipeline will not run for the selected trigger. " \
"The rules configuration prevented any jobs from being added to the pipeline., other error)"
)
end
@@ -93,7 +93,7 @@ RSpec.describe Gitlab::Ci::Status::Bridge::Factory, feature_category: :continuou
end
it 'fabricates status with correct details' do
- expect(status.text).to eq s_('CiStatusText|manual')
+ expect(status.text).to eq s_('CiStatusText|Manual')
expect(status.group).to eq 'manual'
expect(status.icon).to eq 'status_manual'
expect(status.favicon).to eq 'favicon_status_manual'
@@ -128,7 +128,7 @@ RSpec.describe Gitlab::Ci::Status::Bridge::Factory, feature_category: :continuou
end
it 'fabricates status with correct details' do
- expect(status.text).to eq 'waiting'
+ expect(status.text).to eq 'Waiting'
expect(status.group).to eq 'waiting-for-resource'
expect(status.icon).to eq 'status_pending'
expect(status.favicon).to eq 'favicon_status_pending'
@@ -154,7 +154,7 @@ RSpec.describe Gitlab::Ci::Status::Bridge::Factory, feature_category: :continuou
end
it 'fabricates status with correct details' do
- expect(status.text).to eq s_('CiStatusText|passed')
+ expect(status.text).to eq s_('CiStatusText|Passed')
expect(status.icon).to eq 'status_success'
expect(status.favicon).to eq 'favicon_status_success'
expect(status).to have_action
diff --git a/spec/lib/gitlab/ci/status/build/factory_spec.rb b/spec/lib/gitlab/ci/status/build/factory_spec.rb
index f71f3d47452..1d043966321 100644
--- a/spec/lib/gitlab/ci/status/build/factory_spec.rb
+++ b/spec/lib/gitlab/ci/status/build/factory_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Gitlab::Ci::Status::Build::Factory do
end
it 'fabricates status with correct details' do
- expect(status.text).to eq s_('CiStatusText|passed')
+ expect(status.text).to eq s_('CiStatusText|Passed')
expect(status.icon).to eq 'status_success'
expect(status.favicon).to eq 'favicon_status_success'
expect(status.label).to eq s_('CiStatusLabel|passed')
@@ -58,7 +58,7 @@ RSpec.describe Gitlab::Ci::Status::Build::Factory do
end
it 'fabricates status with correct details' do
- expect(status.text).to eq s_('CiStatusText|passed')
+ expect(status.text).to eq s_('CiStatusText|Passed')
expect(status.icon).to eq 'status_success'
expect(status.favicon).to eq 'favicon_status_success'
expect(status.label).to eq s_('CiStatusLabel|passed')
@@ -86,11 +86,11 @@ RSpec.describe Gitlab::Ci::Status::Build::Factory do
end
it 'fabricates status with correct details' do
- expect(status.text).to eq s_('CiStatusText|failed')
+ expect(status.text).to eq s_('CiStatusText|Failed')
expect(status.icon).to eq 'status_failed'
expect(status.favicon).to eq 'favicon_status_failed'
expect(status.label).to eq s_('CiStatusLabel|failed')
- expect(status.status_tooltip).to eq "#{s_('CiStatusText|failed')} - (unknown failure)"
+ expect(status.status_tooltip).to eq "#{s_('CiStatusLabel|failed')} - (unknown failure)"
expect(status).to have_details
expect(status).to have_action
end
@@ -115,7 +115,7 @@ RSpec.describe Gitlab::Ci::Status::Build::Factory do
end
it 'fabricates status with correct details' do
- expect(status.text).to eq s_('CiStatusText|failed')
+ expect(status.text).to eq s_('CiStatusText|Failed')
expect(status.icon).to eq 'status_warning'
expect(status.favicon).to eq 'favicon_status_failed'
expect(status.label).to eq 'failed (allowed to fail)'
@@ -144,7 +144,7 @@ RSpec.describe Gitlab::Ci::Status::Build::Factory do
end
it 'fabricates status with correct details' do
- expect(status.text).to eq s_('CiStatusText|failed')
+ expect(status.text).to eq s_('CiStatusText|Failed')
expect(status.icon).to eq 'status_failed'
expect(status.favicon).to eq 'favicon_status_failed'
expect(status.label).to eq s_('CiStatusLabel|failed')
@@ -173,7 +173,7 @@ RSpec.describe Gitlab::Ci::Status::Build::Factory do
end
it 'fabricates status with correct details' do
- expect(status.text).to eq s_('CiStatusText|canceled')
+ expect(status.text).to eq s_('CiStatusText|Canceled')
expect(status.icon).to eq 'status_canceled'
expect(status.favicon).to eq 'favicon_status_canceled'
expect(status.illustration).to include(:image, :size, :title)
@@ -200,10 +200,10 @@ RSpec.describe Gitlab::Ci::Status::Build::Factory do
end
it 'fabricates status with correct details' do
- expect(status.text).to eq s_('CiStatus|running')
+ expect(status.text).to eq s_('CiStatusText|Running')
expect(status.icon).to eq 'status_running'
expect(status.favicon).to eq 'favicon_status_running'
- expect(status.label).to eq s_('CiStatus|running')
+ expect(status.label).to eq s_('CiStatusLabel|running')
expect(status).to have_details
expect(status).to have_action
end
@@ -226,7 +226,7 @@ RSpec.describe Gitlab::Ci::Status::Build::Factory do
end
it 'fabricates status with correct details' do
- expect(status.text).to eq s_('CiStatusText|pending')
+ expect(status.text).to eq s_('CiStatusText|Pending')
expect(status.icon).to eq 'status_pending'
expect(status.favicon).to eq 'favicon_status_pending'
expect(status.illustration).to include(:image, :size, :title, :content)
@@ -252,7 +252,7 @@ RSpec.describe Gitlab::Ci::Status::Build::Factory do
end
it 'fabricates status with correct details' do
- expect(status.text).to eq s_('CiStatusText|skipped')
+ expect(status.text).to eq s_('CiStatusText|Skipped')
expect(status.icon).to eq 'status_skipped'
expect(status.favicon).to eq 'favicon_status_skipped'
expect(status.illustration).to include(:image, :size, :title)
@@ -282,7 +282,7 @@ RSpec.describe Gitlab::Ci::Status::Build::Factory do
end
it 'fabricates status with correct details' do
- expect(status.text).to eq s_('CiStatusText|manual')
+ expect(status.text).to eq s_('CiStatusText|Manual')
expect(status.group).to eq 'manual'
expect(status.icon).to eq 'status_manual'
expect(status.favicon).to eq 'favicon_status_manual'
@@ -339,7 +339,7 @@ RSpec.describe Gitlab::Ci::Status::Build::Factory do
end
it 'fabricates status with correct details' do
- expect(status.text).to eq s_('CiStatusText|manual')
+ expect(status.text).to eq s_('CiStatusText|Manual')
expect(status.group).to eq 'manual'
expect(status.icon).to eq 'status_manual'
expect(status.favicon).to eq 'favicon_status_manual'
@@ -370,7 +370,7 @@ RSpec.describe Gitlab::Ci::Status::Build::Factory do
end
it 'fabricates status with correct details' do
- expect(status.text).to eq s_('CiStatusText|scheduled')
+ expect(status.text).to eq s_('CiStatusText|Scheduled')
expect(status.group).to eq 'scheduled'
expect(status.icon).to eq 'status_scheduled'
expect(status.favicon).to eq 'favicon_status_scheduled'
diff --git a/spec/lib/gitlab/ci/status/canceled_spec.rb b/spec/lib/gitlab/ci/status/canceled_spec.rb
index 7fae76f61ea..ddb8b7ecff9 100644
--- a/spec/lib/gitlab/ci/status/canceled_spec.rb
+++ b/spec/lib/gitlab/ci/status/canceled_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Ci::Status::Canceled do
end
describe '#text' do
- it { expect(subject.text).to eq 'canceled' }
+ it { expect(subject.text).to eq 'Canceled' }
end
describe '#label' do
diff --git a/spec/lib/gitlab/ci/status/created_spec.rb b/spec/lib/gitlab/ci/status/created_spec.rb
index 1e54d1ed8c5..19fecbb33b9 100644
--- a/spec/lib/gitlab/ci/status/created_spec.rb
+++ b/spec/lib/gitlab/ci/status/created_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Ci::Status::Created do
end
describe '#text' do
- it { expect(subject.text).to eq 'created' }
+ it { expect(subject.text).to eq 'Created' }
end
describe '#label' do
diff --git a/spec/lib/gitlab/ci/status/factory_spec.rb b/spec/lib/gitlab/ci/status/factory_spec.rb
index 94a6255f1e2..277b440a21d 100644
--- a/spec/lib/gitlab/ci/status/factory_spec.rb
+++ b/spec/lib/gitlab/ci/status/factory_spec.rb
@@ -74,7 +74,7 @@ RSpec.describe Gitlab::Ci::Status::Factory do
end
it 'delegates to core status' do
- expect(fabricated_status.text).to eq 'passed'
+ expect(fabricated_status.text).to eq 'Passed'
end
it 'latest matches status becomes a status name' do
@@ -104,7 +104,7 @@ RSpec.describe Gitlab::Ci::Status::Factory do
end
it 'delegates to core status' do
- expect(fabricated_status.text).to eq 'passed'
+ expect(fabricated_status.text).to eq 'Passed'
end
it 'matches correct core status' do
diff --git a/spec/lib/gitlab/ci/status/failed_spec.rb b/spec/lib/gitlab/ci/status/failed_spec.rb
index f3f3304b04d..48df3e99855 100644
--- a/spec/lib/gitlab/ci/status/failed_spec.rb
+++ b/spec/lib/gitlab/ci/status/failed_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Ci::Status::Failed do
end
describe '#text' do
- it { expect(subject.text).to eq 'failed' }
+ it { expect(subject.text).to eq 'Failed' }
end
describe '#label' do
diff --git a/spec/lib/gitlab/ci/status/manual_spec.rb b/spec/lib/gitlab/ci/status/manual_spec.rb
index a9203438898..6e02772f670 100644
--- a/spec/lib/gitlab/ci/status/manual_spec.rb
+++ b/spec/lib/gitlab/ci/status/manual_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Ci::Status::Manual do
end
describe '#text' do
- it { expect(subject.text).to eq 'manual' }
+ it { expect(subject.text).to eq 'Manual' }
end
describe '#label' do
diff --git a/spec/lib/gitlab/ci/status/pending_spec.rb b/spec/lib/gitlab/ci/status/pending_spec.rb
index 1c062a0133d..82ea987e4c9 100644
--- a/spec/lib/gitlab/ci/status/pending_spec.rb
+++ b/spec/lib/gitlab/ci/status/pending_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Ci::Status::Pending do
end
describe '#text' do
- it { expect(subject.text).to eq 'pending' }
+ it { expect(subject.text).to eq 'Pending' }
end
describe '#label' do
diff --git a/spec/lib/gitlab/ci/status/pipeline/blocked_spec.rb b/spec/lib/gitlab/ci/status/pipeline/blocked_spec.rb
index 8fd974972e4..8948d83f9cb 100644
--- a/spec/lib/gitlab/ci/status/pipeline/blocked_spec.rb
+++ b/spec/lib/gitlab/ci/status/pipeline/blocked_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Gitlab::Ci::Status::Pipeline::Blocked do
describe '#text' do
it 'overrides status text' do
- expect(subject.text).to eq 'blocked'
+ expect(subject.text).to eq 'Blocked'
end
end
diff --git a/spec/lib/gitlab/ci/status/pipeline/delayed_spec.rb b/spec/lib/gitlab/ci/status/pipeline/delayed_spec.rb
index 1302c2069ff..072ea642e70 100644
--- a/spec/lib/gitlab/ci/status/pipeline/delayed_spec.rb
+++ b/spec/lib/gitlab/ci/status/pipeline/delayed_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Gitlab::Ci::Status::Pipeline::Delayed do
describe '#text' do
it 'overrides status text' do
- expect(subject.text).to eq 'delayed'
+ expect(subject.text).to eq 'Delayed'
end
end
diff --git a/spec/lib/gitlab/ci/status/preparing_spec.rb b/spec/lib/gitlab/ci/status/preparing_spec.rb
index ec1850c1959..f9033bce5f2 100644
--- a/spec/lib/gitlab/ci/status/preparing_spec.rb
+++ b/spec/lib/gitlab/ci/status/preparing_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Ci::Status::Preparing do
end
describe '#text' do
- it { expect(subject.text).to eq 'preparing' }
+ it { expect(subject.text).to eq 'Preparing' }
end
describe '#label' do
diff --git a/spec/lib/gitlab/ci/status/running_spec.rb b/spec/lib/gitlab/ci/status/running_spec.rb
index e40d696ee4d..aefc7e90e85 100644
--- a/spec/lib/gitlab/ci/status/running_spec.rb
+++ b/spec/lib/gitlab/ci/status/running_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Ci::Status::Running do
end
describe '#text' do
- it { expect(subject.text).to eq 'running' }
+ it { expect(subject.text).to eq 'Running' }
end
describe '#label' do
diff --git a/spec/lib/gitlab/ci/status/scheduled_spec.rb b/spec/lib/gitlab/ci/status/scheduled_spec.rb
index df72455d3c1..1a8e48052ec 100644
--- a/spec/lib/gitlab/ci/status/scheduled_spec.rb
+++ b/spec/lib/gitlab/ci/status/scheduled_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Ci::Status::Scheduled, feature_category: :continuous_inte
end
describe '#text' do
- it { expect(subject.text).to eq 'scheduled' }
+ it { expect(subject.text).to eq 'Scheduled' }
end
describe '#label' do
diff --git a/spec/lib/gitlab/ci/status/skipped_spec.rb b/spec/lib/gitlab/ci/status/skipped_spec.rb
index ac3c2f253f7..da674df2090 100644
--- a/spec/lib/gitlab/ci/status/skipped_spec.rb
+++ b/spec/lib/gitlab/ci/status/skipped_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Ci::Status::Skipped do
end
describe '#text' do
- it { expect(subject.text).to eq 'skipped' }
+ it { expect(subject.text).to eq 'Skipped' }
end
describe '#label' do
diff --git a/spec/lib/gitlab/ci/status/success_spec.rb b/spec/lib/gitlab/ci/status/success_spec.rb
index f2069334abd..c6567684ac0 100644
--- a/spec/lib/gitlab/ci/status/success_spec.rb
+++ b/spec/lib/gitlab/ci/status/success_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Ci::Status::Success do
end
describe '#text' do
- it { expect(subject.text).to eq 'passed' }
+ it { expect(subject.text).to eq 'Passed' }
end
describe '#label' do
diff --git a/spec/lib/gitlab/ci/status/success_warning_spec.rb b/spec/lib/gitlab/ci/status/success_warning_spec.rb
index 1725f90a0cf..4a669da358e 100644
--- a/spec/lib/gitlab/ci/status/success_warning_spec.rb
+++ b/spec/lib/gitlab/ci/status/success_warning_spec.rb
@@ -9,8 +9,8 @@ RSpec.describe Gitlab::Ci::Status::SuccessWarning, feature_category: :continuous
described_class.new(status)
end
- describe '#test' do
- it { expect(subject.text).to eq 'warning' }
+ describe '#text' do
+ it { expect(subject.text).to eq 'Warning' }
end
describe '#label' do
@@ -25,6 +25,10 @@ RSpec.describe Gitlab::Ci::Status::SuccessWarning, feature_category: :continuous
it { expect(subject.group).to eq 'success-with-warnings' }
end
+ describe '#name' do
+ it { expect(subject.name).to eq 'SUCCESS_WITH_WARNINGS' }
+ end
+
describe '.matches?' do
let(:matchable) { double('matchable') }
diff --git a/spec/lib/gitlab/ci/status/waiting_for_resource_spec.rb b/spec/lib/gitlab/ci/status/waiting_for_resource_spec.rb
index 6f5ab77a358..bd9663fb80f 100644
--- a/spec/lib/gitlab/ci/status/waiting_for_resource_spec.rb
+++ b/spec/lib/gitlab/ci/status/waiting_for_resource_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Ci::Status::WaitingForResource do
end
describe '#text' do
- it { expect(subject.text).to eq 'waiting' }
+ it { expect(subject.text).to eq 'Waiting' }
end
describe '#label' do
@@ -27,6 +27,10 @@ RSpec.describe Gitlab::Ci::Status::WaitingForResource do
it { expect(subject.group).to eq 'waiting-for-resource' }
end
+ describe '#name' do
+ it { expect(subject.name).to eq 'WAITING_FOR_RESOURCE' }
+ end
+
describe '#details_path' do
it { expect(subject.details_path).to be_nil }
end
diff --git a/spec/lib/gitlab/ci/variables/builder/group_spec.rb b/spec/lib/gitlab/ci/variables/builder/group_spec.rb
index c3743ebd2d7..004e63f424f 100644
--- a/spec/lib/gitlab/ci/variables/builder/group_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder/group_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Variables::Builder::Group do
+RSpec.describe Gitlab::Ci::Variables::Builder::Group, feature_category: :secrets_management do
let_it_be(:group) { create(:group) }
let(:builder) { described_class.new(group) }
@@ -185,21 +185,7 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Group do
end
end
- context 'recursive' do
- before do
- stub_feature_flags(use_traversal_ids: false)
- end
-
- include_examples 'correct ancestor order'
- end
-
- context 'linear' do
- before do
- stub_feature_flags(use_traversal_ids: true)
- end
-
- include_examples 'correct ancestor order'
- end
+ include_examples 'correct ancestor order'
end
end
end
diff --git a/spec/lib/gitlab/ci/variables/collection/item_spec.rb b/spec/lib/gitlab/ci/variables/collection/item_spec.rb
index f7c6f7f51df..d96c8f1bd0c 100644
--- a/spec/lib/gitlab/ci/variables/collection/item_spec.rb
+++ b/spec/lib/gitlab/ci/variables/collection/item_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Variables::Collection::Item do
+RSpec.describe Gitlab::Ci::Variables::Collection::Item, feature_category: :secrets_management do
let(:variable_key) { 'VAR' }
let(:variable_value) { 'something' }
let(:expected_value) { variable_value }
@@ -217,6 +217,25 @@ RSpec.describe Gitlab::Ci::Variables::Collection::Item do
end
end
+ describe '#masked?' do
+ let(:variable_hash) { { key: variable_key, value: variable_value } }
+ let(:item) { described_class.new(**variable_hash) }
+
+ context 'when :masked is not specified' do
+ it 'returns false' do
+ expect(item.masked?).to eq(false)
+ end
+ end
+
+ context 'when :masked is specified as true' do
+ let(:variable_hash) { { key: variable_key, value: variable_value, masked: true } }
+
+ it 'returns true' do
+ expect(item.masked?).to eq(true)
+ end
+ end
+ end
+
describe '#to_runner_variable' do
context 'when variable is not a file-related' do
it 'returns a runner-compatible hash representation' do
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index 5cfd8d9b9fb..81bc8c7ab9a 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -794,28 +794,6 @@ module Gitlab
it_behaves_like 'returns errors', 'test_job_1 has the following needs duplicated: test_job_2.'
end
-
- context 'when needed job name is too long' do
- let(:job_name) { 'a' * (::Ci::BuildNeed::MAX_JOB_NAME_LENGTH + 1) }
-
- let(:config) do
- <<-EOYML
- lint_job:
- script: 'echo lint_job'
- rules:
- - if: $var == null
- needs: [#{job_name}]
- #{job_name}:
- script: 'echo job'
- EOYML
- end
-
- it 'returns an error' do
- expect(subject.errors).to include(
- "lint_job job: need `#{job_name}` name is too long (maximum is #{::Ci::BuildNeed::MAX_JOB_NAME_LENGTH} characters)"
- )
- end
- end
end
context 'rule needs as hash' do
@@ -3659,7 +3637,8 @@ module Gitlab
context 'when a project ref does not contain the forked commit sha' do
it 'returns an error' do
is_expected.not_to be_valid
- expect(subject.errors).to include(/Could not validate configuration/)
+ expect(subject.errors).to include(
+ /configuration originates from an external project or a commit not associated with a Git reference/)
end
it_behaves_like 'when the processor is executed twice consecutively'
diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
index 3682a654181..9e2f3bda14c 100644
--- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
+++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
@@ -577,17 +577,6 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader, feature_category: :s
end
end
- context 'when browsersdk_tracking is disabled' do
- before do
- stub_feature_flags(browsersdk_tracking: false)
- stub_env('GITLAB_ANALYTICS_URL', analytics_url)
- end
-
- it 'does not add GITLAB_ANALYTICS_URL to connect-src' do
- expect(connect_src).not_to include(analytics_url)
- end
- end
-
context 'when GITLAB_ANALYTICS_URL is not set' do
before do
stub_env('GITLAB_ANALYTICS_URL', nil)
diff --git a/spec/lib/gitlab/database/click_house_client_spec.rb b/spec/lib/gitlab/database/click_house_client_spec.rb
index 6e63ae56557..271500ed3f6 100644
--- a/spec/lib/gitlab/database/click_house_client_spec.rb
+++ b/spec/lib/gitlab/database/click_house_client_spec.rb
@@ -112,6 +112,28 @@ RSpec.describe 'ClickHouse::Client', :click_house, feature_category: :database d
results = ClickHouse::Client.select(select_query, :main)
expect(results).to be_empty
+
+ # Async, lazy deletion
+ # Set the `deleted` field to 1 and update the `updated_at` timestamp.
+ # Based on the highest version of the given row (updated_at), CH will eventually remove the row.
+ # See: https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/replacingmergetree#is_deleted
+ soft_delete_query = ClickHouse::Client::Query.new(
+ raw_query: %{
+ INSERT INTO events (id, deleted, updated_at)
+ VALUES ({id:UInt64}, 1, {updated_at:DateTime64(6, 'UTC')})
+ },
+ placeholders: { id: event2.id, updated_at: (event2.updated_at + 2.hours).utc.to_f }
+ )
+
+ ClickHouse::Client.execute(soft_delete_query, :main)
+
+ select_query = ClickHouse::Client::Query.new(
+ raw_query: 'SELECT * FROM events FINAL WHERE id = {id:UInt64}',
+ placeholders: { id: event2.id }
+ )
+
+ results = ClickHouse::Client.select(select_query, :main)
+ expect(results).to be_empty
end
end
end
diff --git a/spec/lib/gitlab/database/gitlab_schema_spec.rb b/spec/lib/gitlab/database/gitlab_schema_spec.rb
index e402014df90..a6de695c345 100644
--- a/spec/lib/gitlab/database/gitlab_schema_spec.rb
+++ b/spec/lib/gitlab/database/gitlab_schema_spec.rb
@@ -226,57 +226,83 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
allow_cross_joins: %i[gitlab_shared],
allow_cross_transactions: %i[gitlab_internal gitlab_shared],
allow_cross_foreign_keys: %i[]
+ ),
+ Gitlab::Database::GitlabSchemaInfo.new(
+ name: "gitlab_main_cell",
+ allow_cross_joins: [
+ :gitlab_shared,
+ :gitlab_main,
+ { gitlab_main_clusterwide: { specific_tables: %w[plans] } }
+ ],
+ allow_cross_transactions: [
+ :gitlab_internal,
+ :gitlab_shared,
+ :gitlab_main,
+ { gitlab_main_clusterwide: { specific_tables: %w[plans] } }
+ ],
+ allow_cross_foreign_keys: [
+ { gitlab_main_clusterwide: { specific_tables: %w[plans] } }
+ ]
)
].index_by(&:name)
)
end
describe '.cross_joins_allowed?' do
- where(:schemas, :result) do
- %i[] | true
- %i[gitlab_main_clusterwide gitlab_main] | true
- %i[gitlab_main_clusterwide gitlab_ci] | false
- %i[gitlab_main_clusterwide gitlab_main gitlab_ci] | false
- %i[gitlab_main_clusterwide gitlab_internal] | false
- %i[gitlab_main gitlab_ci] | false
- %i[gitlab_main_clusterwide gitlab_main gitlab_shared] | true
- %i[gitlab_main_clusterwide gitlab_shared] | true
+ where(:schemas, :tables, :result) do
+ %i[] | %i[] | true
+ %i[gitlab_main] | %i[] | true
+ %i[gitlab_main_clusterwide gitlab_main] | %i[] | true
+ %i[gitlab_main_clusterwide gitlab_ci] | %i[] | false
+ %i[gitlab_main_clusterwide gitlab_main gitlab_ci] | %i[] | false
+ %i[gitlab_main_clusterwide gitlab_internal] | %i[] | false
+ %i[gitlab_main gitlab_ci] | %i[] | false
+ %i[gitlab_main_clusterwide gitlab_main gitlab_shared] | %i[] | true
+ %i[gitlab_main_clusterwide gitlab_shared] | %i[] | true
+ %i[gitlab_main_clusterwide gitlab_main_cell] | %w[users namespaces] | false
+ %i[gitlab_main_clusterwide gitlab_main_cell] | %w[plans namespaces] | true
end
with_them do
- it { expect(described_class.cross_joins_allowed?(schemas)).to eq(result) }
+ it { expect(described_class.cross_joins_allowed?(schemas, tables)).to eq(result) }
end
end
describe '.cross_transactions_allowed?' do
- where(:schemas, :result) do
- %i[] | true
- %i[gitlab_main_clusterwide gitlab_main] | true
- %i[gitlab_main_clusterwide gitlab_ci] | false
- %i[gitlab_main_clusterwide gitlab_main gitlab_ci] | false
- %i[gitlab_main_clusterwide gitlab_internal] | true
- %i[gitlab_main gitlab_ci] | false
- %i[gitlab_main_clusterwide gitlab_main gitlab_shared] | true
- %i[gitlab_main_clusterwide gitlab_shared] | true
+ where(:schemas, :tables, :result) do
+ %i[] | %i[] | true
+ %i[gitlab_main] | %i[] | true
+ %i[gitlab_main_clusterwide gitlab_main] | %i[] | true
+ %i[gitlab_main_clusterwide gitlab_ci] | %i[] | false
+ %i[gitlab_main_clusterwide gitlab_main gitlab_ci] | %i[] | false
+ %i[gitlab_main_clusterwide gitlab_internal] | %i[] | true
+ %i[gitlab_main gitlab_ci] | %i[] | false
+ %i[gitlab_main_clusterwide gitlab_main gitlab_shared] | %i[] | true
+ %i[gitlab_main_clusterwide gitlab_shared] | %i[] | true
+ %i[gitlab_main_clusterwide gitlab_main_cell] | %w[users namespaces] | false
+ %i[gitlab_main_clusterwide gitlab_main_cell] | %w[plans namespaces] | true
end
with_them do
- it { expect(described_class.cross_transactions_allowed?(schemas)).to eq(result) }
+ it { expect(described_class.cross_transactions_allowed?(schemas, tables)).to eq(result) }
end
end
describe '.cross_foreign_key_allowed?' do
- where(:schemas, :result) do
- %i[] | false
- %i[gitlab_main_clusterwide gitlab_main] | true
- %i[gitlab_main_clusterwide gitlab_ci] | false
- %i[gitlab_main_clusterwide gitlab_internal] | false
- %i[gitlab_main gitlab_ci] | false
- %i[gitlab_main_clusterwide gitlab_shared] | false
+ where(:schemas, :tables, :result) do
+ %i[] | %i[] | false
+ %i[gitlab_main] | %i[] | true
+ %i[gitlab_main_clusterwide gitlab_main] | %i[] | true
+ %i[gitlab_main_clusterwide gitlab_ci] | %i[] | false
+ %i[gitlab_main_clusterwide gitlab_internal] | %i[] | false
+ %i[gitlab_main gitlab_ci] | %i[] | false
+ %i[gitlab_main_clusterwide gitlab_shared] | %i[] | false
+ %i[gitlab_main_clusterwide gitlab_main_cell] | %w[users namespaces] | false
+ %i[gitlab_main_clusterwide gitlab_main_cell] | %w[plans namespaces] | true
end
with_them do
- it { expect(described_class.cross_foreign_key_allowed?(schemas)).to eq(result) }
+ it { expect(described_class.cross_foreign_key_allowed?(schemas, tables)).to eq(result) }
end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb b/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb
index 7197b99fe33..442fa678d4e 100644
--- a/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb
@@ -194,7 +194,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery, feature_catego
describe '#replace_hosts' do
before do
- stub_env('LOAD_BALANCER_PARALLEL_DISCONNECT', 'true')
allow(service)
.to receive(:load_balancer)
.and_return(load_balancer)
@@ -257,26 +256,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery, feature_catego
service.replace_hosts([address_foo, address_bar])
end
end
-
- context 'when LOAD_BALANCER_PARALLEL_DISCONNECT is false' do
- before do
- stub_env('LOAD_BALANCER_PARALLEL_DISCONNECT', 'false')
- end
-
- it 'disconnects them sequentially' do
- host = load_balancer.host_list.hosts.first
-
- allow(service)
- .to receive(:disconnect_timeout)
- .and_return(2)
-
- expect(host)
- .to receive(:disconnect!)
- .with(timeout: 2)
-
- service.replace_hosts([address_bar])
- end
- end
end
describe '#addresses_from_dns' do
diff --git a/spec/lib/gitlab/database/migration_helpers/swapping_spec.rb b/spec/lib/gitlab/database/migration_helpers/swapping_spec.rb
new file mode 100644
index 00000000000..0940c6f4c30
--- /dev/null
+++ b/spec/lib/gitlab/database/migration_helpers/swapping_spec.rb
@@ -0,0 +1,172 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::MigrationHelpers::Swapping, feature_category: :database do
+ let(:connection) { ApplicationRecord.connection }
+ let(:migration_context) do
+ ActiveRecord::Migration
+ .new
+ .extend(described_class)
+ .extend(Gitlab::Database::MigrationHelpers)
+ end
+
+ let(:service_instance) { instance_double('Gitlab::Database::Migrations::SwapColumns', execute: nil) }
+
+ describe '#reset_trigger_function' do
+ let(:trigger_function_name) { 'existing_trigger_function' }
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE FUNCTION #{trigger_function_name}() RETURNS trigger
+ LANGUAGE plpgsql
+ AS $$
+ BEGIN
+ NEW."bigint_column" := NEW."integer_column";
+ RETURN NEW;
+ END;
+ $$;
+ SQL
+ end
+
+ it 'resets' do
+ recorder = ActiveRecord::QueryRecorder.new do
+ migration_context.reset_trigger_function(trigger_function_name)
+ end
+ expect(recorder.log).to include(/ALTER FUNCTION "existing_trigger_function" RESET ALL/)
+ end
+ end
+
+ describe '#swap_columns' do
+ let(:table) { :ci_pipeline_variables }
+ let(:column1) { :pipeline_id }
+ let(:column2) { :pipeline_id_convert_to_bigint }
+
+ it 'calls service' do
+ expect(::Gitlab::Database::Migrations::SwapColumns).to receive(:new).with(
+ migration_context: migration_context,
+ table: table,
+ column1: column1,
+ column2: column2
+ ).and_return(service_instance)
+
+ migration_context.swap_columns(table, column1, column2)
+ end
+ end
+
+ describe '#swap_columns_default' do
+ let(:table) { :_test_table }
+ let(:column1) { :pipeline_id }
+ let(:column2) { :pipeline_id_convert_to_bigint }
+
+ it 'calls service' do
+ expect(::Gitlab::Database::Migrations::SwapColumnsDefault).to receive(:new).with(
+ migration_context: migration_context,
+ table: table,
+ column1: column1,
+ column2: column2
+ ).and_return(service_instance)
+
+ migration_context.swap_columns_default(table, column1, column2)
+ end
+ end
+
+ describe '#swap_foreign_keys' do
+ let(:table) { :_test_swap_foreign_keys }
+ let(:referenced_table) { "#{table}_referenced" }
+ let(:foreign_key1) { :fkey_on_integer_column }
+ let(:foreign_key2) { :fkey_on_bigint_column }
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{table} (
+ integer_column integer NOT NULL,
+ bigint_column bigint DEFAULT 0 NOT NULL
+ );
+ CREATE TABLE #{referenced_table} (
+ id bigint NOT NULL
+ );
+
+ ALTER TABLE ONLY #{referenced_table}
+ ADD CONSTRAINT pk PRIMARY KEY (id);
+
+ ALTER TABLE ONLY #{table}
+ ADD CONSTRAINT #{foreign_key1}
+ FOREIGN KEY (integer_column) REFERENCES #{referenced_table}(id) ON DELETE SET NULL;
+
+ ALTER TABLE ONLY #{table}
+ ADD CONSTRAINT #{foreign_key2}
+ FOREIGN KEY (bigint_column) REFERENCES #{referenced_table}(id) ON DELETE SET NULL;
+ SQL
+ end
+
+ shared_examples_for 'swapping foreign keys correctly' do
+ specify do
+ expect { migration_context.swap_foreign_keys(table, foreign_key1, foreign_key2) }
+ .to change {
+ find_foreign_key_by(foreign_key1).options[:column]
+ }.from('integer_column').to('bigint_column')
+ .and change {
+ find_foreign_key_by(foreign_key2).options[:column]
+ }.from('bigint_column').to('integer_column')
+ end
+ end
+
+ it_behaves_like 'swapping foreign keys correctly'
+
+ context 'when foreign key names are 63 bytes' do
+ let(:foreign_key1) { :f1_012345678901234567890123456789012345678901234567890123456789 }
+ let(:foreign_key2) { :f2_012345678901234567890123456789012345678901234567890123456789 }
+
+ it_behaves_like 'swapping foreign keys correctly'
+ end
+
+ private
+
+ def find_foreign_key_by(name)
+ connection.foreign_keys(table).find { |k| k.options[:name].to_s == name.to_s }
+ end
+ end
+
+ describe '#swap_indexes' do
+ let(:table) { :_test_swap_indexes }
+ let(:index1) { :index_on_integer }
+ let(:index2) { :index_on_bigint }
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{table} (
+ integer_column integer NOT NULL,
+ bigint_column bigint DEFAULT 0 NOT NULL
+ );
+
+ CREATE INDEX #{index1} ON #{table} USING btree (integer_column);
+
+ CREATE INDEX #{index2} ON #{table} USING btree (bigint_column);
+ SQL
+ end
+
+ shared_examples_for 'swapping indexes correctly' do
+ specify do
+ expect { migration_context.swap_indexes(table, index1, index2) }
+ .to change { find_index_by(index1).columns }.from(['integer_column']).to(['bigint_column'])
+ .and change { find_index_by(index2).columns }.from(['bigint_column']).to(['integer_column'])
+ end
+ end
+
+ it_behaves_like 'swapping indexes correctly'
+
+ context 'when index names are 63 bytes' do
+ let(:index1) { :i1_012345678901234567890123456789012345678901234567890123456789 }
+ let(:index2) { :i2_012345678901234567890123456789012345678901234567890123456789 }
+
+ it_behaves_like 'swapping indexes correctly'
+ end
+
+ private
+
+ def find_index_by(name)
+ connection.indexes(table).find { |c| c.name == name.to_s }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index f3c181db3aa..dd51cca688c 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -1774,6 +1774,35 @@ RSpec.describe Gitlab::Database::MigrationHelpers, feature_category: :database d
end
describe '#copy_indexes' do
+ context 'when index name is too long' do
+ it 'does not fail' do
+ index = double(:index,
+ columns: %w(uuid),
+ name: 'index_vuln_findings_on_uuid_including_vuln_id_1',
+ using: nil,
+ where: nil,
+ opclasses: {},
+ unique: true,
+ lengths: [],
+ orders: [])
+
+ allow(model).to receive(:indexes_for).with(:vulnerability_occurrences, 'uuid')
+ .and_return([index])
+
+ expect(model).to receive(:add_concurrent_index)
+ .with(:vulnerability_occurrences,
+ %w(tmp_undo_cleanup_column_8cbf300838),
+ {
+ unique: true,
+ name: 'idx_copy_191a1af1a0',
+ length: [],
+ order: []
+ })
+
+ model.copy_indexes(:vulnerability_occurrences, :uuid, :tmp_undo_cleanup_column_8cbf300838)
+ end
+ end
+
context 'using a regular index using a single column' do
it 'copies the index' do
index = double(:index,
@@ -2326,6 +2355,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers, feature_category: :database d
end
describe '#revert_initialize_conversion_of_integer_to_bigint' do
+ let(:setup_table) { true }
let(:table) { :_test_table }
before do
@@ -2334,7 +2364,18 @@ RSpec.describe Gitlab::Database::MigrationHelpers, feature_category: :database d
t.integer :other_id
end
- model.initialize_conversion_of_integer_to_bigint(table, columns)
+ model.initialize_conversion_of_integer_to_bigint(table, columns) if setup_table
+ end
+
+ context 'when column and trigger do not exist' do
+ let(:setup_table) { false }
+ let(:columns) { :id }
+
+ it 'does not raise an error' do
+ expect do
+ model.revert_initialize_conversion_of_integer_to_bigint(table, columns)
+ end.not_to raise_error
+ end
end
context 'when single column is given' do
@@ -2906,4 +2947,20 @@ RSpec.describe Gitlab::Database::MigrationHelpers, feature_category: :database d
it { expect(recorder.log).to be_empty }
end
end
+
+ describe '#lock_tables' do
+ let(:lock_statement) do
+ /LOCK TABLE ci_builds, ci_pipelines IN ACCESS EXCLUSIVE MODE/
+ end
+
+ subject(:recorder) do
+ ActiveRecord::QueryRecorder.new do
+ model.lock_tables(:ci_builds, :ci_pipelines)
+ end
+ end
+
+ it 'locks the tables' do
+ expect(recorder.log).to include(lock_statement)
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
index 158497b1fef..f1271f2434c 100644
--- a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers do
+RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers, feature_category: :database do
let(:migration_class) do
Class.new(ActiveRecord::Migration[6.1])
.include(described_class)
@@ -70,39 +70,54 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
end
end
- it 'creates the database record for the migration' do
- expect(Gitlab::Database::PgClass).to receive(:for_table).with(:projects).and_return(pgclass_info)
+ context "when the migration doesn't exist already" do
+ before do
+ allow(Gitlab::Database::PgClass).to receive(:for_table).with(:projects).and_return(pgclass_info)
+ end
- expect do
+ subject(:enqueue_batched_background_migration) do
migration.queue_batched_background_migration(
job_class.name,
:projects,
:id,
job_interval: 5.minutes,
+ queued_migration_version: format("%.14d", 123),
batch_min_value: 5,
batch_max_value: 1000,
batch_class_name: 'MyBatchClass',
batch_size: 100,
max_batch_size: 10000,
sub_batch_size: 10,
- gitlab_schema: :gitlab_ci)
- end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
-
- expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to have_attributes(
- job_class_name: 'MyJobClass',
- table_name: 'projects',
- column_name: 'id',
- interval: 300,
- min_value: 5,
- max_value: 1000,
- batch_class_name: 'MyBatchClass',
- batch_size: 100,
- max_batch_size: 10000,
- sub_batch_size: 10,
- job_arguments: %w[],
- status_name: :active,
- total_tuple_count: pgclass_info.cardinality_estimate,
- gitlab_schema: 'gitlab_ci')
+ gitlab_schema: :gitlab_ci
+ )
+ end
+
+ it 'enqueues exactly one batched migration' do
+ expect { enqueue_batched_background_migration }
+ .to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
+ end
+
+ it 'creates the database record for the migration' do
+ batched_background_migration = enqueue_batched_background_migration
+
+ expect(batched_background_migration.reload).to have_attributes(
+ job_class_name: 'MyJobClass',
+ table_name: 'projects',
+ column_name: 'id',
+ interval: 300,
+ min_value: 5,
+ max_value: 1000,
+ batch_class_name: 'MyBatchClass',
+ batch_size: 100,
+ max_batch_size: 10000,
+ sub_batch_size: 10,
+ job_arguments: %w[],
+ status_name: :active,
+ total_tuple_count: pgclass_info.cardinality_estimate,
+ gitlab_schema: 'gitlab_ci',
+ queued_migration_version: format("%.14d", 123)
+ )
+ end
end
context 'when the job interval is lower than the minimum' do
diff --git a/spec/lib/gitlab/database/migrations/milestone_mixin_spec.rb b/spec/lib/gitlab/database/migrations/milestone_mixin_spec.rb
new file mode 100644
index 00000000000..e375af494a2
--- /dev/null
+++ b/spec/lib/gitlab/database/migrations/milestone_mixin_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Migrations::MilestoneMixin, feature_category: :database do
+ let(:migration_no_mixin) do
+ Class.new(Gitlab::Database::Migration[2.1]) do
+ def change
+ # no-op here to make rubocop happy
+ end
+ end
+ end
+
+ let(:migration_mixin) do
+ Class.new(Gitlab::Database::Migration[2.1]) do
+ include Gitlab::Database::Migrations::MilestoneMixin
+ end
+ end
+
+ let(:migration_mixin_version) do
+ Class.new(Gitlab::Database::Migration[2.1]) do
+ include Gitlab::Database::Migrations::MilestoneMixin
+ milestone '16.4'
+ end
+ end
+
+ context 'when the mixin is not included' do
+ it 'does not raise an error' do
+ expect { migration_no_mixin.new(4, 4) }.not_to raise_error
+ end
+ end
+
+ context 'when the mixin is included' do
+ context 'when a milestone is not specified' do
+ it "raises MilestoneNotSetError" do
+ expect { migration_mixin.new(4, 4, :regular) }.to raise_error(
+ "#{described_class}::MilestoneNotSetError".constantize
+ )
+ end
+ end
+
+ context 'when a milestone is specified' do
+ it "does not raise an error" do
+ expect { migration_mixin_version.new(4, 4, :regular) }.not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb b/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb
index 66de25d65bb..330c9d18fb2 100644
--- a/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb
+++ b/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb
@@ -41,7 +41,13 @@ RSpec.describe Gitlab::Database::Migrations::Observers::QueryStatistics do
let(:result) { double }
let(:pgss_query) do
<<~SQL
- SELECT query, calls, total_time, max_time, mean_time, rows
+ SELECT
+ query,
+ calls,
+ total_exec_time + total_plan_time AS total_time,
+ max_exec_time + max_plan_time AS max_time,
+ mean_exec_time + mean_plan_time AS mean_time,
+ "rows"
FROM pg_stat_statements
WHERE pg_get_userbyid(userid) = current_user
ORDER BY total_time DESC
diff --git a/spec/lib/gitlab/database/migrations/swap_columns_default_spec.rb b/spec/lib/gitlab/database/migrations/swap_columns_default_spec.rb
new file mode 100644
index 00000000000..e53480d453e
--- /dev/null
+++ b/spec/lib/gitlab/database/migrations/swap_columns_default_spec.rb
@@ -0,0 +1,118 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Migrations::SwapColumnsDefault, feature_category: :database do
+ describe '#execute' do
+ let(:connection) { ApplicationRecord.connection }
+ let(:migration_context) do
+ Gitlab::Database::Migration[2.1]
+ .new('name', 'version')
+ .extend(Gitlab::Database::MigrationHelpers::Swapping)
+ end
+
+ let(:table) { :_test_swap_columns_and_defaults }
+ let(:column1) { :integer_column }
+ let(:column2) { :bigint_column }
+
+ subject(:execute_service) do
+ described_class.new(
+ migration_context: migration_context,
+ table: table,
+ column1: column1,
+ column2: column2
+ ).execute
+ end
+
+ before do
+ connection.execute(sql)
+ end
+
+ context 'when defaults are static values' do
+ let(:sql) do
+ <<~SQL
+ CREATE TABLE #{table} (
+ id integer NOT NULL,
+ #{column1} integer DEFAULT 8 NOT NULL,
+ #{column2} bigint DEFAULT 100 NOT NULL
+ );
+ SQL
+ end
+
+ it 'swaps the default correctly' do
+ expect { execute_service }
+ .to change { find_column_by(column1).default }.to('100')
+ .and change { find_column_by(column2).default }.to('8')
+ .and not_change { find_column_by(column1).default_function }.from(nil)
+ .and not_change { find_column_by(column2).default_function }.from(nil)
+ end
+ end
+
+ context 'when default is sequence' do
+ let(:sql) do
+ <<~SQL
+ CREATE TABLE #{table} (
+ id integer NOT NULL,
+ #{column1} integer NOT NULL,
+ #{column2} bigint DEFAULT 100 NOT NULL
+ );
+
+ CREATE SEQUENCE #{table}_seq
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+ ALTER SEQUENCE #{table}_seq OWNED BY #{table}.#{column1};
+ ALTER TABLE ONLY #{table} ALTER COLUMN #{column1} SET DEFAULT nextval('#{table}_seq'::regclass);
+ SQL
+ end
+
+ it 'swaps the default correctly' do
+ recorder = nil
+ expect { recorder = ActiveRecord::QueryRecorder.new { execute_service } }
+ .to change { find_column_by(column1).default }.to('100')
+ .and change { find_column_by(column1).default_function }.to(nil)
+ .and change { find_column_by(column2).default }.to(nil)
+ .and change {
+ find_column_by(column2).default_function
+ }.to("nextval('_test_swap_columns_and_defaults_seq'::regclass)")
+ expect(recorder.log).to include(
+ /SEQUENCE "_test_swap_columns_and_defaults_seq" OWNED BY "_test_swap_columns_and_defaults"."bigint_column"/
+ )
+ expect(recorder.log).to include(
+ /COLUMN "bigint_column" SET DEFAULT nextval\('_test_swap_columns_and_defaults_seq'::regclass\)/
+ )
+ end
+ end
+
+ context 'when defaults are the same' do
+ let(:sql) do
+ <<~SQL
+ CREATE TABLE #{table} (
+ id integer NOT NULL,
+ #{column1} integer DEFAULT 100 NOT NULL,
+ #{column2} bigint DEFAULT 100 NOT NULL
+ );
+ SQL
+ end
+
+ it 'does nothing' do
+ recorder = nil
+ expect { recorder = ActiveRecord::QueryRecorder.new { execute_service } }
+ .to not_change { find_column_by(column1).default }
+ .and not_change { find_column_by(column1).default_function }
+ .and not_change { find_column_by(column2).default }
+ .and not_change { find_column_by(column2).default_function }
+ expect(recorder.log).not_to include(/ALTER TABLE/)
+ end
+ end
+
+ private
+
+ def find_column_by(name)
+ connection.columns(table).find { |c| c.name == name.to_s }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/migrations/swap_columns_spec.rb b/spec/lib/gitlab/database/migrations/swap_columns_spec.rb
new file mode 100644
index 00000000000..a119b23dda4
--- /dev/null
+++ b/spec/lib/gitlab/database/migrations/swap_columns_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Migrations::SwapColumns, feature_category: :database do
+ describe '#execute' do
+ let(:connection) { ApplicationRecord.connection }
+ let(:sql) do
+ <<~SQL
+ CREATE TABLE #{table} (
+ id integer NOT NULL,
+ #{column1} integer DEFAULT 8 NOT NULL,
+ #{column2} bigint DEFAULT 100 NOT NULL
+ );
+ SQL
+ end
+
+ let(:migration_context) do
+ Gitlab::Database::Migration[2.1]
+ .new('name', 'version')
+ .extend(Gitlab::Database::MigrationHelpers::Swapping)
+ end
+
+ let(:table) { :_test_swap_columns_and_defaults }
+ let(:column1) { :integer_column }
+ let(:column2) { :bigint_column }
+
+ subject(:execute_service) do
+ described_class.new(
+ migration_context: migration_context,
+ table: table,
+ column1: column1,
+ column2: column2
+ ).execute
+ end
+
+ before do
+ connection.execute(sql)
+ end
+
+ shared_examples_for 'swapping columns correctly' do
+ specify do
+ expect { execute_service }
+ .to change { find_column_by(column1).sql_type }.from('integer').to('bigint')
+ .and change { find_column_by(column2).sql_type }.from('bigint').to('integer')
+ end
+ end
+
+ it_behaves_like 'swapping columns correctly'
+
+ context 'when column names are 63 bytes' do
+ let(:column1) { :int012345678901234567890123456789012345678901234567890123456789 }
+ let(:column2) { :big012345678901234567890123456789012345678901234567890123456789 }
+
+ it_behaves_like 'swapping columns correctly'
+ end
+
+ private
+
+ def find_column_by(name)
+ connection.columns(table).find { |c| c.name == name.to_s }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/migrations/version_spec.rb b/spec/lib/gitlab/database/migrations/version_spec.rb
new file mode 100644
index 00000000000..821a2156539
--- /dev/null
+++ b/spec/lib/gitlab/database/migrations/version_spec.rb
@@ -0,0 +1,120 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Migrations::Version, feature_category: :database do
+ let(:test_versions) do
+ [
+ 4,
+ 5,
+ described_class.new(6, Gitlab::VersionInfo.parse_from_milestone('10.3'), :regular),
+ 7,
+ described_class.new(8, Gitlab::VersionInfo.parse_from_milestone('10.3'), :regular),
+ described_class.new(9, Gitlab::VersionInfo.parse_from_milestone('10.4'), :regular),
+ described_class.new(10, Gitlab::VersionInfo.parse_from_milestone('10.3'), :post),
+ described_class.new(11, Gitlab::VersionInfo.parse_from_milestone('10.3'), :regular)
+ ]
+ end
+
+ describe "#<=>" do
+ it 'sorts by existence of milestone, then by milestone, then by type, then by timestamp when sorted by version' do
+ expect(test_versions.sort.map(&:to_i)).to eq [4, 5, 7, 6, 8, 11, 10, 9]
+ end
+ end
+
+ describe 'initialize' do
+ context 'when the type is :post or :regular' do
+ it 'does not raise an error' do
+ expect { described_class.new(4, 4, :regular) }.not_to raise_error
+ expect { described_class.new(4, 4, :post) }.not_to raise_error
+ end
+ end
+
+ context 'when the type is anything else' do
+ it 'does not raise an error' do
+ expect { described_class.new(4, 4, 'foo') }.to raise_error("#{described_class}::InvalidTypeError".constantize)
+ end
+ end
+ end
+
+ describe 'eql?' do
+ where(:version1, :version2, :expected_equality) do
+ [
+ [
+ described_class.new(4, Gitlab::VersionInfo.parse_from_milestone('10.3'), :regular),
+ described_class.new(4, Gitlab::VersionInfo.parse_from_milestone('10.3'), :regular),
+ true
+ ],
+ [
+ described_class.new(4, Gitlab::VersionInfo.parse_from_milestone('10.3'), :regular),
+ described_class.new(4, Gitlab::VersionInfo.parse_from_milestone('10.4'), :regular),
+ false
+ ],
+ [
+ described_class.new(4, Gitlab::VersionInfo.parse_from_milestone('10.3'), :regular),
+ described_class.new(4, Gitlab::VersionInfo.parse_from_milestone('10.3'), :post),
+ false
+ ],
+ [
+ described_class.new(4, Gitlab::VersionInfo.parse_from_milestone('10.3'), :regular),
+ described_class.new(5, Gitlab::VersionInfo.parse_from_milestone('10.3'), :regular),
+ false
+ ]
+ ]
+ end
+
+ with_them do
+ it 'correctly evaluates deep equality' do
+ expect(version1.eql?(version2)).to eq(expected_equality)
+ end
+
+ it 'correctly evaluates deep equality using ==' do
+ expect(version1 == version2).to eq(expected_equality)
+ end
+ end
+ end
+
+ describe 'type' do
+ subject { described_class.new(4, Gitlab::VersionInfo.parse_from_milestone('10.3'), migration_type) }
+
+ context 'when the migration is regular' do
+ let(:migration_type) { :regular }
+
+ it 'correctly identifies the migration type' do
+ expect(subject.type).to eq(:regular)
+ expect(subject.regular?).to eq(true)
+ expect(subject.post_deployment?).to eq(false)
+ end
+ end
+
+ context 'when the migration is post_deployment' do
+ let(:migration_type) { :post }
+
+ it 'correctly identifies the migration type' do
+ expect(subject.type).to eq(:post)
+ expect(subject.regular?).to eq(false)
+ expect(subject.post_deployment?).to eq(true)
+ end
+ end
+ end
+
+ describe 'to_s' do
+ subject { described_class.new(4, Gitlab::VersionInfo.parse_from_milestone('10.3'), :regular) }
+
+ it 'returns the given timestamp value as a string' do
+ expect(subject.to_s).to eql('4')
+ end
+ end
+
+ describe 'hash' do
+ subject { described_class.new(4, Gitlab::VersionInfo.parse_from_milestone('10.3'), :regular) }
+
+ let(:expected_hash) { subject.hash }
+
+ it 'deterministically returns a hash of the timestamp, milestone, and type value' do
+ 3.times do
+ expect(subject.hash).to eq(expected_hash)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb b/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb
index 2fa4c9e562f..c6cd5e55754 100644
--- a/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb
+++ b/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb
@@ -23,8 +23,6 @@ RSpec.describe 'cross-database foreign keys' do
'merge_requests.merge_user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422080
'merge_requests.author_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422080
'project_authorizations.user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422044
- 'projects.creator_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/421844
- 'projects.marked_for_deletion_by_user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/421844
'user_group_callouts.user_id' # https://gitlab.com/gitlab-org/gitlab/-/issues/421287
]
end
@@ -34,9 +32,11 @@ RSpec.describe 'cross-database foreign keys' do
end
def is_cross_db?(fk_record)
- table_schemas = Gitlab::Database::GitlabSchema.table_schemas!([fk_record.from_table, fk_record.to_table])
+ tables = [fk_record.from_table, fk_record.to_table]
- !Gitlab::Database::GitlabSchema.cross_foreign_key_allowed?(table_schemas)
+ table_schemas = Gitlab::Database::GitlabSchema.table_schemas!(tables)
+
+ !Gitlab::Database::GitlabSchema.cross_foreign_key_allowed?(table_schemas, tables)
end
it 'onlies have allowed list of cross-database foreign keys', :aggregate_failures do
diff --git a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
index c41228777ca..80ffa708d8a 100644
--- a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
@@ -322,74 +322,33 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager, feature_categor
allow(connection).to receive(:select_value).and_return(nil, Time.current, Time.current)
end
- context 'when feature flag database_analyze_on_partitioned_tables is enabled' do
- before do
- stub_feature_flags(database_analyze_on_partitioned_tables: true)
- end
-
- it_behaves_like 'run only once analyze within interval'
+ it_behaves_like 'run only once analyze within interval'
- context 'when analyze is false' do
- let(:analyze) { false }
+ context 'when analyze is false' do
+ let(:analyze) { false }
- it_behaves_like 'not to run the analyze at all'
- end
+ it_behaves_like 'not to run the analyze at all'
+ end
- context 'when model does not set analyze_interval' do
- let(:my_model) do
- Class.new(ApplicationRecord) do
- include PartitionedTable
+ context 'when model does not set analyze_interval' do
+ let(:my_model) do
+ Class.new(ApplicationRecord) do
+ include PartitionedTable
- partitioned_by :partition_id,
- strategy: :ci_sliding_list,
- next_partition_if: proc { false },
- detach_partition_if: proc { false }
- end
+ partitioned_by :partition_id,
+ strategy: :ci_sliding_list,
+ next_partition_if: proc { false },
+ detach_partition_if: proc { false }
end
-
- it_behaves_like 'not to run the analyze at all'
- end
-
- context 'when no partition is created' do
- let(:create_partition) { false }
-
- it_behaves_like 'run only once analyze within interval'
- end
- end
-
- context 'when feature flag database_analyze_on_partitioned_tables is disabled' do
- before do
- stub_feature_flags(database_analyze_on_partitioned_tables: false)
end
it_behaves_like 'not to run the analyze at all'
+ end
- context 'when analyze is false' do
- let(:analyze) { false }
-
- it_behaves_like 'not to run the analyze at all'
- end
-
- context 'when model does not set analyze_interval' do
- let(:my_model) do
- Class.new(ApplicationRecord) do
- include PartitionedTable
-
- partitioned_by :partition_id,
- strategy: :ci_sliding_list,
- next_partition_if: proc { false },
- detach_partition_if: proc { false }
- end
- end
-
- it_behaves_like 'not to run the analyze at all'
- end
-
- context 'when no partition is created' do
- let(:create_partition) { false }
+ context 'when no partition is created' do
+ let(:create_partition) { false }
- it_behaves_like 'not to run the analyze at all'
- end
+ it_behaves_like 'run only once analyze within interval'
end
end
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
deleted file mode 100644
index 370d03b495c..00000000000
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
+++ /dev/null
@@ -1,292 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :delete, feature_category: :groups_and_projects do
- let(:migration) { FakeRenameReservedPathMigrationV1.new }
- let(:subject) { described_class.new(['the-path'], migration) }
-
- before do
- allow(migration).to receive(:say)
- TestEnv.clean_test_path
- end
-
- def migration_namespace(namespace)
- Gitlab::Database::RenameReservedPathsMigration::V1::MigrationClasses::
- Namespace.find(namespace.id)
- end
-
- def migration_project(project)
- Gitlab::Database::RenameReservedPathsMigration::V1::MigrationClasses::
- Project.find(project.id)
- end
-
- describe "#remove_last_occurrence" do
- it "removes only the last occurrence of a string" do
- input = "this/is/a-word-to-replace/namespace/with/a-word-to-replace"
-
- expect(subject.remove_last_occurrence(input, "a-word-to-replace"))
- .to eq("this/is/a-word-to-replace/namespace/with/")
- end
- end
-
- describe '#remove_cached_html_for_projects' do
- let(:project) { create(:project, description_html: 'Project description') }
-
- it 'removes description_html from projects' do
- subject.remove_cached_html_for_projects([project.id])
-
- expect(project.reload.description_html).to be_nil
- end
-
- it 'removes issue descriptions' do
- issue = create(:issue, project: project, description_html: 'Issue description')
-
- subject.remove_cached_html_for_projects([project.id])
-
- expect(issue.reload.description_html).to be_nil
- end
-
- it 'removes merge request descriptions' do
- merge_request = create(:merge_request,
- source_project: project,
- target_project: project,
- description_html: 'MergeRequest description')
-
- subject.remove_cached_html_for_projects([project.id])
-
- expect(merge_request.reload.description_html).to be_nil
- end
-
- it 'removes note html' do
- note = create(:note,
- project: project,
- noteable: create(:issue, project: project),
- note_html: 'note description')
-
- subject.remove_cached_html_for_projects([project.id])
-
- expect(note.reload.note_html).to be_nil
- end
-
- it 'removes milestone description' do
- milestone = create(:milestone,
- project: project,
- description_html: 'milestone description')
-
- subject.remove_cached_html_for_projects([project.id])
-
- expect(milestone.reload.description_html).to be_nil
- end
- end
-
- describe '#rename_path_for_routable' do
- context 'for personal namespaces' do
- let(:namespace) { create(:namespace, path: 'the-path') }
-
- it "renames namespaces called the-path" do
- subject.rename_path_for_routable(migration_namespace(namespace))
-
- expect(namespace.reload.path).to eq("the-path0")
- end
-
- it "renames the route to the namespace" do
- subject.rename_path_for_routable(migration_namespace(namespace))
-
- expect(Namespace.find(namespace.id).full_path).to eq("the-path0")
- end
-
- it "renames the route for projects of the namespace" do
- project = create(:project, :repository, path: "project-path", namespace: namespace)
-
- subject.rename_path_for_routable(migration_namespace(namespace))
-
- expect(project.route.reload.path).to eq("the-path0/project-path")
- end
-
- it 'returns the old & the new path' do
- old_path, new_path = subject.rename_path_for_routable(migration_namespace(namespace))
-
- expect(old_path).to eq('the-path')
- expect(new_path).to eq('the-path0')
- end
-
- it "doesn't rename routes that start with a similar name" do
- other_namespace = create(:namespace, path: 'the-path-but-not-really')
- project = create(:project, path: 'the-project', namespace: other_namespace)
-
- subject.rename_path_for_routable(migration_namespace(namespace))
-
- expect(project.route.reload.path).to eq('the-path-but-not-really/the-project')
- end
- end
-
- context 'for groups' do
- context "the-path group -> subgroup -> the-path0 project" do
- it "updates the route of the project correctly" do
- group = create(:group, path: 'the-path')
- subgroup = create(:group, path: "subgroup", parent: group)
- project = create(:project, :repository, path: "the-path0", namespace: subgroup)
-
- subject.rename_path_for_routable(migration_namespace(group))
-
- expect(project.route.reload.path).to eq("the-path0/subgroup/the-path0")
- end
- end
- end
-
- context 'for projects' do
- let(:parent) { create(:namespace, path: 'the-parent') }
- let(:project) { create(:project, path: 'the-path', namespace: parent) }
-
- it 'renames the project called `the-path`' do
- subject.rename_path_for_routable(migration_project(project))
-
- expect(project.reload.path).to eq('the-path0')
- end
-
- it 'renames the route for the project' do
- subject.rename_path_for_routable(project)
-
- expect(project.reload.route.path).to eq('the-parent/the-path0')
- end
-
- it 'returns the old & new path' do
- old_path, new_path = subject.rename_path_for_routable(migration_project(project))
-
- expect(old_path).to eq('the-parent/the-path')
- expect(new_path).to eq('the-parent/the-path0')
- end
- end
- end
-
- describe '#perform_rename' do
- context 'for personal namespaces' do
- it 'renames the path' do
- namespace = create(:namespace, path: 'the-path')
-
- subject.perform_rename(migration_namespace(namespace), 'the-path', 'renamed')
-
- expect(namespace.reload.path).to eq('renamed')
- expect(namespace.reload.route.path).to eq('renamed')
- end
- end
-
- context 'for groups' do
- it 'renames all the routes for the group' do
- group = create(:group, path: 'the-path')
- child = create(:group, path: 'child', parent: group)
- project = create(:project, :repository, namespace: child, path: 'the-project')
- other_one = create(:group, path: 'the-path-is-similar')
-
- subject.perform_rename(migration_namespace(group), 'the-path', 'renamed')
-
- expect(group.reload.route.path).to eq('renamed')
- expect(child.reload.route.path).to eq('renamed/child')
- expect(project.reload.route.path).to eq('renamed/child/the-project')
- expect(other_one.reload.route.path).to eq('the-path-is-similar')
- end
- end
- end
-
- describe '#move_pages' do
- it 'moves the pages directory' do
- expect(subject).to receive(:move_folders)
- .with(TestEnv.pages_path, 'old-path', 'new-path')
-
- subject.move_pages('old-path', 'new-path')
- end
- end
-
- describe "#move_uploads" do
- let(:test_dir) { File.join(Rails.root, 'tmp', 'tests', 'rename_reserved_paths') }
- let(:uploads_dir) { File.join(test_dir, 'public', 'uploads') }
-
- it 'moves subdirectories in the uploads folder' do
- expect(subject).to receive(:uploads_dir).and_return(uploads_dir)
- expect(subject).to receive(:move_folders).with(uploads_dir, 'old_path', 'new_path')
-
- subject.move_uploads('old_path', 'new_path')
- end
-
- it "doesn't move uploads when they are stored in object storage" do
- expect(subject).to receive(:file_storage?).and_return(false)
- expect(subject).not_to receive(:move_folders)
-
- subject.move_uploads('old_path', 'new_path')
- end
- end
-
- describe '#move_folders' do
- let(:test_dir) { File.join(Rails.root, 'tmp', 'tests', 'rename_reserved_paths') }
- let(:uploads_dir) { File.join(test_dir, 'public', 'uploads') }
-
- before do
- FileUtils.remove_dir(test_dir) if File.directory?(test_dir)
- FileUtils.mkdir_p(uploads_dir)
- allow(subject).to receive(:uploads_dir).and_return(uploads_dir)
- end
-
- it 'moves a folder with files' do
- source = File.join(uploads_dir, 'parent-group', 'sub-group')
- FileUtils.mkdir_p(source)
- destination = File.join(uploads_dir, 'parent-group', 'moved-group')
- FileUtils.touch(File.join(source, 'test.txt'))
- expected_file = File.join(destination, 'test.txt')
-
- subject.move_folders(uploads_dir, File.join('parent-group', 'sub-group'), File.join('parent-group', 'moved-group'))
-
- expect(File.exist?(expected_file)).to be(true)
- end
- end
-
- describe '#track_rename', :redis do
- it 'tracks a rename in redis' do
- key = 'rename:FakeRenameReservedPathMigrationV1:namespace'
-
- subject.track_rename('namespace', 'path/to/namespace', 'path/to/renamed')
-
- old_path = nil
- new_path = nil
- Gitlab::Redis::SharedState.with do |redis|
- rename_info = redis.lpop(key)
- old_path, new_path = Gitlab::Json.parse(rename_info)
- end
-
- expect(old_path).to eq('path/to/namespace')
- expect(new_path).to eq('path/to/renamed')
- end
- end
-
- describe '#reverts_for_type', :redis do
- it 'yields for each tracked rename' do
- subject.track_rename('project', 'old_path', 'new_path')
- subject.track_rename('project', 'old_path2', 'new_path2')
- subject.track_rename('namespace', 'namespace_path', 'new_namespace_path')
-
- expect { |b| subject.reverts_for_type('project', &b) }
- .to yield_successive_args(%w(old_path2 new_path2), %w(old_path new_path))
- expect { |b| subject.reverts_for_type('namespace', &b) }
- .to yield_with_args('namespace_path', 'new_namespace_path')
- end
-
- it 'keeps the revert in redis if it failed' do
- subject.track_rename('project', 'old_path', 'new_path')
-
- subject.reverts_for_type('project') do
- raise 'whatever happens, keep going!'
- end
-
- key = 'rename:FakeRenameReservedPathMigrationV1:project'
- stored_renames = nil
- rename_count = 0
- Gitlab::Redis::SharedState.with do |redis|
- stored_renames = redis.lrange(key, 0, 1)
- rename_count = redis.llen(key)
- end
-
- expect(rename_count).to eq(1)
- expect(Gitlab::Json.parse(stored_renames.first)).to eq(%w(old_path new_path))
- end
- end
-end
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
deleted file mode 100644
index b00a1d4a9e1..00000000000
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
+++ /dev/null
@@ -1,313 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :delete,
-feature_category: :groups_and_projects do
- let(:migration) { FakeRenameReservedPathMigrationV1.new }
- let(:subject) { described_class.new(['the-path'], migration) }
- let(:namespace) { create(:group, name: 'the-path') }
-
- before do
- allow(migration).to receive(:say)
- TestEnv.clean_test_path
- end
-
- def migration_namespace(namespace)
- Gitlab::Database::RenameReservedPathsMigration::V1::MigrationClasses::
- Namespace.find(namespace.id)
- end
-
- describe '#namespaces_for_paths' do
- context 'nested namespaces' do
- let(:subject) { described_class.new(['parent/the-Path'], migration) }
-
- it 'includes the namespace' do
- parent = create(:group, path: 'parent')
- child = create(:group, path: 'the-path', parent: parent)
-
- found_ids = subject.namespaces_for_paths(type: :child)
- .map(&:id)
-
- expect(found_ids).to contain_exactly(child.id)
- end
- end
-
- context 'for child namespaces' do
- it 'only returns child namespaces with the correct path' do
- _root_namespace = create(:group, path: 'THE-path')
- _other_path = create(:group,
- path: 'other',
- parent: create(:group))
- namespace = create(:group,
- path: 'the-path',
- parent: create(:group))
-
- found_ids = subject.namespaces_for_paths(type: :child)
- .map(&:id)
-
- expect(found_ids).to contain_exactly(namespace.id)
- end
-
- it 'has no namespaces that look the same' do
- _root_namespace = create(:group, path: 'THE-path')
- _similar_path = create(:group,
- path: 'not-really-the-path',
- parent: create(:group))
- namespace = create(:group,
- path: 'the-path',
- parent: create(:group))
-
- found_ids = subject.namespaces_for_paths(type: :child)
- .map(&:id)
-
- expect(found_ids).to contain_exactly(namespace.id)
- end
- end
-
- context 'for top levelnamespaces' do
- it 'only returns child namespaces with the correct path' do
- root_namespace = create(:group, path: 'the-path')
- _other_path = create(:group, path: 'other')
- _child_namespace = create(:group,
- path: 'the-path',
- parent: create(:group))
-
- found_ids = subject.namespaces_for_paths(type: :top_level)
- .map(&:id)
-
- expect(found_ids).to contain_exactly(root_namespace.id)
- end
-
- it 'has no namespaces that just look the same' do
- root_namespace = create(:group, path: 'the-path')
- _similar_path = create(:group, path: 'not-really-the-path')
- _child_namespace = create(:group,
- path: 'the-path',
- parent: create(:group))
-
- found_ids = subject.namespaces_for_paths(type: :top_level)
- .map(&:id)
-
- expect(found_ids).to contain_exactly(root_namespace.id)
- end
- end
- end
-
- describe '#move_repositories' do
- let(:namespace) { create(:group, name: 'hello-group') }
-
- it 'moves a project for a namespace' do
- project = create(:project, :repository, :legacy_storage, namespace: namespace, path: 'hello-project')
- expected_repository = Gitlab::Git::Repository.new(
- project.repository_storage,
- 'bye-group/hello-project.git',
- nil,
- nil
- )
-
- subject.move_repositories(namespace, 'hello-group', 'bye-group')
-
- expect(expected_repository).to exist
- end
-
- it 'moves a namespace in a subdirectory correctly' do
- child_namespace = create(:group, name: 'sub-group', parent: namespace)
- project = create(:project, :repository, :legacy_storage, namespace: child_namespace, path: 'hello-project')
-
- expected_repository = Gitlab::Git::Repository.new(
- project.repository_storage,
- 'hello-group/renamed-sub-group/hello-project.git',
- nil,
- nil
- )
-
- subject.move_repositories(child_namespace, 'hello-group/sub-group', 'hello-group/renamed-sub-group')
-
- expect(expected_repository).to exist
- end
-
- it 'moves a parent namespace with subdirectories' do
- child_namespace = create(:group, name: 'sub-group', parent: namespace)
- project = create(:project, :repository, :legacy_storage, namespace: child_namespace, path: 'hello-project')
- expected_repository = Gitlab::Git::Repository.new(
- project.repository_storage,
- 'renamed-group/sub-group/hello-project.git',
- nil,
- nil
- )
-
- subject.move_repositories(child_namespace, 'hello-group', 'renamed-group')
-
- expect(expected_repository).to exist
- end
- end
-
- describe "#child_ids_for_parent" do
- it "collects child ids for all levels" do
- parent = create(:group)
- first_child = create(:group, parent: parent)
- second_child = create(:group, parent: parent)
- third_child = create(:group, parent: second_child)
- all_ids = [parent.id, first_child.id, second_child.id, third_child.id]
-
- collected_ids = subject.child_ids_for_parent(parent, ids: [parent.id])
-
- expect(collected_ids).to contain_exactly(*all_ids)
- end
- end
-
- describe "#rename_namespace" do
- it 'renames paths & routes for the namespace' do
- expect(subject).to receive(:rename_path_for_routable)
- .with(namespace)
- .and_call_original
-
- subject.rename_namespace(namespace)
-
- expect(namespace.reload.path).to eq('the-path0')
- end
-
- it 'tracks the rename' do
- expect(subject).to receive(:track_rename)
- .with('namespace', 'the-path', 'the-path0')
-
- subject.rename_namespace(namespace)
- end
-
- it 'renames things related to the namespace' do
- expect(subject).to receive(:rename_namespace_dependencies)
- .with(namespace, 'the-path', 'the-path0')
-
- subject.rename_namespace(namespace)
- end
- end
-
- describe '#rename_namespace_dependencies' do
- it "moves the repository for a project in the namespace" do
- project = create(:project, :repository, :legacy_storage, namespace: namespace, path: "the-path-project")
- expected_repository = Gitlab::Git::Repository.new(
- project.repository_storage,
- "the-path0/the-path-project.git",
- nil,
- nil
- )
-
- subject.rename_namespace_dependencies(namespace, 'the-path', 'the-path0')
-
- expect(expected_repository).to exist
- end
-
- it "moves the uploads for the namespace" do
- expect(subject).to receive(:move_uploads).with("the-path", "the-path0")
-
- subject.rename_namespace_dependencies(namespace, 'the-path', 'the-path0')
- end
-
- it "moves the pages for the namespace" do
- expect(subject).to receive(:move_pages).with("the-path", "the-path0")
-
- subject.rename_namespace_dependencies(namespace, 'the-path', 'the-path0')
- end
-
- it 'invalidates the markdown cache of related projects' do
- project = create(:project, :legacy_storage, namespace: namespace, path: "the-path-project")
-
- expect(subject).to receive(:remove_cached_html_for_projects).with([project.id])
-
- subject.rename_namespace_dependencies(namespace, 'the-path', 'the-path0')
- end
-
- it "doesn't rename users for other namespaces" do
- expect(subject).not_to receive(:rename_user)
-
- subject.rename_namespace_dependencies(namespace, 'the-path', 'the-path0')
- end
-
- it 'renames the username of a namespace for a user' do
- user = create(:user, username: 'the-path')
-
- expect(subject).to receive(:rename_user).with('the-path', 'the-path0')
-
- subject.rename_namespace_dependencies(user.namespace, 'the-path', 'the-path0')
- end
- end
-
- describe '#rename_user' do
- it 'renames a username' do
- subject = described_class.new([], migration)
- user = create(:user, username: 'broken')
-
- subject.rename_user('broken', 'broken0')
-
- expect(user.reload.username).to eq('broken0')
- end
- end
-
- describe '#rename_namespaces' do
- let!(:top_level_namespace) { create(:group, path: 'the-path') }
- let!(:child_namespace) do
- create(:group, path: 'the-path', parent: create(:group))
- end
-
- it 'renames top level namespaces the namespace' do
- expect(subject).to receive(:rename_namespace)
- .with(migration_namespace(top_level_namespace))
-
- subject.rename_namespaces(type: :top_level)
- end
-
- it 'renames child namespaces' do
- expect(subject).to receive(:rename_namespace)
- .with(migration_namespace(child_namespace))
-
- subject.rename_namespaces(type: :child)
- end
- end
-
- describe '#revert_renames', :redis do
- it 'renames the routes back to the previous values' do
- project = create(:project, :legacy_storage, :repository, path: 'a-project', namespace: namespace)
- subject.rename_namespace(namespace)
-
- expect(subject).to receive(:perform_rename)
- .with(
- kind_of(Gitlab::Database::RenameReservedPathsMigration::V1::MigrationClasses::Namespace),
- 'the-path0',
- 'the-path'
- ).and_call_original
-
- subject.revert_renames
-
- expect(namespace.reload.path).to eq('the-path')
- expect(namespace.reload.route.path).to eq('the-path')
- expect(project.reload.route.path).to eq('the-path/a-project')
- end
-
- it 'moves the repositories back to their original place' do
- project = create(:project, :repository, :legacy_storage, path: 'a-project', namespace: namespace)
- project.create_repository
- subject.rename_namespace(namespace)
-
- expected_repository = Gitlab::Git::Repository.new(project.repository_storage, 'the-path/a-project.git', nil, nil)
-
- expect(subject).to receive(:rename_namespace_dependencies)
- .with(
- kind_of(Gitlab::Database::RenameReservedPathsMigration::V1::MigrationClasses::Namespace),
- 'the-path0',
- 'the-path'
- ).and_call_original
-
- subject.revert_renames
-
- expect(expected_repository).to exist
- end
-
- it "doesn't break when the namespace was renamed" do
- subject.rename_namespace(namespace)
- namespace.update!(path: 'renamed-afterwards')
-
- expect { subject.revert_renames }.not_to raise_error
- end
- end
-end
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
deleted file mode 100644
index d2665664fb0..00000000000
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
+++ /dev/null
@@ -1,190 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :delete,
-feature_category: :groups_and_projects do
- let(:migration) { FakeRenameReservedPathMigrationV1.new }
- let(:subject) { described_class.new(['the-path'], migration) }
- let(:project) do
- create(:project,
- :legacy_storage,
- path: 'the-path',
- namespace: create(:namespace, path: 'known-parent' ))
- end
-
- before do
- allow(migration).to receive(:say)
- TestEnv.clean_test_path
- end
-
- describe '#projects_for_paths' do
- it 'searches using nested paths' do
- namespace = create(:namespace, path: 'hello')
- project = create(:project, :legacy_storage, path: 'THE-path', namespace: namespace)
-
- result_ids = described_class.new(['Hello/the-path'], migration)
- .projects_for_paths.map(&:id)
-
- expect(result_ids).to contain_exactly(project.id)
- end
-
- it 'includes the correct projects' do
- project = create(:project, :legacy_storage, path: 'THE-path')
- _other_project = create(:project, :legacy_storage)
-
- result_ids = subject.projects_for_paths.map(&:id)
-
- expect(result_ids).to contain_exactly(project.id)
- end
- end
-
- describe '#rename_projects' do
- let!(:projects) { create_list(:project, 2, :legacy_storage, path: 'the-path') }
-
- it 'renames each project' do
- expect(subject).to receive(:rename_project).twice
-
- subject.rename_projects
- end
-
- it 'invalidates the markdown cache of related projects' do
- expect(subject).to receive(:remove_cached_html_for_projects)
- .with(a_collection_containing_exactly(*projects.map(&:id)))
-
- subject.rename_projects
- end
- end
-
- describe '#rename_project' do
- it 'renames path & route for the project' do
- expect(subject).to receive(:rename_path_for_routable)
- .with(project)
- .and_call_original
-
- subject.rename_project(project)
-
- expect(project.reload.path).to eq('the-path0')
- end
-
- it 'tracks the rename' do
- expect(subject).to receive(:track_rename)
- .with('project', 'known-parent/the-path', 'known-parent/the-path0')
-
- subject.rename_project(project)
- end
-
- it 'renames the folders for the project' do
- expect(subject).to receive(:move_project_folders).with(project, 'known-parent/the-path', 'known-parent/the-path0')
-
- subject.rename_project(project)
- end
- end
-
- describe '#move_project_folders' do
- it 'moves the wiki & the repo' do
- expect(subject).to receive(:move_repository)
- .with(project, 'known-parent/the-path.wiki', 'known-parent/the-path0.wiki')
- expect(subject).to receive(:move_repository)
- .with(project, 'known-parent/the-path', 'known-parent/the-path0')
-
- subject.move_project_folders(project, 'known-parent/the-path', 'known-parent/the-path0')
- end
-
- it 'does not move the repositories when hashed storage is enabled' do
- project.update!(storage_version: Project::HASHED_STORAGE_FEATURES[:repository])
-
- expect(subject).not_to receive(:move_repository)
-
- subject.move_project_folders(project, 'known-parent/the-path', 'known-parent/the-path0')
- end
-
- it 'moves uploads' do
- expect(subject).to receive(:move_uploads)
- .with('known-parent/the-path', 'known-parent/the-path0')
-
- subject.move_project_folders(project, 'known-parent/the-path', 'known-parent/the-path0')
- end
-
- it 'does not move uploads when hashed storage is enabled for attachments' do
- project.update!(storage_version: Project::HASHED_STORAGE_FEATURES[:attachments])
-
- expect(subject).not_to receive(:move_uploads)
-
- subject.move_project_folders(project, 'known-parent/the-path', 'known-parent/the-path0')
- end
-
- it 'moves pages' do
- expect(subject).to receive(:move_pages)
- .with('known-parent/the-path', 'known-parent/the-path0')
-
- subject.move_project_folders(project, 'known-parent/the-path', 'known-parent/the-path0')
- end
- end
-
- describe '#move_repository' do
- let(:known_parent) { create(:namespace, path: 'known-parent') }
- let(:project) { create(:project, :repository, :legacy_storage, path: 'the-path', namespace: known_parent) }
-
- it 'moves the repository for a project' do
- expected_repository = Gitlab::Git::Repository.new(
- project.repository_storage,
- 'known-parent/new-repo.git',
- nil,
- nil
- )
-
- subject.move_repository(project, 'known-parent/the-path', 'known-parent/new-repo')
-
- expect(expected_repository).to exist
- end
- end
-
- describe '#revert_renames', :redis do
- it 'renames the routes back to the previous values' do
- subject.rename_project(project)
-
- expect(subject).to receive(:perform_rename)
- .with(
- kind_of(Gitlab::Database::RenameReservedPathsMigration::V1::MigrationClasses::Project),
- 'known-parent/the-path0',
- 'known-parent/the-path'
- ).and_call_original
-
- subject.revert_renames
-
- expect(project.reload.path).to eq('the-path')
- expect(project.route.path).to eq('known-parent/the-path')
- end
-
- it 'moves the repositories back to their original place' do
- project.create_repository
- subject.rename_project(project)
-
- expected_repository = Gitlab::Git::Repository.new(
- project.repository_storage,
- 'known-parent/the-path.git',
- nil,
- nil
- )
-
- expect(subject).to receive(:move_project_folders)
- .with(
- kind_of(Gitlab::Database::RenameReservedPathsMigration::V1::MigrationClasses::Project),
- 'known-parent/the-path0',
- 'known-parent/the-path'
- ).and_call_original
-
- subject.revert_renames
-
- expect(expected_repository).to exist
- end
-
- it "doesn't break when the project was renamed" do
- subject.rename_project(project)
- project.update!(path: 'renamed-afterwards')
-
- expect { subject.revert_renames }.not_to raise_error
- end
- end
-end
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1_spec.rb
deleted file mode 100644
index 3b2d3ab1354..00000000000
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1_spec.rb
+++ /dev/null
@@ -1,78 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.shared_examples 'renames child namespaces' do |type|
- it 'renames namespaces' do
- rename_namespaces = double
- expect(described_class::RenameNamespaces)
- .to receive(:new).with(%w[first-path second-path], subject)
- .and_return(rename_namespaces)
- expect(rename_namespaces).to receive(:rename_namespaces)
- .with(type: :child)
-
- subject.rename_wildcard_paths(%w[first-path second-path])
- end
-end
-
-RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1, :delete do
- let(:subject) { FakeRenameReservedPathMigrationV1.new }
-
- before do
- allow(subject).to receive(:say)
- end
-
- describe '#rename_child_paths' do
- it_behaves_like 'renames child namespaces'
- end
-
- describe '#rename_wildcard_paths' do
- it_behaves_like 'renames child namespaces'
-
- it 'renames projects' do
- rename_projects = double
- expect(described_class::RenameProjects)
- .to receive(:new).with(['the-path'], subject)
- .and_return(rename_projects)
-
- expect(rename_projects).to receive(:rename_projects)
-
- subject.rename_wildcard_paths(['the-path'])
- end
- end
-
- describe '#rename_root_paths' do
- it 'renames namespaces' do
- rename_namespaces = double
- expect(described_class::RenameNamespaces)
- .to receive(:new).with(['the-path'], subject)
- .and_return(rename_namespaces)
- expect(rename_namespaces).to receive(:rename_namespaces)
- .with(type: :top_level)
-
- subject.rename_root_paths('the-path')
- end
- end
-
- describe '#revert_renames' do
- it 'renames namespaces' do
- rename_namespaces = double
- expect(described_class::RenameNamespaces)
- .to receive(:new).with([], subject)
- .and_return(rename_namespaces)
- expect(rename_namespaces).to receive(:revert_renames)
-
- subject.revert_renames
- end
-
- it 'renames projects' do
- rename_projects = double
- expect(described_class::RenameProjects)
- .to receive(:new).with([], subject)
- .and_return(rename_projects)
- expect(rename_projects).to receive(:revert_renames)
-
- subject.revert_renames
- end
- end
-end
diff --git a/spec/lib/gitlab/database_importers/work_items/related_links_restrictions_importer_spec.rb b/spec/lib/gitlab/database_importers/work_items/related_links_restrictions_importer_spec.rb
new file mode 100644
index 00000000000..39d02922acc
--- /dev/null
+++ b/spec/lib/gitlab/database_importers/work_items/related_links_restrictions_importer_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::DatabaseImporters::WorkItems::RelatedLinksRestrictionsImporter,
+ feature_category: :portfolio_management do
+ subject { described_class.upsert_restrictions }
+
+ it_behaves_like 'work item related links restrictions importer'
+end
diff --git a/spec/lib/gitlab/deploy_key_access_spec.rb b/spec/lib/gitlab/deploy_key_access_spec.rb
index e32858cc13f..0a85fc5d967 100644
--- a/spec/lib/gitlab/deploy_key_access_spec.rb
+++ b/spec/lib/gitlab/deploy_key_access_spec.rb
@@ -23,16 +23,6 @@ RSpec.describe Gitlab::DeployKeyAccess, feature_category: :source_code_managemen
it 'returns false' do
expect(access.can_create_tag?('v0.1.2')).to be_falsey
end
-
- context 'when deploy_key_for_protected_tags FF is disabled' do
- before do
- stub_feature_flags(deploy_key_for_protected_tags: false)
- end
-
- it 'allows to push the tag' do
- expect(access.can_create_tag?('v0.1.2')).to be_truthy
- end
- end
end
context 'push tag that matches a protected tag pattern via a deploy key' do
diff --git a/spec/lib/gitlab/diff/position_tracer_spec.rb b/spec/lib/gitlab/diff/position_tracer_spec.rb
index 4aa4f160fc9..059058c5499 100644
--- a/spec/lib/gitlab/diff/position_tracer_spec.rb
+++ b/spec/lib/gitlab/diff/position_tracer_spec.rb
@@ -116,5 +116,71 @@ RSpec.describe Gitlab::Diff::PositionTracer do
expect(diff_refs.head_sha).to eq(new_diff_refs.head_sha)
end
end
+
+ describe 'when requesting diffs' do
+ shared_examples 'it does not call diff stats' do
+ it 'does not call diff stats' do
+ expect_next_instance_of(Gitlab::GitalyClient::CommitService) do |instance|
+ expect(instance).not_to receive(:diff_stats)
+ end
+
+ diff_files
+ end
+ end
+
+ shared_examples 'it calls diff stats' do
+ it 'calls diff stats' do
+ expect_next_instance_of(Gitlab::GitalyClient::CommitService) do |instance|
+ expect(instance).to receive(:diff_stats).and_call_original
+ end
+
+ diff_files
+ end
+ end
+
+ context 'when remove_request_stats_for_tracing is true' do
+ context 'ac diffs' do
+ let(:diff_files) { subject.ac_diffs.diff_files }
+
+ it_behaves_like 'it does not call diff stats'
+ end
+
+ context 'bd diffs' do
+ let(:diff_files) { subject.bd_diffs.diff_files }
+
+ it_behaves_like 'it does not call diff stats'
+ end
+
+ context 'cd diffs' do
+ let(:diff_files) { subject.cd_diffs.diff_files }
+
+ it_behaves_like 'it does not call diff stats'
+ end
+ end
+
+ context 'when remove_request_stats_for_tracing is false' do
+ before do
+ stub_feature_flags(remove_request_stats_for_tracing: false)
+ end
+
+ context 'ac diffs' do
+ let(:diff_files) { subject.ac_diffs.diff_files }
+
+ it_behaves_like 'it calls diff stats'
+ end
+
+ context 'bd diffs' do
+ let(:diff_files) { subject.bd_diffs.diff_files }
+
+ it_behaves_like 'it calls diff stats'
+ end
+
+ context 'cd diffs' do
+ let(:diff_files) { subject.cd_diffs.diff_files }
+
+ it_behaves_like 'it calls diff stats'
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/doctor/reset_tokens_spec.rb b/spec/lib/gitlab/doctor/reset_tokens_spec.rb
new file mode 100644
index 00000000000..0cc947efdb4
--- /dev/null
+++ b/spec/lib/gitlab/doctor/reset_tokens_spec.rb
@@ -0,0 +1,133 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Doctor::ResetTokens, feature_category: :runner_fleet do
+ let(:logger) { instance_double('Logger') }
+ let(:model_names) { %w[Project Group] }
+ let(:token_names) { %w[runners_token] }
+ let(:dry_run) { false }
+ let(:doctor) { described_class.new(logger, model_names: model_names, token_names: token_names, dry_run: dry_run) }
+
+ let_it_be(:functional_project) { create(:project).tap(&:runners_token) }
+ let_it_be(:functional_group) { create(:group).tap(&:runners_token) }
+
+ let(:broken_project) { create(:project).tap { |project| project.update_columns(runners_token_encrypted: 'aaa') } }
+ let(:project_with_cipher_error) do
+ create(:project).tap do |project|
+ project.update_columns(
+ runners_token_encrypted: '|rXs75DSHXPE9MGAIgyxcut8pZc72gaa/2ojU0GS1+R+cXNqkbUB13Vb5BaMwf47d98980fc1')
+ end
+ end
+
+ let(:broken_group) { create(:group, runners_token_encrypted: 'aaa') }
+
+ subject(:run!) do
+ expect(logger).to receive(:info).with(
+ "Resetting #{token_names.join(', ')} on #{model_names.join(', ')} if they can not be read"
+ )
+ expect(logger).to receive(:info).with('Done!')
+ doctor.run!
+ end
+
+ before do
+ allow(logger).to receive(:info).with(%r{Checked \d/\d Projects})
+ allow(logger).to receive(:info).with(%r{Checked \d Projects})
+ allow(logger).to receive(:info).with(%r{Checked \d/\d Groups})
+ allow(logger).to receive(:info).with(%r{Checked \d Groups})
+ end
+
+ it 'fixes broken project and not the functional project' do
+ expect(logger).to receive(:debug).with("> Fix Project[#{broken_project.id}].runners_token")
+
+ expect { run! }.to change { broken_project.reload.runners_token_encrypted }.from('aaa')
+ .and not_change { functional_project.reload.runners_token_encrypted }
+ expect { broken_project.runners_token }.not_to raise_error
+ end
+
+ it 'fixes project with cipher error' do
+ expect { project_with_cipher_error.runners_token }.to raise_error(OpenSSL::Cipher::CipherError)
+ expect(logger).to receive(:debug).with("> Fix Project[#{project_with_cipher_error.id}].runners_token")
+
+ expect { run! }.to change { project_with_cipher_error.reload.runners_token_encrypted }
+ expect { project_with_cipher_error.runners_token }.not_to raise_error
+ end
+
+ it 'fixes broken group and not the functional group' do
+ expect(logger).to receive(:debug).with("> Fix Group[#{broken_group.id}].runners_token")
+
+ expect { run! }.to change { broken_group.reload.runners_token_encrypted }.from('aaa')
+ .and not_change { functional_group.reload.runners_token_encrypted }
+
+ expect { broken_group.runners_token }.not_to raise_error
+ end
+
+ context 'when one model specified' do
+ let(:model_names) { %w[Project] }
+
+ it 'fixes broken project' do
+ expect(logger).to receive(:debug).with("> Fix Project[#{broken_project.id}].runners_token")
+
+ expect { run! }.to change { broken_project.reload.runners_token_encrypted }.from('aaa')
+ expect { broken_project.runners_token }.not_to raise_error
+ end
+
+ it 'does not fix other models' do
+ expect { run! }.not_to change { broken_group.reload.runners_token_encrypted }.from('aaa')
+ end
+ end
+
+ context 'when non-existing token field is given' do
+ let(:token_names) { %w[nonexisting_token] }
+
+ it 'does not fix anything' do
+ expect { run! }.not_to change { broken_project.reload.runners_token_encrypted }.from('aaa')
+ end
+ end
+
+ context 'when executing in a dry-run mode' do
+ let(:dry_run) { true }
+
+ it 'prints info about fixed project, but does not actually do anything' do
+ expect(logger).to receive(:info).with('Executing in DRY RUN mode, no records will actually be updated')
+ expect(logger).to receive(:debug).with("> Fix Project[#{broken_project.id}].runners_token")
+
+ expect { run! }.not_to change { broken_project.reload.runners_token_encrypted }.from('aaa')
+ expect { broken_project.runners_token }.to raise_error(TypeError)
+ end
+ end
+
+ it 'prints progress along the way' do
+ stub_const('Gitlab::Doctor::ResetTokens::PRINT_PROGRESS_EVERY', 1)
+
+ broken_project
+ project_with_cipher_error
+
+ expect(logger).to receive(:info).with(
+ "Resetting #{token_names.join(', ')} on #{model_names.join(', ')} if they can not be read"
+ )
+ expect(logger).to receive(:info).with('Checked 1/3 Projects')
+ expect(logger).to receive(:debug).with("> Fix Project[#{broken_project.id}].runners_token")
+ expect(logger).to receive(:info).with('Checked 2/3 Projects')
+ expect(logger).to receive(:debug).with("> Fix Project[#{project_with_cipher_error.id}].runners_token")
+ expect(logger).to receive(:info).with('Checked 3/3 Projects')
+ expect(logger).to receive(:info).with('Done!')
+
+ doctor.run!
+ end
+
+ it "prints 'Something went wrong' error when encounters unexpected exception, but continues" do
+ broken_project
+ project_with_cipher_error
+
+ expect(logger).to receive(:debug).with(
+ "> Something went wrong for Project[#{broken_project.id}].runners_token: Error message")
+ expect(logger).to receive(:debug).with("> Fix Project[#{project_with_cipher_error.id}].runners_token")
+
+ expect(broken_project).to receive(:runners_token).and_raise("Error message")
+ expect(Project).to receive(:find_each).and_return([broken_project, project_with_cipher_error].each)
+
+ expect { run! }.to not_change { broken_project.reload.runners_token_encrypted }.from('aaa')
+ .and change { project_with_cipher_error.reload.runners_token_encrypted }
+ end
+end
diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
index 6941ebd2e11..e6fff939632 100644
--- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
@@ -321,7 +321,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
end
end
- context 'when using custom service desk address' do
+ context 'when using additional service desk alias address' do
let(:receiver) { Gitlab::Email::ServiceDeskReceiver.new(email_raw) }
before do
@@ -587,6 +587,16 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
end
end
+ context 'when there is no to address' do
+ before do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:to_address).and_return(nil)
+ end
+ end
+
+ it_behaves_like 'a new issue request'
+ end
+
context 'when there is no from address' do
before do
allow_next_instance_of(described_class) do |instance|
diff --git a/spec/lib/gitlab/email/message/build_ios_app_guide_spec.rb b/spec/lib/gitlab/email/message/build_ios_app_guide_spec.rb
deleted file mode 100644
index 4b77b2f7192..00000000000
--- a/spec/lib/gitlab/email/message/build_ios_app_guide_spec.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Email::Message::BuildIosAppGuide, :saas do
- subject(:message) { described_class.new }
-
- it 'contains the correct message', :aggregate_failures do
- expect(message.subject_line).to eq 'Get set up to build for iOS'
- expect(message.title).to eq "Building for iOS? We've got you covered."
- expect(message.body_line1).to eq "Want to get your iOS app up and running, including " \
- "publishing all the way to TestFlight? Follow our guide to set up GitLab and fastlane to publish iOS apps to " \
- "the App Store."
- expect(message.cta_text).to eq 'Learn how to build for iOS'
- expect(message.cta2_text).to eq 'Watch iOS building in action.'
- expect(message.logo_path).to eq 'mailers/in_product_marketing/create-0.png'
- expect(message.unsubscribe).to include('%tag_unsubscribe_url%')
- end
-end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/helper_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/helper_spec.rb
deleted file mode 100644
index a3c2d1b428e..00000000000
--- a/spec/lib/gitlab/email/message/in_product_marketing/helper_spec.rb
+++ /dev/null
@@ -1,75 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Email::Message::InProductMarketing::Helper do
- describe 'unsubscribe_message' do
- include Gitlab::Routing
-
- let(:dummy_class_with_helper) do
- Class.new do
- include Gitlab::Email::Message::InProductMarketing::Helper
- include Gitlab::Routing
-
- def initialize(format = :html)
- @format = format
- end
-
- def default_url_options
- {}
- end
-
- attr_accessor :format
- end
- end
-
- let(:format) { :html }
-
- subject(:class_with_helper) { dummy_class_with_helper.new(format) }
-
- context 'for SaaS', :saas do
- context 'format is HTML' do
- it 'returns the correct HTML' do
- message = "If you no longer wish to receive marketing emails from us, " \
- "you may <a href=\"%tag_unsubscribe_url%\">unsubscribe</a> at any time."
- expect(class_with_helper.unsubscribe_message).to match message
- end
- end
-
- context 'format is text' do
- let(:format) { :text }
-
- it 'returns the correct string' do
- message = "If you no longer wish to receive marketing emails from us, " \
- "you may unsubscribe (%tag_unsubscribe_url%) at any time."
- expect(class_with_helper.unsubscribe_message.squish).to match message
- end
- end
- end
-
- context 'self-managed' do
- context 'format is HTML' do
- it 'returns the correct HTML' do
- preferences_link = "http://example.com/preferences"
- message = "To opt out of these onboarding emails, " \
- "<a href=\"#{profile_notifications_url}\">unsubscribe</a>. " \
- "If you don't want to receive marketing emails directly from GitLab, #{preferences_link}."
- expect(class_with_helper.unsubscribe_message(preferences_link))
- .to match message
- end
- end
-
- context 'format is text' do
- let(:format) { :text }
-
- it 'returns the correct string' do
- preferences_link = "http://example.com/preferences"
- message = "To opt out of these onboarding emails, " \
- "unsubscribe (#{profile_notifications_url}). " \
- "If you don't want to receive marketing emails directly from GitLab, #{preferences_link}."
- expect(class_with_helper.unsubscribe_message(preferences_link).squish).to match message
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/email/receiver_spec.rb b/spec/lib/gitlab/email/receiver_spec.rb
index ee836fc2129..f8084d24850 100644
--- a/spec/lib/gitlab/email/receiver_spec.rb
+++ b/spec/lib/gitlab/email/receiver_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe Gitlab::Email::Receiver do
metadata = receiver.mail_metadata
- expect(metadata.keys).to match_array(%i(mail_uid from_address to_address mail_key references delivered_to envelope_to x_envelope_to meta received_recipients))
+ expect(metadata.keys).to match_array(%i(mail_uid from_address to_address mail_key references delivered_to envelope_to x_envelope_to meta received_recipients cc_address))
expect(metadata[:meta]).to include(client_id: client_id, project: project.full_path)
expect(metadata[meta_key]).to eq(meta_value)
end
@@ -112,6 +112,24 @@ RSpec.describe Gitlab::Email::Receiver do
it_behaves_like 'successful receive'
end
end
+
+ context 'when in a Cc header' do
+ let(:email_raw) do
+ <<~EMAIL
+ From: jake@example.com
+ To: to@example.com
+ Cc: incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com
+ Subject: Issue title
+
+ Issue description
+ EMAIL
+ end
+
+ let(:meta_key) { :cc_address }
+ let(:meta_value) { ["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com"] }
+
+ it_behaves_like 'successful receive'
+ end
end
context 'when we cannot find a capable handler' do
diff --git a/spec/lib/gitlab/email/service_desk_receiver_spec.rb b/spec/lib/gitlab/email/service_desk_receiver_spec.rb
index c249a5422ff..4b67020471a 100644
--- a/spec/lib/gitlab/email/service_desk_receiver_spec.rb
+++ b/spec/lib/gitlab/email/service_desk_receiver_spec.rb
@@ -7,6 +7,12 @@ RSpec.describe Gitlab::Email::ServiceDeskReceiver do
let(:receiver) { described_class.new(email) }
context 'when the email contains a valid email address' do
+ shared_examples 'received successfully' do
+ it 'finds the service desk key' do
+ expect { receiver.execute }.not_to raise_error
+ end
+ end
+
before do
stub_service_desk_email_setting(enabled: true, address: 'support+%{key}@example.com')
@@ -21,34 +27,41 @@ RSpec.describe Gitlab::Email::ServiceDeskReceiver do
end
context 'when in a To header' do
- it 'finds the service desk key' do
- receiver.execute
- end
+ it_behaves_like 'received successfully'
end
context 'when the email contains a valid email address in a header' do
context 'when in a Delivered-To header' do
let(:email) { fixture_file('emails/service_desk_custom_address_reply.eml') }
- it 'finds the service desk key' do
- receiver.execute
- end
+ it_behaves_like 'received successfully'
end
context 'when in a Envelope-To header' do
let(:email) { fixture_file('emails/service_desk_custom_address_envelope_to.eml') }
- it 'finds the service desk key' do
- receiver.execute
- end
+ it_behaves_like 'received successfully'
end
context 'when in a X-Envelope-To header' do
let(:email) { fixture_file('emails/service_desk_custom_address_x_envelope_to.eml') }
- it 'finds the service desk key' do
- receiver.execute
+ it_behaves_like 'received successfully'
+ end
+
+ context 'when in a Cc header' do
+ let(:email) do
+ <<~EMAIL
+ From: from@example.com
+ To: to@example.com
+ Cc: support+project_slug-project_key@example.com
+ Subject: Issue title
+
+ Issue description
+ EMAIL
end
+
+ it_behaves_like 'received successfully'
end
end
end
diff --git a/spec/lib/gitlab/encoding_helper_spec.rb b/spec/lib/gitlab/encoding_helper_spec.rb
index bc72d1a67d6..1b7c11dfef6 100644
--- a/spec/lib/gitlab/encoding_helper_spec.rb
+++ b/spec/lib/gitlab/encoding_helper_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe Gitlab::EncodingHelper do
+RSpec.describe Gitlab::EncodingHelper, feature_category: :shared do
using RSpec::Parameterized::TableSyntax
let(:ext_class) { Class.new { extend Gitlab::EncodingHelper } }
@@ -291,4 +291,39 @@ RSpec.describe Gitlab::EncodingHelper do
expect(described_class.strip_bom("BOM at the end\xEF\xBB\xBF")).to eq("BOM at the end\xEF\xBB\xBF")
end
end
+
+ # This cop's alternative to .dup doesn't work in this context for some reason.
+ # rubocop: disable Performance/UnfreezeString
+ describe "#force_encode_utf8" do
+ let(:stringish) do
+ Class.new(String) do
+ undef :force_encoding
+ end
+ end
+
+ it "raises an ArgumentError if the argument can't force encoding" do
+ expect { described_class.force_encode_utf8(stringish.new("foo")) }.to raise_error(ArgumentError)
+ end
+
+ it "returns the message if already UTF-8 and valid encoding" do
+ string = "føø".dup
+
+ expect(string).not_to receive(:force_encoding).and_call_original
+ expect(described_class.force_encode_utf8(string)).to eq("føø")
+ end
+
+ it "forcibly encodes a string to UTF-8" do
+ string = "føø".dup.force_encoding("ISO-8859-1")
+
+ expect(string).to receive(:force_encoding).with("UTF-8").and_call_original
+ expect(described_class.force_encode_utf8(string)).to eq("føø")
+ end
+
+ it "forcibly encodes a frozen string to UTF-8" do
+ string = "bår".dup.force_encoding("ISO-8859-1").freeze
+
+ expect(described_class.force_encode_utf8(string)).to eq("bår")
+ end
+ end
+ # rubocop: enable Performance/UnfreezeString
end
diff --git a/spec/lib/gitlab/exclusive_lease_spec.rb b/spec/lib/gitlab/exclusive_lease_spec.rb
index c8325c5b359..80154c729e3 100644
--- a/spec/lib/gitlab/exclusive_lease_spec.rb
+++ b/spec/lib/gitlab/exclusive_lease_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::ExclusiveLease, :request_store, :clean_gitlab_redis_shared_state,
+RSpec.describe Gitlab::ExclusiveLease, :request_store,
:clean_gitlab_redis_cluster_shared_state, feature_category: :shared do
let(:unique_key) { SecureRandom.hex(10) }
@@ -20,67 +20,6 @@ RSpec.describe Gitlab::ExclusiveLease, :request_store, :clean_gitlab_redis_share
sleep(2 * timeout) # lease should have expired now
expect(lease.try_obtain).to be_present
end
-
- context 'when migrating across stores' do
- let(:lease) { described_class.new(unique_key, timeout: 3600) }
-
- before do
- stub_feature_flags(use_cluster_shared_state_for_exclusive_lease: false)
- allow(lease).to receive(:same_store).and_return(false)
- end
-
- it 'acquires 2 locks' do
- # stub first SETNX
- Gitlab::Redis::SharedState.with { |r| expect(r).to receive(:set).and_return(true) }
- Gitlab::Redis::ClusterSharedState.with { |r| expect(r).to receive(:set).and_call_original }
-
- expect(lease.try_obtain).to be_truthy
- end
-
- it 'rollback first lock if second lock is not acquired' do
- Gitlab::Redis::ClusterSharedState.with do |r|
- expect(r).to receive(:set).and_return(false)
- expect(r).to receive(:eval).and_call_original
- end
-
- Gitlab::Redis::SharedState.with do |r|
- expect(r).to receive(:set).and_call_original
- expect(r).to receive(:eval).and_call_original
- end
-
- expect(lease.try_obtain).to be_falsey
- end
- end
-
- context 'when cutting over to ClusterSharedState' do
- context 'when lock is not acquired' do
- it 'waits for existing holder to yield the lock' do
- Gitlab::Redis::ClusterSharedState.with { |r| expect(r).to receive(:set).and_call_original }
- Gitlab::Redis::SharedState.with { |r| expect(r).not_to receive(:set) }
-
- lease = described_class.new(unique_key, timeout: 3600)
- expect(lease.try_obtain).to be_truthy
- end
- end
-
- context 'when lock is still acquired' do
- let(:lease) { described_class.new(unique_key, timeout: 3600) }
-
- before do
- # simulates cutover where some application's feature-flag has not updated
- stub_feature_flags(use_cluster_shared_state_for_exclusive_lease: false)
- lease.try_obtain
- stub_feature_flags(use_cluster_shared_state_for_exclusive_lease: true)
- end
-
- it 'waits for existing holder to yield the lock' do
- Gitlab::Redis::ClusterSharedState.with { |r| expect(r).not_to receive(:set) }
- Gitlab::Redis::SharedState.with { |r| expect(r).not_to receive(:set) }
-
- expect(lease.try_obtain).to be_falsey
- end
- end
- end
end
describe '.redis_shared_state_key' do
@@ -104,159 +43,131 @@ RSpec.describe Gitlab::ExclusiveLease, :request_store, :clean_gitlab_redis_share
end
end
- shared_examples 'write operations' do
- describe '#renew' do
- it 'returns true when we have the existing lease' do
- lease = described_class.new(unique_key, timeout: 3600)
- expect(lease.try_obtain).to be_present
- expect(lease.renew).to be_truthy
- end
+ describe '#renew' do
+ it 'returns true when we have the existing lease' do
+ lease = described_class.new(unique_key, timeout: 3600)
+ expect(lease.try_obtain).to be_present
+ expect(lease.renew).to be_truthy
+ end
- it 'returns false when we dont have a lease' do
- lease = described_class.new(unique_key, timeout: 3600)
- expect(lease.renew).to be_falsey
- end
+ it 'returns false when we dont have a lease' do
+ lease = described_class.new(unique_key, timeout: 3600)
+ expect(lease.renew).to be_falsey
end
+ end
- describe 'cancellation' do
- def new_lease(key)
- described_class.new(key, timeout: 3600)
- end
+ describe 'cancellation' do
+ def new_lease(key)
+ described_class.new(key, timeout: 3600)
+ end
- shared_examples 'cancelling a lease' do
- let(:lease) { new_lease(unique_key) }
+ shared_examples 'cancelling a lease' do
+ let(:lease) { new_lease(unique_key) }
- it 'releases the held lease' do
- uuid = lease.try_obtain
- expect(uuid).to be_present
- expect(new_lease(unique_key).try_obtain).to eq(false)
+ it 'releases the held lease' do
+ uuid = lease.try_obtain
+ expect(uuid).to be_present
+ expect(new_lease(unique_key).try_obtain).to eq(false)
- cancel_lease(uuid)
+ cancel_lease(uuid)
- expect(new_lease(unique_key).try_obtain).to be_present
- end
+ expect(new_lease(unique_key).try_obtain).to be_present
end
+ end
- describe '.cancel' do
- def cancel_lease(uuid)
- described_class.cancel(release_key, uuid)
- end
+ describe '.cancel' do
+ def cancel_lease(uuid)
+ described_class.cancel(release_key, uuid)
+ end
- context 'when called with the unprefixed key' do
- it_behaves_like 'cancelling a lease' do
- let(:release_key) { unique_key }
- end
+ context 'when called with the unprefixed key' do
+ it_behaves_like 'cancelling a lease' do
+ let(:release_key) { unique_key }
end
+ end
- context 'when called with the prefixed key' do
- it_behaves_like 'cancelling a lease' do
- let(:release_key) { described_class.redis_shared_state_key(unique_key) }
- end
+ context 'when called with the prefixed key' do
+ it_behaves_like 'cancelling a lease' do
+ let(:release_key) { described_class.redis_shared_state_key(unique_key) }
end
+ end
- it 'does not raise errors when given a nil key' do
- expect { described_class.cancel(nil, nil) }.not_to raise_error
- end
+ it 'does not raise errors when given a nil key' do
+ expect { described_class.cancel(nil, nil) }.not_to raise_error
end
+ end
- describe '#cancel' do
- def cancel_lease(_uuid)
- lease.cancel
- end
+ describe '#cancel' do
+ def cancel_lease(_uuid)
+ lease.cancel
+ end
- it_behaves_like 'cancelling a lease'
+ it_behaves_like 'cancelling a lease'
- it 'is safe to call even if the lease was never obtained' do
- lease = new_lease(unique_key)
+ it 'is safe to call even if the lease was never obtained' do
+ lease = new_lease(unique_key)
- lease.cancel
+ lease.cancel
- expect(new_lease(unique_key).try_obtain).to be_present
- end
+ expect(new_lease(unique_key).try_obtain).to be_present
end
end
+ end
- describe '.reset_all!' do
- it 'removes all existing lease keys from redis' do
- uuid = described_class.new(unique_key, timeout: 3600).try_obtain
+ describe '.reset_all!' do
+ it 'removes all existing lease keys from redis' do
+ uuid = described_class.new(unique_key, timeout: 3600).try_obtain
- expect(described_class.get_uuid(unique_key)).to eq(uuid)
+ expect(described_class.get_uuid(unique_key)).to eq(uuid)
- described_class.reset_all!
+ described_class.reset_all!
- expect(described_class.get_uuid(unique_key)).to be_falsey
- end
+ expect(described_class.get_uuid(unique_key)).to be_falsey
end
end
- shared_examples 'read operations' do
- describe '#exists?' do
- it 'returns true for an existing lease' do
- lease = described_class.new(unique_key, timeout: 3600)
- lease.try_obtain
-
- expect(lease.exists?).to eq(true)
- end
-
- it 'returns false for a lease that does not exist' do
- lease = described_class.new(unique_key, timeout: 3600)
+ describe '#exists?' do
+ it 'returns true for an existing lease' do
+ lease = described_class.new(unique_key, timeout: 3600)
+ lease.try_obtain
- expect(lease.exists?).to eq(false)
- end
+ expect(lease.exists?).to eq(true)
end
- describe '.get_uuid' do
- it 'gets the uuid if lease with the key associated exists' do
- uuid = described_class.new(unique_key, timeout: 3600).try_obtain
-
- expect(described_class.get_uuid(unique_key)).to eq(uuid)
- end
+ it 'returns false for a lease that does not exist' do
+ lease = described_class.new(unique_key, timeout: 3600)
- it 'returns false if the lease does not exist' do
- expect(described_class.get_uuid(unique_key)).to be false
- end
+ expect(lease.exists?).to eq(false)
end
+ end
- describe '#ttl' do
- it 'returns the TTL of the Redis key' do
- lease = described_class.new('kittens', timeout: 100)
- lease.try_obtain
-
- expect(lease.ttl <= 100).to eq(true)
- end
+ describe '.get_uuid' do
+ it 'gets the uuid if lease with the key associated exists' do
+ uuid = described_class.new(unique_key, timeout: 3600).try_obtain
- it 'returns nil when the lease does not exist' do
- lease = described_class.new('kittens', timeout: 10)
+ expect(described_class.get_uuid(unique_key)).to eq(uuid)
+ end
- expect(lease.ttl).to be_nil
- end
+ it 'returns false if the lease does not exist' do
+ expect(described_class.get_uuid(unique_key)).to be false
end
end
- context 'when migrating across stores' do
- before do
- stub_feature_flags(use_cluster_shared_state_for_exclusive_lease: false)
+ describe '#ttl' do
+ it 'returns the TTL of the Redis key' do
+ lease = described_class.new('kittens', timeout: 100)
+ lease.try_obtain
+
+ expect(lease.ttl <= 100).to eq(true)
end
- it_behaves_like 'read operations'
- it_behaves_like 'write operations'
- end
+ it 'returns nil when the lease does not exist' do
+ lease = described_class.new('kittens', timeout: 10)
- context 'when feature flags are all disabled' do
- before do
- stub_feature_flags(
- use_cluster_shared_state_for_exclusive_lease: false,
- enable_exclusive_lease_double_lock_rw: false
- )
+ expect(lease.ttl).to be_nil
end
-
- it_behaves_like 'read operations'
- it_behaves_like 'write operations'
end
- it_behaves_like 'read operations'
- it_behaves_like 'write operations'
-
describe '.throttle' do
it 'prevents repeated execution of the block' do
number = 0
@@ -310,8 +221,8 @@ RSpec.describe Gitlab::ExclusiveLease, :request_store, :clean_gitlab_redis_share
it 'allows count to be specified' do
expect(described_class)
.to receive(:new)
- .with(anything, hash_including(timeout: 15.minutes.to_i))
- .and_call_original
+ .with(anything, hash_including(timeout: 15.minutes.to_i))
+ .and_call_original
described_class.throttle(1, count: 4) {}
end
@@ -319,8 +230,8 @@ RSpec.describe Gitlab::ExclusiveLease, :request_store, :clean_gitlab_redis_share
it 'allows period to be specified' do
expect(described_class)
.to receive(:new)
- .with(anything, hash_including(timeout: 1.day.to_i))
- .and_call_original
+ .with(anything, hash_including(timeout: 1.day.to_i))
+ .and_call_original
described_class.throttle(1, period: 1.day) {}
end
@@ -328,80 +239,10 @@ RSpec.describe Gitlab::ExclusiveLease, :request_store, :clean_gitlab_redis_share
it 'allows period and count to be specified' do
expect(described_class)
.to receive(:new)
- .with(anything, hash_including(timeout: 30.minutes.to_i))
- .and_call_original
+ .with(anything, hash_including(timeout: 30.minutes.to_i))
+ .and_call_original
described_class.throttle(1, count: 48, period: 1.day) {}
end
end
-
- describe 'transitions between feature-flag toggles' do
- shared_examples 'retains behaviours across transitions' do |flag|
- it 'retains read behaviour' do
- lease = described_class.new(unique_key, timeout: 3600)
- uuid = lease.try_obtain
-
- expect(lease.ttl).not_to eq(nil)
- expect(lease.exists?).to be_truthy
- expect(described_class.get_uuid(unique_key)).to eq(uuid)
-
- # simulates transition
- stub_feature_flags({ flag => true })
- Gitlab::SafeRequestStore.clear!
-
- expect(lease.ttl).not_to eq(nil)
- expect(lease.exists?).to be_truthy
- expect(described_class.get_uuid(unique_key)).to eq(uuid)
- end
-
- it 'retains renew behaviour' do
- lease = described_class.new(unique_key, timeout: 3600)
- lease.try_obtain
-
- expect(lease.renew).to be_truthy
-
- # simulates transition
- stub_feature_flags({ flag => true })
- Gitlab::SafeRequestStore.clear!
-
- expect(lease.renew).to be_truthy
- end
-
- it 'retains renew behaviour' do
- lease = described_class.new(unique_key, timeout: 3600)
- uuid = lease.try_obtain
- lease.cancel
-
- # proves successful cancellation
- expect(lease.try_obtain).to eq(uuid)
-
- # simulates transition
- stub_feature_flags({ flag => true })
- Gitlab::SafeRequestStore.clear!
-
- expect(lease.try_obtain).to be_falsey
- lease.cancel
- expect(lease.try_obtain).to eq(uuid)
- end
- end
-
- context 'when enabling enable_exclusive_lease_double_lock_rw' do
- before do
- stub_feature_flags(
- enable_exclusive_lease_double_lock_rw: false,
- use_cluster_shared_state_for_exclusive_lease: false
- )
- end
-
- it_behaves_like 'retains behaviours across transitions', :enable_exclusive_lease_double_lock_rw
- end
-
- context 'when enabling use_cluster_shared_state_for_exclusive_lease' do
- before do
- stub_feature_flags(use_cluster_shared_state_for_exclusive_lease: false)
- end
-
- it_behaves_like 'retains behaviours across transitions', :use_cluster_shared_state_for_exclusive_lease
- end
- end
end
diff --git a/spec/lib/gitlab/experiment/rollout/feature_spec.rb b/spec/lib/gitlab/experiment/rollout/feature_spec.rb
index cd46e7b3386..6d01b7a175f 100644
--- a/spec/lib/gitlab/experiment/rollout/feature_spec.rb
+++ b/spec/lib/gitlab/experiment/rollout/feature_spec.rb
@@ -2,16 +2,15 @@
require 'spec_helper'
-RSpec.describe Gitlab::Experiment::Rollout::Feature, :experiment do
+RSpec.describe Gitlab::Experiment::Rollout::Feature, :experiment, feature_category: :acquisition do
subject { described_class.new(subject_experiment) }
let(:subject_experiment) { experiment('namespaced/stub') }
- describe "#enabled?" do
+ describe "#enabled?", :saas do
before do
stub_feature_flags(gitlab_experiment: true)
allow(subject).to receive(:feature_flag_defined?).and_return(true)
- allow(Gitlab).to receive(:com?).and_return(true)
allow(subject).to receive(:feature_flag_instance).and_return(double(state: :on))
end
@@ -45,6 +44,18 @@ RSpec.describe Gitlab::Experiment::Rollout::Feature, :experiment do
end
describe "#execute_assignment" do
+ let(:variants) do
+ ->(e) do
+ # rubocop:disable Lint/EmptyBlock
+ e.control {}
+ e.variant(:red) {}
+ e.variant(:blue) {}
+ # rubocop:enable Lint/EmptyBlock
+ end
+ end
+
+ let(:subject_experiment) { experiment('namespaced/stub', &variants) }
+
before do
allow(Feature).to receive(:enabled?).with('namespaced_stub', any_args).and_return(true)
end
@@ -60,9 +71,82 @@ RSpec.describe Gitlab::Experiment::Rollout::Feature, :experiment do
end
it "returns an assigned name" do
- allow(subject).to receive(:behavior_names).and_return([:variant1, :variant2])
+ expect(subject.execute_assignment).to eq(:blue)
+ end
+
+ context "when there are no behaviors" do
+ let(:variants) { ->(e) { e.control {} } } # rubocop:disable Lint/EmptyBlock
+
+ it "does not raise an error" do
+ expect { subject.execute_assignment }.not_to raise_error
+ end
+ end
+
+ context "for even rollout to non-control", :saas do
+ let(:counts) { Hash.new(0) }
+ let(:subject_experiment) { experiment('namespaced/stub') }
+
+ before do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:enabled?).and_return(true)
+ end
+
+ subject_experiment.variant(:variant1) {} # rubocop:disable Lint/EmptyBlock
+ subject_experiment.variant(:variant2) {} # rubocop:disable Lint/EmptyBlock
+ end
+
+ it "rolls out relatively evenly to 2 behaviors" do
+ 100.times { |i| run_cycle(subject_experiment, value: i) }
+
+ expect(counts).to eq(variant1: 54, variant2: 46)
+ end
+
+ it "rolls out relatively evenly to 3 behaviors" do
+ subject_experiment.variant(:variant3) {} # rubocop:disable Lint/EmptyBlock
+
+ 100.times { |i| run_cycle(subject_experiment, value: i) }
+
+ expect(counts).to eq(variant1: 31, variant2: 29, variant3: 40)
+ end
+
+ context "when distribution is specified as an array" do
+ before do
+ subject_experiment.rollout(described_class, distribution: [32, 25, 43])
+ end
+
+ it "rolls out with the expected distribution" do
+ subject_experiment.variant(:variant3) {} # rubocop:disable Lint/EmptyBlock
+
+ 100.times { |i| run_cycle(subject_experiment, value: i) }
+
+ expect(counts).to eq(variant1: 39, variant2: 24, variant3: 37)
+ end
+ end
+
+ context "when distribution is specified as a hash" do
+ before do
+ subject_experiment.rollout(described_class, distribution: { variant1: 90, variant2: 10 })
+ end
+
+ it "rolls out with the expected distribution" do
+ 100.times { |i| run_cycle(subject_experiment, value: i) }
+
+ expect(counts).to eq(variant1: 95, variant2: 5)
+ end
+ end
+
+ def run_cycle(experiment, **context)
+ experiment.instance_variable_set(:@_assigned_variant_name, nil)
+ experiment.context(context) if context
+
+ begin
+ experiment.cache.delete
+ rescue StandardError
+ nil
+ end
- expect(subject.execute_assignment).to eq(:variant2)
+ counts[experiment.assigned.name] += 1
+ end
end
end
diff --git a/spec/lib/gitlab/git/diff_spec.rb b/spec/lib/gitlab/git/diff_spec.rb
index 4d78e194da8..6b3630d7a1f 100644
--- a/spec/lib/gitlab/git/diff_spec.rb
+++ b/spec/lib/gitlab/git/diff_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe Gitlab::Git::Diff do
+RSpec.describe Gitlab::Git::Diff, feature_category: :source_code_management do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:repository) { project.repository }
@@ -336,6 +336,121 @@ EOT
end
end
+ describe '#unidiff' do
+ let_it_be(:project) { create(:project, :empty_repo) }
+ let_it_be(:repository) { project.repository }
+ let_it_be(:user) { project.first_owner }
+
+ let(:commits) { repository.commits('master', limit: 10) }
+ let(:diffs) { commits.map(&:diffs).map(&:diffs).flat_map(&:to_a).reverse }
+
+ before_all do
+ create_commit(
+ project,
+ user,
+ commit_message: "Create file",
+ actions: [{ action: 'create', content: 'foo', file_path: 'a.txt' }]
+ )
+
+ create_commit(
+ project,
+ user,
+ commit_message: "Update file",
+ actions: [{ action: 'update', content: 'foo2', file_path: 'a.txt' }]
+ )
+
+ create_commit(
+ project,
+ user,
+ commit_message: "Rename file without change",
+ actions: [{ action: 'move', previous_path: 'a.txt', file_path: 'b.txt' }]
+ )
+
+ create_commit(
+ project,
+ user,
+ commit_message: "Rename file with change",
+ actions: [{ action: 'move', content: 'foo3', previous_path: 'b.txt', file_path: 'c.txt' }]
+ )
+
+ create_commit(
+ project,
+ user,
+ commit_message: "Delete file",
+ actions: [{ action: 'delete', file_path: 'c.txt' }]
+ )
+
+ create_commit(
+ project,
+ user,
+ commit_message: "Create empty file",
+ actions: [{ action: 'create', file_path: 'empty.txt' }]
+ )
+
+ create_commit(
+ project,
+ user,
+ commit_message: "Create binary file",
+ actions: [{ action: 'create', content: 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABAQMAAAAl21bKAAAAA1BMVEUAAACnej3aAAAAAXRSTlMAQObYZgAAAApJREFUCNdjYAAAAAIAAeIhvDMAAAAASUVORK5CYII=', file_path: 'test%2Ebin', encoding: 'base64' }]
+ )
+ end
+
+ context 'when file was created' do
+ it 'returns a correct header' do
+ diff = diffs[0]
+
+ expect(diff.unidiff).to start_with("--- /dev/null\n+++ b/a.txt\n")
+ end
+ end
+
+ context 'when file was changed' do
+ it 'returns a correct header' do
+ diff = diffs[1]
+
+ expect(diff.unidiff).to start_with("--- a/a.txt\n+++ b/a.txt\n")
+ end
+ end
+
+ context 'when file was moved without content change' do
+ it 'returns an empty header' do
+ diff = diffs[2]
+
+ expect(diff.unidiff).to eq('')
+ end
+ end
+
+ context 'when file was moved with content change' do
+ it 'returns a correct header' do
+ expect(diffs[3].unidiff).to start_with("--- /dev/null\n+++ b/c.txt\n")
+ expect(diffs[4].unidiff).to start_with("--- a/b.txt\n+++ /dev/null\n")
+ end
+ end
+
+ context 'when file was deleted' do
+ it 'returns a correct header' do
+ diff = diffs[5]
+
+ expect(diff.unidiff).to start_with("--- a/c.txt\n+++ /dev/null\n")
+ end
+ end
+
+ context 'when empty file was created' do
+ it 'returns an empty header' do
+ diff = diffs[6]
+
+ expect(diff.unidiff).to eq('')
+ end
+ end
+
+ context 'when file is binary' do
+ it 'returns a binary files message' do
+ diff = diffs[7]
+
+ expect(diff.unidiff).to eq("Binary files /dev/null and b/test%2Ebin differ\n")
+ end
+ end
+ end
+
describe '#submodule?' do
let(:gitaly_submodule_diff) do
Gitlab::GitalyClient::Diff.new(
@@ -445,4 +560,9 @@ EOT
# rugged will not detect this as binary, but we can fake it
described_class.between(project.repository, 'add-pdf-text-binary', 'add-pdf-text-binary^').first
end
+
+ def create_commit(project, user, params)
+ params = { start_branch: 'master', branch_name: 'master' }.merge(params)
+ Files::MultiService.new(project, user, params).execute.fetch(:result)
+ end
end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 18a090a00be..47b5986cfd8 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -203,25 +203,6 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
expect(metadata['CommitId']).to eq(expected_commit_id)
end
end
-
- context 'when resolve_ambiguous_archives is disabled' do
- before do
- stub_feature_flags(resolve_ambiguous_archives: false)
- end
-
- where(:ref, :expected_commit_id, :desc) do
- 'refs/heads/branch-merged' | ref(:branch_merged_commit_id) | 'when tag looks like a branch (difference!)'
- 'branch-merged' | ref(:branch_master_commit_id) | 'when tag has the same name as a branch'
- ref(:branch_merged_commit_id) | ref(:branch_merged_commit_id) | 'when tag looks like a commit id'
- 'v0.0.0' | ref(:branch_master_commit_id) | 'when tag looks like a normal tag'
- end
-
- with_them do
- it 'selects the correct commit' do
- expect(metadata['CommitId']).to eq(expected_commit_id)
- end
- end
- end
end
context 'when branch is ambiguous' do
@@ -241,25 +222,6 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
expect(metadata['CommitId']).to eq(expected_commit_id)
end
end
-
- context 'when resolve_ambiguous_archives is disabled' do
- before do
- stub_feature_flags(resolve_ambiguous_archives: false)
- end
-
- where(:ref, :expected_commit_id, :desc) do
- 'refs/tags/v1.0.0' | ref(:tag_1_0_0_commit_id) | 'when branch looks like a tag (difference!)'
- 'v1.0.0' | ref(:tag_1_0_0_commit_id) | 'when branch has the same name as a tag'
- ref(:branch_merged_commit_id) | ref(:branch_merged_commit_id) | 'when branch looks like a commit id'
- 'just-a-normal-branch' | ref(:branch_master_commit_id) | 'when branch looks like a normal branch'
- end
-
- with_them do
- it 'selects the correct commit' do
- expect(metadata['CommitId']).to eq(expected_commit_id)
- end
- end
- end
end
context 'when ref is HEAD' do
@@ -2650,21 +2612,6 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
end
end
- describe '#rename' do
- let(:repository) { mutable_repository }
-
- it 'moves the repository' do
- checksum = repository.checksum
- new_relative_path = "rename_test/relative/path"
- renamed_repository = Gitlab::Git::Repository.new(repository.storage, new_relative_path, nil, nil)
-
- repository.rename(new_relative_path)
-
- expect(renamed_repository.checksum).to eq(checksum)
- expect(repository.exists?).to be false
- end
- end
-
describe '#remove' do
let(:repository) { mutable_repository }
@@ -2833,4 +2780,14 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
end
end
end
+
+ describe '#get_file_attributes' do
+ let(:rev) { 'master' }
+ let(:paths) { ['file.txt'] }
+ let(:attrs) { ['text'] }
+
+ it_behaves_like 'wrapping gRPC errors', Gitlab::GitalyClient::RepositoryService, :get_file_attributes do
+ subject { repository.get_file_attributes(rev, paths, attrs) }
+ end
+ end
end
diff --git a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
index d320b9c4091..d5a0ab3d5e0 100644
--- a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
+++ b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
@@ -1,18 +1,11 @@
# frozen_string_literal: true
require 'spec_helper'
-require 'json'
-require 'tempfile'
RSpec.describe Gitlab::Git::RuggedImpl::UseRugged, feature_category: :gitaly do
let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
let(:feature_flag_name) { wrapper.rugged_feature_keys.first }
- let(:temp_gitaly_metadata_file) { create_temporary_gitaly_metadata_file }
-
- before_all do
- create_gitaly_metadata_file
- end
subject(:wrapper) do
klazz = Class.new do
@@ -24,11 +17,6 @@ RSpec.describe Gitlab::Git::RuggedImpl::UseRugged, feature_category: :gitaly do
klazz.new
end
- before do
- allow(Gitlab::GitalyClient).to receive(:can_use_disk?).and_call_original
- Gitlab::GitalyClient.instance_variable_set(:@can_use_disk, {})
- end
-
describe '#execute_rugged_call', :request_store do
let(:args) { ['refs/heads/master', 1] }
@@ -46,83 +34,9 @@ RSpec.describe Gitlab::Git::RuggedImpl::UseRugged, feature_category: :gitaly do
end
end
- context 'when feature flag is not persisted', stub_feature_flags: false do
- context 'when running puma with multiple threads' do
- before do
- allow(subject).to receive(:running_puma_with_multiple_threads?).and_return(true)
- end
-
- it 'returns false' do
- expect(subject.use_rugged?(repository, feature_flag_name)).to be false
- end
- end
-
- context 'when skip_rugged_auto_detect feature flag is enabled' do
- context 'when not running puma with multiple threads' do
- before do
- allow(subject).to receive(:running_puma_with_multiple_threads?).and_return(false)
- stub_feature_flags(feature_flag_name => nil)
- stub_feature_flags(skip_rugged_auto_detect: true)
- end
-
- it 'returns false' do
- expect(subject.use_rugged?(repository, feature_flag_name)).to be false
- end
- end
- end
-
- context 'when skip_rugged_auto_detect feature flag is disabled' do
- before do
- stub_feature_flags(skip_rugged_auto_detect: false)
- end
-
- context 'when not running puma with multiple threads' do
- before do
- allow(subject).to receive(:running_puma_with_multiple_threads?).and_return(false)
- end
-
- it 'returns true when gitaly matches disk' do
- expect(subject.use_rugged?(repository, feature_flag_name)).to be true
- end
-
- it 'returns false when disk access fails' do
- allow(Gitlab::GitalyClient).to receive(:storage_metadata_file_path).and_return("/fake/path/doesnt/exist")
-
- expect(subject.use_rugged?(repository, feature_flag_name)).to be false
- end
-
- it "returns false when gitaly doesn't match disk" do
- allow(Gitlab::GitalyClient).to receive(:storage_metadata_file_path).and_return(temp_gitaly_metadata_file)
-
- expect(subject.use_rugged?(repository, feature_flag_name)).to be_falsey
-
- File.delete(temp_gitaly_metadata_file)
- end
-
- it "doesn't lead to a second rpc call because gitaly client should use the cached value" do
- expect(subject.use_rugged?(repository, feature_flag_name)).to be true
-
- expect(Gitlab::GitalyClient).not_to receive(:filesystem_id)
-
- subject.use_rugged?(repository, feature_flag_name)
- end
- end
- end
- end
-
- context 'when feature flag is persisted' do
- it 'returns false when the feature flag is off' do
- Feature.disable(feature_flag_name)
-
- expect(subject.use_rugged?(repository, feature_flag_name)).to be_falsey
- end
-
- it "returns true when feature flag is on" do
- Feature.enable(feature_flag_name)
-
- allow(Gitlab::GitalyClient).to receive(:can_use_disk?).and_return(false)
-
- expect(subject.use_rugged?(repository, feature_flag_name)).to be true
+ describe '#use_rugged?' do
+ it 'returns false' do
+ expect(subject.use_rugged?(repository, feature_flag_name)).to be false
end
end
@@ -184,7 +98,7 @@ RSpec.describe Gitlab::Git::RuggedImpl::UseRugged, feature_category: :gitaly do
context 'all features are enabled' do
let(:feature_keys) { [:feature_key_1, :feature_key_2] }
- it { is_expected.to be_truthy }
+ it { is_expected.to be_falsey }
end
context 'all features are not enabled' do
@@ -196,28 +110,7 @@ RSpec.describe Gitlab::Git::RuggedImpl::UseRugged, feature_category: :gitaly do
context 'some feature is enabled' do
let(:feature_keys) { [:feature_key_4, :feature_key_2] }
- it { is_expected.to be_truthy }
- end
- end
-
- def create_temporary_gitaly_metadata_file
- tmp = Tempfile.new('.gitaly-metadata')
- gitaly_metadata = {
- "gitaly_filesystem_id" => "some-value"
- }
- tmp.write(gitaly_metadata.to_json)
- tmp.flush
- tmp.close
- tmp.path
- end
-
- def create_gitaly_metadata_file
- metadata_filename = File.join(TestEnv.repos_path, '.gitaly-metadata')
- File.open(metadata_filename, 'w+') do |f|
- gitaly_metadata = {
- "gitaly_filesystem_id" => SecureRandom.uuid
- }
- f.write(gitaly_metadata.to_json)
+ it { is_expected.to be_falsey }
end
end
end
diff --git a/spec/lib/gitlab/git/tree_spec.rb b/spec/lib/gitlab/git/tree_spec.rb
index 84ab8376fe1..9675e48a77f 100644
--- a/spec/lib/gitlab/git/tree_spec.rb
+++ b/spec/lib/gitlab/git/tree_spec.rb
@@ -2,11 +2,11 @@
require "spec_helper"
-RSpec.describe Gitlab::Git::Tree do
+RSpec.describe Gitlab::Git::Tree, feature_category: :source_code_management do
let_it_be(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
- let(:repository) { project.repository.raw }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:repository) { project.repository.raw }
shared_examples 'repo' do
subject(:tree) { Gitlab::Git::Tree.where(repository, sha, path, recursive, skip_flat_paths, rescue_not_found, pagination_params) }
@@ -95,6 +95,8 @@ RSpec.describe Gitlab::Git::Tree do
end
context :flat_path do
+ let(:project) { create(:project, :repository) }
+ let(:repository) { project.repository.raw }
let(:filename) { 'files/flat/path/correct/content.txt' }
let(:path) { 'files/flat' }
# rubocop: disable Rails/FindBy
@@ -192,9 +194,9 @@ RSpec.describe Gitlab::Git::Tree do
end
describe '.where with Rugged enabled', :enable_rugged do
- it 'calls out to the Rugged implementation' do
+ it 'does not call to the Rugged implementation' do
allow_next_instance_of(Rugged) do |instance|
- allow(instance).to receive(:lookup).with(SeedRepo::Commit::ID)
+ allow(instance).not_to receive(:lookup)
end
described_class.where(repository, SeedRepo::Commit::ID, 'files', false, false)
@@ -214,10 +216,10 @@ RSpec.describe Gitlab::Git::Tree do
context 'when limit is equal to number of entries' do
let(:entries_count) { entries.count }
- it 'returns all entries without a cursor' do
+ it 'returns all entries with a cursor' do
result, cursor = Gitlab::Git::Tree.where(repository, sha, path, recursive, skip_flat_paths, rescue_not_found, { limit: entries_count, page_token: nil })
- expect(cursor).to be_nil
+ expect(cursor).to eq(Gitaly::PaginationCursor.new)
expect(result.entries.count).to eq(entries_count)
end
end
@@ -234,9 +236,9 @@ RSpec.describe Gitlab::Git::Tree do
context 'when limit is missing' do
let(:pagination_params) { { limit: nil, page_token: nil } }
- it 'returns empty result' do
- expect(entries).to eq([])
- expect(cursor).to be_nil
+ it 'returns all entries' do
+ expect(entries.count).to be < 20
+ expect(cursor).to eq(Gitaly::PaginationCursor.new)
end
end
@@ -247,7 +249,7 @@ RSpec.describe Gitlab::Git::Tree do
result, cursor = Gitlab::Git::Tree.where(repository, sha, path, recursive, skip_flat_paths, rescue_not_found, { limit: -1, page_token: nil })
expect(result.count).to eq(entries_count)
- expect(cursor).to be_nil
+ expect(cursor).to eq(Gitaly::PaginationCursor.new)
end
context 'when token is provided' do
@@ -258,7 +260,7 @@ RSpec.describe Gitlab::Git::Tree do
result, cursor = Gitlab::Git::Tree.where(repository, sha, path, recursive, skip_flat_paths, rescue_not_found, { limit: -1, page_token: token })
expect(result.count).to eq(entries.count - 2)
- expect(cursor).to be_nil
+ expect(cursor).to eq(Gitaly::PaginationCursor.new)
end
end
end
@@ -276,7 +278,7 @@ RSpec.describe Gitlab::Git::Tree do
it 'returns only available entries' do
expect(entries.count).to be < 20
- expect(cursor).to be_nil
+ expect(cursor).to eq(Gitaly::PaginationCursor.new)
end
end
diff --git a/spec/lib/gitlab/git_audit_event_spec.rb b/spec/lib/gitlab/git_audit_event_spec.rb
new file mode 100644
index 00000000000..c533b39f550
--- /dev/null
+++ b/spec/lib/gitlab/git_audit_event_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GitAuditEvent, feature_category: :source_code_management do
+ let_it_be(:player) { create(:user) }
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:project) { create(:project) }
+
+ subject { described_class.new(player, project) }
+
+ describe '#send_audit_event' do
+ let(:msg) { 'valid_msg' }
+
+ context 'with successfully sending' do
+ let_it_be(:project) { create(:project, namespace: group) }
+
+ before do
+ allow(::Gitlab::Audit::Auditor).to receive(:audit)
+ end
+
+ context 'when player is a regular user' do
+ it 'sends git audit event' do
+ expect(::Gitlab::Audit::Auditor).to receive(:audit).with(a_hash_including(
+ name: 'repository_git_operation',
+ stream_only: true,
+ author: player,
+ scope: project,
+ target: project,
+ message: msg
+ )).once
+
+ subject.send_audit_event(msg)
+ end
+ end
+
+ context 'when player is ::API::Support::GitAccessActor' do
+ let_it_be(:user) { player }
+ let_it_be(:key) { create(:key, user: user) }
+ let_it_be(:git_access_actor) { ::API::Support::GitAccessActor.new(user: user, key: key) }
+
+ subject { described_class.new(git_access_actor, project) }
+
+ it 'sends git audit event' do
+ expect(::Gitlab::Audit::Auditor).to receive(:audit).with(a_hash_including(
+ name: 'repository_git_operation',
+ stream_only: true,
+ author: git_access_actor.deploy_key_or_user,
+ scope: project,
+ target: project,
+ message: msg
+ )).once
+
+ subject.send_audit_event(msg)
+ end
+ end
+ end
+
+ context 'when user is blank' do
+ let_it_be(:player) { nil }
+
+ it 'does not send git audit event' do
+ expect(::Gitlab::Audit::Auditor).not_to receive(:audit)
+
+ subject.send_audit_event(msg)
+ end
+ end
+
+ context 'when project is blank' do
+ let_it_be(:project) { nil }
+
+ it 'does not send git audit event' do
+ expect(::Gitlab::Audit::Auditor).not_to receive(:audit)
+
+ subject.send_audit_event(msg)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
index 8e0e4525729..283a9cb45dc 100644
--- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
@@ -369,17 +369,6 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService, feature_category: :gital
end
end
- describe '#rename' do
- it 'sends a rename_repository message' do
- expect_any_instance_of(Gitaly::RepositoryService::Stub)
- .to receive(:rename_repository)
- .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return(double(value: true))
-
- client.rename('some/new/path')
- end
- end
-
describe '#remove' do
it 'sends a remove_repository message' do
expect_any_instance_of(Gitaly::RepositoryService::Stub)
@@ -451,4 +440,19 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService, feature_category: :gital
client.object_pool
end
end
+
+ describe '#get_file_attributes' do
+ let(:rev) { 'master' }
+ let(:paths) { ['file.txt'] }
+ let(:attrs) { ['text'] }
+
+ it 'sends a get_file_attributes message' do
+ expect_any_instance_of(Gitaly::RepositoryService::Stub)
+ .to receive(:get_file_attributes)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_call_original
+
+ expect(client.get_file_attributes(rev, paths, attrs)).to be_a Gitaly::GetFileAttributesResponse
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_gists_import/importer/gist_importer_spec.rb b/spec/lib/gitlab/github_gists_import/importer/gist_importer_spec.rb
index cbcd9b83c15..b098a151660 100644
--- a/spec/lib/gitlab/github_gists_import/importer/gist_importer_spec.rb
+++ b/spec/lib/gitlab/github_gists_import/importer/gist_importer_spec.rb
@@ -174,9 +174,9 @@ RSpec.describe Gitlab::GithubGistsImport::Importer::GistImporter, feature_catego
.to receive(:validate!)
.with(url, ports: [80, 443], schemes: %w[http https git],
allow_localhost: true, allow_local_network: true)
- .and_raise(Gitlab::UrlBlocker::BlockedUrlError)
+ .and_raise(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
- expect { subject.execute }.to raise_error(Gitlab::UrlBlocker::BlockedUrlError)
+ expect { subject.execute }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
end
end
@@ -191,9 +191,9 @@ RSpec.describe Gitlab::GithubGistsImport::Importer::GistImporter, feature_catego
.to receive(:validate!)
.with(url, ports: [80, 443], schemes: %w[http https git],
allow_localhost: false, allow_local_network: false)
- .and_raise(Gitlab::UrlBlocker::BlockedUrlError)
+ .and_raise(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
- expect { subject.execute }.to raise_error(Gitlab::UrlBlocker::BlockedUrlError)
+ expect { subject.execute }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
end
end
end
diff --git a/spec/lib/gitlab/github_import/bulk_importing_spec.rb b/spec/lib/gitlab/github_import/bulk_importing_spec.rb
index 28fbd4d883f..6b4984ceaf2 100644
--- a/spec/lib/gitlab/github_import/bulk_importing_spec.rb
+++ b/spec/lib/gitlab/github_import/bulk_importing_spec.rb
@@ -47,10 +47,9 @@ RSpec.describe Gitlab::GithubImport::BulkImporting, feature_category: :importers
.with(object)
.and_return(false)
- expect(Gitlab::Import::Logger)
+ expect(Gitlab::GithubImport::Logger)
.to receive(:info)
.with(
- import_type: :github,
project_id: 1,
importer: 'MyImporter',
message: '1 object_types fetched'
@@ -82,10 +81,9 @@ RSpec.describe Gitlab::GithubImport::BulkImporting, feature_category: :importers
.with(object)
.and_return(true)
- expect(Gitlab::Import::Logger)
+ expect(Gitlab::GithubImport::Logger)
.to receive(:info)
.with(
- import_type: :github,
project_id: 1,
importer: 'MyImporter',
message: '0 object_types fetched'
@@ -145,14 +143,13 @@ RSpec.describe Gitlab::GithubImport::BulkImporting, feature_category: :importers
}
)
- expect(Gitlab::Import::Logger)
+ expect(Gitlab::GithubImport::Logger)
.to receive(:error)
.with(
- import_type: :github,
project_id: 1,
importer: 'MyImporter',
message: ['Title is invalid'],
- github_identifiers: { id: 12345, title: 'bug,bug', object_type: :object_type }
+ external_identifiers: { id: 12345, title: 'bug,bug', object_type: :object_type }
)
expect(Gitlab::GithubImport::ObjectCounter)
@@ -172,7 +169,7 @@ RSpec.describe Gitlab::GithubImport::BulkImporting, feature_category: :importers
expect(errors).not_to be_empty
expect(errors[0][:validation_errors].full_messages).to match_array(['Title is invalid'])
- expect(errors[0][:github_identifiers]).to eq({ id: 12345, title: 'bug,bug', object_type: :object_type })
+ expect(errors[0][:external_identifiers]).to eq({ id: 12345, title: 'bug,bug', object_type: :object_type })
end
end
end
@@ -182,11 +179,10 @@ RSpec.describe Gitlab::GithubImport::BulkImporting, feature_category: :importers
it 'bulk inserts rows into the database' do
rows = [{ title: 'Foo' }] * 10
- expect(Gitlab::Import::Logger)
+ expect(Gitlab::GithubImport::Logger)
.to receive(:info)
.twice
.with(
- import_type: :github,
project_id: 1,
importer: 'MyImporter',
message: '5 object_types imported'
@@ -243,7 +239,7 @@ RSpec.describe Gitlab::GithubImport::BulkImporting, feature_category: :importers
importer.bulk_insert_failures([{
validation_errors: error,
- github_identifiers: { id: 123456 }
+ external_identifiers: { id: 123456 }
}])
end
end
diff --git a/spec/lib/gitlab/github_import/client_spec.rb b/spec/lib/gitlab/github_import/client_spec.rb
index 4b0d61e3188..5f321a15de9 100644
--- a/spec/lib/gitlab/github_import/client_spec.rb
+++ b/spec/lib/gitlab/github_import/client_spec.rb
@@ -316,7 +316,7 @@ RSpec.describe Gitlab::GithubImport::Client, feature_category: :importers do
allow_retry
expect(client).to receive(:requests_remaining?).twice.and_return(true)
- expect(Gitlab::Import::Logger).to receive(:info).with(hash_including(info_params)).once
+ expect(Gitlab::GithubImport::Logger).to receive(:info).with(hash_including(info_params)).once
expect(client.with_rate_limit(&block_to_rate_limit)).to eq({})
end
@@ -337,7 +337,7 @@ RSpec.describe Gitlab::GithubImport::Client, feature_category: :importers do
it 'retries on error and succeeds' do
allow_retry
- expect(Gitlab::Import::Logger).to receive(:info).with(hash_including(info_params)).once
+ expect(Gitlab::GithubImport::Logger).to receive(:info).with(hash_including(info_params)).once
expect(client.with_rate_limit(&block_to_rate_limit)).to eq({})
end
@@ -723,7 +723,7 @@ RSpec.describe Gitlab::GithubImport::Client, feature_category: :importers do
it 'retries on error and succeeds' do
allow_retry(:post)
- expect(Gitlab::Import::Logger).to receive(:info).with(hash_including(info_params)).once
+ expect(Gitlab::GithubImport::Logger).to receive(:info).with(hash_including(info_params)).once
expect(client.search_repos_by_name_graphql('test')).to eq({})
end
diff --git a/spec/lib/gitlab/github_import/clients/proxy_spec.rb b/spec/lib/gitlab/github_import/clients/proxy_spec.rb
index 7b2a8fa9d74..99fd98d2ed4 100644
--- a/spec/lib/gitlab/github_import/clients/proxy_spec.rb
+++ b/spec/lib/gitlab/github_import/clients/proxy_spec.rb
@@ -3,10 +3,9 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Clients::Proxy, :manage, feature_category: :importers do
- subject(:client) { described_class.new(access_token, client_options) }
+ subject(:client) { described_class.new(access_token) }
let(:access_token) { 'test_token' }
- let(:client_options) { { foo: :bar } }
it { expect(client).to delegate_method(:each_object).to(:client) }
it { expect(client).to delegate_method(:user).to(:client) }
@@ -15,124 +14,67 @@ RSpec.describe Gitlab::GithubImport::Clients::Proxy, :manage, feature_category:
describe '#repos' do
let(:search_text) { 'search text' }
let(:pagination_options) { { limit: 10 } }
-
- context 'when remove_legacy_github_client FF is enabled' do
- let(:client_stub) { instance_double(Gitlab::GithubImport::Client) }
-
- let(:client_response) do
- {
- data: {
- search: {
- nodes: [{ name: 'foo' }, { name: 'bar' }],
- pageInfo: { startCursor: 'foo', endCursor: 'bar' },
- repositoryCount: 2
- }
+ let(:client_stub) { instance_double(Gitlab::GithubImport::Client) }
+ let(:client_response) do
+ {
+ data: {
+ search: {
+ nodes: [{ name: 'foo' }, { name: 'bar' }],
+ pageInfo: { startCursor: 'foo', endCursor: 'bar' },
+ repositoryCount: 2
}
}
- end
-
- it 'fetches repos with Gitlab::GithubImport::Client (GraphQL API)' do
- expect(Gitlab::GithubImport::Client)
- .to receive(:new).with(access_token).and_return(client_stub)
- expect(client_stub)
- .to receive(:search_repos_by_name_graphql)
- .with(search_text, pagination_options).and_return(client_response)
-
- expect(client.repos(search_text, pagination_options)).to eq(
- {
- repos: [{ name: 'foo' }, { name: 'bar' }],
- page_info: { startCursor: 'foo', endCursor: 'bar' },
- count: 2
- }
- )
- end
+ }
end
- context 'when remove_legacy_github_client FF is disabled' do
- let(:client_stub) { instance_double(Gitlab::LegacyGithubImport::Client) }
- let(:search_text) { nil }
-
- before do
- stub_feature_flags(remove_legacy_github_client: false)
- end
-
- it 'fetches repos with Gitlab::LegacyGithubImport::Client' do
- expect(Gitlab::LegacyGithubImport::Client)
- .to receive(:new).with(access_token, client_options).and_return(client_stub)
- expect(client_stub).to receive(:repos)
- .and_return([{ name: 'foo' }, { name: 'bar' }])
-
- expect(client.repos(search_text, pagination_options))
- .to eq({ repos: [{ name: 'foo' }, { name: 'bar' }] })
- end
-
- context 'with filter params' do
- let(:search_text) { 'fo' }
+ it 'fetches repos with Gitlab::GithubImport::Client (GraphQL API)' do
+ expect(Gitlab::GithubImport::Client)
+ .to receive(:new).with(access_token).and_return(client_stub)
+ expect(client_stub)
+ .to receive(:search_repos_by_name_graphql)
+ .with(search_text, pagination_options).and_return(client_response)
- it 'fetches repos with Gitlab::LegacyGithubImport::Client' do
- expect(Gitlab::LegacyGithubImport::Client)
- .to receive(:new).with(access_token, client_options).and_return(client_stub)
- expect(client_stub).to receive(:repos)
- .and_return([{ name: 'FOO' }, { name: 'bAr' }])
-
- expect(client.repos(search_text, pagination_options))
- .to eq({ repos: [{ name: 'FOO' }] })
- end
- end
+ expect(client.repos(search_text, pagination_options)).to eq(
+ {
+ repos: [{ name: 'foo' }, { name: 'bar' }],
+ page_info: { startCursor: 'foo', endCursor: 'bar' },
+ count: 2
+ }
+ )
end
end
describe '#count_by', :clean_gitlab_redis_cache do
- context 'when remove_legacy_github_client FF is enabled' do
- let(:client_stub) { instance_double(Gitlab::GithubImport::Client) }
- let(:client_response) { { data: { search: { repositoryCount: 1 } } } }
+ let(:client_stub) { instance_double(Gitlab::GithubImport::Client) }
+ let(:client_response) { { data: { search: { repositoryCount: 1 } } } }
+ context 'when value is cached' do
before do
- stub_feature_flags(remove_legacy_github_client: true)
+ Gitlab::Cache::Import::Caching.write('github-importer/provider-repo-count/owned/user_id', 3)
end
- context 'when value is cached' do
- before do
- Gitlab::Cache::Import::Caching.write('github-importer/provider-repo-count/owned/user_id', 3)
- end
-
- it 'returns repository count from cache' do
- expect(Gitlab::GithubImport::Client)
- .to receive(:new).with(access_token).and_return(client_stub)
- expect(client_stub)
- .not_to receive(:count_repos_by_relation_type_graphql)
- .with({ relation_type: 'owned' })
- expect(client.count_repos_by('owned', 'user_id')).to eq(3)
- end
- end
-
- context 'when value is not cached' do
- it 'returns repository count' do
- expect(Gitlab::GithubImport::Client)
- .to receive(:new).with(access_token).and_return(client_stub)
- expect(client_stub)
- .to receive(:count_repos_by_relation_type_graphql)
- .with({ relation_type: 'owned' }).and_return(client_response)
- expect(Gitlab::Cache::Import::Caching)
- .to receive(:write)
- .with('github-importer/provider-repo-count/owned/user_id', 1, timeout: 5.minutes)
- .and_call_original
- expect(client.count_repos_by('owned', 'user_id')).to eq(1)
- end
+ it 'returns repository count from cache' do
+ expect(Gitlab::GithubImport::Client)
+ .to receive(:new).with(access_token).and_return(client_stub)
+ expect(client_stub)
+ .not_to receive(:count_repos_by_relation_type_graphql)
+ .with({ relation_type: 'owned' })
+ expect(client.count_repos_by('owned', 'user_id')).to eq(3)
end
end
- context 'when remove_legacy_github_client FF is disabled' do
- let(:client_stub) { instance_double(Gitlab::LegacyGithubImport::Client) }
-
- before do
- stub_feature_flags(remove_legacy_github_client: false)
- end
-
- it 'returns nil' do
- expect(Gitlab::LegacyGithubImport::Client)
- .to receive(:new).with(access_token, client_options).and_return(client_stub)
- expect(client.count_repos_by('owned', 'user_id')).to be_nil
+ context 'when value is not cached' do
+ it 'returns repository count' do
+ expect(Gitlab::GithubImport::Client)
+ .to receive(:new).with(access_token).and_return(client_stub)
+ expect(client_stub)
+ .to receive(:count_repos_by_relation_type_graphql)
+ .with({ relation_type: 'owned' }).and_return(client_response)
+ expect(Gitlab::Cache::Import::Caching)
+ .to receive(:write)
+ .with('github-importer/provider-repo-count/owned/user_id', 1, timeout: 5.minutes)
+ .and_call_original
+ expect(client.count_repos_by('owned', 'user_id')).to eq(1)
end
end
end
diff --git a/spec/lib/gitlab/github_import/importer/attachments/issues_importer_spec.rb b/spec/lib/gitlab/github_import/importer/attachments/issues_importer_spec.rb
index 7890561bf2d..b44f1ec85f3 100644
--- a/spec/lib/gitlab/github_import/importer/attachments/issues_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/attachments/issues_importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::Importer::Attachments::IssuesImporter do
+RSpec.describe Gitlab::GithubImport::Importer::Attachments::IssuesImporter, feature_category: :importers do
subject(:importer) { described_class.new(project, client) }
let_it_be(:project) { create(:project) }
@@ -17,6 +17,7 @@ RSpec.describe Gitlab::GithubImport::Importer::Attachments::IssuesImporter do
let(:importer_attrs) { [instance_of(Gitlab::GithubImport::Representation::NoteText), project, client] }
it 'imports each project issue attachments' do
+ expect(project.issues).to receive(:id_not_in).with([]).and_return(project.issues)
expect(project.issues).to receive(:select).with(:id, :description, :iid).and_call_original
expect_next_instances_of(
@@ -32,6 +33,7 @@ RSpec.describe Gitlab::GithubImport::Importer::Attachments::IssuesImporter do
it "doesn't import this issue attachments" do
importer.mark_as_imported(issue_1)
+ expect(project.issues).to receive(:id_not_in).with([issue_1.id.to_s]).and_call_original
expect_next_instance_of(
Gitlab::GithubImport::Importer::NoteAttachmentsImporter, *importer_attrs
) do |note_attachments_importer|
diff --git a/spec/lib/gitlab/github_import/importer/attachments/merge_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/attachments/merge_requests_importer_spec.rb
index e5aa17dd81e..381cb17bb52 100644
--- a/spec/lib/gitlab/github_import/importer/attachments/merge_requests_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/attachments/merge_requests_importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::Importer::Attachments::MergeRequestsImporter do
+RSpec.describe Gitlab::GithubImport::Importer::Attachments::MergeRequestsImporter, feature_category: :importers do
subject(:importer) { described_class.new(project, client) }
let_it_be(:project) { create(:project) }
@@ -17,6 +17,7 @@ RSpec.describe Gitlab::GithubImport::Importer::Attachments::MergeRequestsImporte
let(:importer_attrs) { [instance_of(Gitlab::GithubImport::Representation::NoteText), project, client] }
it 'imports each project merge request attachments' do
+ expect(project.merge_requests).to receive(:id_not_in).with([]).and_return(project.merge_requests)
expect(project.merge_requests).to receive(:select).with(:id, :description, :iid).and_call_original
expect_next_instances_of(
@@ -32,6 +33,7 @@ RSpec.describe Gitlab::GithubImport::Importer::Attachments::MergeRequestsImporte
it "doesn't import this merge request attachments" do
importer.mark_as_imported(merge_request_1)
+ expect(project.merge_requests).to receive(:id_not_in).with([merge_request_1.id.to_s]).and_call_original
expect_next_instance_of(
Gitlab::GithubImport::Importer::NoteAttachmentsImporter, *importer_attrs
) do |note_attachments_importer|
diff --git a/spec/lib/gitlab/github_import/importer/attachments/notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/attachments/notes_importer_spec.rb
index 7ed353e1b71..5b3ad032702 100644
--- a/spec/lib/gitlab/github_import/importer/attachments/notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/attachments/notes_importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::Importer::Attachments::NotesImporter do
+RSpec.describe Gitlab::GithubImport::Importer::Attachments::NotesImporter, feature_category: :importers do
subject(:importer) { described_class.new(project, client) }
let_it_be(:project) { create(:project) }
@@ -18,6 +18,7 @@ RSpec.describe Gitlab::GithubImport::Importer::Attachments::NotesImporter do
let(:importer_attrs) { [instance_of(Gitlab::GithubImport::Representation::NoteText), project, client] }
it 'imports each project user note' do
+ expect(project.notes).to receive(:id_not_in).with([]).and_call_original
expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).to receive(:new)
.with(*importer_attrs).twice.and_return(importer_stub)
expect(importer_stub).to receive(:execute).twice
@@ -29,6 +30,7 @@ RSpec.describe Gitlab::GithubImport::Importer::Attachments::NotesImporter do
it "doesn't import this note" do
importer.mark_as_imported(note_1)
+ expect(project.notes).to receive(:id_not_in).with([note_1.id.to_s]).and_call_original
expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).to receive(:new)
.with(*importer_attrs).once.and_return(importer_stub)
expect(importer_stub).to receive(:execute).once
diff --git a/spec/lib/gitlab/github_import/importer/attachments/releases_importer_spec.rb b/spec/lib/gitlab/github_import/importer/attachments/releases_importer_spec.rb
index e1b009c3eeb..c1c19c40afb 100644
--- a/spec/lib/gitlab/github_import/importer/attachments/releases_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/attachments/releases_importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::Importer::Attachments::ReleasesImporter do
+RSpec.describe Gitlab::GithubImport::Importer::Attachments::ReleasesImporter, feature_category: :importers do
subject(:importer) { described_class.new(project, client) }
let_it_be(:project) { create(:project) }
@@ -17,6 +17,7 @@ RSpec.describe Gitlab::GithubImport::Importer::Attachments::ReleasesImporter do
let(:importer_attrs) { [instance_of(Gitlab::GithubImport::Representation::NoteText), project, client] }
it 'imports each project release' do
+ expect(project.releases).to receive(:id_not_in).with([]).and_return(project.releases)
expect(project.releases).to receive(:select).with(:id, :description, :tag).and_call_original
expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).to receive(:new)
@@ -30,6 +31,7 @@ RSpec.describe Gitlab::GithubImport::Importer::Attachments::ReleasesImporter do
it "doesn't import this release" do
importer.mark_as_imported(release_1)
+ expect(project.releases).to receive(:id_not_in).with([release_1.id.to_s]).and_call_original
expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).to receive(:new)
.with(*importer_attrs).once.and_return(importer_stub)
expect(importer_stub).to receive(:execute).once
diff --git a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
index 0f35c7ee0dc..7668451ad4e 100644
--- a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter, :aggregate_failures do
+RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter, :aggregate_failures, feature_category: :importers do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
@@ -80,17 +80,6 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter, :aggregate_fail
expect(note.author_id).to eq(project.creator_id)
expect(note.note).to eq("*Created by: #{user.username}*\n\nHello")
end
-
- it 'does not import the note when a foreign key error is raised' do
- stub_user_finder(project.creator_id, false)
-
- expect(ApplicationRecord)
- .to receive(:legacy_bulk_insert)
- .and_raise(ActiveRecord::InvalidForeignKey, 'invalid foreign key')
-
- expect { subject.execute }
- .not_to change(LegacyDiffNote, :count)
- end
end
describe '#execute' do
@@ -143,6 +132,7 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter, :aggregate_fail
expect(note.noteable_type).to eq('MergeRequest')
expect(note.noteable_id).to eq(merge_request.id)
expect(note.project_id).to eq(project.id)
+ expect(note.namespace_id).to eq(project.project_namespace_id)
expect(note.author_id).to eq(user.id)
expect(note.system).to eq(false)
expect(note.discussion_id).to eq(discussion_id)
diff --git a/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
index bf2ffda3bf1..1c453436f9f 100644
--- a/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redis_cache do
+RSpec.describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redis_cache, feature_category: :importers do
let_it_be(:work_item_type_id) { ::WorkItems::Type.default_issue_type.id }
let(:project) { create(:project) }
@@ -77,6 +77,27 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redi
importer.execute
end
+
+ it 'caches the created issue ID even if importer later fails' do
+ error = StandardError.new('mocked error')
+
+ allow_next_instance_of(described_class) do |importer|
+ allow(importer)
+ .to receive(:create_issue)
+ .and_return(10)
+ allow(importer)
+ .to receive(:create_assignees)
+ .and_raise(error)
+ end
+
+ expect_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |finder|
+ expect(finder)
+ .to receive(:cache_database_id)
+ .with(10)
+ end
+
+ expect { importer.execute }.to raise_error(error)
+ end
end
describe '#create_issue' do
@@ -162,21 +183,6 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redi
end
end
- context 'when the import fails due to a foreign key error' do
- it 'does not raise any errors' do
- allow(importer.user_finder)
- .to receive(:author_id_for)
- .with(issue)
- .and_return([user.id, true])
-
- expect(importer)
- .to receive(:insert_and_return_id)
- .and_raise(ActiveRecord::InvalidForeignKey, 'invalid foreign key')
-
- expect { importer.create_issue }.not_to raise_error
- end
- end
-
it 'produces a valid Issue' do
allow(importer.user_finder)
.to receive(:author_id_for)
diff --git a/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb b/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb
index fc8d9cee066..0328a36b646 100644
--- a/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb
@@ -50,13 +50,12 @@ feature_category: :importers do
label = { id: 1, name: 'bug,bug', color: 'ffffff' }
expect(importer).to receive(:each_label).and_return([label])
- expect(Gitlab::Import::Logger).to receive(:error)
+ expect(Gitlab::GithubImport::Logger).to receive(:error)
.with(
- import_type: :github,
project_id: project.id,
importer: described_class.name,
message: ['Title is invalid'],
- github_identifiers: { title: 'bug,bug', object_type: :label }
+ external_identifiers: { title: 'bug,bug', object_type: :label }
)
rows, errors = importer.build_labels
diff --git a/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb b/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb
index cf44d510c80..fa7283d210b 100644
--- a/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb
@@ -80,13 +80,12 @@ RSpec.describe Gitlab::GithubImport::Importer::MilestonesImporter, :clean_gitlab
.to receive(:each_milestone)
.and_return([milestone])
- expect(Gitlab::Import::Logger).to receive(:error)
+ expect(Gitlab::GithubImport::Logger).to receive(:error)
.with(
- import_type: :github,
project_id: project.id,
importer: described_class.name,
message: ["Title can't be blank"],
- github_identifiers: { iid: 2, object_type: :milestone, title: nil }
+ external_identifiers: { iid: 2, object_type: :milestone, title: nil }
)
rows, errors = importer.build_milestones
diff --git a/spec/lib/gitlab/github_import/importer/note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
index 5ac50578b6a..91311a8e90f 100644
--- a/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::Importer::NoteImporter do
+RSpec.describe Gitlab::GithubImport::Importer::NoteImporter, feature_category: :importers do
let(:client) { double(:client) }
let(:project) { create(:project) }
let(:user) { create(:user) }
@@ -12,13 +12,13 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteImporter do
let(:github_note) do
Gitlab::GithubImport::Representation::Note.new(
+ note_id: 100,
noteable_id: 1,
noteable_type: 'Issue',
author: Gitlab::GithubImport::Representation::User.new(id: 4, login: 'alice'),
note: note_body,
created_at: created_at,
- updated_at: updated_at,
- github_id: 1
+ updated_at: updated_at
)
end
@@ -50,6 +50,7 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteImporter do
noteable_type: 'Issue',
noteable_id: issue_row.id,
project_id: project.id,
+ namespace_id: project.project_namespace_id,
author_id: user.id,
note: 'This is my note',
discussion_id: match(/\A[0-9a-f]{40}\z/),
@@ -81,6 +82,7 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteImporter do
noteable_type: 'Issue',
noteable_id: issue_row.id,
project_id: project.id,
+ namespace_id: project.project_namespace_id,
author_id: project.creator_id,
note: "*Created by: alice*\n\nThis is my note",
discussion_id: match(/\A[0-9a-f]{40}\z/),
@@ -126,34 +128,20 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteImporter do
expect { importer.execute }.to raise_error(ActiveRecord::RecordInvalid)
end
end
- end
-
- context 'when the noteable does not exist' do
- it 'does not import the note' do
- expect(ApplicationRecord).not_to receive(:legacy_bulk_insert)
-
- importer.execute
- end
- end
-
- context 'when the import fails due to a foreign key error' do
- it 'does not raise any errors' do
- issue_row = create(:issue, project: project, iid: 1)
-
- allow(importer)
- .to receive(:find_noteable_id)
- .and_return(issue_row.id)
- allow(importer.user_finder)
- .to receive(:author_id_for)
- .with(github_note)
- .and_return([user.id, true])
-
- expect(ApplicationRecord)
- .to receive(:legacy_bulk_insert)
- .and_raise(ActiveRecord::InvalidForeignKey, 'invalid foreign key')
+ context 'when noteble_id can not be found' do
+ before do
+ allow(importer)
+ .to receive(:find_noteable_id)
+ .and_return(nil)
+ end
- expect { importer.execute }.not_to raise_error
+ it 'raises NoteableNotFound' do
+ expect { importer.execute }.to raise_error(
+ ::Gitlab::GithubImport::Exceptions::NoteableNotFound,
+ 'Error to find noteable_id for note'
+ )
+ end
end
end
@@ -173,13 +161,6 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteImporter do
expect(project.notes.take).to be_valid
end
-
- # rubocop:disable RSpec/AnyInstanceOf
- it 'skips markdown field cache callback' do
- expect_any_instance_of(Note).not_to receive(:refresh_markdown_cache)
- importer.execute
- end
- # rubocop:enable RSpec/AnyInstanceOf
end
describe '#find_noteable_id' do
diff --git a/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
index dd73b6879e0..52c91d91eff 100644
--- a/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redis_cache do
+RSpec.describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redis_cache, feature_category: :importers do
let(:project) { create(:project, :repository) }
let(:client) { double(:client) }
let(:user) { create(:user) }
@@ -42,9 +42,9 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitla
let(:importer) { described_class.new(pull_request, project, client) }
describe '#execute' do
- it 'imports the pull request' do
- mr = double(:merge_request, id: 10, merged?: false)
+ let(:mr) { double(:merge_request, id: 10, merged?: false) }
+ it 'imports the pull request' do
expect(importer)
.to receive(:create_merge_request)
.and_return([mr, false])
@@ -63,6 +63,27 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitla
importer.execute
end
+
+ it 'caches the created MR ID even if importer later fails' do
+ error = StandardError.new('mocked error')
+
+ allow_next_instance_of(described_class) do |importer|
+ allow(importer)
+ .to receive(:create_merge_request)
+ .and_return([mr, false])
+ allow(importer)
+ .to receive(:set_merge_request_assignees)
+ .and_raise(error)
+ end
+
+ expect_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |finder|
+ expect(finder)
+ .to receive(:cache_database_id)
+ .with(mr.id)
+ end
+
+ expect { importer.execute }.to raise_error(error)
+ end
end
describe '#create_merge_request' do
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb
index 9e9d6c6e9cd..d0145ba1120 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb
@@ -54,6 +54,9 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewRequestsImpor
expect(note_attachments_importer).to receive(:execute)
end
+ expect(Gitlab::GithubImport::ObjectCounter)
+ .to receive(:increment).twice.with(project, :pull_request_review_request, :fetched)
+
importer.sequential_import
end
@@ -72,6 +75,9 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewRequestsImpor
expect(note_attachments_importer).to receive(:execute)
end
+ expect(Gitlab::GithubImport::ObjectCounter)
+ .to receive(:increment).once.with(project, :pull_request_review_request, :fetched)
+
importer.sequential_import
end
end
@@ -115,6 +121,9 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewRequestsImpor
expect(Gitlab::GithubImport::PullRequests::ImportReviewRequestWorker)
.to receive(:perform_in).with(1.second, *expected_worker_payload.second)
+ expect(Gitlab::GithubImport::ObjectCounter)
+ .to receive(:increment).twice.with(project, :pull_request_review_request, :fetched)
+
importer.parallel_import
end
@@ -130,6 +139,9 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewRequestsImpor
expect(Gitlab::GithubImport::PullRequests::ImportReviewRequestWorker)
.to receive(:perform_in).with(1.second, *expected_worker_payload.second)
+ expect(Gitlab::GithubImport::ObjectCounter)
+ .to receive(:increment).once.with(project, :pull_request_review_request, :fetched)
+
importer.parallel_import
end
end
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
index eddde272d2c..cfd75fba849 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
@@ -149,7 +149,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter, feature_cat
it 'updates the repository' do
importer = described_class.new(project, client)
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect_next_instance_of(Gitlab::GithubImport::Logger) do |logger|
expect(logger)
.to receive(:info)
.with(an_instance_of(Hash))
diff --git a/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb b/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb
index a3d20af22c7..1cfbe8e20ae 100644
--- a/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb
@@ -148,7 +148,7 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter, feature_categor
expect(errors[0][:validation_errors].full_messages).to match_array(
['Description is too long (maximum is 1000000 characters)']
)
- expect(errors[0][:github_identifiers]).to eq({ tag: '1.0', object_type: :release })
+ expect(errors[0][:external_identifiers]).to eq({ tag: '1.0', object_type: :release })
end
end
diff --git a/spec/lib/gitlab/github_import/settings_spec.rb b/spec/lib/gitlab/github_import/settings_spec.rb
index d670aaea482..de497bc6689 100644
--- a/spec/lib/gitlab/github_import/settings_spec.rb
+++ b/spec/lib/gitlab/github_import/settings_spec.rb
@@ -62,17 +62,20 @@ RSpec.describe Gitlab::GithubImport::Settings, feature_category: :importers do
collaborators_import: false,
foo: :bar
},
+ timeout_strategy: "optimistic",
additional_access_tokens: %w[foo bar]
}.stringify_keys
end
- it 'puts optional steps & access tokens into projects import_data' do
- project.create_or_update_import_data(credentials: { user: 'token' })
+ it 'puts optional steps, timeout strategy & access tokens into projects import_data' do
+ project.build_or_assign_import_data(credentials: { user: 'token' })
settings.write(data_input)
expect(project.import_data.data['optional_stages'])
.to eq optional_stages.stringify_keys
+ expect(project.import_data.data['timeout_strategy'])
+ .to eq("optimistic")
expect(project.import_data.credentials.fetch(:additional_access_tokens))
.to eq(data_input['additional_access_tokens'])
end
@@ -80,7 +83,7 @@ RSpec.describe Gitlab::GithubImport::Settings, feature_category: :importers do
describe '#enabled?' do
it 'returns is enabled or not specific optional stage' do
- project.create_or_update_import_data(data: { optional_stages: optional_stages })
+ project.build_or_assign_import_data(data: { optional_stages: optional_stages })
expect(settings.enabled?(:single_endpoint_issue_events_import)).to eq true
expect(settings.enabled?(:single_endpoint_notes_import)).to eq false
@@ -91,7 +94,7 @@ RSpec.describe Gitlab::GithubImport::Settings, feature_category: :importers do
describe '#disabled?' do
it 'returns is disabled or not specific optional stage' do
- project.create_or_update_import_data(data: { optional_stages: optional_stages })
+ project.build_or_assign_import_data(data: { optional_stages: optional_stages })
expect(settings.disabled?(:single_endpoint_issue_events_import)).to eq false
expect(settings.disabled?(:single_endpoint_notes_import)).to eq true
diff --git a/spec/lib/gitlab/gon_helper_spec.rb b/spec/lib/gitlab/gon_helper_spec.rb
index fc722402917..e4684597ddf 100644
--- a/spec/lib/gitlab/gon_helper_spec.rb
+++ b/spec/lib/gitlab/gon_helper_spec.rb
@@ -206,6 +206,7 @@ RSpec.describe Gitlab::GonHelper do
context 'when feature flag is false' do
before do
stub_feature_flags(browsersdk_tracking: false)
+ stub_feature_flags(gl_analytics_tracking: false)
end
it "doesn't set the analytics_url and analytics_id" do
diff --git a/spec/lib/gitlab/graphql/deprecations/deprecation_spec.rb b/spec/lib/gitlab/graphql/deprecations/deprecation_spec.rb
index 55650b0480e..4db9c1da418 100644
--- a/spec/lib/gitlab/graphql/deprecations/deprecation_spec.rb
+++ b/spec/lib/gitlab/graphql/deprecations/deprecation_spec.rb
@@ -175,6 +175,23 @@ RSpec.describe ::Gitlab::Graphql::Deprecations::Deprecation, feature_category: :
expect(desc).to be_nil
end
+
+ it 'strips any leading or trailing spaces' do
+ desc = deprecation.edit_description(" Some description. \n")
+
+ expect(desc).to eq('Some description. Deprecated in 10.10: This was renamed.')
+ end
+
+ it 'strips any leading or trailing spaces in heredoc string literals' do
+ description = <<~DESC
+ Lorem ipsum
+ dolor sit amet.
+ DESC
+
+ desc = deprecation.edit_description(description)
+
+ expect(desc).to eq("Lorem ipsum\ndolor sit amet. Deprecated in 10.10: This was renamed.")
+ end
end
describe '#original_description' do
diff --git a/spec/lib/gitlab/graphql/pagination/array_connection_spec.rb b/spec/lib/gitlab/graphql/pagination/array_connection_spec.rb
index 03cf53bb990..28885d0379b 100644
--- a/spec/lib/gitlab/graphql/pagination/array_connection_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/array_connection_spec.rb
@@ -3,9 +3,10 @@
require 'spec_helper'
RSpec.describe ::Gitlab::Graphql::Pagination::ArrayConnection do
+ let(:context) { instance_double(GraphQL::Query::Context, schema: GitlabSchema) }
let(:nodes) { (1..10) }
- subject(:connection) { described_class.new(nodes, max_page_size: 100) }
+ subject(:connection) { described_class.new(nodes, context: context, max_page_size: 100) }
it_behaves_like 'a connection with collection methods'
diff --git a/spec/lib/gitlab/graphql/pagination/externally_paginated_array_connection_spec.rb b/spec/lib/gitlab/graphql/pagination/externally_paginated_array_connection_spec.rb
index d2475d1edb9..e3ae6732ebb 100644
--- a/spec/lib/gitlab/graphql/pagination/externally_paginated_array_connection_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/externally_paginated_array_connection_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Graphql::Pagination::ExternallyPaginatedArrayConnection do
+ let(:context) { instance_double(GraphQL::Query::Context, schema: GitlabSchema) }
let(:prev_cursor) { 1 }
let(:next_cursor) { 6 }
let(:values) { [2, 3, 4, 5] }
@@ -10,7 +11,7 @@ RSpec.describe Gitlab::Graphql::Pagination::ExternallyPaginatedArrayConnection d
let(:arguments) { {} }
subject(:connection) do
- described_class.new(all_nodes, **{ max_page_size: values.size }.merge(arguments))
+ described_class.new(all_nodes, **{ context: context, max_page_size: values.size }.merge(arguments))
end
it_behaves_like 'a connection with collection methods'
diff --git a/spec/lib/gitlab/graphql/pagination/offset_active_record_relation_connection_spec.rb b/spec/lib/gitlab/graphql/pagination/offset_active_record_relation_connection_spec.rb
index 1ca7c1c3c69..a8babaf8d3b 100644
--- a/spec/lib/gitlab/graphql/pagination/offset_active_record_relation_connection_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/offset_active_record_relation_connection_spec.rb
@@ -3,18 +3,20 @@
require 'spec_helper'
RSpec.describe Gitlab::Graphql::Pagination::OffsetActiveRecordRelationConnection do
+ let(:context) { instance_double(GraphQL::Query::Context, schema: GitlabSchema) }
+
it 'subclasses from GraphQL::Relay::RelationConnection' do
expect(described_class.superclass).to eq GraphQL::Pagination::ActiveRecordRelationConnection
end
it_behaves_like 'a connection with collection methods' do
- let(:connection) { described_class.new(Project.all) }
+ let(:connection) { described_class.new(Project.all, context: context) }
end
it_behaves_like 'a redactable connection' do
let_it_be(:users) { create_list(:user, 2) }
- let(:connection) { described_class.new(User.all, max_page_size: 10) }
+ let(:connection) { described_class.new(User.all, context: context, max_page_size: 10) }
let(:unwanted) { users.second }
end
end
diff --git a/spec/lib/gitlab/graphql/timeout_spec.rb b/spec/lib/gitlab/graphql/timeout_spec.rb
index 999840019d2..fd27def6973 100644
--- a/spec/lib/gitlab/graphql/timeout_spec.rb
+++ b/spec/lib/gitlab/graphql/timeout_spec.rb
@@ -8,10 +8,9 @@ RSpec.describe Gitlab::Graphql::Timeout do
end
it 'sends the error to our GraphQL logger' do
- parent_type = double(graphql_name: 'parent_type')
- field = double(graphql_name: 'field')
+ field = double(path: 'parent_type.field')
query = double(query_string: 'query_string', provided_variables: 'provided_variables')
- error = GraphQL::Schema::Timeout::TimeoutError.new(parent_type, field)
+ error = GraphQL::Schema::Timeout::TimeoutError.new(field)
expect(Gitlab::GraphqlLogger)
.to receive(:error)
diff --git a/spec/lib/gitlab/group_search_results_spec.rb b/spec/lib/gitlab/group_search_results_spec.rb
index 314759fb8a4..84a2a0549d5 100644
--- a/spec/lib/gitlab/group_search_results_spec.rb
+++ b/spec/lib/gitlab/group_search_results_spec.rb
@@ -59,7 +59,7 @@ RSpec.describe Gitlab::GroupSearchResults, feature_category: :global_search do
let(:query) { 'foo' }
let(:scope) { 'milestones' }
- include_examples 'search results filtered by archived', 'search_milestones_hide_archived_projects'
+ include_examples 'search results filtered by archived'
end
describe '#projects' do
diff --git a/spec/lib/gitlab/hashed_storage/migrator_spec.rb b/spec/lib/gitlab/hashed_storage/migrator_spec.rb
deleted file mode 100644
index f4f15cab05a..00000000000
--- a/spec/lib/gitlab/hashed_storage/migrator_spec.rb
+++ /dev/null
@@ -1,247 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::HashedStorage::Migrator, :redis do
- describe '#bulk_schedule_migration' do
- it 'schedules job to HashedStorage::MigratorWorker' do
- Sidekiq::Testing.fake! do
- expect { subject.bulk_schedule_migration(start: 1, finish: 5) }.to change(HashedStorage::MigratorWorker.jobs, :size).by(1)
- end
- end
- end
-
- describe '#bulk_schedule_rollback' do
- it 'schedules job to HashedStorage::RollbackerWorker' do
- Sidekiq::Testing.fake! do
- expect { subject.bulk_schedule_rollback(start: 1, finish: 5) }.to change(HashedStorage::RollbackerWorker.jobs, :size).by(1)
- end
- end
- end
-
- describe '#bulk_migrate' do
- let(:projects) { create_list(:project, 2, :legacy_storage, :empty_repo) }
- let(:ids) { projects.map(&:id) }
-
- it 'enqueue jobs to HashedStorage::ProjectMigrateWorker' do
- Sidekiq::Testing.fake! do
- expect { subject.bulk_migrate(start: ids.min, finish: ids.max) }.to change(HashedStorage::ProjectMigrateWorker.jobs, :size).by(2)
- end
- end
-
- it 'rescues and log exceptions' do
- allow_any_instance_of(Project).to receive(:migrate_to_hashed_storage!).and_raise(StandardError)
- expect { subject.bulk_migrate(start: ids.min, finish: ids.max) }.not_to raise_error
- end
-
- it 'delegates each project in specified range to #migrate' do
- projects.each do |project|
- expect(subject).to receive(:migrate).with(project)
- end
-
- subject.bulk_migrate(start: ids.min, finish: ids.max)
- end
-
- it 'has all projects migrated and set as writable', :sidekiq_might_not_need_inline do
- perform_enqueued_jobs do
- subject.bulk_migrate(start: ids.min, finish: ids.max)
- end
-
- projects.each do |project|
- project.reload
-
- expect(project.hashed_storage?(:repository)).to be_truthy
- expect(project.repository_read_only?).to be_falsey
- end
- end
- end
-
- describe '#bulk_rollback' do
- let(:projects) { create_list(:project, 2, :empty_repo) }
- let(:ids) { projects.map(&:id) }
-
- it 'enqueue jobs to HashedStorage::ProjectRollbackWorker' do
- Sidekiq::Testing.fake! do
- expect { subject.bulk_rollback(start: ids.min, finish: ids.max) }.to change(HashedStorage::ProjectRollbackWorker.jobs, :size).by(2)
- end
- end
-
- it 'rescues and log exceptions' do
- allow_any_instance_of(Project).to receive(:rollback_to_legacy_storage!).and_raise(StandardError)
- expect { subject.bulk_rollback(start: ids.min, finish: ids.max) }.not_to raise_error
- end
-
- it 'delegates each project in specified range to #rollback' do
- projects.each do |project|
- expect(subject).to receive(:rollback).with(project)
- end
-
- subject.bulk_rollback(start: ids.min, finish: ids.max)
- end
-
- it 'has all projects rolledback and set as writable', :sidekiq_might_not_need_inline do
- perform_enqueued_jobs do
- subject.bulk_rollback(start: ids.min, finish: ids.max)
- end
-
- projects.each do |project|
- project.reload
-
- expect(project.legacy_storage?).to be_truthy
- expect(project.repository_read_only?).to be_falsey
- end
- end
- end
-
- describe '#migrate' do
- let(:project) { create(:project, :legacy_storage, :empty_repo) }
-
- it 'enqueues project migration job' do
- Sidekiq::Testing.fake! do
- expect { subject.migrate(project) }.to change(HashedStorage::ProjectMigrateWorker.jobs, :size).by(1)
- end
- end
-
- it 'rescues and log exceptions' do
- allow(project).to receive(:migrate_to_hashed_storage!).and_raise(StandardError)
-
- expect { subject.migrate(project) }.not_to raise_error
- end
-
- it 'migrates project storage', :sidekiq_might_not_need_inline do
- perform_enqueued_jobs do
- subject.migrate(project)
- end
-
- expect(project.reload.hashed_storage?(:attachments)).to be_truthy
- end
-
- it 'has migrated project set as writable' do
- perform_enqueued_jobs do
- subject.migrate(project)
- end
-
- expect(project.reload.repository_read_only?).to be_falsey
- end
-
- context 'when project is already on hashed storage' do
- let(:project) { create(:project, :empty_repo) }
-
- it 'doesnt enqueue any migration job' do
- Sidekiq::Testing.fake! do
- expect { subject.migrate(project) }.not_to change(HashedStorage::ProjectMigrateWorker.jobs, :size)
- end
- end
-
- it 'returns false' do
- expect(subject.migrate(project)).to be_falsey
- end
- end
- end
-
- describe '#rollback' do
- let(:project) { create(:project, :empty_repo) }
-
- it 'enqueues project rollback job' do
- Sidekiq::Testing.fake! do
- expect { subject.rollback(project) }.to change(HashedStorage::ProjectRollbackWorker.jobs, :size).by(1)
- end
- end
-
- it 'rescues and log exceptions' do
- allow(project).to receive(:rollback_to_hashed_storage!).and_raise(StandardError)
-
- expect { subject.rollback(project) }.not_to raise_error
- end
-
- it 'rolls-back project storage', :sidekiq_might_not_need_inline do
- perform_enqueued_jobs do
- subject.rollback(project)
- end
-
- expect(project.reload.legacy_storage?).to be_truthy
- end
-
- it 'has rolled-back project set as writable' do
- perform_enqueued_jobs do
- subject.rollback(project)
- end
-
- expect(project.reload.repository_read_only?).to be_falsey
- end
-
- context 'when project is already on legacy storage' do
- let(:project) { create(:project, :legacy_storage, :empty_repo) }
-
- it 'doesnt enqueue any rollback job' do
- Sidekiq::Testing.fake! do
- expect { subject.rollback(project) }.not_to change(HashedStorage::ProjectRollbackWorker.jobs, :size)
- end
- end
-
- it 'returns false' do
- expect(subject.rollback(project)).to be_falsey
- end
- end
- end
-
- describe 'migration_pending?' do
- let_it_be(:project) { create(:project, :empty_repo) }
-
- it 'returns true when there are MigratorWorker jobs scheduled' do
- Sidekiq::Testing.disable! do
- ::HashedStorage::MigratorWorker.perform_async(1, 5)
-
- expect(subject.migration_pending?).to be_truthy
- end
- end
-
- it 'returns true when there are ProjectMigrateWorker jobs scheduled' do
- Sidekiq::Testing.disable! do
- ::HashedStorage::ProjectMigrateWorker.perform_async(1)
-
- expect(subject.migration_pending?).to be_truthy
- end
- end
-
- it 'returns false when queues are empty' do
- expect(subject.migration_pending?).to be_falsey
- end
- end
-
- describe 'rollback_pending?' do
- let_it_be(:project) { create(:project, :empty_repo) }
-
- it 'returns true when there are RollbackerWorker jobs scheduled' do
- Sidekiq::Testing.disable! do
- ::HashedStorage::RollbackerWorker.perform_async(1, 5)
-
- expect(subject.rollback_pending?).to be_truthy
- end
- end
-
- it 'returns true when there are jobs scheduled' do
- Sidekiq::Testing.disable! do
- ::HashedStorage::ProjectRollbackWorker.perform_async(1)
-
- expect(subject.rollback_pending?).to be_truthy
- end
- end
-
- it 'returns false when queues are empty' do
- expect(subject.rollback_pending?).to be_falsey
- end
- end
-
- describe 'abort_rollback!' do
- let_it_be(:project) { create(:project, :empty_repo) }
-
- it 'removes any rollback related scheduled job' do
- Sidekiq::Testing.disable! do
- ::HashedStorage::RollbackerWorker.perform_async(1, 5)
-
- expect { subject.abort_rollback! }.to change { subject.rollback_pending? }.from(true).to(false)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/http_spec.rb b/spec/lib/gitlab/http_spec.rb
index 9d89167bf81..a9e0c6a3b92 100644
--- a/spec/lib/gitlab/http_spec.rb
+++ b/spec/lib/gitlab/http_spec.rb
@@ -2,441 +2,102 @@
require 'spec_helper'
-RSpec.describe Gitlab::HTTP do
- include StubRequests
-
- let(:default_options) { described_class::DEFAULT_TIMEOUT_OPTIONS }
-
- context 'when allow_local_requests' do
- it 'sends the request to the correct URI' do
- stub_full_request('https://example.org:8080', ip_address: '8.8.8.8').to_return(status: 200)
-
- described_class.get('https://example.org:8080', allow_local_requests: false)
-
- expect(WebMock).to have_requested(:get, 'https://8.8.8.8:8080').once
- end
+RSpec.describe Gitlab::HTTP, feature_category: :shared do
+ let(:default_options) do
+ {
+ allow_local_requests: false,
+ deny_all_requests_except_allowed: false,
+ dns_rebinding_protection_enabled: true,
+ outbound_local_requests_allowlist: [],
+ silent_mode_enabled: false
+ }
end
- context 'when not allow_local_requests' do
- it 'sends the request to the correct URI' do
- stub_full_request('https://example.org:8080')
-
- described_class.get('https://example.org:8080', allow_local_requests: true)
-
- expect(WebMock).to have_requested(:get, 'https://8.8.8.9:8080').once
- end
- end
-
- context 'when reading the response is too slow' do
- before_all do
- # Override Net::HTTP to add a delay between sending each response chunk
- mocked_http = Class.new(Net::HTTP) do
- def request(*)
- super do |response|
- response.instance_eval do
- def read_body(*)
- mock_stream = @body.split(' ')
- mock_stream.each do |fragment|
- sleep 0.002.seconds
-
- yield fragment if block_given?
- end
-
- @body
- end
- end
-
- yield response if block_given?
-
- response
- end
- end
- end
-
- @original_net_http = Net.send(:remove_const, :HTTP)
- @webmock_net_http = WebMock::HttpLibAdapters::NetHttpAdapter.instance_variable_get(:@webMockNetHTTP)
-
- Net.send(:const_set, :HTTP, mocked_http)
- WebMock::HttpLibAdapters::NetHttpAdapter.instance_variable_set(:@webMockNetHTTP, mocked_http)
+ describe '.get' do
+ it 'calls Gitlab::HTTP_V2.get with default options' do
+ expect(Gitlab::HTTP_V2).to receive(:get).with('/path', default_options)
- # Reload Gitlab::NetHttpAdapter
- Gitlab.send(:remove_const, :NetHttpAdapter)
- load "#{Rails.root}/lib/gitlab/net_http_adapter.rb"
+ described_class.get('/path')
end
- before do
- stub_const("#{described_class}::DEFAULT_READ_TOTAL_TIMEOUT", 0.001.seconds)
-
- WebMock.stub_request(:post, /.*/).to_return do
- { body: "chunk-1 chunk-2", status: 200 }
- end
- end
-
- after(:all) do
- Net.send(:remove_const, :HTTP)
- Net.send(:const_set, :HTTP, @original_net_http)
- WebMock::HttpLibAdapters::NetHttpAdapter.instance_variable_set(:@webMockNetHTTP, @webmock_net_http)
-
- # Reload Gitlab::NetHttpAdapter
- Gitlab.send(:remove_const, :NetHttpAdapter)
- load "#{Rails.root}/lib/gitlab/net_http_adapter.rb"
- end
-
- let(:options) { {} }
-
- subject(:request_slow_responder) { described_class.post('http://example.org', **options) }
-
- it 'raises an error' do
- expect { request_slow_responder }.to raise_error(Gitlab::HTTP::ReadTotalTimeout, /Request timed out after ?([0-9]*[.])?[0-9]+ seconds/)
- end
-
- context 'and timeout option is greater than DEFAULT_READ_TOTAL_TIMEOUT' do
- let(:options) { { timeout: 10.seconds } }
-
- it 'does not raise an error' do
- expect { request_slow_responder }.not_to raise_error
- end
- end
-
- context 'and stream_body option is truthy' do
- let(:options) { { stream_body: true } }
-
- it 'does not raise an error' do
- expect { request_slow_responder }.not_to raise_error
- end
- end
- end
-
- it 'calls a block' do
- WebMock.stub_request(:post, /.*/)
-
- expect { |b| described_class.post('http://example.org', &b) }.to yield_with_args
- end
-
- describe 'allow_local_requests_from_web_hooks_and_services is' do
- before do
- WebMock.stub_request(:get, /.*/).to_return(status: 200, body: 'Success')
- end
-
- context 'disabled' do
+ context 'when passing allow_object_storage:true' do
before do
- allow(Gitlab::CurrentSettings).to receive(:allow_local_requests_from_web_hooks_and_services?).and_return(false)
- end
-
- it 'deny requests to localhost' do
- expect { described_class.get('http://localhost:3003') }.to raise_error(Gitlab::HTTP::BlockedUrlError)
- end
-
- it 'deny requests to private network' do
- expect { described_class.get('http://192.168.1.2:3003') }.to raise_error(Gitlab::HTTP::BlockedUrlError)
+ allow(ObjectStoreSettings).to receive(:enabled_endpoint_uris).and_return([URI('http://example.com')])
end
- context 'if allow_local_requests set to true' do
- it 'override the global value and allow requests to localhost or private network' do
- stub_full_request('http://localhost:3003')
+ it 'calls Gitlab::HTTP_V2.get with default options and extra_allowed_uris' do
+ expect(Gitlab::HTTP_V2).to receive(:get)
+ .with('/path', default_options.merge(extra_allowed_uris: [URI('http://example.com')]))
- expect { described_class.get('http://localhost:3003', allow_local_requests: true) }.not_to raise_error
- end
+ described_class.get('/path', allow_object_storage: true)
end
end
-
- context 'enabled' do
- before do
- allow(Gitlab::CurrentSettings).to receive(:allow_local_requests_from_web_hooks_and_services?).and_return(true)
- end
-
- it 'allow requests to localhost' do
- stub_full_request('http://localhost:3003')
-
- expect { described_class.get('http://localhost:3003') }.not_to raise_error
- end
-
- it 'allow requests to private network' do
- expect { described_class.get('http://192.168.1.2:3003') }.not_to raise_error
- end
-
- context 'if allow_local_requests set to false' do
- it 'override the global value and ban requests to localhost or private network' do
- expect { described_class.get('http://localhost:3003', allow_local_requests: false) }.to raise_error(Gitlab::HTTP::BlockedUrlError)
- end
- end
- end
- end
-
- describe 'handle redirect loops' do
- before do
- stub_full_request("http://example.org", method: :any).to_raise(HTTParty::RedirectionTooDeep.new("Redirection Too Deep"))
- end
-
- it 'handles GET requests' do
- expect { described_class.get('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
- end
-
- it 'handles POST requests' do
- expect { described_class.post('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
- end
-
- it 'handles PUT requests' do
- expect { described_class.put('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
- end
-
- it 'handles DELETE requests' do
- expect { described_class.delete('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
- end
-
- it 'handles HEAD requests' do
- expect { described_class.head('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
- end
end
- describe 'setting default timeouts' do
- before do
- stub_full_request('http://example.org', method: :any)
- end
-
- context 'when no timeouts are set' do
- it 'sets default open and read and write timeouts' do
- expect(described_class).to receive(:httparty_perform_request).with(
- Net::HTTP::Get, 'http://example.org', default_options
- ).and_call_original
-
- described_class.get('http://example.org')
- end
- end
-
- context 'when :timeout is set' do
- it 'does not set any default timeouts' do
- expect(described_class).to receive(:httparty_perform_request).with(
- Net::HTTP::Get, 'http://example.org', { timeout: 1 }
- ).and_call_original
-
- described_class.get('http://example.org', { timeout: 1 })
- end
- end
-
- context 'when :open_timeout is set' do
- it 'only sets default read and write timeout' do
- expect(described_class).to receive(:httparty_perform_request).with(
- Net::HTTP::Get, 'http://example.org', default_options.merge(open_timeout: 1)
- ).and_call_original
+ describe '.try_get' do
+ it 'calls .get' do
+ expect(described_class).to receive(:get).with('/path', {})
- described_class.get('http://example.org', open_timeout: 1)
- end
+ described_class.try_get('/path')
end
- context 'when :read_timeout is set' do
- it 'only sets default open and write timeout' do
- expect(described_class).to receive(:httparty_perform_request).with(
- Net::HTTP::Get, 'http://example.org', default_options.merge(read_timeout: 1)
- ).and_call_original
+ it 'returns nil when .get raises an error' do
+ expect(described_class).to receive(:get).and_raise(SocketError)
- described_class.get('http://example.org', read_timeout: 1)
- end
- end
-
- context 'when :write_timeout is set' do
- it 'only sets default open and read timeout' do
- expect(described_class).to receive(:httparty_perform_request).with(
- Net::HTTP::Put, 'http://example.org', default_options.merge(write_timeout: 1)
- ).and_call_original
-
- described_class.put('http://example.org', write_timeout: 1)
- end
+ expect(described_class.try_get('/path')).to be_nil
end
end
- describe '.try_get' do
- let(:path) { 'http://example.org' }
+ describe '.perform_request' do
+ context 'when sending a GET request' do
+ it 'calls Gitlab::HTTP_V2.get with default options' do
+ expect(Gitlab::HTTP_V2).to receive(:get).with('/path', default_options)
- let(:extra_log_info_proc) do
- proc do |error, url, options|
- { klass: error.class, url: url, options: options }
+ described_class.perform_request(Net::HTTP::Get, '/path', {})
end
end
- let(:request_options) do
- default_options.merge({
- verify: false,
- basic_auth: { username: 'user', password: 'pass' }
- })
- end
-
- described_class::HTTP_ERRORS.each do |exception_class|
- context "with #{exception_class}" do
- let(:klass) { exception_class }
-
- context 'with path' do
- before do
- expect(described_class).to receive(:httparty_perform_request)
- .with(Net::HTTP::Get, path, default_options)
- .and_raise(klass)
- end
-
- it 'handles requests without extra_log_info' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), {})
-
- expect(described_class.try_get(path)).to be_nil
- end
-
- it 'handles requests with extra_log_info as hash' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), { a: :b })
-
- expect(described_class.try_get(path, extra_log_info: { a: :b })).to be_nil
- end
-
- it 'handles requests with extra_log_info as proc' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), { url: path, klass: klass, options: {} })
-
- expect(described_class.try_get(path, extra_log_info: extra_log_info_proc)).to be_nil
- end
- end
-
- context 'with path and options' do
- before do
- expect(described_class).to receive(:httparty_perform_request)
- .with(Net::HTTP::Get, path, request_options)
- .and_raise(klass)
- end
-
- it 'handles requests without extra_log_info' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), {})
-
- expect(described_class.try_get(path, request_options)).to be_nil
- end
-
- it 'handles requests with extra_log_info as hash' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), { a: :b })
-
- expect(described_class.try_get(path, **request_options, extra_log_info: { a: :b })).to be_nil
- end
-
- it 'handles requests with extra_log_info as proc' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), { klass: klass, url: path, options: request_options })
-
- expect(described_class.try_get(path, **request_options, extra_log_info: extra_log_info_proc)).to be_nil
- end
- end
-
- context 'with path, options, and block' do
- let(:block) do
- proc {}
- end
-
- before do
- expect(described_class).to receive(:httparty_perform_request)
- .with(Net::HTTP::Get, path, request_options, &block)
- .and_raise(klass)
- end
-
- it 'handles requests without extra_log_info' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), {})
-
- expect(described_class.try_get(path, request_options, &block)).to be_nil
- end
-
- it 'handles requests with extra_log_info as hash' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), { a: :b })
-
- expect(described_class.try_get(path, **request_options, extra_log_info: { a: :b }, &block)).to be_nil
- end
-
- it 'handles requests with extra_log_info as proc' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), { klass: klass, url: path, options: request_options })
-
- expect(described_class.try_get(path, **request_options, extra_log_info: extra_log_info_proc, &block)).to be_nil
- end
- end
+ context 'when sending a LOCK request' do
+ it 'raises ArgumentError' do
+ expect do
+ described_class.perform_request(Net::HTTP::Lock, '/path', {})
+ end.to raise_error(ArgumentError, "Unsupported HTTP method: 'lock'.")
end
end
end
- describe 'silent mode', feature_category: :geo_replication do
+ context 'when the FF use_gitlab_http_v2 is disabled' do
before do
- stub_full_request("http://example.org", method: :any)
- stub_application_setting(silent_mode_enabled: silent_mode)
+ stub_feature_flags(use_gitlab_http_v2: false)
end
- context 'when silent mode is enabled' do
- let(:silent_mode) { true }
-
- it 'allows GET requests' do
- expect { described_class.get('http://example.org') }.not_to raise_error
- end
+ describe '.get' do
+ it 'calls Gitlab::LegacyHTTP.get with default options' do
+ expect(Gitlab::LegacyHTTP).to receive(:get).with('/path', {})
- it 'allows HEAD requests' do
- expect { described_class.head('http://example.org') }.not_to raise_error
- end
-
- it 'allows OPTIONS requests' do
- expect { described_class.options('http://example.org') }.not_to raise_error
- end
-
- it 'blocks POST requests' do
- expect { described_class.post('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
- end
-
- it 'blocks PUT requests' do
- expect { described_class.put('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
- end
-
- it 'blocks DELETE requests' do
- expect { described_class.delete('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
- end
-
- it 'logs blocked requests' do
- expect(::Gitlab::AppJsonLogger).to receive(:info).with(
- message: "Outbound HTTP request blocked",
- outbound_http_request_method: 'Net::HTTP::Post',
- silent_mode_enabled: true
- )
-
- expect { described_class.post('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
+ described_class.get('/path')
end
end
- context 'when silent mode is disabled' do
- let(:silent_mode) { false }
-
- it 'allows GET requests' do
- expect { described_class.get('http://example.org') }.not_to raise_error
- end
+ describe '.try_get' do
+ it 'calls .get' do
+ expect(described_class).to receive(:get).with('/path', {})
- it 'allows HEAD requests' do
- expect { described_class.head('http://example.org') }.not_to raise_error
+ described_class.try_get('/path')
end
- it 'allows OPTIONS requests' do
- expect { described_class.options('http://example.org') }.not_to raise_error
- end
+ it 'returns nil when .get raises an error' do
+ expect(described_class).to receive(:get).and_raise(SocketError)
- it 'blocks POST requests' do
- expect { described_class.post('http://example.org') }.not_to raise_error
+ expect(described_class.try_get('/path')).to be_nil
end
+ end
- it 'blocks PUT requests' do
- expect { described_class.put('http://example.org') }.not_to raise_error
- end
+ describe '.perform_request' do
+ it 'calls Gitlab::LegacyHTTP.perform_request with default options' do
+ expect(Gitlab::LegacyHTTP).to receive(:perform_request).with(Net::HTTP::Get, '/path', {})
- it 'blocks DELETE requests' do
- expect { described_class.delete('http://example.org') }.not_to raise_error
+ described_class.perform_request(Net::HTTP::Get, '/path', {})
end
end
end
diff --git a/spec/lib/gitlab/i18n_spec.rb b/spec/lib/gitlab/i18n_spec.rb
index ee92831922d..fdd868acbb1 100644
--- a/spec/lib/gitlab/i18n_spec.rb
+++ b/spec/lib/gitlab/i18n_spec.rb
@@ -62,4 +62,18 @@ RSpec.describe Gitlab::I18n, feature_category: :internationalization do
end
end
end
+
+ describe '.trimmed_language_name' do
+ it 'trims the language name', :aggregate_failures do
+ expect(described_class.trimmed_language_name('en')).to eq('English')
+ expect(described_class.trimmed_language_name('bg')).to eq('Bulgarian')
+ expect(described_class.trimmed_language_name('id_ID')).to eq('Indonesian')
+ expect(described_class.trimmed_language_name('nb_NO')).to eq('Norwegian (Bokmål)')
+ expect(described_class.trimmed_language_name('zh_HK')).to eq('Chinese, Traditional (Hong Kong)')
+ end
+
+ it 'return nil for unknown language code' do
+ expect(described_class.trimmed_language_name('_invalid_code_')).to be_nil
+ end
+ end
end
diff --git a/spec/lib/gitlab/import/errors_spec.rb b/spec/lib/gitlab/import/errors_spec.rb
index 21d96601609..3b45af0618b 100644
--- a/spec/lib/gitlab/import/errors_spec.rb
+++ b/spec/lib/gitlab/import/errors_spec.rb
@@ -39,7 +39,6 @@ RSpec.describe Gitlab::Import::Errors, feature_category: :importers do
"Noteable can't be blank",
"Author can't be blank",
"Project does not match noteable project",
- "Namespace can't be blank",
"User can't be blank",
"Name is not a valid emoji name"
)
diff --git a/spec/lib/gitlab/import/import_failure_service_spec.rb b/spec/lib/gitlab/import/import_failure_service_spec.rb
index eb71b307b8d..a4682a9495e 100644
--- a/spec/lib/gitlab/import/import_failure_service_spec.rb
+++ b/spec/lib/gitlab/import/import_failure_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures do
+RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures, feature_category: :importers do
let_it_be(:import_type) { 'import_type' }
let_it_be(:project) { create(:project, :import_started, import_type: import_type) }
@@ -10,15 +10,18 @@ RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures do
let(:import_state) { nil }
let(:fail_import) { false }
let(:metrics) { false }
+ let(:external_identifiers) { {} }
+ let(:project_id) { project.id }
let(:arguments) do
{
- project_id: project.id,
+ project_id: project_id,
error_source: 'SomeImporter',
exception: exception,
fail_import: fail_import,
metrics: metrics,
- import_state: import_state
+ import_state: import_state,
+ external_identifiers: external_identifiers
}
end
@@ -33,7 +36,8 @@ RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures do
project_id: '_project_id_',
error_source: '_error_source_',
fail_import: '_fail_import_',
- metrics: '_metrics_'
+ metrics: '_metrics_',
+ external_identifiers: { id: 1 }
}
end
@@ -59,7 +63,7 @@ RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures do
subject(:service) { described_class.new(**arguments) }
shared_examples 'logs the exception and fails the import' do
- it 'when the failure does not abort the import' do
+ specify do
expect(Gitlab::ErrorTracking)
.to receive(:track_exception)
.with(
@@ -67,7 +71,8 @@ RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures do
{
project_id: project.id,
import_type: import_type,
- source: 'SomeImporter'
+ source: 'SomeImporter',
+ external_identifiers: external_identifiers
}
)
@@ -76,10 +81,11 @@ RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures do
.with(
{
message: 'importer failed',
- 'error.message': 'some error',
+ 'exception.message': 'some error',
project_id: project.id,
import_type: import_type,
- source: 'SomeImporter'
+ source: 'SomeImporter',
+ external_identifiers: external_identifiers
}
)
@@ -95,7 +101,7 @@ RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures do
end
shared_examples 'logs the exception and does not fail the import' do
- it 'when the failure does not abort the import' do
+ specify do
expect(Gitlab::ErrorTracking)
.to receive(:track_exception)
.with(
@@ -103,7 +109,8 @@ RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures do
{
project_id: project.id,
import_type: import_type,
- source: 'SomeImporter'
+ source: 'SomeImporter',
+ external_identifiers: external_identifiers
}
)
@@ -112,10 +119,11 @@ RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures do
.with(
{
message: 'importer failed',
- 'error.message': 'some error',
+ 'exception.message': 'some error',
project_id: project.id,
import_type: import_type,
- source: 'SomeImporter'
+ source: 'SomeImporter',
+ external_identifiers: external_identifiers
}
)
@@ -159,6 +167,7 @@ RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures do
end
context 'when using the import_state as reference' do
+ let(:project_id) { nil }
let(:import_state) { project.import_state }
context 'when it fails the import' do
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index d337a37c69f..cd899a79451 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -172,7 +172,6 @@ project_members:
- user
- source
- project
-- member_task
- member_namespace
- member_role
member_roles:
@@ -250,6 +249,7 @@ merge_requests:
- created_environments
- predictions
- user_agent_detail
+- scan_result_policy_violations
external_pull_requests:
- project
merge_request_diff:
@@ -668,6 +668,7 @@ project:
- statistics
- container_repositories
- container_registry_data_repair_detail
+- container_registry_protection_rules
- uploads
- file_uploads
- import_state
@@ -823,10 +824,12 @@ project:
- design_management_repository_state
- compliance_standards_adherence
- scan_result_policy_reads
+- scan_result_policy_violations
- project_state
- security_policy_bots
- target_branch_rules
- organization
+- dora_performance_scores
award_emoji:
- awardable
- user
diff --git a/spec/lib/gitlab/import_export/attributes_finder_spec.rb b/spec/lib/gitlab/import_export/attributes_finder_spec.rb
index f12cbe4f82f..fd9d609992d 100644
--- a/spec/lib/gitlab/import_export/attributes_finder_spec.rb
+++ b/spec/lib/gitlab/import_export/attributes_finder_spec.rb
@@ -131,19 +131,19 @@ RSpec.describe Gitlab::ImportExport::AttributesFinder, feature_category: :import
end
it 'generates the correct hash for a relation with included attributes' do
- setup_yaml(tree: { project: [:issues] },
- included_attributes: { issues: [:name, :description] })
+ setup_yaml(
+ tree: { project: [:issues] },
+ included_attributes: { issues: [:name, :description] }
+ )
is_expected.to match(
- include: [{ issues: { include: [],
- only: [:name, :description] } }],
+ include: [{ issues: { include: [], only: [:name, :description] } }],
preload: { issues: nil }
)
end
it 'generates the correct hash for a relation with excluded attributes' do
- setup_yaml(tree: { project: [:issues] },
- excluded_attributes: { issues: [:name] })
+ setup_yaml(tree: { project: [:issues] }, excluded_attributes: { issues: [:name] })
is_expected.to match(
include: [{ issues: { except: [:name],
@@ -153,25 +153,23 @@ RSpec.describe Gitlab::ImportExport::AttributesFinder, feature_category: :import
end
it 'generates the correct hash for a relation with both excluded and included attributes' do
- setup_yaml(tree: { project: [:issues] },
- excluded_attributes: { issues: [:name] },
- included_attributes: { issues: [:description] })
+ setup_yaml(
+ tree: { project: [:issues] },
+ excluded_attributes: { issues: [:name] },
+ included_attributes: { issues: [:description] }
+ )
is_expected.to match(
- include: [{ issues: { except: [:name],
- include: [],
- only: [:description] } }],
+ include: [{ issues: { except: [:name], include: [], only: [:description] } }],
preload: { issues: nil }
)
end
it 'generates the correct hash for a relation with custom methods' do
- setup_yaml(tree: { project: [:issues] },
- methods: { issues: [:name] })
+ setup_yaml(tree: { project: [:issues] }, methods: { issues: [:name] })
is_expected.to match(
- include: [{ issues: { include: [],
- methods: [:name] } }],
+ include: [{ issues: { include: [], methods: [:name] } }],
preload: { issues: nil }
)
end
diff --git a/spec/lib/gitlab/import_export/base/object_builder_spec.rb b/spec/lib/gitlab/import_export/base/object_builder_spec.rb
index 38c3b23db36..3c69a6a7746 100644
--- a/spec/lib/gitlab/import_export/base/object_builder_spec.rb
+++ b/spec/lib/gitlab/import_export/base/object_builder_spec.rb
@@ -4,11 +4,13 @@ require 'spec_helper'
RSpec.describe Gitlab::ImportExport::Base::ObjectBuilder do
let(:project) do
- create(:project, :repository,
- :builds_disabled,
- :issues_disabled,
- name: 'project',
- path: 'project')
+ create(
+ :project, :repository,
+ :builds_disabled,
+ :issues_disabled,
+ name: 'project',
+ path: 'project'
+ )
end
let(:klass) { Milestone }
diff --git a/spec/lib/gitlab/import_export/base/relation_factory_spec.rb b/spec/lib/gitlab/import_export/base/relation_factory_spec.rb
index 4ef8f4b5d76..5e63804c51c 100644
--- a/spec/lib/gitlab/import_export/base/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/base/relation_factory_spec.rb
@@ -11,14 +11,16 @@ RSpec.describe Gitlab::ImportExport::Base::RelationFactory do
let(:excluded_keys) { [] }
subject do
- described_class.create(relation_sym: relation_sym, # rubocop:disable Rails/SaveBang
- relation_hash: relation_hash,
- relation_index: 1,
- object_builder: Gitlab::ImportExport::Project::ObjectBuilder,
- members_mapper: members_mapper,
- user: user,
- importable: project,
- excluded_keys: excluded_keys)
+ described_class.create( # rubocop:disable Rails/SaveBang
+ relation_sym: relation_sym,
+ relation_hash: relation_hash,
+ relation_index: 1,
+ object_builder: Gitlab::ImportExport::Project::ObjectBuilder,
+ members_mapper: members_mapper,
+ user: user,
+ importable: project,
+ excluded_keys: excluded_keys
+ )
end
describe '#create' do
diff --git a/spec/lib/gitlab/import_export/design_repo_restorer_spec.rb b/spec/lib/gitlab/import_export/design_repo_restorer_spec.rb
index 5ef9eb78d3b..144617055ab 100644
--- a/spec/lib/gitlab/import_export/design_repo_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/design_repo_restorer_spec.rb
@@ -12,9 +12,7 @@ RSpec.describe Gitlab::ImportExport::DesignRepoRestorer do
let(:bundler) { Gitlab::ImportExport::DesignRepoSaver.new(exportable: project_with_design_repo, shared: shared) }
let(:bundle_path) { File.join(shared.export_path, Gitlab::ImportExport.design_repo_bundle_filename) }
let(:restorer) do
- described_class.new(path_to_bundle: bundle_path,
- shared: shared,
- importable: project)
+ described_class.new(path_to_bundle: bundle_path, shared: shared, importable: project)
end
before do
diff --git a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
index 02419267f0e..dfc7202194d 100644
--- a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
+++ b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
@@ -217,17 +217,18 @@ RSpec.describe Gitlab::ImportExport::FastHashSerializer, :with_license, feature_
release = create(:release)
group = create(:group)
- project = create(:project,
- :public,
- :repository,
- :issues_disabled,
- :wiki_enabled,
- :builds_private,
- description: 'description',
- releases: [release],
- group: group,
- approvals_before_merge: 1
- )
+ project = create(
+ :project,
+ :public,
+ :repository,
+ :issues_disabled,
+ :wiki_enabled,
+ :builds_private,
+ description: 'description',
+ releases: [release],
+ group: group,
+ approvals_before_merge: 1
+ )
issue = create(:issue, assignees: [user], project: project)
snippet = create(:project_snippet, project: project)
@@ -249,10 +250,7 @@ RSpec.describe Gitlab::ImportExport::FastHashSerializer, :with_license, feature_
create(:discussion_note, noteable: issue, project: project)
create(:note, noteable: merge_request, project: project)
create(:note, noteable: snippet, project: project)
- create(:note_on_commit,
- author: user,
- project: project,
- commit_id: ci_build.pipeline.sha)
+ create(:note_on_commit, author: user, project: project, commit_id: ci_build.pipeline.sha)
create(:resource_label_event, label: project_label, issue: issue)
create(:resource_label_event, label: group_label, merge_request: merge_request)
diff --git a/spec/lib/gitlab/import_export/importer_spec.rb b/spec/lib/gitlab/import_export/importer_spec.rb
index 53d205850c8..a98080b682b 100644
--- a/spec/lib/gitlab/import_export/importer_spec.rb
+++ b/spec/lib/gitlab/import_export/importer_spec.rb
@@ -80,7 +80,7 @@ RSpec.describe Gitlab::ImportExport::Importer do
context 'with sample_data_template' do
it 'initializes the Sample::TreeRestorer' do
- project.create_or_update_import_data(data: { sample_data: true })
+ project.build_or_assign_import_data(data: { sample_data: true })
expect(Gitlab::ImportExport::Project::Sample::TreeRestorer).to receive(:new).and_call_original
@@ -112,7 +112,7 @@ RSpec.describe Gitlab::ImportExport::Importer do
end
it 'sets the correct visibility_level when visibility level is a string' do
- project.create_or_update_import_data(
+ project.build_or_assign_import_data(
data: { override_params: { visibility_level: Gitlab::VisibilityLevel::PRIVATE.to_s } }
)
diff --git a/spec/lib/gitlab/import_export/merge_request_parser_spec.rb b/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
index 3ca9f727033..17d416b0f0a 100644
--- a/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
+++ b/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
@@ -16,10 +16,12 @@ RSpec.describe Gitlab::ImportExport::MergeRequestParser do
let(:diff_head_sha) { SecureRandom.hex(20) }
let(:parsed_merge_request) do
- described_class.new(project,
- diff_head_sha,
- merge_request,
- merge_request.as_json).parse!
+ described_class.new(
+ project,
+ diff_head_sha,
+ merge_request,
+ merge_request.as_json
+ ).parse!
end
after do
diff --git a/spec/lib/gitlab/import_export/project/export_task_spec.rb b/spec/lib/gitlab/import_export/project/export_task_spec.rb
index 0837874526a..8eb3c76302a 100644
--- a/spec/lib/gitlab/import_export/project/export_task_spec.rb
+++ b/spec/lib/gitlab/import_export/project/export_task_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe Gitlab::ImportExport::Project::ExportTask, :silence_stdout, feature_category: :importers do
let_it_be(:username) { 'root' }
diff --git a/spec/lib/gitlab/import_export/project/import_task_spec.rb b/spec/lib/gitlab/import_export/project/import_task_spec.rb
index 693f1984ce8..d38905992d9 100644
--- a/spec/lib/gitlab/import_export/project/import_task_spec.rb
+++ b/spec/lib/gitlab/import_export/project/import_task_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe Gitlab::ImportExport::Project::ImportTask, :request_store, :silence_stdout, feature_category: :importers do
let(:username) { 'root' }
diff --git a/spec/lib/gitlab/import_export/project/object_builder_spec.rb b/spec/lib/gitlab/import_export/project/object_builder_spec.rb
index 43794ce01a3..20e176bf6fd 100644
--- a/spec/lib/gitlab/import_export/project/object_builder_spec.rb
+++ b/spec/lib/gitlab/import_export/project/object_builder_spec.rb
@@ -6,12 +6,15 @@ RSpec.describe Gitlab::ImportExport::Project::ObjectBuilder do
let!(:group) { create(:group, :private) }
let!(:subgroup) { create(:group, :private, parent: group) }
let!(:project) do
- create(:project, :repository,
- :builds_disabled,
- :issues_disabled,
- name: 'project',
- path: 'project',
- group: subgroup)
+ create(
+ :project,
+ :repository,
+ :builds_disabled,
+ :issues_disabled,
+ name: 'project',
+ path: 'project',
+ group: subgroup
+ )
end
let(:lru_cache) { subject.send(:lru_cache) }
@@ -19,10 +22,7 @@ RSpec.describe Gitlab::ImportExport::Project::ObjectBuilder do
context 'request store is not active' do
subject do
- described_class.new(Label,
- 'title' => 'group label',
- 'project' => project,
- 'group' => project.group)
+ described_class.new(Label, 'title' => 'group label', 'project' => project, 'group' => project.group)
end
it 'ignore cache initialize' do
@@ -33,10 +33,7 @@ RSpec.describe Gitlab::ImportExport::Project::ObjectBuilder do
context 'request store is active', :request_store do
subject do
- described_class.new(Label,
- 'title' => 'group label',
- 'project' => project,
- 'group' => project.group)
+ described_class.new(Label, 'title' => 'group label', 'project' => project, 'group' => project.group)
end
it 'initialize cache in memory' do
@@ -71,27 +68,33 @@ RSpec.describe Gitlab::ImportExport::Project::ObjectBuilder do
it 'finds the existing group label' do
group_label = create(:group_label, name: 'group label', group: project.group)
- expect(described_class.build(Label,
- 'title' => 'group label',
- 'project' => project,
- 'group' => project.group)).to eq(group_label)
+ expect(described_class.build(
+ Label,
+ 'title' => 'group label',
+ 'project' => project,
+ 'group' => project.group
+ )).to eq(group_label)
end
it 'finds the existing group label in root ancestor' do
group_label = create(:group_label, name: 'group label', group: group)
- expect(described_class.build(Label,
- 'title' => 'group label',
- 'project' => project,
- 'group' => group)).to eq(group_label)
+ expect(described_class.build(
+ Label,
+ 'title' => 'group label',
+ 'project' => project,
+ 'group' => group
+ )).to eq(group_label)
end
it 'creates a new project label' do
- label = described_class.build(Label,
- 'title' => 'group label',
- 'project' => project,
- 'group' => project.group,
- 'group_id' => project.group.id)
+ label = described_class.build(
+ Label,
+ 'title' => 'group label',
+ 'project' => project,
+ 'group' => project.group,
+ 'group_id' => project.group.id
+ )
expect(label.persisted?).to be true
expect(label).to be_an_instance_of(ProjectLabel)
@@ -103,26 +106,32 @@ RSpec.describe Gitlab::ImportExport::Project::ObjectBuilder do
it 'finds the existing group milestone' do
milestone = create(:milestone, name: 'group milestone', group: project.group)
- expect(described_class.build(Milestone,
- 'title' => 'group milestone',
- 'project' => project,
- 'group' => project.group)).to eq(milestone)
+ expect(described_class.build(
+ Milestone,
+ 'title' => 'group milestone',
+ 'project' => project,
+ 'group' => project.group
+ )).to eq(milestone)
end
it 'finds the existing group milestone in root ancestor' do
milestone = create(:milestone, name: 'group milestone', group: group)
- expect(described_class.build(Milestone,
- 'title' => 'group milestone',
- 'project' => project,
- 'group' => group)).to eq(milestone)
+ expect(described_class.build(
+ Milestone,
+ 'title' => 'group milestone',
+ 'project' => project,
+ 'group' => group
+ )).to eq(milestone)
end
it 'creates a new milestone' do
- milestone = described_class.build(Milestone,
- 'title' => 'group milestone',
- 'project' => project,
- 'group' => project.group)
+ milestone = described_class.build(
+ Milestone,
+ 'title' => 'group milestone',
+ 'project' => project,
+ 'group' => project.group
+ )
expect(milestone.persisted?).to be true
end
@@ -132,12 +141,14 @@ RSpec.describe Gitlab::ImportExport::Project::ObjectBuilder do
clashing_iid = 1
create(:milestone, iid: clashing_iid, project: project)
- milestone = described_class.build(Milestone,
- 'iid' => clashing_iid,
- 'title' => 'milestone',
- 'project' => project,
- 'group' => nil,
- 'group_id' => nil)
+ milestone = described_class.build(
+ Milestone,
+ 'iid' => clashing_iid,
+ 'title' => 'milestone',
+ 'project' => project,
+ 'group' => nil,
+ 'group_id' => nil
+ )
expect(milestone.persisted?).to be true
expect(Milestone.count).to eq(2)
@@ -173,34 +184,45 @@ RSpec.describe Gitlab::ImportExport::Project::ObjectBuilder do
context 'merge_request' do
it 'finds the existing merge_request' do
- merge_request = create(:merge_request, title: 'MergeRequest', iid: 7, target_project: project, source_project: project)
- expect(described_class.build(MergeRequest,
- 'title' => 'MergeRequest',
- 'source_project_id' => project.id,
- 'target_project_id' => project.id,
- 'source_branch' => 'SourceBranch',
- 'iid' => 7,
- 'target_branch' => 'TargetBranch',
- 'author_id' => project.creator.id)).to eq(merge_request)
+ merge_request = create(
+ :merge_request,
+ title: 'MergeRequest',
+ iid: 7,
+ target_project: project,
+ source_project: project
+ )
+
+ expect(described_class.build(
+ MergeRequest,
+ 'title' => 'MergeRequest',
+ 'source_project_id' => project.id,
+ 'target_project_id' => project.id,
+ 'source_branch' => 'SourceBranch',
+ 'iid' => 7,
+ 'target_branch' => 'TargetBranch',
+ 'author_id' => project.creator.id
+ )).to eq(merge_request)
end
it 'creates a new merge_request' do
- merge_request = described_class.build(MergeRequest,
- 'title' => 'MergeRequest',
- 'iid' => 8,
- 'source_project_id' => project.id,
- 'target_project_id' => project.id,
- 'source_branch' => 'SourceBranch',
- 'target_branch' => 'TargetBranch',
- 'author_id' => project.creator.id)
+ merge_request = described_class.build(
+ MergeRequest,
+ 'title' => 'MergeRequest',
+ 'iid' => 8,
+ 'source_project_id' => project.id,
+ 'target_project_id' => project.id,
+ 'source_branch' => 'SourceBranch',
+ 'target_branch' => 'TargetBranch',
+ 'author_id' => project.creator.id
+ )
+
expect(merge_request.persisted?).to be true
end
end
context 'merge request diff commit users' do
it 'finds the existing user' do
- user = MergeRequest::DiffCommitUser
- .find_or_create('Alice', 'alice@example.com')
+ user = MergeRequest::DiffCommitUser.find_or_create('Alice', 'alice@example.com')
found = described_class.build(
MergeRequest::DiffCommitUser,
diff --git a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
index 5e9fed32c4e..99959daa1fa 100644
--- a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
@@ -335,17 +335,19 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory, :use_clean_rails_
context 'pipeline_schedule' do
let(:relation_sym) { :pipeline_schedules }
+ let(:value) { true }
let(:relation_hash) do
{
- "id": 3,
- "created_at": "2016-07-22T08:55:44.161Z",
- "updated_at": "2016-07-22T08:55:44.161Z",
- "description": "pipeline schedule",
- "ref": "main",
- "cron": "0 4 * * 0",
- "cron_timezone": "UTC",
- "active": value,
- "project_id": project.id
+ 'id' => 3,
+ 'created_at' => '2016-07-22T08:55:44.161Z',
+ 'updated_at' => '2016-07-22T08:55:44.161Z',
+ 'description' => 'pipeline schedule',
+ 'ref' => 'main',
+ 'cron' => '0 4 * * 0',
+ 'cron_timezone' => 'UTC',
+ 'active' => value,
+ 'project_id' => project.id,
+ 'owner_id' => non_existing_record_id
}
end
@@ -360,6 +362,10 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory, :use_clean_rails_
end
end
end
+
+ it 'sets importer user as owner' do
+ expect(created_object.owner_id).to eq(importer_user.id)
+ end
end
# `project_id`, `described_class.USER_REFERENCES`, noteable_id, target_id, and some project IDs are already
diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
index b0bc31e366e..14af3028a6e 100644
--- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
@@ -449,6 +449,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i
expect(pipeline_schedule.cron).to eq('0 4 * * 0')
expect(pipeline_schedule.cron_timezone).to eq('UTC')
expect(pipeline_schedule.active).to eq(false)
+ expect(pipeline_schedule.owner_id).to eq(@user.id)
end
end
@@ -853,12 +854,14 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i
end
let!(:project) do
- create(:project,
- :builds_disabled,
- :issues_disabled,
- name: 'project',
- path: 'project',
- group: group)
+ create(
+ :project,
+ :builds_disabled,
+ :issues_disabled,
+ name: 'project',
+ path: 'project',
+ group: group
+ )
end
before do
@@ -889,12 +892,14 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i
context 'with existing group models' do
let(:group) { create(:group).tap { |g| g.add_maintainer(user) } }
let!(:project) do
- create(:project,
- :builds_disabled,
- :issues_disabled,
- name: 'project',
- path: 'project',
- group: group)
+ create(
+ :project,
+ :builds_disabled,
+ :issues_disabled,
+ name: 'project',
+ path: 'project',
+ group: group
+ )
end
before do
@@ -925,12 +930,14 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i
context 'with clashing milestones on IID' do
let(:group) { create(:group).tap { |g| g.add_maintainer(user) } }
let!(:project) do
- create(:project,
- :builds_disabled,
- :issues_disabled,
- name: 'project',
- path: 'project',
- group: group)
+ create(
+ :project,
+ :builds_disabled,
+ :issues_disabled,
+ name: 'project',
+ path: 'project',
+ group: group
+ )
end
before do
@@ -1142,8 +1149,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i
let_it_be(:user) { create(:admin, email: 'user_1@gitlabexample.com') }
let_it_be(:second_user) { create(:user, email: 'user_2@gitlabexample.com') }
let_it_be(:project) do
- create(:project, :builds_disabled, :issues_disabled,
- { name: 'project', path: 'project' })
+ create(:project, :builds_disabled, :issues_disabled, { name: 'project', path: 'project' })
end
let(:shared) { project.import_export_shared }
diff --git a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
index abb781b277b..1bf1e5b47e1 100644
--- a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
@@ -309,8 +309,8 @@ RSpec.describe Gitlab::ImportExport::Project::TreeSaver, :with_license, feature_
context 'with pipeline schedules' do
let(:relation_name) { :pipeline_schedules }
- it 'has no owner_id' do
- expect(subject.first['owner_id']).to be_nil
+ it 'has owner_id' do
+ expect(subject.first['owner_id']).to be_present
end
end
end
diff --git a/spec/lib/gitlab/import_export/repo_restorer_spec.rb b/spec/lib/gitlab/import_export/repo_restorer_spec.rb
index 3da7af7509e..3c540eb45c9 100644
--- a/spec/lib/gitlab/import_export/repo_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/repo_restorer_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Gitlab::ImportExport::RepoRestorer do
subject { described_class.new(path_to_bundle: bundle_path, shared: shared, importable: project) }
after do
- Gitlab::Shell.new.remove_repository(project.repository_storage, project.disk_path)
+ project.repository.remove
end
it 'restores the repo successfully', :aggregate_failures do
@@ -66,7 +66,7 @@ RSpec.describe Gitlab::ImportExport::RepoRestorer do
subject { described_class.new(path_to_bundle: bundle_path, shared: shared, importable: ProjectWiki.new(project)) }
after do
- Gitlab::Shell.new.remove_repository(project.wiki.repository_storage, project.wiki.disk_path)
+ project.wiki.repository.remove
end
it 'restores the wiki repo successfully', :aggregate_failures do
diff --git a/spec/lib/gitlab/import_export/shared_spec.rb b/spec/lib/gitlab/import_export/shared_spec.rb
index 408ed3a2176..37a59a68188 100644
--- a/spec/lib/gitlab/import_export/shared_spec.rb
+++ b/spec/lib/gitlab/import_export/shared_spec.rb
@@ -74,12 +74,12 @@ RSpec.describe Gitlab::ImportExport::Shared do
expect(Gitlab::ErrorTracking)
.to receive(:track_exception)
.with(error, hash_including(
- importer: 'Import/Export',
- project_id: project.id,
- project_name: project.name,
- project_path: project.full_path,
- import_jid: import_state.jid
- ))
+ importer: 'Import/Export',
+ project_id: project.id,
+ project_name: project.name,
+ project_path: project.full_path,
+ import_jid: import_state.jid
+ ))
subject.error(error)
end
diff --git a/spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb b/spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb
index 2f39cb560d0..d7b1b180e2e 100644
--- a/spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb
@@ -10,10 +10,12 @@ RSpec.describe Gitlab::ImportExport::SnippetRepoRestorer do
let(:shared) { project.import_export_shared }
let(:exporter) { Gitlab::ImportExport::SnippetsRepoSaver.new(project: project, shared: shared, current_user: user) }
let(:restorer) do
- described_class.new(user: user,
- shared: shared,
- snippet: snippet,
- path_to_bundle: snippet_bundle_path)
+ described_class.new(
+ user: user,
+ shared: shared,
+ snippet: snippet,
+ path_to_bundle: snippet_bundle_path
+ )
end
after do
diff --git a/spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb b/spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb
index e348e8f7991..4a9a01475cb 100644
--- a/spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb
@@ -14,9 +14,7 @@ RSpec.describe Gitlab::ImportExport::SnippetsRepoRestorer, :clean_gitlab_redis_r
let(:bundle_dir) { ::Gitlab::ImportExport.snippets_repo_bundle_path(shared.export_path) }
let(:service) { instance_double(Gitlab::ImportExport::SnippetRepoRestorer) }
let(:restorer) do
- described_class.new(user: user,
- shared: shared,
- project: project)
+ described_class.new(user: user, shared: shared, project: project)
end
after do
diff --git a/spec/lib/gitlab/internal_events/event_definitions_spec.rb b/spec/lib/gitlab/internal_events/event_definitions_spec.rb
index 924845504ca..a00d1ab5ecb 100644
--- a/spec/lib/gitlab/internal_events/event_definitions_spec.rb
+++ b/spec/lib/gitlab/internal_events/event_definitions_spec.rb
@@ -3,7 +3,9 @@
require "spec_helper"
RSpec.describe Gitlab::InternalEvents::EventDefinitions, feature_category: :product_analytics_data_management do
- after(:all) do
+ around do |example|
+ described_class.instance_variable_set(:@events, nil)
+ example.run
described_class.instance_variable_set(:@events, nil)
end
@@ -20,7 +22,6 @@ RSpec.describe Gitlab::InternalEvents::EventDefinitions, feature_category: :prod
let(:events2) { { 'event2' => nil } }
before do
- allow(Gitlab::Usage::MetricDefinition).to receive(:metric_definitions_changed?).and_return(true)
allow(Gitlab::Usage::MetricDefinition).to receive(:all).and_return([definition1, definition2])
allow(definition1).to receive(:available?).and_return(true)
allow(definition2).to receive(:available?).and_return(true)
@@ -58,9 +59,8 @@ RSpec.describe Gitlab::InternalEvents::EventDefinitions, feature_category: :prod
end
context 'when event does not have unique property' do
- it 'unique fails' do
- expect { described_class.unique_property('event1') }
- .to raise_error(described_class::InvalidMetricConfiguration, /Unique property not defined for/)
+ it 'returns nil' do
+ expect(described_class.unique_property('event1')).to be_nil
end
end
end
diff --git a/spec/lib/gitlab/internal_events_spec.rb b/spec/lib/gitlab/internal_events_spec.rb
index c2615e0f22c..20625add292 100644
--- a/spec/lib/gitlab/internal_events_spec.rb
+++ b/spec/lib/gitlab/internal_events_spec.rb
@@ -9,6 +9,8 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
before do
allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
+ allow(redis).to receive(:incr)
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis)
allow(Gitlab::Tracking).to receive(:tracker).and_return(fake_snowplow)
allow(Gitlab::InternalEvents::EventDefinitions).to receive(:unique_property).and_return(:user)
allow(fake_snowplow).to receive(:event)
@@ -19,6 +21,12 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
.with(event_name, values: unique_value)
end
+ def expect_redis_tracking(event_name)
+ expect(redis).to have_received(:incr) do |redis_key|
+ expect(redis_key).to end_with(event_name)
+ end
+ end
+
def expect_snowplow_tracking(event_name)
service_ping_context = Gitlab::Tracking::ServicePingContext
.new(data_source: :redis_hll, event: event_name)
@@ -39,14 +47,16 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
let_it_be(:project) { build(:project, id: 2) }
let_it_be(:namespace) { project.namespace }
+ let(:redis) { instance_double('Redis') }
let(:fake_snowplow) { instance_double(Gitlab::Tracking::Destinations::Snowplow) }
let(:event_name) { 'g_edit_by_web_ide' }
let(:unique_value) { user.id }
- it 'updates both RedisHLL and Snowplow', :aggregate_failures do
+ it 'updates Redis, RedisHLL and Snowplow', :aggregate_failures do
params = { user: user, project: project, namespace: namespace }
described_class.track_event(event_name, **params)
+ expect_redis_tracking(event_name)
expect_redis_hll_tracking(event_name)
expect_snowplow_tracking(event_name) # Add test for arguments
end
@@ -73,9 +83,10 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
expect { described_class.track_event('unknown_event') }.not_to raise_error
end
- it 'logs error on missing property' do
+ it 'logs error on missing property', :aggregate_failures do
expect { described_class.track_event(event_name, merge_request_id: 1) }.not_to raise_error
+ expect_redis_tracking(event_name)
expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_for_dev_exception)
.with(described_class::InvalidPropertyError, event_name: event_name, kwargs: { merge_request_id: 1 })
end
@@ -86,9 +97,10 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
.and_raise(Gitlab::InternalEvents::EventDefinitions::InvalidMetricConfiguration)
end
- it 'fails on missing unique property' do
+ it 'logs error on missing unique property', :aggregate_failures do
expect { described_class.track_event(event_name, merge_request_id: 1) }.not_to raise_error
+ expect_redis_tracking(event_name)
expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_for_dev_exception)
end
end
@@ -107,6 +119,7 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
it 'is used when logging to RedisHLL', :aggregate_failures do
described_class.track_event(event_name, user: user, project: project)
+ expect_redis_tracking(event_name)
expect_redis_hll_tracking(event_name)
expect_snowplow_tracking(event_name)
end
@@ -120,13 +133,42 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
end
end
- context 'when method does not exist on property' do
+ context 'when method does not exist on property', :aggregate_failures do
it 'logs error on missing method' do
expect { described_class.track_event(event_name, project: "a_string") }.not_to raise_error
+ expect_redis_tracking(event_name)
expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_for_dev_exception)
.with(described_class::InvalidMethodError, event_name: event_name, kwargs: { project: 'a_string' })
end
end
+
+ context 'when send_snowplow_event is false' do
+ it 'logs to Redis and RedisHLL but not Snowplow' do
+ described_class.track_event(event_name, send_snowplow_event: false, user: user, project: project)
+
+ expect_redis_tracking(event_name)
+ expect_redis_hll_tracking(event_name)
+ expect(fake_snowplow).not_to have_received(:event)
+ end
+ end
+ end
+
+ context 'when unique key is not defined' do
+ let(:event_name) { 'p_ci_templates_terraform_base_latest' }
+
+ before do
+ allow(Gitlab::InternalEvents::EventDefinitions).to receive(:unique_property)
+ .with(event_name)
+ .and_return(nil)
+ end
+
+ it 'logs to Redis and Snowplow but not RedisHLL', :aggregate_failures do
+ described_class.track_event(event_name, user: user, project: project)
+
+ expect_redis_tracking(event_name)
+ expect_snowplow_tracking(event_name)
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to have_received(:track_event)
+ end
end
end
diff --git a/spec/lib/gitlab/kubernetes/kube_client_spec.rb b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
index d0b89afccdc..5fcbecfe6e1 100644
--- a/spec/lib/gitlab/kubernetes/kube_client_spec.rb
+++ b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
@@ -106,7 +106,7 @@ RSpec.describe Gitlab::Kubernetes::KubeClient do
describe '#initialize' do
shared_examples 'local address' do
it 'blocks local addresses' do
- expect { client }.to raise_error(Gitlab::UrlBlocker::BlockedUrlError)
+ expect { client }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
end
context 'when local requests are allowed' do
@@ -136,7 +136,7 @@ RSpec.describe Gitlab::Kubernetes::KubeClient do
let(:api_url) { 'ssh://192.168.1.2' }
it 'raises an error' do
- expect { client }.to raise_error(Gitlab::UrlBlocker::BlockedUrlError)
+ expect { client }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
end
end
diff --git a/spec/lib/gitlab/legacy_http_spec.rb b/spec/lib/gitlab/legacy_http_spec.rb
new file mode 100644
index 00000000000..07a30b194b6
--- /dev/null
+++ b/spec/lib/gitlab/legacy_http_spec.rb
@@ -0,0 +1,448 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::LegacyHTTP, feature_category: :shared do
+ include StubRequests
+
+ let(:default_options) { Gitlab::HTTP::DEFAULT_TIMEOUT_OPTIONS }
+
+ context 'when allow_local_requests is false' do
+ it 'sends the request to the correct URI' do
+ stub_full_request('https://example.org:8080', ip_address: '8.8.8.8').to_return(status: 200)
+
+ described_class.get('https://example.org:8080', allow_local_requests: false)
+
+ expect(WebMock).to have_requested(:get, 'https://8.8.8.8:8080').once
+ end
+ end
+
+ context 'when allow_local_requests is true' do
+ it 'sends the request to the correct URI' do
+ stub_full_request('https://example.org:8080')
+
+ described_class.get('https://example.org:8080', allow_local_requests: true)
+
+ expect(WebMock).to have_requested(:get, 'https://8.8.8.9:8080').once
+ end
+ end
+
+ context 'when reading the response is too slow' do
+ before_all do
+ # Override Net::HTTP to add a delay between sending each response chunk
+ mocked_http = Class.new(Net::HTTP) do
+ def request(*)
+ super do |response|
+ response.instance_eval do
+ def read_body(*)
+ mock_stream = @body.split(' ')
+ mock_stream.each do |fragment|
+ sleep 0.002.seconds
+
+ yield fragment if block_given?
+ end
+
+ @body
+ end
+ end
+
+ yield response if block_given?
+
+ response
+ end
+ end
+ end
+
+ @original_net_http = Net.send(:remove_const, :HTTP)
+ @webmock_net_http = WebMock::HttpLibAdapters::NetHttpAdapter.instance_variable_get(:@webMockNetHTTP)
+
+ Net.send(:const_set, :HTTP, mocked_http)
+ WebMock::HttpLibAdapters::NetHttpAdapter.instance_variable_set(:@webMockNetHTTP, mocked_http)
+
+ # Reload Gitlab::NetHttpAdapter
+ Gitlab.send(:remove_const, :NetHttpAdapter)
+ load "#{Rails.root}/lib/gitlab/net_http_adapter.rb"
+ end
+
+ before do
+ stub_const("Gitlab::HTTP::DEFAULT_READ_TOTAL_TIMEOUT", 0.001.seconds)
+
+ WebMock.stub_request(:post, /.*/).to_return do
+ { body: "chunk-1 chunk-2", status: 200 }
+ end
+ end
+
+ after(:all) do
+ Net.send(:remove_const, :HTTP)
+ Net.send(:const_set, :HTTP, @original_net_http)
+ WebMock::HttpLibAdapters::NetHttpAdapter.instance_variable_set(:@webMockNetHTTP, @webmock_net_http)
+
+ # Reload Gitlab::NetHttpAdapter
+ Gitlab.send(:remove_const, :NetHttpAdapter)
+ load "#{Rails.root}/lib/gitlab/net_http_adapter.rb"
+ end
+
+ let(:options) { {} }
+
+ subject(:request_slow_responder) { described_class.post('http://example.org', **options) }
+
+ it 'raises an error' do
+ expect { request_slow_responder }.to raise_error(
+ Gitlab::HTTP::ReadTotalTimeout, /Request timed out after ?([0-9]*[.])?[0-9]+ seconds/)
+ end
+
+ context 'and timeout option is greater than DEFAULT_READ_TOTAL_TIMEOUT' do
+ let(:options) { { timeout: 10.seconds } }
+
+ it 'does not raise an error' do
+ expect { request_slow_responder }.not_to raise_error
+ end
+ end
+
+ context 'and stream_body option is truthy' do
+ let(:options) { { stream_body: true } }
+
+ it 'does not raise an error' do
+ expect { request_slow_responder }.not_to raise_error
+ end
+ end
+ end
+
+ it 'calls a block' do
+ WebMock.stub_request(:post, /.*/)
+
+ expect { |b| described_class.post('http://example.org', &b) }.to yield_with_args
+ end
+
+ describe 'allow_local_requests_from_web_hooks_and_services is' do
+ before do
+ WebMock.stub_request(:get, /.*/).to_return(status: 200, body: 'Success')
+ end
+
+ context 'disabled' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:allow_local_requests_from_web_hooks_and_services?).and_return(false)
+ end
+
+ it 'denies requests to localhost' do
+ expect { described_class.get('http://localhost:3003') }.to raise_error(Gitlab::HTTP::BlockedUrlError)
+ end
+
+ it 'denies requests to private network' do
+ expect { described_class.get('http://192.168.1.2:3003') }.to raise_error(Gitlab::HTTP::BlockedUrlError)
+ end
+
+ context 'if allow_local_requests set to true' do
+ it 'override the global value and allow requests to localhost or private network' do
+ stub_full_request('http://localhost:3003')
+
+ expect { described_class.get('http://localhost:3003', allow_local_requests: true) }.not_to raise_error
+ end
+ end
+ end
+
+ context 'enabled' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:allow_local_requests_from_web_hooks_and_services?).and_return(true)
+ end
+
+ it 'allow requests to localhost' do
+ stub_full_request('http://localhost:3003')
+
+ expect { described_class.get('http://localhost:3003') }.not_to raise_error
+ end
+
+ it 'allow requests to private network' do
+ expect { described_class.get('http://192.168.1.2:3003') }.not_to raise_error
+ end
+
+ context 'if allow_local_requests set to false' do
+ it 'override the global value and ban requests to localhost or private network' do
+ expect { described_class.get('http://localhost:3003', allow_local_requests: false) }.to raise_error(
+ Gitlab::HTTP::BlockedUrlError)
+ end
+ end
+ end
+ end
+
+ describe 'handle redirect loops' do
+ before do
+ stub_full_request("http://example.org", method: :any).to_raise(
+ HTTParty::RedirectionTooDeep.new("Redirection Too Deep"))
+ end
+
+ it 'handles GET requests' do
+ expect { described_class.get('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
+ end
+
+ it 'handles POST requests' do
+ expect { described_class.post('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
+ end
+
+ it 'handles PUT requests' do
+ expect { described_class.put('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
+ end
+
+ it 'handles DELETE requests' do
+ expect { described_class.delete('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
+ end
+
+ it 'handles HEAD requests' do
+ expect { described_class.head('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
+ end
+ end
+
+ describe 'setting default timeouts' do
+ before do
+ stub_full_request('http://example.org', method: :any)
+ end
+
+ context 'when no timeouts are set' do
+ it 'sets default open and read and write timeouts' do
+ expect(described_class).to receive(:httparty_perform_request).with(
+ Net::HTTP::Get, 'http://example.org', default_options
+ ).and_call_original
+
+ described_class.get('http://example.org')
+ end
+ end
+
+ context 'when :timeout is set' do
+ it 'does not set any default timeouts' do
+ expect(described_class).to receive(:httparty_perform_request).with(
+ Net::HTTP::Get, 'http://example.org', { timeout: 1 }
+ ).and_call_original
+
+ described_class.get('http://example.org', { timeout: 1 })
+ end
+ end
+
+ context 'when :open_timeout is set' do
+ it 'only sets default read and write timeout' do
+ expect(described_class).to receive(:httparty_perform_request).with(
+ Net::HTTP::Get, 'http://example.org', default_options.merge(open_timeout: 1)
+ ).and_call_original
+
+ described_class.get('http://example.org', open_timeout: 1)
+ end
+ end
+
+ context 'when :read_timeout is set' do
+ it 'only sets default open and write timeout' do
+ expect(described_class).to receive(:httparty_perform_request).with(
+ Net::HTTP::Get, 'http://example.org', default_options.merge(read_timeout: 1)
+ ).and_call_original
+
+ described_class.get('http://example.org', read_timeout: 1)
+ end
+ end
+
+ context 'when :write_timeout is set' do
+ it 'only sets default open and read timeout' do
+ expect(described_class).to receive(:httparty_perform_request).with(
+ Net::HTTP::Put, 'http://example.org', default_options.merge(write_timeout: 1)
+ ).and_call_original
+
+ described_class.put('http://example.org', write_timeout: 1)
+ end
+ end
+ end
+
+ describe '.try_get' do
+ let(:path) { 'http://example.org' }
+
+ let(:extra_log_info_proc) do
+ proc do |error, url, options|
+ { klass: error.class, url: url, options: options }
+ end
+ end
+
+ let(:request_options) do
+ default_options.merge({
+ verify: false,
+ basic_auth: { username: 'user', password: 'pass' }
+ })
+ end
+
+ Gitlab::HTTP::HTTP_ERRORS.each do |exception_class|
+ context "with #{exception_class}" do
+ let(:klass) { exception_class }
+
+ context 'with path' do
+ before do
+ expect(described_class).to receive(:httparty_perform_request)
+ .with(Net::HTTP::Get, path, default_options)
+ .and_raise(klass)
+ end
+
+ it 'handles requests without extra_log_info' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:log_exception)
+ .with(instance_of(klass), {})
+
+ expect(described_class.try_get(path)).to be_nil
+ end
+
+ it 'handles requests with extra_log_info as hash' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:log_exception)
+ .with(instance_of(klass), { a: :b })
+
+ expect(described_class.try_get(path, extra_log_info: { a: :b })).to be_nil
+ end
+
+ it 'handles requests with extra_log_info as proc' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:log_exception)
+ .with(instance_of(klass), { url: path, klass: klass, options: {} })
+
+ expect(described_class.try_get(path, extra_log_info: extra_log_info_proc)).to be_nil
+ end
+ end
+
+ context 'with path and options' do
+ before do
+ expect(described_class).to receive(:httparty_perform_request)
+ .with(Net::HTTP::Get, path, request_options)
+ .and_raise(klass)
+ end
+
+ it 'handles requests without extra_log_info' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:log_exception)
+ .with(instance_of(klass), {})
+
+ expect(described_class.try_get(path, request_options)).to be_nil
+ end
+
+ it 'handles requests with extra_log_info as hash' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:log_exception)
+ .with(instance_of(klass), { a: :b })
+
+ expect(described_class.try_get(path, **request_options, extra_log_info: { a: :b })).to be_nil
+ end
+
+ it 'handles requests with extra_log_info as proc' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:log_exception)
+ .with(instance_of(klass), { klass: klass, url: path, options: request_options })
+
+ expect(described_class.try_get(path, **request_options, extra_log_info: extra_log_info_proc)).to be_nil
+ end
+ end
+
+ context 'with path, options, and block' do
+ let(:block) do
+ proc {}
+ end
+
+ before do
+ expect(described_class).to receive(:httparty_perform_request)
+ .with(Net::HTTP::Get, path, request_options, &block)
+ .and_raise(klass)
+ end
+
+ it 'handles requests without extra_log_info' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:log_exception)
+ .with(instance_of(klass), {})
+
+ expect(described_class.try_get(path, request_options, &block)).to be_nil
+ end
+
+ it 'handles requests with extra_log_info as hash' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:log_exception)
+ .with(instance_of(klass), { a: :b })
+
+ expect(described_class.try_get(path, **request_options, extra_log_info: { a: :b }, &block)).to be_nil
+ end
+
+ it 'handles requests with extra_log_info as proc' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:log_exception)
+ .with(instance_of(klass), { klass: klass, url: path, options: request_options })
+
+ expect(
+ described_class.try_get(path, **request_options, extra_log_info: extra_log_info_proc, &block)
+ ).to be_nil
+ end
+ end
+ end
+ end
+ end
+
+ describe 'silent mode', feature_category: :geo_replication do
+ before do
+ stub_full_request("http://example.org", method: :any)
+ stub_application_setting(silent_mode_enabled: silent_mode)
+ end
+
+ context 'when silent mode is enabled' do
+ let(:silent_mode) { true }
+
+ it 'allows GET requests' do
+ expect { described_class.get('http://example.org') }.not_to raise_error
+ end
+
+ it 'allows HEAD requests' do
+ expect { described_class.head('http://example.org') }.not_to raise_error
+ end
+
+ it 'allows OPTIONS requests' do
+ expect { described_class.options('http://example.org') }.not_to raise_error
+ end
+
+ it 'blocks POST requests' do
+ expect { described_class.post('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
+ end
+
+ it 'blocks PUT requests' do
+ expect { described_class.put('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
+ end
+
+ it 'blocks DELETE requests' do
+ expect { described_class.delete('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
+ end
+
+ it 'logs blocked requests' do
+ expect(::Gitlab::AppJsonLogger).to receive(:info).with(
+ message: "Outbound HTTP request blocked",
+ outbound_http_request_method: 'Net::HTTP::Post',
+ silent_mode_enabled: true
+ )
+
+ expect { described_class.post('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
+ end
+ end
+
+ context 'when silent mode is disabled' do
+ let(:silent_mode) { false }
+
+ it 'allows GET requests' do
+ expect { described_class.get('http://example.org') }.not_to raise_error
+ end
+
+ it 'allows HEAD requests' do
+ expect { described_class.head('http://example.org') }.not_to raise_error
+ end
+
+ it 'allows OPTIONS requests' do
+ expect { described_class.options('http://example.org') }.not_to raise_error
+ end
+
+ it 'allows POST requests' do
+ expect { described_class.post('http://example.org') }.not_to raise_error
+ end
+
+ it 'allows PUT requests' do
+ expect { described_class.put('http://example.org') }.not_to raise_error
+ end
+
+ it 'allows DELETE requests' do
+ expect { described_class.delete('http://example.org') }.not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/memory/instrumentation_spec.rb b/spec/lib/gitlab/memory/instrumentation_spec.rb
index 3d58f28ec1e..f287edb7da3 100644
--- a/spec/lib/gitlab/memory/instrumentation_spec.rb
+++ b/spec/lib/gitlab/memory/instrumentation_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Gitlab::Memory::Instrumentation, feature_category: :application_p
subject do
described_class.with_memory_allocations do
- Array.new(1000).map { '0' * 100 }
+ Array.new(1000).map { '0' * 1000 }
end
end
@@ -52,7 +52,7 @@ RSpec.describe Gitlab::Memory::Instrumentation, feature_category: :application_p
expect(result).to include(
mem_objects: be > 1000,
mem_mallocs: be > 1000,
- mem_bytes: be > 100_000, # 100 items * 100 bytes each
+ mem_bytes: be > 1_000_000, # 1000 items * 1000 bytes each
mem_total_bytes: eq(result[:mem_bytes] + 40 * result[:mem_objects])
)
end
diff --git a/spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb b/spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb
index 4f437e57600..74aa3528328 100644
--- a/spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb
+++ b/spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb
@@ -137,4 +137,19 @@ RSpec.describe Gitlab::MergeRequests::Mergeability::CheckResult do
end
end
end
+
+ describe '#identifier' do
+ let(:payload) { { identifier: 'ci_must_pass' } }
+
+ subject(:identifier) do
+ described_class
+ .new(
+ status: described_class::SUCCESS_STATUS,
+ payload: payload
+ )
+ .identifier
+ end
+
+ it { is_expected.to eq(:ci_must_pass) }
+ end
end
diff --git a/spec/lib/gitlab/metrics/web_transaction_spec.rb b/spec/lib/gitlab/metrics/web_transaction_spec.rb
index dc59fa804c4..ea98c8d7933 100644
--- a/spec/lib/gitlab/metrics/web_transaction_spec.rb
+++ b/spec/lib/gitlab/metrics/web_transaction_spec.rb
@@ -64,7 +64,7 @@ RSpec.describe Gitlab::Metrics::WebTransaction do
describe '#labels' do
context 'when request goes to Grape endpoint' do
before do
- route = double(:route, request_method: 'GET', path: '/:version/projects/:id/archive(.:format)')
+ route = double(:route, request_method: 'GET', path: '/:version/projects/:id/archive(.:format)', origin: '/:version/projects/:id/archive')
endpoint = double(:endpoint, route: route,
options: { for: API::Projects, path: [":id/archive"] },
namespace: "/projects")
@@ -76,7 +76,12 @@ RSpec.describe Gitlab::Metrics::WebTransaction do
end
it 'provides labels with the method and path of the route in the grape endpoint' do
- expect(transaction.labels).to eq({ controller: 'Grape', action: 'GET /projects/:id/archive', feature_category: 'projects' })
+ expect(transaction.labels).to eq({
+ controller: 'Grape',
+ action: 'GET /projects/:id/archive',
+ feature_category: 'projects',
+ endpoint_id: 'GET /:version/projects/:id/archive'
+ })
end
it 'contains only the labels defined for transactions' do
@@ -103,7 +108,7 @@ RSpec.describe Gitlab::Metrics::WebTransaction do
end
it 'tags a transaction with the name and action of a controller' do
- expect(transaction.labels).to eq({ controller: 'TestController', action: 'show', feature_category: ::Gitlab::FeatureCategories::FEATURE_CATEGORY_DEFAULT })
+ expect(transaction.labels).to eq({ controller: 'TestController', action: 'show', feature_category: ::Gitlab::FeatureCategories::FEATURE_CATEGORY_DEFAULT, endpoint_id: 'TestController#show' })
end
it 'contains only the labels defined for transactions' do
@@ -114,7 +119,7 @@ RSpec.describe Gitlab::Metrics::WebTransaction do
let(:request) { double(:request, format: double(:format, ref: :json)) }
it 'appends the mime type to the transaction action' do
- expect(transaction.labels).to eq({ controller: 'TestController', action: 'show.json', feature_category: ::Gitlab::FeatureCategories::FEATURE_CATEGORY_DEFAULT })
+ expect(transaction.labels).to eq({ controller: 'TestController', action: 'show.json', feature_category: ::Gitlab::FeatureCategories::FEATURE_CATEGORY_DEFAULT, endpoint_id: 'TestController#show' })
end
end
@@ -122,7 +127,7 @@ RSpec.describe Gitlab::Metrics::WebTransaction do
let(:request) { double(:request, format: double(:format, ref: 'http://example.com')) }
it 'does not append the MIME type to the transaction action' do
- expect(transaction.labels).to eq({ controller: 'TestController', action: 'show', feature_category: ::Gitlab::FeatureCategories::FEATURE_CATEGORY_DEFAULT })
+ expect(transaction.labels).to eq({ controller: 'TestController', action: 'show', feature_category: ::Gitlab::FeatureCategories::FEATURE_CATEGORY_DEFAULT, endpoint_id: 'TestController#show' })
end
end
@@ -131,7 +136,7 @@ RSpec.describe Gitlab::Metrics::WebTransaction do
# This is needed since we're not actually making a request, which would trigger the controller pushing to the context
::Gitlab::ApplicationContext.push(feature_category: 'source_code_management')
- expect(transaction.labels).to eq({ controller: 'TestController', action: 'show', feature_category: "source_code_management" })
+ expect(transaction.labels).to eq({ controller: 'TestController', action: 'show', feature_category: 'source_code_management', endpoint_id: 'TestController#show' })
end
end
end
@@ -147,7 +152,7 @@ RSpec.describe Gitlab::Metrics::WebTransaction do
let(:controller) { double(:controller, class: controller_class, action_name: 'show', request: request) }
let(:transaction_obj) { described_class.new({ 'action_controller.instance' => controller }) }
- let(:labels) { { controller: 'TestController', action: 'show', feature_category: 'projects' } }
+ let(:labels) { { controller: 'TestController', action: 'show', feature_category: 'projects', endpoint_id: 'TestController#show' } }
before do
::Gitlab::ApplicationContext.push(feature_category: 'projects')
diff --git a/spec/lib/gitlab/middleware/handle_malformed_strings_spec.rb b/spec/lib/gitlab/middleware/handle_malformed_strings_spec.rb
index ed1440f23b6..7bc5fd853bf 100644
--- a/spec/lib/gitlab/middleware/handle_malformed_strings_spec.rb
+++ b/spec/lib/gitlab/middleware/handle_malformed_strings_spec.rb
@@ -58,6 +58,39 @@ RSpec.describe Gitlab::Middleware::HandleMalformedStrings do
end
end
+ context 'with POST request' do
+ let(:request_env) do
+ Rack::MockRequest.env_for(
+ '/',
+ method: 'POST',
+ input: input,
+ 'CONTENT_TYPE' => 'application/json'
+ )
+ end
+
+ let(:params) { { method: 'POST' } }
+
+ context 'with valid JSON' do
+ let(:input) { %({"hello": "world"}) }
+
+ it 'returns no error' do
+ env = request_env
+
+ expect(subject.call(env)).not_to eq error_400
+ end
+ end
+
+ context 'with bad JSON' do
+ let(:input) { "{ bad json }" }
+
+ it 'rejects bad JSON with 400 error' do
+ env = request_env
+
+ expect(subject.call(env)).to eq error_400
+ end
+ end
+ end
+
context 'in authorization headers' do
let(:problematic_input) { null_byte }
diff --git a/spec/lib/gitlab/middleware/path_traversal_check_spec.rb b/spec/lib/gitlab/middleware/path_traversal_check_spec.rb
new file mode 100644
index 00000000000..3d334a60c49
--- /dev/null
+++ b/spec/lib/gitlab/middleware/path_traversal_check_spec.rb
@@ -0,0 +1,197 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Gitlab::Middleware::PathTraversalCheck, feature_category: :shared do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:fake_response) { [200, { 'Content-Type' => 'text/plain' }, ['OK']] }
+ let(:fake_app) { ->(_) { fake_response } }
+ let(:middleware) { described_class.new(fake_app) }
+
+ describe '#call' do
+ let(:fullpath) { ::Rack::Request.new(env).fullpath }
+ let(:decoded_fullpath) { CGI.unescape(fullpath) }
+
+ let(:env) do
+ Rack::MockRequest.env_for(
+ path,
+ method: method,
+ params: query_params
+ )
+ end
+
+ subject { middleware.call(env) }
+
+ shared_examples 'no issue' do
+ it 'measures and logs the execution time' do
+ expect(::Gitlab::PathTraversal)
+ .to receive(:check_path_traversal!)
+ .with(decoded_fullpath, skip_decoding: true)
+ .and_call_original
+ expect(::Gitlab::AppLogger)
+ .to receive(:warn)
+ .with({ class_name: described_class.name, duration_ms: instance_of(Float) })
+ .and_call_original
+
+ expect(subject).to eq(fake_response)
+ end
+
+ context 'with log_execution_time_path_traversal_middleware disabled' do
+ before do
+ stub_feature_flags(log_execution_time_path_traversal_middleware: false)
+ end
+
+ it 'does nothing' do
+ expect(::Gitlab::PathTraversal)
+ .to receive(:check_path_traversal!)
+ .with(decoded_fullpath, skip_decoding: true)
+ .and_call_original
+ expect(::Gitlab::AppLogger)
+ .not_to receive(:warn)
+
+ expect(subject).to eq(fake_response)
+ end
+ end
+ end
+
+ shared_examples 'path traversal' do
+ it 'logs the problem and measures the execution time' do
+ expect(::Gitlab::PathTraversal)
+ .to receive(:check_path_traversal!)
+ .with(decoded_fullpath, skip_decoding: true)
+ .and_call_original
+ expect(::Gitlab::AppLogger)
+ .to receive(:warn)
+ .with({ message: described_class::PATH_TRAVERSAL_MESSAGE, path: instance_of(String) })
+ expect(::Gitlab::AppLogger)
+ .to receive(:warn)
+ .with({
+ class_name: described_class.name,
+ duration_ms: instance_of(Float),
+ message: described_class::PATH_TRAVERSAL_MESSAGE,
+ fullpath: fullpath
+ }).and_call_original
+
+ expect(subject).to eq(fake_response)
+ end
+
+ context 'with log_execution_time_path_traversal_middleware disabled' do
+ before do
+ stub_feature_flags(log_execution_time_path_traversal_middleware: false)
+ end
+
+ it 'logs the problem without the execution time' do
+ expect(::Gitlab::PathTraversal)
+ .to receive(:check_path_traversal!)
+ .with(decoded_fullpath, skip_decoding: true)
+ .and_call_original
+ expect(::Gitlab::AppLogger)
+ .to receive(:warn)
+ .with({ message: described_class::PATH_TRAVERSAL_MESSAGE, path: instance_of(String) })
+ expect(::Gitlab::AppLogger)
+ .to receive(:warn)
+ .with({
+ class_name: described_class.name,
+ message: described_class::PATH_TRAVERSAL_MESSAGE,
+ fullpath: fullpath
+ }).and_call_original
+
+ expect(subject).to eq(fake_response)
+ end
+ end
+ end
+
+ # We use Rack request.fullpath, which will dump the accessed path and
+ # the query string. The query string is only set for GET requests.
+ # Hence the different expectation (when params are set) for GET and
+ # the other methods (see below)
+ context 'when using get' do
+ let(:method) { 'get' }
+
+ where(:path, :query_params, :shared_example_name) do
+ '/foo/bar' | {} | 'no issue'
+ '/foo/../bar' | {} | 'path traversal'
+ '/foo%2Fbar' | {} | 'no issue'
+ '/foo%2F..%2Fbar' | {} | 'path traversal'
+ '/foo%252F..%252Fbar' | {} | 'no issue'
+ '/foo/bar' | { x: 'foo' } | 'no issue'
+ '/foo/bar' | { x: 'foo/../bar' } | 'path traversal'
+ '/foo/bar' | { x: 'foo%2Fbar' } | 'no issue'
+ '/foo/bar' | { x: 'foo%2F..%2Fbar' } | 'no issue'
+ '/foo/bar' | { x: 'foo%252F..%252Fbar' } | 'no issue'
+ '/foo%2F..%2Fbar' | { x: 'foo%252F..%252Fbar' } | 'path traversal'
+ end
+
+ with_them do
+ it_behaves_like params[:shared_example_name]
+ end
+
+ context 'with an issues search path' do
+ let(:query_params) { {} }
+ let(:path) do
+ 'project/-/issues/?sort=updated_desc&milestone_title=16.0&search=Release%20%252525&first_page_size=20'
+ end
+
+ it_behaves_like 'no issue'
+ end
+ end
+
+ %w[post put delete patch].each do |http_method|
+ context "when using #{http_method}" do
+ let(:method) { http_method }
+
+ where(:path, :query_params, :shared_example_name) do
+ '/foo/bar' | {} | 'no issue'
+ '/foo/../bar' | {} | 'path traversal'
+ '/foo%2Fbar' | {} | 'no issue'
+ '/foo%2F..%2Fbar' | {} | 'path traversal'
+ '/foo%252F..%252Fbar' | {} | 'no issue'
+ '/foo/bar' | { x: 'foo' } | 'no issue'
+ '/foo/bar' | { x: 'foo/../bar' } | 'no issue'
+ '/foo/bar' | { x: 'foo%2Fbar' } | 'no issue'
+ '/foo/bar' | { x: 'foo%2F..%2Fbar' } | 'no issue'
+ '/foo/bar' | { x: 'foo%252F..%252Fbar' } | 'no issue'
+ '/foo%2F..%2Fbar' | { x: 'foo%252F..%252Fbar' } | 'path traversal'
+ end
+
+ with_them do
+ it_behaves_like params[:shared_example_name]
+ end
+ end
+ end
+
+ context 'with check_path_traversal_middleware disabled' do
+ before do
+ stub_feature_flags(check_path_traversal_middleware: false)
+ end
+
+ where(:path, :query_params) do
+ '/foo/bar' | {}
+ '/foo/../bar' | {}
+ '/foo%2Fbar' | {}
+ '/foo%2F..%2Fbar' | {}
+ '/foo%252F..%252Fbar' | {}
+ '/foo/bar' | { x: 'foo' }
+ '/foo/bar' | { x: 'foo/../bar' }
+ '/foo/bar' | { x: 'foo%2Fbar' }
+ '/foo/bar' | { x: 'foo%2F..%2Fbar' }
+ '/foo/bar' | { x: 'foo%252F..%252Fbar' }
+ end
+
+ with_them do
+ %w[get post put delete patch].each do |http_method|
+ context "when using #{http_method}" do
+ let(:method) { http_method }
+
+ it 'does not check for path traversals' do
+ expect(::Gitlab::PathTraversal).not_to receive(:check_path_traversal!)
+
+ subject
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/observability_spec.rb b/spec/lib/gitlab/observability_spec.rb
index 04c35f0ee3a..7af2daea11c 100644
--- a/spec/lib/gitlab/observability_spec.rb
+++ b/spec/lib/gitlab/observability_spec.rb
@@ -46,206 +46,54 @@ RSpec.describe Gitlab::Observability, feature_category: :error_tracking do
it { is_expected.to eq("#{described_class.observability_url}/v3/tenant/#{project.id}") }
end
- describe '.build_full_url' do
- let_it_be(:group) { build_stubbed(:group, id: 123) }
- let(:observability_url) { described_class.observability_url }
+ describe '.should_enable_observability_auth_scopes?' do
+ subject { described_class.should_enable_observability_auth_scopes?(resource) }
- it 'returns the full observability url for the given params' do
- url = described_class.build_full_url(group, '/foo?bar=baz', '/')
- expect(url).to eq("https://observe.gitlab.com/-/123/foo?bar=baz")
- end
-
- it 'handles missing / from observability_path' do
- url = described_class.build_full_url(group, 'foo?bar=baz', '/')
- expect(url).to eq("https://observe.gitlab.com/-/123/foo?bar=baz")
- end
-
- it 'sanitises observability_path' do
- url = described_class.build_full_url(group, "/test?groupId=<script>alert('attack!')</script>", '/')
- expect(url).to eq("https://observe.gitlab.com/-/123/test?groupId=alert('attack!')")
- end
-
- context 'when observability_path is missing' do
- it 'builds the url with the fallback_path' do
- url = described_class.build_full_url(group, nil, '/fallback')
- expect(url).to eq("https://observe.gitlab.com/-/123/fallback")
- end
-
- it 'defaults to / if fallback_path is also missing' do
- url = described_class.build_full_url(group, nil, nil)
- expect(url).to eq("https://observe.gitlab.com/-/123/")
+ let(:parent) { build_stubbed(:group) }
+ let(:resource) do
+ build_stubbed(:group, parent: parent).tap do |g|
+ g.namespace_settings = build_stubbed(:namespace_settings, namespace: g)
end
end
- end
- describe '.embeddable_url' do
before do
- stub_config_setting(url: "https://www.gitlab.com")
- # Can't use build/build_stubbed as we want the routes to be generated as well
- create(:group, path: 'test-path', id: 123)
- end
-
- context 'when URL is valid' do
- where(:input, :expected) do
- [
- [
- "https://www.gitlab.com/groups/test-path/-/observability/explore?observability_path=%2Fexplore%3FgroupId%3D14485840%26left%3D%255B%2522now-1h%2522,%2522now%2522,%2522new-sentry.gitlab.net%2522,%257B%257D%255D",
- "https://observe.gitlab.com/-/123/explore?groupId=14485840&left=%5B%22now-1h%22,%22now%22,%22new-sentry.gitlab.net%22,%7B%7D%5D"
- ],
- [
- "https://www.gitlab.com/groups/test-path/-/observability/explore?observability_path=/goto/foo",
- "https://observe.gitlab.com/-/123/goto/foo"
- ]
- ]
- end
-
- with_them do
- it 'returns an embeddable observability url' do
- expect(described_class.embeddable_url(input)).to eq(expected)
- end
- end
+ stub_feature_flags(observability_tracing: parent)
end
- context 'when URL is invalid' do
- where(:input) do
- [
- # direct links to observe.gitlab.com
- "https://observe.gitlab.com/-/123/explore",
- 'https://observe.gitlab.com/v1/auth/start',
-
- # invalid GitLab URL
- "not a link",
- "https://foo.bar/groups/test-path/-/observability/explore?observability_path=/explore",
- "http://www.gitlab.com/groups/test-path/-/observability/explore?observability_path=/explore",
- "https://www.gitlab.com:123/groups/test-path/-/observability/explore?observability_path=/explore",
- "https://www.gitlab.com@example.com/groups/test-path/-/observability/explore?observability_path=/explore",
- "https://www.gitlab.com/groups/test-path/-/observability/explore?observability_path=@example.com",
-
- # invalid group/controller/actions
- "https://www.gitlab.com/groups/INVALID_GROUP/-/observability/explore?observability_path=/explore",
- "https://www.gitlab.com/groups/test-path/-/INVALID_CONTROLLER/explore?observability_path=/explore",
- "https://www.gitlab.com/groups/test-path/-/observability/INVALID_ACTION?observability_path=/explore",
-
- # invalid observablity path
- "https://www.gitlab.com/groups/test-path/-/observability/explore",
- "https://www.gitlab.com/groups/test-path/-/observability/explore?missing_observability_path=/explore",
- "https://www.gitlab.com/groups/test-path/-/observability/explore?observability_path=/not_embeddable",
- "https://www.gitlab.com/groups/test-path/-/observability/explore?observability_path=/datasources",
- "https://www.gitlab.com/groups/test-path/-/observability/explore?observability_path=not a valid path"
- ]
+ describe 'when resource is group' do
+ context 'if observability_tracing FF enabled' do
+ it { is_expected.to be true }
end
- with_them do
- it 'returns nil' do
- expect(described_class.embeddable_url(input)).to be_nil
+ context 'if observability_tracing FF disabled' do
+ before do
+ stub_feature_flags(observability_tracing: false)
end
- end
-
- it 'returns nil if the path detection throws an error' do
- test_url = "https://www.gitlab.com/groups/test-path/-/observability/explore"
- allow(Rails.application.routes).to receive(:recognize_path).with(test_url) {
- raise ActionController::RoutingError, 'test'
- }
- expect(described_class.embeddable_url(test_url)).to be_nil
- end
-
- it 'returns nil if parsing observaboility path throws an error' do
- observability_path = 'some-path'
- test_url = "https://www.gitlab.com/groups/test-path/-/observability/explore?observability_path=#{observability_path}"
-
- allow(URI).to receive(:parse).and_call_original
- allow(URI).to receive(:parse).with(observability_path) {
- raise URI::InvalidURIError, 'test'
- }
- expect(described_class.embeddable_url(test_url)).to be_nil
+ it { is_expected.to be false }
end
end
- end
-
- describe '.allowed_for_action?' do
- let(:group) { build_stubbed(:group) }
- let(:user) { build_stubbed(:user) }
-
- before do
- allow(described_class).to receive(:allowed?).and_call_original
- end
-
- it 'returns false if action is nil' do
- expect(described_class.allowed_for_action?(user, group, nil)).to eq(false)
- end
- describe 'allowed? calls' do
- using RSpec::Parameterized::TableSyntax
+ describe 'when resource is project' do
+ let(:resource) { build_stubbed(:project, namespace: parent) }
- where(:action, :permission) do
- :foo | :admin_observability
- :explore | :read_observability
- :datasources | :admin_observability
- :manage | :admin_observability
- :dashboards | :read_observability
+ context 'if observability_tracing FF enabled' do
+ it { is_expected.to be true }
end
- with_them do
- it "calls allowed? with #{params[:permission]} when actions is #{params[:action]}" do
- described_class.allowed_for_action?(user, group, action)
- expect(described_class).to have_received(:allowed?).with(user, group, permission)
+ context 'if observability_tracing FF disabled' do
+ before do
+ stub_feature_flags(observability_tracing: false)
end
- end
- end
- end
-
- describe '.allowed?' do
- let(:user) { build_stubbed(:user) }
- let(:group) { build_stubbed(:group) }
- let(:test_permission) { :read_observability }
-
- before do
- allow(Ability).to receive(:allowed?).and_return(false)
- end
-
- subject do
- described_class.allowed?(user, group, test_permission)
- end
-
- it 'checks if ability is allowed for the given user and group' do
- allow(Ability).to receive(:allowed?).and_return(true)
-
- subject
-
- expect(Ability).to have_received(:allowed?).with(user, test_permission, group)
- end
-
- it 'checks for admin_observability if permission is missing' do
- described_class.allowed?(user, group)
-
- expect(Ability).to have_received(:allowed?).with(user, :admin_observability, group)
- end
-
- it 'returns true if the ability is allowed' do
- allow(Ability).to receive(:allowed?).and_return(true)
-
- expect(subject).to eq(true)
- end
- it 'returns false if the ability is not allowed' do
- allow(Ability).to receive(:allowed?).and_return(false)
-
- expect(subject).to eq(false)
- end
-
- it 'returns false if observability url is missing' do
- allow(described_class).to receive(:observability_url).and_return("")
-
- expect(subject).to eq(false)
+ it { is_expected.to be false }
+ end
end
- it 'returns false if group is missing' do
- expect(described_class.allowed?(user, nil, :read_observability)).to eq(false)
- end
+ describe 'when resource is not a group or project' do
+ let(:resource) { build_stubbed(:user) }
- it 'returns false if user is missing' do
- expect(described_class.allowed?(nil, group, :read_observability)).to eq(false)
+ it { is_expected.to be false }
end
end
end
diff --git a/spec/lib/gitlab/octokit/middleware_spec.rb b/spec/lib/gitlab/octokit/middleware_spec.rb
index f7063f2c4f2..07936de9e78 100644
--- a/spec/lib/gitlab/octokit/middleware_spec.rb
+++ b/spec/lib/gitlab/octokit/middleware_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::Octokit::Middleware, feature_category: :importers do
shared_examples 'Blocked URL' do
it 'raises an error' do
- expect { middleware.call(env) }.to raise_error(Gitlab::UrlBlocker::BlockedUrlError)
+ expect { middleware.call(env) }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
end
end
@@ -88,7 +88,7 @@ RSpec.describe Gitlab::Octokit::Middleware, feature_category: :importers do
let(:env) { { url: 'ssh://172.16.0.0' } }
it 'raises an error' do
- expect { middleware.call(env) }.to raise_error(Gitlab::UrlBlocker::BlockedUrlError)
+ expect { middleware.call(env) }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
end
end
end
diff --git a/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb b/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
index 4128f745ce7..effe767e41d 100644
--- a/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
+++ b/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
@@ -6,52 +6,24 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
subject { described_class }
describe '.available_for_type?' do
- context 'with api_keyset_pagination_multi_order FF disabled' do
- before do
- stub_feature_flags(api_keyset_pagination_multi_order: false)
- end
-
- it 'returns true for Group' do
- expect(subject.available_for_type?(Group.all)).to be_truthy
- end
-
- it 'returns true for Ci::Build' do
- expect(subject.available_for_type?(Ci::Build.all)).to be_truthy
- end
-
- it 'returns true for Packages::BuildInfo' do
- expect(subject.available_for_type?(Packages::BuildInfo.all)).to be_truthy
- end
-
- it 'return false for User' do
- expect(subject.available_for_type?(User.all)).to be_falsey
- end
+ it 'returns true for Group' do
+ expect(subject.available_for_type?(Group.all)).to be_truthy
end
- context 'with api_keyset_pagination_multi_order FF enabled' do
- before do
- stub_feature_flags(api_keyset_pagination_multi_order: true)
- end
-
- it 'returns true for Group' do
- expect(subject.available_for_type?(Group.all)).to be_truthy
- end
-
- it 'returns true for Ci::Build' do
- expect(subject.available_for_type?(Ci::Build.all)).to be_truthy
- end
+ it 'returns true for Ci::Build' do
+ expect(subject.available_for_type?(Ci::Build.all)).to be_truthy
+ end
- it 'returns true for Packages::BuildInfo' do
- expect(subject.available_for_type?(Packages::BuildInfo.all)).to be_truthy
- end
+ it 'returns true for Packages::BuildInfo' do
+ expect(subject.available_for_type?(Packages::BuildInfo.all)).to be_truthy
+ end
- it 'returns true for User' do
- expect(subject.available_for_type?(User.all)).to be_truthy
- end
+ it 'returns true for User' do
+ expect(subject.available_for_type?(User.all)).to be_truthy
+ end
- it 'return false for other types of relations' do
- expect(subject.available_for_type?(Issue.all)).to be_falsey
- end
+ it 'return false for other types of relations' do
+ expect(subject.available_for_type?(Issue.all)).to be_falsey
end
end
@@ -100,48 +72,20 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
let(:order_by) { :id }
let(:sort) { :desc }
- context 'with api_keyset_pagination_multi_order FF disabled' do
- before do
- stub_feature_flags(api_keyset_pagination_multi_order: false)
- end
-
- it 'returns true for Ci::Build' do
- expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_truthy
- end
-
- it 'returns true for AuditEvent' do
- expect(subject.available?(cursor_based_request_context, AuditEvent.all)).to be_truthy
- end
-
- it 'returns true for Packages::BuildInfo' do
- expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_truthy
- end
-
- it 'returns false for User' do
- expect(subject.available?(cursor_based_request_context, User.all)).to be_falsey
- end
+ it 'returns true for Ci::Build' do
+ expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_truthy
end
- context 'with api_keyset_pagination_multi_order FF enabled' do
- before do
- stub_feature_flags(api_keyset_pagination_multi_order: true)
- end
-
- it 'returns true for Ci::Build' do
- expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_truthy
- end
-
- it 'returns true for AuditEvent' do
- expect(subject.available?(cursor_based_request_context, AuditEvent.all)).to be_truthy
- end
+ it 'returns true for AuditEvent' do
+ expect(subject.available?(cursor_based_request_context, AuditEvent.all)).to be_truthy
+ end
- it 'returns true for Packages::BuildInfo' do
- expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_truthy
- end
+ it 'returns true for Packages::BuildInfo' do
+ expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_truthy
+ end
- it 'returns true for User' do
- expect(subject.available?(cursor_based_request_context, User.all)).to be_truthy
- end
+ it 'returns true for User' do
+ expect(subject.available?(cursor_based_request_context, User.all)).to be_truthy
end
end
diff --git a/spec/lib/gitlab/path_traversal_spec.rb b/spec/lib/gitlab/path_traversal_spec.rb
index bba6f8293c2..063919dd985 100644
--- a/spec/lib/gitlab/path_traversal_spec.rb
+++ b/spec/lib/gitlab/path_traversal_spec.rb
@@ -93,6 +93,13 @@ RSpec.describe Gitlab::PathTraversal, feature_category: :shared do
it 'raises for other non-strings' do
expect { check_path_traversal!(%w[/tmp /tmp/../etc/passwd]) }.to raise_error(/Invalid path/)
end
+
+ context 'when skip_decoding is used' do
+ it 'does not detect double encoded chars' do
+ expect(check_path_traversal!('foo%252F..%2Fbar', skip_decoding: true)).to eq('foo%252F..%2Fbar')
+ expect(check_path_traversal!('foo%252F%2E%2E%2Fbar', skip_decoding: true)).to eq('foo%252F%2E%2E%2Fbar')
+ end
+ end
end
describe '.check_allowed_absolute_path!' do
diff --git a/spec/lib/gitlab/prometheus/metric_group_spec.rb b/spec/lib/gitlab/prometheus/metric_group_spec.rb
deleted file mode 100644
index a68cdfe5fb2..00000000000
--- a/spec/lib/gitlab/prometheus/metric_group_spec.rb
+++ /dev/null
@@ -1,48 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Prometheus::MetricGroup do
- describe '.common_metrics' do
- let!(:project_metric) { create(:prometheus_metric) }
- let!(:common_metric_group_a) { create(:prometheus_metric, :common, group: :aws_elb) }
- let!(:common_metric_group_b_q1) { create(:prometheus_metric, :common, group: :kubernetes) }
- let!(:common_metric_group_b_q2) { create(:prometheus_metric, :common, group: :kubernetes) }
-
- subject { described_class.common_metrics }
-
- it 'returns exactly two groups' do
- expect(subject.map(&:name)).to contain_exactly(
- 'Response metrics (AWS ELB)', 'System metrics (Kubernetes)')
- end
-
- it 'returns exactly three metric queries' do
- expect(subject.flat_map(&:metrics).map(&:id)).to contain_exactly(
- common_metric_group_a.id, common_metric_group_b_q1.id,
- common_metric_group_b_q2.id)
- end
-
- it 'orders by priority' do
- priorities = subject.map(&:priority)
- names = subject.map(&:name)
- expect(priorities).to eq([10, 5])
- expect(names).to eq(['Response metrics (AWS ELB)', 'System metrics (Kubernetes)'])
- end
- end
-
- describe '.for_project' do
- let!(:other_project) { create(:project) }
- let!(:project_metric) { create(:prometheus_metric) }
- let!(:common_metric) { create(:prometheus_metric, :common, group: :aws_elb) }
-
- subject do
- described_class.for_project(other_project)
- .flat_map(&:metrics)
- .map(&:id)
- end
-
- it 'returns exactly one common metric' do
- is_expected.to contain_exactly(common_metric.id)
- end
- end
-end
diff --git a/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb b/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb
deleted file mode 100644
index 66b93d0dd72..00000000000
--- a/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Prometheus::Queries::DeploymentQuery do
- let(:environment) { create(:environment, slug: 'environment-slug') }
- let(:deployment) { create(:deployment, environment: environment) }
- let(:client) { double('prometheus_client') }
-
- subject { described_class.new(client) }
-
- around do |example|
- time_without_subsecond_values = Time.local(2008, 9, 1, 12, 0, 0)
- travel_to(time_without_subsecond_values) { example.run }
- end
-
- it 'sends appropriate queries to prometheus' do
- start_time = (deployment.created_at - 30.minutes).to_f
- end_time = (deployment.created_at + 30.minutes).to_f
- created_at = deployment.created_at.to_f
-
- expect(client).to receive(:query_range).with('avg(container_memory_usage_bytes{container_name!="POD",environment="environment-slug"}) / 2^20',
- start_time: start_time, end_time: end_time)
- expect(client).to receive(:query).with('avg(avg_over_time(container_memory_usage_bytes{container_name!="POD",environment="environment-slug"}[30m]))',
- time: created_at)
- expect(client).to receive(:query).with('avg(avg_over_time(container_memory_usage_bytes{container_name!="POD",environment="environment-slug"}[30m]))',
- time: end_time)
-
- expect(client).to receive(:query_range).with('avg(rate(container_cpu_usage_seconds_total{container_name!="POD",environment="environment-slug"}[2m])) * 100',
- start_time: start_time, end_time: end_time)
- expect(client).to receive(:query).with('avg(rate(container_cpu_usage_seconds_total{container_name!="POD",environment="environment-slug"}[30m])) * 100',
- time: created_at)
- expect(client).to receive(:query).with('avg(rate(container_cpu_usage_seconds_total{container_name!="POD",environment="environment-slug"}[30m])) * 100',
- time: end_time)
-
- expect(subject.query(deployment.id)).to eq(memory_values: nil, memory_before: nil, memory_after: nil,
- cpu_values: nil, cpu_before: nil, cpu_after: nil)
- end
-end
diff --git a/spec/lib/gitlab/prometheus/queries/matched_metric_query_spec.rb b/spec/lib/gitlab/prometheus/queries/matched_metric_query_spec.rb
deleted file mode 100644
index 60449aeef7d..00000000000
--- a/spec/lib/gitlab/prometheus/queries/matched_metric_query_spec.rb
+++ /dev/null
@@ -1,137 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Prometheus::Queries::MatchedMetricQuery do
- include Prometheus::MetricBuilders
-
- let(:metric_group_class) { Gitlab::Prometheus::MetricGroup }
- let(:metric_class) { Gitlab::Prometheus::Metric }
-
- def series_info_with_environment(*more_metrics)
- %w{metric_a metric_b}.concat(more_metrics).map { |metric_name| { '__name__' => metric_name, 'environment' => '' } }
- end
-
- let(:metric_names) { %w{metric_a metric_b} }
- let(:series_info_without_environment) do
- [{ '__name__' => 'metric_a' },
- { '__name__' => 'metric_b' }]
- end
-
- let(:partially_empty_series_info) { [{ '__name__' => 'metric_a', 'environment' => '' }] }
- let(:empty_series_info) { [] }
-
- let(:client) { double('prometheus_client') }
-
- subject { described_class.new(client) }
-
- context 'with one group where two metrics is found' do
- before do
- allow(metric_group_class).to receive(:common_metrics).and_return([simple_metric_group])
- allow(client).to receive(:label_values).and_return(metric_names)
- end
-
- context 'both metrics in the group pass requirements' do
- before do
- allow(client).to receive(:series).and_return(series_info_with_environment)
- end
-
- it 'responds with both metrics as actve' do
- expect(subject.query).to eq([{ group: 'name', priority: 1, active_metrics: 2, metrics_missing_requirements: 0 }])
- end
- end
-
- context 'none of the metrics pass requirements' do
- before do
- allow(client).to receive(:series).and_return(series_info_without_environment)
- end
-
- it 'responds with both metrics missing requirements' do
- expect(subject.query).to eq([{ group: 'name', priority: 1, active_metrics: 0, metrics_missing_requirements: 2 }])
- end
- end
-
- context 'no series information found about the metrics' do
- before do
- allow(client).to receive(:series).and_return(empty_series_info)
- end
-
- it 'responds with both metrics missing requirements' do
- expect(subject.query).to eq([{ group: 'name', priority: 1, active_metrics: 0, metrics_missing_requirements: 2 }])
- end
- end
-
- context 'one of the series info was not found' do
- before do
- allow(client).to receive(:series).and_return(partially_empty_series_info)
- end
- it 'responds with one active and one missing metric' do
- expect(subject.query).to eq([{ group: 'name', priority: 1, active_metrics: 1, metrics_missing_requirements: 1 }])
- end
- end
- end
-
- context 'with one group where only one metric is found' do
- before do
- allow(metric_group_class).to receive(:common_metrics).and_return([simple_metric_group])
- allow(client).to receive(:label_values).and_return('metric_a')
- end
-
- context 'both metrics in the group pass requirements' do
- before do
- allow(client).to receive(:series).and_return(series_info_with_environment)
- end
-
- it 'responds with one metrics as active and no missing requiremens' do
- expect(subject.query).to eq([{ group: 'name', priority: 1, active_metrics: 1, metrics_missing_requirements: 0 }])
- end
- end
-
- context 'no metrics in group pass requirements' do
- before do
- allow(client).to receive(:series).and_return(series_info_without_environment)
- end
-
- it 'responds with one metrics as active and no missing requiremens' do
- expect(subject.query).to eq([{ group: 'name', priority: 1, active_metrics: 0, metrics_missing_requirements: 1 }])
- end
- end
- end
-
- context 'with two groups where metrics are found in each group' do
- let(:second_metric_group) { simple_metric_group(name: 'nameb', metrics: simple_metrics(added_metric_name: 'metric_c')) }
-
- before do
- allow(metric_group_class).to receive(:common_metrics).and_return([simple_metric_group, second_metric_group])
- allow(client).to receive(:label_values).and_return('metric_c')
- end
-
- context 'all metrics in both groups pass requirements' do
- before do
- allow(client).to receive(:series).and_return(series_info_with_environment('metric_c'))
- end
-
- it 'responds with one metrics as active and no missing requiremens' do
- expect(subject.query).to eq([
- { group: 'name', priority: 1, active_metrics: 1, metrics_missing_requirements: 0 },
- { group: 'nameb', priority: 1, active_metrics: 2, metrics_missing_requirements: 0 }
- ]
- )
- end
- end
-
- context 'no metrics in groups pass requirements' do
- before do
- allow(client).to receive(:series).and_return(series_info_without_environment)
- end
-
- it 'responds with one metrics as active and no missing requiremens' do
- expect(subject.query).to eq([
- { group: 'name', priority: 1, active_metrics: 0, metrics_missing_requirements: 1 },
- { group: 'nameb', priority: 1, active_metrics: 0, metrics_missing_requirements: 2 }
- ]
- )
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/prometheus/queries/validate_query_spec.rb b/spec/lib/gitlab/prometheus/queries/validate_query_spec.rb
deleted file mode 100644
index f09fa3548f8..00000000000
--- a/spec/lib/gitlab/prometheus/queries/validate_query_spec.rb
+++ /dev/null
@@ -1,59 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Prometheus::Queries::ValidateQuery do
- include PrometheusHelpers
-
- let(:api_url) { 'https://prometheus.example.com' }
- let(:client) { Gitlab::PrometheusClient.new(api_url) }
- let(:query) { 'avg(metric)' }
-
- subject { described_class.new(client) }
-
- context 'valid query' do
- before do
- allow(client).to receive(:query).with(query)
- end
-
- it 'passess query to prometheus' do
- expect(subject.query(query)).to eq(valid: true)
-
- expect(client).to have_received(:query).with(query)
- end
- end
-
- context 'invalid query' do
- let(:query) { 'invalid query' }
- let(:error_message) { "invalid parameter 'query': 1:9: parse error: unexpected identifier \"query\"" }
-
- it 'returns invalid' do
- freeze_time do
- stub_prometheus_query_error(
- prometheus_query_with_time_url(query, Time.now),
- error_message
- )
-
- expect(subject.query(query)).to eq(valid: false, error: error_message)
- end
- end
- end
-
- context 'when exceptions occur' do
- context 'Gitlab::HTTP::BlockedUrlError' do
- let(:api_url) { 'http://192.168.1.1' }
-
- let(:message) { "URL is blocked: Requests to the local network are not allowed" }
-
- before do
- stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
- end
-
- it 'catches exception and returns invalid' do
- freeze_time do
- expect(subject.query(query)).to eq(valid: false, error: message)
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/prometheus/query_variables_spec.rb b/spec/lib/gitlab/prometheus/query_variables_spec.rb
deleted file mode 100644
index d0947eef2d9..00000000000
--- a/spec/lib/gitlab/prometheus/query_variables_spec.rb
+++ /dev/null
@@ -1,96 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Prometheus::QueryVariables do
- describe '.call' do
- let_it_be_with_refind(:environment) { create(:environment) }
-
- let(:project) { environment.project }
- let(:slug) { environment.slug }
- let(:params) { {} }
-
- subject { described_class.call(environment, **params) }
-
- it { is_expected.to include(ci_environment_slug: slug) }
- it { is_expected.to include(ci_project_name: project.name) }
- it { is_expected.to include(ci_project_namespace: project.namespace.name) }
- it { is_expected.to include(ci_project_path: project.full_path) }
- it { is_expected.to include(ci_environment_name: environment.name) }
-
- it do
- is_expected.to include(environment_filter:
- %[container_name!="POD",environment="#{slug}"])
- end
-
- context 'without deployment platform' do
- it { is_expected.to include(kube_namespace: '') }
- end
-
- context 'with deployment platform' do
- context 'with project cluster' do
- let(:kube_namespace) { environment.deployment_namespace }
-
- before do
- create(:cluster, :project, :provided_by_user, projects: [project])
- end
-
- it { is_expected.to include(kube_namespace: kube_namespace) }
- end
-
- context 'with group cluster' do
- let(:cluster) { create(:cluster, :group, :provided_by_user, groups: [group]) }
- let(:group) { create(:group) }
- let(:project2) { create(:project) }
- let(:kube_namespace) { k8s_ns.namespace }
-
- let!(:k8s_ns) { create(:cluster_kubernetes_namespace, cluster: cluster, project: project, environment: environment) }
- let!(:k8s_ns2) { create(:cluster_kubernetes_namespace, cluster: cluster, project: project2, environment: environment) }
-
- before do
- group.projects << project
- group.projects << project2
- end
-
- it { is_expected.to include(kube_namespace: kube_namespace) }
- end
- end
-
- context '__range' do
- context 'when start_time and end_time are present' do
- let(:params) do
- {
- start_time: Time.rfc3339('2020-05-29T07:23:05.008Z'),
- end_time: Time.rfc3339('2020-05-29T15:23:05.008Z')
- }
- end
-
- it { is_expected.to include(__range: "#{8.hours.to_i}s") }
- end
-
- context 'when start_time and end_time are not present' do
- it { is_expected.to include(__range: nil) }
- end
-
- context 'when end_time is not present' do
- let(:params) do
- {
- start_time: Time.rfc3339('2020-05-29T07:23:05.008Z')
- }
- end
-
- it { is_expected.to include(__range: nil) }
- end
-
- context 'when start_time is not present' do
- let(:params) do
- {
- end_time: Time.rfc3339('2020-05-29T07:23:05.008Z')
- }
- end
-
- it { is_expected.to include(__range: nil) }
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/protocol_access_spec.rb b/spec/lib/gitlab/protocol_access_spec.rb
index 4722ea99608..cae14c3d7cf 100644
--- a/spec/lib/gitlab/protocol_access_spec.rb
+++ b/spec/lib/gitlab/protocol_access_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe Gitlab::ProtocolAccess do
+RSpec.describe Gitlab::ProtocolAccess, feature_category: :source_code_management do
using RSpec::Parameterized::TableSyntax
let_it_be(:group) { create(:group) }
@@ -10,25 +10,34 @@ RSpec.describe Gitlab::ProtocolAccess do
describe ".allowed?" do
where(:protocol, :project, :admin_setting, :namespace_setting, :expected_result) do
- "web" | nil | nil | nil | true
- "ssh" | nil | nil | nil | true
- "http" | nil | nil | nil | true
- "ssh" | nil | "" | nil | true
- "http" | nil | "" | nil | true
- "ssh" | nil | "ssh" | nil | true
- "http" | nil | "http" | nil | true
- "ssh" | nil | "http" | nil | false
- "http" | nil | "ssh" | nil | false
- "ssh" | ref(:p1) | nil | "all" | true
- "http" | ref(:p1) | nil | "all" | true
- "ssh" | ref(:p1) | nil | "ssh" | true
- "http" | ref(:p1) | nil | "http" | true
- "ssh" | ref(:p1) | nil | "http" | false
- "http" | ref(:p1) | nil | "ssh" | false
- "ssh" | ref(:p1) | "" | "all" | true
- "http" | ref(:p1) | "" | "all" | true
- "ssh" | ref(:p1) | "ssh" | "ssh" | true
- "http" | ref(:p1) | "http" | "http" | true
+ "web" | nil | nil | nil | true
+ "ssh" | nil | nil | nil | true
+ "http" | nil | nil | nil | true
+ "ssh_certificates" | nil | nil | nil | true
+ "ssh" | nil | "" | nil | true
+ "http" | nil | "" | nil | true
+ "ssh_certificates" | nil | "" | nil | true
+ "ssh" | nil | "ssh" | nil | true
+ "http" | nil | "http" | nil | true
+ "ssh_certificates" | nil | "ssh_certificates" | nil | true
+ "ssh" | nil | "http" | nil | false
+ "http" | nil | "ssh" | nil | false
+ "ssh_certificates" | nil | "ssh" | nil | false
+ "ssh" | ref(:p1) | nil | "all" | true
+ "http" | ref(:p1) | nil | "all" | true
+ "ssh_certificates" | ref(:p1) | nil | "all" | true
+ "ssh" | ref(:p1) | nil | "ssh" | true
+ "http" | ref(:p1) | nil | "http" | true
+ "ssh_certificates" | ref(:p1) | nil | "ssh_certificates" | true
+ "ssh" | ref(:p1) | nil | "http" | false
+ "http" | ref(:p1) | nil | "ssh" | false
+ "ssh_certificates" | ref(:p1) | nil | "ssh" | false
+ "ssh" | ref(:p1) | "" | "all" | true
+ "http" | ref(:p1) | "" | "all" | true
+ "ssh_certificates" | ref(:p1) | "" | "all" | true
+ "ssh" | ref(:p1) | "ssh" | "ssh" | true
+ "http" | ref(:p1) | "http" | "http" | true
+ "ssh_certificates" | ref(:p1) | "ssh_certificates" | "ssh_certificates" | true
end
with_them do
diff --git a/spec/lib/gitlab/puma/error_handler_spec.rb b/spec/lib/gitlab/puma/error_handler_spec.rb
new file mode 100644
index 00000000000..5b7cdf37af1
--- /dev/null
+++ b/spec/lib/gitlab/puma/error_handler_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Puma::ErrorHandler, feature_category: :shared do
+ subject { described_class.new(is_production) }
+
+ let(:is_production) { true }
+ let(:ex) { StandardError.new('Sample error message') }
+ let(:env) { {} }
+ let(:status_code) { 500 }
+
+ describe '#execute' do
+ it 'captures the exception and returns a Rack response' do
+ allow(Raven.configuration).to receive(:capture_allowed?).and_return(true)
+ expect(Raven).to receive(:capture_exception).with(
+ ex,
+ tags: { handler: 'puma_low_level' },
+ extra: { puma_env: env, status_code: status_code }
+ ).and_call_original
+
+ status, headers, message = subject.execute(ex, env, status_code)
+
+ expect(status).to eq(500)
+ expect(headers).to eq({})
+ expect(message).to eq(described_class::PROD_ERROR_MESSAGE)
+ end
+
+ context 'when capture is not allowed' do
+ it 'returns a Rack response without capturing the exception' do
+ allow(Raven.configuration).to receive(:capture_allowed?).and_return(false)
+ expect(Raven).not_to receive(:capture_exception)
+
+ status, headers, message = subject.execute(ex, env, status_code)
+
+ expect(status).to eq(500)
+ expect(headers).to eq({})
+ expect(message).to eq(described_class::PROD_ERROR_MESSAGE)
+ end
+ end
+
+ context 'when not in production' do
+ let(:is_production) { false }
+
+ it 'returns a Rack response with dev error message' do
+ allow(Raven.configuration).to receive(:capture_allowed?).and_return(true)
+
+ status, headers, message = subject.execute(ex, env, status_code)
+
+ expect(status).to eq(500)
+ expect(headers).to eq({})
+ expect(message).to eq(described_class::DEV_ERROR_MESSAGE)
+ end
+ end
+
+ context 'when status code is nil' do
+ let(:status_code) { nil }
+
+ it 'defaults to error 500' do
+ allow(Raven.configuration).to receive(:capture_allowed?).and_return(false)
+ expect(Raven).not_to receive(:capture_exception)
+
+ status, headers, message = subject.execute(ex, env, status_code)
+
+ expect(status).to eq(500)
+ expect(headers).to eq({})
+ expect(message).to eq(described_class::PROD_ERROR_MESSAGE)
+ end
+ end
+
+ context 'when status code is provided' do
+ let(:status_code) { 404 }
+
+ it 'uses the provided status code in the response' do
+ allow(Raven.configuration).to receive(:capture_allowed?).and_return(true)
+
+ status, headers, message = subject.execute(ex, env, status_code)
+
+ expect(status).to eq(404)
+ expect(headers).to eq({})
+ expect(message).to eq(described_class::PROD_ERROR_MESSAGE)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/rack_attack/request_spec.rb b/spec/lib/gitlab/rack_attack/request_spec.rb
index 9d2144f75db..92c9acb83cf 100644
--- a/spec/lib/gitlab/rack_attack/request_spec.rb
+++ b/spec/lib/gitlab/rack_attack/request_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::RackAttack::Request do
+RSpec.describe Gitlab::RackAttack::Request, feature_category: :rate_limiting do
using RSpec::Parameterized::TableSyntax
let(:path) { '/' }
@@ -38,8 +38,12 @@ RSpec.describe Gitlab::RackAttack::Request do
'/groups' | false
'/foo/api' | false
- '/api' | true
+ '/api' | false
+ '/api/' | true
'/api/v4/groups/1' | true
+
+ '/oauth/tokens' | true
+ '/oauth/userinfo' | true
end
with_them do
@@ -53,6 +57,36 @@ RSpec.describe Gitlab::RackAttack::Request do
it { is_expected.to eq(expected) }
end
end
+
+ context 'when rate_limit_oauth_api feature flag is disabled' do
+ before do
+ stub_feature_flags(rate_limit_oauth_api: false)
+ end
+
+ where(:path, :expected) do
+ '/' | false
+ '/groups' | false
+ '/foo/api' | false
+
+ '/api' | true
+ '/api/v4/groups/1' | true
+
+ '/oauth/tokens' | false
+ '/oauth/userinfo' | false
+ end
+
+ with_them do
+ it { is_expected.to eq(expected) }
+
+ context 'when the application is mounted at a relative URL' do
+ before do
+ stub_config_setting(relative_url_root: '/gitlab/root')
+ end
+
+ it { is_expected.to eq(expected) }
+ end
+ end
+ end
end
describe '#api_internal_request?' do
@@ -196,7 +230,8 @@ RSpec.describe Gitlab::RackAttack::Request do
'/groups' | true
'/foo/api' | true
- '/api' | false
+ '/api' | true
+ '/api/' | false
'/api/v4/groups/1' | false
end
diff --git a/spec/lib/gitlab/redis/multi_store_spec.rb b/spec/lib/gitlab/redis/multi_store_spec.rb
index ce21c2269cc..1745a745ec3 100644
--- a/spec/lib/gitlab/redis/multi_store_spec.rb
+++ b/spec/lib/gitlab/redis/multi_store_spec.rb
@@ -948,6 +948,55 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
end
+ describe '#close' do
+ subject { multi_store.close }
+
+ context 'when using both stores' do
+ before do
+ allow(multi_store).to receive(:use_primary_and_secondary_stores?).and_return(true)
+ end
+
+ it 'closes both stores' do
+ expect(primary_store).to receive(:close)
+ expect(secondary_store).to receive(:close)
+
+ subject
+ end
+ end
+
+ context 'when using only one store' do
+ before do
+ allow(multi_store).to receive(:use_primary_and_secondary_stores?).and_return(false)
+ end
+
+ context 'when using primary_store as default store' do
+ before do
+ allow(multi_store).to receive(:use_primary_store_as_default?).and_return(true)
+ end
+
+ it 'closes primary store' do
+ expect(primary_store).to receive(:close)
+ expect(secondary_store).not_to receive(:close)
+
+ subject
+ end
+ end
+
+ context 'when using secondary_store as default store' do
+ before do
+ allow(multi_store).to receive(:use_primary_store_as_default?).and_return(false)
+ end
+
+ it 'closes secondary store' do
+ expect(primary_store).not_to receive(:close)
+ expect(secondary_store).to receive(:close)
+
+ subject
+ end
+ end
+ end
+ end
+
context 'with unsupported command' do
let(:counter) { Gitlab::Metrics::NullMetric.instance }
diff --git a/spec/lib/gitlab/redis/queues_metadata_spec.rb b/spec/lib/gitlab/redis/queues_metadata_spec.rb
index 693e8074b45..1ac5c3b4e70 100644
--- a/spec/lib/gitlab/redis/queues_metadata_spec.rb
+++ b/spec/lib/gitlab/redis/queues_metadata_spec.rb
@@ -5,39 +5,4 @@ require 'spec_helper'
RSpec.describe Gitlab::Redis::QueuesMetadata, feature_category: :redis do
include_examples "redis_new_instance_shared_examples", 'queues_metadata', Gitlab::Redis::Queues
include_examples "redis_shared_examples"
-
- describe '#pool' do
- let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" }
- let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" }
-
- subject { described_class.pool }
-
- around do |example|
- clear_pool
- example.run
- ensure
- clear_pool
- end
-
- before do
- allow(described_class).to receive(:config_file_name).and_return(config_new_format_host)
-
- allow(described_class).to receive(:config_file_name).and_return(config_new_format_host)
- allow(Gitlab::Redis::Queues).to receive(:config_file_name).and_return(config_new_format_socket)
- end
-
- it 'instantiates an instance of MultiStore' do
- subject.with do |redis_instance|
- expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)
-
- expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99")
- expect(redis_instance.secondary_store.connection[:id]).to eq("unix:///path/to/redis.sock/0")
-
- expect(redis_instance.instance_name).to eq('QueuesMetadata')
- end
- end
-
- it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_queues_metadata,
- :use_primary_store_as_default_for_queues_metadata
- end
end
diff --git a/spec/lib/gitlab/redis/workhorse_spec.rb b/spec/lib/gitlab/redis/workhorse_spec.rb
index 46931a6afcb..db5db18c732 100644
--- a/spec/lib/gitlab/redis/workhorse_spec.rb
+++ b/spec/lib/gitlab/redis/workhorse_spec.rb
@@ -2,43 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Redis::Workhorse, feature_category: :scalability do
+RSpec.describe Gitlab::Redis::Workhorse, feature_category: :redis do
include_examples "redis_new_instance_shared_examples", 'workhorse', Gitlab::Redis::SharedState
include_examples "redis_shared_examples"
-
- describe '#pool' do
- let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" }
- let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" }
-
- subject { described_class.pool }
-
- before do
- allow(described_class).to receive(:config_file_name).and_return(config_new_format_host)
-
- # Override rails root to avoid having our fixtures overwritten by `redis.yml` if it exists
- allow(Gitlab::Redis::SharedState).to receive(:rails_root).and_return(mktmpdir)
- allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_socket)
- end
-
- around do |example|
- clear_pool
- example.run
- ensure
- clear_pool
- end
-
- it 'instantiates an instance of MultiStore' do
- subject.with do |redis_instance|
- expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)
-
- expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99")
- expect(redis_instance.secondary_store.connection[:id]).to eq("unix:///path/to/redis.sock/0")
-
- expect(redis_instance.instance_name).to eq('Workhorse')
- end
- end
-
- it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_workhorse,
- :use_primary_store_as_default_for_workhorse
- end
end
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index 02ae3f63918..381f3a80799 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -86,33 +86,6 @@ RSpec.describe Gitlab::Regex, feature_category: :tooling do
it { is_expected.to match('<any-Charact3r$|any-Charact3r$>') }
end
- describe '.group_path_regex' do
- subject { described_class.group_path_regex }
-
- it { is_expected.not_to match('?gitlab') }
- it { is_expected.not_to match("Users's something") }
- it { is_expected.not_to match('/source') }
- it { is_expected.not_to match('http:') }
- it { is_expected.not_to match('https:') }
- it { is_expected.not_to match('example.com/?stuff=true') }
- it { is_expected.not_to match('example.com:5000/?stuff=true') }
- it { is_expected.not_to match('http://gitlab.example/gitlab-org/manage/import/gitlab-migration-test') }
- it { is_expected.not_to match('_good_for_me!') }
- it { is_expected.not_to match('good_for+you') }
- it { is_expected.not_to match('source/') }
- it { is_expected.not_to match('.source/full./path') }
-
- it { is_expected.not_to match('source/full') }
- it { is_expected.not_to match('source/full/path') }
- it { is_expected.not_to match('.source/.full/.path') }
-
- it { is_expected.to match('source') }
- it { is_expected.to match('.source') }
- it { is_expected.to match('_source') }
- it { is_expected.to match('domain_namespace') }
- it { is_expected.to match('gitlab-migration-test') }
- end
-
describe '.environment_name_regex' do
subject { described_class.environment_name_regex }
diff --git a/spec/lib/gitlab/saas_spec.rb b/spec/lib/gitlab/saas_spec.rb
index a8656c44831..3be0a6c7bf0 100644
--- a/spec/lib/gitlab/saas_spec.rb
+++ b/spec/lib/gitlab/saas_spec.rb
@@ -1,8 +1,9 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
+require 'support/helpers/saas_test_helper'
-RSpec.describe Gitlab::Saas do
+RSpec.describe Gitlab::Saas, feature_category: :shared do
include SaasTestHelper
describe '.canary_toggle_com_url' do
diff --git a/spec/lib/gitlab/search_results_spec.rb b/spec/lib/gitlab/search_results_spec.rb
index d1f19a5e1ba..00e68f73d2d 100644
--- a/spec/lib/gitlab/search_results_spec.rb
+++ b/spec/lib/gitlab/search_results_spec.rb
@@ -465,6 +465,6 @@ RSpec.describe Gitlab::SearchResults, feature_category: :global_search do
expect(results.objects(scope)).to match_array([milestone_1, milestone_2, milestone_3])
end
- include_examples 'search results filtered by archived', 'search_milestones_hide_archived_projects'
+ include_examples 'search results filtered by archived'
end
end
diff --git a/spec/lib/gitlab/shell_spec.rb b/spec/lib/gitlab/shell_spec.rb
index 049b8d4ed86..22220efaa05 100644
--- a/spec/lib/gitlab/shell_spec.rb
+++ b/spec/lib/gitlab/shell_spec.rb
@@ -13,8 +13,6 @@ RSpec.describe Gitlab::Shell do
described_class.instance_variable_set(:@secret_token, nil)
end
- it { is_expected.to respond_to :remove_repository }
-
describe '.secret_token' do
let(:secret_file) { 'tmp/tests/.secret_shell_test' }
let(:link_file) { 'tmp/tests/shell-secret-test/.gitlab_shell_secret' }
@@ -74,67 +72,11 @@ RSpec.describe Gitlab::Shell do
end
end
- describe 'projects commands' do
- let(:gitlab_shell_path) { File.expand_path('tmp/tests/gitlab-shell') }
- let(:projects_path) { File.join(gitlab_shell_path, 'bin/gitlab-projects') }
-
- before do
- allow(Gitlab.config.gitlab_shell).to receive(:path).and_return(gitlab_shell_path)
- allow(Gitlab.config.gitlab_shell).to receive(:git_timeout).and_return(800)
- end
-
- describe '#remove_repository' do
- let!(:project) { create(:project, :repository, :legacy_storage) }
- let(:disk_path) { "#{project.disk_path}.git" }
-
- it 'returns true when the command succeeds' do
- expect(project.repository.raw).to exist
-
- expect(gitlab_shell.remove_repository(project.repository_storage, project.disk_path)).to be(true)
-
- expect(project.repository.raw).not_to exist
- end
- end
-
- describe '#mv_repository' do
- let!(:project2) { create(:project, :repository) }
-
- it 'returns true when the command succeeds' do
- old_repo = project2.repository.raw
- new_path = "project/new_path"
- new_repo = Gitlab::Git::Repository.new(project2.repository_storage, "#{new_path}.git", nil, nil)
-
- expect(old_repo).to exist
- expect(new_repo).not_to exist
-
- expect(gitlab_shell.mv_repository(project2.repository_storage, project2.disk_path, new_path)).to be_truthy
-
- expect(old_repo).not_to exist
- expect(new_repo).to exist
- end
-
- it 'returns false when the command fails' do
- expect(gitlab_shell.mv_repository(project2.repository_storage, project2.disk_path, '')).to be_falsy
- expect(project2.repository.raw).to exist
- end
- end
- end
-
describe 'namespace actions' do
subject { described_class.new }
let(:storage) { Gitlab.config.repositories.storages.each_key.first }
- describe '#add_namespace' do
- it 'creates a namespace' do
- Gitlab::GitalyClient::NamespaceService.allow do
- subject.add_namespace(storage, "mepmep")
-
- expect(Gitlab::GitalyClient::NamespaceService.new(storage).exists?("mepmep")).to be(true)
- end
- end
- end
-
describe '#repository_exists?' do
context 'when the repository does not exist' do
it 'returns false' do
@@ -150,28 +92,5 @@ RSpec.describe Gitlab::Shell do
end
end
end
-
- describe '#remove' do
- it 'removes the namespace' do
- Gitlab::GitalyClient::NamespaceService.allow do
- subject.add_namespace(storage, "mepmep")
- subject.rm_namespace(storage, "mepmep")
-
- expect(Gitlab::GitalyClient::NamespaceService.new(storage).exists?("mepmep")).to be(false)
- end
- end
- end
-
- describe '#mv_namespace' do
- it 'renames the namespace' do
- Gitlab::GitalyClient::NamespaceService.allow do
- subject.add_namespace(storage, "mepmep")
- subject.mv_namespace(storage, "mepmep", "2mep")
-
- expect(Gitlab::GitalyClient::NamespaceService.new(storage).exists?("mepmep")).to be(false)
- expect(Gitlab::GitalyClient::NamespaceService.new(storage).exists?("2mep")).to be(true)
- end
- end
- end
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
index 937a1751cc7..7138ad04f69 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
@@ -3,8 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob,
- :clean_gitlab_redis_queues, :clean_gitlab_redis_shared_state, :clean_gitlab_redis_queues_metadata,
- feature_category: :shared do
+ :clean_gitlab_redis_queues_metadata, feature_category: :shared do
using RSpec::Parameterized::TableSyntax
subject(:duplicate_job) do
@@ -79,7 +78,11 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob,
end
end
- shared_examples 'with Redis cookies' do
+ context 'with Redis cookies' do
+ def with_redis(&block)
+ Gitlab::Redis::QueuesMetadata.with(&block)
+ end
+
let(:cookie_key) { "#{Gitlab::Redis::Queues::SIDEKIQ_NAMESPACE}:#{idempotency_key}:cookie:v2" }
let(:cookie) { get_redis_msgpack(cookie_key) }
@@ -413,62 +416,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob,
end
end
- context 'with multi-store feature flags turned on' do
- def with_redis(&block)
- Gitlab::Redis::QueuesMetadata.with(&block)
- end
-
- shared_examples 'uses QueuesMetadata' do
- it 'use Gitlab::Redis::QueuesMetadata.with' do
- expect(Gitlab::Redis::QueuesMetadata).to receive(:with).and_call_original
- expect(Gitlab::Redis::Queues).not_to receive(:with)
-
- duplicate_job.check!
- end
- end
-
- context 'when migration is ongoing with double-write' do
- before do
- stub_feature_flags(use_primary_store_as_default_for_queues_metadata: false)
- end
-
- it_behaves_like 'uses QueuesMetadata'
- it_behaves_like 'with Redis cookies'
- end
-
- context 'when migration is completed' do
- before do
- stub_feature_flags(use_primary_and_secondary_stores_for_queues_metadata: false)
- end
-
- it_behaves_like 'uses QueuesMetadata'
- it_behaves_like 'with Redis cookies'
- end
-
- it_behaves_like 'uses QueuesMetadata'
- it_behaves_like 'with Redis cookies'
- end
-
- context 'when both multi-store feature flags are off' do
- def with_redis(&block)
- Gitlab::Redis::Queues.with(&block)
- end
-
- before do
- stub_feature_flags(use_primary_and_secondary_stores_for_queues_metadata: false)
- stub_feature_flags(use_primary_store_as_default_for_queues_metadata: false)
- end
-
- it 'use Gitlab::Redis::Queues' do
- expect(Gitlab::Redis::Queues).to receive(:with).and_call_original
- expect(Gitlab::Redis::QueuesMetadata).not_to receive(:with)
-
- duplicate_job.check!
- end
-
- it_behaves_like 'with Redis cookies'
- end
-
describe '#scheduled?' do
it 'returns false for non-scheduled jobs' do
expect(duplicate_job.scheduled?).to be(false)
diff --git a/spec/lib/gitlab/sidekiq_middleware/extra_done_log_metadata_spec.rb b/spec/lib/gitlab/sidekiq_middleware/extra_done_log_metadata_spec.rb
index dbab67f5996..5569bc01a6a 100644
--- a/spec/lib/gitlab/sidekiq_middleware/extra_done_log_metadata_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/extra_done_log_metadata_spec.rb
@@ -26,10 +26,18 @@ RSpec.describe Gitlab::SidekiqMiddleware::ExtraDoneLogMetadata do
expect(job).to eq({ 'jid' => 123, 'extra.admin_email_worker.key1' => 15, 'extra.admin_email_worker.key2' => 16 })
end
- it 'does not raise when the worker does not respond to #done_log_extra_metadata' do
+ it 'does not raise when the worker does not respond to #logging_extras' do
expect { |b| subject.call(worker_without_application_worker, job, queue, &b) }.to yield_control
expect(job).to eq({ 'jid' => 123 })
end
+
+ it 'still merges logging_extras even when an error is raised during job execution' do
+ worker.log_extra_metadata_on_done(:key1, 15)
+ worker.log_extra_metadata_on_done(:key2, 16)
+ expect { subject.call(worker, job, queue) { raise 'an error' } }.to raise_error(StandardError, 'an error')
+
+ expect(job).to eq({ 'jid' => 123, 'extra.admin_email_worker.key1' => 15, 'extra.admin_email_worker.key2' => 16 })
+ end
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/skip_jobs_spec.rb b/spec/lib/gitlab/sidekiq_middleware/skip_jobs_spec.rb
index 620de7e7671..2fa0e44d44f 100644
--- a/spec/lib/gitlab/sidekiq_middleware/skip_jobs_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/skip_jobs_spec.rb
@@ -23,76 +23,76 @@ RSpec.describe Gitlab::SidekiqMiddleware::SkipJobs, feature_category: :scalabili
describe '#call' do
context 'with worker not opted for database health check' do
- describe "with all combinations of drop and defer FFs" do
- using RSpec::Parameterized::TableSyntax
+ let(:metric) { instance_double(Prometheus::Client::Counter, increment: true) }
- let(:metric) { instance_double(Prometheus::Client::Counter, increment: true) }
-
- shared_examples 'runs the job normally' do
- it 'yields control' do
- expect { |b| subject.call(TestWorker.new, job, queue, &b) }.to yield_control
- end
+ shared_examples 'runs the job normally' do
+ it 'yields control' do
+ expect { |b| subject.call(TestWorker.new, job, queue, &b) }.to yield_control
+ end
- it 'does not increment any metric counter' do
- expect(metric).not_to receive(:increment)
+ it 'does not increment any metric counter' do
+ expect(metric).not_to receive(:increment)
- subject.call(TestWorker.new, job, queue) { nil }
- end
+ subject.call(TestWorker.new, job, queue) { nil }
+ end
- it 'does not increment deferred_count' do
- subject.call(TestWorker.new, job, queue) { nil }
+ it 'does not increment deferred_count' do
+ subject.call(TestWorker.new, job, queue) { nil }
- expect(job).not_to include('deferred_count')
- end
+ expect(job).not_to include('deferred_count')
end
+ end
- shared_examples 'drops the job' do
- it 'does not yield control' do
- expect { |b| subject.call(TestWorker.new, job, queue, &b) }.not_to yield_control
- end
+ shared_examples 'drops the job' do
+ it 'does not yield control' do
+ expect { |b| subject.call(TestWorker.new, job, queue, &b) }.not_to yield_control
+ end
- it 'increments counter' do
- expect(metric).to receive(:increment).with({ worker: "TestWorker", action: "dropped" })
+ it 'increments counter' do
+ expect(metric).to receive(:increment).with({ worker: "TestWorker", action: "dropped" })
- subject.call(TestWorker.new, job, queue) { nil }
- end
+ subject.call(TestWorker.new, job, queue) { nil }
+ end
- it 'does not increment deferred_count' do
- subject.call(TestWorker.new, job, queue) { nil }
+ it 'does not increment deferred_count' do
+ subject.call(TestWorker.new, job, queue) { nil }
- expect(job).not_to include('deferred_count')
- end
+ expect(job).not_to include('deferred_count')
+ end
- it 'has dropped field in job equal to true' do
- subject.call(TestWorker.new, job, queue) { nil }
+ it 'has dropped field in job equal to true' do
+ subject.call(TestWorker.new, job, queue) { nil }
- expect(job).to include({ 'dropped' => true })
- end
+ expect(job).to include({ 'dropped' => true })
end
+ end
- shared_examples 'defers the job' do
- it 'does not yield control' do
- expect { |b| subject.call(TestWorker.new, job, queue, &b) }.not_to yield_control
- end
+ shared_examples 'defers the job' do
+ it 'does not yield control' do
+ expect { |b| subject.call(TestWorker.new, job, queue, &b) }.not_to yield_control
+ end
- it 'delays the job' do
- expect(TestWorker).to receive(:perform_in).with(described_class::DELAY, *job['args'])
+ it 'delays the job' do
+ expect(TestWorker).to receive(:perform_in).with(described_class::DELAY, *job['args'])
- subject.call(TestWorker.new, job, queue) { nil }
- end
+ subject.call(TestWorker.new, job, queue) { nil }
+ end
- it 'increments counter' do
- expect(metric).to receive(:increment).with({ worker: "TestWorker", action: "deferred" })
+ it 'increments counter' do
+ expect(metric).to receive(:increment).with({ worker: "TestWorker", action: "deferred" })
- subject.call(TestWorker.new, job, queue) { nil }
- end
+ subject.call(TestWorker.new, job, queue) { nil }
+ end
- it 'has deferred related fields in job payload' do
- subject.call(TestWorker.new, job, queue) { nil }
+ it 'has deferred related fields in job payload' do
+ subject.call(TestWorker.new, job, queue) { nil }
- expect(job).to include({ 'deferred' => true, 'deferred_by' => :feature_flag, 'deferred_count' => 1 })
- end
+ expect(job).to include({ 'deferred' => true, 'deferred_by' => :feature_flag, 'deferred_count' => 1 })
end
+ end
+
+ describe "with all combinations of drop and defer FFs" do
+ using RSpec::Parameterized::TableSyntax
before do
stub_feature_flags("drop_sidekiq_jobs_#{TestWorker.name}": drop_ff)
@@ -112,6 +112,45 @@ RSpec.describe Gitlab::SidekiqMiddleware::SkipJobs, feature_category: :scalabili
it_behaves_like params[:resulting_behavior]
end
end
+
+ describe 'using current_request actor', :request_store do
+ before do
+ allow(Gitlab::Metrics).to receive(:counter).and_call_original
+ allow(Gitlab::Metrics).to receive(:counter).with(described_class::COUNTER, anything).and_return(metric)
+ end
+
+ context 'with drop_sidekiq_jobs FF' do
+ before do
+ stub_feature_flags("drop_sidekiq_jobs_#{TestWorker.name}": Feature.current_request)
+ end
+
+ it_behaves_like 'drops the job'
+
+ context 'for different request' do
+ before do
+ stub_with_new_feature_current_request
+ end
+
+ it_behaves_like 'runs the job normally'
+ end
+ end
+
+ context 'with run_sidekiq_jobs FF' do
+ before do
+ stub_feature_flags("run_sidekiq_jobs_#{TestWorker.name}": Feature.current_request)
+ end
+
+ it_behaves_like 'runs the job normally'
+
+ context 'for different request' do
+ before do
+ stub_with_new_feature_current_request
+ end
+
+ it_behaves_like 'defers the job'
+ end
+ end
+ end
end
context 'with worker opted for database health check' do
diff --git a/spec/lib/gitlab/slash_commands/run_spec.rb b/spec/lib/gitlab/slash_commands/run_spec.rb
index 9d204228d21..5d228a9ba6a 100644
--- a/spec/lib/gitlab/slash_commands/run_spec.rb
+++ b/spec/lib/gitlab/slash_commands/run_spec.rb
@@ -39,16 +39,6 @@ RSpec.describe Gitlab::SlashCommands::Run do
expect(described_class.available?(project)).to eq(false)
end
-
- it 'returns false when chatops is not available' do
- allow(Gitlab::Chat)
- .to receive(:available?)
- .and_return(false)
-
- project = double(:project, builds_enabled?: true)
-
- expect(described_class.available?(project)).to eq(false)
- end
end
describe '.allowed?' do
diff --git a/spec/lib/gitlab/url_blocker_spec.rb b/spec/lib/gitlab/url_blocker_spec.rb
index cfd40fb93b5..0f827921a66 100644
--- a/spec/lib/gitlab/url_blocker_spec.rb
+++ b/spec/lib/gitlab/url_blocker_spec.rb
@@ -66,7 +66,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only, feature_category: :sh
include_context 'when instance configured to deny all requests'
it 'blocks the request' do
- expect { subject }.to raise_error(described_class::BlockedUrlError)
+ expect { subject }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
end
end
@@ -83,7 +83,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only, feature_category: :sh
let(:arg_value) { proc { true } }
it 'blocks the request' do
- expect { subject }.to raise_error(described_class::BlockedUrlError)
+ expect { subject }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
end
end
@@ -99,7 +99,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only, feature_category: :sh
let(:arg_value) { true }
it 'blocks the request' do
- expect { subject }.to raise_error(described_class::BlockedUrlError)
+ expect { subject }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
end
end
@@ -228,7 +228,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only, feature_category: :sh
let(:lfs_enabled) { false }
it 'raises an error' do
- expect { subject }.to raise_error(described_class::BlockedUrlError)
+ expect { subject }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
end
end
@@ -236,7 +236,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only, feature_category: :sh
let(:lfs_enabled) { true }
it 'raises an error' do
- expect { subject }.to raise_error(described_class::BlockedUrlError)
+ expect { subject }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
end
end
end
@@ -251,7 +251,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only, feature_category: :sh
end
it 'raises an error' do
- expect { subject }.to raise_error(described_class::BlockedUrlError)
+ expect { subject }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
end
end
@@ -259,7 +259,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only, feature_category: :sh
let(:host) { 'http://127.0.0.1:9000' }
it 'raises an error' do
- expect { subject }.to raise_error(described_class::BlockedUrlError)
+ expect { subject }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
end
end
end
@@ -290,7 +290,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only, feature_category: :sh
end
it 'raises an error' do
- expect { subject }.to raise_error(described_class::BlockedUrlError)
+ expect { subject }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
end
context 'with HTTP_PROXY' do
@@ -324,7 +324,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only, feature_category: :sh
let(:import_url) { "https://example#{'a' * 1024}.com" }
it 'raises an error' do
- expect { subject }.to raise_error(described_class::BlockedUrlError)
+ expect { subject }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
end
end
end
@@ -346,7 +346,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only, feature_category: :sh
it 'raises an error' do
stub_env('RSPEC_ALLOW_INVALID_URLS', 'false')
- expect { subject }.to raise_error(described_class::BlockedUrlError)
+ expect { subject }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
end
end
end
diff --git a/spec/lib/gitlab/url_builder_spec.rb b/spec/lib/gitlab/url_builder_spec.rb
index 865a8384405..68eb38a1335 100644
--- a/spec/lib/gitlab/url_builder_spec.rb
+++ b/spec/lib/gitlab/url_builder_spec.rb
@@ -23,7 +23,8 @@ RSpec.describe Gitlab::UrlBuilder do
:commit | ->(commit) { "/#{commit.project.full_path}/-/commit/#{commit.id}" }
:issue | ->(issue) { "/#{issue.project.full_path}/-/issues/#{issue.iid}" }
[:issue, :task] | ->(issue) { "/#{issue.project.full_path}/-/work_items/#{issue.iid}" }
- :work_item | ->(work_item) { "/#{work_item.project.full_path}/-/work_items/#{work_item.iid}" }
+ [:work_item, :task] | ->(work_item) { "/#{work_item.project.full_path}/-/work_items/#{work_item.iid}" }
+ [:work_item, :issue] | ->(work_item) { "/#{work_item.project.full_path}/-/issues/#{work_item.iid}" }
:merge_request | ->(merge_request) { "/#{merge_request.project.full_path}/-/merge_requests/#{merge_request.iid}" }
:project_milestone | ->(milestone) { "/#{milestone.project.full_path}/-/milestones/#{milestone.iid}" }
:project_snippet | ->(snippet) { "/#{snippet.project.full_path}/-/snippets/#{snippet.id}" }
@@ -59,7 +60,8 @@ RSpec.describe Gitlab::UrlBuilder do
:discussion_note_on_project_snippet | ->(note) { "/#{note.project.full_path}/-/snippets/#{note.noteable_id}#note_#{note.id}" }
:discussion_note_on_personal_snippet | ->(note) { "/-/snippets/#{note.noteable_id}#note_#{note.id}" }
:note_on_personal_snippet | ->(note) { "/-/snippets/#{note.noteable_id}#note_#{note.id}" }
- :package | ->(package) { "/#{package.project.full_path}/-/packages/#{package.id}" }
+ :note_on_abuse_report | ->(note) { "/admin/abuse_reports/#{note.noteable_id}#note_#{note.id}" }
+ :package | ->(package) { "/#{package.project.full_path}/-/packages/#{package.id}" }
end
with_them do
diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb
index 6695736e54c..51d3090c825 100644
--- a/spec/lib/gitlab/usage/metric_definition_spec.rb
+++ b/spec/lib/gitlab/usage/metric_definition_spec.rb
@@ -40,13 +40,10 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
File.write(path, content)
end
- after do
- # Reset memoized `definitions` result
- described_class.instance_variable_set(:@definitions, nil)
- end
-
- it 'has all definitons valid' do
- expect { described_class.definitions }.not_to raise_error
+ it 'has only valid definitions' do
+ described_class.all.each do |definition|
+ expect { definition.validate! }.not_to raise_error
+ end
end
describe 'not_removed' do
@@ -126,11 +123,13 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
context 'with data_source redis metric' do
before do
attributes[:data_source] = 'redis'
- attributes[:options] = { prefix: 'web_ide', event: 'views_count', include_usage_prefix: false }
+ attributes[:events] = [
+ { name: 'web_ide_viewed' }
+ ]
end
- it 'returns a ServicePingContext with redis key as event_name' do
- expect(subject.to_h[:data][:event_name]).to eq('WEB_IDE_VIEWS_COUNT')
+ it 'returns a ServicePingContext with first event as event_name' do
+ expect(subject.to_h[:data][:event_name]).to eq('web_ide_viewed')
end
end
@@ -182,20 +181,6 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
described_class.new(path, attributes).validate!
end
-
- context 'with skip_validation' do
- it 'raise exception if skip_validation: false' do
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::MetricDefinition::InvalidError))
-
- described_class.new(path, attributes.merge( { skip_validation: false } )).validate!
- end
-
- it 'does not raise exception if has skip_validation: true' do
- expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
-
- described_class.new(path, attributes.merge( { skip_validation: true } )).validate!
- end
- end
end
context 'conditional validations' do
@@ -358,71 +343,4 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
is_expected.to eq([attributes, other_attributes].map(&:deep_stringify_keys).to_yaml)
end
end
-
- describe '.metric_definitions_changed?', :freeze_time do
- let(:metric1) { Dir.mktmpdir('metric1') }
- let(:metric2) { Dir.mktmpdir('metric2') }
-
- before do
- allow(Rails).to receive_message_chain(:env, :development?).and_return(is_dev)
- allow(described_class).to receive(:paths).and_return(
- [
- File.join(metric1, '**', '*.yml'),
- File.join(metric2, '**', '*.yml')
- ]
- )
-
- write_metric(metric1, path, yaml_content)
- write_metric(metric2, path, yaml_content)
- end
-
- after do
- FileUtils.rm_rf(metric1)
- FileUtils.rm_rf(metric2)
- end
-
- context 'in development', :freeze_time do
- let(:is_dev) { true }
-
- it 'has changes on the first invocation' do
- expect(described_class.metric_definitions_changed?).to be_truthy
- end
-
- context 'when no files are changed' do
- it 'does not have changes on the second invocation' do
- described_class.metric_definitions_changed?
-
- expect(described_class.metric_definitions_changed?).to be_falsy
- end
- end
-
- context 'when file is changed' do
- it 'has changes on the next invocation when more than 3 seconds have passed' do
- described_class.metric_definitions_changed?
-
- write_metric(metric1, path, yaml_content)
- travel_to 10.seconds.from_now
-
- expect(described_class.metric_definitions_changed?).to be_truthy
- end
-
- it 'does not have changes on the next invocation when less than 3 seconds have passed' do
- described_class.metric_definitions_changed?
-
- write_metric(metric1, path, yaml_content)
- travel_to 1.second.from_now
-
- expect(described_class.metric_definitions_changed?).to be_falsy
- end
- end
-
- context 'in production' do
- let(:is_dev) { false }
-
- it 'does not detect changes' do
- expect(described_class.metric_definitions_changed?).to be_falsy
- end
- end
- end
- end
end
diff --git a/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb b/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb
index 59b944ac398..18a97447f1c 100644
--- a/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb
@@ -88,10 +88,12 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_
describe '.save_aggregated_metrics' do
subject(:save_aggregated_metrics) do
- described_class.save_aggregated_metrics(metric_name: metric_1,
- time_period: time_period,
- recorded_at_timestamp: recorded_at,
- data: data)
+ described_class.save_aggregated_metrics(
+ metric_name: metric_1,
+ time_period: time_period,
+ recorded_at_timestamp: recorded_at,
+ data: data
+ )
end
context 'with compatible data argument' do
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/container_registry_db_enabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/container_registry_db_enabled_metric_spec.rb
new file mode 100644
index 00000000000..605764cd7f8
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/container_registry_db_enabled_metric_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::ContainerRegistryDbEnabledMetric, feature_category: :service_ping do
+ let(:expected_value) { Gitlab::CurrentSettings.container_registry_db_enabled }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' }
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_ci_internal_pipelines_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_ci_internal_pipelines_metric_spec.rb
index 77c49d448d7..2b6e17f615c 100644
--- a/spec/lib/gitlab/usage/metrics/instrumentations/count_ci_internal_pipelines_metric_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_ci_internal_pipelines_metric_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountCiInternalPipelinesMetric,
-feature_category: :service_ping do
+ feature_category: :service_ping do
let_it_be(:ci_pipeline_1) { create(:ci_pipeline, source: :external, created_at: 3.days.ago) }
let_it_be(:ci_pipeline_2) { create(:ci_pipeline, source: :push, created_at: 3.days.ago) }
let_it_be(:old_pipeline) { create(:ci_pipeline, source: :push, created_at: 2.months.ago) }
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_csv_imports_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_csv_imports_metric_spec.rb
new file mode 100644
index 00000000000..2b481563ecd
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_csv_imports_metric_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountCsvImportsMetric, feature_category: :service_ping do
+ let_it_be(:user) { create(:user) }
+
+ let_it_be(:old_import) { create(:issue_csv_import, user: user, created_at: 2.months.ago) }
+ let_it_be(:new_import) { create(:issue_csv_import, user: user, created_at: 21.days.ago) }
+
+ context 'with all time frame' do
+ let(:expected_value) { 2 }
+ let(:expected_query) do
+ %q{SELECT COUNT("csv_issue_imports"."id") FROM "csv_issue_imports"}
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', time_frame: 'all'
+ end
+
+ context 'for 28d time frame' do
+ let(:expected_value) { 1 }
+ let(:start) { 30.days.ago.to_fs(:db) }
+ let(:finish) { 2.days.ago.to_fs(:db) }
+ let(:expected_query) do
+ "SELECT COUNT(\"csv_issue_imports\".\"id\") FROM \"csv_issue_imports\" " \
+ "WHERE \"csv_issue_imports\".\"created_at\" " \
+ "BETWEEN '#{start}' AND '#{finish}'"
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', time_frame: '28d'
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_issues_created_manually_from_alerts_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_issues_created_manually_from_alerts_metric_spec.rb
index 65e514bf345..56b847257a5 100644
--- a/spec/lib/gitlab/usage/metrics/instrumentations/count_issues_created_manually_from_alerts_metric_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_issues_created_manually_from_alerts_metric_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountIssuesCreatedManuallyFromAlertsMetric,
-feature_category: :service_ping do
+ feature_category: :service_ping do
let_it_be(:issue) { create(:issue) }
let_it_be(:issue_with_alert) { create(:issue, :with_alert) }
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_jira_imports_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_jira_imports_metric_spec.rb
new file mode 100644
index 00000000000..9a51c3cc408
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_jira_imports_metric_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountJiraImportsMetric, feature_category: :service_ping do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, creator_id: user.id) }
+
+ let_it_be(:old_import) { create(:jira_import_state, :finished, project: project, created_at: 2.months.ago) }
+ let_it_be(:new_import) { create(:jira_import_state, :finished, project: project, created_at: 21.days.ago) }
+
+ context 'with all time frame' do
+ let(:expected_value) { 2 }
+ let(:expected_query) do
+ %q{SELECT COUNT("jira_imports"."id") FROM "jira_imports"}
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', time_frame: 'all'
+ end
+
+ context 'for 28d time frame' do
+ let(:expected_value) { 1 }
+ let(:start) { 30.days.ago.to_fs(:db) }
+ let(:finish) { 2.days.ago.to_fs(:db) }
+ let(:expected_query) do
+ "SELECT COUNT(\"jira_imports\".\"id\") FROM \"jira_imports\" WHERE \"jira_imports\".\"created_at\" " \
+ "BETWEEN '#{start}' AND '#{finish}'"
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', time_frame: '28d'
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_packages_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_packages_metric_spec.rb
new file mode 100644
index 00000000000..9a2e5c27c1d
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_packages_metric_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountPackagesMetric, feature_category: :service_ping do
+ before_all do
+ create :package, created_at: 2.months.ago
+ create :package, created_at: 21.days.ago
+ create :package, created_at: 7.days.ago
+ end
+
+ context "with all time frame" do
+ let(:expected_value) { 3 }
+ let(:expected_query) do
+ 'SELECT COUNT("packages_packages"."id") FROM "packages_packages"'
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }
+ end
+
+ context "with 28d time frame" do
+ let(:expected_value) { 2 }
+ let(:start) { 30.days.ago.to_fs(:db) }
+ let(:finish) { 2.days.ago.to_fs(:db) }
+ let(:expected_query) do
+ 'SELECT COUNT("packages_packages"."id") FROM "packages_packages" ' \
+ 'WHERE "packages_packages"."created_at" ' \
+ "BETWEEN '#{start}' AND '#{finish}'"
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' }
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_projects_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_projects_metric_spec.rb
new file mode 100644
index 00000000000..28185fb9df4
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_projects_metric_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountProjectsMetric, feature_category: :service_ping do
+ before_all do
+ create :project, created_at: 2.months.ago
+ create :project, created_at: 21.days.ago
+ create :project, created_at: 7.days.ago
+ end
+
+ context "with all time frame" do
+ let(:expected_value) { 3 }
+ let(:expected_query) do
+ 'SELECT COUNT("projects"."id") FROM "projects"'
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }
+ end
+
+ context "with 28d time frame" do
+ let(:expected_value) { 2 }
+ let(:start) { 30.days.ago.to_fs(:db) }
+ let(:finish) { 2.days.ago.to_fs(:db) }
+ let(:expected_query) do
+ 'SELECT COUNT("projects"."id") FROM "projects" ' \
+ 'WHERE "projects"."created_at" ' \
+ "BETWEEN '#{start}' AND '#{finish}'"
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' }
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_cta_clicked_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_cta_clicked_metric_spec.rb
index cb94da11d58..91ad81c4291 100644
--- a/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_cta_clicked_metric_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_cta_clicked_metric_spec.rb
@@ -9,10 +9,10 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::InProductMarketingEmail
let(:options) { { track: 'verify', series: 0 } }
let(:expected_value) { 2 }
let(:expected_query) do
- 'SELECT COUNT("in_product_marketing_emails"."id") FROM "in_product_marketing_emails"' \
- ' WHERE "in_product_marketing_emails"."cta_clicked_at" IS NOT NULL' \
- ' AND "in_product_marketing_emails"."series" = 0'\
- ' AND "in_product_marketing_emails"."track" = 1'
+ 'SELECT COUNT("in_product_marketing_emails"."id") FROM "in_product_marketing_emails" ' \
+ 'WHERE "in_product_marketing_emails"."cta_clicked_at" IS NOT NULL ' \
+ 'AND "in_product_marketing_emails"."series" = 0 ' \
+ 'AND "in_product_marketing_emails"."track" = 1'
end
before do
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_sent_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_sent_metric_spec.rb
index 0cc82773d56..3c51368f396 100644
--- a/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_sent_metric_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_sent_metric_spec.rb
@@ -8,9 +8,9 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::InProductMarketingEmail
let(:email_attributes) { { track: 'verify', series: 0 } }
let(:expected_value) { 2 }
let(:expected_query) do
- 'SELECT COUNT("in_product_marketing_emails"."id") FROM "in_product_marketing_emails"' \
- ' WHERE "in_product_marketing_emails"."series" = 0'\
- ' AND "in_product_marketing_emails"."track" = 1'
+ 'SELECT COUNT("in_product_marketing_emails"."id") FROM "in_product_marketing_emails" ' \
+ 'WHERE "in_product_marketing_emails"."series" = 0 ' \
+ 'AND "in_product_marketing_emails"."track" = 1'
end
before do
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/incoming_email_encrypted_secrets_enabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/incoming_email_encrypted_secrets_enabled_metric_spec.rb
index b1b193c8d04..ad1f231a12d 100644
--- a/spec/lib/gitlab/usage/metrics/instrumentations/incoming_email_encrypted_secrets_enabled_metric_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/incoming_email_encrypted_secrets_enabled_metric_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Instrumentations::IncomingEmailEncryptedSecretsEnabledMetric,
-feature_category: :service_ping do
+ feature_category: :service_ping do
it_behaves_like 'a correct instrumented metric value', { time_frame: 'none', data_source: 'ruby' } do
let(:expected_value) { ::Gitlab::Email::IncomingEmail.encrypted_secrets.active? }
end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/service_desk_email_encrypted_secrets_enabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/service_desk_email_encrypted_secrets_enabled_metric_spec.rb
index ea239e53d01..dae7f17a3b6 100644
--- a/spec/lib/gitlab/usage/metrics/instrumentations/service_desk_email_encrypted_secrets_enabled_metric_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/service_desk_email_encrypted_secrets_enabled_metric_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Instrumentations::ServiceDeskEmailEncryptedSecretsEnabledMetric,
-feature_category: :service_ping do
+ feature_category: :service_ping do
it_behaves_like 'a correct instrumented metric value', { time_frame: 'none', data_source: 'ruby' } do
let(:expected_value) { ::Gitlab::Email::ServiceDeskEmail.encrypted_secrets.active? }
end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/total_count_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/total_count_metric_spec.rb
new file mode 100644
index 00000000000..f3aa1ba4f88
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/total_count_metric_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::TotalCountMetric, :clean_gitlab_redis_shared_state,
+ feature_category: :product_analytics_data_management do
+ before do
+ allow(Gitlab::InternalEvents::EventDefinitions).to receive(:known_event?).and_return(true)
+ end
+
+ context 'with multiple similar events' do
+ let(:expected_value) { 10 }
+
+ before do
+ 10.times do
+ Gitlab::InternalEvents.track_event('my_event')
+ end
+ end
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', events: [{ name: 'my_event' }] }
+ end
+
+ context 'with multiple different events' do
+ let(:expected_value) { 2 }
+
+ before do
+ Gitlab::InternalEvents.track_event('my_event1')
+ Gitlab::InternalEvents.track_event('my_event2')
+ end
+
+ it_behaves_like 'a correct instrumented metric value',
+ { time_frame: 'all', events: [{ name: 'my_event1' }, { name: 'my_event2' }] }
+ end
+
+ describe '.redis_key' do
+ it 'adds the key prefix to the event name' do
+ expect(described_class.redis_key('my_event')).to eq('{event_counters}_my_event')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/query_spec.rb b/spec/lib/gitlab/usage/metrics/query_spec.rb
index 750d340551a..418bbf322d0 100644
--- a/spec/lib/gitlab/usage/metrics/query_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/query_spec.rb
@@ -75,9 +75,9 @@ RSpec.describe Gitlab::Usage::Metrics::Query do
describe '.histogram' do
it 'returns the histogram sql' do
- expect(described_class.for(:histogram, AlertManagement::HttpIntegration.active,
- :project_id, buckets: 1..2, bucket_size: 101))
- .to match(/^WITH "count_cte" AS MATERIALIZED/)
+ expect(described_class.for(
+ :histogram, AlertManagement::HttpIntegration.active, :project_id, buckets: 1..2, bucket_size: 101
+ )).to match(/^WITH "count_cte" AS MATERIALIZED/)
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
index eeef9406841..2c9506dd498 100644
--- a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
@@ -17,24 +17,24 @@ RSpec.describe Gitlab::UsageDataCounters::CiTemplateUniqueCounter, feature_categ
described_class.ci_template_event_name(expanded_template_name, config_source)
end
- it "has an event defined for template" do
+ it 'has an event defined for template' do
expect do
subject
end.not_to raise_error
end
- it "tracks template" do
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).to(receive(:track_event)).with(template_name, values: project.id)
+ it 'tracks template' do
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter)
+ .to receive(:track_event).with(template_name, values: project.id).once
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter)
+ .to receive(:track_event).with('ci_template_included', values: project.id).once
subject
end
- it_behaves_like 'Snowplow event tracking with RedisHLL context' do
- let(:category) { described_class.to_s }
- let(:action) { 'ci_templates_unique' }
+ it_behaves_like 'internal event tracking' do
+ let(:event) { 'ci_template_included' }
let(:namespace) { project.namespace }
- let(:label) { 'redis_hll_counters.ci_templates.ci_templates_total_unique_counts_monthly' }
- let(:context) { [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: template_name).to_context] }
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
index ab92b59c845..71e9e7a8e7d 100644
--- a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
end
context 'for web IDE edit actions' do
- let(:action) { described_class::EDIT_BY_WEB_IDE }
+ let(:event) { described_class::EDIT_BY_WEB_IDE }
it_behaves_like 'tracks and counts action' do
def track_action(params)
@@ -49,7 +49,7 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
end
context 'for SFE edit actions' do
- let(:action) { described_class::EDIT_BY_SFE }
+ let(:event) { described_class::EDIT_BY_SFE }
it_behaves_like 'tracks and counts action' do
def track_action(params)
@@ -63,7 +63,7 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
end
context 'for snippet editor edit actions' do
- let(:action) { described_class::EDIT_BY_SNIPPET_EDITOR }
+ let(:event) { described_class::EDIT_BY_SNIPPET_EDITOR }
it_behaves_like 'tracks and counts action' do
def track_action(params)
diff --git a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
index 21a820deaa4..2c2bdbeb3e6 100644
--- a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue title edit actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_TITLE_CHANGED }
+ let(:event) { described_class::ISSUE_TITLE_CHANGED }
subject(:track_event) { described_class.track_issue_title_changed_action(author: user, project: project) }
end
@@ -21,7 +21,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue description edit actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_DESCRIPTION_CHANGED }
+ let(:event) { described_class::ISSUE_DESCRIPTION_CHANGED }
subject(:track_event) { described_class.track_issue_description_changed_action(author: user, project: project) }
end
@@ -29,7 +29,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue assignee edit actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_ASSIGNEE_CHANGED }
+ let(:event) { described_class::ISSUE_ASSIGNEE_CHANGED }
subject(:track_event) { described_class.track_issue_assignee_changed_action(author: user, project: project) }
end
@@ -37,7 +37,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue make confidential actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_MADE_CONFIDENTIAL }
+ let(:event) { described_class::ISSUE_MADE_CONFIDENTIAL }
subject(:track_event) { described_class.track_issue_made_confidential_action(author: user, project: project) }
end
@@ -45,7 +45,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue make visible actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_MADE_VISIBLE }
+ let(:event) { described_class::ISSUE_MADE_VISIBLE }
subject(:track_event) { described_class.track_issue_made_visible_action(author: user, project: project) }
end
@@ -53,7 +53,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue created actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_CREATED }
+ let(:event) { described_class::ISSUE_CREATED }
let(:project) { nil }
subject(:track_event) { described_class.track_issue_created_action(author: user, namespace: namespace) }
@@ -62,7 +62,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue closed actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_CLOSED }
+ let(:event) { described_class::ISSUE_CLOSED }
subject(:track_event) { described_class.track_issue_closed_action(author: user, project: project) }
end
@@ -70,7 +70,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue reopened actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_REOPENED }
+ let(:event) { described_class::ISSUE_REOPENED }
subject(:track_event) { described_class.track_issue_reopened_action(author: user, project: project) }
end
@@ -78,7 +78,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue label changed actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_LABEL_CHANGED }
+ let(:event) { described_class::ISSUE_LABEL_CHANGED }
subject(:track_event) { described_class.track_issue_label_changed_action(author: user, project: project) }
end
@@ -86,7 +86,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue label milestone actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_MILESTONE_CHANGED }
+ let(:event) { described_class::ISSUE_MILESTONE_CHANGED }
subject(:track_event) { described_class.track_issue_milestone_changed_action(author: user, project: project) }
end
@@ -94,7 +94,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue cross-referenced actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_CROSS_REFERENCED }
+ let(:event) { described_class::ISSUE_CROSS_REFERENCED }
subject(:track_event) { described_class.track_issue_cross_referenced_action(author: user, project: project) }
end
@@ -102,7 +102,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue moved actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_MOVED }
+ let(:event) { described_class::ISSUE_MOVED }
subject(:track_event) { described_class.track_issue_moved_action(author: user, project: project) }
end
@@ -110,7 +110,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue cloned actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_CLONED }
+ let(:event) { described_class::ISSUE_CLONED }
subject(:track_event) { described_class.track_issue_cloned_action(author: user, project: project) }
end
@@ -118,7 +118,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue relate actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_RELATED }
+ let(:event) { described_class::ISSUE_RELATED }
subject(:track_event) { described_class.track_issue_related_action(author: user, project: project) }
end
@@ -126,7 +126,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue unrelate actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_UNRELATED }
+ let(:event) { described_class::ISSUE_UNRELATED }
subject(:track_event) { described_class.track_issue_unrelated_action(author: user, project: project) }
end
@@ -134,7 +134,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue marked as duplicate actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_MARKED_AS_DUPLICATE }
+ let(:event) { described_class::ISSUE_MARKED_AS_DUPLICATE }
subject(:track_event) { described_class.track_issue_marked_as_duplicate_action(author: user, project: project) }
end
@@ -142,7 +142,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue locked actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_LOCKED }
+ let(:event) { described_class::ISSUE_LOCKED }
subject(:track_event) { described_class.track_issue_locked_action(author: user, project: project) }
end
@@ -150,7 +150,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue unlocked actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_UNLOCKED }
+ let(:event) { described_class::ISSUE_UNLOCKED }
subject(:track_event) { described_class.track_issue_unlocked_action(author: user, project: project) }
end
@@ -158,7 +158,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue designs added actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_DESIGNS_ADDED }
+ let(:event) { described_class::ISSUE_DESIGNS_ADDED }
subject(:track_event) { described_class.track_issue_designs_added_action(author: user, project: project) }
end
@@ -166,7 +166,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue designs modified actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_DESIGNS_MODIFIED }
+ let(:event) { described_class::ISSUE_DESIGNS_MODIFIED }
subject(:track_event) { described_class.track_issue_designs_modified_action(author: user, project: project) }
end
@@ -174,7 +174,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue designs removed actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_DESIGNS_REMOVED }
+ let(:event) { described_class::ISSUE_DESIGNS_REMOVED }
subject(:track_event) { described_class.track_issue_designs_removed_action(author: user, project: project) }
end
@@ -182,7 +182,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue due date changed actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_DUE_DATE_CHANGED }
+ let(:event) { described_class::ISSUE_DUE_DATE_CHANGED }
subject(:track_event) { described_class.track_issue_due_date_changed_action(author: user, project: project) }
end
@@ -190,7 +190,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue time estimate changed actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_TIME_ESTIMATE_CHANGED }
+ let(:event) { described_class::ISSUE_TIME_ESTIMATE_CHANGED }
subject(:track_event) { described_class.track_issue_time_estimate_changed_action(author: user, project: project) }
end
@@ -198,7 +198,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue time spent changed actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_TIME_SPENT_CHANGED }
+ let(:event) { described_class::ISSUE_TIME_SPENT_CHANGED }
subject(:track_event) { described_class.track_issue_time_spent_changed_action(author: user, project: project) }
end
@@ -206,7 +206,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue comment added actions', :snowplow do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_COMMENT_ADDED }
+ let(:event) { described_class::ISSUE_COMMENT_ADDED }
subject(:track_event) { described_class.track_issue_comment_added_action(author: user, project: project) }
end
@@ -214,7 +214,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue comment edited actions', :snowplow do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_COMMENT_EDITED }
+ let(:event) { described_class::ISSUE_COMMENT_EDITED }
subject(:track_event) { described_class.track_issue_comment_edited_action(author: user, project: project) }
end
@@ -222,7 +222,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue comment removed actions', :snowplow do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_COMMENT_REMOVED }
+ let(:event) { described_class::ISSUE_COMMENT_REMOVED }
subject(:track_event) { described_class.track_issue_comment_removed_action(author: user, project: project) }
end
@@ -230,7 +230,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
context 'for Issue design comment removed actions' do
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::ISSUE_DESIGN_COMMENT_REMOVED }
+ let(:event) { described_class::ISSUE_DESIGN_COMMENT_REMOVED }
subject(:track_event) { described_class.track_issue_design_comment_removed_action(author: user, project: project) }
end
diff --git a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
index 53eee62b386..c3a718e669a 100644
--- a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
@@ -64,7 +64,7 @@ RSpec.describe Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter, :cl
end
it_behaves_like 'internal event tracking' do
- let(:action) { described_class::MR_USER_CREATE_ACTION }
+ let(:event) { described_class::MR_USER_CREATE_ACTION }
let(:project) { target_project }
let(:namespace) { project.namespace }
end
diff --git a/spec/lib/gitlab/usage_data_queries_spec.rb b/spec/lib/gitlab/usage_data_queries_spec.rb
index 3ec7bf33623..6d30947167c 100644
--- a/spec/lib/gitlab/usage_data_queries_spec.rb
+++ b/spec/lib/gitlab/usage_data_queries_spec.rb
@@ -72,17 +72,18 @@ RSpec.describe Gitlab::UsageDataQueries do
describe '.add' do
it 'returns the combined raw SQL with an inner query' do
- expect(described_class.add('SELECT COUNT("users"."id") FROM "users"',
- 'SELECT COUNT("issues"."id") FROM "issues"'))
- .to eq('SELECT (SELECT COUNT("users"."id") FROM "users") + (SELECT COUNT("issues"."id") FROM "issues")')
+ expect(described_class.add(
+ 'SELECT COUNT("users"."id") FROM "users"',
+ 'SELECT COUNT("issues"."id") FROM "issues"'
+ )).to eq('SELECT (SELECT COUNT("users"."id") FROM "users") + (SELECT COUNT("issues"."id") FROM "issues")')
end
end
describe '.histogram' do
it 'returns the histogram sql' do
- expect(described_class.histogram(AlertManagement::HttpIntegration.active,
- :project_id, buckets: 1..2, bucket_size: 101))
- .to match(/^WITH "count_cte" AS MATERIALIZED/)
+ expect(described_class.histogram(
+ AlertManagement::HttpIntegration.active, :project_id, buckets: 1..2, bucket_size: 101
+ )).to match(/^WITH "count_cte" AS MATERIALIZED/)
end
end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 143d0484392..6f188aa408e 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -17,7 +17,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
it 'includes basic top and second level keys' do
is_expected.to include(:counts)
- is_expected.to include(:counts_monthly)
is_expected.to include(:counts_weekly)
is_expected.to include(:license)
@@ -152,8 +151,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
user = create(:user)
- project = create(:project, :repository_private,
- :test_repo, :remote_mirror, creator: user)
+ project = create(:project, :repository_private, :test_repo, :remote_mirror, creator: user)
create(:merge_request, source_project: project)
create(:deploy_key, user: user)
create(:key, user: user)
@@ -293,22 +291,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
bulk_imports: {
gitlab_v1: 2
},
- project_imports: {
- bitbucket: 2,
- bitbucket_server: 2,
- git: 2,
- gitea: 2,
- github: 2,
- gitlab_migration: 2,
- gitlab_project: 2,
- manifest: 2,
- total: 16
- },
- issue_imports: {
- jira: 2,
- fogbugz: 2,
- csv: 2
- },
group_imports: {
group_import: 2,
gitlab_migration: 2
@@ -320,22 +302,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
bulk_imports: {
gitlab_v1: 1
},
- project_imports: {
- bitbucket: 1,
- bitbucket_server: 1,
- git: 1,
- gitea: 1,
- github: 1,
- gitlab_migration: 1,
- gitlab_project: 1,
- manifest: 1,
- total: 8
- },
- issue_imports: {
- jira: 1,
- fogbugz: 1,
- csv: 1
- },
group_imports: {
group_import: 1,
gitlab_migration: 1
@@ -623,28 +589,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
end
end
- describe '.system_usage_data_monthly' do
- let_it_be(:project) { create(:project, created_at: 3.days.ago) }
-
- before do
- create(:package, project: project, created_at: 3.days.ago)
- create(:package, created_at: 2.months.ago, project: project)
-
- for_defined_days_back do
- create(:product_analytics_event, project: project, se_category: 'epics', se_action: 'promote')
- end
- end
-
- subject { described_class.system_usage_data_monthly }
-
- it 'gathers monthly usage counts correctly' do
- counts_monthly = subject[:counts_monthly]
-
- expect(counts_monthly[:projects]).to eq(1)
- expect(counts_monthly[:packages]).to eq(1)
- end
- end
-
context 'when not relying on database records' do
describe '.features_usage_data_ce' do
subject { described_class.features_usage_data_ce }
@@ -885,8 +829,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
it 'gathers Service Desk data' do
create_list(:issue, 2, :confidential, author: Users::Internal.support_bot, project: project)
- expect(subject).to eq(service_desk_enabled_projects: 1,
- service_desk_issues: 2)
+ expect(subject).to eq(service_desk_enabled_projects: 1, service_desk_issues: 2)
end
end
diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb
index 9bc1ebaebcb..cca18cb05c7 100644
--- a/spec/lib/gitlab/workhorse_spec.rb
+++ b/spec/lib/gitlab/workhorse_spec.rb
@@ -371,50 +371,13 @@ RSpec.describe Gitlab::Workhorse, feature_category: :shared do
subject(:cleanup_key) { described_class.cleanup_key(key) }
- shared_examples 'cleans up key' do |redis = Gitlab::Redis::Workhorse|
- before do
- described_class.set_key_and_notify(key, value)
- end
-
- it 'deletes the key' do
- expect { cleanup_key }
- .to change { redis.with { |c| c.exists?(key) } }.from(true).to(false)
- end
+ before do
+ described_class.set_key_and_notify(key, value)
end
- it_behaves_like 'cleans up key'
-
- context 'when workhorse migration feature flags are disabled' do
- before do
- stub_feature_flags(
- use_primary_and_secondary_stores_for_workhorse: false,
- use_primary_store_as_default_for_workhorse: false
- )
- end
-
- it_behaves_like 'cleans up key', Gitlab::Redis::SharedState
- end
-
- context 'when either workhorse migration feature flags are enabled' do
- context 'when use_primary_and_secondary_stores_for_workhorse is enabled' do
- before do
- stub_feature_flags(
- use_primary_store_as_default_for_workhorse: false
- )
- end
-
- it_behaves_like 'cleans up key'
- end
-
- context 'when use_primary_store_as_default_for_workhorse is enabled' do
- before do
- stub_feature_flags(
- use_primary_and_secondary_stores_for_workhorse: false
- )
- end
-
- it_behaves_like 'cleans up key'
- end
+ it 'deletes the key' do
+ expect { cleanup_key }
+ .to change { Gitlab::Redis::Workhorse.with { |c| c.exists?(key) } }.from(true).to(false)
end
end
@@ -424,13 +387,13 @@ RSpec.describe Gitlab::Workhorse, feature_category: :shared do
subject { described_class.set_key_and_notify(key, value, overwrite: overwrite) }
- shared_examples 'set and notify' do |redis = Gitlab::Redis::Workhorse|
+ shared_examples 'set and notify' do
it 'set and return the same value' do
is_expected.to eq(value)
end
it 'set and notify' do
- expect(redis).to receive(:with).and_call_original
+ expect(Gitlab::Redis::Workhorse).to receive(:with).and_call_original
expect_any_instance_of(::Redis).to receive(:publish)
.with(described_class::NOTIFICATION_PREFIX + 'test-key', "test-value")
@@ -442,39 +405,6 @@ RSpec.describe Gitlab::Workhorse, feature_category: :shared do
let(:overwrite) { true }
it_behaves_like 'set and notify'
-
- context 'when workhorse migration feature flags are disabled' do
- before do
- stub_feature_flags(
- use_primary_and_secondary_stores_for_workhorse: false,
- use_primary_store_as_default_for_workhorse: false
- )
- end
-
- it_behaves_like 'set and notify', Gitlab::Redis::SharedState
- end
-
- context 'when either workhorse migration feature flags are enabled' do
- context 'when use_primary_and_secondary_stores_for_workhorse is enabled' do
- before do
- stub_feature_flags(
- use_primary_store_as_default_for_workhorse: false
- )
- end
-
- it_behaves_like 'set and notify'
- end
-
- context 'when use_primary_store_as_default_for_workhorse is enabled' do
- before do
- stub_feature_flags(
- use_primary_and_secondary_stores_for_workhorse: false
- )
- end
-
- it_behaves_like 'set and notify'
- end
- end
end
context 'when we set an existing key' do
diff --git a/spec/lib/product_analytics/settings_spec.rb b/spec/lib/product_analytics/settings_spec.rb
deleted file mode 100644
index 9ba5dbfc8fc..00000000000
--- a/spec/lib/product_analytics/settings_spec.rb
+++ /dev/null
@@ -1,101 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ProductAnalytics::Settings, feature_category: :product_analytics_data_management do
- let_it_be(:project) { create(:project) }
-
- subject { described_class.for_project(project) }
-
- describe 'config settings' do
- context 'when configured' do
- before do
- mock_settings('test')
- end
-
- it 'will be configured' do
- expect(subject.configured?).to be_truthy
- end
- end
-
- context 'when not configured' do
- before do
- mock_settings('')
- end
-
- it 'will not be configured' do
- expect(subject.configured?).to be_falsey
- end
- end
-
- context 'when one configuration setting is missing' do
- before do
- missing_key = ProductAnalytics::Settings::ALL_CONFIG_KEYS.last
- mock_settings('test', ProductAnalytics::Settings::ALL_CONFIG_KEYS - [missing_key])
- allow(::Gitlab::CurrentSettings).to receive(missing_key).and_return('')
- end
-
- it 'will not be configured' do
- expect(subject.configured?).to be_falsey
- end
- end
-
- ProductAnalytics::Settings::ALL_CONFIG_KEYS.each do |key|
- it "can read #{key}" do
- expect(::Gitlab::CurrentSettings).to receive(key).and_return('test')
-
- expect(subject.send(key)).to eq('test')
- end
-
- context 'with project' do
- it "will override when provided a project #{key}" do
- expect(::Gitlab::CurrentSettings).not_to receive(key)
- expect(project.project_setting).to receive(key).and_return('test')
-
- expect(subject.send(key)).to eq('test')
- end
-
- it "will will not override when provided a blank project #{key}" do
- expect(::Gitlab::CurrentSettings).to receive(key).and_return('test')
- expect(project.project_setting).to receive(key).and_return('')
-
- expect(subject.send(key)).to eq('test')
- end
- end
- end
- end
-
- describe '.enabled?' do
- before do
- allow(subject).to receive(:configured?).and_return(true)
- end
-
- context 'when enabled' do
- before do
- allow(::Gitlab::CurrentSettings).to receive(:product_analytics_enabled?).and_return(true)
- end
-
- it 'will be enabled' do
- expect(subject.enabled?).to be_truthy
- end
- end
-
- context 'when disabled' do
- before do
- allow(::Gitlab::CurrentSettings).to receive(:product_analytics_enabled?).and_return(false)
- end
-
- it 'will be enabled' do
- expect(subject.enabled?).to be_falsey
- end
- end
- end
-
- private
-
- def mock_settings(setting, keys = ProductAnalytics::Settings::ALL_CONFIG_KEYS)
- keys.each do |key|
- allow(::Gitlab::CurrentSettings).to receive(key).and_return(setting)
- end
- end
-end
diff --git a/spec/lib/release_highlights/validator_spec.rb b/spec/lib/release_highlights/validator_spec.rb
index 7cfeffb095a..47f645d9c87 100644
--- a/spec/lib/release_highlights/validator_spec.rb
+++ b/spec/lib/release_highlights/validator_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ReleaseHighlights::Validator, feature_category: :experimentation_adoption do
+RSpec.describe ReleaseHighlights::Validator, feature_category: :activation do
let(:validator) { described_class.new(file: yaml_path) }
let(:yaml_path) { 'spec/fixtures/whats_new/valid.yml' }
let(:invalid_yaml_path) { 'spec/fixtures/whats_new/invalid.yml' }
diff --git a/spec/lib/sidebars/groups/menus/observability_menu_spec.rb b/spec/lib/sidebars/groups/menus/observability_menu_spec.rb
deleted file mode 100644
index 573760cddb6..00000000000
--- a/spec/lib/sidebars/groups/menus/observability_menu_spec.rb
+++ /dev/null
@@ -1,93 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Sidebars::Groups::Menus::ObservabilityMenu, feature_category: :navigation do
- let(:owner) { build_stubbed(:user) }
- let(:root_group) do
- build(:group, :private).tap do |g|
- g.add_owner(owner)
- end
- end
-
- let(:group) { root_group }
- let(:user) { owner }
- let(:context) { Sidebars::Groups::Context.new(current_user: user, container: group) }
- let(:menu) { described_class.new(context) }
-
- describe '#render?' do
- before do
- allow(menu).to receive(:can?).and_call_original
- end
-
- context 'when observability#explore is allowed' do
- before do
- allow(Gitlab::Observability).to receive(:allowed_for_action?).with(user, group, :explore).and_return(true)
- end
-
- it 'returns true' do
- expect(menu.render?).to eq true
- expect(Gitlab::Observability).to have_received(:allowed_for_action?).with(user, group, :explore)
- end
- end
-
- context 'when observability#explore is not allowed' do
- before do
- allow(Gitlab::Observability).to receive(:allowed_for_action?).with(user, group, :explore).and_return(false)
- end
-
- it 'returns false' do
- expect(menu.render?).to eq false
- expect(Gitlab::Observability).to have_received(:allowed_for_action?).with(user, group, :explore)
- end
- end
- end
-
- describe "Menu items" do
- before do
- allow(Gitlab::Observability).to receive(:allowed_for_action?).and_return(false)
- end
-
- subject { find_menu(menu, item_id) }
-
- shared_examples 'observability menu entry' do
- context 'when action is allowed' do
- before do
- allow(Gitlab::Observability).to receive(:allowed_for_action?).with(user, group, item_id).and_return(true)
- end
-
- it 'the menu item is added to list of menu items' do
- is_expected.not_to be_nil
- end
- end
-
- context 'when action is not allowed' do
- before do
- allow(Gitlab::Observability).to receive(:allowed_for_action?).with(user, group, item_id).and_return(false)
- end
-
- it 'the menu item is added to list of menu items' do
- is_expected.to be_nil
- end
- end
- end
-
- describe 'Explore' do
- it_behaves_like 'observability menu entry' do
- let(:item_id) { :explore }
- end
- end
-
- describe 'Datasources' do
- it_behaves_like 'observability menu entry' do
- let(:item_id) { :datasources }
- end
- end
- end
-
- private
-
- def find_menu(menu, item)
- menu.renderable_items.find { |i| i.item_id == item }
- end
-end
diff --git a/spec/lib/sidebars/groups/super_sidebar_menus/monitor_menu_spec.rb b/spec/lib/sidebars/groups/super_sidebar_menus/monitor_menu_spec.rb
deleted file mode 100644
index 759975856b8..00000000000
--- a/spec/lib/sidebars/groups/super_sidebar_menus/monitor_menu_spec.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Sidebars::Groups::SuperSidebarMenus::MonitorMenu, feature_category: :navigation do
- subject { described_class.new({}) }
-
- let(:items) { subject.instance_variable_get(:@items) }
-
- it 'has title and sprite_icon' do
- expect(subject.title).to eq(s_("Navigation|Monitor"))
- expect(subject.sprite_icon).to eq("monitor")
- end
-
- it 'defines list of NilMenuItem placeholders' do
- expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem])
- expect(items.map(&:item_id)).to eq([
- :explore,
- :datasources
- ])
- end
-end
diff --git a/spec/lib/sidebars/groups/super_sidebar_panel_spec.rb b/spec/lib/sidebars/groups/super_sidebar_panel_spec.rb
index c939dd870c4..fe1491a736e 100644
--- a/spec/lib/sidebars/groups/super_sidebar_panel_spec.rb
+++ b/spec/lib/sidebars/groups/super_sidebar_panel_spec.rb
@@ -34,7 +34,6 @@ RSpec.describe Sidebars::Groups::SuperSidebarPanel, feature_category: :navigatio
Sidebars::Groups::SuperSidebarMenus::SecureMenu,
Sidebars::Groups::SuperSidebarMenus::DeployMenu,
Sidebars::Groups::SuperSidebarMenus::OperationsMenu,
- Sidebars::Groups::SuperSidebarMenus::MonitorMenu,
Sidebars::Groups::SuperSidebarMenus::AnalyzeMenu,
Sidebars::UncategorizedMenu,
Sidebars::Groups::Menus::SettingsMenu
diff --git a/spec/lib/sidebars/organizations/menus/settings_menu_spec.rb b/spec/lib/sidebars/organizations/menus/settings_menu_spec.rb
new file mode 100644
index 00000000000..fb9f9ee3cce
--- /dev/null
+++ b/spec/lib/sidebars/organizations/menus/settings_menu_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Organizations::Menus::SettingsMenu, feature_category: :navigation do
+ let_it_be(:organization) { build(:organization) }
+
+ let(:user) { build(:user) }
+ let(:context) { Sidebars::Context.new(current_user: user, container: organization) }
+ let(:items) { subject.instance_variable_get(:@items) }
+
+ subject { described_class.new(context) }
+
+ it 'has title and sprite_icon' do
+ expect(subject.title).to eq(_("Settings"))
+ expect(subject.sprite_icon).to eq("settings")
+ end
+
+ describe '#render?' do
+ context 'when user is signed out' do
+ let(:user) { nil }
+
+ it 'returns false' do
+ expect(subject.render?).to eq false
+ end
+ end
+
+ context 'when `current_user` is an admin', :enable_admin_mode do
+ let(:user) { build(:admin) }
+
+ it 'returns true' do
+ expect(subject.render?).to eq true
+ end
+ end
+
+ context 'when `current_user` not an admin' do
+ it 'returns false' do
+ expect(subject.render?).to eq false
+ end
+ end
+ end
+
+ describe 'Menu items' do
+ subject { described_class.new(context).renderable_items.find { |e| e.item_id == item_id } }
+
+ describe 'General' do
+ let(:item_id) { :organization_settings_general }
+
+ it { is_expected.not_to be_nil }
+ end
+ end
+end
diff --git a/spec/lib/sidebars/organizations/super_sidebar_panel_spec.rb b/spec/lib/sidebars/organizations/super_sidebar_panel_spec.rb
index b8ceda615c4..b9025b69926 100644
--- a/spec/lib/sidebars/organizations/super_sidebar_panel_spec.rb
+++ b/spec/lib/sidebars/organizations/super_sidebar_panel_spec.rb
@@ -22,7 +22,8 @@ RSpec.describe Sidebars::Organizations::SuperSidebarPanel, feature_category: :na
let(:category_menu) do
[
Sidebars::StaticMenu,
- Sidebars::Organizations::Menus::ManageMenu
+ Sidebars::Organizations::Menus::ManageMenu,
+ Sidebars::Organizations::Menus::SettingsMenu
]
end
diff --git a/spec/lib/sidebars/projects/menus/deployments_menu_spec.rb b/spec/lib/sidebars/projects/menus/deployments_menu_spec.rb
index 75f612e9c7c..3224c4cdd33 100644
--- a/spec/lib/sidebars/projects/menus/deployments_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/deployments_menu_spec.rb
@@ -73,7 +73,7 @@ RSpec.describe Sidebars::Projects::Menus::DeploymentsMenu, feature_category: :na
let(:item_id) { :pages }
before do
- allow(project).to receive(:pages_available?).and_return(pages_enabled)
+ allow(::Gitlab::Pages).to receive(:enabled?).and_return(pages_enabled)
end
describe 'when pages are enabled' do
diff --git a/spec/lib/sidebars/projects/menus/settings_menu_spec.rb b/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
index 605cec8be5e..81ca9670ac6 100644
--- a/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
@@ -59,6 +59,18 @@ RSpec.describe Sidebars::Projects::Menus::SettingsMenu, feature_category: :navig
let(:item_id) { :access_tokens }
it_behaves_like 'access rights checks'
+
+ describe 'when the user is not an admin but has manage_resource_access_tokens' do
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(user, :admin_project, project).and_return(false)
+ allow(Ability).to receive(:allowed?).with(user, :manage_resource_access_tokens, project).and_return(true)
+ end
+
+ it 'includes access token menu item' do
+ expect(subject.title).to eql('Access Tokens')
+ end
+ end
end
describe 'Repository' do
diff --git a/spec/mailers/emails/in_product_marketing_spec.rb b/spec/mailers/emails/in_product_marketing_spec.rb
deleted file mode 100644
index 93a06bfc881..00000000000
--- a/spec/mailers/emails/in_product_marketing_spec.rb
+++ /dev/null
@@ -1,51 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require 'email_spec'
-
-RSpec.describe Emails::InProductMarketing do
- include EmailSpec::Matchers
- include Gitlab::Routing.url_helpers
-
- let_it_be(:user) { create(:user) }
-
- shared_examples 'has custom headers when on gitlab.com' do
- context 'when on gitlab.com', :saas do
- it 'has custom headers' do
- aggregate_failures do
- expect(subject).to deliver_from(described_class::FROM_ADDRESS)
- expect(subject).to reply_to(described_class::FROM_ADDRESS)
- expect(subject).to have_header('X-Mailgun-Track', 'yes')
- expect(subject).to have_header('X-Mailgun-Track-Clicks', 'yes')
- expect(subject).to have_header('X-Mailgun-Track-Opens', 'yes')
- expect(subject).to have_header('X-Mailgun-Tag', 'marketing')
- expect(subject).to have_body_text('%tag_unsubscribe_url%')
- end
- end
- end
- end
-
- describe '#build_ios_app_guide_email' do
- subject { Notify.build_ios_app_guide_email(user.notification_email_or_default) }
-
- it 'sends to the right user' do
- expect(subject).to deliver_to(user.notification_email_or_default)
- end
-
- it 'has the correct subject and content' do
- message = Gitlab::Email::Message::BuildIosAppGuide.new
- cta_url = 'https://about.gitlab.com/blog/2019/03/06/ios-publishing-with-gitlab-and-fastlane/'
- cta2_url = 'https://www.youtube.com/watch?v=325FyJt7ZG8'
-
- aggregate_failures do
- is_expected.to have_subject(message.subject_line)
- is_expected.to have_body_text(message.title)
- is_expected.to have_body_text(message.body_line1)
- is_expected.to have_body_text(CGI.unescapeHTML(message.cta_link))
- is_expected.to have_body_text(CGI.unescapeHTML(message.cta2_link))
- is_expected.to have_body_text(cta_url)
- is_expected.to have_body_text(cta2_url)
- end
- end
- end
-end
diff --git a/spec/mailers/emails/profile_spec.rb b/spec/mailers/emails/profile_spec.rb
index 4816e88a311..7ddb4810d53 100644
--- a/spec/mailers/emails/profile_spec.rb
+++ b/spec/mailers/emails/profile_spec.rb
@@ -199,6 +199,10 @@ RSpec.describe Emails::Profile, feature_category: :user_profile do
it_behaves_like 'it should not have Gmail Actions links'
it_behaves_like 'a user cannot unsubscribe through footer link'
it_behaves_like 'resource about to expire email'
+
+ it 'includes the email reason' do
+ is_expected.to have_body_text _('You are receiving this email because you are an Owner of the Group.')
+ end
end
context 'when access token belongs to a project' do
@@ -218,6 +222,10 @@ RSpec.describe Emails::Profile, feature_category: :user_profile do
it_behaves_like 'it should not have Gmail Actions links'
it_behaves_like 'a user cannot unsubscribe through footer link'
it_behaves_like 'resource about to expire email'
+
+ it 'includes the email reason' do
+ is_expected.to have_body_text _('You are receiving this email because you are a Maintainer of the Project.')
+ end
end
end
@@ -289,7 +297,7 @@ RSpec.describe Emails::Profile, feature_category: :user_profile do
end
it 'has the correct subject' do
- is_expected.to have_subject /^A personal access token has been revoked$/i
+ is_expected.to have_subject /^Your personal access token has been revoked$/i
end
it 'provides the names of the token' do
@@ -317,7 +325,7 @@ RSpec.describe Emails::Profile, feature_category: :user_profile do
end
it 'has the correct subject' do
- is_expected.to have_subject /^A personal access token has been revoked$/i
+ is_expected.to have_subject /^Your personal access token has been revoked$/i
end
it 'provides the names of the token' do
diff --git a/spec/migrations/20230721095222_delete_orphans_scan_finding_license_scanning_approval_rules2_spec.rb b/spec/migrations/20230721095222_delete_orphans_scan_finding_license_scanning_approval_rules2_spec.rb
new file mode 100644
index 00000000000..2baf0975c5c
--- /dev/null
+++ b/spec/migrations/20230721095222_delete_orphans_scan_finding_license_scanning_approval_rules2_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe DeleteOrphansScanFindingLicenseScanningApprovalRules2, feature_category: :security_policy_management do
+ describe '#up' do
+ it 'schedules background migration for both levels of approval rules' do
+ migrate!
+
+ expect(described_class::MERGE_REQUEST_MIGRATION).to have_scheduled_batched_migration(
+ table_name: :approval_merge_request_rules,
+ column_name: :id,
+ interval: described_class::INTERVAL)
+
+ expect(described_class::PROJECT_MIGRATION).to have_scheduled_batched_migration(
+ table_name: :approval_project_rules,
+ column_name: :id,
+ interval: described_class::INTERVAL)
+ end
+ end
+end
diff --git a/spec/migrations/20230905064317_swap_columns_for_ci_pipeline_variables_pipeline_id_bigint_spec.rb b/spec/migrations/20230905064317_swap_columns_for_ci_pipeline_variables_pipeline_id_bigint_spec.rb
new file mode 100644
index 00000000000..d5cc98831b6
--- /dev/null
+++ b/spec/migrations/20230905064317_swap_columns_for_ci_pipeline_variables_pipeline_id_bigint_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SwapColumnsForCiPipelineVariablesPipelineIdBigint, feature_category: :continuous_integration do
+ it_behaves_like(
+ 'swap conversion columns',
+ table_name: :ci_pipeline_variables,
+ from: :pipeline_id,
+ to: :pipeline_id_convert_to_bigint
+ )
+end
diff --git a/spec/migrations/20230908033511_swap_columns_for_ci_pipeline_chat_data_pipeline_id_bigint_spec.rb b/spec/migrations/20230908033511_swap_columns_for_ci_pipeline_chat_data_pipeline_id_bigint_spec.rb
new file mode 100644
index 00000000000..082bbeb9060
--- /dev/null
+++ b/spec/migrations/20230908033511_swap_columns_for_ci_pipeline_chat_data_pipeline_id_bigint_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SwapColumnsForCiPipelineChatDataPipelineIdBigint, feature_category: :continuous_integration do
+ let(:connection) { active_record_base.connection }
+ let(:table_ci_pipeline_chat_data) { table(:ci_pipeline_chat_data) }
+
+ before do
+ connection.execute('ALTER TABLE ci_pipeline_chat_data ALTER COLUMN pipeline_id TYPE integer')
+ connection.execute('ALTER TABLE ci_pipeline_chat_data ALTER COLUMN pipeline_id_convert_to_bigint TYPE bigint')
+ end
+
+ it 'swaps columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(column('pipeline_id').sql_type).to eq('integer')
+ expect(column('pipeline_id_convert_to_bigint').sql_type).to eq('bigint')
+ }
+
+ migration.after -> {
+ expect(column('pipeline_id').sql_type).to eq('bigint')
+ expect(column('pipeline_id_convert_to_bigint').sql_type).to eq('integer')
+ }
+ end
+ end
+ end
+
+ context 'when legacy foreign key exists' do
+ before do
+ if connection.foreign_key_exists?(
+ :ci_pipeline_chat_data, name: :fk_64ebfab6b3)
+ connection.remove_foreign_key(:ci_pipeline_chat_data, :ci_pipelines,
+ name: :fk_64ebfab6b3)
+ end
+
+ connection.add_foreign_key(:ci_pipeline_chat_data, :ci_pipelines, column: :pipeline_id,
+ name: :fk_rails_64ebfab6b3)
+ end
+
+ it 'renames the legacy foreign key fk_rails_64ebfab6b3' do
+ expect(connection.foreign_key_exists?(:ci_pipeline_chat_data, name: :fk_rails_64ebfab6b3)).to be_truthy
+ expect(connection.foreign_key_exists?(:ci_pipeline_chat_data, name: :fk_64ebfab6b3)).to be_falsy
+
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(column('pipeline_id').sql_type).to eq('integer')
+ expect(column('pipeline_id_convert_to_bigint').sql_type).to eq('bigint')
+ }
+
+ migration.after -> {
+ expect(column('pipeline_id').sql_type).to eq('bigint')
+ expect(column('pipeline_id_convert_to_bigint').sql_type).to eq('integer')
+
+ expect(connection.foreign_key_exists?(:ci_pipeline_chat_data, name: :fk_rails_64ebfab6b3)).to be_falsy
+ expect(connection.foreign_key_exists?(:ci_pipeline_chat_data, name: :fk_64ebfab6b3)).to be_truthy
+ }
+ end
+ end
+ end
+ end
+
+ private
+
+ def column(name)
+ table_ci_pipeline_chat_data.reset_column_information
+ table_ci_pipeline_chat_data.columns.find { |c| c.name == name.to_s }
+ end
+end
diff --git a/spec/migrations/20230912105945_queue_backfill_finding_id_in_vulnerabilities_spec.rb b/spec/migrations/20230912105945_queue_backfill_finding_id_in_vulnerabilities_spec.rb
new file mode 100644
index 00000000000..02c39408d40
--- /dev/null
+++ b/spec/migrations/20230912105945_queue_backfill_finding_id_in_vulnerabilities_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillFindingIdInVulnerabilities, feature_category: :vulnerability_management do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :vulnerabilities,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20230913071219_delete_pages_domain_with_reserved_domains_spec.rb b/spec/migrations/20230913071219_delete_pages_domain_with_reserved_domains_spec.rb
new file mode 100644
index 00000000000..a70bed53615
--- /dev/null
+++ b/spec/migrations/20230913071219_delete_pages_domain_with_reserved_domains_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe DeletePagesDomainWithReservedDomains, feature_category: :pages do
+ describe 'migrates' do
+ context 'when a reserved domain is provided' do
+ it 'delete the domain' do
+ table(:pages_domains).create!(domain: 'gmail.com', verification_code: 'gmail')
+ expect { migrate! }.to change { PagesDomain.count }.by(-1)
+ end
+ end
+
+ context 'when a reserved domain is provided with non standard case' do
+ it 'delete the domain' do
+ table(:pages_domains).create!(domain: 'AOl.com', verification_code: 'aol')
+ expect { migrate! }.to change { PagesDomain.count }.by(-1)
+ end
+ end
+
+ context 'when a non reserved domain is provided' do
+ it 'does not delete the domain' do
+ table(:pages_domains).create!(domain: 'example.com', verification_code: 'example')
+ expect { migrate! }.not_to change { PagesDomain.count }
+ expect(table(:pages_domains).find_by(domain: 'example.com')).not_to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20230920154302_change_epics_hierarchy_restrictions_spec.rb b/spec/migrations/20230920154302_change_epics_hierarchy_restrictions_spec.rb
new file mode 100644
index 00000000000..b3366c0bae6
--- /dev/null
+++ b/spec/migrations/20230920154302_change_epics_hierarchy_restrictions_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ChangeEpicsHierarchyRestrictions, :migration, feature_category: :portfolio_management do
+ include MigrationHelpers::WorkItemTypesHelper
+
+ let(:work_item_types) { table(:work_item_types) }
+ let(:work_item_hierarchy_restrictions) { table(:work_item_hierarchy_restrictions) }
+ let(:base_types) { { issue: 0, epic: 7 } }
+
+ let(:epic_type) { work_item_types.find_by!(namespace_id: nil, base_type: base_types[:epic]) }
+ let(:issue_type) { work_item_types.find_by!(namespace_id: nil, base_type: base_types[:issue]) }
+
+ shared_examples 'migration that updates cross_hierarchy_enabled column' do
+ it 'updates column value' do
+ expect { subject }.to not_change { work_item_hierarchy_restrictions.count }
+
+ expect(
+ work_item_hierarchy_restrictions.where(parent_type_id: epic_type.id)
+ .pluck(:child_type_id, :maximum_depth, :cross_hierarchy_enabled)
+ ).to contain_exactly(
+ [epic_type.id, 9, expected_cross_hierarchy_status],
+ [issue_type.id, 1, expected_cross_hierarchy_status]
+ )
+ end
+
+ it_behaves_like 'logs an error if type is missing', 'Epic'
+ it_behaves_like 'logs an error if type is missing', 'Issue'
+ end
+
+ shared_examples 'logs an error if type is missing' do |type_name|
+ let(:error_msg) { 'Issue or Epic work item types not found, skipping hierarchy restrictions update' }
+
+ it 'logs a warning' do
+ allow(described_class::MigrationWorkItemType).to receive(:find_by_name_and_namespace_id).and_call_original
+ allow(described_class::MigrationWorkItemType).to receive(:find_by_name_and_namespace_id).with(type_name, nil)
+ .and_return(nil)
+
+ expect(Gitlab::AppLogger).to receive(:warn).with(error_msg)
+ migrate!
+ end
+ end
+
+ describe 'up' do
+ let(:expected_cross_hierarchy_status) { true }
+
+ subject { migrate! }
+
+ it_behaves_like 'migration that updates cross_hierarchy_enabled column'
+ end
+
+ describe 'down' do
+ let(:expected_cross_hierarchy_status) { false }
+
+ subject do
+ migrate!
+ schema_migrate_down!
+ end
+
+ it_behaves_like 'migration that updates cross_hierarchy_enabled column'
+ end
+end
diff --git a/spec/migrations/20231001105945_requeue_backfill_finding_id_in_vulnerabilities_spec.rb b/spec/migrations/20231001105945_requeue_backfill_finding_id_in_vulnerabilities_spec.rb
new file mode 100644
index 00000000000..f89fc55b6b8
--- /dev/null
+++ b/spec/migrations/20231001105945_requeue_backfill_finding_id_in_vulnerabilities_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe RequeueBackfillFindingIdInVulnerabilities, feature_category: :vulnerability_management do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :vulnerabilities,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20231003142706_lower_project_build_timeout_to_respect_max_validation_spec.rb b/spec/migrations/20231003142706_lower_project_build_timeout_to_respect_max_validation_spec.rb
new file mode 100644
index 00000000000..5528f6a3115
--- /dev/null
+++ b/spec/migrations/20231003142706_lower_project_build_timeout_to_respect_max_validation_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db/post_migrate/20231003142706_lower_project_build_timeout_to_respect_max_validation.rb')
+
+RSpec.describe LowerProjectBuildTimeoutToRespectMaxValidation, feature_category: :continuous_integration do
+ let(:namespace) { table(:namespaces).create!(name: 'name', path: 'path') }
+ let(:projects) { table(:projects) }
+ let(:project) do
+ projects.create!(name: "project", path: "project", namespace_id: namespace.id, project_namespace_id: namespace.id)
+ end
+
+ before do
+ project.update_column(:build_timeout, 2.months.to_i)
+ end
+
+ describe "#up" do
+ it 'updates the build timeout' do
+ expect(project.build_timeout).to be > 1.month.to_i
+
+ migrate!
+
+ expect(project.reload.build_timeout).to be <= 1.month.to_i
+ end
+ end
+
+ describe "#down" do
+ it 'does nothing' do
+ expect(project.build_timeout).to be > 1.month.to_i
+
+ migrate!
+
+ expect(project.reload.build_timeout).to be <= 1.month.to_i
+
+ schema_migrate_down!
+
+ expect(project.reload.build_timeout).to be <= 1.month.to_i
+ end
+ end
+end
diff --git a/spec/migrations/20231011142714_queue_backfill_has_remediations_of_vulnerability_reads_spec.rb b/spec/migrations/20231011142714_queue_backfill_has_remediations_of_vulnerability_reads_spec.rb
new file mode 100644
index 00000000000..27ecc255a2a
--- /dev/null
+++ b/spec/migrations/20231011142714_queue_backfill_has_remediations_of_vulnerability_reads_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillHasRemediationsOfVulnerabilityReads, feature_category: :database do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :vulnerability_reads,
+ column_name: :vulnerability_id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/add_namespaces_emails_enabled_column_data_spec.rb b/spec/migrations/add_namespaces_emails_enabled_column_data_spec.rb
index 6cab3ca3d8f..c63724497c2 100644
--- a/spec/migrations/add_namespaces_emails_enabled_column_data_spec.rb
+++ b/spec/migrations/add_namespaces_emails_enabled_column_data_spec.rb
@@ -1,18 +1,9 @@
# frozen_string_literal: true
require 'spec_helper'
-require 'rake_helper'
require_migration!
RSpec.describe AddNamespacesEmailsEnabledColumnData, :migration, feature_category: :database do
- before :all do
- Rake.application.rake_require 'active_record/railties/databases'
- Rake.application.rake_require 'tasks/gitlab/db'
-
- # empty task as env is already loaded
- Rake::Task.define_task :environment
- end
-
let(:migration) { described_class::MIGRATION }
let(:projects) { table(:projects) }
let(:namespace_settings_table) { table(:namespace_settings) }
@@ -41,7 +32,10 @@ RSpec.describe AddNamespacesEmailsEnabledColumnData, :migration, feature_categor
end
end
- it 'sets emails_enabled to be the opposite of emails_disabled' do
+ it 'sets emails_enabled to be the opposite of emails_disabled', type: :task do
+ Rake.application.rake_require 'active_record/railties/databases'
+ Rake.application.rake_require 'tasks/gitlab/db'
+
disabled_records_to_migrate = 6
enabled_records_to_migrate = 4
diff --git a/spec/migrations/add_projects_emails_enabled_column_data_spec.rb b/spec/migrations/add_projects_emails_enabled_column_data_spec.rb
index 1d021ecd439..d10fa78f846 100644
--- a/spec/migrations/add_projects_emails_enabled_column_data_spec.rb
+++ b/spec/migrations/add_projects_emails_enabled_column_data_spec.rb
@@ -1,18 +1,9 @@
# frozen_string_literal: true
require 'spec_helper'
-require 'rake_helper'
require_migration!
RSpec.describe AddProjectsEmailsEnabledColumnData, :migration, feature_category: :database do
- before :all do
- Rake.application.rake_require 'active_record/railties/databases'
- Rake.application.rake_require 'tasks/gitlab/db'
-
- # empty task as env is already loaded
- Rake::Task.define_task :environment
- end
-
let(:migration) { described_class::MIGRATION }
let(:project_settings) { table(:project_settings) }
let(:projects) { table(:projects) }
@@ -41,7 +32,10 @@ RSpec.describe AddProjectsEmailsEnabledColumnData, :migration, feature_category:
end
end
- it 'sets emails_enabled to be the opposite of emails_disabled' do
+ it 'sets emails_enabled to be the opposite of emails_disabled', type: :task do
+ Rake.application.rake_require 'active_record/railties/databases'
+ Rake.application.rake_require 'tasks/gitlab/db'
+
disabled_records_to_migrate = 4
enabled_records_to_migrate = 2
diff --git a/spec/migrations/add_work_items_related_link_restrictions_spec.rb b/spec/migrations/add_work_items_related_link_restrictions_spec.rb
new file mode 100644
index 00000000000..e1e0b4c35ff
--- /dev/null
+++ b/spec/migrations/add_work_items_related_link_restrictions_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AddWorkItemsRelatedLinkRestrictions, :migration, feature_category: :portfolio_management do
+ let!(:restrictions) { table(:work_item_related_link_restrictions) }
+ let!(:work_item_types) { table(:work_item_types) }
+
+ # These rules are documented in https://docs.gitlab.com/ee/development/work_items.html#write-a-database-migration
+ it 'creates default restrictions' do
+ restrictions.delete_all
+
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(restrictions.count).to eq(0)
+ }
+
+ migration.after -> {
+ expect(restrictions.count).to eq(34)
+ }
+ end
+ end
+
+ context 'when work item types are missing' do
+ before do
+ work_item_types.delete_all
+ end
+
+ it 'does not add restrictions' do
+ expect(Gitlab::AppLogger).to receive(:warn)
+ .with('Default WorkItemType records are missing, not adding RelatedLinkRestrictions.')
+
+ expect { migrate! }.not_to change { restrictions.count }
+ end
+ end
+end
diff --git a/spec/models/abuse/reports/user_mention_spec.rb b/spec/models/abuse/reports/user_mention_spec.rb
new file mode 100644
index 00000000000..c5048134382
--- /dev/null
+++ b/spec/models/abuse/reports/user_mention_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Abuse::Reports::UserMention, feature_category: :insider_threat do
+ describe 'associations' do
+ it { is_expected.to belong_to(:abuse_report).optional(false) }
+ it { is_expected.to belong_to(:note).optional(false) }
+ end
+
+ it_behaves_like 'has user mentions'
+end
diff --git a/spec/models/abuse_report_spec.rb b/spec/models/abuse_report_spec.rb
index 1fa60a210e2..6500e5fac02 100644
--- a/spec/models/abuse_report_spec.rb
+++ b/spec/models/abuse_report_spec.rb
@@ -18,6 +18,8 @@ RSpec.describe AbuseReport, feature_category: :insider_threat do
it { is_expected.to belong_to(:assignee).class_name('User').inverse_of(:assigned_abuse_reports) }
it { is_expected.to belong_to(:user).inverse_of(:abuse_reports) }
it { is_expected.to have_many(:events).class_name('ResourceEvents::AbuseReportEvent').inverse_of(:abuse_report) }
+ it { is_expected.to have_many(:notes) }
+ it { is_expected.to have_many(:user_mentions).class_name('Abuse::Reports::UserMention') }
it "aliases reporter to author" do
expect(subject.author).to be(subject.reporter)
@@ -263,7 +265,7 @@ RSpec.describe AbuseReport, feature_category: :insider_threat do
let_it_be(:merge_request) { create(:merge_request) }
let_it_be(:user) { create(:user) }
- subject { report.report_type }
+ subject(:report_type) { report.report_type }
context 'when reported from an issue' do
let(:url) { project_issue_url(issue.project, issue) }
@@ -322,7 +324,7 @@ RSpec.describe AbuseReport, feature_category: :insider_threat do
let_it_be(:merge_request) { create(:merge_request, description: 'mr description') }
let_it_be(:user) { create(:user) }
- subject { report.reported_content }
+ subject(:reported_content) { report.reported_content }
context 'when reported from an issue' do
let(:url) { project_issue_url(issue.project, issue) }
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 3fc7d8f6fc8..78bf410075b 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -69,8 +69,7 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.to allow_value("dev.gitlab.com").for(:commit_email_hostname) }
it { is_expected.not_to allow_value("@dev.gitlab").for(:commit_email_hostname) }
- it { is_expected.to allow_value(true, false).for(:container_expiration_policies_enable_historic_entries) }
- it { is_expected.not_to allow_value(nil).for(:container_expiration_policies_enable_historic_entries) }
+ it { is_expected.to validate_inclusion_of(:container_expiration_policies_enable_historic_entries).in_array([true, false]) }
it { is_expected.to allow_value("myemail@gitlab.com").for(:lets_encrypt_notification_email) }
it { is_expected.to allow_value(nil).for(:lets_encrypt_notification_email) }
@@ -113,7 +112,7 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.to validate_numericality_of(:container_registry_cleanup_tags_service_max_list_size).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_data_repair_detail_worker_max_concurrency).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_expiration_policies_worker_capacity).only_integer.is_greater_than_or_equal_to(0) }
- it { is_expected.to allow_value(true, false).for(:container_registry_expiration_policies_caching) }
+ it { is_expected.to validate_inclusion_of(:container_registry_expiration_policies_caching).in_array([true, false]) }
it { is_expected.to validate_numericality_of(:container_registry_import_max_tags_count).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_import_max_retries).only_integer.is_greater_than_or_equal_to(0) }
@@ -149,8 +148,7 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.to validate_numericality_of(:snippet_size_limit).only_integer.is_greater_than(0) }
it { is_expected.to validate_numericality_of(:wiki_page_max_content_bytes).only_integer.is_greater_than_or_equal_to(1024) }
- it { is_expected.to allow_value(true, false).for(:wiki_asciidoc_allow_uri_includes) }
- it { is_expected.not_to allow_value(nil).for(:wiki_asciidoc_allow_uri_includes) }
+ it { is_expected.to validate_inclusion_of(:wiki_asciidoc_allow_uri_includes).in_array([true, false]) }
it { is_expected.to validate_presence_of(:max_artifacts_size) }
it { is_expected.to validate_numericality_of(:max_artifacts_size).only_integer.is_greater_than(0) }
it { is_expected.to validate_presence_of(:max_yaml_size_bytes) }
@@ -162,11 +160,9 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.to validate_presence_of(:max_terraform_state_size_bytes) }
it { is_expected.to validate_numericality_of(:max_terraform_state_size_bytes).only_integer.is_greater_than_or_equal_to(0) }
- it { is_expected.to allow_value(true, false).for(:user_defaults_to_private_profile) }
- it { is_expected.not_to allow_value(nil).for(:user_defaults_to_private_profile) }
+ it { is_expected.to validate_inclusion_of(:user_defaults_to_private_profile).in_array([true, false]) }
- it { is_expected.to allow_values([true, false]).for(:deny_all_requests_except_allowed) }
- it { is_expected.not_to allow_value(nil).for(:deny_all_requests_except_allowed) }
+ it { is_expected.to validate_inclusion_of(:deny_all_requests_except_allowed).in_array([true, false]) }
it 'ensures max_pages_size is an integer greater than 0 (or equal to 0 to indicate unlimited/maximum)' do
is_expected.to validate_numericality_of(:max_pages_size).only_integer.is_greater_than_or_equal_to(0)
@@ -254,8 +250,7 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.to allow_value('http://example.com/').for(:public_runner_releases_url) }
it { is_expected.not_to allow_value(nil).for(:public_runner_releases_url) }
- it { is_expected.to allow_value([true, false]).for(:update_runner_versions_enabled) }
- it { is_expected.not_to allow_value(nil).for(:update_runner_versions_enabled) }
+ it { is_expected.to validate_inclusion_of(:update_runner_versions_enabled).in_array([true, false]) }
it { is_expected.not_to allow_value(['']).for(:valid_runner_registrars) }
it { is_expected.not_to allow_value(['OBVIOUSLY_WRONG']).for(:valid_runner_registrars) }
@@ -268,21 +263,17 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.to allow_value(http).for(:jira_connect_proxy_url) }
it { is_expected.to allow_value(https).for(:jira_connect_proxy_url) }
- it { is_expected.to allow_value(true, false).for(:bulk_import_enabled) }
- it { is_expected.not_to allow_value(nil).for(:bulk_import_enabled) }
+ it { is_expected.to validate_inclusion_of(:bulk_import_enabled).in_array([true, false]) }
- it { is_expected.to allow_value(true, false).for(:allow_runner_registration_token) }
- it { is_expected.not_to allow_value(nil).for(:allow_runner_registration_token) }
+ it { is_expected.to validate_inclusion_of(:allow_runner_registration_token).in_array([true, false]) }
- it { is_expected.to allow_value(true, false).for(:gitlab_dedicated_instance) }
- it { is_expected.not_to allow_value(nil).for(:gitlab_dedicated_instance) }
+ it { is_expected.to validate_inclusion_of(:gitlab_dedicated_instance).in_array([true, false]) }
it { is_expected.not_to allow_value(apdex_slo: '10').for(:prometheus_alert_db_indicators_settings) }
it { is_expected.to allow_value(nil).for(:prometheus_alert_db_indicators_settings) }
it { is_expected.to allow_value(valid_prometheus_alert_db_indicators_settings).for(:prometheus_alert_db_indicators_settings) }
- it { is_expected.to allow_value([true, false]).for(:silent_mode_enabled) }
- it { is_expected.not_to allow_value(nil).for(:silent_mode_enabled) }
+ it { is_expected.to validate_inclusion_of(:silent_mode_enabled).in_array([true, false]) }
it { is_expected.to allow_value(0).for(:ci_max_includes) }
it { is_expected.to allow_value(200).for(:ci_max_includes) }
@@ -298,16 +289,16 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.not_to allow_value(10.5).for(:ci_max_total_yaml_size_bytes) }
it { is_expected.not_to allow_value(-1).for(:ci_max_total_yaml_size_bytes) }
- it { is_expected.to allow_value([true, false]).for(:remember_me_enabled) }
- it { is_expected.not_to allow_value(nil).for(:remember_me_enabled) }
+ it { is_expected.to validate_inclusion_of(:remember_me_enabled).in_array([true, false]) }
it { is_expected.to validate_numericality_of(:namespace_aggregation_schedule_lease_duration_in_seconds).only_integer.is_greater_than(0) }
- it { is_expected.to allow_values([true, false]).for(:instance_level_code_suggestions_enabled) }
- it { is_expected.not_to allow_value(nil).for(:instance_level_code_suggestions_enabled) }
+ it { is_expected.to validate_inclusion_of(:instance_level_code_suggestions_enabled).in_array([true, false]) }
- it { is_expected.to allow_values([true, false]).for(:package_registry_allow_anyone_to_pull_option) }
- it { is_expected.not_to allow_value(nil).for(:package_registry_allow_anyone_to_pull_option) }
+ it { is_expected.to validate_inclusion_of(:package_registry_allow_anyone_to_pull_option).in_array([true, false]) }
+
+ it { is_expected.to allow_value([true, false]).for(:math_rendering_limits_enabled) }
+ it { is_expected.not_to allow_value(nil).for(:math_rendering_limits_enabled) }
context 'when deactivate_dormant_users is enabled' do
before do
@@ -639,6 +630,18 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
end
specify do
+ is_expected.to validate_numericality_of(:failed_login_attempts_unlock_period_in_minutes)
+ .only_integer
+ .is_greater_than(0)
+ end
+
+ specify do
+ is_expected.to validate_numericality_of(:max_login_attempts)
+ .only_integer
+ .is_greater_than(0)
+ end
+
+ specify do
is_expected.to validate_numericality_of(:local_markdown_version)
.only_integer
.is_greater_than_or_equal_to(0)
diff --git a/spec/models/approval_spec.rb b/spec/models/approval_spec.rb
index 3d382c1712a..ff2f7408941 100644
--- a/spec/models/approval_spec.rb
+++ b/spec/models/approval_spec.rb
@@ -13,4 +13,16 @@ RSpec.describe Approval, feature_category: :code_review_workflow do
it { is_expected.to validate_uniqueness_of(:user_id).scoped_to([:merge_request_id]) }
end
+
+ describe '.with_invalid_patch_id_sha' do
+ let(:patch_id_sha) { 'def456' }
+ let!(:approval_1) { create(:approval, patch_id_sha: 'abc123') }
+ let!(:approval_2) { create(:approval, patch_id_sha: nil) }
+ let!(:approval_3) { create(:approval, patch_id_sha: patch_id_sha) }
+
+ it 'returns approvals with patch_id_sha not matching specified patch_id_sha' do
+ expect(described_class.with_invalid_patch_id_sha(patch_id_sha))
+ .to match_array([approval_1, approval_2])
+ end
+ end
end
diff --git a/spec/models/blob_viewer/gitlab_ci_yml_spec.rb b/spec/models/blob_viewer/gitlab_ci_yml_spec.rb
index 36b75e5338a..b6321b9aaf3 100644
--- a/spec/models/blob_viewer/gitlab_ci_yml_spec.rb
+++ b/spec/models/blob_viewer/gitlab_ci_yml_spec.rb
@@ -55,7 +55,8 @@ RSpec.describe BlobViewer::GitlabCiYml, feature_category: :source_code_managemen
context 'when a project ref does not contain the sha' do
it 'returns an error' do
- expect(validation_message).to match(/Could not validate configuration/)
+ expect(validation_message).to match(
+ /configuration originates from an external project or a commit not associated with a Git reference/)
end
end
end
diff --git a/spec/models/bulk_import_spec.rb b/spec/models/bulk_import_spec.rb
index a50fc6eaba4..ff24f57f7c4 100644
--- a/spec/models/bulk_import_spec.rb
+++ b/spec/models/bulk_import_spec.rb
@@ -40,6 +40,14 @@ RSpec.describe BulkImport, type: :model, feature_category: :importers do
it { expect(described_class.min_gl_version_for_project_migration.to_s).to eq('14.4.0') }
end
+ describe '#completed?' do
+ it { expect(described_class.new(status: -1)).to be_completed }
+ it { expect(described_class.new(status: 0)).not_to be_completed }
+ it { expect(described_class.new(status: 1)).not_to be_completed }
+ it { expect(described_class.new(status: 2)).to be_completed }
+ it { expect(described_class.new(status: 3)).to be_completed }
+ end
+
describe '#source_version_info' do
it 'returns source_version as Gitlab::VersionInfo' do
bulk_import = build(:bulk_import, source_version: '9.13.2')
diff --git a/spec/models/bulk_imports/tracker_spec.rb b/spec/models/bulk_imports/tracker_spec.rb
index a618a12df6b..edd9adfa5f6 100644
--- a/spec/models/bulk_imports/tracker_spec.rb
+++ b/spec/models/bulk_imports/tracker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::Tracker, type: :model do
+RSpec.describe BulkImports::Tracker, type: :model, feature_category: :importers do
describe 'associations' do
it do
is_expected.to belong_to(:entity).required.class_name('BulkImports::Entity')
@@ -30,19 +30,14 @@ RSpec.describe BulkImports::Tracker, type: :model do
end
end
- describe '.stage_running?' do
- it 'returns true if there is any unfinished pipeline in the given stage' do
- tracker = create(:bulk_import_tracker)
-
- expect(described_class.stage_running?(tracker.entity.id, 0))
- .to eq(true)
- end
-
- it 'returns false if there are no unfinished pipeline in the given stage' do
- tracker = create(:bulk_import_tracker, :finished)
+ describe '.running_trackers' do
+ it 'returns trackers that are running for a given entity' do
+ entity = create(:bulk_import_entity)
+ BulkImports::Tracker.state_machines[:status].states.map(&:value).each do |status|
+ create(:bulk_import_tracker, status: status, entity: entity)
+ end
- expect(described_class.stage_running?(tracker.entity.id, 0))
- .to eq(false)
+ expect(described_class.running_trackers(entity.id).pluck(:status)).to include(1, 3)
end
end
diff --git a/spec/models/chat_name_spec.rb b/spec/models/chat_name_spec.rb
index 9d6b1a56458..dbe013f3872 100644
--- a/spec/models/chat_name_spec.rb
+++ b/spec/models/chat_name_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe ChatName, feature_category: :integrations do
- let_it_be(:chat_name) { create(:chat_name) }
+ let_it_be_with_reload(:chat_name) { create(:chat_name) }
subject { chat_name }
@@ -33,6 +33,22 @@ RSpec.describe ChatName, feature_category: :integrations do
expect(subject.last_used_at).to eq(time)
end
+
+ it 'updates last_used_at if it was not recently updated' do
+ allow_next_instance_of(Gitlab::ExclusiveLease) do |lease|
+ allow(lease).to receive(:try_obtain).and_return('successful_lease_guid')
+ end
+
+ subject.update_last_used_at
+
+ new_time = ChatName::LAST_USED_AT_INTERVAL.from_now + 5.minutes
+
+ travel_to(new_time) do
+ subject.update_last_used_at
+ end
+
+ expect(subject.last_used_at).to be_like_time(new_time)
+ end
end
it_behaves_like 'it has loose foreign keys' do
diff --git a/spec/models/ci/build_need_spec.rb b/spec/models/ci/build_need_spec.rb
index e46a2b8cf85..4f76a7650ec 100644
--- a/spec/models/ci/build_need_spec.rb
+++ b/spec/models/ci/build_need_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Ci::BuildNeed, model: true, feature_category: :continuous_integra
it { is_expected.to validate_presence_of(:build) }
it { is_expected.to validate_presence_of(:name) }
- it { is_expected.to validate_length_of(:name).is_at_most(128) }
+ it { is_expected.to validate_length_of(:name).is_at_most(255) }
describe '.artifacts' do
let_it_be(:with_artifacts) { create(:ci_build_need, artifacts: true) }
diff --git a/spec/models/ci/catalog/components_project_spec.rb b/spec/models/ci/catalog/components_project_spec.rb
new file mode 100644
index 00000000000..d7e0ee2079c
--- /dev/null
+++ b/spec/models/ci/catalog/components_project_spec.rb
@@ -0,0 +1,104 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Catalog::ComponentsProject, feature_category: :pipeline_composition do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:files) do
+ {
+ 'templates/secret-detection.yml' => "spec:\n inputs:\n website:\n---\nimage: alpine_1",
+ 'templates/dast/template.yml' => 'image: alpine_2',
+ 'templates/template.yml' => 'image: alpine_3',
+ 'templates/blank-yaml.yml' => '',
+ 'templates/dast/sub-folder/template.yml' => 'image: alpine_4',
+ 'tests/test.yml' => 'image: alpine_5',
+ 'README.md' => 'Read me'
+ }
+ end
+
+ let_it_be(:project) do
+ create(
+ :project, :custom_repo,
+ description: 'Simple, complex, and other components',
+ files: files
+ )
+ end
+
+ let_it_be(:catalog_resource) { create(:ci_catalog_resource, project: project) }
+
+ let(:components_project) { described_class.new(project, project.default_branch) }
+
+ describe '#fetch_component_paths' do
+ it 'retrieves all the paths for valid components' do
+ paths = components_project.fetch_component_paths(project.default_branch)
+
+ expect(paths).to contain_exactly(
+ 'templates/blank-yaml.yml', 'templates/dast/template.yml', 'templates/secret-detection.yml',
+ 'templates/template.yml'
+ )
+ end
+ end
+
+ describe '#extract_component_name' do
+ context 'with invalid component path' do
+ it 'raises an error' do
+ expect(components_project.extract_component_name('not-template/this-is-wrong.yml')).to be_nil
+ end
+ end
+
+ context 'with valid component paths' do
+ where(:path, :name) do
+ 'templates/secret-detection.yml' | 'secret-detection'
+ 'templates/dast/template.yml' | 'dast'
+ 'templates/template.yml' | 'template'
+ 'templates/blank-yaml.yml' | 'blank-yaml'
+ end
+
+ with_them do
+ it 'extracts the component name from the path' do
+ expect(components_project.extract_component_name(path)).to eq(name)
+ end
+ end
+ end
+ end
+
+ describe '#extract_inputs' do
+ context 'with valid inputs' do
+ it 'extracts the inputs from a blob' do
+ blob = "spec:\n inputs:\n website:\n---\nimage: alpine_1"
+
+ expect(components_project.extract_inputs(blob)).to eq({ website: nil })
+ end
+ end
+
+ context 'with invalid inputs' do
+ it 'raises InvalidFormatError' do
+ blob = "spec:\n inputs:\n website:\n---\nsome: invalid: string"
+
+ expect do
+ components_project.extract_inputs(blob)
+ end.to raise_error(::Gitlab::Config::Loader::FormatError,
+ /mapping values are not allowed in this context/)
+ end
+ end
+ end
+
+ describe '#fetch_component' do
+ where(:component_name, :content, :path) do
+ 'secret-detection' | "spec:\n inputs:\n website:\n---\nimage: alpine_1" | 'templates/secret-detection.yml'
+ 'dast' | 'image: alpine_2' | 'templates/dast/template.yml'
+ 'template' | 'image: alpine_3' | 'templates/template.yml'
+ 'blank-yaml' | '' | 'templates/blank-yaml.yml'
+ end
+
+ with_them do
+ it 'fetches the content for a component' do
+ data = components_project.fetch_component(component_name)
+
+ expect(data.path).to eq(path)
+ expect(data.content).to eq(content)
+ end
+ end
+ end
+end
diff --git a/spec/models/ci/catalog/listing_spec.rb b/spec/models/ci/catalog/listing_spec.rb
index f28a0e82bbd..7524d908252 100644
--- a/spec/models/ci/catalog/listing_spec.rb
+++ b/spec/models/ci/catalog/listing_spec.rb
@@ -6,7 +6,8 @@ RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do
let_it_be(:namespace) { create(:group) }
let_it_be(:project_1) { create(:project, namespace: namespace, name: 'X Project') }
let_it_be(:project_2) { create(:project, namespace: namespace, name: 'B Project') }
- let_it_be(:project_3) { create(:project) }
+ let_it_be(:project_3) { create(:project, namespace: namespace, name: 'A Project') }
+ let_it_be(:project_4) { create(:project) }
let_it_be(:user) { create(:user) }
let(:list) { described_class.new(namespace, user) }
@@ -34,12 +35,20 @@ RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do
end
context 'when the namespace has catalog resources' do
- let_it_be(:resource) { create(:ci_catalog_resource, project: project_1) }
- let_it_be(:resource_2) { create(:ci_catalog_resource, project: project_2) }
- let_it_be(:other_namespace_resource) { create(:ci_catalog_resource, project: project_3) }
+ let_it_be(:today) { Time.zone.now }
+ let_it_be(:yesterday) { today - 1.day }
+ let_it_be(:tomorrow) { today + 1.day }
+
+ let_it_be(:resource) { create(:ci_catalog_resource, project: project_1, latest_released_at: yesterday) }
+ let_it_be(:resource_2) { create(:ci_catalog_resource, project: project_2, latest_released_at: today) }
+ let_it_be(:resource_3) { create(:ci_catalog_resource, project: project_3, latest_released_at: nil) }
+
+ let_it_be(:other_namespace_resource) do
+ create(:ci_catalog_resource, project: project_4, latest_released_at: tomorrow)
+ end
it 'contains only catalog resources for projects in that namespace' do
- is_expected.to contain_exactly(resource, resource_2)
+ is_expected.to contain_exactly(resource, resource_2, resource_3)
end
context 'with a sort parameter' do
@@ -48,16 +57,32 @@ RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do
context 'when the sort is name ascending' do
let_it_be(:sort) { :name_asc }
- it 'contains catalog resources for projects sorted by name' do
- is_expected.to eq([resource_2, resource])
+ it 'contains catalog resources for projects sorted by name ascending' do
+ is_expected.to eq([resource_3, resource_2, resource])
end
end
context 'when the sort is name descending' do
let_it_be(:sort) { :name_desc }
- it 'contains catalog resources for projects sorted by name' do
- is_expected.to eq([resource, resource_2])
+ it 'contains catalog resources for projects sorted by name descending' do
+ is_expected.to eq([resource, resource_2, resource_3])
+ end
+ end
+
+ context 'when the sort is latest_released_at ascending' do
+ let_it_be(:sort) { :latest_released_at_asc }
+
+ it 'contains catalog resources sorted by latest_released_at ascending with nulls last' do
+ is_expected.to eq([resource, resource_2, resource_3])
+ end
+ end
+
+ context 'when the sort is latest_released_at descending' do
+ let_it_be(:sort) { :latest_released_at_desc }
+
+ it 'contains catalog resources sorted by latest_released_at descending with nulls last' do
+ is_expected.to eq([resource_2, resource, resource_3])
end
end
end
diff --git a/spec/models/ci/catalog/resource_spec.rb b/spec/models/ci/catalog/resource_spec.rb
index 082283bb7bc..4ce1433e015 100644
--- a/spec/models/ci/catalog/resource_spec.rb
+++ b/spec/models/ci/catalog/resource_spec.rb
@@ -3,16 +3,20 @@
require 'spec_helper'
RSpec.describe Ci::Catalog::Resource, feature_category: :pipeline_composition do
+ let_it_be(:today) { Time.zone.now }
+ let_it_be(:yesterday) { today - 1.day }
+ let_it_be(:tomorrow) { today + 1.day }
+
let_it_be(:project) { create(:project, name: 'A') }
let_it_be(:project_2) { build(:project, name: 'Z') }
let_it_be(:project_3) { build(:project, name: 'L') }
- let_it_be(:resource) { create(:ci_catalog_resource, project: project) }
- let_it_be(:resource_2) { create(:ci_catalog_resource, project: project_2) }
- let_it_be(:resource_3) { create(:ci_catalog_resource, project: project_3) }
+ let_it_be(:resource) { create(:ci_catalog_resource, project: project, latest_released_at: tomorrow) }
+ let_it_be(:resource_2) { create(:ci_catalog_resource, project: project_2, latest_released_at: today) }
+ let_it_be(:resource_3) { create(:ci_catalog_resource, project: project_3, latest_released_at: nil) }
- let_it_be(:release1) { create(:release, project: project, released_at: Time.zone.now - 2.days) }
- let_it_be(:release2) { create(:release, project: project, released_at: Time.zone.now - 1.day) }
- let_it_be(:release3) { create(:release, project: project, released_at: Time.zone.now) }
+ let_it_be(:release1) { create(:release, project: project, released_at: yesterday) }
+ let_it_be(:release2) { create(:release, project: project, released_at: today) }
+ let_it_be(:release3) { create(:release, project: project, released_at: tomorrow) }
it { is_expected.to belong_to(:project) }
it { is_expected.to have_many(:components).class_name('Ci::Catalog::Resources::Component') }
@@ -58,6 +62,22 @@ RSpec.describe Ci::Catalog::Resource, feature_category: :pipeline_composition do
end
end
+ describe '.order_by_latest_released_at_desc' do
+ it 'returns catalog resources sorted by latest_released_at descending with nulls last' do
+ ordered_resources = described_class.order_by_latest_released_at_desc
+
+ expect(ordered_resources).to eq([resource, resource_2, resource_3])
+ end
+ end
+
+ describe '.order_by_latest_released_at_asc' do
+ it 'returns catalog resources sorted by latest_released_at ascending with nulls last' do
+ ordered_resources = described_class.order_by_latest_released_at_asc
+
+ expect(ordered_resources).to eq([resource_2, resource, resource_3])
+ end
+ end
+
describe '#versions' do
it 'returns releases ordered by released date descending' do
expect(resource.versions).to eq([release3, release2, release1])
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 7e572e2fdc6..887ec48ec8f 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -2925,7 +2925,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
let(:pipeline) { create(:ci_pipeline, :created) }
it 'returns detailed status for created pipeline' do
- expect(subject.text).to eq s_('CiStatusText|created')
+ expect(subject.text).to eq s_('CiStatusText|Created')
end
end
@@ -2933,7 +2933,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
let(:pipeline) { create(:ci_pipeline, status: :pending) }
it 'returns detailed status for pending pipeline' do
- expect(subject.text).to eq s_('CiStatusText|pending')
+ expect(subject.text).to eq s_('CiStatusText|Pending')
end
end
@@ -2941,7 +2941,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
let(:pipeline) { create(:ci_pipeline, status: :running) }
it 'returns detailed status for running pipeline' do
- expect(subject.text).to eq s_('CiStatus|running')
+ expect(subject.text).to eq s_('CiStatusText|Running')
end
end
@@ -2949,7 +2949,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
let(:pipeline) { create(:ci_pipeline, status: :success) }
it 'returns detailed status for successful pipeline' do
- expect(subject.text).to eq s_('CiStatusText|passed')
+ expect(subject.text).to eq s_('CiStatusText|Passed')
end
end
@@ -2957,7 +2957,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
let(:pipeline) { create(:ci_pipeline, status: :failed) }
it 'returns detailed status for failed pipeline' do
- expect(subject.text).to eq s_('CiStatusText|failed')
+ expect(subject.text).to eq s_('CiStatusText|Failed')
end
end
@@ -2965,7 +2965,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
let(:pipeline) { create(:ci_pipeline, status: :canceled) }
it 'returns detailed status for canceled pipeline' do
- expect(subject.text).to eq s_('CiStatusText|canceled')
+ expect(subject.text).to eq s_('CiStatusText|Canceled')
end
end
@@ -2973,7 +2973,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
let(:pipeline) { create(:ci_pipeline, status: :skipped) }
it 'returns detailed status for skipped pipeline' do
- expect(subject.text).to eq s_('CiStatusText|skipped')
+ expect(subject.text).to eq s_('CiStatusText|Skipped')
end
end
@@ -2981,7 +2981,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
let(:pipeline) { create(:ci_pipeline, status: :manual) }
it 'returns detailed status for blocked pipeline' do
- expect(subject.text).to eq s_('CiStatusText|blocked')
+ expect(subject.text).to eq s_('CiStatusText|Blocked')
end
end
@@ -3250,22 +3250,23 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
shared_examples 'a method that returns all merge requests for a given pipeline' do
let(:pipeline) { create(:ci_empty_pipeline, status: 'created', project: pipeline_project, ref: 'master') }
+ let(:merge_request) do
+ create(
+ :merge_request,
+ source_project: pipeline_project,
+ target_project: project,
+ source_branch: pipeline.ref
+ )
+ end
it 'returns all merge requests having the same source branch and the pipeline sha' do
- merge_request = create(:merge_request, source_project: pipeline_project, target_project: project, source_branch: pipeline.ref)
-
- create(:merge_request_diff, merge_request: merge_request).tap do |diff|
- create(:merge_request_diff_commit, merge_request_diff: diff, sha: pipeline.sha)
- end
+ create(:merge_request_diff_commit, merge_request_diff: merge_request.merge_request_diff, sha: pipeline.sha)
expect(pipeline.all_merge_requests).to eq([merge_request])
end
it "doesn't return merge requests having the same source branch without the pipeline sha" do
- merge_request = create(:merge_request, source_project: pipeline_project, target_project: project, source_branch: pipeline.ref)
- create(:merge_request_diff, merge_request: merge_request).tap do |diff|
- create(:merge_request_diff_commit, merge_request_diff: diff, sha: 'unrelated')
- end
+ create(:merge_request_diff_commit, merge_request_diff: merge_request.merge_request_diff, sha: 'unrelated')
expect(pipeline.all_merge_requests).to be_empty
end
@@ -5577,4 +5578,25 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
end
end
end
+
+ describe '#reduced_build_attributes_list_for_rules?' do
+ subject { pipeline.reduced_build_attributes_list_for_rules? }
+
+ let(:pipeline) { build_stubbed(:ci_pipeline, project: project, user: user) }
+
+ it { is_expected.to be_truthy }
+
+ it 'memoizes the result' do
+ expect { subject }
+ .to change { pipeline.strong_memoized?(:reduced_build_attributes_list_for_rules?) }
+ end
+
+ context 'with the FF disabled' do
+ before do
+ stub_feature_flags(reduced_build_attributes_list_for_rules: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
end
diff --git a/spec/models/ci/processable_spec.rb b/spec/models/ci/processable_spec.rb
index c6af7609778..8c0143d5f18 100644
--- a/spec/models/ci/processable_spec.rb
+++ b/spec/models/ci/processable_spec.rb
@@ -93,7 +93,7 @@ RSpec.describe Ci::Processable, feature_category: :continuous_integration do
pipeline_id report_results pending_state pages_deployments
queuing_entry runtime_metadata trace_metadata
dast_site_profile dast_scanner_profile stage_id dast_site_profiles_build
- dast_scanner_profiles_build].freeze
+ dast_scanner_profiles_build auto_canceled_by_partition_id].freeze
end
before_all do
diff --git a/spec/models/ci/ref_spec.rb b/spec/models/ci/ref_spec.rb
index a60aed98a21..75071a17fa9 100644
--- a/spec/models/ci/ref_spec.rb
+++ b/spec/models/ci/ref_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::Ref do
+RSpec.describe Ci::Ref, feature_category: :continuous_integration do
using RSpec::Parameterized::TableSyntax
it { is_expected.to belong_to(:project) }
@@ -10,13 +10,13 @@ RSpec.describe Ci::Ref do
describe 'state machine transitions' do
context 'unlock artifacts transition' do
let(:ci_ref) { create(:ci_ref) }
- let(:unlock_artifacts_worker_spy) { class_spy(::Ci::PipelineSuccessUnlockArtifactsWorker) }
+ let(:unlock_previous_pipelines_worker_spy) { class_spy(::Ci::Refs::UnlockPreviousPipelinesWorker) }
before do
- stub_const('Ci::PipelineSuccessUnlockArtifactsWorker', unlock_artifacts_worker_spy)
+ stub_const('Ci::Refs::UnlockPreviousPipelinesWorker', unlock_previous_pipelines_worker_spy)
end
- context 'pipline is locked' do
+ context 'pipeline is locked' do
let!(:pipeline) { create(:ci_pipeline, ci_ref_id: ci_ref.id, locked: :artifacts_locked) }
where(:initial_state, :action, :count) do
@@ -41,10 +41,10 @@ RSpec.describe Ci::Ref do
ci_ref.update!(status: status_value)
end
- it 'calls unlock artifacts service' do
+ it 'calls pipeline complete unlock artifacts service' do
ci_ref.send(action)
- expect(unlock_artifacts_worker_spy).to have_received(:perform_async).exactly(count).times
+ expect(unlock_previous_pipelines_worker_spy).to have_received(:perform_async).exactly(count).times
end
end
end
@@ -53,10 +53,10 @@ RSpec.describe Ci::Ref do
context 'pipeline is unlocked' do
let!(:pipeline) { create(:ci_pipeline, ci_ref_id: ci_ref.id, locked: :unlocked) }
- it 'does not call unlock artifacts service' do
+ it 'does not unlock pipelines' do
ci_ref.succeed!
- expect(unlock_artifacts_worker_spy).not_to have_received(:perform_async)
+ expect(unlock_previous_pipelines_worker_spy).not_to have_received(:perform_async)
end
end
end
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index a8e9d36a3a7..3a3ef072b28 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -309,19 +309,7 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
end
end
- context 'when use_traversal_ids* are enabled' do
- it_behaves_like '.belonging_to_parent_groups_of_project'
- end
-
- context 'when use_traversal_ids* are disabled' do
- before do
- stub_feature_flags(
- use_traversal_ids: false
- )
- end
-
- it_behaves_like '.belonging_to_parent_groups_of_project'
- end
+ it_behaves_like '.belonging_to_parent_groups_of_project'
context 'with instance runners sharing enabled' do
# group specific
diff --git a/spec/models/ci/unlock_pipeline_request_spec.rb b/spec/models/ci/unlock_pipeline_request_spec.rb
new file mode 100644
index 00000000000..ddfc6210349
--- /dev/null
+++ b/spec/models/ci/unlock_pipeline_request_spec.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::UnlockPipelineRequest, :unlock_pipelines, :clean_gitlab_redis_shared_state, feature_category: :build_artifacts do
+ describe '.enqueue' do
+ let(:pipeline_id) { 123 }
+
+ subject(:enqueue) { described_class.enqueue(pipeline_id) }
+
+ it 'creates a redis entry for the given pipeline ID and returns the number of added entries' do
+ freeze_time do
+ expect(described_class).to receive(:log_event).with(:enqueued, [pipeline_id])
+ expect { enqueue }
+ .to change { pipeline_ids_waiting_to_be_unlocked }
+ .from([])
+ .to([pipeline_id])
+
+ expect(enqueue).to eq(1)
+ expect_to_have_pending_unlock_pipeline_request(pipeline_id, Time.current.utc.to_i)
+ end
+ end
+
+ context 'when the pipeline ID is already in the queue' do
+ before do
+ travel_to(3.minutes.ago) do
+ described_class.enqueue(pipeline_id)
+ end
+ end
+
+ it 'does not create another redis entry for the same pipeline ID nor update it' do
+ expect(described_class).not_to receive(:log_event)
+
+ expect { enqueue }
+ .to not_change { pipeline_ids_waiting_to_be_unlocked }
+ .and not_change { timestamp_of_pending_unlock_pipeline_request(pipeline_id) }
+
+ expect(enqueue).to eq(0)
+ end
+ end
+
+ context 'when given an array of pipeline IDs' do
+ let(:pipeline_ids) { [1, 2, 1] }
+
+ subject(:enqueue) { described_class.enqueue(pipeline_ids) }
+
+ it 'creates a redis entry for each unique pipeline ID' do
+ freeze_time do
+ expect(described_class).to receive(:log_event).with(:enqueued, pipeline_ids.uniq)
+ expect { enqueue }
+ .to change { pipeline_ids_waiting_to_be_unlocked }
+ .from([])
+ .to([1, 2])
+
+ expect(enqueue).to eq(2)
+
+ unix_timestamp = Time.current.utc.to_i
+ expect_to_have_pending_unlock_pipeline_request(1, unix_timestamp)
+ expect_to_have_pending_unlock_pipeline_request(2, unix_timestamp)
+ end
+ end
+ end
+ end
+
+ describe '.next!' do
+ subject(:next_result) { described_class.next! }
+
+ context 'when there are pending pipeline IDs' do
+ it 'pops and returns the oldest pipeline ID from the queue (FIFO)' do
+ expected_enqueue_time = nil
+ expected_pipeline_id = 1
+ travel_to(3.minutes.ago) do
+ expected_enqueue_time = Time.current.utc.to_i
+ described_class.enqueue(expected_pipeline_id)
+ end
+
+ travel_to(2.minutes.ago) { described_class.enqueue(2) }
+ travel_to(1.minute.ago) { described_class.enqueue(3) }
+
+ expect(described_class).to receive(:log_event).with(:picked_next, 1)
+
+ expect { next_result }
+ .to change { pipeline_ids_waiting_to_be_unlocked }
+ .from([1, 2, 3])
+ .to([2, 3])
+
+ pipeline_id, enqueue_timestamp = next_result
+
+ expect(pipeline_id).to eq(expected_pipeline_id)
+ expect(enqueue_timestamp).to eq(expected_enqueue_time)
+ end
+ end
+
+ context 'when the queue is empty' do
+ it 'does nothing' do
+ expect(described_class).not_to receive(:log_event)
+ expect(next_result).to be_nil
+ end
+ end
+ end
+
+ describe '.total_pending' do
+ subject { described_class.total_pending }
+
+ before do
+ described_class.enqueue(1)
+ described_class.enqueue(2)
+ described_class.enqueue(3)
+ end
+
+ it { is_expected.to eq(3) }
+ end
+end
diff --git a/spec/models/clusters/agent_token_spec.rb b/spec/models/clusters/agent_token_spec.rb
index bc158fc9117..5f731336b4b 100644
--- a/spec/models/clusters/agent_token_spec.rb
+++ b/spec/models/clusters/agent_token_spec.rb
@@ -95,6 +95,15 @@ RSpec.describe Clusters::AgentToken, feature_category: :deployment_management do
expect(agent_token.token).to start_with described_class::TOKEN_PREFIX
end
+
+ it 'is revoked on revoke!' do
+ agent_token = build(:cluster_agent_token, token_encrypted: nil)
+ agent_token.save!
+
+ agent_token.revoke!
+
+ expect(agent_token.active?).to be_falsey
+ end
end
describe '#to_ability_name' do
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index 7dafec2536f..5fc5bbd41ff 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -26,7 +26,6 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching,
it { is_expected.to have_many(:kubernetes_namespaces) }
it { is_expected.to have_one(:cluster_project) }
it { is_expected.to have_many(:deployment_clusters) }
- it { is_expected.to have_many(:successful_deployments) }
it { is_expected.to have_many(:environments).through(:deployments) }
it { is_expected.to delegate_method(:status).to(:provider) }
diff --git a/spec/models/concerns/integrations/enable_ssl_verification_spec.rb b/spec/models/concerns/integrations/enable_ssl_verification_spec.rb
index 418f3f4dbc6..c9a9d33631b 100644
--- a/spec/models/concerns/integrations/enable_ssl_verification_spec.rb
+++ b/spec/models/concerns/integrations/enable_ssl_verification_spec.rb
@@ -2,18 +2,14 @@
require 'spec_helper'
-RSpec.describe Integrations::EnableSslVerification do
+RSpec.describe Integrations::EnableSslVerification, feature_category: :integrations do
let(:described_class) do
Class.new(Integration) do
prepend Integrations::EnableSslVerification
- def fields
- [
- { name: 'main_url' },
- { name: 'other_url' },
- { name: 'username' }
- ]
- end
+ field :main_url
+ field :other_url
+ field :username
end
end
diff --git a/spec/models/concerns/integrations/has_web_hook_spec.rb b/spec/models/concerns/integrations/has_web_hook_spec.rb
index 9061cb90f90..69617b29f12 100644
--- a/spec/models/concerns/integrations/has_web_hook_spec.rb
+++ b/spec/models/concerns/integrations/has_web_hook_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Integrations::HasWebHook do
+RSpec.describe Integrations::HasWebHook, feature_category: :webhooks do
let(:integration_class) do
Class.new(Integration) do
include Integrations::HasWebHook
@@ -21,7 +21,7 @@ RSpec.describe Integrations::HasWebHook do
end
context 'when integration responds to enable_ssl_verification' do
- let(:integration) { build(:drone_ci_integration) }
+ let(:integration) { build(:drone_ci_integration, enable_ssl_verification: true) }
it { expect(integration.hook_ssl_verification).to eq true }
end
diff --git a/spec/models/concerns/noteable_spec.rb b/spec/models/concerns/noteable_spec.rb
index dd180749e94..82c63eea33a 100644
--- a/spec/models/concerns/noteable_spec.rb
+++ b/spec/models/concerns/noteable_spec.rb
@@ -493,4 +493,24 @@ RSpec.describe Noteable, feature_category: :code_review_workflow do
end
end
end
+
+ describe '#supports_resolvable_notes' do
+ context 'when noteable is an abuse report' do
+ let(:abuse_report) { build(:abuse_report) }
+
+ it 'returns true' do
+ expect(abuse_report.supports_resolvable_notes?).to be(true)
+ end
+ end
+ end
+
+ describe '#supports_replying_to_individual_notes' do
+ context 'when noteable is an abuse report' do
+ let(:abuse_report) { build(:abuse_report) }
+
+ it 'returns true' do
+ expect(abuse_report.supports_replying_to_individual_notes?).to be(true)
+ end
+ end
+ end
end
diff --git a/spec/models/concerns/prometheus_adapter_spec.rb b/spec/models/concerns/prometheus_adapter_spec.rb
index a3f2e99f3da..d17059ccc6d 100644
--- a/spec/models/concerns/prometheus_adapter_spec.rb
+++ b/spec/models/concerns/prometheus_adapter_spec.rb
@@ -15,144 +15,6 @@ RSpec.describe PrometheusAdapter, :use_clean_rails_memory_store_caching do
end
end
- let(:environment_query) { Gitlab::Prometheus::Queries::EnvironmentQuery }
-
- describe '#query' do
- describe 'validate_query' do
- let(:environment) { build_stubbed(:environment, slug: 'env-slug') }
- let(:validation_query) { Gitlab::Prometheus::Queries::ValidateQuery.name }
- let(:query) { 'avg(response)' }
- let(:validation_respone) { { data: { valid: true } } }
-
- around do |example|
- freeze_time { example.run }
- end
-
- context 'with valid data' do
- subject { integration.query(:validate, query) }
-
- before do
- stub_reactive_cache(integration, validation_respone, validation_query, query)
- end
-
- it 'returns query data' do
- is_expected.to eq(query: { valid: true })
- end
- end
- end
-
- describe 'environment' do
- let(:environment) { build_stubbed(:environment, slug: 'env-slug') }
-
- around do |example|
- freeze_time { example.run }
- end
-
- context 'with valid data' do
- subject { integration.query(:environment, environment) }
-
- before do
- stub_reactive_cache(integration, prometheus_data, environment_query, environment.id)
- end
-
- it 'returns reactive data' do
- is_expected.to eq(prometheus_metrics_data)
- end
- end
- end
-
- describe 'matched_metrics' do
- let(:matched_metrics_query) { Gitlab::Prometheus::Queries::MatchedMetricQuery }
- let(:prometheus_client) { double(:prometheus_client, label_values: nil) }
-
- context 'with valid data' do
- subject { integration.query(:matched_metrics) }
-
- before do
- allow(integration).to receive(:prometheus_client).and_return(prometheus_client)
- synchronous_reactive_cache(integration)
- end
-
- it 'returns reactive data' do
- expect(subject[:success]).to be_truthy
- expect(subject[:data]).to eq([])
- end
- end
- end
-
- describe 'deployment' do
- let(:deployment) { build_stubbed(:deployment) }
- let(:deployment_query) { Gitlab::Prometheus::Queries::DeploymentQuery }
-
- around do |example|
- freeze_time { example.run }
- end
-
- context 'with valid data' do
- subject { integration.query(:deployment, deployment) }
-
- before do
- stub_reactive_cache(integration, prometheus_data, deployment_query, deployment.id)
- end
-
- it 'returns reactive data' do
- expect(subject).to eq(prometheus_metrics_data)
- end
- end
- end
- end
-
- describe '#calculate_reactive_cache' do
- let(:environment) { create(:environment, slug: 'env-slug') }
-
- before do
- integration.manual_configuration = true
- integration.active = true
- end
-
- subject do
- integration.calculate_reactive_cache(environment_query.name, environment.id)
- end
-
- around do |example|
- freeze_time { example.run }
- end
-
- context 'when integration is inactive' do
- before do
- integration.active = false
- end
-
- it { is_expected.to be_nil }
- end
-
- context 'when Prometheus responds with valid data' do
- before do
- stub_all_prometheus_requests(environment.slug)
- end
-
- it { expect(subject.to_json).to eq(prometheus_data.to_json) }
- end
-
- [404, 500].each do |status|
- context "when Prometheus responds with #{status}" do
- before do
- stub_all_prometheus_requests(environment.slug, status: status, body: "QUERY FAILED!")
- end
-
- it { is_expected.to eq(success: false, result: %(#{status} - "QUERY FAILED!")) }
- end
- end
-
- context "when client raises Gitlab::PrometheusClient::ConnectionError" do
- before do
- stub_any_prometheus_request.to_raise(Gitlab::PrometheusClient::ConnectionError)
- end
-
- it { is_expected.to include(success: false, result: kind_of(String)) }
- end
- end
-
describe '#build_query_args' do
subject { integration.build_query_args(*args) }
diff --git a/spec/models/concerns/reset_on_column_errors_spec.rb b/spec/models/concerns/reset_on_column_errors_spec.rb
new file mode 100644
index 00000000000..38ba0f447f5
--- /dev/null
+++ b/spec/models/concerns/reset_on_column_errors_spec.rb
@@ -0,0 +1,243 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ResetOnColumnErrors, :delete, feature_category: :shared do
+ let(:test_reviewer_model) do
+ Class.new(ApplicationRecord) do
+ self.table_name = '_test_reviewers_table'
+
+ def self.name
+ 'TestReviewer'
+ end
+ end
+ end
+
+ let(:test_attribute_reviewer_model) do
+ Class.new(ApplicationRecord) do
+ self.table_name = '_test_attribute_reviewers_table'
+
+ belongs_to :test_attribute, class_name: 'TestAttribute'
+ belongs_to :test_reviewer, class_name: 'TestReviewer'
+
+ def self.name
+ 'TestAttributeReviewer'
+ end
+ end
+ end
+
+ let(:test_attribute_model) do
+ Class.new(ApplicationRecord) do
+ include FromUnion
+
+ self.table_name = '_test_attribute_table'
+
+ has_many :attribute_reviewers, class_name: 'TestAttributeReviewer'
+ has_many :reviewers, class_name: 'TestReviewer', through: :attribute_reviewers, source: :test_reviewer
+
+ def self.name
+ 'TestAttribute'
+ end
+ end
+ end
+
+ before do
+ stub_const('TestReviewer', test_reviewer_model)
+ stub_const('TestAttributeReviewer', test_attribute_reviewer_model)
+ stub_const('TestAttribute', test_attribute_model)
+ end
+
+ before(:context) do
+ ApplicationRecord.connection.execute(<<~SQL)
+ CREATE TABLE _test_attribute_table (
+ id serial NOT NULL PRIMARY KEY,
+ created_at timestamptz NOT NULL
+ );
+
+ CREATE TABLE _test_attribute_reviewers_table (
+ test_attribute_id bigint,
+ test_reviewer_id bigint
+ );
+
+ CREATE TABLE _test_reviewers_table (
+ id serial NOT NULL PRIMARY KEY,
+ created_at timestamptz NOT NULL
+ );
+
+ CREATE UNIQUE INDEX index_test_attribute_reviewers_table_unique
+ ON _test_attribute_reviewers_table
+ USING btree (test_attribute_id, test_reviewer_id);
+ SQL
+ end
+
+ after(:context) do
+ ApplicationRecord.connection.execute(<<~SQL)
+ DROP TABLE _test_attribute_table;
+ DROP TABLE _test_attribute_reviewers_table;
+ DROP TABLE _test_reviewers_table;
+ SQL
+ end
+
+ describe 'resetting on union errors' do
+ let(:expected_error_message) { /must have the same number of columns/ }
+
+ def load_query
+ scopes = [
+ TestAttribute.select('*'),
+ TestAttribute.select(TestAttribute.column_names.join(','))
+ ]
+
+ TestAttribute.from_union(scopes).load
+ end
+
+ context 'with mismatched columns due to schema cache' do
+ before do
+ load_query
+
+ ApplicationRecord.connection.execute(<<~SQL)
+ ALTER TABLE _test_attribute_table ADD COLUMN _test_new_column int;
+ SQL
+ end
+
+ after do
+ ApplicationRecord.connection.execute(<<~SQL)
+ ALTER TABLE _test_attribute_table DROP COLUMN _test_new_column;
+ SQL
+
+ TestAttribute.reset_column_information
+ end
+
+ it 'resets column information when encountering an UNION error' do
+ expect do
+ load_query
+ end.to raise_error(ActiveRecord::StatementInvalid, expected_error_message)
+ .and change { TestAttribute.column_names }
+ .from(%w[id created_at]).to(%w[id created_at _test_new_column])
+
+ # Subsequent query load from new schema cache, so no more error
+ expect do
+ load_query
+ end.not_to raise_error
+ end
+
+ it 'logs when column is reset' do
+ expect(Gitlab::ErrorTracking::Logger).to receive(:error)
+ .with(hash_including("extra.reset_model_name" => "TestAttribute"))
+ .and_call_original
+
+ expect do
+ load_query
+ end.to raise_error(ActiveRecord::StatementInvalid, expected_error_message)
+ end
+ end
+
+ context 'with mismatched columns due to coding error' do
+ def load_mismatched_query
+ scopes = [
+ TestAttribute.select("id"),
+ TestAttribute.select("id, created_at")
+ ]
+
+ TestAttribute.from_union(scopes).load
+ end
+
+ it 'limits reset_column_information calls' do
+ expect(TestAttribute).to receive(:reset_column_information).and_call_original
+
+ expect do
+ load_mismatched_query
+ end.to raise_error(ActiveRecord::StatementInvalid, expected_error_message)
+
+ expect(TestAttribute).not_to receive(:reset_column_information)
+
+ expect do
+ load_mismatched_query
+ end.to raise_error(ActiveRecord::StatementInvalid, expected_error_message)
+ end
+
+ it 'does reset_column_information after some time has passed' do
+ expect do
+ load_mismatched_query
+ end.to raise_error(ActiveRecord::StatementInvalid, expected_error_message)
+
+ travel_to(described_class::MAX_RESET_PERIOD.from_now + 1.minute)
+ expect(TestAttribute).to receive(:reset_column_information).and_call_original
+
+ expect do
+ load_mismatched_query
+ end.to raise_error(ActiveRecord::StatementInvalid, expected_error_message)
+ end
+ end
+
+ it 'handles ActiveRecord::StatementInvalid on the instance level' do
+ t = TestAttribute.create!
+ reviewer = TestReviewer.create!
+
+ expect do
+ t.assign_attributes(reviewer_ids: [reviewer.id, reviewer.id])
+ end.to raise_error(ActiveRecord::RecordNotUnique)
+ end
+ end
+
+ describe 'resetting on missing column error on save' do
+ let(:expected_error_message) { /unknown attribute '_test_new_column'/ }
+
+ context 'with mismatched columns due to schema cache' do
+ let!(:attrs) { TestAttribute.new.attributes }
+
+ def initialize_with_new_column
+ TestAttribute.new(attrs.merge(_test_new_column: 123))
+ end
+
+ before do
+ ApplicationRecord.connection.execute(<<~SQL)
+ ALTER TABLE _test_attribute_table ADD COLUMN _test_new_column int;
+ SQL
+ end
+
+ after do
+ ApplicationRecord.connection.execute(<<~SQL)
+ ALTER TABLE _test_attribute_table DROP COLUMN _test_new_column;
+ SQL
+
+ TestAttribute.reset_column_information
+ end
+
+ it 'resets column information when encountering an UnknownAttributeError' do
+ expect do
+ initialize_with_new_column
+ end.to raise_error(ActiveModel::UnknownAttributeError, expected_error_message)
+ .and change { TestAttribute.column_names }
+ .from(%w[id created_at]).to(%w[id created_at _test_new_column])
+
+ # Subsequent query load from new schema cache, so no more error
+ expect do
+ initialize_with_new_column
+ end.not_to raise_error
+ end
+
+ it 'logs when column is reset' do
+ expect(Gitlab::ErrorTracking::Logger).to receive(:error)
+ .with(hash_including("extra.reset_model_name" => "TestAttribute"))
+ .and_call_original
+
+ expect do
+ initialize_with_new_column
+ end.to raise_error(ActiveModel::UnknownAttributeError, expected_error_message)
+ end
+
+ context 'when reset_column_information_on_statement_invalid FF is disabled' do
+ before do
+ stub_feature_flags(reset_column_information_on_statement_invalid: false)
+ end
+
+ it 'does not reset column information' do
+ expect do
+ initialize_with_new_column
+ end.to raise_error(ActiveModel::UnknownAttributeError, expected_error_message)
+ .and not_change { TestAttribute.column_names }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/concerns/reset_on_union_error_spec.rb b/spec/models/concerns/reset_on_union_error_spec.rb
deleted file mode 100644
index 70993b92c90..00000000000
--- a/spec/models/concerns/reset_on_union_error_spec.rb
+++ /dev/null
@@ -1,132 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ResetOnUnionError, :delete, feature_category: :shared do
- let(:test_unioned_model) do
- Class.new(ApplicationRecord) do
- include FromUnion
-
- self.table_name = '_test_unioned_model'
-
- def self.name
- 'TestUnion'
- end
- end
- end
-
- before(:context) do
- ApplicationRecord.connection.execute(<<~SQL)
- CREATE TABLE _test_unioned_model (
- id serial NOT NULL PRIMARY KEY,
- created_at timestamptz NOT NULL
- );
- SQL
- end
-
- after(:context) do
- ApplicationRecord.connection.execute(<<~SQL)
- DROP TABLE _test_unioned_model
- SQL
- end
-
- context 'with mismatched columns due to schema cache' do
- def load_query
- scopes = [
- test_unioned_model.select('*'),
- test_unioned_model.select(test_unioned_model.column_names.join(','))
- ]
-
- test_unioned_model.from_union(scopes).load
- end
-
- before do
- load_query
-
- ApplicationRecord.connection.execute(<<~SQL)
- ALTER TABLE _test_unioned_model ADD COLUMN _test_new_column int;
- SQL
- end
-
- after do
- ApplicationRecord.connection.execute(<<~SQL)
- ALTER TABLE _test_unioned_model DROP COLUMN _test_new_column;
- SQL
-
- test_unioned_model.reset_column_information
- end
-
- it 'resets column information when encountering an UNION error' do
- expect do
- load_query
- end.to raise_error(ActiveRecord::StatementInvalid, /must have the same number of columns/)
- .and change { test_unioned_model.column_names }.from(%w[id created_at]).to(%w[id created_at _test_new_column])
-
- # Subsequent query load from new schema cache, so no more error
- expect do
- load_query
- end.not_to raise_error
- end
-
- it 'logs when column is reset' do
- expect(Gitlab::ErrorTracking::Logger).to receive(:error)
- .with(hash_including("extra.reset_model_name" => "TestUnion"))
- .and_call_original
-
- expect do
- load_query
- end.to raise_error(ActiveRecord::StatementInvalid, /must have the same number of columns/)
- end
-
- context 'when reset_column_information_on_statement_invalid FF is disabled' do
- before do
- stub_feature_flags(reset_column_information_on_statement_invalid: false)
- end
-
- it 'does not reset column information' do
- expect do
- load_query
- end.to raise_error(ActiveRecord::StatementInvalid, /must have the same number of columns/)
- .and not_change { test_unioned_model.column_names }
- end
- end
- end
-
- context 'with mismatched columns due to coding error' do
- def load_mismatched_query
- scopes = [
- test_unioned_model.select("id"),
- test_unioned_model.select("id, created_at")
- ]
-
- test_unioned_model.from_union(scopes).load
- end
-
- it 'limits reset_column_information calls' do
- expect(test_unioned_model).to receive(:reset_column_information).and_call_original
-
- expect do
- load_mismatched_query
- end.to raise_error(ActiveRecord::StatementInvalid, /must have the same number of columns/)
-
- expect(test_unioned_model).not_to receive(:reset_column_information)
-
- expect do
- load_mismatched_query
- end.to raise_error(ActiveRecord::StatementInvalid, /must have the same number of columns/)
- end
-
- it 'does reset_column_information after some time has passed' do
- expect do
- load_mismatched_query
- end.to raise_error(ActiveRecord::StatementInvalid, /must have the same number of columns/)
-
- travel_to(described_class::MAX_RESET_PERIOD.from_now + 1.minute)
- expect(test_unioned_model).to receive(:reset_column_information).and_call_original
-
- expect do
- load_mismatched_query
- end.to raise_error(ActiveRecord::StatementInvalid, /must have the same number of columns/)
- end
- end
-end
diff --git a/spec/models/concerns/routable_spec.rb b/spec/models/concerns/routable_spec.rb
index 2b6f8535743..7e324812b97 100644
--- a/spec/models/concerns/routable_spec.rb
+++ b/spec/models/concerns/routable_spec.rb
@@ -3,18 +3,12 @@
require 'spec_helper'
RSpec.shared_examples 'routable resource' do
- shared_examples_for '.find_by_full_path' do
+ shared_examples_for '.find_by_full_path' do |has_cross_join: false|
it 'finds records by their full path' do
expect(described_class.find_by_full_path(record.full_path)).to eq(record)
expect(described_class.find_by_full_path(record.full_path.upcase)).to eq(record)
end
- it 'checks if `optimize_routable` is enabled only once' do
- expect(Routable).to receive(:optimize_routable_enabled?).once
-
- described_class.find_by_full_path(record.full_path)
- end
-
it 'returns nil for unknown paths' do
expect(described_class.find_by_full_path('unknown')).to be_nil
end
@@ -51,27 +45,23 @@ RSpec.shared_examples 'routable resource' do
end
end
end
- end
-
- it_behaves_like '.find_by_full_path', :aggregate_failures
-
- context 'when the `optimize_routable` feature flag is turned OFF' do
- before do
- stub_feature_flags(optimize_routable: false)
- end
- it_behaves_like '.find_by_full_path', :aggregate_failures
+ if has_cross_join
+ it 'has a cross-join' do
+ expect(Gitlab::Database).to receive(:allow_cross_joins_across_databases)
- it 'includes route information when loading a record' do
- control_count = ActiveRecord::QueryRecorder.new do
described_class.find_by_full_path(record.full_path)
- end.count
+ end
+ else
+ it 'does not have cross-join' do
+ expect(Gitlab::Database).not_to receive(:allow_cross_joins_across_databases)
- expect do
- described_class.find_by_full_path(record.full_path).route
- end.not_to exceed_all_query_limit(control_count)
+ described_class.find_by_full_path(record.full_path)
+ end
end
end
+
+ it_behaves_like '.find_by_full_path', :aggregate_failures
end
RSpec.shared_examples 'routable resource with parent' do
@@ -274,22 +264,6 @@ RSpec.describe Namespaces::ProjectNamespace, 'Routable', :with_clean_rails_cache
end
end
-RSpec.describe Routable, feature_category: :groups_and_projects do
- describe '.optimize_routable_enabled?' do
- subject { described_class.optimize_routable_enabled? }
-
- it { is_expected.to eq(true) }
-
- context 'when the `optimize_routable` feature flag is turned OFF' do
- before do
- stub_feature_flags(optimize_routable: false)
- end
-
- it { is_expected.to eq(false) }
- end
- end
-end
-
def forcibly_hit_cached_lookup(record, method)
stub_feature_flags(cached_route_lookups: true)
expect(record).to receive(:persisted?).and_return(true)
diff --git a/spec/models/container_expiration_policy_spec.rb b/spec/models/container_expiration_policy_spec.rb
index e5f9fdd410e..1e911af5670 100644
--- a/spec/models/container_expiration_policy_spec.rb
+++ b/spec/models/container_expiration_policy_spec.rb
@@ -11,8 +11,7 @@ RSpec.describe ContainerExpirationPolicy, type: :model do
it { is_expected.to validate_presence_of(:project) }
describe '#enabled' do
- it { is_expected.to allow_value(true, false).for(:enabled) }
- it { is_expected.not_to allow_value(nil).for(:enabled) }
+ it { is_expected.to validate_inclusion_of(:enabled).in_array([true, false]) }
end
describe '#cadence' do
diff --git a/spec/models/container_registry/protection/rule_spec.rb b/spec/models/container_registry/protection/rule_spec.rb
new file mode 100644
index 00000000000..9f162736efd
--- /dev/null
+++ b/spec/models/container_registry/protection/rule_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ContainerRegistry::Protection::Rule, type: :model, feature_category: :container_registry do
+ it_behaves_like 'having unique enum values'
+
+ describe 'relationships' do
+ it { is_expected.to belong_to(:project).inverse_of(:container_registry_protection_rules) }
+ end
+
+ describe 'enums' do
+ it {
+ is_expected.to(
+ define_enum_for(:push_protected_up_to_access_level)
+ .with_values(
+ developer: Gitlab::Access::DEVELOPER,
+ maintainer: Gitlab::Access::MAINTAINER,
+ owner: Gitlab::Access::OWNER
+ )
+ .with_prefix(:push_protected_up_to)
+ )
+ }
+
+ it {
+ is_expected.to(
+ define_enum_for(:delete_protected_up_to_access_level)
+ .with_values(
+ developer: Gitlab::Access::DEVELOPER,
+ maintainer: Gitlab::Access::MAINTAINER,
+ owner: Gitlab::Access::OWNER
+ )
+ .with_prefix(:delete_protected_up_to)
+ )
+ }
+ end
+
+ describe 'validations' do
+ subject { build(:container_registry_protection_rule) }
+
+ describe '#container_path_pattern' do
+ it { is_expected.to validate_presence_of(:container_path_pattern) }
+ it { is_expected.to validate_length_of(:container_path_pattern).is_at_most(255) }
+ end
+
+ describe '#delete_protected_up_to_access_level' do
+ it { is_expected.to validate_presence_of(:delete_protected_up_to_access_level) }
+ end
+
+ describe '#push_protected_up_to_access_level' do
+ it { is_expected.to validate_presence_of(:push_protected_up_to_access_level) }
+ end
+ end
+end
diff --git a/spec/models/dependency_proxy/image_ttl_group_policy_spec.rb b/spec/models/dependency_proxy/image_ttl_group_policy_spec.rb
index a58e8df45e4..203f477c1a0 100644
--- a/spec/models/dependency_proxy/image_ttl_group_policy_spec.rb
+++ b/spec/models/dependency_proxy/image_ttl_group_policy_spec.rb
@@ -11,8 +11,7 @@ RSpec.describe DependencyProxy::ImageTtlGroupPolicy, type: :model do
it { is_expected.to validate_presence_of(:group) }
describe '#enabled' do
- it { is_expected.to allow_value(true, false).for(:enabled) }
- it { is_expected.not_to allow_value(nil).for(:enabled) }
+ it { is_expected.to validate_inclusion_of(:enabled).in_array([true, false]) }
end
describe '#ttl' do
diff --git a/spec/models/discussion_note_spec.rb b/spec/models/discussion_note_spec.rb
index 6e1b39cc438..09adf4a95b5 100644
--- a/spec/models/discussion_note_spec.rb
+++ b/spec/models/discussion_note_spec.rb
@@ -8,4 +8,12 @@ RSpec.describe DiscussionNote do
it { is_expected.to eq('note') }
end
+
+ describe 'validations' do
+ context 'when noteable is an abuse report' do
+ subject { build(:discussion_note, noteable: build_stubbed(:abuse_report)) }
+
+ it { is_expected.to be_valid }
+ end
+ end
end
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 9d4699cb91e..dcfee7fcc8c 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -1516,42 +1516,6 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching, feature_categ
end
end
- describe '#metrics' do
- let_it_be(:project) { create(:project, :with_prometheus_integration) }
-
- subject { environment.metrics }
-
- context 'when the environment has metrics' do
- before do
- allow(environment).to receive(:has_metrics?).and_return(true)
- end
-
- it 'returns the metrics from the deployment service' do
- expect(environment.prometheus_adapter)
- .to receive(:query).with(:environment, environment)
- .and_return(:fake_metrics)
-
- is_expected.to eq(:fake_metrics)
- end
-
- context 'and the prometheus client is not present' do
- before do
- allow(environment.prometheus_adapter).to receive(:promethus_client).and_return(nil)
- end
-
- it { is_expected.to be_nil }
- end
- end
-
- context 'when the environment does not have metrics' do
- before do
- allow(environment).to receive(:has_metrics?).and_return(false)
- end
-
- it { is_expected.to be_nil }
- end
- end
-
describe '#additional_metrics' do
let_it_be(:project) { create(:project, :with_prometheus_integration) }
let(:metric_params) { [] }
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index ddeab16908d..96ef36a5b75 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -683,160 +683,126 @@ RSpec.describe Group, feature_category: :groups_and_projects do
context 'traversal queries' do
let_it_be(:group, reload: true) { create(:group, :nested) }
- context 'recursive' do
- before do
- stub_feature_flags(use_traversal_ids: false)
- end
-
- it_behaves_like 'namespace traversal'
+ it_behaves_like 'namespace traversal'
- describe '#self_and_descendants' do
- it { expect(group.self_and_descendants.to_sql).not_to include 'traversal_ids @>' }
- end
+ describe '#self_and_descendants' do
+ it { expect(group.self_and_descendants.to_sql).to include 'traversal_ids @>' }
+ end
- describe '#self_and_descendant_ids' do
- it { expect(group.self_and_descendant_ids.to_sql).not_to include 'traversal_ids @>' }
- end
+ describe '#self_and_descendant_ids' do
+ it { expect(group.self_and_descendant_ids.to_sql).to include 'traversal_ids @>' }
+ end
- describe '#descendants' do
- it { expect(group.descendants.to_sql).not_to include 'traversal_ids @>' }
- end
+ describe '#descendants' do
+ it { expect(group.descendants.to_sql).to include 'traversal_ids @>' }
+ end
- describe '#self_and_hierarchy' do
- it { expect(group.self_and_hierarchy.to_sql).not_to include 'traversal_ids @>' }
- end
+ describe '#self_and_hierarchy' do
+ it { expect(group.self_and_hierarchy.to_sql).to include 'traversal_ids @>' }
+ end
- describe '#ancestors' do
- it { expect(group.ancestors.to_sql).not_to include 'traversal_ids <@' }
- end
+ describe '#ancestors' do
+ it { expect(group.ancestors.to_sql).to include "\"namespaces\".\"id\" = #{group.parent_id}" }
- describe '.shortest_traversal_ids_prefixes' do
- it { expect { described_class.shortest_traversal_ids_prefixes }.to raise_error /Feature not supported since the `:use_traversal_ids` is disabled/ }
+ it 'hierarchy order' do
+ expect(group.ancestors(hierarchy_order: :asc).to_sql).to include 'ORDER BY "depth" ASC'
end
end
- context 'linear' do
- it_behaves_like 'namespace traversal'
+ describe '#ancestors_upto' do
+ it { expect(group.ancestors_upto.to_sql).to include "WITH ORDINALITY" }
+ end
- describe '#self_and_descendants' do
- it { expect(group.self_and_descendants.to_sql).to include 'traversal_ids @>' }
- end
+ describe '.shortest_traversal_ids_prefixes' do
+ subject { filter.shortest_traversal_ids_prefixes }
- describe '#self_and_descendant_ids' do
- it { expect(group.self_and_descendant_ids.to_sql).to include 'traversal_ids @>' }
- end
+ context 'for many top-level namespaces' do
+ let!(:top_level_groups) { create_list(:group, 4) }
- describe '#descendants' do
- it { expect(group.descendants.to_sql).to include 'traversal_ids @>' }
- end
+ context 'when querying all groups' do
+ let(:filter) { described_class.id_in(top_level_groups) }
- describe '#self_and_hierarchy' do
- it { expect(group.self_and_hierarchy.to_sql).to include 'traversal_ids @>' }
- end
+ it "returns all traversal_ids" do
+ is_expected.to contain_exactly(
+ *top_level_groups.map { |group| [group.id] }
+ )
+ end
+ end
- describe '#ancestors' do
- it { expect(group.ancestors.to_sql).to include "\"namespaces\".\"id\" = #{group.parent_id}" }
+ context 'when querying selected groups' do
+ let(:filter) { described_class.id_in(top_level_groups.first) }
- it 'hierarchy order' do
- expect(group.ancestors(hierarchy_order: :asc).to_sql).to include 'ORDER BY "depth" ASC'
+ it "returns only a selected traversal_ids" do
+ is_expected.to contain_exactly([top_level_groups.first.id])
+ end
end
end
- describe '#ancestors_upto' do
- it { expect(group.ancestors_upto.to_sql).to include "WITH ORDINALITY" }
- end
+ context 'for namespace hierarchy' do
+ let!(:group_a) { create(:group) }
+ let!(:group_a_sub_1) { create(:group, parent: group_a) }
+ let!(:group_a_sub_2) { create(:group, parent: group_a) }
+ let!(:group_b) { create(:group) }
+ let!(:group_b_sub_1) { create(:group, parent: group_b) }
+ let!(:group_c) { create(:group) }
- describe '.shortest_traversal_ids_prefixes' do
- subject { filter.shortest_traversal_ids_prefixes }
+ context 'when querying all groups' do
+ let(:filter) { described_class.id_in([group_a, group_a_sub_1, group_a_sub_2, group_b, group_b_sub_1, group_c]) }
- context 'for many top-level namespaces' do
- let!(:top_level_groups) { create_list(:group, 4) }
-
- context 'when querying all groups' do
- let(:filter) { described_class.id_in(top_level_groups) }
-
- it "returns all traversal_ids" do
- is_expected.to contain_exactly(
- *top_level_groups.map { |group| [group.id] }
- )
- end
- end
-
- context 'when querying selected groups' do
- let(:filter) { described_class.id_in(top_level_groups.first) }
-
- it "returns only a selected traversal_ids" do
- is_expected.to contain_exactly([top_level_groups.first.id])
- end
+ it 'returns only shortest prefixes of top-level groups' do
+ is_expected.to contain_exactly(
+ [group_a.id],
+ [group_b.id],
+ [group_c.id]
+ )
end
end
- context 'for namespace hierarchy' do
- let!(:group_a) { create(:group) }
- let!(:group_a_sub_1) { create(:group, parent: group_a) }
- let!(:group_a_sub_2) { create(:group, parent: group_a) }
- let!(:group_b) { create(:group) }
- let!(:group_b_sub_1) { create(:group, parent: group_b) }
- let!(:group_c) { create(:group) }
+ context 'when sub-group is reparented' do
+ let(:filter) { described_class.id_in([group_b_sub_1, group_c]) }
- context 'when querying all groups' do
- let(:filter) { described_class.id_in([group_a, group_a_sub_1, group_a_sub_2, group_b, group_b_sub_1, group_c]) }
-
- it 'returns only shortest prefixes of top-level groups' do
- is_expected.to contain_exactly(
- [group_a.id],
- [group_b.id],
- [group_c.id]
- )
- end
+ before do
+ group_b_sub_1.update!(parent: group_c)
end
- context 'when sub-group is reparented' do
- let(:filter) { described_class.id_in([group_b_sub_1, group_c]) }
-
- before do
- group_b_sub_1.update!(parent: group_c)
- end
-
- it 'returns a proper shortest prefix of a new group' do
- is_expected.to contain_exactly(
- [group_c.id]
- )
- end
+ it 'returns a proper shortest prefix of a new group' do
+ is_expected.to contain_exactly(
+ [group_c.id]
+ )
end
+ end
- context 'when querying sub-groups' do
- let(:filter) { described_class.id_in([group_a_sub_1, group_b_sub_1, group_c]) }
+ context 'when querying sub-groups' do
+ let(:filter) { described_class.id_in([group_a_sub_1, group_b_sub_1, group_c]) }
- it 'returns sub-groups as they are shortest prefixes' do
- is_expected.to contain_exactly(
- [group_a.id, group_a_sub_1.id],
- [group_b.id, group_b_sub_1.id],
- [group_c.id]
- )
- end
+ it 'returns sub-groups as they are shortest prefixes' do
+ is_expected.to contain_exactly(
+ [group_a.id, group_a_sub_1.id],
+ [group_b.id, group_b_sub_1.id],
+ [group_c.id]
+ )
end
+ end
- context 'when querying group and sub-group of this group' do
- let(:filter) { described_class.id_in([group_a, group_a_sub_1, group_c]) }
+ context 'when querying group and sub-group of this group' do
+ let(:filter) { described_class.id_in([group_a, group_a_sub_1, group_c]) }
- it 'returns parent groups as this contains all sub-groups' do
- is_expected.to contain_exactly(
- [group_a.id],
- [group_c.id]
- )
- end
+ it 'returns parent groups as this contains all sub-groups' do
+ is_expected.to contain_exactly(
+ [group_a.id],
+ [group_c.id]
+ )
end
end
end
+ end
- context 'when project namespace exists in the group' do
- let!(:project) { create(:project, group: group) }
- let!(:project_namespace) { project.project_namespace }
+ context 'when project namespace exists in the group' do
+ let!(:project) { create(:project, group: group) }
+ let!(:project_namespace) { project.project_namespace }
- it 'filters out project namespace' do
- expect(group.descendants.find_by_id(project_namespace.id)).to be_nil
- end
+ it 'filters out project namespace' do
+ expect(group.descendants.find_by_id(project_namespace.id)).to be_nil
end
end
end
@@ -921,6 +887,143 @@ RSpec.describe Group, feature_category: :groups_and_projects do
end
end
+ describe '.sort_by_attribute' do
+ before do
+ group.destroy!
+ end
+
+ let!(:group_1) { create(:group, name: 'Y group') }
+ let!(:group_2) { create(:group, name: 'J group', created_at: 2.days.ago, updated_at: 1.day.ago) }
+ let!(:group_3) { create(:group, name: 'A group') }
+ let!(:group_4) { create(:group, name: 'F group', created_at: 1.day.ago, updated_at: 1.day.ago) }
+
+ subject { described_class.with_statistics.with_route.sort_by_attribute(sort) }
+
+ context 'when sort by is not provided (id desc by default)' do
+ let(:sort) { nil }
+
+ it { is_expected.to eq([group_1, group_2, group_3, group_4]) }
+ end
+
+ context 'when sort by name_asc' do
+ let(:sort) { 'name_asc' }
+
+ it { is_expected.to eq([group_3, group_4, group_2, group_1]) }
+ end
+
+ context 'when sort by name_desc' do
+ let(:sort) { 'name_desc' }
+
+ it { is_expected.to eq([group_1, group_2, group_4, group_3]) }
+ end
+
+ context 'when sort by recently_created' do
+ let(:sort) { 'created_desc' }
+
+ it { is_expected.to eq([group_3, group_1, group_4, group_2]) }
+ end
+
+ context 'when sort by oldest_created' do
+ let(:sort) { 'created_asc' }
+
+ it { is_expected.to eq([group_2, group_4, group_1, group_3]) }
+ end
+
+ context 'when sort by latest_activity' do
+ let(:sort) { 'latest_activity_desc' }
+
+ it { is_expected.to eq([group_1, group_2, group_3, group_4]) }
+ end
+
+ context 'when sort by oldest_activity' do
+ let(:sort) { 'latest_activity_asc' }
+
+ it { is_expected.to eq([group_1, group_2, group_3, group_4]) }
+ end
+
+ context 'when sort by storage_size_desc' do
+ let!(:project_1) do
+ create(:project,
+ namespace: group_1,
+ statistics: build(
+ :project_statistics,
+ namespace: group_1,
+ repository_size: 2178370,
+ storage_size: 1278370,
+ wiki_size: 505,
+ lfs_objects_size: 202,
+ build_artifacts_size: 303,
+ pipeline_artifacts_size: 707,
+ packages_size: 404,
+ snippets_size: 605,
+ uploads_size: 808
+ )
+ )
+ end
+
+ let!(:project_2) do
+ create(:project,
+ namespace: group_2,
+ statistics: build(
+ :project_statistics,
+ namespace: group_2,
+ repository_size: 3178370,
+ storage_size: 3178370,
+ wiki_size: 505,
+ lfs_objects_size: 202,
+ build_artifacts_size: 303,
+ pipeline_artifacts_size: 707,
+ packages_size: 404,
+ snippets_size: 605,
+ uploads_size: 808
+ )
+ )
+ end
+
+ let!(:project_3) do
+ create(:project,
+ namespace: group_3,
+ statistics: build(
+ :project_statistics,
+ namespace: group_3,
+ repository_size: 1278370,
+ storage_size: 1178370,
+ wiki_size: 505,
+ lfs_objects_size: 202,
+ build_artifacts_size: 303,
+ pipeline_artifacts_size: 707,
+ packages_size: 404,
+ snippets_size: 605,
+ uploads_size: 808
+ )
+ )
+ end
+
+ let!(:project_4) do
+ create(:project,
+ namespace: group_4,
+ statistics: build(
+ :project_statistics,
+ namespace: group_4,
+ repository_size: 2178370,
+ storage_size: 2278370,
+ wiki_size: 505,
+ lfs_objects_size: 202,
+ build_artifacts_size: 303,
+ pipeline_artifacts_size: 707,
+ packages_size: 404,
+ snippets_size: 605,
+ uploads_size: 808
+ )
+ )
+ end
+
+ let(:sort) { 'storage_size_desc' }
+
+ it { is_expected.to eq([group_2, group_4, group_1, group_3]) }
+ end
+ end
+
describe 'scopes' do
let_it_be(:private_group) { create(:group, :private) }
let_it_be(:internal_group) { create(:group, :internal) }
@@ -1152,21 +1255,6 @@ RSpec.describe Group, feature_category: :groups_and_projects do
expect(group.group_members.developers.map(&:user)).to include(user)
expect(group.group_members.guests.map(&:user)).not_to include(user)
end
-
- context 'when `tasks_to_be_done` and `tasks_project_id` are passed' do
- let!(:project) { create(:project, group: group) }
-
- before do
- group.add_members([create(:user)], :developer, tasks_to_be_done: %w(ci code), tasks_project_id: project.id)
- end
-
- it 'creates a member_task with the correct attributes', :aggregate_failures do
- member = group.group_members.last
-
- expect(member.tasks_to_be_done).to match_array([:ci, :code])
- expect(member.member_task.project).to eq(project)
- end
- end
end
describe '#avatar_type' do
@@ -1340,6 +1428,11 @@ RSpec.describe Group, feature_category: :groups_and_projects do
group.add_member(user, GroupMember::OWNER)
end
+ before do
+ # Add an invite to the group, which should be filtered out
+ create(:group_member, :invited, source: group)
+ end
+
it 'returns the member-owners' do
expect(group.member_owners_excluding_project_bots).to contain_exactly(member_owner)
end
@@ -1367,6 +1460,16 @@ RSpec.describe Group, feature_category: :groups_and_projects do
it 'returns only direct member-owners' do
expect(group.member_owners_excluding_project_bots).to contain_exactly(member_owner)
end
+
+ context 'when there is an invite in the linked group' do
+ before do
+ create(:group_member, :invited, source: subgroup)
+ end
+
+ it 'returns only direct member-owners' do
+ expect(group.member_owners_excluding_project_bots).to contain_exactly(member_owner)
+ end
+ end
end
end
@@ -1382,6 +1485,31 @@ RSpec.describe Group, feature_category: :groups_and_projects do
it 'returns member-owners including parents' do
expect(subgroup.member_owners_excluding_project_bots).to contain_exactly(member_owner, member_owner_2)
end
+
+ context 'with group sharing' do
+ let_it_be(:invited_group) { create(:group) }
+
+ let!(:invited_group_owner) { invited_group.add_member(user, GroupMember::OWNER) }
+
+ before do
+ create(:group_group_link, :owner, shared_group: subgroup, shared_with_group: invited_group)
+ end
+
+ it 'returns member-owners including parents, and member-owners of the invited group' do
+ expect(subgroup.member_owners_excluding_project_bots).to contain_exactly(member_owner, member_owner_2, invited_group_owner)
+ end
+
+ context 'when there is an invite in the linked group' do
+ before do
+ # Add an invite to this group, which should be filtered out
+ create(:group_member, :invited, source: invited_group)
+ end
+
+ it 'returns member-owners including parents, and member-owners of the invited group' do
+ expect(subgroup.member_owners_excluding_project_bots).to contain_exactly(member_owner, member_owner_2, invited_group_owner)
+ end
+ end
+ end
end
end
@@ -1561,6 +1689,14 @@ RSpec.describe Group, feature_category: :groups_and_projects do
it 'returns correct access level' do
expect(group.max_member_access_for_user(group_user)).to eq(Gitlab::Access::OWNER)
end
+
+ context 'when user is not active' do
+ let_it_be(:group_user) { create(:user, :deactivated) }
+
+ it 'returns NO_ACCESS' do
+ expect(group.max_member_access_for_user(group_user)).to eq(Gitlab::Access::NO_ACCESS)
+ end
+ end
end
context 'when user is nil' do
@@ -3320,13 +3456,6 @@ RSpec.describe Group, feature_category: :groups_and_projects do
end
end
- describe '#content_editor_on_issues_feature_flag_enabled?' do
- it_behaves_like 'checks self and root ancestor feature flag' do
- let(:feature_flag) { :content_editor_on_issues }
- let(:feature_flag_method) { :content_editor_on_issues_feature_flag_enabled? }
- end
- end
-
describe '#work_items_feature_flag_enabled?' do
it_behaves_like 'checks self and root ancestor feature flag' do
let(:feature_flag) { :work_items }
diff --git a/spec/models/integration_spec.rb b/spec/models/integration_spec.rb
index 67e12092e1a..d7b69546de6 100644
--- a/spec/models/integration_spec.rb
+++ b/spec/models/integration_spec.rb
@@ -598,23 +598,7 @@ RSpec.describe Integration, feature_category: :integrations do
end
end
- context 'recursive' do
- before do
- stub_feature_flags(use_traversal_ids: false)
- end
-
- include_examples 'correct ancestor order'
- end
-
- context 'linear' do
- before do
- stub_feature_flags(use_traversal_ids: true)
-
- sub_subgroup.reload # make sure traversal_ids are reloaded
- end
-
- include_examples 'correct ancestor order'
- end
+ include_examples 'correct ancestor order'
end
end
end
@@ -1206,11 +1190,10 @@ RSpec.describe Integration, feature_category: :integrations do
end
end
- describe 'boolean_accessor' do
+ describe 'Checkbox field booleans' do
let(:klass) do
Class.new(Integration) do
- prop_accessor :test_value
- boolean_accessor :test_value
+ field :test_value, type: :checkbox
end
end
@@ -1284,24 +1267,6 @@ RSpec.describe Integration, feature_category: :integrations do
test_value?: be(false)
)
end
-
- context 'when getter is not defined' do
- let(:input) { true }
- let(:klass) do
- Class.new(Integration) do
- boolean_accessor :test_value
- end
- end
-
- it 'defines a prop_accessor' do
- expect(integration).to have_attributes(
- test_value: true,
- test_value?: true
- )
-
- expect(integration.properties['test_value']).to be(true)
- end
- end
end
describe '#attributes' do
diff --git a/spec/models/integrations/apple_app_store_spec.rb b/spec/models/integrations/apple_app_store_spec.rb
index 9864fe38d3f..ea66c382726 100644
--- a/spec/models/integrations/apple_app_store_spec.rb
+++ b/spec/models/integrations/apple_app_store_spec.rb
@@ -13,8 +13,7 @@ RSpec.describe Integrations::AppleAppStore, feature_category: :mobile_devops do
it { is_expected.to validate_presence_of :app_store_key_id }
it { is_expected.to validate_presence_of :app_store_private_key }
it { is_expected.to validate_presence_of :app_store_private_key_file_name }
- it { is_expected.to allow_value(true, false).for(:app_store_protected_refs) }
- it { is_expected.not_to allow_value(nil).for(:app_store_protected_refs) }
+ it { is_expected.to validate_inclusion_of(:app_store_protected_refs).in_array([true, false]) }
it { is_expected.to allow_value('aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee').for(:app_store_issuer_id) }
it { is_expected.not_to allow_value('abcde').for(:app_store_issuer_id) }
it { is_expected.to allow_value(File.read('spec/fixtures/ssl_key.pem')).for(:app_store_private_key) }
diff --git a/spec/models/integrations/asana_spec.rb b/spec/models/integrations/asana_spec.rb
index 376aec1088e..70c56d35a04 100644
--- a/spec/models/integrations/asana_spec.rb
+++ b/spec/models/integrations/asana_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Integrations::Asana, feature_category: :integrations do
let_it_be(:project) { build(:project) }
let(:gid) { "123456789ABCD" }
- let(:asana_task) { double(::Asana::Resources::Task) }
+ let(:asana_task) { double(data: { gid: gid }) }
let(:asana_integration) { described_class.new }
let(:ref) { 'main' }
let(:restrict_to_branch) { nil }
@@ -41,6 +41,15 @@ RSpec.describe Integrations::Asana, feature_category: :integrations do
}
end
+ let(:completed_message) do
+ {
+ body: {
+ completed: true
+ },
+ headers: { "Authorization" => "Bearer verySecret" }
+ }
+ end
+
before do
allow(asana_integration).to receive_messages(
project: project,
@@ -60,9 +69,10 @@ RSpec.describe Integrations::Asana, feature_category: :integrations do
let(:ref) { 'main' }
it 'calls the Asana integration' do
- expect(asana_task).to receive(:add_comment)
- expect(asana_task).to receive(:update).with(completed: true)
- expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, '456789').once.and_return(asana_task)
+ expect(Gitlab::HTTP).to receive(:post)
+ .with("https://app.asana.com/api/1.0/tasks/456789/stories", anything).once.and_return(asana_task)
+ expect(Gitlab::HTTP).to receive(:put)
+ .with("https://app.asana.com/api/1.0/tasks/456789", completed_message).once.and_return(asana_task)
execute_integration
end
@@ -72,8 +82,8 @@ RSpec.describe Integrations::Asana, feature_category: :integrations do
let(:ref) { 'mai' }
it 'does not call the Asana integration' do
- expect(asana_task).not_to receive(:add_comment)
- expect(::Asana::Resources::Task).not_to receive(:find_by_id)
+ expect(Gitlab::HTTP).not_to receive(:post)
+ expect(Gitlab::HTTP).not_to receive(:put)
execute_integration
end
@@ -83,12 +93,17 @@ RSpec.describe Integrations::Asana, feature_category: :integrations do
context 'when creating a story' do
let(:message) { "Message from commit. related to ##{gid}" }
let(:expected_message) do
- "#{user.name} pushed to branch main of #{project.full_name} ( https://gitlab.com/ ): #{message}"
+ {
+ body: {
+ text: "#{user.name} pushed to branch main of #{project.full_name} ( https://gitlab.com/ ): #{message}"
+ },
+ headers: { "Authorization" => "Bearer verySecret" }
+ }
end
it 'calls Asana integration to create a story' do
- expect(asana_task).to receive(:add_comment).with(text: expected_message)
- expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, gid).once.and_return(asana_task)
+ expect(Gitlab::HTTP).to receive(:post)
+ .with("https://app.asana.com/api/1.0/tasks/#{gid}/stories", expected_message).once.and_return(asana_task)
execute_integration
end
@@ -98,9 +113,10 @@ RSpec.describe Integrations::Asana, feature_category: :integrations do
let(:message) { 'fix #456789' }
it 'calls Asana integration to create a story and close a task' do
- expect(asana_task).to receive(:add_comment)
- expect(asana_task).to receive(:update).with(completed: true)
- expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, '456789').once.and_return(asana_task)
+ expect(Gitlab::HTTP).to receive(:post)
+ .with("https://app.asana.com/api/1.0/tasks/456789/stories", anything).once.and_return(asana_task)
+ expect(Gitlab::HTTP).to receive(:put)
+ .with("https://app.asana.com/api/1.0/tasks/456789", completed_message).once.and_return(asana_task)
execute_integration
end
@@ -110,9 +126,10 @@ RSpec.describe Integrations::Asana, feature_category: :integrations do
let(:message) { 'closes https://app.asana.com/19292/956299/42' }
it 'calls Asana integration to close via url' do
- expect(asana_task).to receive(:add_comment)
- expect(asana_task).to receive(:update).with(completed: true)
- expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, '42').once.and_return(asana_task)
+ expect(Gitlab::HTTP).to receive(:post)
+ .with("https://app.asana.com/api/1.0/tasks/42/stories", anything).once.and_return(asana_task)
+ expect(Gitlab::HTTP).to receive(:put)
+ .with("https://app.asana.com/api/1.0/tasks/42", completed_message).once.and_return(asana_task)
execute_integration
end
@@ -127,27 +144,30 @@ RSpec.describe Integrations::Asana, feature_category: :integrations do
end
it 'allows multiple matches per line' do
- expect(asana_task).to receive(:add_comment)
- expect(asana_task).to receive(:update).with(completed: true)
- expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, '123').once.and_return(asana_task)
-
- asana_task_2 = double(Asana::Resources::Task)
- expect(asana_task_2).to receive(:add_comment)
- expect(asana_task_2).to receive(:update).with(completed: true)
- expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, '456').once.and_return(asana_task_2)
-
- asana_task_3 = double(Asana::Resources::Task)
- expect(asana_task_3).to receive(:add_comment)
- expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, '789').once.and_return(asana_task_3)
-
- asana_task_4 = double(Asana::Resources::Task)
- expect(asana_task_4).to receive(:add_comment)
- expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, '42').once.and_return(asana_task_4)
-
- asana_task_5 = double(Asana::Resources::Task)
- expect(asana_task_5).to receive(:add_comment)
- expect(asana_task_5).to receive(:update).with(completed: true)
- expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, '12').once.and_return(asana_task_5)
+ expect(Gitlab::HTTP).to receive(:post)
+ .with("https://app.asana.com/api/1.0/tasks/123/stories", anything).once.and_return(asana_task)
+ expect(Gitlab::HTTP).to receive(:put)
+ .with("https://app.asana.com/api/1.0/tasks/123", completed_message).once.and_return(asana_task)
+
+          asana_task_2 = double(data: { gid: 456 })
+ expect(Gitlab::HTTP).to receive(:post)
+ .with("https://app.asana.com/api/1.0/tasks/456/stories", anything).once.and_return(asana_task_2)
+ expect(Gitlab::HTTP).to receive(:put)
+ .with("https://app.asana.com/api/1.0/tasks/456", completed_message).once.and_return(asana_task_2)
+
+          asana_task_3 = double(data: { gid: 789 })
+ expect(Gitlab::HTTP).to receive(:post)
+ .with("https://app.asana.com/api/1.0/tasks/789/stories", anything).once.and_return(asana_task_3)
+
+          asana_task_4 = double(data: { gid: 42 })
+ expect(Gitlab::HTTP).to receive(:post)
+ .with("https://app.asana.com/api/1.0/tasks/42/stories", anything).once.and_return(asana_task_4)
+
+          asana_task_5 = double(data: { gid: 12 })
+ expect(Gitlab::HTTP).to receive(:post)
+ .with("https://app.asana.com/api/1.0/tasks/12/stories", anything).once.and_return(asana_task_5)
+ expect(Gitlab::HTTP).to receive(:put)
+ .with("https://app.asana.com/api/1.0/tasks/12", completed_message).once.and_return(asana_task_5)
execute_integration
end
diff --git a/spec/models/integrations/bamboo_spec.rb b/spec/models/integrations/bamboo_spec.rb
index 3b459ab9d5b..62080fa7a12 100644
--- a/spec/models/integrations/bamboo_spec.rb
+++ b/spec/models/integrations/bamboo_spec.rb
@@ -116,7 +116,7 @@ RSpec.describe Integrations::Bamboo, :use_clean_rails_memory_store_caching, feat
is_expected.to eq('http://gitlab.com/bamboo/browse/42')
end
- context 'bamboo_url has trailing slash' do
+ context 'when bamboo_url has trailing slash' do
let(:bamboo_url) { 'http://gitlab.com/bamboo/' }
it 'returns a build URL' do
@@ -198,13 +198,22 @@ RSpec.describe Integrations::Bamboo, :use_clean_rails_memory_store_caching, feat
context 'when Bamboo API returns an array of results and we only consider the last one' do
let(:bamboo_response_template) do
- %q({"results":{"results":{"size":"2","result":[{"buildState":"%{build_state}","planResultKey":{"key":"41"}},{"buildState":"%{build_state}","planResultKey":{"key":"42"}}]}}})
+ '{"results":{"results":{"size":"2","result":[{"buildState":"%{build_state}","planResultKey":{"key":"41"}}, ' \
+ '{"buildState":"%{build_state}","planResultKey":{"key":"42"}}]}}}'
end
it_behaves_like 'reactive cache calculation'
end
end
+ describe '#avatar_url' do
+ it 'returns the avatar image path' do
+ expect(subject.avatar_url).to eq(ActionController::Base.helpers.image_path(
+ 'illustrations/third-party-logos/integrations-logos/atlassian-bamboo.svg'
+ ))
+ end
+ end
+
def stub_update_and_build_request(status: 200, body: nil)
bamboo_full_url = 'http://gitlab.com/bamboo/updateAndBuild.action?buildKey=foo&os_authType=basic'
@@ -222,11 +231,11 @@ RSpec.describe Integrations::Bamboo, :use_clean_rails_memory_store_caching, feat
status: status,
headers: { 'Content-Type' => 'application/json' },
body: body
- ).with(basic_auth: %w(mic password))
+ ).with(basic_auth: %w[mic password])
end
def bamboo_response(build_state: 'success')
# reference: https://docs.atlassian.com/atlassian-bamboo/REST/6.2.5/#d2e786
- bamboo_response_template % { build_state: build_state }
+ format(bamboo_response_template, build_state: build_state)
end
end
diff --git a/spec/models/integrations/chat_message/alert_message_spec.rb b/spec/models/integrations/chat_message/alert_message_spec.rb
index 162df1a774c..a9db9e14883 100644
--- a/spec/models/integrations/chat_message/alert_message_spec.rb
+++ b/spec/models/integrations/chat_message/alert_message_spec.rb
@@ -57,4 +57,10 @@ RSpec.describe Integrations::ChatMessage::AlertMessage do
expect(time_item[:value]).to eq(expected_time)
end
end
+
+ describe '#attachment_color' do
+ it 'returns the correct color' do
+ expect(subject.attachment_color).to eq('#C95823')
+ end
+ end
end
diff --git a/spec/models/integrations/chat_message/deployment_message_spec.rb b/spec/models/integrations/chat_message/deployment_message_spec.rb
index 630ae902331..afbf1d1c0d1 100644
--- a/spec/models/integrations/chat_message/deployment_message_spec.rb
+++ b/spec/models/integrations/chat_message/deployment_message_spec.rb
@@ -19,6 +19,29 @@ RSpec.describe Integrations::ChatMessage::DeploymentMessage, feature_category: :
it_behaves_like Integrations::ChatMessage
+ def deployment_data(params)
+ {
+ object_kind: "deployment",
+ status: "success",
+ deployable_id: 3,
+ deployable_url: "deployable_url",
+ environment: "sandbox",
+ project: {
+ name: "greatproject",
+ web_url: "project_web_url",
+ path_with_namespace: "project_path_with_namespace"
+ },
+ user: {
+ name: "Jane Person",
+ username: "jane"
+ },
+ user_url: "user_url",
+ short_sha: "12345678",
+ commit_url: "commit_url",
+ commit_title: "commit title text"
+ }.merge(params)
+ end
+
describe '#pretext' do
it 'returns a message with the data returned by the deployment data builder' do
expect(subject.pretext).to eq("Deploy to myenvironment succeeded")
@@ -80,29 +103,6 @@ RSpec.describe Integrations::ChatMessage::DeploymentMessage, feature_category: :
end
describe '#attachments' do
- def deployment_data(params)
- {
- object_kind: "deployment",
- status: "success",
- deployable_id: 3,
- deployable_url: "deployable_url",
- environment: "sandbox",
- project: {
- name: "greatproject",
- web_url: "project_web_url",
- path_with_namespace: "project_path_with_namespace"
- },
- user: {
- name: "Jane Person",
- username: "jane"
- },
- user_url: "user_url",
- short_sha: "12345678",
- commit_url: "commit_url",
- commit_title: "commit title text"
- }.merge(params)
- end
-
context 'without markdown' do
it 'returns attachments with the data returned by the deployment data builder' do
job_url = Gitlab::Routing.url_helpers.project_job_url(project, ci_build)
@@ -165,4 +165,23 @@ RSpec.describe Integrations::ChatMessage::DeploymentMessage, feature_category: :
}])
end
end
+
+ describe '#attachment_color' do
+ using RSpec::Parameterized::TableSyntax
+ where(:status, :expected_color) do
+ 'success' | 'good'
+ 'canceled' | 'warning'
+ 'failed' | 'danger'
+ 'blub' | '#334455'
+ end
+
+ with_them do
+ it 'returns the correct color' do
+ data = deployment_data(status: status)
+ message = described_class.new(data)
+
+ expect(message.attachment_color).to eq(expected_color)
+ end
+ end
+ end
end
diff --git a/spec/models/integrations/chat_message/issue_message_spec.rb b/spec/models/integrations/chat_message/issue_message_spec.rb
index 14451427a5a..7b09b5d08b0 100644
--- a/spec/models/integrations/chat_message/issue_message_spec.rb
+++ b/spec/models/integrations/chat_message/issue_message_spec.rb
@@ -125,4 +125,10 @@ RSpec.describe Integrations::ChatMessage::IssueMessage, feature_category: :integ
end
end
end
+
+ describe '#attachment_color' do
+ it 'returns the correct color' do
+ expect(subject.attachment_color).to eq('#C95823')
+ end
+ end
end
diff --git a/spec/models/integrations/chat_message/pipeline_message_spec.rb b/spec/models/integrations/chat_message/pipeline_message_spec.rb
index 4d371ca0899..5eb3915018e 100644
--- a/spec/models/integrations/chat_message/pipeline_message_spec.rb
+++ b/spec/models/integrations/chat_message/pipeline_message_spec.rb
@@ -388,4 +388,31 @@ RSpec.describe Integrations::ChatMessage::PipelineMessage do
)
end
end
+
+ describe '#attachment_color' do
+ context 'when success' do
+ before do
+ args[:object_attributes][:status] = 'success'
+ end
+
+ it { expect(subject.attachment_color).to eq('good') }
+ end
+
+ context 'when passed with warnings' do
+ before do
+ args[:object_attributes][:status] = 'success'
+ args[:object_attributes][:detailed_status] = 'passed with warnings'
+ end
+
+ it { expect(subject.attachment_color).to eq('warning') }
+ end
+
+ context 'when failed' do
+ before do
+ args[:object_attributes][:status] = 'failed'
+ end
+
+ it { expect(subject.attachment_color).to eq('danger') }
+ end
+ end
end
diff --git a/spec/models/integrations/chat_message/push_message_spec.rb b/spec/models/integrations/chat_message/push_message_spec.rb
index 5c9c5c64d7e..a9d0f801406 100644
--- a/spec/models/integrations/chat_message/push_message_spec.rb
+++ b/spec/models/integrations/chat_message/push_message_spec.rb
@@ -214,4 +214,10 @@ RSpec.describe Integrations::ChatMessage::PushMessage do
end
end
end
+
+ describe '#attachment_color' do
+ it 'returns the correct color' do
+ expect(subject.attachment_color).to eq('#345')
+ end
+ end
end
diff --git a/spec/models/integrations/discord_spec.rb b/spec/models/integrations/discord_spec.rb
index 7ab7308ac1c..89c4dcd7e0e 100644
--- a/spec/models/integrations/discord_spec.rb
+++ b/spec/models/integrations/discord_spec.rb
@@ -3,7 +3,7 @@
require "spec_helper"
RSpec.describe Integrations::Discord, feature_category: :integrations do
- it_behaves_like "chat integration", "Discord notifications" do
+ it_behaves_like "chat integration", "Discord notifications", supports_deployments: true do
let(:client) { Discordrb::Webhooks::Client }
let(:client_arguments) { { url: webhook_url } }
let(:payload) do
@@ -18,6 +18,8 @@ RSpec.describe Integrations::Discord, feature_category: :integrations do
]
}
end
+
+ it_behaves_like 'supports group mentions', :discord_integration
end
describe 'validations' do
@@ -77,7 +79,7 @@ RSpec.describe Integrations::Discord, feature_category: :integrations do
icon_url: start_with('https://www.gravatar.com/avatar/'),
name: user.name
),
- color: 16543014,
+ color: 3359829,
timestamp: Time.now.utc.iso8601
)
end
diff --git a/spec/models/integrations/google_play_spec.rb b/spec/models/integrations/google_play_spec.rb
index a0bc73378d3..c5b0c058809 100644
--- a/spec/models/integrations/google_play_spec.rb
+++ b/spec/models/integrations/google_play_spec.rb
@@ -20,8 +20,7 @@ RSpec.describe Integrations::GooglePlay, feature_category: :mobile_devops do
it { is_expected.to allow_value('a.a.a').for(:package_name) }
it { is_expected.to allow_value('com.example').for(:package_name) }
it { is_expected.not_to allow_value('com').for(:package_name) }
- it { is_expected.to allow_value(true, false).for(:google_play_protected_refs) }
- it { is_expected.not_to allow_value(nil).for(:google_play_protected_refs) }
+ it { is_expected.to validate_inclusion_of(:google_play_protected_refs).in_array([true, false]) }
it { is_expected.not_to allow_value('com.example.my app').for(:package_name) }
it { is_expected.not_to allow_value('1com.example.myapp').for(:package_name) }
it { is_expected.not_to allow_value('com.1example.myapp').for(:package_name) }
diff --git a/spec/models/integrations/hangouts_chat_spec.rb b/spec/models/integrations/hangouts_chat_spec.rb
index bcb80768ffb..a1ecfd436c2 100644
--- a/spec/models/integrations/hangouts_chat_spec.rb
+++ b/spec/models/integrations/hangouts_chat_spec.rb
@@ -4,7 +4,7 @@ require "spec_helper"
RSpec.describe Integrations::HangoutsChat, feature_category: :integrations do
it_behaves_like "chat integration", "Hangouts Chat" do
- let(:client) { HangoutsChat::Sender }
+ let(:client) { Gitlab::HTTP }
let(:client_arguments) { webhook_url }
let(:payload) do
{
diff --git a/spec/models/integrations/integration_list_spec.rb b/spec/models/integrations/integration_list_spec.rb
new file mode 100644
index 00000000000..b7ccbcecf6b
--- /dev/null
+++ b/spec/models/integrations/integration_list_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Integrations::IntegrationList, feature_category: :integrations do
+ let_it_be(:projects) { create_pair(:project, :small_repo) }
+ let(:batch) { Project.where(id: projects.pluck(:id)) }
+ let(:integration_hash) { { 'active' => 'true', 'category' => 'common' } }
+ let(:association) { 'project' }
+
+ subject { described_class.new(batch, integration_hash, association) }
+
+ describe '#to_array' do
+ it 'returns array of Integration, columns, and values' do
+ expect(subject.to_array).to eq([
+ Integration,
+ %w[active category project_id],
+ [['true', 'common', projects.first.id], ['true', 'common', projects.second.id]]
+ ])
+ end
+ end
+end
diff --git a/spec/models/integrations/jira_spec.rb b/spec/models/integrations/jira_spec.rb
index 9bb77f6d6d4..c87128db221 100644
--- a/spec/models/integrations/jira_spec.rb
+++ b/spec/models/integrations/jira_spec.rb
@@ -597,7 +597,7 @@ RSpec.describe Integrations::Jira, feature_category: :integrations do
it 'uses the default GitLab::HTTP timeouts' do
timeouts = Gitlab::HTTP::DEFAULT_TIMEOUT_OPTIONS
- expect(Gitlab::HTTP).to receive(:httparty_perform_request)
+ expect(Gitlab::HTTP_V2::Client).to receive(:httparty_perform_request)
.with(Net::HTTP::Get, '/foo', hash_including(timeouts)).and_call_original
jira_integration.client.get('/foo')
@@ -1372,4 +1372,12 @@ RSpec.describe Integrations::Jira, feature_category: :integrations do
end
end
end
+
+ describe '#avatar_url' do
+ it 'returns the avatar image path' do
+ expect(subject.avatar_url).to eq(
+ ActionController::Base.helpers.image_path('illustrations/third-party-logos/integrations-logos/jira.svg')
+ )
+ end
+ end
end
diff --git a/spec/models/integrations/pivotaltracker_spec.rb b/spec/models/integrations/pivotaltracker_spec.rb
index bf8458a376c..babe9119ccf 100644
--- a/spec/models/integrations/pivotaltracker_spec.rb
+++ b/spec/models/integrations/pivotaltracker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Integrations::Pivotaltracker do
+RSpec.describe Integrations::Pivotaltracker, feature_category: :integrations do
include StubRequests
describe 'Validations' do
@@ -93,4 +93,14 @@ RSpec.describe Integrations::Pivotaltracker do
end
end
end
+
+ describe '#avatar_url' do
+ it 'returns the avatar image path' do
+ expect(subject.avatar_url).to eq(
+ ActionController::Base.helpers.image_path(
+ 'illustrations/third-party-logos/integrations-logos/pivotal-tracker.svg'
+ )
+ )
+ end
+ end
end
diff --git a/spec/models/integrations/pushover_spec.rb b/spec/models/integrations/pushover_spec.rb
index 8286fd20669..c576340a78a 100644
--- a/spec/models/integrations/pushover_spec.rb
+++ b/spec/models/integrations/pushover_spec.rb
@@ -62,4 +62,12 @@ RSpec.describe Integrations::Pushover do
expect(WebMock).to have_requested(:post, 'https://8.8.8.8/1/messages.json').once
end
end
+
+ describe '#avatar_url' do
+ it 'returns the avatar image path' do
+ expect(subject.avatar_url).to eq(
+ ActionController::Base.helpers.image_path('illustrations/third-party-logos/integrations-logos/pushover.svg')
+ )
+ end
+ end
end
diff --git a/spec/models/integrations/slack_spec.rb b/spec/models/integrations/slack_spec.rb
index 59ee3746d8f..0d82abd9e3d 100644
--- a/spec/models/integrations/slack_spec.rb
+++ b/spec/models/integrations/slack_spec.rb
@@ -9,4 +9,6 @@ RSpec.describe Integrations::Slack, feature_category: :integrations do
stub_request(:post, integration.webhook)
end
end
+
+ it_behaves_like 'supports group mentions', :integrations_slack
end
diff --git a/spec/models/integrations/telegram_spec.rb b/spec/models/integrations/telegram_spec.rb
index c3a66c84f09..4c814dedd66 100644
--- a/spec/models/integrations/telegram_spec.rb
+++ b/spec/models/integrations/telegram_spec.rb
@@ -50,4 +50,12 @@ RSpec.describe Integrations::Telegram, feature_category: :integrations do
end
end
end
+
+ describe '#avatar_url' do
+ it 'returns the avatar image path' do
+ expect(subject.avatar_url).to eq(
+ ActionController::Base.helpers.image_path('illustrations/third-party-logos/integrations-logos/telegram.svg')
+ )
+ end
+ end
end
diff --git a/spec/models/issue_link_spec.rb b/spec/models/issue_link_spec.rb
index 9af667c2960..24f0b9f2a5c 100644
--- a/spec/models/issue_link_spec.rb
+++ b/spec/models/issue_link_spec.rb
@@ -7,7 +7,9 @@ RSpec.describe IssueLink, feature_category: :portfolio_management do
it_behaves_like 'issuable link' do
let_it_be_with_reload(:issuable_link) { create(:issue_link) }
- let_it_be(:issuable) { create(:issue) }
+ let_it_be(:issuable) { create(:issue, project: project) }
+ let_it_be(:issuable2) { create(:issue, project: project) }
+ let_it_be(:issuable3) { create(:issue, project: project) }
let(:issuable_class) { 'Issue' }
let(:issuable_link_factory) { :issue_link }
end
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index 4e217e3a9f7..e7a5a53c6a0 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -69,8 +69,7 @@ RSpec.describe Issue, feature_category: :team_planning do
end
describe 'validations' do
- it { is_expected.not_to allow_value(nil).for(:confidential) }
- it { is_expected.to allow_value(true, false).for(:confidential) }
+ it { is_expected.to validate_inclusion_of(:confidential).in_array([true, false]) }
end
describe 'custom validations' do
@@ -302,7 +301,7 @@ RSpec.describe Issue, feature_category: :team_planning do
let(:issue) { create(:issue) }
let(:project) { issue.project }
let(:user) { issue.author }
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_CREATED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_CREATED }
let(:namespace) { project.namespace }
subject(:service_action) { issue }
@@ -867,6 +866,29 @@ RSpec.describe Issue, feature_category: :team_planning do
.to contain_exactly(authorized_issue_b, authorized_incident_a)
end
end
+
+ context 'when authorize argument is false' do
+ it 'returns all related issues' do
+ expect(authorized_issue_a.related_issues(authorize: false))
+ .to contain_exactly(authorized_issue_b, authorized_issue_c, authorized_incident_a, unauthorized_issue)
+ end
+ end
+
+ context 'when current_user argument is nil' do
+ let_it_be(:public_issue) { create(:issue, project: create(:project, :public)) }
+
+ it 'returns public linked issues only' do
+ create(:issue_link, source: authorized_issue_a, target: public_issue)
+
+ expect(authorized_issue_a.related_issues).to contain_exactly(public_issue)
+ end
+ end
+
+ context 'when issue is a new record' do
+ let(:new_issue) { build(:issue, project: authorized_project) }
+
+ it { expect(new_issue.related_issues(user)).to be_empty }
+ end
end
describe '#can_move?' do
@@ -2038,4 +2060,134 @@ RSpec.describe Issue, feature_category: :team_planning do
expect(issue.search_data.namespace_id).to eq(issue.namespace_id)
end
end
+
+ describe '#linked_items_count' do
+ let_it_be(:issue1) { create(:issue, project: reusable_project) }
+ let_it_be(:issue2) { create(:issue, project: reusable_project) }
+ let_it_be(:issue3) { create(:issue, project: reusable_project) }
+ let_it_be(:issue4) { build(:issue, project: reusable_project) }
+
+ it 'returns number of issues linked to the issue' do
+ create(:issue_link, source: issue1, target: issue2)
+ create(:issue_link, source: issue1, target: issue3)
+
+ expect(issue1.linked_items_count).to eq(2)
+ expect(issue2.linked_items_count).to eq(1)
+ expect(issue3.linked_items_count).to eq(1)
+ expect(issue4.linked_items_count).to eq(0)
+ end
+ end
+
+ describe '#readable_by?' do
+ let_it_be(:admin_user) { create(:user, :admin) }
+
+ subject { issue_subject.readable_by?(user) }
+
+ context 'when issue belongs directly to a project' do
+ let_it_be_with_reload(:project_issue) { create(:issue, project: reusable_project) }
+ let_it_be(:project_reporter) { create(:user).tap { |u| reusable_project.add_reporter(u) } }
+ let_it_be(:project_guest) { create(:user).tap { |u| reusable_project.add_guest(u) } }
+
+ let(:issue_subject) { project_issue }
+
+ context 'when user is in admin mode', :enable_admin_mode do
+ let(:user) { admin_user }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when user is a reporter' do
+ let(:user) { project_reporter }
+
+ it { is_expected.to be_truthy }
+
+ context 'when issues project feature is not enabled' do
+ before do
+ reusable_project.project_feature.update!(issues_access_level: ProjectFeature::DISABLED)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when issue is hidden (banned author)' do
+ before do
+ issue_subject.author.ban!
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'when user is a guest' do
+ let(:user) { project_guest }
+
+ context 'when issue is confidential' do
+ before do
+ issue_subject.update!(confidential: true)
+ end
+
+ it { is_expected.to be_falsey }
+
+ context 'when user is assignee of the issue' do
+ before do
+ issue_subject.update!(assignees: [user])
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+ end
+ end
+
+ context 'when issue belongs directly to the group' do
+ let_it_be(:group) { create(:group) }
+ let_it_be_with_reload(:group_issue) { create(:issue, :group_level, namespace: group) }
+ let_it_be(:group_reporter) { create(:user).tap { |u| group.add_reporter(u) } }
+ let_it_be(:group_guest) { create(:user).tap { |u| group.add_guest(u) } }
+
+ let(:issue_subject) { group_issue }
+
+ context 'when user is in admin mode', :enable_admin_mode do
+ let(:user) { admin_user }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when user is a reporter' do
+ let(:user) { group_reporter }
+
+ it { is_expected.to be_truthy }
+
+ context 'when issue is hidden (banned author)' do
+ before do
+ issue_subject.author.ban!
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'when user is a guest' do
+ let(:user) { group_guest }
+
+ it { is_expected.to be_truthy }
+
+ context 'when issue is confidential' do
+ before do
+ issue_subject.update!(confidential: true)
+ end
+
+ it { is_expected.to be_falsey }
+
+ context 'when user is assignee of the issue' do
+ before do
+ issue_subject.update!(assignees: [user])
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/lfs_download_object_spec.rb b/spec/models/lfs_download_object_spec.rb
index d82e432b7d6..f69c6efb0a7 100644
--- a/spec/models/lfs_download_object_spec.rb
+++ b/spec/models/lfs_download_object_spec.rb
@@ -24,6 +24,19 @@ RSpec.describe LfsDownloadObject do
end
end
+ describe '#to_hash' do
+ it 'returns specified Hash' do
+ expected_hash = {
+ 'oid' => oid,
+ 'size' => size,
+ 'link' => link,
+ 'headers' => headers
+ }
+
+ expect(subject.to_hash).to eq(expected_hash)
+ end
+ end
+
describe '#has_authorization_header?' do
it 'returns false' do
expect(subject.has_authorization_header?).to be false
diff --git a/spec/models/loose_foreign_keys/deleted_record_spec.rb b/spec/models/loose_foreign_keys/deleted_record_spec.rb
index ed80f5c1516..619f77b6bec 100644
--- a/spec/models/loose_foreign_keys/deleted_record_spec.rb
+++ b/spec/models/loose_foreign_keys/deleted_record_spec.rb
@@ -16,30 +16,20 @@ RSpec.describe LooseForeignKeys::DeletedRecord, type: :model, feature_category:
let(:records) { described_class.load_batch_for_table(table, 10) }
describe '.load_batch_for_table' do
- where(:union_feature_flag_value) do
- [true, false]
+ it 'loads records and orders them by creation date' do
+ expect(records).to eq([deleted_record_1, deleted_record_2, deleted_record_4])
end
- with_them do
- before do
- stub_feature_flags('loose_foreign_keys_batch_load_using_union' => union_feature_flag_value)
- end
-
- it 'loads records and orders them by creation date' do
- expect(records).to eq([deleted_record_1, deleted_record_2, deleted_record_4])
- end
+ it 'supports configurable batch size' do
+ records = described_class.load_batch_for_table(table, 2)
- it 'supports configurable batch size' do
- records = described_class.load_batch_for_table(table, 2)
-
- expect(records).to eq([deleted_record_1, deleted_record_2])
- end
+ expect(records).to eq([deleted_record_1, deleted_record_2])
+ end
- it 'returns the partition number in each returned record' do
- records = described_class.load_batch_for_table(table, 4)
+ it 'returns the partition number in each returned record' do
+ records = described_class.load_batch_for_table(table, 4)
- expect(records).to all(have_attributes(partition: (a_value > 0)))
- end
+ expect(records).to all(have_attributes(partition: (a_value > 0)))
end
end
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index 6dd5f9dec8c..fdd8a610fe4 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -16,7 +16,6 @@ RSpec.describe Member, feature_category: :groups_and_projects do
describe 'Associations' do
it { is_expected.to belong_to(:user) }
it { is_expected.to belong_to(:member_namespace) }
- it { is_expected.to have_one(:member_task) }
end
describe 'Validation' do
@@ -883,17 +882,6 @@ RSpec.describe Member, feature_category: :groups_and_projects do
expect(member.invite_token).not_to be_nil
expect_any_instance_of(described_class).not_to receive(:after_accept_invite)
end
-
- it 'schedules a TasksToBeDone::CreateWorker task' do
- member_task = create(:member_task, member: member, project: member.project)
-
- expect(TasksToBeDone::CreateWorker)
- .to receive(:perform_async)
- .with(member_task.id, member.created_by_id, [user.id])
- .once
-
- member.accept_invite!(user)
- end
end
describe '#decline_invite!' do
diff --git a/spec/models/members/last_group_owner_assigner_spec.rb b/spec/models/members/last_group_owner_assigner_spec.rb
index 2539388c667..5e135665585 100644
--- a/spec/models/members/last_group_owner_assigner_spec.rb
+++ b/spec/models/members/last_group_owner_assigner_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe LastGroupOwnerAssigner do
+RSpec.describe LastGroupOwnerAssigner, feature_category: :groups_and_projects do
describe "#execute" do
let_it_be(:user, reload: true) { create(:user) }
let_it_be(:group) { create(:group) }
diff --git a/spec/models/members/member_task_spec.rb b/spec/models/members/member_task_spec.rb
deleted file mode 100644
index b06aa05c255..00000000000
--- a/spec/models/members/member_task_spec.rb
+++ /dev/null
@@ -1,124 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe MemberTask do
- describe 'Associations' do
- it { is_expected.to belong_to(:member) }
- it { is_expected.to belong_to(:project) }
- end
-
- describe 'Validations' do
- it { is_expected.to validate_presence_of(:member) }
- it { is_expected.to validate_presence_of(:project) }
- it { is_expected.to validate_inclusion_of(:tasks).in_array(MemberTask::TASKS.values) }
-
- describe 'unique tasks validation' do
- subject do
- build(:member_task, tasks: [0, 0])
- end
-
- it 'expects the task values to be unique' do
- expect(subject).to be_invalid
- expect(subject.errors[:tasks]).to include('are not unique')
- end
- end
-
- describe 'project validations' do
- let_it_be(:project) { create(:project) }
-
- subject do
- build(:member_task, member: member, project: project, tasks_to_be_done: [:ci, :code])
- end
-
- context 'when the member source is a group' do
- let_it_be(:member) { create(:group_member) }
-
- it "expects the project to be part of the member's group projects" do
- expect(subject).to be_invalid
- expect(subject.errors[:project]).to include('is not in the member group')
- end
-
- context "when the project is part of the member's group projects" do
- let_it_be(:project) { create(:project, namespace: member.source) }
-
- it { is_expected.to be_valid }
- end
- end
-
- context 'when the member source is a project' do
- let_it_be(:member) { create(:project_member) }
-
- it "expects the project to be the member's project" do
- expect(subject).to be_invalid
- expect(subject.errors[:project]).to include('is not the member project')
- end
-
- context "when the project is the member's project" do
- let_it_be(:project) { member.source }
-
- it { is_expected.to be_valid }
- end
- end
- end
- end
-
- describe '.for_members' do
- it 'returns the member_tasks for multiple members' do
- member1 = create(:group_member)
- member_task1 = create(:member_task, member: member1)
- create(:member_task)
- expect(described_class.for_members([member1])).to match_array([member_task1])
- end
- end
-
- describe '#tasks_to_be_done' do
- subject { member_task.tasks_to_be_done }
-
- let_it_be(:member_task) { build(:member_task) }
-
- before do
- member_task[:tasks] = [0, 1]
- end
-
- it 'returns an array of symbols for the corresponding integers' do
- expect(subject).to match_array([:ci, :code])
- end
- end
-
- describe '#tasks_to_be_done=' do
- let_it_be(:member_task) { build(:member_task) }
-
- context 'when passing valid values' do
- subject { member_task[:tasks] }
-
- before do
- member_task.tasks_to_be_done = tasks
- end
-
- context 'when passing tasks as strings' do
- let_it_be(:tasks) { %w(ci code) }
-
- it 'sets an array of integers for the corresponding tasks' do
- expect(subject).to match_array([0, 1])
- end
- end
-
- context 'when passing a single task' do
- let_it_be(:tasks) { :ci }
-
- it 'sets an array of integers for the corresponding tasks' do
- expect(subject).to match_array([1])
- end
- end
-
- context 'when passing a task twice' do
- let_it_be(:tasks) { %w(ci ci) }
-
- it 'is set only once' do
- expect(subject).to match_array([1])
- end
- end
- end
- end
-end
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index bf9af73fe1b..806ce3f21b5 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -51,6 +51,7 @@ RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
it { expect(subject.head_commit_sha).to eq('b83d6e391c22777fca1ed3012fce84f633d7fed0') }
it { expect(subject.base_commit_sha).to eq('ae73cb07c9eeaf35924a10f713b364d32b2dd34f') }
it { expect(subject.start_commit_sha).to eq('0b4bc9a49b562e85de7cc9e834518ea6828729b9') }
+ it { expect(subject.patch_id_sha).to eq('1e05e04d4c2a6414d9d4ab38208511a3bbe715f2') }
context 'when diff_type is merge_head' do
let_it_be(:merge_request) { create(:merge_request) }
@@ -703,6 +704,39 @@ RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
end
end
+ describe "#set_patch_id_sha" do
+ let(:mr_diff) { create(:merge_request).merge_request_diff }
+
+ it "sets the patch_id_sha attribute" do
+ expect(mr_diff.set_patch_id_sha).not_to be_nil
+ end
+
+ context "when base_commit_sha is nil" do
+ it "records patch_id_sha as nil" do
+ expect(mr_diff).to receive(:base_commit_sha).and_return(nil)
+
+ expect(mr_diff.set_patch_id_sha).to be_nil
+ end
+ end
+
+ context "when head_commit_sha is nil" do
+ it "records patch_id_sha as nil" do
+ expect(mr_diff).to receive(:head_commit_sha).and_return(nil)
+
+ expect(mr_diff.set_patch_id_sha).to be_nil
+ end
+ end
+
+ context "when head_commit_sha and base_commit_sha match" do
+ it "records patch_id_sha as nil" do
+ expect(mr_diff).to receive(:base_commit_sha).at_least(:once).and_return("abc123")
+ expect(mr_diff).to receive(:head_commit_sha).at_least(:once).and_return("abc123")
+
+ expect(mr_diff.set_patch_id_sha).to be_nil
+ end
+ end
+ end
+
describe '#save_diffs' do
it 'saves collected state' do
mr_diff = create(:merge_request).merge_request_diff
diff --git a/spec/models/merge_request_reviewer_spec.rb b/spec/models/merge_request_reviewer_spec.rb
index 5a29966e4b9..fb1e43a426d 100644
--- a/spec/models/merge_request_reviewer_spec.rb
+++ b/spec/models/merge_request_reviewer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe MergeRequestReviewer do
+RSpec.describe MergeRequestReviewer, feature_category: :code_review_workflow do
let(:reviewer) { create(:user) }
let(:merge_request) { create(:merge_request) }
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index b36737fc19d..40f85c92851 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -570,6 +570,16 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
end
end
+ describe '.by_merged_commit_sha' do
+ it 'returns merge requests that match the given merged commit' do
+ mr = create(:merge_request, :merged, merged_commit_sha: '123abc')
+
+ create(:merge_request, :merged, merged_commit_sha: '123def')
+
+ expect(described_class.by_merged_commit_sha('123abc')).to eq([mr])
+ end
+ end
+
describe '.by_merge_commit_sha' do
it 'returns merge requests that match the given merge commit' do
mr = create(:merge_request, :merged, merge_commit_sha: '123abc')
@@ -591,16 +601,18 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
end
end
- describe '.by_merge_or_squash_commit_sha' do
- subject { described_class.by_merge_or_squash_commit_sha([sha1, sha2]) }
+ describe '.by_merged_or_merge_or_squash_commit_sha' do
+ subject { described_class.by_merged_or_merge_or_squash_commit_sha([sha1, sha2, sha3]) }
let(:sha1) { '123abc' }
let(:sha2) { '456abc' }
+ let(:sha3) { '111111' }
let(:mr1) { create(:merge_request, :merged, squash_commit_sha: sha1) }
let(:mr2) { create(:merge_request, :merged, merge_commit_sha: sha2) }
+ let(:mr3) { create(:merge_request, :merged, merged_commit_sha: sha3) }
- it 'returns merge requests that match the given squash and merge commits' do
- is_expected.to include(mr1, mr2)
+ it 'returns merge requests that match the given squash, merge and merged commits' do
+ is_expected.to include(mr1, mr2, mr3)
end
end
@@ -644,6 +656,13 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
it { is_expected.to eq([merge_request]) }
end
+ context 'when commit is a rebased fast-forward commit' do
+ let!(:merge_request) { create(:merge_request, :merged, merged_commit_sha: sha) }
+ let(:sha) { '123abc' }
+
+ it { is_expected.to eq([merge_request]) }
+ end
+
context 'when commit is not found' do
let(:sha) { '0000' }
@@ -2416,6 +2435,19 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
expect(merge_request.has_terraform_reports?).to be_falsey
end
end
+
+ context 'when head pipeline is not finished and has terraform reports' do
+ before do
+ stub_feature_flags(mr_show_reports_immediately: false)
+ end
+
+ it 'returns true' do
+ merge_request = create(:merge_request, :with_terraform_reports)
+ merge_request.actual_head_pipeline.update!(status: :running)
+
+ expect(merge_request.has_terraform_reports?).to be_truthy
+ end
+ end
end
describe '#has_sast_reports?' do
@@ -3474,6 +3506,10 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
it 'returns false' do
expect(subject.mergeable_state?).to be_falsey
end
+
+ it 'returns true when skipping draft check' do
+ expect(subject.mergeable_state?(skip_draft_check: true)).to be(true)
+ end
end
context 'when broken' do
@@ -4554,7 +4590,7 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
describe '#unlock_mr' do
subject { create(:merge_request, state: 'locked', source_project: project, merge_jid: 123) }
- it 'updates merge request head pipeline and sets merge_jid to nil', :sidekiq_might_not_need_inline do
+ it 'updates merge request head pipeline and sets merge_jid to nil', :sidekiq_inline do
pipeline = create(:ci_empty_pipeline, project: subject.project, ref: subject.source_branch, sha: subject.source_branch_sha)
subject.unlock_mr
@@ -5956,4 +5992,77 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
it { is_expected.to eq(expected) }
end
end
+
+ describe '#current_patch_id_sha' do
+ let(:merge_request) { build_stubbed(:merge_request) }
+ let(:merge_request_diff) { build_stubbed(:merge_request_diff) }
+ let(:patch_id) { 'ghi789' }
+
+ subject(:current_patch_id_sha) { merge_request.current_patch_id_sha }
+
+ before do
+ allow(merge_request).to receive(:merge_request_diff).and_return(merge_request_diff)
+ allow(merge_request_diff).to receive(:patch_id_sha).and_return(patch_id)
+ end
+
+ it { is_expected.to eq(patch_id) }
+
+ context 'when related merge_request_diff does not have a patch_id_sha' do
+ let(:diff_refs) { instance_double(Gitlab::Diff::DiffRefs, base_sha: base_sha, head_sha: head_sha) }
+ let(:base_sha) { 'abc123' }
+ let(:head_sha) { 'def456' }
+
+ before do
+ allow(merge_request_diff).to receive(:patch_id_sha).and_return(nil)
+ allow(merge_request).to receive(:diff_refs).and_return(diff_refs)
+
+ allow_next_instance_of(Repository) do |repo|
+ allow(repo)
+ .to receive(:get_patch_id)
+ .with(diff_refs.base_sha, diff_refs.head_sha)
+ .and_return(patch_id)
+ end
+ end
+
+ it { is_expected.to eq(patch_id) }
+
+ context 'when base_sha is nil' do
+ let(:base_sha) { nil }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when head_sha is nil' do
+ let(:head_sha) { nil }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when base_sha and head_sha match' do
+ let(:head_sha) { base_sha }
+
+ it { is_expected.to be_nil }
+ end
+ end
+ end
+
+ describe '#all_mergeability_checks_results' do
+ let(:merge_request) { build_stubbed(:merge_request) }
+ let(:result) { instance_double(ServiceResponse, payload: { results: ['result'] }) }
+
+ it 'executes MergeRequests::Mergeability::RunChecksService with all mergeability checks' do
+ expect_next_instance_of(
+ MergeRequests::Mergeability::RunChecksService,
+ merge_request: merge_request,
+ params: {}
+ ) do |svc|
+ expect(svc)
+ .to receive(:execute)
+ .with(described_class.all_mergeability_checks, execute_all: true)
+ .and_return(result)
+ end
+
+ expect(merge_request.all_mergeability_checks_results).to eq(result.payload[:results])
+ end
+ end
end
diff --git a/spec/models/ml/model_spec.rb b/spec/models/ml/model_spec.rb
index 42d8ed5c0c5..e22989f3ce2 100644
--- a/spec/models/ml/model_spec.rb
+++ b/spec/models/ml/model_spec.rb
@@ -118,4 +118,47 @@ RSpec.describe Ml::Model, feature_category: :mlops do
end
end
end
+
+ describe 'with_version_count' do
+ let(:model) { existing_model }
+
+ subject { described_class.with_version_count.find_by(id: model.id).version_count }
+
+ context 'when model has versions' do
+ before do
+ create(:ml_model_versions, model: model)
+ end
+
+ it { is_expected.to eq(1) }
+ end
+
+ context 'when model has no versions' do
+ let(:model) { another_existing_model }
+
+ it { is_expected.to eq(0) }
+ end
+ end
+
+ describe '#by_project_and_id' do
+ let(:id) { existing_model.id }
+ let(:project_id) { existing_model.project.id }
+
+ subject { described_class.by_project_id_and_id(project_id, id) }
+
+ context 'if exists' do
+ it { is_expected.to eq(existing_model) }
+ end
+
+ context 'if id has no match' do
+ let(:id) { non_existing_record_id }
+
+ it { is_expected.to be(nil) }
+ end
+
+ context 'if project id does not match' do
+ let(:project_id) { non_existing_record_id }
+
+ it { is_expected.to be(nil) }
+ end
+ end
end
diff --git a/spec/models/namespace/package_setting_spec.rb b/spec/models/namespace/package_setting_spec.rb
index f3fda200fda..e6096bc9267 100644
--- a/spec/models/namespace/package_setting_spec.rb
+++ b/spec/models/namespace/package_setting_spec.rb
@@ -11,13 +11,9 @@ RSpec.describe Namespace::PackageSetting, feature_category: :package_registry do
it { is_expected.to validate_presence_of(:namespace) }
describe '#maven_duplicates_allowed' do
- it { is_expected.to allow_value(true, false).for(:maven_duplicates_allowed) }
- it { is_expected.not_to allow_value(nil).for(:maven_duplicates_allowed) }
- it { is_expected.to allow_value(true, false).for(:generic_duplicates_allowed) }
- it { is_expected.not_to allow_value(nil).for(:generic_duplicates_allowed) }
- it { is_expected.to allow_value(true).for(:nuget_duplicates_allowed) }
- it { is_expected.to allow_value(false).for(:nuget_duplicates_allowed) }
- it { is_expected.not_to allow_value(nil).for(:nuget_duplicates_allowed) }
+ it { is_expected.to validate_inclusion_of(:maven_duplicates_allowed).in_array([true, false]) }
+ it { is_expected.to validate_inclusion_of(:generic_duplicates_allowed).in_array([true, false]) }
+ it { is_expected.to validate_inclusion_of(:nuget_duplicates_allowed).in_array([true, false]) }
end
describe 'regex values' do
diff --git a/spec/models/namespace_setting_spec.rb b/spec/models/namespace_setting_spec.rb
index a937a3e8988..e9822d97447 100644
--- a/spec/models/namespace_setting_spec.rb
+++ b/spec/models/namespace_setting_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe NamespaceSetting, feature_category: :groups_and_projects, type: :
end
it { is_expected.to define_enum_for(:jobs_to_be_done).with_values([:basics, :move_repository, :code_storage, :exploring, :ci, :other]).with_suffix }
- it { is_expected.to define_enum_for(:enabled_git_access_protocol).with_values([:all, :ssh, :http]).with_suffix }
+ it { is_expected.to define_enum_for(:enabled_git_access_protocol).with_suffix }
describe 'default values' do
subject(:setting) { described_class.new }
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index a0deee0f2d3..9974aac3c6c 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -206,18 +206,6 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
expect { parent.update!(name: 'Foo') }.not_to raise_error
end
end
-
- context 'when restrict_special_characters_in_namespace_path feature flag is disabled' do
- before do
- stub_feature_flags(restrict_special_characters_in_namespace_path: false)
- end
-
- it 'allows special character at the start or end of project namespace path' do
- namespace = build(:namespace, type: project_sti_name, parent: parent, path: '_path_')
-
- expect(namespace).to be_valid
- end
- end
end
describe '1 char path length' do
@@ -673,23 +661,7 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
end
context 'traversal scopes' do
- context 'recursive' do
- before do
- stub_feature_flags(use_traversal_ids: false)
- end
-
- it_behaves_like 'namespace traversal scopes'
- end
-
- context 'linear' do
- it_behaves_like 'namespace traversal scopes'
- end
-
- shared_examples 'makes recursive queries' do
- specify do
- expect { subject }.to make_queries_matching(/WITH RECURSIVE/)
- end
- end
+ it_behaves_like 'namespace traversal scopes'
shared_examples 'does not make recursive queries' do
specify do
@@ -703,14 +675,6 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
subject { described_class.where(id: namespace).self_and_descendants.load }
it_behaves_like 'does not make recursive queries'
-
- context 'when feature flag :use_traversal_ids is disabled' do
- before do
- stub_feature_flags(use_traversal_ids: false)
- end
-
- it_behaves_like 'makes recursive queries'
- end
end
describe '.self_and_descendant_ids' do
@@ -719,14 +683,6 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
subject { described_class.where(id: namespace).self_and_descendant_ids.load }
it_behaves_like 'does not make recursive queries'
-
- context 'when feature flag :use_traversal_ids is disabled' do
- before do
- stub_feature_flags(use_traversal_ids: false)
- end
-
- it_behaves_like 'makes recursive queries'
- end
end
end
@@ -845,6 +801,14 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
describe '#human_name' do
it { expect(namespace.human_name).to eq(namespace.owner_name) }
+
+ context 'when the owner is missing' do
+ before do
+ namespace.update_column(:owner_id, non_existing_record_id)
+ end
+
+ it { expect(namespace.human_name).to eq(namespace.path) }
+ end
end
describe '#any_project_has_container_registry_tags?' do
@@ -1207,70 +1171,6 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
end
end
- describe '#move_dir', :request_store do
- context 'hashed storage' do
- let_it_be(:namespace) { create(:namespace) }
- let_it_be(:project) { create(:project_empty_repo, namespace: namespace) }
-
- context 'when any project has container images' do
- let(:container_repository) { create(:container_repository) }
-
- before do
- stub_container_registry_config(enabled: true)
- stub_container_registry_tags(repository: :any, tags: ['tag'])
-
- create(:project, namespace: namespace, container_repositories: [container_repository])
-
- allow(namespace).to receive(:path_was).and_return(namespace.path)
- allow(namespace).to receive(:path).and_return('new_path')
- allow(namespace).to receive(:first_project_with_container_registry_tags).and_return(project)
- end
-
- it 'raises an error about not movable project' do
- expect { namespace.move_dir }.to raise_error(
- Gitlab::UpdatePathError, /Namespace .* cannot be moved/
- )
- end
- end
-
- it "repository directory remains unchanged if path changed" do
- before_disk_path = project.disk_path
- namespace.update!(path: namespace.full_path + '_new')
-
- expect(before_disk_path).to eq(project.disk_path)
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{project.disk_path}.git")).to be_truthy
- end
- end
-
- context 'for each project inside the namespace' do
- let!(:parent) { create(:group, name: 'mygroup', path: 'mygroup') }
- let!(:subgroup) { create(:group, name: 'mysubgroup', path: 'mysubgroup', parent: parent) }
- let!(:project_in_parent_group) { create(:project, :legacy_storage, :repository, namespace: parent, name: 'foo1') }
- let!(:hashed_project_in_subgroup) { create(:project, :repository, namespace: subgroup, name: 'foo2') }
- let!(:legacy_project_in_subgroup) { create(:project, :legacy_storage, :repository, namespace: subgroup, name: 'foo3') }
-
- it 'updates project full path in .git/config' do
- parent.update!(path: 'mygroup_new')
-
- expect(project_in_parent_group.reload.repository.full_path).to eq "mygroup_new/#{project_in_parent_group.path}"
- expect(hashed_project_in_subgroup.reload.repository.full_path).to eq "mygroup_new/mysubgroup/#{hashed_project_in_subgroup.path}"
- expect(legacy_project_in_subgroup.reload.repository.full_path).to eq "mygroup_new/mysubgroup/#{legacy_project_in_subgroup.path}"
- end
-
- it 'updates the project storage location' do
- repository_project_in_parent_group = project_in_parent_group.project_repository
- repository_hashed_project_in_subgroup = hashed_project_in_subgroup.project_repository
- repository_legacy_project_in_subgroup = legacy_project_in_subgroup.project_repository
-
- parent.update!(path: 'mygroup_moved')
-
- expect(repository_project_in_parent_group.reload.disk_path).to eq "mygroup_moved/#{project_in_parent_group.path}"
- expect(repository_hashed_project_in_subgroup.reload.disk_path).to eq hashed_project_in_subgroup.disk_path
- expect(repository_legacy_project_in_subgroup.reload.disk_path).to eq "mygroup_moved/mysubgroup/#{legacy_project_in_subgroup.path}"
- end
- end
- end
-
describe '.find_by_path_or_name' do
before do
@namespace = create(:namespace, name: 'WoW', path: 'woW')
@@ -1360,30 +1260,6 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
it { is_expected.to eq false }
end
- describe '#use_traversal_ids?' do
- let_it_be(:namespace, reload: true) { create(:namespace) }
-
- subject { namespace.use_traversal_ids? }
-
- context 'when use_traversal_ids feature flag is true' do
- before do
- stub_feature_flags(use_traversal_ids: true)
- end
-
- it { is_expected.to eq true }
-
- it_behaves_like 'disabled feature flag when traversal_ids is blank'
- end
-
- context 'when use_traversal_ids feature flag is false' do
- before do
- stub_feature_flags(use_traversal_ids: false)
- end
-
- it { is_expected.to eq false }
- end
- end
-
describe '#users_with_descendants' do
let(:user_a) { create(:user) }
let(:user_b) { create(:user) }
@@ -1487,28 +1363,14 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
end
describe '#all_projects' do
- context 'with use_traversal_ids feature flag enabled' do
- before do
- stub_feature_flags(use_traversal_ids: true)
- end
-
- include_examples '#all_projects'
-
- # Using #self_and_descendant instead of #self_and_descendant_ids can produce
- # very slow queries.
- it 'calls self_and_descendant_ids' do
- namespace = create(:group)
- expect(namespace).to receive(:self_and_descendant_ids)
- namespace.all_projects
- end
- end
-
- context 'with use_traversal_ids feature flag disabled' do
- before do
- stub_feature_flags(use_traversal_ids: false)
- end
-
- include_examples '#all_projects'
+ include_examples '#all_projects'
+
+ # Using #self_and_descendant instead of #self_and_descendant_ids can produce
+ # very slow queries.
+ it 'calls self_and_descendant_ids' do
+ namespace = create(:group)
+ expect(namespace).to receive(:self_and_descendant_ids)
+ namespace.all_projects
end
end
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index 2b26c73aa7a..5aa3ac3a2ea 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -81,6 +81,14 @@ RSpec.describe Note, feature_category: :team_planning do
end
end
+ context 'when noteable is an abuse report' do
+ subject { build(:note, noteable: build_stubbed(:abuse_report), project: nil, namespace: nil) }
+
+ it 'is valid without project or namespace' do
+ is_expected.to be_valid
+ end
+ end
+
describe 'max notes limit' do
let_it_be(:noteable) { create(:issue) }
let_it_be(:existing_note) { create(:note, project: noteable.project, noteable: noteable) }
@@ -314,34 +322,59 @@ RSpec.describe Note, feature_category: :team_planning do
end
describe '#ensure_namespace_id' do
+ context 'for issues' do
+ let!(:issue) { create(:issue) }
+
+ it 'copies the namespace_id of the issue' do
+ note = build(:note, noteable: issue)
+
+ note.valid?
+
+ expect(note.namespace_id).to eq(issue.namespace_id)
+ end
+ end
+
+ context 'for group-level work items' do
+ let!(:group) { create(:group) }
+ let!(:work_item) { create(:work_item, namespace: group) }
+
+ it 'copies the namespace_id of the work item' do
+ note = build(:note, noteable: work_item)
+
+ note.valid?
+
+ expect(note.namespace_id).to eq(group.id)
+ end
+ end
+
context 'for a project noteable' do
- let_it_be(:issue) { create(:issue) }
+ let_it_be(:merge_request) { create(:merge_request) }
it 'copies the project_namespace_id of the project' do
- note = build(:note, noteable: issue, project: issue.project)
+ note = build(:note, noteable: merge_request, project: merge_request.project)
note.valid?
- expect(note.namespace_id).to eq(issue.project.project_namespace_id)
+ expect(note.namespace_id).to eq(merge_request.project.project_namespace_id)
end
context 'when noteable is changed' do
- let_it_be(:another_issue) { create(:issue) }
+ let_it_be(:another_mr) { create(:merge_request) }
it 'updates the namespace_id' do
- note = create(:note, noteable: issue, project: issue.project)
+ note = create(:note, noteable: merge_request, project: merge_request.project)
- note.noteable = another_issue
- note.project = another_issue.project
+ note.noteable = another_mr
+ note.project = another_mr.project
note.valid?
- expect(note.namespace_id).to eq(another_issue.project.project_namespace_id)
+ expect(note.namespace_id).to eq(another_mr.project.project_namespace_id)
end
end
context 'when project is missing' do
it 'does not raise an exception' do
- note = build(:note, noteable: issue, project: nil)
+ note = build(:note, noteable: merge_request, project: nil)
expect { note.valid? }.not_to raise_error
end
@@ -1325,6 +1358,20 @@ RSpec.describe Note, feature_category: :team_planning do
end
end
+ describe '#for_abuse_report' do
+ it 'is true when the noteable is an abuse report' do
+ note = build(:note, noteable: build(:abuse_report))
+
+ expect(note).to be_for_abuse_report
+ end
+
+ it 'is not true when the noteable is not an abuse report' do
+ note = build(:note, noteable: build(:design))
+
+ expect(note).not_to be_for_abuse_report
+ end
+ end
+
describe '#to_ability_name' do
it 'returns note' do
expect(build(:note).to_ability_name).to eq('note')
diff --git a/spec/models/packages/build_info_spec.rb b/spec/models/packages/build_info_spec.rb
index db8ac605d72..9bb8062005a 100644
--- a/spec/models/packages/build_info_spec.rb
+++ b/spec/models/packages/build_info_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Packages::BuildInfo, type: :model do
+RSpec.describe Packages::BuildInfo, type: :model, feature_category: :package_registry do
describe 'relationships' do
it { is_expected.to belong_to(:package) }
it { is_expected.to belong_to(:pipeline) }
diff --git a/spec/models/packages/protection/rule_spec.rb b/spec/models/packages/protection/rule_spec.rb
index b368687e6d8..320c265239c 100644
--- a/spec/models/packages/protection/rule_spec.rb
+++ b/spec/models/packages/protection/rule_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Packages::Protection::Rule, type: :model, feature_category: :package_registry do
+ using RSpec::Parameterized::TableSyntax
+
it_behaves_like 'having unique enum values'
describe 'relationships' do
@@ -10,9 +12,19 @@ RSpec.describe Packages::Protection::Rule, type: :model, feature_category: :pack
end
describe 'enums' do
- describe '#package_type' do
- it { is_expected.to define_enum_for(:package_type).with_values(npm: Packages::Package.package_types[:npm]) }
- end
+ it { is_expected.to define_enum_for(:package_type).with_values(npm: Packages::Package.package_types[:npm]) }
+
+ it {
+ is_expected.to(
+ define_enum_for(:push_protected_up_to_access_level)
+ .with_values(
+ developer: Gitlab::Access::DEVELOPER,
+ maintainer: Gitlab::Access::MAINTAINER,
+ owner: Gitlab::Access::OWNER
+ )
+ .with_prefix(:push_protected_up_to)
+ )
+ }
end
describe 'validations' do
@@ -30,11 +42,219 @@ RSpec.describe Packages::Protection::Rule, type: :model, feature_category: :pack
describe '#push_protected_up_to_access_level' do
it { is_expected.to validate_presence_of(:push_protected_up_to_access_level) }
+ end
+ end
+
+ describe 'before_save' do
+ describe '#set_package_name_pattern_ilike_query' do
+ subject { create(:package_protection_rule, package_name_pattern: package_name_pattern) }
+
+ context 'with different package name patterns' do
+ where(:package_name_pattern, :expected_pattern_query) do
+ '@my-scope/my-package' | '@my-scope/my-package'
+ '*@my-scope/my-package-with-wildcard-start' | '%@my-scope/my-package-with-wildcard-start'
+ '@my-scope/my-package-with-wildcard-end*' | '@my-scope/my-package-with-wildcard-end%'
+ '@my-scope/*my-package-with-wildcard-inbetween' | '@my-scope/%my-package-with-wildcard-inbetween'
+ '**@my-scope/**my-package-with-wildcard-multiple**' | '%%@my-scope/%%my-package-with-wildcard-multiple%%'
+ '@my-scope/my-package-with_____underscore' | '@my-scope/my-package-with\_\_\_\_\_underscore'
+ '@my-scope/my-package-with-percent-sign-%' | '@my-scope/my-package-with-percent-sign-\%'
+ '@my-scope/my-package-with-regex-characters.+' | '@my-scope/my-package-with-regex-characters.+'
+ end
+
+ with_them do
+ it { is_expected.to have_attributes(package_name_pattern_ilike_query: expected_pattern_query) }
+ end
+ end
+ end
+ end
+
+ describe '.for_package_name' do
+ let_it_be(:package_protection_rule) do
+ create(:package_protection_rule, package_name_pattern: '@my-scope/my_package')
+ end
+
+ let_it_be(:ppr_with_wildcard_start) do
+ create(:package_protection_rule, package_name_pattern: '*@my-scope/my_package-with-wildcard-start')
+ end
+
+ let_it_be(:ppr_with_wildcard_end) do
+ create(:package_protection_rule, package_name_pattern: '@my-scope/my_package-with-wildcard-end*')
+ end
+
+ let_it_be(:ppr_with_wildcard_inbetween) do
+ create(:package_protection_rule, package_name_pattern: '@my-scope/*my_package-with-wildcard-inbetween')
+ end
+
+ let_it_be(:ppr_with_wildcard_multiples) do
+ create(:package_protection_rule, package_name_pattern: '**@my-scope/**my_package-with-wildcard-multiple**')
+ end
+
+ let_it_be(:ppr_with_underscore) do
+ create(:package_protection_rule, package_name_pattern: '@my-scope/my_package-with_____underscore')
+ end
+
+ let_it_be(:ppr_with_regex_characters) do
+ create(:package_protection_rule, package_name_pattern: '@my-scope/my_package-with-regex-characters.+')
+ end
+
+ let(:package_name) { package_protection_rule.package_name_pattern }
+
+ subject { described_class.for_package_name(package_name) }
+
+ context 'with several package protection rule scenarios' do
+ where(:package_name, :expected_package_protection_rules) do
+ '@my-scope/my_package' | [ref(:package_protection_rule)]
+ '@my-scope/my2package' | []
+ '@my-scope/my_package-2' | []
+
+ # With wildcard pattern at the start
+ '@my-scope/my_package-with-wildcard-start' | [ref(:ppr_with_wildcard_start)]
+ '@my-scope/my_package-with-wildcard-start-any' | []
+ 'prefix-@my-scope/my_package-with-wildcard-start' | [ref(:ppr_with_wildcard_start)]
+ 'prefix-@my-scope/my_package-with-wildcard-start-any' | []
+
+ # With wildcard pattern at the end
+ '@my-scope/my_package-with-wildcard-end' | [ref(:ppr_with_wildcard_end)]
+ '@my-scope/my_package-with-wildcard-end:1234567890' | [ref(:ppr_with_wildcard_end)]
+ 'prefix-@my-scope/my_package-with-wildcard-end' | []
+ 'prefix-@my-scope/my_package-with-wildcard-end:1234567890' | []
+
+ # With wildcard pattern inbetween
+ '@my-scope/my_package-with-wildcard-inbetween' | [ref(:ppr_with_wildcard_inbetween)]
+ '@my-scope/any-my_package-with-wildcard-inbetween' | [ref(:ppr_with_wildcard_inbetween)]
+ '@my-scope/any-my_package-my_package-wildcard-inbetween-any' | []
+
+ # With multiple wildcard pattern are used
+ '@my-scope/my_package-with-wildcard-multiple' | [ref(:ppr_with_wildcard_multiples)]
+ 'prefix-@my-scope/any-my_package-with-wildcard-multiple-any' | [ref(:ppr_with_wildcard_multiples)]
+ '****@my-scope/****my_package-with-wildcard-multiple****' | [ref(:ppr_with_wildcard_multiples)]
+ 'prefix-@other-scope/any-my_package-with-wildcard-multiple-any' | []
+
+ # With underscore
+ '@my-scope/my_package-with_____underscore' | [ref(:ppr_with_underscore)]
+ '@my-scope/my_package-with_any_underscore' | []
+
+ '@my-scope/my_package-with-regex-characters.+' | [ref(:ppr_with_regex_characters)]
+ '@my-scope/my_package-with-regex-characters.' | []
+ '@my-scope/my_package-with-regex-characters' | []
+ '@my-scope/my_package-with-regex-characters-any' | []
+
+ # Special cases
+ nil | []
+ '' | []
+ 'any_package' | []
+ end
+
+ with_them do
+ it { is_expected.to match_array(expected_package_protection_rules) }
+ end
+ end
+
+ context 'with multiple matching package protection rules' do
+ let!(:package_protection_rule_second_match) do
+ create(:package_protection_rule, package_name_pattern: "#{package_name}*")
+ end
+
+ it { is_expected.to contain_exactly(package_protection_rule_second_match, package_protection_rule) }
+ end
+ end
+
+ describe '.push_protected_from?' do
+ let_it_be(:project_with_ppr) { create(:project) }
+ let_it_be(:project_without_ppr) { create(:project) }
+
+ let_it_be(:ppr_for_developer) do
+ create(:package_protection_rule,
+ package_name_pattern: '@my-scope/my-package-stage*',
+ project: project_with_ppr,
+ package_type: :npm,
+ push_protected_up_to_access_level: :developer
+ )
+ end
+
+ let_it_be(:ppr_for_maintainer) do
+ create(:package_protection_rule,
+ package_name_pattern: '@my-scope/my-package-prod*',
+ project: project_with_ppr,
+ package_type: :npm,
+ push_protected_up_to_access_level: :maintainer
+ )
+ end
+
+ let_it_be(:ppr_owner) do
+ create(:package_protection_rule,
+ package_name_pattern: '@my-scope/my-package-release*',
+ project: project_with_ppr,
+ package_type: :npm,
+ push_protected_up_to_access_level: :owner
+ )
+ end
+
+ let_it_be(:ppr_2_for_developer) do
+ create(:package_protection_rule,
+ package_name_pattern: '@my-scope/my-package-*',
+ project: project_with_ppr,
+ package_type: :npm,
+ push_protected_up_to_access_level: :developer
+ )
+ end
+
+ subject do
+ project
+ .package_protection_rules
+ .push_protected_from?(
+ access_level: access_level,
+ package_name: package_name,
+ package_type: package_type
+ )
+ end
+
+ describe "with different users and protection levels" do
+ # rubocop:disable Layout/LineLength
+ where(:project, :access_level, :package_name, :package_type, :push_protected) do
+ ref(:project_with_ppr) | Gitlab::Access::REPORTER | '@my-scope/my-package-stage-sha-1234' | :npm | true
+ ref(:project_with_ppr) | :developer | '@my-scope/my-package-stage-sha-1234' | :npm | true
+ ref(:project_with_ppr) | :maintainer | '@my-scope/my-package-stage-sha-1234' | :npm | false
+ ref(:project_with_ppr) | :maintainer | '@my-scope/my-package-stage-sha-1234' | :npm | false
+ ref(:project_with_ppr) | :owner | '@my-scope/my-package-stage-sha-1234' | :npm | false
+ ref(:project_with_ppr) | Gitlab::Access::ADMIN | '@my-scope/my-package-stage-sha-1234' | :npm | false
+
+ ref(:project_with_ppr) | :developer | '@my-scope/my-package-prod-sha-1234' | :npm | true
+ ref(:project_with_ppr) | :maintainer | '@my-scope/my-package-prod-sha-1234' | :npm | true
+ ref(:project_with_ppr) | :owner | '@my-scope/my-package-prod-sha-1234' | :npm | false
+ ref(:project_with_ppr) | Gitlab::Access::ADMIN | '@my-scope/my-package-prod-sha-1234' | :npm | false
+
+ ref(:project_with_ppr) | :developer | '@my-scope/my-package-release-v1' | :npm | true
+ ref(:project_with_ppr) | :owner | '@my-scope/my-package-release-v1' | :npm | true
+ ref(:project_with_ppr) | Gitlab::Access::ADMIN | '@my-scope/my-package-release-v1' | :npm | false
+
+ ref(:project_with_ppr) | :developer | '@my-scope/my-package-any-suffix' | :npm | true
+ ref(:project_with_ppr) | :maintainer | '@my-scope/my-package-any-suffix' | :npm | false
+ ref(:project_with_ppr) | :owner | '@my-scope/my-package-any-suffix' | :npm | false
+
+ # For non-matching package_name
+ ref(:project_with_ppr) | :developer | '@my-scope/non-matching-package' | :npm | false
+
+ # For non-matching package_type
+ ref(:project_with_ppr) | :developer | '@my-scope/my-package-any-suffix' | :conan | false
+
+ # For no access level
+ ref(:project_with_ppr) | Gitlab::Access::NO_ACCESS | '@my-scope/my-package-prod' | :npm | true
+
+ # Edge cases
+ ref(:project_with_ppr) | 0 | '' | nil | true
+ ref(:project_with_ppr) | nil | nil | nil | true
+
+ # For projects that have no package protection rules
+ ref(:project_without_ppr) | :developer | '@my-scope/my-package-prod' | :npm | false
+ ref(:project_without_ppr) | :maintainer | '@my-scope/my-package-prod' | :npm | false
+ ref(:project_without_ppr) | :owner | '@my-scope/my-package-prod' | :npm | false
+ end
+ # rubocop:enable Layout/LineLength
- it {
- is_expected.to validate_inclusion_of(:push_protected_up_to_access_level).in_array([Gitlab::Access::DEVELOPER,
- Gitlab::Access::MAINTAINER, Gitlab::Access::OWNER])
- }
+ with_them do
+ it { is_expected.to eq push_protected }
+ end
end
end
end
diff --git a/spec/models/pages/lookup_path_spec.rb b/spec/models/pages/lookup_path_spec.rb
index 62152f9d3a4..08ba823f8fa 100644
--- a/spec/models/pages/lookup_path_spec.rb
+++ b/spec/models/pages/lookup_path_spec.rb
@@ -4,8 +4,10 @@ require 'spec_helper'
RSpec.describe Pages::LookupPath, feature_category: :pages do
let(:project) { create(:project, :pages_private, pages_https_only: true) }
+ let(:trim_prefix) { nil }
+ let(:domain) { nil }
- subject(:lookup_path) { described_class.new(project) }
+ subject(:lookup_path) { described_class.new(project, trim_prefix: trim_prefix, domain: domain) }
before do
stub_pages_setting(
@@ -30,11 +32,7 @@ RSpec.describe Pages::LookupPath, feature_category: :pages do
end
describe '#https_only' do
- subject(:lookup_path) { described_class.new(project, domain: domain) }
-
context 'when no domain provided' do
- let(:domain) { nil }
-
it 'delegates to Project#pages_https_only?' do
expect(lookup_path.https_only).to eq(true)
end
@@ -101,41 +99,26 @@ RSpec.describe Pages::LookupPath, feature_category: :pages do
end
end
end
-
- context 'when deployment were created during migration' do
- before do
- allow(deployment).to receive(:migrated?).and_return(true)
- end
-
- it 'uses deployment from object storage' do
- freeze_time do
- expect(source).to eq(
- type: 'zip',
- path: deployment.file.url(expire_at: 1.day.from_now),
- global_id: "gid://gitlab/PagesDeployment/#{deployment.id}",
- sha256: deployment.file_sha256,
- file_size: deployment.size,
- file_count: deployment.file_count
- )
- end
- end
- end
end
end
describe '#prefix' do
- it 'returns "/" for pages group root projects' do
- project = instance_double(Project, full_path: "namespace/namespace.example.com")
- lookup_path = described_class.new(project, trim_prefix: 'mygroup')
+ let(:trim_prefix) { 'mygroup' }
+
+ context 'when pages group root projects' do
+ let(:project) { instance_double(Project, full_path: "namespace/namespace.example.com") }
- expect(lookup_path.prefix).to eq('/')
+ it 'returns "/"' do
+ expect(lookup_path.prefix).to eq('/')
+ end
end
- it 'returns the project full path with the provided prefix removed' do
- project = instance_double(Project, full_path: 'mygroup/myproject')
- lookup_path = described_class.new(project, trim_prefix: 'mygroup')
+ context 'when pages in the given prefix' do
+ let(:project) { instance_double(Project, full_path: 'mygroup/myproject') }
- expect(lookup_path.prefix).to eq('/myproject/')
+ it 'returns the project full path with the provided prefix removed' do
+ expect(lookup_path.prefix).to eq('/myproject/')
+ end
end
end
@@ -157,12 +140,18 @@ RSpec.describe Pages::LookupPath, feature_category: :pages do
expect(lookup_path.unique_host).to eq('unique-domain.example.com')
end
+
+ context 'when there is domain provided' do
+ let(:domain) { instance_double(PagesDomain) }
+
+ it 'returns nil' do
+ expect(lookup_path.unique_host).to eq(nil)
+ end
+ end
end
end
describe '#root_directory' do
- subject(:lookup_path) { described_class.new(project) }
-
context 'when there is no deployment' do
it 'returns nil' do
expect(lookup_path.root_directory).to be_nil
diff --git a/spec/models/pages_deployment_spec.rb b/spec/models/pages_deployment_spec.rb
index 916197fe5e9..e74c7ee8612 100644
--- a/spec/models/pages_deployment_spec.rb
+++ b/spec/models/pages_deployment_spec.rb
@@ -28,16 +28,6 @@ RSpec.describe PagesDeployment, feature_category: :pages do
end
end
- describe '.migrated_from_legacy_storage' do
- it 'only returns migrated deployments' do
- migrated_deployment = create_migrated_deployment(project)
- # create one other deployment
- create(:pages_deployment, project: project)
-
- expect(described_class.migrated_from_legacy_storage).to eq([migrated_deployment])
- end
- end
-
context 'with deployments stored locally and remotely' do
before do
stub_pages_object_storage(::Pages::DeploymentUploader)
@@ -132,34 +122,6 @@ RSpec.describe PagesDeployment, feature_category: :pages do
end
end
- describe '#migrated?' do
- it 'returns false for normal deployment' do
- deployment = create(:pages_deployment)
-
- expect(deployment.migrated?).to eq(false)
- end
-
- it 'returns true for migrated deployment' do
- deployment = create_migrated_deployment(project)
-
- expect(deployment.migrated?).to eq(true)
- end
- end
-
- def create_migrated_deployment(project)
- public_path = File.join(project.pages_path, "public")
- FileUtils.mkdir_p(public_path)
- File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
- f.write("Hello!")
- end
-
- expect(::Pages::MigrateLegacyStorageToDeploymentService.new(project).execute[:status]).to eq(:success)
-
- project.reload.pages_metadatum.pages_deployment
- ensure
- FileUtils.rm_rf(public_path)
- end
-
describe 'default for file_store' do
let(:deployment) do
filepath = Rails.root.join("spec/fixtures/pages.zip")
diff --git a/spec/models/pages_domain_spec.rb b/spec/models/pages_domain_spec.rb
index cd740bca502..5a4eca11f71 100644
--- a/spec/models/pages_domain_spec.rb
+++ b/spec/models/pages_domain_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe PagesDomain do
+RSpec.describe PagesDomain, feature_category: :pages do
using RSpec::Parameterized::TableSyntax
subject(:pages_domain) { described_class.new }
diff --git a/spec/models/preloaders/project_root_ancestor_preloader_spec.rb b/spec/models/preloaders/project_root_ancestor_preloader_spec.rb
index 2462e305597..b690bd3162c 100644
--- a/spec/models/preloaders/project_root_ancestor_preloader_spec.rb
+++ b/spec/models/preloaders/project_root_ancestor_preloader_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Preloaders::ProjectRootAncestorPreloader do
+RSpec.describe Preloaders::ProjectRootAncestorPreloader, feature_category: :system_access do
let_it_be(:root_parent1) { create(:group, :private, name: 'root-1', path: 'root-1') }
let_it_be(:root_parent2) { create(:group, name: 'root-2', path: 'root-2') }
let_it_be(:guest_project) { create(:project, name: 'public guest', path: 'public-guest') }
@@ -43,87 +43,47 @@ RSpec.describe Preloaders::ProjectRootAncestorPreloader do
end
end
- context 'when use_traversal_ids FF is enabled' do
- context 'when the preloader is used' do
- context 'when no additional preloads are provided' do
- before do
- preload_ancestors(:group)
- end
-
- it_behaves_like 'executes N matching DB queries', 0
- end
-
- context 'when additional preloads are provided' do
- let(:additional_preloads) { [:route] }
- let(:root_query_regex) { /\ASELECT.+FROM "routes" WHERE "routes"."source_id" = \d+/ }
-
- before do
- preload_ancestors
- end
-
- it_behaves_like 'executes N matching DB queries', 0, :full_path
- end
-
- context 'when projects are an array and not an ActiveRecord::Relation' do
- before do
- described_class.new(projects, :namespace, additional_preloads).execute
- end
-
- it_behaves_like 'executes N matching DB queries', 4
- end
- end
-
- context 'when the preloader is not used' do
- it_behaves_like 'executes N matching DB queries', 4
- end
-
- context 'when using a :group sti name and passing projects in a user namespace' do
- let(:projects) { [private_developer_project] }
- let(:additional_preloads) { [:ip_restrictions, :saml_provider] }
-
- it 'does not load a nil value for root_ancestor' do
+ context 'when the preloader is used' do
+ context 'when no additional preloads are provided' do
+ before do
preload_ancestors(:group)
-
- expect(pristine_projects.first.root_ancestor).to eq(private_developer_project.root_ancestor)
end
- end
- end
- context 'when use_traversal_ids FF is disabled' do
- before do
- stub_feature_flags(use_traversal_ids: false)
+ it_behaves_like 'executes N matching DB queries', 0
end
- context 'when the preloader is used' do
+ context 'when additional preloads are provided' do
+ let(:additional_preloads) { [:route] }
+ let(:root_query_regex) { /\ASELECT.+FROM "routes" WHERE "routes"."source_id" = \d+/ }
+
before do
preload_ancestors
end
- context 'when no additional preloads are provided' do
- it_behaves_like 'executes N matching DB queries', 4
- end
-
- context 'when additional preloads are provided' do
- let(:additional_preloads) { [:route] }
- let(:root_query_regex) { /\ASELECT.+FROM "routes" WHERE "routes"."source_id" = \d+/ }
+ it_behaves_like 'executes N matching DB queries', 0, :full_path
+ end
- it_behaves_like 'executes N matching DB queries', 4, :full_path
+ context 'when projects are an array and not an ActiveRecord::Relation' do
+ before do
+ described_class.new(projects, :namespace, additional_preloads).execute
end
- end
- context 'when the preloader is not used' do
it_behaves_like 'executes N matching DB queries', 4
end
+ end
- context 'when using a :group sti name and passing projects in a user namespace' do
- let(:projects) { [private_developer_project] }
- let(:additional_preloads) { [:ip_restrictions, :saml_provider] }
+ context 'when the preloader is not used' do
+ it_behaves_like 'executes N matching DB queries', 4
+ end
- it 'does not load a nil value for root_ancestor' do
- preload_ancestors(:group)
+ context 'when using a :group sti name and passing projects in a user namespace' do
+ let(:projects) { [private_developer_project] }
+ let(:additional_preloads) { [:ip_restrictions, :saml_provider] }
- expect(pristine_projects.first.root_ancestor).to eq(private_developer_project.root_ancestor)
- end
+ it 'does not load a nil value for root_ancestor' do
+ preload_ancestors(:group)
+
+ expect(pristine_projects.first.root_ancestor).to eq(private_developer_project.root_ancestor)
end
end
diff --git a/spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb b/spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb
index 5befa3ab66f..3dc409cbcc2 100644
--- a/spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb
+++ b/spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb
@@ -34,46 +34,31 @@ RSpec.describe Preloaders::UserMaxAccessLevelInGroupsPreloader, feature_category
let(:groups) { [group1, group2, group3, child_maintainer, child_indirect_access] }
- context 'when traversal_ids feature flag is disabled' do
- it_behaves_like 'executes N max member permission queries to the DB' do
- before do
- stub_feature_flags(use_traversal_ids: false)
- described_class.new(groups, user).execute
- end
-
- # One query for group with no access and another one per group where the user is not a direct member
- let(:expected_query_count) { 2 }
+ it_behaves_like 'executes N max member permission queries to the DB' do
+ before do
+ described_class.new(groups, user).execute
end
- end
-
- context 'when traversal_ids feature flag is enabled' do
- it_behaves_like 'executes N max member permission queries to the DB' do
- before do
- stub_feature_flags(use_traversal_ids: true)
- described_class.new(groups, user).execute
- end
- let(:expected_query_count) { 0 }
- end
+ let(:expected_query_count) { 0 }
+ end
- context 'for groups arising from group shares' do
- let_it_be(:group4) { create(:group, :private) }
- let_it_be(:group4_subgroup) { create(:group, :private, parent: group4) }
+ context 'for groups arising from group shares' do
+ let_it_be(:group4) { create(:group, :private) }
+ let_it_be(:group4_subgroup) { create(:group, :private, parent: group4) }
- let(:groups) { [group4, group4_subgroup] }
+ let(:groups) { [group4, group4_subgroup] }
- before do
- create(:group_group_link, :guest, shared_with_group: group1, shared_group: group4)
- end
+ before do
+ create(:group_group_link, :guest, shared_with_group: group1, shared_group: group4)
+ end
- it 'sets the right access level in cache for groups arising from group shares' do
- described_class.new(groups, user).execute
+ it 'sets the right access level in cache for groups arising from group shares' do
+ described_class.new(groups, user).execute
- groups.each do |group|
- cached_access_level = group.max_member_access_for_user(user)
+ groups.each do |group|
+ cached_access_level = group.max_member_access_for_user(user)
- expect(cached_access_level).to eq(Gitlab::Access::GUEST)
- end
+ expect(cached_access_level).to eq(Gitlab::Access::GUEST)
end
end
end
diff --git a/spec/models/project_authorization_spec.rb b/spec/models/project_authorization_spec.rb
index 9fed05342aa..a5f29fcbe8b 100644
--- a/spec/models/project_authorization_spec.rb
+++ b/spec/models/project_authorization_spec.rb
@@ -83,8 +83,10 @@ RSpec.describe ProjectAuthorization, feature_category: :groups_and_projects do
end
describe 'scopes' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, namespace: user.namespace) }
+
describe '.non_guests' do
- let_it_be(:project) { create(:project) }
let_it_be(:project_original_owner_authorization) { project.owner.project_authorizations.first }
let_it_be(:project_authorization_guest) { create(:project_authorization, :guest, project: project) }
let_it_be(:project_authorization_reporter) { create(:project_authorization, :reporter, project: project) }
@@ -100,6 +102,28 @@ RSpec.describe ProjectAuthorization, feature_category: :groups_and_projects do
].map(&:attributes))
end
end
+
+ describe '.for_project' do
+ let_it_be(:project_2) { create(:project, namespace: user.namespace) }
+ let_it_be(:project_3) { create(:project, namespace: user.namespace) }
+
+ let_it_be(:project_authorization_3) { project_3.project_authorizations.first }
+ let_it_be(:project_authorization_2) { project_2.project_authorizations.first }
+ let_it_be(:project_authorization) { project.project_authorizations.first }
+
+ it 'returns all records for the project' do
+ expect(described_class.for_project(project).map(&:attributes)).to match_array([
+ project_authorization
+ ].map(&:attributes))
+ end
+
+ it 'returns all records for multiple projects' do
+ expect(described_class.for_project([project, project_3]).map(&:attributes)).to match_array([
+ project_authorization,
+ project_authorization_3
+ ].map(&:attributes))
+ end
+ end
end
describe '.insert_all' do
diff --git a/spec/models/project_pages_metadatum_spec.rb b/spec/models/project_pages_metadatum_spec.rb
deleted file mode 100644
index 31a533e0363..00000000000
--- a/spec/models/project_pages_metadatum_spec.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ProjectPagesMetadatum do
- describe '.only_on_legacy_storage' do
- it 'returns only deployed records without deployment' do
- create(:project) # without pages deployed
-
- legacy_storage_project = create(:project)
- legacy_storage_project.mark_pages_as_deployed
-
- project_with_deployment = create(:project)
- deployment = create(:pages_deployment, project: project_with_deployment)
- project_with_deployment.mark_pages_as_deployed
- project_with_deployment.update_pages_deployment!(deployment)
-
- expect(described_class.only_on_legacy_storage).to eq([legacy_storage_project.pages_metadatum])
- end
- end
-end
diff --git a/spec/models/project_setting_spec.rb b/spec/models/project_setting_spec.rb
index 3b890e75064..719e51018ac 100644
--- a/spec/models/project_setting_spec.rb
+++ b/spec/models/project_setting_spec.rb
@@ -26,8 +26,7 @@ RSpec.describe ProjectSetting, type: :model, feature_category: :groups_and_proje
it { is_expected.to allow_value([]).for(:target_platforms) }
it { is_expected.to validate_length_of(:issue_branch_template).is_at_most(255) }
- it { is_expected.not_to allow_value(nil).for(:suggested_reviewers_enabled) }
- it { is_expected.to allow_value(true, false).for(:suggested_reviewers_enabled) }
+ it { is_expected.to validate_inclusion_of(:suggested_reviewers_enabled).in_array([true, false]) }
it 'allows any combination of the allowed target platforms' do
valid_target_platform_combinations.each do |target_platforms|
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 46bf80b1e8f..c27ed2cc82c 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -103,6 +103,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
it { is_expected.to have_one(:mock_monitoring_integration) }
it { is_expected.to have_one(:service_desk_custom_email_verification).class_name('ServiceDesk::CustomEmailVerification') }
it { is_expected.to have_one(:container_registry_data_repair_detail).class_name('ContainerRegistry::DataRepairDetail') }
+ it { is_expected.to have_many(:container_registry_protection_rules).class_name('ContainerRegistry::Protection::Rule') }
it { is_expected.to have_many(:commit_statuses) }
it { is_expected.to have_many(:ci_pipelines) }
it { is_expected.to have_many(:ci_refs) }
@@ -820,6 +821,28 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
end
+ describe 'name format validation' do
+ context 'name is unchanged' do
+ let_it_be(:invalid_path_project) do
+ project = create(:project)
+ project.update_attribute(:name, '.invalid_name')
+ project
+ end
+
+ it 'does not raise validation error for name for existing project' do
+ expect { invalid_path_project.update!(description: 'Foo') }.not_to raise_error
+ end
+ end
+
+ %w[. - $].each do |special_character|
+ it "rejects a name starting with '#{special_character}'" do
+ project = build(:project, name: "#{special_character}foo")
+
+ expect(project).not_to be_valid
+ end
+ end
+ end
+
describe 'path validation' do
it 'allows paths reserved on the root namespace' do
project = build(:project, path: 'api')
@@ -2218,7 +2241,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
context 'when the Slack app setting is not enabled' do
before do
stub_application_setting(slack_app_enabled: false)
- allow(Rails.env).to receive(:test?).and_return(false, true)
+ allow(Rails.env).to receive(:test?).and_return(false)
end
it 'includes all projects' do
@@ -5124,28 +5147,6 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
end
- describe '#pages_available?' do
- let(:project) { create(:project, group: group) }
-
- subject { project.pages_available? }
-
- before do
- allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
- end
-
- context 'when the project is in a top level namespace' do
- let(:group) { create(:group) }
-
- it { is_expected.to be(true) }
- end
-
- context 'when the project is in a subgroup' do
- let(:group) { create(:group, :nested) }
-
- it { is_expected.to be(true) }
- end
- end
-
describe '#remove_private_deploy_keys' do
let!(:project) { create(:project) }
@@ -5296,62 +5297,6 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
expect(project.hashed_storage?(:repository)).to be_falsey
end
end
-
- describe '#pages_path' do
- it 'returns a path where pages are stored' do
- expect(project.pages_path).to eq(File.join(Settings.pages.path, project.namespace.full_path, project.path))
- end
- end
-
- describe '#migrate_to_hashed_storage!' do
- let(:project) { create(:project, :empty_repo, :legacy_storage) }
-
- it 'returns true' do
- expect(project.migrate_to_hashed_storage!).to be_truthy
- end
-
- it 'does not run validation' do
- expect(project).not_to receive(:valid?)
-
- project.migrate_to_hashed_storage!
- end
-
- it 'schedules HashedStorage::ProjectMigrateWorker with delayed start when the project repo is in use' do
- Gitlab::ReferenceCounter.new(Gitlab::GlRepository::PROJECT.identifier_for_container(project)).increase
-
- expect(HashedStorage::ProjectMigrateWorker).to receive(:perform_in)
-
- project.migrate_to_hashed_storage!
- end
-
- it 'schedules HashedStorage::ProjectMigrateWorker with delayed start when the wiki repo is in use' do
- Gitlab::ReferenceCounter.new(Gitlab::GlRepository::WIKI.identifier_for_container(project.wiki)).increase
-
- expect(HashedStorage::ProjectMigrateWorker).to receive(:perform_in)
-
- project.migrate_to_hashed_storage!
- end
-
- it 'schedules HashedStorage::ProjectMigrateWorker' do
- expect(HashedStorage::ProjectMigrateWorker).to receive(:perform_async).with(project.id)
-
- project.migrate_to_hashed_storage!
- end
- end
-
- describe '#rollback_to_legacy_storage!' do
- let(:project) { create(:project, :empty_repo, :legacy_storage) }
-
- it 'returns nil' do
- expect(project.rollback_to_legacy_storage!).to be_nil
- end
-
- it 'does not run validations' do
- expect(project).not_to receive(:valid?)
-
- project.rollback_to_legacy_storage!
- end
- end
end
context 'hashed storage' do
@@ -5391,58 +5336,6 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
expect(project.disk_path).to eq(hashed_path)
end
end
-
- describe '#pages_path' do
- it 'returns a path where pages are stored' do
- expect(project.pages_path).to eq(File.join(Settings.pages.path, project.namespace.full_path, project.path))
- end
- end
-
- describe '#migrate_to_hashed_storage!' do
- let(:project) { create(:project, :repository, skip_disk_validation: true) }
-
- it 'returns nil' do
- expect(project.migrate_to_hashed_storage!).to be_nil
- end
-
- it 'does not flag as read-only' do
- expect { project.migrate_to_hashed_storage! }.not_to change { project.repository_read_only }
- end
-
- context 'when partially migrated' do
- it 'enqueues a job' do
- project = create(:project, storage_version: 1, skip_disk_validation: true)
-
- Sidekiq::Testing.fake! do
- expect { project.migrate_to_hashed_storage! }.to change(HashedStorage::ProjectMigrateWorker.jobs, :size).by(1)
- end
- end
- end
- end
-
- describe '#rollback_to_legacy_storage!' do
- let(:project) { create(:project, :repository, skip_disk_validation: true) }
-
- it 'returns true' do
- expect(project.rollback_to_legacy_storage!).to be_truthy
- end
-
- it 'does not run validations' do
- expect(project).not_to receive(:valid?)
-
- project.rollback_to_legacy_storage!
- end
-
- it 'does not flag as read-only' do
- expect { project.rollback_to_legacy_storage! }.not_to change { project.repository_read_only }
- end
-
- it 'enqueues a job' do
- Sidekiq::Testing.fake! do
- expect { project.rollback_to_legacy_storage! }.to change(HashedStorage::ProjectRollbackWorker.jobs, :size).by(1)
- end
- end
- end
end
describe '#has_ci?' do
@@ -6908,6 +6801,17 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
end
+ describe '.with_package_registry_enabled' do
+ subject { described_class.with_package_registry_enabled }
+
+ it 'returns projects with the package registry enabled' do
+ project_1 = create(:project)
+ create(:project, package_registry_access_level: ProjectFeature::DISABLED, packages_enabled: false)
+
+ expect(subject).to contain_exactly(project_1)
+ end
+ end
+
describe '.deployments' do
subject { project.deployments }
@@ -7435,7 +7339,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
end
- describe '#has_pool_repsitory?' do
+ describe '#has_pool_repository?' do
it 'returns false when it does not have a pool repository' do
subject = create(:project, :repository)
@@ -8807,16 +8711,6 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
end
- describe '#content_editor_on_issues_feature_flag_enabled?' do
- let_it_be(:group_project) { create(:project, :in_subgroup) }
-
- it_behaves_like 'checks parent group feature flag' do
- let(:feature_flag_method) { :content_editor_on_issues_feature_flag_enabled? }
- let(:feature_flag) { :content_editor_on_issues }
- let(:subject_project) { group_project }
- end
- end
-
describe '#work_items_mvc_feature_flag_enabled?' do
let_it_be(:group_project) { create(:project, :in_subgroup) }
@@ -9274,4 +9168,11 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
name: name
)
end
+
+ context 'with loose foreign key on projects.creator_id' do
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let_it_be(:parent) { create(:user) }
+ let_it_be(:model) { create(:project, creator: parent) }
+ end
+ end
end
diff --git a/spec/models/project_team_spec.rb b/spec/models/project_team_spec.rb
index e557990c7e9..10a2e967b14 100644
--- a/spec/models/project_team_spec.rb
+++ b/spec/models/project_team_spec.rb
@@ -328,19 +328,6 @@ RSpec.describe ProjectTeam, feature_category: :groups_and_projects do
expect(project.team.reporter?(user1)).to be(true)
expect(project.team.reporter?(user2)).to be(true)
end
-
- context 'when `tasks_to_be_done` and `tasks_project_id` are passed' do
- before do
- project.team.add_members([user1], :developer, tasks_to_be_done: %w(ci code), tasks_project_id: project.id)
- end
-
- it 'creates a member_task with the correct attributes', :aggregate_failures do
- member = project.project_members.last
-
- expect(member.tasks_to_be_done).to match_array([:ci, :code])
- expect(member.member_task.project).to eq(project)
- end
- end
end
describe '#add_member' do
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index af7457c78e2..2265d1b39af 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -3845,11 +3845,50 @@ RSpec.describe Repository, feature_category: :source_code_management do
end
context 'when a Gitlab::Git::CommandError is raised' do
- it 'returns nil' do
+ before do
expect(repository.raw_repository)
.to receive(:get_patch_id).and_raise(Gitlab::Git::CommandError)
+ end
- expect(repository.get_patch_id('HEAD', "f" * 40)).to be_nil
+ it 'returns nil' do
+ expect(repository.get_patch_id('HEAD', 'HEAD')).to be_nil
+ end
+
+ it 'reports the exception' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(
+ instance_of(Gitlab::Git::CommandError),
+ project_id: repository.project.id,
+ old_revision: 'HEAD',
+ new_revision: 'HEAD'
+ )
+
+ repository.get_patch_id('HEAD', 'HEAD')
+ end
+ end
+
+ context 'when a Gitlab::Git::Repository::NoRepository is raised' do
+ before do
+ expect(repository.raw_repository)
+ .to receive(:get_patch_id).and_raise(Gitlab::Git::Repository::NoRepository)
+ end
+
+ it 'returns nil' do
+ expect(repository.get_patch_id('HEAD', 'f' * 40)).to be_nil
+ end
+
+ it 'reports the exception' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(
+ instance_of(Gitlab::Git::Repository::NoRepository),
+ project_id: repository.project.id,
+ old_revision: 'HEAD',
+ new_revision: 'HEAD'
+ )
+
+ repository.get_patch_id('HEAD', 'HEAD')
end
end
end
@@ -3942,4 +3981,61 @@ RSpec.describe Repository, feature_category: :source_code_management do
end
end
end
+
+ describe '#get_file_attributes' do
+ let(:project) do
+ create(:project, :custom_repo, files: {
+ '.gitattributes' => gitattr_content,
+ 'file1.txt' => 'test content'
+ })
+ end
+
+ let(:gitattr_content) { '' }
+ let(:rev) { 'master' }
+ let(:paths) { ['file1.txt', 'README'] }
+ let(:attrs) { %w[text diff] }
+
+ subject(:file_attributes) { repository.get_file_attributes(rev, paths, attrs) }
+
+ context 'when the given attributes are defined' do
+ let(:gitattr_content) { "* -text\n*.txt text\n*.txt diff" }
+
+ it 'returns expected attributes' do
+ expect(file_attributes.count).to eq 3
+ expect(file_attributes[0]).to eq({ path: 'file1.txt', attribute: 'text', value: 'set' })
+ expect(file_attributes[1]).to eq({ path: 'file1.txt', attribute: 'diff', value: 'set' })
+ expect(file_attributes[2]).to eq({ path: 'README', attribute: 'text', value: 'unset' })
+ end
+ end
+
+ context 'when the attribute is not defined for a given file' do
+ let(:gitattr_content) { "*.txt text" }
+
+ let(:rev) { 'master' }
+ let(:paths) { ['README'] }
+ let(:attrs) { ['text'] }
+
+ it 'returns an empty array' do
+ expect(file_attributes).to eq []
+ end
+ end
+
+ context 'when revision is an empty string' do
+ let(:rev) { '' }
+
+ it { expect { file_attributes }.to raise_error(ArgumentError) }
+ end
+
+ context 'when paths list is empty' do
+ let(:paths) { [] }
+
+ it { expect { file_attributes }.to raise_error(ArgumentError) }
+ end
+
+ context 'when attributes list is empty' do
+ let(:attrs) { [] }
+
+ it { expect { file_attributes }.to raise_error(ArgumentError) }
+ end
+ end
end
diff --git a/spec/models/resource_state_event_spec.rb b/spec/models/resource_state_event_spec.rb
index 5bd8b664d23..5a3f21631ca 100644
--- a/spec/models/resource_state_event_spec.rb
+++ b/spec/models/resource_state_event_spec.rb
@@ -61,7 +61,7 @@ RSpec.describe ResourceStateEvent, feature_category: :team_planning, type: :mode
it_behaves_like 'internal event tracking' do
subject(:service_action) { close_issue }
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_CLOSED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_CLOSED }
let(:project) { issue.project }
let(:namespace) { issue.project.namespace }
let(:user) { issue.author }
@@ -86,7 +86,7 @@ RSpec.describe ResourceStateEvent, feature_category: :team_planning, type: :mode
it_behaves_like 'internal event tracking' do
subject(:service_action) { reopen_issue }
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_REOPENED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_REOPENED }
let(:project) { issue.project }
let(:user) { issue.author }
let(:namespace) { issue.project.namespace }
diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb
index 4c6f1476481..ec2dfb2634f 100644
--- a/spec/models/snippet_spec.rb
+++ b/spec/models/snippet_spec.rb
@@ -487,6 +487,18 @@ RSpec.describe Snippet do
end
end
+ describe '.without_created_by_banned_user', feature_category: :insider_threat do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:banned_user) { create(:user, :banned) }
+
+ let_it_be(:snippet) { create(:snippet, author: user) }
+ let_it_be(:snippet_by_banned_user) { create(:snippet, author: banned_user) }
+
+ subject(:without_created_by_banned_user) { described_class.without_created_by_banned_user }
+
+ it { is_expected.to match_array(snippet) }
+ end
+
describe '#participants' do
let_it_be(:project) { create(:project, :public) }
let_it_be(:snippet) { create(:snippet, content: 'foo', project: project) }
@@ -962,4 +974,30 @@ RSpec.describe Snippet do
it_behaves_like 'can move repository storage' do
let_it_be(:container) { create(:snippet, :repository) }
end
+
+ describe '#hidden_due_to_author_ban?', feature_category: :insider_threat do
+ let(:snippet) { build(:snippet, author: author) }
+
+ subject(:hidden_due_to_author_ban) { snippet.hidden_due_to_author_ban? }
+
+ context 'when the author is not banned' do
+ let_it_be(:author) { build(:user) }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when author is banned' do
+ let_it_be(:author) { build(:user, :banned) }
+
+ it { is_expected.to eq(true) }
+
+ context 'when the `hide_snippets_of_banned_users` feature flag is disabled' do
+ before do
+ stub_feature_flags(hide_snippets_of_banned_users: false)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
+ end
end
diff --git a/spec/models/todo_spec.rb b/spec/models/todo_spec.rb
index 2d6a674d3ce..316d1343512 100644
--- a/spec/models/todo_spec.rb
+++ b/spec/models/todo_spec.rb
@@ -396,6 +396,19 @@ RSpec.describe Todo do
end
end
+ describe '.not_in_users' do
+ it 'returns the expected todos' do
+ user1 = create(:user)
+ user2 = create(:user)
+
+ todo1 = create(:todo, user: user1)
+ todo2 = create(:todo, user: user1)
+ create(:todo, user: user2)
+
+ expect(described_class.not_in_users(user2)).to contain_exactly(todo1, todo2)
+ end
+ end
+
describe '.for_group_ids_and_descendants' do
it 'returns the todos for a group and its descendants' do
parent_group = create(:group)
diff --git a/spec/models/user_preference_spec.rb b/spec/models/user_preference_spec.rb
index 401a85e2f82..343576de4d3 100644
--- a/spec/models/user_preference_spec.rb
+++ b/spec/models/user_preference_spec.rb
@@ -49,8 +49,7 @@ RSpec.describe UserPreference, feature_category: :user_profile do
end
describe 'pass_user_identities_to_ci_jwt' do
- it { is_expected.to allow_value(true, false).for(:pass_user_identities_to_ci_jwt) }
- it { is_expected.not_to allow_value(nil).for(:pass_user_identities_to_ci_jwt) }
+ it { is_expected.to validate_inclusion_of(:pass_user_identities_to_ci_jwt).in_array([true, false]) }
it { is_expected.not_to allow_value("").for(:pass_user_identities_to_ci_jwt) }
end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index c611c3c26e3..947d83badf6 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -5073,14 +5073,6 @@ RSpec.describe User, feature_category: :user_profile do
describe '#ci_owned_runners' do
it_behaves_like '#ci_owned_runners'
-
- context 'when FF use_traversal_ids is disabled fallbacks to inefficient implementation' do
- before do
- stub_feature_flags(use_traversal_ids: false)
- end
-
- it_behaves_like '#ci_owned_runners'
- end
end
describe '#projects_with_reporter_access_limited_to' do
@@ -6120,25 +6112,23 @@ RSpec.describe User, feature_category: :user_profile do
end
end
- describe '#allow_possible_spam?' do
+ describe '#trusted?' do
context 'when no custom attribute is set' do
- it 'is false' do
- expect(user.allow_possible_spam?).to be_falsey
+ it 'is falsey' do
+ expect(user.trusted?).to be_falsey
end
end
context 'when the custom attribute is set' do
before do
- user.custom_attributes.upsert_custom_attributes(
- [{
- user_id: user.id,
- key: UserCustomAttribute::ALLOW_POSSIBLE_SPAM,
- value: "test"
- }])
+ user.custom_attributes.create!(
+ key: UserCustomAttribute::TRUSTED_BY,
+ value: "test"
+ )
end
- it '#allow_possible_spam? is true' do
- expect(user.allow_possible_spam?).to be_truthy
+ it 'is truthy' do
+ expect(user.trusted?).to be_truthy
end
end
end
diff --git a/spec/models/users/credit_card_validation_spec.rb b/spec/models/users/credit_card_validation_spec.rb
index 486d1c6d3ea..7faddb2384c 100644
--- a/spec/models/users/credit_card_validation_spec.rb
+++ b/spec/models/users/credit_card_validation_spec.rb
@@ -15,41 +15,43 @@ RSpec.describe Users::CreditCardValidation, feature_category: :user_profile do
it { is_expected.to validate_length_of(:network_hash).is_at_most(44) }
describe '#similar_records' do
- let(:card_details) do
- subject.attributes.with_indifferent_access.slice(:expiration_date, :last_digits, :network, :holder_name)
+ let_it_be(:credit_card_validation) { create(:credit_card_validation) }
+
+ let_it_be(:card_details) do
+ credit_card_validation.attributes.with_indifferent_access.slice(
+ :expiration_date, :last_digits, :network, :holder_name
+ )
end
- subject!(:credit_card_validation) { create(:credit_card_validation, holder_name: 'Alice') }
+ let_it_be(:match_1) { create(:credit_card_validation, card_details) }
+ let_it_be(:match_2) { create(:credit_card_validation, card_details.merge(holder_name: 'Bob')) }
- let!(:match1) { create(:credit_card_validation, card_details) }
- let!(:match2) { create(:credit_card_validation, card_details.merge(holder_name: 'Bob')) }
- let!(:non_match1) { create(:credit_card_validation, card_details.merge(last_digits: 9)) }
- let!(:non_match2) { create(:credit_card_validation, card_details.merge(network: 'unknown')) }
- let!(:non_match3) do
- create(:credit_card_validation, card_details.dup.tap { |h| h[:expiration_date] += 1.year })
+ let_it_be(:non_match_1) { create(:credit_card_validation, card_details.merge(last_digits: 9999)) }
+ let_it_be(:non_match_2) { create(:credit_card_validation, card_details.merge(network: 'Mastercard')) }
+ let_it_be(:non_match_3) do
+ create(:credit_card_validation, card_details.merge(expiration_date: 2.years.from_now.to_date))
end
it 'returns matches with the same last_digits, expiration and network, ordered by credit_card_validated_at' do
- expect(subject.similar_records).to eq([match2, match1, subject])
+ # eq is used instead of match_array because rows are sorted by credit_card_validated_at in desc order
+ expect(credit_card_validation.similar_records).to eq([match_2, match_1, credit_card_validation])
end
end
describe '#similar_holder_names_count' do
- subject!(:credit_card_validation) { create(:credit_card_validation, holder_name: holder_name) }
-
context 'when holder_name is present' do
- let(:holder_name) { 'ALICE M SMITH' }
+ let_it_be(:credit_card_validation) { create(:credit_card_validation, holder_name: 'ALICE M SMITH') }
- let!(:match) { create(:credit_card_validation, holder_name: 'Alice M Smith') }
- let!(:non_match) { create(:credit_card_validation, holder_name: 'Bob B Brown') }
+ let_it_be(:match) { create(:credit_card_validation, holder_name: 'Alice M Smith') }
+ let_it_be(:non_match) { create(:credit_card_validation, holder_name: 'Bob B Brown') }
it 'returns the count of cards with similar case insensitive holder names' do
- expect(subject.similar_holder_names_count).to eq(2)
+ expect(credit_card_validation.similar_holder_names_count).to eq(2)
end
end
context 'when holder_name is nil' do
- let(:holder_name) { nil }
+ let_it_be(:credit_card_validation) { create(:credit_card_validation, holder_name: nil) }
it 'returns 0' do
expect(subject.similar_holder_names_count).to eq(0)
@@ -75,104 +77,117 @@ RSpec.describe Users::CreditCardValidation, feature_category: :user_profile do
end
describe '.by_banned_user' do
- let(:banned_user) { create(:banned_user) }
- let!(:credit_card) { create(:credit_card_validation) }
- let!(:banned_user_credit_card) { create(:credit_card_validation, user: banned_user.user) }
+ subject(:by_banned_user) { described_class.by_banned_user }
+
+ let_it_be(:banned_user) { create(:banned_user) }
+ let_it_be(:credit_card) { create(:credit_card_validation) }
+ let_it_be(:banned_user_credit_card) { create(:credit_card_validation, user: banned_user.user) }
it 'returns only records associated to banned users' do
- expect(described_class.by_banned_user).to match_array([banned_user_credit_card])
+ expect(by_banned_user).to match_array([banned_user_credit_card])
end
end
describe '.similar_by_holder_name' do
- let!(:credit_card) { create(:credit_card_validation, holder_name: 'CARD MCHODLER') }
- let!(:credit_card2) { create(:credit_card_validation, holder_name: 'RICHIE RICH') }
+ subject(:similar_by_holder_name) { described_class.similar_by_holder_name(holder_name_hash) }
- it 'returns only records that case-insensitive match the given holder name' do
- expect(described_class.similar_by_holder_name('card mchodler')).to match_array([credit_card])
- end
+ let_it_be(:credit_card_validation) { create(:credit_card_validation, holder_name: 'Alice M Smith') }
+ let_it_be(:match) { create(:credit_card_validation, holder_name: 'ALICE M SMITH') }
+
+ context 'when holder_name_hash is present' do
+ let_it_be(:holder_name_hash) { credit_card_validation.holder_name_hash }
- context 'when given holder name is falsey' do
- it 'returns [] when given holder name is ""' do
- expect(described_class.similar_by_holder_name('')).to match_array([])
+ it 'returns records with similar holder names case-insensitively' do
+ expect(similar_by_holder_name).to match_array([credit_card_validation, match])
end
+ end
+
+ context 'when holder_name_hash is nil' do
+ let_it_be(:holder_name_hash) { nil }
- it 'returns [] when given holder name is nil' do
- expect(described_class.similar_by_holder_name(nil)).to match_array([])
+ it 'returns an empty array' do
+ expect(similar_by_holder_name).to match_array([])
end
end
end
describe '.similar_to' do
- let(:credit_card) { create(:credit_card_validation) }
+ subject(:similar_to) { described_class.similar_to(credit_card_validation) }
+
+ let_it_be(:credit_card_validation) { create(:credit_card_validation) }
- let!(:credit_card2) do
+ let_it_be(:match) do
create(:credit_card_validation,
- expiration_date: credit_card.expiration_date,
- last_digits: credit_card.last_digits,
- network: credit_card.network
+ expiration_date: credit_card_validation.expiration_date,
+ last_digits: credit_card_validation.last_digits,
+ network: credit_card_validation.network
)
end
- let!(:credit_card3) do
+ let_it_be(:non_match) do
create(:credit_card_validation,
- expiration_date: credit_card.expiration_date,
- last_digits: credit_card.last_digits,
- network: 'UnknownCCNetwork'
+ expiration_date: credit_card_validation.expiration_date,
+ last_digits: credit_card_validation.last_digits,
+ network: 'Mastercard'
)
end
it 'returns only records with similar expiration_date, last_digits, and network attribute values' do
- expect(described_class.similar_to(credit_card)).to match_array([credit_card, credit_card2])
+ expect(similar_to).to match_array([credit_card_validation, match])
end
end
end
describe '#used_by_banned_user?' do
- let(:credit_card_details) do
- {
- holder_name: 'Christ McLovin',
- expiration_date: 2.years.from_now.end_of_month,
- last_digits: 4242,
- network: 'Visa'
- }
- end
-
- let!(:credit_card) { create(:credit_card_validation, credit_card_details) }
+ subject(:used_by_banned_user) { credit_card_validation.used_by_banned_user? }
- subject { credit_card }
+ let_it_be(:credit_card_validation) { create(:credit_card_validation) }
- context 'when there is a similar credit card associated to a banned user' do
- let_it_be(:banned_user) { create(:banned_user) }
-
- let(:attrs) { credit_card_details.merge({ user: banned_user.user }) }
- let!(:similar_credit_card) { create(:credit_card_validation, attrs) }
+ let_it_be(:card_details) do
+ credit_card_validation.attributes.with_indifferent_access.slice(
+ :expiration_date, :last_digits, :network, :holder_name
+ )
+ end
- it { is_expected.to be_used_by_banned_user }
+ let_it_be(:banned_user) { create(:banned_user) }
- context 'when holder names do not match' do
- let!(:similar_credit_card) do
- create(:credit_card_validation, attrs.merge({ holder_name: 'Mary Goody' }))
+ context 'when there is a similar credit card associated to a banned user' do
+ context 'when holder names match exactly' do
+ before do
+ create(:credit_card_validation, card_details.merge(user: banned_user.user))
end
- it { is_expected.not_to be_used_by_banned_user }
+ it { is_expected.to be(true) }
end
- context 'when .similar_to returns nothing' do
- let!(:similar_credit_card) do
- create(:credit_card_validation, attrs.merge({ network: 'DifferentNetwork' }))
+ context 'when holder names do not match exactly' do
+ before do
+ create(:credit_card_validation, card_details.merge(user: banned_user.user, holder_name: 'John M Smith'))
end
- it { is_expected.not_to be_used_by_banned_user }
+ it { is_expected.to be(false) }
end
end
- context 'when there is a similar credit card not associated to a banned user' do
- let!(:similar_credit_card) do
- create(:credit_card_validation, credit_card_details)
+ context 'when there are no similar credit cards associated to a banned user' do
+ before do
+ create(:credit_card_validation,
+ user: banned_user.user,
+ network: 'Mastercard',
+ last_digits: 1111,
+ holder_name: 'Jane Smith'
+ )
+ end
+
+ it { is_expected.to be(false) }
+ end
+
+ context 'when there is a similar credit card but it is not associated to a banned user' do
+ before do
+ create(:credit_card_validation, card_details)
end
- it { is_expected.not_to be_used_by_banned_user }
+ it { is_expected.to be(false) }
end
end
diff --git a/spec/models/users/in_product_marketing_email_spec.rb b/spec/models/users/in_product_marketing_email_spec.rb
index 78de9ad8bdb..d333a51ae3b 100644
--- a/spec/models/users/in_product_marketing_email_spec.rb
+++ b/spec/models/users/in_product_marketing_email_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Users::InProductMarketingEmail, type: :model do
+RSpec.describe Users::InProductMarketingEmail, type: :model, feature_category: :onboarding do
let(:track) { :create }
let(:series) { 0 }
@@ -15,7 +15,7 @@ RSpec.describe Users::InProductMarketingEmail, type: :model do
it { is_expected.to validate_presence_of(:user) }
- context 'for a track+series email' do
+ context 'when track+series email' do
it { is_expected.to validate_presence_of(:track) }
it { is_expected.to validate_presence_of(:series) }
@@ -24,28 +24,6 @@ RSpec.describe Users::InProductMarketingEmail, type: :model do
.scoped_to([:track, :series]).with_message('track series email has already been sent')
}
end
-
- context 'for a campaign email' do
- subject { build(:in_product_marketing_email, :campaign) }
-
- it { is_expected.to validate_presence_of(:campaign) }
- it { is_expected.not_to validate_presence_of(:track) }
- it { is_expected.not_to validate_presence_of(:series) }
-
- it {
- is_expected.to validate_uniqueness_of(:user_id)
- .scoped_to(:campaign).with_message('campaign email has already been sent')
- }
-
- it { is_expected.to validate_inclusion_of(:campaign).in_array(described_class::CAMPAIGNS) }
- end
-
- context 'when mixing campaign and track+series' do
- it 'is not valid' do
- expect(build(:in_product_marketing_email, :campaign, track: :create)).not_to be_valid
- expect(build(:in_product_marketing_email, :campaign, series: 0)).not_to be_valid
- end
- end
end
describe '.without_track_and_series' do
@@ -78,33 +56,9 @@ RSpec.describe Users::InProductMarketingEmail, type: :model do
context 'when no track or series for a user exists' do
let(:track) { :create }
let(:series) { 0 }
+ let(:other_user) { create(:user) }
- before do
- @other_user = create(:user)
- end
-
- it { expect(without_track_and_series).to eq [@other_user] }
- end
- end
-
- describe '.without_campaign' do
- let_it_be(:user) { create(:user) }
- let_it_be(:other_user) { create(:user) }
-
- let(:campaign) { Users::InProductMarketingEmail::BUILD_IOS_APP_GUIDE }
-
- subject(:without_campaign) { User.merge(described_class.without_campaign(campaign)) }
-
- context 'when record for campaign already exists' do
- before do
- create(:in_product_marketing_email, :campaign, campaign: campaign, user: user)
- end
-
- it { is_expected.to match_array [other_user] }
- end
-
- context 'when record for campaign does not exist' do
- it { is_expected.to match_array [user, other_user] }
+ it { expect(without_track_and_series).to eq [other_user] }
end
end
@@ -112,7 +66,9 @@ RSpec.describe Users::InProductMarketingEmail, type: :model do
let_it_be(:user) { create(:user) }
let_it_be(:in_product_marketing_email) { create(:in_product_marketing_email, series: 0, track: 0, user: user) }
- subject(:for_user_with_track_and_series) { described_class.for_user_with_track_and_series(user, track, series).first }
+ subject(:for_user_with_track_and_series) do
+ described_class.for_user_with_track_and_series(user, track, series).first
+ end
context 'when record for user with given track and series exists' do
it { is_expected.to eq(in_product_marketing_email) }
@@ -165,7 +121,7 @@ RSpec.describe Users::InProductMarketingEmail, type: :model do
end
end
- context 'cta_clicked_at is already set' do
+ context 'when cta_clicked_at is already set' do
it 'does not update' do
create(:in_product_marketing_email, user: user, track: track, series: series, cta_clicked_at: Time.zone.now)
diff --git a/spec/models/vs_code/settings/vs_code_setting_spec.rb b/spec/models/vs_code/settings/vs_code_setting_spec.rb
new file mode 100644
index 00000000000..d22cc815877
--- /dev/null
+++ b/spec/models/vs_code/settings/vs_code_setting_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe VsCode::Settings::VsCodeSetting, feature_category: :web_ide do
+ let!(:user) { create(:user) }
+ let!(:setting) { create(:vscode_setting, user: user, setting_type: 'settings') }
+
+ describe 'validates the presence of required attributes' do
+ it { is_expected.to validate_presence_of(:setting_type) }
+ it { is_expected.to validate_presence_of(:content) }
+ end
+
+ describe 'relationship validation' do
+ it { is_expected.to belong_to(:user) }
+ end
+
+ describe '.by_setting_type' do
+ subject { described_class.by_setting_type('settings') }
+
+ it { is_expected.to contain_exactly(setting) }
+ end
+
+ describe '.by_user' do
+ subject { described_class.by_user(user) }
+
+ it { is_expected.to contain_exactly(setting) }
+ end
+end
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index ee61f191f05..2e1cb9d3d9b 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -394,6 +394,22 @@ RSpec.describe WikiPage, feature_category: :wiki do
expect { subject.create(title: '') }.not_to change { wiki.list_pages.length }
end
end
+
+ context "with front matter context" do
+ let(:attributes) do
+ {
+ title: SecureRandom.hex,
+ content: "---\nxxx: abc\n---\nHome Page",
+ format: "markdown",
+ message: 'Custom Commit Message'
+ }
+ end
+
+ it 'create the page with front matter' do
+ subject.create(attributes)
+ expect(wiki.find_page(title).front_matter).to eq({ xxx: "abc" })
+ end
+ end
end
describe "dot in the title" do
diff --git a/spec/models/work_item_spec.rb b/spec/models/work_item_spec.rb
index 4b675faf99e..3294d53e364 100644
--- a/spec/models/work_item_spec.rb
+++ b/spec/models/work_item_spec.rb
@@ -287,7 +287,7 @@ RSpec.describe WorkItem, feature_category: :portfolio_management do
it_behaves_like 'internal event tracking' do
let(:work_item) { create(:work_item) }
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_CREATED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_CREATED }
let(:project) { work_item.project }
let(:user) { work_item.author }
let(:namespace) { project.namespace }
@@ -713,5 +713,28 @@ RSpec.describe WorkItem, feature_category: :portfolio_management do
.to contain_exactly(authorized_item_b, authorized_item_c, unauthorized_item)
end
end
+
+ context 'when work item is a new record' do
+ let(:new_work_item) { build(:work_item, project: authorized_project) }
+
+ it { expect(new_work_item.linked_work_items(user)).to be_empty }
+ end
+ end
+
+ describe '#linked_items_count' do
+ let_it_be(:item1) { create(:work_item, :issue, project: reusable_project) }
+ let_it_be(:item2) { create(:work_item, :issue, project: reusable_project) }
+ let_it_be(:item3) { create(:work_item, :issue, project: reusable_project) }
+ let_it_be(:item4) { build(:work_item, :issue, project: reusable_project) }
+
+ it 'returns number of items linked to the work item' do
+ create(:work_item_link, source: item1, target: item2)
+ create(:work_item_link, source: item1, target: item3)
+
+ expect(item1.linked_items_count).to eq(2)
+ expect(item2.linked_items_count).to eq(1)
+ expect(item3.linked_items_count).to eq(1)
+ expect(item4.linked_items_count).to eq(0)
+ end
end
end
diff --git a/spec/models/work_items/parent_link_spec.rb b/spec/models/work_items/parent_link_spec.rb
index 3fcfa856db4..301a019dbeb 100644
--- a/spec/models/work_items/parent_link_spec.rb
+++ b/spec/models/work_items/parent_link_spec.rb
@@ -109,11 +109,29 @@ RSpec.describe WorkItems::ParentLink, feature_category: :portfolio_management do
end
end
- it 'is not valid if parent is in other project' do
- link = build(:parent_link, work_item_parent: task1, work_item: build(:work_item))
+ context 'when assigning parent from different project' do
+ let_it_be(:cross_project_issue) { create(:work_item, project: create(:project)) }
- expect(link).not_to be_valid
- expect(link.errors[:work_item_parent]).to include('parent must be in the same project as child.')
+ let(:restriction) do
+ WorkItems::HierarchyRestriction
+ .find_by_parent_type_id_and_child_type_id(cross_project_issue.work_item_type_id, task1.work_item_type_id)
+ end
+
+ it 'is valid when cross-hierarchy is enabled' do
+ restriction.update!(cross_hierarchy_enabled: true)
+ link = build(:parent_link, work_item_parent: cross_project_issue, work_item: task1)
+
+ expect(link).to be_valid
+ expect(link.errors).to be_empty
+ end
+
+ it 'is not valid when cross-hierarchy is not enabled' do
+ restriction.update!(cross_hierarchy_enabled: false)
+ link = build(:parent_link, work_item_parent: cross_project_issue, work_item: task1)
+
+ expect(link).not_to be_valid
+ expect(link.errors[:work_item_parent]).to include('parent must be in the same project or group as child.')
+ end
end
context 'when parent already has maximum number of links' do
diff --git a/spec/models/work_items/related_link_restriction_spec.rb b/spec/models/work_items/related_link_restriction_spec.rb
new file mode 100644
index 00000000000..764ada53f8b
--- /dev/null
+++ b/spec/models/work_items/related_link_restriction_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::RelatedLinkRestriction, feature_category: :portfolio_management do
+ describe 'associations' do
+ it { is_expected.to belong_to(:source_type) }
+ it { is_expected.to belong_to(:target_type) }
+ end
+
+ describe 'validations' do
+ before do
+ # delete seeded records to prevent non-unique record error
+ described_class.delete_all
+ end
+
+ subject { build(:related_link_restriction) }
+
+ it { is_expected.to validate_presence_of(:source_type) }
+ it { is_expected.to validate_presence_of(:target_type) }
+ it { is_expected.to validate_uniqueness_of(:target_type).scoped_to([:source_type_id, :link_type]) }
+ end
+
+ describe '.link_type' do
+ it { is_expected.to define_enum_for(:link_type).with_values(relates_to: 0, blocks: 1) }
+ end
+end
diff --git a/spec/models/work_items/related_work_item_link_spec.rb b/spec/models/work_items/related_work_item_link_spec.rb
index 3217ac52489..d4a07997052 100644
--- a/spec/models/work_items/related_work_item_link_spec.rb
+++ b/spec/models/work_items/related_work_item_link_spec.rb
@@ -9,6 +9,8 @@ RSpec.describe WorkItems::RelatedWorkItemLink, type: :model, feature_category: :
it_behaves_like 'issuable link' do
let_it_be_with_reload(:issuable_link) { create(:work_item_link) }
let_it_be(:issuable) { issue }
+ let_it_be(:issuable2) { create(:work_item, :issue, project: project) }
+ let_it_be(:issuable3) { create(:work_item, :issue, project: project) }
let(:issuable_class) { 'WorkItem' }
let(:issuable_link_factory) { :work_item_link }
end
@@ -21,51 +23,48 @@ RSpec.describe WorkItems::RelatedWorkItemLink, type: :model, feature_category: :
let_it_be(:item_type) { described_class.issuable_name }
end
- describe 'validations' do
- let_it_be(:task1) { create(:work_item, :task, project: project) }
- let_it_be(:task2) { create(:work_item, :task, project: project) }
- let_it_be(:task3) { create(:work_item, :task, project: project) }
-
- subject(:link) { build(:work_item_link, source_id: task1.id, target_id: task2.id) }
+ describe '.issuable_type' do
+ it { expect(described_class.issuable_type).to eq(:issue) }
+ end
- describe '#validate_max_number_of_links' do
- shared_examples 'invalid due to exceeding max number of links' do
- let(:error_msg) { 'This work item would exceed the maximum number of linked items.' }
+ describe '.issuable_name' do
+ it { expect(described_class.issuable_name).to eq('work item') }
+ end
- before do
- create(:work_item_link, source: source, target: target)
- stub_const("#{described_class}::MAX_LINKS_COUNT", 1)
- end
+ describe 'validations' do
+ describe '#validate_related_link_restrictions' do
+ using RSpec::Parameterized::TableSyntax
- specify do
- is_expected.to be_invalid
- expect(link.errors.messages[error_item]).to include(error_msg)
- end
+ where(:source_type_sym, :target_types, :valid) do
+ :incident | [:incident, :test_case, :issue, :task, :ticket] | false
+ :ticket | [:incident, :test_case, :issue, :task, :ticket] | false
+ :test_case | [:incident, :test_case, :issue, :task, :ticket] | false
+ :task | [:incident, :test_case, :ticket] | false
+ :issue | [:incident, :test_case, :ticket] | false
+ :task | [:task, :issue] | true
+ :issue | [:task, :issue] | true
end
- context 'when source exceeds max' do
- let(:source) { task1 }
- let(:target) { task3 }
- let(:error_item) { :source }
+ with_them do
+ it 'validates the related link' do
+ target_types.each do |target_type_sym|
+ source_type = WorkItems::Type.default_by_type(source_type_sym)
+ target_type = WorkItems::Type.default_by_type(target_type_sym)
+ source = build(:work_item, work_item_type: source_type, project: project)
+ target = build(:work_item, work_item_type: target_type, project: project)
+ link = build(:work_item_link, source: source, target: target)
+ opposite_link = build(:work_item_link, source: target, target: source)
- it_behaves_like 'invalid due to exceeding max number of links'
- end
-
- context 'when target exceeds max' do
- let(:source) { task2 }
- let(:target) { task3 }
- let(:error_item) { :target }
+ expect(link.valid?).to eq(valid)
+ expect(opposite_link.valid?).to eq(valid)
+ next if valid
- it_behaves_like 'invalid due to exceeding max number of links'
+ expect(link.errors.messages[:source]).to contain_exactly(
+ "#{source_type.name.downcase.pluralize} cannot be related to #{target_type.name.downcase.pluralize}"
+ )
+ end
+ end
end
end
end
-
- describe '.issuable_type' do
- it { expect(described_class.issuable_type).to eq(:issue) }
- end
-
- describe '.issuable_name' do
- it { expect(described_class.issuable_name).to eq('work item') }
- end
end
diff --git a/spec/models/work_items/type_spec.rb b/spec/models/work_items/type_spec.rb
index e4d2ccdfc5a..7f836ce4e90 100644
--- a/spec/models/work_items/type_spec.rb
+++ b/spec/models/work_items/type_spec.rb
@@ -83,6 +83,8 @@ RSpec.describe WorkItems::Type, feature_category: :team_planning do
expect(Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter).not_to receive(:upsert_types).and_call_original
expect(Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter).not_to receive(:upsert_widgets)
expect(Gitlab::DatabaseImporters::WorkItems::HierarchyRestrictionsImporter).not_to receive(:upsert_restrictions)
+ expect(Gitlab::DatabaseImporters::WorkItems::RelatedLinksRestrictionsImporter)
+ .not_to receive(:upsert_restrictions)
expect(subject).to eq(default_issue_type)
end
@@ -96,6 +98,7 @@ RSpec.describe WorkItems::Type, feature_category: :team_planning do
expect(Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter).to receive(:upsert_types).and_call_original
expect(Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter).to receive(:upsert_widgets)
expect(Gitlab::DatabaseImporters::WorkItems::HierarchyRestrictionsImporter).to receive(:upsert_restrictions)
+ expect(Gitlab::DatabaseImporters::WorkItems::RelatedLinksRestrictionsImporter).to receive(:upsert_restrictions)
expect(subject).to eq(default_issue_type)
end
diff --git a/spec/policies/achievements/user_achievement_policy_spec.rb b/spec/policies/achievements/user_achievement_policy_spec.rb
index c3148e882fa..a53912d67a1 100644
--- a/spec/policies/achievements/user_achievement_policy_spec.rb
+++ b/spec/policies/achievements/user_achievement_policy_spec.rb
@@ -75,4 +75,27 @@ RSpec.describe Achievements::UserAchievementPolicy, feature_category: :user_prof
end
end
end
+
+ context 'when current_user and achievement owner are different' do
+ it { is_expected.to be_disallowed(:update_owned_user_achievement) }
+ it { is_expected.to be_disallowed(:update_user_achievement) }
+ end
+
+ context 'when current_user and achievement owner are the same' do
+ let(:current_user) { achievement_owner }
+
+ it { is_expected.to be_allowed(:update_owned_user_achievement) }
+ it { is_expected.to be_allowed(:update_user_achievement) }
+ end
+
+ context 'when the achievements feature flag is disabled' do
+ let(:current_user) { achievement_owner }
+
+ before do
+ stub_feature_flags(achievements: false)
+ end
+
+ it { is_expected.to be_disallowed(:read_user_achievement) }
+ it { is_expected.to be_disallowed(:update_user_achievement) }
+ end
end
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index 4d72de27046..cb7884b141e 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe GroupPolicy, feature_category: :system_access do
expect_disallowed(*maintainer_permissions)
expect_disallowed(*owner_permissions)
expect_disallowed(:read_namespace)
+ expect_disallowed(:read_namespace_via_membership)
end
end
@@ -34,6 +35,7 @@ RSpec.describe GroupPolicy, feature_category: :system_access do
expect_disallowed(*maintainer_permissions)
expect_disallowed(*owner_permissions)
expect_disallowed(:read_namespace)
+ expect_disallowed(:read_namespace_via_membership)
end
end
@@ -1099,73 +1101,6 @@ RSpec.describe GroupPolicy, feature_category: :system_access do
end
end
- describe 'observability' do
- let(:allowed_admin) { be_allowed(:read_observability) && be_allowed(:admin_observability) }
- let(:allowed_read) { be_allowed(:read_observability) && be_disallowed(:admin_observability) }
- let(:disallowed) { be_disallowed(:read_observability) && be_disallowed(:admin_observability) }
-
- # rubocop:disable Layout/LineLength
- where(:feature_enabled, :admin_matcher, :owner_matcher, :maintainer_matcher, :developer_matcher, :reporter_matcher, :guest_matcher, :non_member_matcher, :anonymous_matcher) do
- false | ref(:disallowed) | ref(:disallowed) | ref(:disallowed) | ref(:disallowed) | ref(:disallowed) | ref(:disallowed) | ref(:disallowed) | ref(:disallowed)
- true | ref(:allowed_admin) | ref(:allowed_admin) | ref(:allowed_admin) | ref(:allowed_read) | ref(:disallowed) | ref(:disallowed) | ref(:disallowed) | ref(:disallowed)
- end
- # rubocop:enable Layout/LineLength
-
- with_them do
- before do
- stub_feature_flags(observability_group_tab: feature_enabled)
- end
-
- context 'admin', :enable_admin_mode do
- let(:current_user) { admin }
-
- it { is_expected.to admin_matcher }
- end
-
- context 'owner' do
- let(:current_user) { owner }
-
- it { is_expected.to owner_matcher }
- end
-
- context 'maintainer' do
- let(:current_user) { maintainer }
-
- it { is_expected.to maintainer_matcher }
- end
-
- context 'developer' do
- let(:current_user) { developer }
-
- it { is_expected.to developer_matcher }
- end
-
- context 'reporter' do
- let(:current_user) { reporter }
-
- it { is_expected.to reporter_matcher }
- end
-
- context 'with guest' do
- let(:current_user) { guest }
-
- it { is_expected.to guest_matcher }
- end
-
- context 'with non member' do
- let(:current_user) { create(:user) }
-
- it { is_expected.to non_member_matcher }
- end
-
- context 'with anonymous' do
- let(:current_user) { nil }
-
- it { is_expected.to anonymous_matcher }
- end
- end
- end
-
describe 'dependency proxy' do
RSpec.shared_examples 'disabling admin_package feature flag' do
before do
diff --git a/spec/policies/namespaces/user_namespace_policy_spec.rb b/spec/policies/namespaces/user_namespace_policy_spec.rb
index 41555ca4150..b4fbc7e0417 100644
--- a/spec/policies/namespaces/user_namespace_policy_spec.rb
+++ b/spec/policies/namespaces/user_namespace_policy_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Namespaces::UserNamespacePolicy, feature_category: :groups_and_pr
let_it_be(:admin) { create(:admin) }
let_it_be(:namespace) { create(:user_namespace, owner: owner) }
- let(:owner_permissions) { [:owner_access, :create_projects, :admin_namespace, :read_namespace, :read_statistics, :transfer_projects, :admin_package, :read_billing, :edit_billing, :import_projects] }
+ let(:owner_permissions) { [:owner_access, :create_projects, :admin_namespace, :read_namespace, :read_namespace_via_membership, :read_statistics, :transfer_projects, :admin_package, :read_billing, :edit_billing, :import_projects] }
subject { described_class.new(current_user, namespace) }
diff --git a/spec/policies/personal_snippet_policy_spec.rb b/spec/policies/personal_snippet_policy_spec.rb
index d546805ce01..3efa96cffe9 100644
--- a/spec/policies/personal_snippet_policy_spec.rb
+++ b/spec/policies/personal_snippet_policy_spec.rb
@@ -170,4 +170,55 @@ RSpec.describe PersonalSnippetPolicy do
it_behaves_like 'admin access with admin mode'
end
+
+ context 'when the author of the snippet is banned', feature_category: :insider_threat do
+ let(:banned_user) { build(:user, :banned) }
+ let(:snippet) { build(:personal_snippet, :public, author: banned_user) }
+
+ context 'no user' do
+ subject { permissions(nil) }
+
+ it do
+ is_expected.to be_disallowed(:read_snippet)
+ is_expected.to be_disallowed(:create_note)
+ is_expected.to be_disallowed(:award_emoji)
+ is_expected.to be_disallowed(*author_permissions)
+ end
+ end
+
+ context 'regular user' do
+ subject { permissions(regular_user) }
+
+ it do
+ is_expected.to be_disallowed(:read_snippet)
+ is_expected.to be_disallowed(:read_note)
+ is_expected.to be_disallowed(:create_note)
+ is_expected.to be_disallowed(*author_permissions)
+ end
+ end
+
+ context 'external user' do
+ subject { permissions(external_user) }
+
+ it do
+ is_expected.to be_disallowed(:read_snippet)
+ is_expected.to be_disallowed(:read_note)
+ is_expected.to be_disallowed(:create_note)
+ is_expected.to be_disallowed(*author_permissions)
+ end
+ end
+
+ context 'snippet author' do
+ subject { permissions(snippet.author) }
+
+ it do
+ is_expected.to be_disallowed(:read_snippet)
+ is_expected.to be_disallowed(:read_note)
+ is_expected.to be_disallowed(:create_note)
+ is_expected.to be_disallowed(*author_permissions)
+ end
+ end
+
+ it_behaves_like 'admin access with admin mode'
+ end
end
diff --git a/spec/policies/project_member_policy_spec.rb b/spec/policies/project_member_policy_spec.rb
index d7c155b39f5..8e7f2658e3f 100644
--- a/spec/policies/project_member_policy_spec.rb
+++ b/spec/policies/project_member_policy_spec.rb
@@ -2,9 +2,9 @@
require 'spec_helper'
-RSpec.describe ProjectMemberPolicy do
- let(:project) { create(:project) }
- let(:maintainer) { create(:user) }
+RSpec.describe ProjectMemberPolicy, feature_category: :groups_and_projects do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:maintainer) { create(:user) }
let(:member) { create(:project_member, project: project, user: member_user) }
let(:current_user) { maintainer }
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index e7c2dcc4158..3de006d8c9b 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -288,7 +288,6 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do
:create_build, :read_build, :update_build, :admin_build, :destroy_build,
:create_pipeline_schedule, :read_pipeline_schedule_variables, :update_pipeline_schedule, :admin_pipeline_schedule, :destroy_pipeline_schedule,
:create_environment, :read_environment, :update_environment, :admin_environment, :destroy_environment,
- :create_cluster, :read_cluster, :update_cluster, :admin_cluster, :destroy_cluster,
:create_deployment, :read_deployment, :update_deployment, :admin_deployment, :destroy_deployment
]
diff --git a/spec/policies/project_snippet_policy_spec.rb b/spec/policies/project_snippet_policy_spec.rb
index c6d8ef05cfd..b02fc53db21 100644
--- a/spec/policies/project_snippet_policy_spec.rb
+++ b/spec/policies/project_snippet_policy_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe ProjectSnippetPolicy do
let_it_be(:group) { create(:group, :public) }
let_it_be(:regular_user) { create(:user) }
let_it_be(:external_user) { create(:user, :external) }
+ let_it_be(:admin_user) { create(:user, :admin) }
let_it_be(:author) { create(:user) }
let_it_be(:author_permissions) do
[
@@ -296,7 +297,7 @@ RSpec.describe ProjectSnippetPolicy do
context 'admin user' do
let(:snippet_visibility) { :private }
- let(:current_user) { create(:admin) }
+ let(:current_user) { admin_user }
context 'when admin mode is enabled', :enable_admin_mode do
it do
@@ -327,4 +328,57 @@ RSpec.describe ProjectSnippetPolicy do
it_behaves_like 'regular user member permissions'
end
end
+
+ context 'when the author of the snippet is banned', feature_category: :insider_threat do
+ let(:banned_user) { build(:user, :banned) }
+ let(:project) { build(:project, :public, group: group) }
+ let(:snippet) { build(:project_snippet, :public, project: project, author: banned_user) }
+
+ context 'no user' do
+ let(:current_user) { nil }
+
+ it do
+ expect_disallowed(:read_snippet)
+ expect_disallowed(:read_note)
+ expect_disallowed(:create_note)
+ expect_disallowed(*author_permissions)
+ end
+ end
+
+ context 'regular user' do
+ let(:current_user) { regular_user }
+ let(:membership_target) { project }
+
+ it do
+ expect_disallowed(:read_snippet)
+ expect_disallowed(:read_note)
+ expect_disallowed(:create_note)
+ expect_disallowed(*author_permissions)
+ end
+ end
+
+ context 'external user' do
+ let(:current_user) { external_user }
+ let(:membership_target) { project }
+
+ it do
+ expect_disallowed(:read_snippet)
+ expect_disallowed(:read_note)
+ expect_disallowed(:create_note)
+ expect_disallowed(*author_permissions)
+ end
+ end
+
+ context 'admin user', :enable_admin_mode do
+ let(:current_user) { admin_user }
+ let(:membership_target) { project }
+
+ it do
+ expect_allowed(:read_snippet)
+ expect_allowed(:read_note)
+ expect_allowed(:create_note)
+ expect_allowed(*author_permissions)
+ end
+ end
+ end
end
diff --git a/spec/policies/work_item_policy_spec.rb b/spec/policies/work_item_policy_spec.rb
index bd8f5604eba..568c375ce56 100644
--- a/spec/policies/work_item_policy_spec.rb
+++ b/spec/policies/work_item_policy_spec.rb
@@ -221,4 +221,92 @@ RSpec.describe WorkItemPolicy, feature_category: :team_planning do
it { is_expected.to be_allowed(:admin_work_item_link) }
end
end
+
+ describe 'create_note' do
+ context 'when work item is associated with a project' do
+ context 'when project is public' do
+ let(:work_item_subject) { public_work_item }
+
+ context 'when user is not a member of the project' do
+ let(:current_user) { non_member_user }
+
+ it { is_expected.to be_allowed(:create_note) }
+ end
+
+ context 'when user is a member of the project' do
+ let(:current_user) { guest_author }
+
+ it { is_expected.to be_allowed(:create_note) }
+
+ context 'when work_item is confidential' do
+ let(:work_item_subject) { create(:work_item, :confidential, project: project) }
+
+ it { is_expected.not_to be_allowed(:create_note) }
+ end
+ end
+ end
+ end
+
+ context 'when work item is associated with a group' do
+ context 'when group is public' do
+ let_it_be(:public_group) { create(:group, :public) }
+ let_it_be(:public_group_work_item) { create(:work_item, :group_level, namespace: public_group) }
+ let_it_be(:public_group_member) { create(:user).tap { |u| public_group.add_reporter(u) } }
+ let(:work_item_subject) { public_group_work_item }
+
+ let_it_be(:public_group_confidential_work_item) do
+ create(:work_item, :group_level, :confidential, namespace: public_group)
+ end
+
+ context 'when user is not a member of the group' do
+ let(:current_user) { non_member_user }
+
+ it { is_expected.to be_allowed(:create_note) }
+
+ context 'when work_item is confidential' do
+ let(:work_item_subject) { public_group_confidential_work_item }
+
+ it { is_expected.not_to be_allowed(:create_note) }
+ end
+ end
+
+ context 'when user is a member of the group' do
+ let(:current_user) { public_group_member }
+
+ it { is_expected.to be_allowed(:create_note) }
+
+ context 'when work_item is confidential' do
+ let(:work_item_subject) { public_group_confidential_work_item }
+
+ it { is_expected.to be_allowed(:create_note) }
+ end
+ end
+ end
+
+ context 'when group is not public' do
+ let_it_be(:private_group) { create(:group, :private) }
+ let_it_be(:private_group_work_item) { create(:work_item, :group_level, namespace: private_group) }
+ let_it_be(:private_group_reporter) { create(:user).tap { |u| private_group.add_reporter(u) } }
+ let(:work_item_subject) { private_group_work_item }
+
+ context 'when user is not a member of the group' do
+ let(:current_user) { non_member_user }
+
+ it { is_expected.not_to be_allowed(:create_note) }
+ end
+
+ context 'when user is a member of the group' do
+ let(:current_user) { private_group_reporter }
+
+ it { is_expected.to be_allowed(:create_note) }
+
+ context 'when work_item is confidential' do
+ let(:work_item_subject) { create(:work_item, :group_level, :confidential, namespace: private_group) }
+
+ it { is_expected.to be_allowed(:create_note) }
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/presenters/blob_presenter_spec.rb b/spec/presenters/blob_presenter_spec.rb
index a249597e900..eed39c7a404 100644
--- a/spec/presenters/blob_presenter_spec.rb
+++ b/spec/presenters/blob_presenter_spec.rb
@@ -56,31 +56,43 @@ RSpec.describe BlobPresenter do
end
context 'when blob has ref_type' do
- before do
- blob.ref_type = 'heads'
- end
+ %w[heads tags].each do |ref_type|
+ context "when ref_type is #{ref_type}" do
+ before do
+ blob.ref_type = ref_type
+ end
- describe '#web_url' do
- it { expect(presenter.web_url).to eq("http://localhost/#{project.full_path}/-/blob/#{ref}/#{path}?ref_type=heads") }
- end
+ describe '#web_url' do
+ it { expect(presenter.web_url).to eq("http://localhost/#{project.full_path}/-/blob/#{ref}/#{path}?ref_type=#{ref_type}") }
+ end
- describe '#web_path' do
- it { expect(presenter.web_path).to eq("/#{project.full_path}/-/blob/#{ref}/#{path}?ref_type=heads") }
- end
+ describe '#web_path' do
+ it { expect(presenter.web_path).to eq("/#{project.full_path}/-/blob/#{ref}/#{path}?ref_type=#{ref_type}") }
+ end
- describe '#edit_blob_path' do
- it { expect(presenter.edit_blob_path).to eq("/#{project.full_path}/-/edit/#{ref}/#{path}?ref_type=heads") }
- end
+ describe '#edit_blob_path' do
+ it { expect(presenter.edit_blob_path).to eq("/#{project.full_path}/-/edit/#{ref}/#{path}?ref_type=#{ref_type}") }
+ end
- describe '#raw_path' do
- it { expect(presenter.raw_path).to eq("/#{project.full_path}/-/raw/#{ref}/#{path}?ref_type=heads") }
- end
+ describe '#raw_path' do
+ it { expect(presenter.raw_path).to eq("/#{project.full_path}/-/raw/#{ref}/#{path}?ref_type=#{ref_type}") }
+ end
- describe '#replace_path' do
- it { expect(presenter.replace_path).to eq("/#{project.full_path}/-/update/#{ref}/#{path}?ref_type=heads") }
- end
+ describe '#replace_path' do
+ it { expect(presenter.replace_path).to eq("/#{project.full_path}/-/update/#{ref}/#{path}?ref_type=#{ref_type}") }
+ end
- it_behaves_like '#can_current_user_push_to_branch?'
+ describe '#history_path' do
+ it { expect(presenter.history_path).to eq("/#{project.full_path}/-/commits/#{ref}/#{path}?ref_type=#{ref_type}") }
+ end
+
+ describe '#blame_path' do
+ it { expect(presenter.blame_path).to eq("/#{project.full_path}/-/blame/#{ref}/#{path}?ref_type=#{ref_type}") }
+ end
+
+ it_behaves_like '#can_current_user_push_to_branch?'
+ end
+ end
end
describe '#can_modify_blob?' do
diff --git a/spec/presenters/ci/pipeline_presenter_spec.rb b/spec/presenters/ci/pipeline_presenter_spec.rb
index fc13b377014..97d444e047a 100644
--- a/spec/presenters/ci/pipeline_presenter_spec.rb
+++ b/spec/presenters/ci/pipeline_presenter_spec.rb
@@ -154,8 +154,8 @@ RSpec.describe Ci::PipelinePresenter do
let(:pipeline) { merge_request.all_pipelines.last }
it 'returns a correct ref text' do
- is_expected.to eq("Related merge request <a class=\"mr-iid\" href=\"#{project_merge_request_path(merge_request.project, merge_request)}\">#{merge_request.to_reference}</a> " \
- "to merge <a class=\"ref-name gl-link gl-bg-blue-50 gl-rounded-base gl-px-2\" href=\"#{project_commits_path(merge_request.source_project, merge_request.source_branch)}\">#{merge_request.source_branch}</a>")
+ is_expected.to eq("Related merge request <a class=\"mr-iid ref-container\" href=\"#{project_merge_request_path(merge_request.project, merge_request)}\">#{merge_request.to_reference}</a> " \
+ "to merge <a class=\"ref-container gl-link\" href=\"#{project_commits_path(merge_request.source_project, merge_request.source_branch)}\">#{merge_request.source_branch}</a>")
end
end
@@ -164,9 +164,9 @@ RSpec.describe Ci::PipelinePresenter do
let(:pipeline) { merge_request.all_pipelines.last }
it 'returns a correct ref text' do
- is_expected.to eq("Related merge request <a class=\"mr-iid\" href=\"#{project_merge_request_path(merge_request.project, merge_request)}\">#{merge_request.to_reference}</a> " \
- "to merge <a class=\"ref-name gl-link gl-bg-blue-50 gl-rounded-base gl-px-2\" href=\"#{project_commits_path(merge_request.source_project, merge_request.source_branch)}\">#{merge_request.source_branch}</a> " \
- "into <a class=\"ref-name gl-link gl-bg-blue-50 gl-rounded-base gl-px-2\" href=\"#{project_commits_path(merge_request.target_project, merge_request.target_branch)}\">#{merge_request.target_branch}</a>")
+ is_expected.to eq("Related merge request <a class=\"mr-iid ref-container\" href=\"#{project_merge_request_path(merge_request.project, merge_request)}\">#{merge_request.to_reference}</a> " \
+ "to merge <a class=\"ref-container gl-link\" href=\"#{project_commits_path(merge_request.source_project, merge_request.source_branch)}\">#{merge_request.source_branch}</a> " \
+ "into <a class=\"ref-container gl-link\" href=\"#{project_commits_path(merge_request.target_project, merge_request.target_branch)}\">#{merge_request.target_branch}</a>")
end
end
@@ -177,7 +177,7 @@ RSpec.describe Ci::PipelinePresenter do
end
it 'returns a correct ref text' do
- is_expected.to eq("For <a class=\"ref-name gl-link gl-bg-blue-50 gl-rounded-base gl-px-2\" href=\"#{project_commits_path(pipeline.project, pipeline.ref)}\">#{pipeline.ref}</a>")
+ is_expected.to eq("For <a class=\"ref-container gl-link\" href=\"#{project_commits_path(pipeline.project, pipeline.ref)}\">#{pipeline.ref}</a>")
end
context 'when ref contains malicious script' do
@@ -209,100 +209,6 @@ RSpec.describe Ci::PipelinePresenter do
end
end
- describe '#all_related_merge_requests' do
- subject(:all_related_merge_requests) do
- presenter.send(:all_related_merge_requests)
- end
-
- it 'memoizes the returned relation' do
- expect(pipeline).to receive(:all_merge_requests_by_recency).exactly(1).time.and_call_original
- 2.times { presenter.send(:all_related_merge_requests).count }
- end
-
- context 'for a branch pipeline with two open MRs' do
- let!(:one) { create(:merge_request, source_project: project, source_branch: pipeline.ref) }
- let!(:two) { create(:merge_request, source_project: project, source_branch: pipeline.ref, target_branch: 'fix') }
-
- it { is_expected.to contain_exactly(one, two) }
- end
-
- context 'permissions' do
- let_it_be_with_refind(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline, source_project: project) }
-
- let(:pipeline) { merge_request.all_pipelines.take }
-
- shared_examples 'private merge requests' do
- context 'when not logged in' do
- let(:current_user) {}
-
- it { is_expected.to be_empty }
- end
-
- context 'when logged in as a non_member' do
- let(:current_user) { create(:user) }
-
- it { is_expected.to be_empty }
- end
-
- context 'when logged in as a guest' do
- let(:current_user) { create(:user) }
-
- before do
- project.add_guest(current_user)
- end
-
- it { is_expected.to be_empty }
- end
-
- context 'when logged in as a developer' do
- it { is_expected.to contain_exactly(merge_request) }
- end
-
- context 'when logged in as a maintainer' do
- let(:current_user) { create(:user) }
-
- before do
- project.add_maintainer(current_user)
- end
-
- it { is_expected.to contain_exactly(merge_request) }
- end
- end
-
- context 'with a private project' do
- it_behaves_like 'private merge requests'
- end
-
- context 'with a public project with private merge requests' do
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
-
- project
- .project_feature
- .update!(merge_requests_access_level: ProjectFeature::PRIVATE)
- end
-
- it_behaves_like 'private merge requests'
- end
-
- context 'with a public project with public merge requests' do
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
-
- project
- .project_feature
- .update!(merge_requests_access_level: ProjectFeature::ENABLED)
- end
-
- context 'when not logged in' do
- let(:current_user) {}
-
- it { is_expected.to contain_exactly(merge_request) }
- end
- end
- end
- end
-
describe '#link_to_merge_request' do
subject { presenter.link_to_merge_request }
@@ -353,4 +259,24 @@ RSpec.describe Ci::PipelinePresenter do
it { is_expected.to be_nil }
end
end
+
+ describe '#triggered_by_path' do
+ subject { presenter.triggered_by_path }
+
+ context 'when the pipeline is a child' do
+ let(:upstream_pipeline) { create(:ci_pipeline) }
+ let(:pipeline) { create(:ci_pipeline, child_of: upstream_pipeline) }
+ let(:expected_path) { project_pipeline_path(upstream_pipeline.project, upstream_pipeline) }
+
+ it 'returns the pipeline path' do
+ expect(subject).to eq(expected_path)
+ end
+ end
+
+ context 'when the pipeline is not a child' do
+ it 'returns an empty string' do
+ expect(subject).to eq('')
+ end
+ end
+ end
end
diff --git a/spec/presenters/commit_presenter_spec.rb b/spec/presenters/commit_presenter_spec.rb
index 5ac270a8df8..22c4f1f39d4 100644
--- a/spec/presenters/commit_presenter_spec.rb
+++ b/spec/presenters/commit_presenter_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe CommitPresenter, feature_category: :source_code_management do
subject { presenter.detailed_status_for('ref')&.text }
where(:read_commit_status, :read_pipeline, :expected_result) do
- true | true | 'passed'
+ true | true | 'Passed'
true | false | nil
false | true | nil
false | false | nil
diff --git a/spec/presenters/issue_presenter_spec.rb b/spec/presenters/issue_presenter_spec.rb
index 99ab8582f77..07a9f8015e9 100644
--- a/spec/presenters/issue_presenter_spec.rb
+++ b/spec/presenters/issue_presenter_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe IssuePresenter do
it 'returns a work item url using iid for the task' do
expect(presenter.web_url).to eq(
- project_work_items_url(project, work_items_path: presented_issue.iid)
+ project_work_item_url(project, presented_issue.iid)
)
end
end
@@ -67,7 +67,7 @@ RSpec.describe IssuePresenter do
it 'returns a work item path using iid for the task' do
expect(presenter.issue_path).to eq(
- project_work_items_path(project, work_items_path: presented_issue.iid)
+ project_work_item_path(project, presented_issue.iid)
)
end
end
diff --git a/spec/presenters/member_presenter_spec.rb b/spec/presenters/member_presenter_spec.rb
index 7850399b711..7223c98d5f7 100644
--- a/spec/presenters/member_presenter_spec.rb
+++ b/spec/presenters/member_presenter_spec.rb
@@ -2,13 +2,44 @@
require 'spec_helper'
+# Creation is necessary due to relations and the need to check in the presenter
+#
+# rubocop:disable RSpec/FactoryBot/AvoidCreate
RSpec.describe MemberPresenter, feature_category: :groups_and_projects do
- let_it_be(:member) { build(:group_member) }
- let(:presenter) { described_class.new(member) }
+ let_it_be(:root_group) { create(:group) }
+ let_it_be(:subgroup) { create(:group, parent: root_group) }
+ let_it_be(:user) { create(:user) }
+
+ let_it_be(:root_member) { create(:group_member, :reporter, group: root_group, user: user) }
+ let_it_be(:subgroup_member) { create(:group_member, :reporter, group: subgroup, user: user) }
+
+ let(:presenter) { described_class.new(root_member) }
describe '#last_owner?' do
it 'raises `NotImplementedError`' do
expect { presenter.last_owner? }.to raise_error(NotImplementedError)
end
end
+
+ describe '#valid_level_roles' do
+ it 'does not return levels lower than user highest membership in the hierarchy' do
+ expect(described_class.new(subgroup_member).valid_level_roles).to eq(
+ 'Reporter' => Gitlab::Access::REPORTER,
+ 'Developer' => Gitlab::Access::DEVELOPER,
+ 'Maintainer' => Gitlab::Access::MAINTAINER,
+ 'Owner' => Gitlab::Access::OWNER
+ )
+ end
+
+ it 'returns all roles for the root group' do
+ expect(described_class.new(root_member).valid_level_roles).to eq(
+ 'Guest' => Gitlab::Access::GUEST,
+ 'Reporter' => Gitlab::Access::REPORTER,
+ 'Developer' => Gitlab::Access::DEVELOPER,
+ 'Maintainer' => Gitlab::Access::MAINTAINER,
+ 'Owner' => Gitlab::Access::OWNER
+ )
+ end
+ end
end
+# rubocop:enable RSpec/FactoryBot/AvoidCreate
diff --git a/spec/presenters/merge_request_presenter_spec.rb b/spec/presenters/merge_request_presenter_spec.rb
index b4210099e14..b642292b458 100644
--- a/spec/presenters/merge_request_presenter_spec.rb
+++ b/spec/presenters/merge_request_presenter_spec.rb
@@ -485,7 +485,7 @@ RSpec.describe MergeRequestPresenter do
allow(resource).to receive(:source_branch_exists?) { true }
is_expected
- .to eq("<a class=\"ref-name gl-link gl-bg-blue-50 gl-rounded-base gl-px-2\" href=\"#{presenter.source_branch_commits_path}\">#{presenter.source_branch}</a>")
+ .to eq("<a class=\"ref-container gl-link\" href=\"#{presenter.source_branch_commits_path}\">#{presenter.source_branch}</a>")
end
end
@@ -508,7 +508,7 @@ RSpec.describe MergeRequestPresenter do
allow(resource).to receive(:target_branch_exists?) { true }
is_expected
- .to eq("<a class=\"ref-name gl-link gl-bg-blue-50 gl-rounded-base gl-px-2\" href=\"#{presenter.target_branch_commits_path}\">#{presenter.target_branch}</a>")
+ .to eq("<a class=\"ref-container gl-link\" href=\"#{presenter.target_branch_commits_path}\">#{presenter.target_branch}</a>")
end
end
diff --git a/spec/presenters/ml/candidate_details_presenter_spec.rb b/spec/presenters/ml/candidate_details_presenter_spec.rb
index 0ecf80b683e..34de1e66a8a 100644
--- a/spec/presenters/ml/candidate_details_presenter_spec.rb
+++ b/spec/presenters/ml/candidate_details_presenter_spec.rb
@@ -13,6 +13,8 @@ RSpec.describe ::Ml::CandidateDetailsPresenter, feature_category: :mlops do
let_it_be(:metrics) do
[
build_stubbed(:ml_candidate_metrics, name: 'metric1', value: 0.1, candidate: candidate),
+ build_stubbed(:ml_candidate_metrics, name: 'metric1', value: 0.2, step: 1, candidate: candidate),
+ build_stubbed(:ml_candidate_metrics, name: 'metric1', value: 0.3, step: 2, candidate: candidate),
build_stubbed(:ml_candidate_metrics, name: 'metric2', value: 0.2, candidate: candidate),
build_stubbed(:ml_candidate_metrics, name: 'metric3', value: 0.3, candidate: candidate)
]
@@ -30,7 +32,7 @@ RSpec.describe ::Ml::CandidateDetailsPresenter, feature_category: :mlops do
subject { Gitlab::Json.parse(described_class.new(candidate, include_ci_job).present)['candidate'] }
before do
- allow(candidate).to receive(:latest_metrics).and_return(metrics)
+ allow(candidate).to receive(:metrics).and_return(metrics)
allow(candidate).to receive(:params).and_return(params)
end
@@ -45,9 +47,11 @@ RSpec.describe ::Ml::CandidateDetailsPresenter, feature_category: :mlops do
it 'generates the correct metrics' do
expect(subject['metrics']).to include(
- hash_including('name' => 'metric1', 'value' => 0.1),
- hash_including('name' => 'metric2', 'value' => 0.2),
- hash_including('name' => 'metric3', 'value' => 0.3)
+ hash_including('name' => 'metric1', 'value' => 0.1, 'step' => 0),
+ hash_including('name' => 'metric1', 'value' => 0.2, 'step' => 1),
+ hash_including('name' => 'metric1', 'value' => 0.3, 'step' => 2),
+ hash_including('name' => 'metric2', 'value' => 0.2, 'step' => 0),
+ hash_including('name' => 'metric3', 'value' => 0.3, 'step' => 0)
)
end
diff --git a/spec/presenters/ml/model_presenter_spec.rb b/spec/presenters/ml/model_presenter_spec.rb
index dbbd3b57033..88bfa9eb4c6 100644
--- a/spec/presenters/ml/model_presenter_spec.rb
+++ b/spec/presenters/ml/model_presenter_spec.rb
@@ -40,4 +40,10 @@ RSpec.describe Ml::ModelPresenter, feature_category: :mlops do
it { is_expected.to eq("/#{project.full_path}/-/packages/#{model.latest_version.package_id}") }
end
end
+
+ describe '#path' do
+ subject { model1.present.path }
+
+ it { is_expected.to eq("/#{project.full_path}/-/ml/models/#{model1.id}") }
+ end
end
diff --git a/spec/presenters/tree_entry_presenter_spec.rb b/spec/presenters/tree_entry_presenter_spec.rb
index 0abf372b704..359ffbcb140 100644
--- a/spec/presenters/tree_entry_presenter_spec.rb
+++ b/spec/presenters/tree_entry_presenter_spec.rb
@@ -7,29 +7,32 @@ RSpec.describe TreeEntryPresenter do
let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
- let(:tree) { Gitlab::Graphql::Representation::TreeEntry.new(repository.tree.trees.first, repository) }
+ let(:tree) { Gitlab::Graphql::Representation::TreeEntry.new(repository.tree(ref).trees.first, repository) }
let(:presenter) { described_class.new(tree) }
+ let(:ref) { 'master' }
describe '.web_url' do
- it { expect(presenter.web_url).to eq("http://localhost/#{project.full_path}/-/tree/#{tree.commit_id}/#{tree.path}") }
+ it {
+ expect(presenter.web_url).to eq("http://localhost/#{project.full_path}/-/tree/#{ref}/#{tree.path}")
+ }
end
describe '#web_path' do
- it { expect(presenter.web_path).to eq("/#{project.full_path}/-/tree/#{tree.commit_id}/#{tree.path}") }
+ it { expect(presenter.web_path).to eq("/#{project.full_path}/-/tree/#{ref}/#{tree.path}") }
end
- context 'when blob has ref_type' do
+ context 'when tree has ref_type' do
before do
tree.ref_type = 'heads'
end
describe '.web_url' do
- it { expect(presenter.web_url).to eq("http://localhost/#{project.full_path}/-/tree/#{tree.commit_id}/#{tree.path}?ref_type=heads") }
+ it { expect(presenter.web_url).to eq("http://localhost/#{project.full_path}/-/tree/#{ref}/#{tree.path}?ref_type=heads") }
end
describe '#web_path' do
it {
- expect(presenter.web_path).to eq("/#{project.full_path}/-/tree/#{tree.commit_id}/#{tree.path}?ref_type=heads")
+ expect(presenter.web_path).to eq("/#{project.full_path}/-/tree/#{ref}/#{tree.path}?ref_type=heads")
}
end
end
diff --git a/spec/presenters/vs_code/settings/vs_code_manifest_presenter_spec.rb b/spec/presenters/vs_code/settings/vs_code_manifest_presenter_spec.rb
new file mode 100644
index 00000000000..6b39e3dbbf6
--- /dev/null
+++ b/spec/presenters/vs_code/settings/vs_code_manifest_presenter_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe VsCode::Settings::VsCodeManifestPresenter, feature_category: :web_ide do
+ let(:settings) { [] }
+
+ subject(:presenter) { described_class.new(settings) }
+
+ describe '#latest' do
+ context 'when there are not persisted settings' do
+ it 'includes default machine uuid' do
+ default_machine = ::VsCode::Settings::DEFAULT_MACHINE
+
+ expect(presenter.latest.length).to eq(1)
+ expect(presenter.latest['machines']).to eq(default_machine[:uuid])
+ end
+ end
+
+ context 'when there are persisted settings' do
+ let(:settings) { [build_stubbed(:vscode_setting, setting_type: 'extensions')] }
+
+ it 'includes the persisted setting uuid' do
+ expect(presenter.latest.length).to eq(2)
+ expect(presenter.latest['extensions']).to eq(settings.first.uuid)
+ end
+ end
+ end
+
+ describe '#session' do
+ it 'returns default session' do
+ expect(presenter.session).to eq(::VsCode::Settings::DEFAULT_SESSION)
+ end
+ end
+end
diff --git a/spec/presenters/vs_code/settings/vs_code_setting_presenter_spec.rb b/spec/presenters/vs_code/settings/vs_code_setting_presenter_spec.rb
new file mode 100644
index 00000000000..8ad95156bf0
--- /dev/null
+++ b/spec/presenters/vs_code/settings/vs_code_setting_presenter_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe VsCode::Settings::VsCodeSettingPresenter, feature_category: :web_ide do
+ subject(:presenter) { described_class.new(setting) }
+
+ context "when presenting default machine" do
+ let(:setting) { VsCode::Settings::DEFAULT_MACHINE }
+
+ describe '#content' do
+ it { expect(presenter.content).to be_nil }
+ end
+
+ describe '#machines' do
+ it { expect(presenter.machines).to eq(VsCode::Settings::DEFAULT_MACHINE[:machines]) }
+ end
+
+ describe '#machine_id' do
+ it { expect(presenter.machine_id).to be_nil }
+ end
+ end
+
+ context "when presenting persisted setting" do
+ let(:setting) { build_stubbed(:vscode_setting, setting_type: 'extensions') }
+
+ describe '#content' do
+ it { expect(presenter.content).to eq(setting.content) }
+ end
+
+ describe '#machines' do
+ it { expect(presenter.machines).to be_nil }
+ end
+
+ describe '#machine_id' do
+ it { expect(presenter.machine_id).to eq(VsCode::Settings::DEFAULT_MACHINE[:uuid]) }
+ end
+
+ describe 'version' do
+ it { expect(presenter.version).to eq(setting.version) }
+ end
+ end
+end
diff --git a/spec/rake_helper.rb b/spec/rake_helper.rb
index 53bd36542b7..aa9e976d27d 100644
--- a/spec/rake_helper.rb
+++ b/spec/rake_helper.rb
@@ -1,17 +1,5 @@
# frozen_string_literal: true
require 'spec_helper'
-require 'rake'
-RSpec.configure do |config|
- config.include RakeHelpers
-
- config.before(:all, type: :task) do
- Rake.application.rake_require 'tasks/gitlab/helpers'
- Rake::Task.define_task :environment
- end
-
- config.after(:all, type: :task) do
- delete_from_all_tables!(except: deletion_except_tables)
- end
-end
+warn "WARNING: Using `require 'rake_helper'` has no effect. Use `require 'spec_helper'` instead."
diff --git a/spec/requests/acme_challenges_controller_spec.rb b/spec/requests/acme_challenges_controller_spec.rb
new file mode 100644
index 00000000000..f37aefed488
--- /dev/null
+++ b/spec/requests/acme_challenges_controller_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AcmeChallengesController, type: :request, feature_category: :pages do
+ it_behaves_like 'Base action controller' do
+ subject(:request) { get acme_challenge_path }
+ end
+end
diff --git a/spec/requests/api/admin/instance_clusters_spec.rb b/spec/requests/api/admin/instance_clusters_spec.rb
index 6fad020150c..f2e62533b78 100644
--- a/spec/requests/api/admin/instance_clusters_spec.rb
+++ b/spec/requests/api/admin/instance_clusters_spec.rb
@@ -363,7 +363,7 @@ RSpec.describe ::API::Admin::InstanceClusters, feature_category: :deployment_man
end
it 'returns validation error' do
- expect(json_response['message']['platform_kubernetes'].first).to eq(_('Cannot modify managed Kubernetes cluster'))
+ expect(json_response['message']['platform_kubernetes.base'].first).to eq(_('Cannot modify managed Kubernetes cluster'))
end
end
diff --git a/spec/requests/api/bulk_imports_spec.rb b/spec/requests/api/bulk_imports_spec.rb
index 8aad56c9fc3..d3d4a723616 100644
--- a/spec/requests/api/bulk_imports_spec.rb
+++ b/spec/requests/api/bulk_imports_spec.rb
@@ -18,8 +18,26 @@ RSpec.describe API::BulkImports, feature_category: :importers do
end
shared_examples 'disabled feature' do
- it 'returns 404' do
+ before do
stub_application_setting(bulk_import_enabled: false)
+ stub_feature_flags(override_bulk_import_disabled: false)
+ end
+
+ it_behaves_like '404 response' do
+ let(:message) { '404 Not Found' }
+ end
+
+ it 'enables the feature when override flag is enabled for the user' do
+ stub_feature_flags(override_bulk_import_disabled: user)
+
+ request
+
+ expect(response).not_to have_gitlab_http_status(:not_found)
+ end
+
+ it 'does not enable the feature when override flag is enabled for another user' do
+ other_user = create(:user)
+ stub_feature_flags(override_bulk_import_disabled: other_user)
request
@@ -71,7 +89,7 @@ RSpec.describe API::BulkImports, feature_category: :importers do
end
end
- include_examples 'disabled feature'
+ it_behaves_like 'disabled feature'
end
describe 'POST /bulk_imports' do
@@ -262,26 +280,7 @@ RSpec.describe API::BulkImports, feature_category: :importers do
end
context 'when the destination_slug is invalid' do
- it 'returns invalid error when restricting special characters is disabled' do
- Feature.disable(:restrict_special_characters_in_namespace_path)
-
- params[:entities][0][:destination_slug] = 'des?tin?atoi-slugg'
-
- request
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['error']).to include("entities[0][destination_slug] cannot start with " \
- "a non-alphanumeric character except for periods or " \
- "underscores, can contain only alphanumeric characters, " \
- "periods, and underscores, cannot end with a period or " \
- "forward slash, and has no leading or trailing forward " \
- "slashes. It can only contain alphanumeric characters, " \
- "periods, underscores, and dashes. For example, " \
- "'destination_namespace' not 'destination/namespace'")
- end
-
- it 'returns invalid error when restricting special characters is enabled' do
- Feature.enable(:restrict_special_characters_in_namespace_path)
-
+ it 'returns invalid error' do
params[:entities][0][:destination_slug] = 'des?tin?atoi-slugg'
request
@@ -347,7 +346,7 @@ RSpec.describe API::BulkImports, feature_category: :importers do
end
end
- include_examples 'disabled feature'
+ it_behaves_like 'disabled feature'
context 'when request exceeds rate limits' do
it 'prevents user from starting a new migration' do
@@ -371,7 +370,7 @@ RSpec.describe API::BulkImports, feature_category: :importers do
expect(json_response.pluck('id')).to contain_exactly(entity_1.id, entity_2.id, entity_3.id)
end
- include_examples 'disabled feature'
+ it_behaves_like 'disabled feature'
end
describe 'GET /bulk_imports/:id' do
@@ -384,7 +383,7 @@ RSpec.describe API::BulkImports, feature_category: :importers do
expect(json_response['id']).to eq(import_1.id)
end
- include_examples 'disabled feature'
+ it_behaves_like 'disabled feature'
end
describe 'GET /bulk_imports/:id/entities' do
@@ -398,7 +397,7 @@ RSpec.describe API::BulkImports, feature_category: :importers do
expect(json_response.first['failures'].first['exception_class']).to eq(failure_3.exception_class)
end
- include_examples 'disabled feature'
+ it_behaves_like 'disabled feature'
end
describe 'GET /bulk_imports/:id/entities/:entity_id' do
@@ -411,7 +410,7 @@ RSpec.describe API::BulkImports, feature_category: :importers do
expect(json_response['id']).to eq(entity_2.id)
end
- include_examples 'disabled feature'
+ it_behaves_like 'disabled feature'
end
context 'when user is unauthenticated' do
diff --git a/spec/requests/api/ci/jobs_spec.rb b/spec/requests/api/ci/jobs_spec.rb
index 19ac673308b..41e35de189e 100644
--- a/spec/requests/api/ci/jobs_spec.rb
+++ b/spec/requests/api/ci/jobs_spec.rb
@@ -556,7 +556,7 @@ RSpec.describe API::Ci::Jobs, feature_category: :continuous_integration do
before do
allow_next_instance_of(Gitlab::ApplicationRateLimiter::BaseStrategy) do |strategy|
- threshold = Gitlab::ApplicationRateLimiter.rate_limits[:jobs_index][:threshold]
+ threshold = Gitlab::ApplicationRateLimiter.rate_limits[:jobs_index][:threshold].call
allow(strategy).to receive(:increment).and_return(threshold + 1)
end
diff --git a/spec/requests/api/ci/pipeline_schedules_spec.rb b/spec/requests/api/ci/pipeline_schedules_spec.rb
index d760e4ddf28..fb67d7cb4fb 100644
--- a/spec/requests/api/ci/pipeline_schedules_spec.rb
+++ b/spec/requests/api/ci/pipeline_schedules_spec.rb
@@ -176,7 +176,7 @@ RSpec.describe API::Ci::PipelineSchedules, feature_category: :continuous_integra
end
context 'with public project' do
- let_it_be(:project) { create(:project, :repository, :public, public_builds: false) }
+ let_it_be(:project) { create(:project, :repository, :public, public_builds: true) }
it_behaves_like 'request with schedule ownership'
it_behaves_like 'request with project permissions'
@@ -204,6 +204,30 @@ RSpec.describe API::Ci::PipelineSchedules, feature_category: :continuous_integra
expect(json_response).not_to have_key('variables')
end
end
+
+ context 'when public pipelines are disabled' do
+ let_it_be(:project) { create(:project, :repository, :public, public_builds: false) }
+
+ context 'authenticated user with no project permissions' do
+ it 'does not return pipeline_schedule' do
+ get api("/projects/#{project.id}/pipeline_schedules/#{pipeline_schedule.id}", user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'authenticated user with insufficient project permissions' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'returns pipeline_schedule with no variables' do
+ get api("/projects/#{project.id}/pipeline_schedules/#{pipeline_schedule.id}", user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
end
end
@@ -294,7 +318,7 @@ RSpec.describe API::Ci::PipelineSchedules, feature_category: :continuous_integra
end
context 'with public project' do
- let_it_be(:project) { create(:project, :repository, :public, public_builds: false) }
+ let_it_be(:project) { create(:project, :repository, :public, public_builds: true) }
it_behaves_like 'request with schedule ownership'
it_behaves_like 'request with project permissions'
@@ -308,6 +332,18 @@ RSpec.describe API::Ci::PipelineSchedules, feature_category: :continuous_integra
expect(response).to return_pipeline_schedule_pipelines_successfully
end
end
+
+ context 'when public pipelines are disabled' do
+ let_it_be(:project) { create(:project, :repository, :public, public_builds: false) }
+
+ context 'authenticated user with no project permissions' do
+ it 'does not return the details of pipelines triggered from the pipeline schedule' do
+ get api(url, user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
end
end
diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb
index 90595c2d7f9..c3a7dbdcdbb 100644
--- a/spec/requests/api/commits_spec.rb
+++ b/spec/requests/api/commits_spec.rb
@@ -574,7 +574,7 @@ RSpec.describe API::Commits, feature_category: :source_code_management do
end
it_behaves_like 'internal event tracking' do
- let(:action) { ::Gitlab::UsageDataCounters::EditorUniqueCounter::EDIT_BY_WEB_IDE }
+ let(:event) { ::Gitlab::UsageDataCounters::EditorUniqueCounter::EDIT_BY_WEB_IDE }
let(:namespace) { project.namespace.reload }
end
@@ -823,7 +823,7 @@ RSpec.describe API::Commits, feature_category: :source_code_management do
valid_c_params[:start_project] = private_project.id
end
- it 'returns a 402' do
+ it 'returns a 404' do
post api(url, fork_owner), params: valid_c_params
expect(response).to have_gitlab_http_status(:not_found)
@@ -1696,6 +1696,16 @@ RSpec.describe API::Commits, feature_category: :source_code_management do
it_behaves_like 'ref diff'
end
+
+ context 'when unidiff format is requested' do
+ it 'returns the diff in Unified format' do
+ get api(route, current_user), params: { unidiff: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_limited_pagination_headers
+ expect(json_response.dig(0, 'diff')).to eq(commit.diffs.diffs.first.unidiff)
+ end
+ end
end
end
@@ -2422,8 +2432,8 @@ RSpec.describe API::Commits, feature_category: :source_code_management do
get api(route, current_user)
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['signature_type']).to eq('PGP')
- expect(json_response['commit_source']).to eq('rugged')
+ expect(json_response['signature_type']).to eq('X509')
+ expect(json_response['commit_source']).to eq('gitaly')
end
end
end
diff --git a/spec/requests/api/composer_packages_spec.rb b/spec/requests/api/composer_packages_spec.rb
index 3652bee5e44..d4be97a8ae2 100644
--- a/spec/requests/api/composer_packages_spec.rb
+++ b/spec/requests/api/composer_packages_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe API::ComposerPackages, feature_category: :package_registry do
let_it_be(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token_for_project, project: project) }
let_it_be(:deploy_token_for_group) { create(:deploy_token, :group, read_package_registry: true, write_package_registry: true) }
let_it_be(:group_deploy_token) { create(:group_deploy_token, deploy_token: deploy_token_for_group, group: group) }
+ let_it_be(:job) { create(:ci_build, :running, user: user, project: project) }
let(:snowplow_gitlab_standard_context) do
{ project: project, namespace: project.namespace, user: user, property: 'i_package_composer_user' }
@@ -28,7 +29,7 @@ RSpec.describe API::ComposerPackages, feature_category: :package_registry do
subject { get api(url), headers: headers }
context 'with valid project' do
- let!(:package) { create(:composer_package, :with_metadatum, project: project) }
+ let_it_be(:package) { create(:composer_package, :with_metadatum, project: project) }
context 'with a public group' do
before do
@@ -36,59 +37,62 @@ RSpec.describe API::ComposerPackages, feature_category: :package_registry do
end
context 'with basic auth' do
- where(:project_visibility_level, :user_role, :member, :user_token, :include_package) do
- 'PUBLIC' | :developer | true | true | :include_package
- 'PUBLIC' | :developer | false | true | :include_package
- 'PUBLIC' | :guest | true | true | :include_package
- 'PUBLIC' | :guest | false | true | :include_package
- 'PUBLIC' | :anonymous | false | true | :include_package
- 'PRIVATE' | :developer | true | true | :include_package
- 'PRIVATE' | :developer | false | true | :does_not_include_package
- 'PRIVATE' | :guest | true | true | :does_not_include_package
- 'PRIVATE' | :guest | false | true | :does_not_include_package
- 'PRIVATE' | :anonymous | false | true | :does_not_include_package
- 'PRIVATE' | :guest | false | false | :does_not_include_package
- 'PRIVATE' | :guest | true | false | :does_not_include_package
- 'PRIVATE' | :developer | false | false | :does_not_include_package
- 'PRIVATE' | :developer | true | false | :does_not_include_package
- 'PUBLIC' | :developer | true | false | :include_package
- 'PUBLIC' | :guest | true | false | :include_package
- 'PUBLIC' | :developer | false | false | :include_package
- 'PUBLIC' | :guest | false | false | :include_package
+ where(:project_visibility_level, :member_role, :token_type, :valid_token, :package_returned) do
+ 'PUBLIC' | :developer | :user | true | true
+ 'PUBLIC' | :developer | :user | false | true # Anonymous User - fallback
+ 'PUBLIC' | :developer | :job | true | true
+ 'PUBLIC' | :guest | :user | true | true
+ 'PUBLIC' | :guest | :user | false | true # Anonymous User - fallback
+ 'PUBLIC' | :guest | :job | true | true
+ 'PUBLIC' | nil | :user | true | true
+ 'PUBLIC' | nil | :user | false | true # Anonymous User - fallback
+ 'PUBLIC' | nil | :job | true | true
+ 'PUBLIC' | nil | nil | nil | true # Anonymous User
+ 'PRIVATE' | :developer | :user | true | true
+ 'PRIVATE' | :developer | :user | false | false # Anonymous User - fallback
+ 'PRIVATE' | :developer | :job | true | true
+ 'PRIVATE' | :guest | :user | true | false
+ 'PRIVATE' | :guest | :user | false | false # Anonymous User - fallback
+ 'PRIVATE' | :guest | :job | true | false
+ 'PRIVATE' | nil | :user | true | false
+ 'PRIVATE' | nil | :user | false | false # Anonymous User - fallback
+ 'PRIVATE' | nil | :job | true | false
+ 'PRIVATE' | nil | nil | nil | false # Anonymous User
end
with_them do
- include_context 'Composer api project access', params[:project_visibility_level], params[:user_role], params[:user_token], :basic do
- it_behaves_like 'Composer package index', params[:user_role], :success, params[:member], params[:include_package]
+ include_context 'Composer api project access', auth_method: :basic, project_visibility_level: params[:project_visibility_level], token_type: params[:token_type], valid_token: params[:valid_token] do
+ it_behaves_like 'Composer package index', member_role: params[:member_role], expected_status: :success, package_returned: params[:package_returned]
end
end
end
- context 'with private token header auth' do
- where(:project_visibility_level, :user_role, :member, :user_token, :expected_status, :include_package) do
- 'PUBLIC' | :developer | true | true | :success | :include_package
- 'PUBLIC' | :developer | false | true | :success | :include_package
- 'PUBLIC' | :guest | true | true | :success | :include_package
- 'PUBLIC' | :guest | false | true | :success | :include_package
- 'PUBLIC' | :anonymous | false | true | :success | :include_package
- 'PRIVATE' | :developer | true | true | :success | :include_package
- 'PRIVATE' | :developer | false | true | :success | :does_not_include_package
- 'PRIVATE' | :guest | true | true | :success | :does_not_include_package
- 'PRIVATE' | :guest | false | true | :success | :does_not_include_package
- 'PRIVATE' | :anonymous | false | true | :success | :does_not_include_package
- 'PRIVATE' | :guest | false | false | :unauthorized | nil
- 'PRIVATE' | :guest | true | false | :unauthorized | nil
- 'PRIVATE' | :developer | false | false | :unauthorized | nil
- 'PRIVATE' | :developer | true | false | :unauthorized | nil
- 'PUBLIC' | :developer | true | false | :unauthorized | nil
- 'PUBLIC' | :guest | true | false | :unauthorized | nil
- 'PUBLIC' | :developer | false | false | :unauthorized | nil
- 'PUBLIC' | :guest | false | false | :unauthorized | nil
+ context 'with token auth' do
+ where(:project_visibility_level, :member_role, :token_type, :valid_token, :expected_status, :package_returned) do
+ :PUBLIC | :developer | :user | true | :success | true
+ :PUBLIC | :developer | :user | false | :unauthorized | false
+ :PUBLIC | :developer | :job | true | :success | true # Anonymous User - fallback
+ :PUBLIC | :guest | :user | true | :success | true
+ :PUBLIC | :guest | :user | false | :unauthorized | false
+ :PUBLIC | :guest | :job | true | :success | true # Anonymous User - fallback
+ :PUBLIC | nil | :user | true | :success | true
+ :PUBLIC | nil | :user | false | :unauthorized | false
+ :PUBLIC | nil | :job | true | :success | true # Anonymous User - fallback
+ :PUBLIC | nil | nil | nil | :success | true # Anonymous User
+ :PRIVATE | :developer | :user | true | :success | true
+ :PRIVATE | :developer | :user | false | :unauthorized | false
+ :PRIVATE | :developer | :job | true | :success | false # Anonymous User - fallback
+ :PRIVATE | :guest | :user | true | :success | false
+ :PRIVATE | :guest | :user | false | :unauthorized | false
+ :PRIVATE | :guest | :job | true | :success | false # Anonymous User - fallback
+ :PRIVATE | nil | :user | true | :success | false
+ :PRIVATE | nil | :user | false | :unauthorized | false
+ :PRIVATE | nil | nil | nil | :success | false # Anonymous User
end
with_them do
- include_context 'Composer api project access', params[:project_visibility_level], params[:user_role], params[:user_token], :token do
- it_behaves_like 'Composer package index', params[:user_role], params[:expected_status], params[:member], params[:include_package]
+ include_context 'Composer api project access', auth_method: :token, project_visibility_level: params[:project_visibility_level], token_type: params[:token_type], valid_token: params[:valid_token] do
+ it_behaves_like 'Composer package index', member_role: params[:member_role], expected_status: params[:expected_status], package_returned: params[:package_returned]
end
end
end
@@ -101,33 +105,44 @@ RSpec.describe API::ComposerPackages, feature_category: :package_registry do
it_behaves_like 'Composer access with deploy tokens'
- context 'with access to the api' do
- where(:project_visibility_level, :user_role, :member, :user_token, :include_package) do
- 'PRIVATE' | :developer | true | true | :include_package
- 'PRIVATE' | :guest | true | true | :does_not_include_package
+ context 'with basic auth' do
+ where(:member_role, :token_type, :valid_token, :shared_examples_name, :expected_status, :package_returned) do
+ :developer | :user | true | 'Composer package index' | :success | true
+ :developer | :user | false | 'process Composer api request' | :unauthorized | false
+ :developer | :job | true | 'Composer package index' | :success | true
+ :guest | :user | true | 'Composer package index' | :success | false
+ :guest | :user | false | 'process Composer api request' | :unauthorized | false
+ :guest | :job | true | 'Composer package index' | :success | false
+ nil | :user | true | 'Composer package index' | :not_found | false
+ nil | :user | false | 'process Composer api request' | :unauthorized | false
+ nil | :job | true | 'Composer package index' | :not_found | false
+ nil | nil | nil | 'process Composer api request' | :unauthorized | false # Anonymous User
end
with_them do
- include_context 'Composer api project access', params[:project_visibility_level], params[:user_role], params[:user_token] do
- it_behaves_like 'Composer package index', params[:user_role], :success, params[:member], params[:include_package]
+ include_context 'Composer api project access', auth_method: :basic, project_visibility_level: :PRIVATE, token_type: params[:token_type], valid_token: params[:valid_token] do
+ it_behaves_like params[:shared_examples_name], member_role: params[:member_role], expected_status: params[:expected_status], package_returned: params[:package_returned]
end
end
end
- context 'without access to the api' do
- where(:project_visibility_level, :user_role, :member, :user_token) do
- 'PRIVATE' | :developer | true | false
- 'PRIVATE' | :developer | false | true
- 'PRIVATE' | :developer | false | false
- 'PRIVATE' | :guest | true | false
- 'PRIVATE' | :guest | false | true
- 'PRIVATE' | :guest | false | false
- 'PRIVATE' | :anonymous | false | true
+ context 'with token auth' do
+ where(:member_role, :token_type, :valid_token, :shared_examples_name, :expected_status, :package_returned) do
+ :developer | :user | true | 'Composer package index' | :success | true
+ :developer | :user | false | 'process Composer api request' | :unauthorized | false
+ :developer | :job | true | 'process Composer api request' | :unauthorized | false
+ :guest | :user | true | 'Composer package index' | :success | false
+ :guest | :user | false | 'process Composer api request' | :unauthorized | false
+ :guest | :job | true | 'process Composer api request' | :unauthorized | false
+ nil | :user | true | 'Composer package index' | :not_found | false
+ nil | :user | false | 'Composer package index' | :unauthorized | false
+ nil | :job | true | 'process Composer api request' | :unauthorized | false
+ nil | nil | nil | 'process Composer api request' | :unauthorized | false # Anonymous User
end
with_them do
- include_context 'Composer api project access', params[:project_visibility_level], params[:user_role], params[:user_token] do
- it_behaves_like 'process Composer api request', params[:user_role], :not_found, params[:member]
+ include_context 'Composer api project access', auth_method: :token, project_visibility_level: :PRIVATE, token_type: params[:token_type], valid_token: params[:valid_token] do
+ it_behaves_like params[:shared_examples_name], member_role: params[:member_role], expected_status: params[:expected_status], package_returned: params[:package_returned]
end
end
end
@@ -145,30 +160,65 @@ RSpec.describe API::ComposerPackages, feature_category: :package_registry do
subject { get api(url), headers: headers }
context 'with valid project' do
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'Composer provider index' | :success
- 'PUBLIC' | :developer | true | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :developer | false | true | 'Composer provider index' | :success
- 'PUBLIC' | :developer | false | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :guest | true | true | 'Composer provider index' | :success
- 'PUBLIC' | :guest | true | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :guest | false | true | 'Composer provider index' | :success
- 'PUBLIC' | :guest | false | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :anonymous | false | true | 'Composer provider index' | :success
- 'PRIVATE' | :developer | true | true | 'Composer provider index' | :success
- 'PRIVATE' | :developer | true | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :developer | false | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :guest | true | true | 'Composer empty provider index' | :success
- 'PRIVATE' | :guest | true | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :guest | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | false | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'process Composer api request' | :not_found
+ context 'with basic auth' do
+ where(:project_visibility_level, :member_role, :token_type, :valid_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | :user | true | 'Composer provider index' | :success
+ 'PUBLIC' | :developer | :user | false | 'Composer provider index' | :success # Anonymous User - fallback
+ 'PUBLIC' | :developer | :job | true | 'Composer provider index' | :success
+ 'PUBLIC' | :guest | :user | true | 'Composer provider index' | :success
+ 'PUBLIC' | :guest | :user | false | 'Composer provider index' | :success # Anonymous User - fallback
+ 'PUBLIC' | :guest | :job | true | 'Composer provider index' | :success
+ 'PUBLIC' | nil | :user | true | 'Composer provider index' | :success
+ 'PUBLIC' | nil | :user | false | 'Composer provider index' | :success # Anonymous User - fallback
+ 'PUBLIC' | nil | :job | true | 'Composer provider index' | :success
+ 'PUBLIC' | nil | nil | nil | 'Composer provider index' | :success # Anonymous User
+ 'PRIVATE' | :developer | :user | true | 'Composer provider index' | :success
+ 'PRIVATE' | :developer | :user | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :developer | :job | true | 'Composer provider index' | :success
+ 'PRIVATE' | :guest | :user | true | 'Composer empty provider index' | :success
+ 'PRIVATE' | :guest | :user | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :guest | :job | true | 'Composer empty provider index' | :success
+ 'PRIVATE' | nil | :user | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | nil | :user | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | nil | :job | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | nil | nil | nil | 'process Composer api request' | :unauthorized # Anonymous User
+ end
+
+ with_them do
+ include_context 'Composer api group access', auth_method: :basic, project_visibility_level: params[:project_visibility_level], token_type: params[:token_type], valid_token: params[:valid_token] do
+ it_behaves_like params[:shared_examples_name], member_role: params[:member_role], expected_status: params[:expected_status]
+ end
+ end
end
- with_them do
- include_context 'Composer api group access', params[:project_visibility_level], params[:user_role], params[:user_token] do
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ context 'with token auth' do
+ where(:project_visibility_level, :member_role, :token_type, :valid_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | :user | true | 'Composer provider index' | :success
+ 'PUBLIC' | :developer | :user | false | 'process Composer api request' | :unauthorized
+ 'PUBLIC' | :developer | :job | true | 'Composer provider index' | :success # Anonymous User - fallback
+ 'PUBLIC' | :guest | :user | true | 'Composer provider index' | :success
+ 'PUBLIC' | :guest | :user | false | 'process Composer api request' | :unauthorized
+ 'PUBLIC' | :guest | :job | true | 'Composer provider index' | :success # Anonymous User - fallback
+ 'PUBLIC' | nil | :user | true | 'Composer provider index' | :success
+ 'PUBLIC' | nil | :user | false | 'process Composer api request' | :unauthorized
+ 'PUBLIC' | nil | :job | true | 'Composer provider index' | :success # Anonymous User - fallback
+ 'PUBLIC' | nil | nil | nil | 'Composer provider index' | :success # Anonymous User
+ 'PRIVATE' | :developer | :user | true | 'Composer provider index' | :success
+ 'PRIVATE' | :developer | :user | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :developer | :job | true | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :guest | :user | true | 'Composer empty provider index' | :success
+ 'PRIVATE' | :guest | :user | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :guest | :job | true | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | nil | :user | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | nil | :user | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | nil | :job | true | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | nil | nil | nil | 'process Composer api request' | :unauthorized # Anonymous User
+ end
+
+ with_them do
+ include_context 'Composer api group access', auth_method: :token, project_visibility_level: params[:project_visibility_level], token_type: params[:token_type], valid_token: params[:valid_token] do
+ it_behaves_like params[:shared_examples_name], member_role: params[:member_role], expected_status: params[:expected_status]
+ end
end
end
@@ -186,7 +236,7 @@ RSpec.describe API::ComposerPackages, feature_category: :package_registry do
subject { get api(url), headers: headers }
context 'with no packages' do
- include_context 'Composer user type', :developer, true do
+ include_context 'Composer user type', member_role: :developer do
it_behaves_like 'returning response status', :not_found
end
end
@@ -194,40 +244,73 @@ RSpec.describe API::ComposerPackages, feature_category: :package_registry do
context 'with valid project' do
let!(:package) { create(:composer_package, :with_metadatum, name: package_name, project: project) }
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'Composer package api request' | :success
- 'PUBLIC' | :developer | true | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :developer | false | true | 'Composer package api request' | :success
- 'PUBLIC' | :developer | false | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :guest | true | true | 'Composer package api request' | :success
- 'PUBLIC' | :guest | true | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :guest | false | true | 'Composer package api request' | :success
- 'PUBLIC' | :guest | false | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :anonymous | false | true | 'Composer package api request' | :success
- 'PRIVATE' | :developer | true | true | 'Composer package api request' | :success
- 'PRIVATE' | :developer | true | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :developer | false | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :guest | true | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | true | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :guest | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | false | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'process Composer api request' | :not_found
+ context 'with basic auth' do
+ where(:project_visibility_level, :member_role, :token_type, :valid_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | :user | true | 'Composer package api request' | :success
+ 'PUBLIC' | :developer | :user | false | 'Composer package api request' | :success # Anonymous User - fallback
+ 'PUBLIC' | :developer | :job | true | 'Composer package api request' | :success
+ 'PUBLIC' | :guest | :user | true | 'Composer package api request' | :success
+ 'PUBLIC' | :guest | :user | false | 'Composer package api request' | :success # Anonymous User - fallback
+ 'PUBLIC' | :guest | :job | true | 'Composer package api request' | :success
+ 'PUBLIC' | nil | :user | true | 'Composer package api request' | :success
+ 'PUBLIC' | nil | :user | false | 'Composer package api request' | :success # Anonymous User - fallback
+ 'PUBLIC' | nil | :job | true | 'Composer package api request' | :success
+ 'PUBLIC' | nil | nil | nil | 'Composer package api request' | :success # Anonymous User
+ 'PRIVATE' | :developer | :user | true | 'Composer package api request' | :success
+ 'PRIVATE' | :developer | :user | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :developer | :job | true | 'Composer package api request' | :success
+ 'PRIVATE' | :guest | :user | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | :guest | :user | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :guest | :job | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | nil | :user | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | nil | :user | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | nil | :job | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | nil | nil | nil | 'process Composer api request' | :unauthorized # Anonymous User
+ end
+
+ with_them do
+ include_context 'Composer api group access', auth_method: :basic, project_visibility_level: params[:project_visibility_level], token_type: params[:token_type], valid_token: params[:valid_token] do
+ it_behaves_like params[:shared_examples_name], member_role: params[:member_role], expected_status: params[:expected_status]
+ end
+ end
end
- with_them do
- include_context 'Composer api group access', params[:project_visibility_level], params[:user_role], params[:user_token] do
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ context 'with token auth' do
+ where(:project_visibility_level, :member_role, :token_type, :valid_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | :user | true | 'Composer package api request' | :success
+ 'PUBLIC' | :developer | :user | false | 'process Composer api request' | :unauthorized
+ 'PUBLIC' | :developer | :job | true | 'Composer package api request' | :success # Anonymous User - fallback
+ 'PUBLIC' | :guest | :user | true | 'Composer package api request' | :success
+ 'PUBLIC' | :guest | :user | false | 'process Composer api request' | :unauthorized
+ 'PUBLIC' | :guest | :job | true | 'Composer package api request' | :success # Anonymous User - fallback
+ 'PUBLIC' | nil | :user | true | 'Composer package api request' | :success
+ 'PUBLIC' | nil | :user | false | 'process Composer api request' | :unauthorized
+ 'PUBLIC' | nil | :job | true | 'Composer package api request' | :success # Anonymous User - fallback
+ 'PUBLIC' | nil | nil | nil | 'Composer package api request' | :success # Anonymous User
+ 'PRIVATE' | :developer | :user | true | 'Composer package api request' | :success
+ 'PRIVATE' | :developer | :user | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :developer | :job | true | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :guest | :user | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | :guest | :user | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :guest | :job | true | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | nil | :user | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | nil | :user | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | nil | :job | true | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | nil | nil | nil | 'process Composer api request' | :unauthorized # Anonymous User
+ end
+
+ with_them do
+ include_context 'Composer api group access', auth_method: :token, project_visibility_level: params[:project_visibility_level], token_type: params[:token_type], valid_token: params[:valid_token] do
+ it_behaves_like params[:shared_examples_name], member_role: params[:member_role], expected_status: params[:expected_status]
+ end
end
end
context 'without a sha' do
let(:sha) { '' }
- include_context 'Composer api group access', 'PRIVATE', :developer, true do
- include_context 'Composer user type', :developer, true do
- it_behaves_like 'process Composer api request', :developer, :not_found, true
- end
+ include_context 'Composer api group access', project_visibility_level: 'PRIVATE', token_type: :user, auth_method: :token do
+ it_behaves_like 'process Composer api request', member_role: :developer, expected_status: :not_found
end
end
@@ -244,7 +327,7 @@ RSpec.describe API::ComposerPackages, feature_category: :package_registry do
subject { get api(url), headers: headers }
context 'with no packages' do
- include_context 'Composer user type', :developer, true do
+ include_context 'Composer user type', member_role: :developer do
it_behaves_like 'returning response status', :not_found
end
end
@@ -252,30 +335,65 @@ RSpec.describe API::ComposerPackages, feature_category: :package_registry do
context 'with valid project' do
let!(:package) { create(:composer_package, :with_metadatum, name: package_name, project: project) }
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'Composer package api request' | :success
- 'PUBLIC' | :developer | true | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :developer | false | true | 'Composer package api request' | :success
- 'PUBLIC' | :developer | false | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :guest | true | true | 'Composer package api request' | :success
- 'PUBLIC' | :guest | true | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :guest | false | true | 'Composer package api request' | :success
- 'PUBLIC' | :guest | false | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :anonymous | false | true | 'Composer package api request' | :success
- 'PRIVATE' | :developer | true | true | 'Composer package api request' | :success
- 'PRIVATE' | :developer | true | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :developer | false | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :guest | true | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | true | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :guest | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | false | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'process Composer api request' | :not_found
+ context 'with basic auth' do
+ where(:project_visibility_level, :member_role, :token_type, :valid_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | :user | true | 'Composer package api request' | :success
+ 'PUBLIC' | :developer | :user | false | 'Composer package api request' | :success # Anonymous User - fallback
+ 'PUBLIC' | :developer | :job | true | 'Composer package api request' | :success
+ 'PUBLIC' | :guest | :user | true | 'Composer package api request' | :success
+ 'PUBLIC' | :guest | :user | false | 'Composer package api request' | :success # Anonymous User - fallback
+ 'PUBLIC' | :guest | :job | true | 'Composer package api request' | :success
+ 'PUBLIC' | nil | :user | true | 'Composer package api request' | :success
+ 'PUBLIC' | nil | :user | false | 'Composer package api request' | :success # Anonymous User - fallback
+ 'PUBLIC' | nil | :job | true | 'Composer package api request' | :success
+ 'PUBLIC' | nil | nil | nil | 'Composer package api request' | :success # Anonymous User
+ 'PRIVATE' | :developer | :user | true | 'Composer package api request' | :success
+ 'PRIVATE' | :developer | :user | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :developer | :job | true | 'Composer package api request' | :success
+ 'PRIVATE' | :guest | :user | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | :guest | :user | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :guest | :job | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | nil | :user | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | nil | :user | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | nil | :job | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | nil | nil | nil | 'process Composer api request' | :unauthorized # Anonymous User
+ end
+
+ with_them do
+ include_context 'Composer api group access', auth_method: :basic, project_visibility_level: params[:project_visibility_level], token_type: params[:token_type], valid_token: params[:valid_token] do
+ it_behaves_like params[:shared_examples_name], member_role: params[:member_role], expected_status: params[:expected_status]
+ end
+ end
end
- with_them do
- include_context 'Composer api group access', params[:project_visibility_level], params[:user_role], params[:user_token] do
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ context 'with token auth' do
+ where(:project_visibility_level, :member_role, :token_type, :valid_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | :user | true | 'Composer package api request' | :success
+ 'PUBLIC' | :developer | :user | false | 'process Composer api request' | :unauthorized
+ 'PUBLIC' | :developer | :job | true | 'Composer package api request' | :success # Anonymous User - fallback
+ 'PUBLIC' | :guest | :user | true | 'Composer package api request' | :success
+ 'PUBLIC' | :guest | :user | false | 'process Composer api request' | :unauthorized
+ 'PUBLIC' | :guest | :job | true | 'Composer package api request' | :success # Anonymous User - fallback
+ 'PUBLIC' | nil | :user | true | 'Composer package api request' | :success
+ 'PUBLIC' | nil | :user | false | 'process Composer api request' | :unauthorized
+ 'PUBLIC' | nil | :job | true | 'Composer package api request' | :success # Anonymous User - fallback
+ 'PUBLIC' | nil | nil | nil | 'Composer package api request' | :success # Anonymous User
+ 'PRIVATE' | :developer | :user | true | 'Composer package api request' | :success
+ 'PRIVATE' | :developer | :user | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :developer | :job | true | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :guest | :user | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | :guest | :user | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :guest | :job | true | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | nil | :user | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | nil | :user | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | nil | :job | true | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | nil | nil | nil | 'process Composer api request' | :unauthorized # Anonymous User
+ end
+
+ with_them do
+ include_context 'Composer api group access', auth_method: :token, project_visibility_level: params[:project_visibility_level], token_type: params[:token_type], valid_token: params[:valid_token] do
+ it_behaves_like params[:shared_examples_name], member_role: params[:member_role], expected_status: params[:expected_status]
+ end
end
end
@@ -296,42 +414,41 @@ RSpec.describe API::ComposerPackages, feature_category: :package_registry do
subject { post api(url), headers: headers, params: params }
shared_examples 'composer package publish' do
- context 'with valid project' do
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'Composer package creation' | :created
- 'PUBLIC' | :developer | true | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :developer | false | true | 'process Composer api request' | :forbidden
- 'PUBLIC' | :developer | false | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :guest | true | true | 'process Composer api request' | :forbidden
- 'PUBLIC' | :guest | true | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :guest | false | true | 'process Composer api request' | :forbidden
- 'PUBLIC' | :guest | false | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :anonymous | false | true | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :developer | true | true | 'Composer package creation' | :created
- 'PRIVATE' | :developer | true | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :developer | false | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :guest | true | true | 'process Composer api request' | :forbidden
- 'PRIVATE' | :guest | true | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :guest | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | false | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'process Composer api request' | :unauthorized
- end
+ where(:project_visibility_level, :member_role, :token_type, :valid_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | :user | true | 'Composer package creation' | :created
+ 'PUBLIC' | :developer | :user | false | 'process Composer api request' | :unauthorized
+ 'PUBLIC' | :developer | :job | true | 'Composer package creation' | :created
+ 'PUBLIC' | :guest | :user | true | 'process Composer api request' | :forbidden
+ 'PUBLIC' | :guest | :user | false | 'process Composer api request' | :unauthorized
+ 'PUBLIC' | :guest | :job | true | 'process Composer api request' | :forbidden
+ 'PUBLIC' | nil | :user | true | 'process Composer api request' | :forbidden
+ 'PUBLIC' | nil | :user | false | 'process Composer api request' | :unauthorized
+ 'PUBLIC' | nil | :job | true | 'process Composer api request' | :forbidden
+ 'PUBLIC' | nil | nil | nil | 'process Composer api request' | :unauthorized # Anonymous User
+ 'PRIVATE' | :developer | :user | true | 'Composer package creation' | :created
+ 'PRIVATE' | :developer | :user | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :developer | :job | true | 'Composer package creation' | :created
+ 'PRIVATE' | :guest | :user | true | 'process Composer api request' | :forbidden
+ 'PRIVATE' | :guest | :user | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :guest | :job | true | 'process Composer api request' | :forbidden
+ 'PRIVATE' | nil | :user | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | nil | :user | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | nil | :job | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | nil | nil | nil | 'process Composer api request' | :unauthorized # Anonymous User
+ end
- with_them do
- include_context 'Composer api project access', params[:project_visibility_level], params[:user_role], params[:user_token] do
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
- end
+ with_them do
+ include_context 'Composer api project access', auth_method: :token, project_visibility_level: params[:project_visibility_level], token_type: params[:token_type], valid_token: params[:valid_token] do
+ it_behaves_like params[:shared_examples_name], member_role: params[:member_role], expected_status: params[:expected_status]
end
-
- it_behaves_like 'Composer publish with deploy tokens'
end
+ it_behaves_like 'Composer publish with deploy tokens'
it_behaves_like 'rejects Composer access with unknown project id'
end
context 'with existing package' do
- include_context 'Composer api project access', 'PRIVATE', :developer, true, true
+ include_context 'Composer api project access', auth_method: :token, project_visibility_level: 'PRIVATE', token_type: :user
let_it_be_with_reload(:existing_package) { create(:composer_package, name: package_name, version: '1.2.99', project: project) }
@@ -362,7 +479,7 @@ RSpec.describe API::ComposerPackages, feature_category: :package_registry do
context 'with no tag or branch params' do
let(:headers) { basic_auth_header(user.username, personal_access_token.token) }
- it_behaves_like 'process Composer api request', :developer, :bad_request
+ it_behaves_like 'process Composer api request', member_role: :developer, expected_status: :bad_request
end
context 'with a tag' do
@@ -376,7 +493,7 @@ RSpec.describe API::ComposerPackages, feature_category: :package_registry do
let(:params) { { tag: 'non-existing-tag' } }
let(:headers) { basic_auth_header(user.username, personal_access_token.token) }
- it_behaves_like 'process Composer api request', :developer, :not_found
+ it_behaves_like 'process Composer api request', member_role: :developer, expected_status: :not_found
end
end
@@ -391,7 +508,7 @@ RSpec.describe API::ComposerPackages, feature_category: :package_registry do
let(:params) { { branch: 'non-existing-branch' } }
let(:headers) { basic_auth_header(user.username, personal_access_token.token) }
- it_behaves_like 'process Composer api request', :developer, :not_found
+ it_behaves_like 'process Composer api request', member_role: :developer, expected_status: :not_found
end
end
@@ -407,19 +524,19 @@ RSpec.describe API::ComposerPackages, feature_category: :package_registry do
context 'with a missing composer.json file' do
let(:files) { { 'some_other_file' => '' } }
- it_behaves_like 'process Composer api request', :developer, :unprocessable_entity
+ it_behaves_like 'process Composer api request', member_role: :developer, expected_status: :unprocessable_entity
end
context 'with an empty composer.json file' do
let(:files) { { 'composer.json' => '' } }
- it_behaves_like 'process Composer api request', :developer, :unprocessable_entity
+ it_behaves_like 'process Composer api request', member_role: :developer, expected_status: :unprocessable_entity
end
context 'with a malformed composer.json file' do
let(:files) { { 'composer.json' => 'not_valid_JSON' } }
- it_behaves_like 'process Composer api request', :developer, :unprocessable_entity
+ it_behaves_like 'process Composer api request', member_role: :developer, expected_status: :unprocessable_entity
end
end
end
@@ -446,10 +563,10 @@ RSpec.describe API::ComposerPackages, feature_category: :package_registry do
context 'anonymous' do
let(:headers) { {} }
- it_behaves_like 'process Composer api request', :anonymous, :unauthorized
+ it_behaves_like 'process Composer api request', expected_status: :unauthorized
end
- it_behaves_like 'process Composer api request', :developer, :not_found
+ it_behaves_like 'process Composer api request', member_role: :developer, expected_status: :not_found
end
context 'when the package name does not match the sha' do
@@ -460,60 +577,116 @@ RSpec.describe API::ComposerPackages, feature_category: :package_registry do
context 'anonymous' do
let(:headers) { {} }
- it_behaves_like 'process Composer api request', :anonymous, :unauthorized
+ it_behaves_like 'process Composer api request', expected_status: :unauthorized
end
- it_behaves_like 'process Composer api request', :developer, :not_found
+ it_behaves_like 'process Composer api request', member_role: :developer, expected_status: :not_found
end
context 'with a match package name and sha' do
let(:branch) { project.repository.find_branch('master') }
let(:sha) { branch.target }
- where(:project_visibility_level, :user_role, :member, :user_token, :expected_status) do
- 'PUBLIC' | :developer | true | true | :success
- 'PUBLIC' | :developer | true | false | :success
- 'PUBLIC' | :developer | false | true | :success
- 'PUBLIC' | :developer | false | false | :success
- 'PUBLIC' | :guest | true | true | :success
- 'PUBLIC' | :guest | true | false | :success
- 'PUBLIC' | :guest | false | true | :success
- 'PUBLIC' | :guest | false | false | :success
- 'PUBLIC' | :anonymous | false | true | :success
- 'PRIVATE' | :developer | true | true | :success
- 'PRIVATE' | :developer | true | false | :unauthorized
- 'PRIVATE' | :developer | false | true | :not_found
- 'PRIVATE' | :developer | false | false | :unauthorized
- 'PRIVATE' | :guest | true | true | :forbidden
- 'PRIVATE' | :guest | true | false | :unauthorized
- 'PRIVATE' | :guest | false | true | :not_found
- 'PRIVATE' | :guest | false | false | :unauthorized
- 'PRIVATE' | :anonymous | false | true | :unauthorized
- end
+ context 'with basic auth' do
+ where(:project_visibility_level, :member_role, :token_type, :valid_token, :expected_status) do
+ 'PUBLIC' | :developer | :user | true | :success
+ 'PUBLIC' | :developer | :user | false | :success # Anonymous User - fallback
+ 'PUBLIC' | :developer | :job | true | :success
+ 'PUBLIC' | :guest | :user | true | :success
+ 'PUBLIC' | :guest | :user | false | :success # Anonymous User - fallback
+ 'PUBLIC' | :guest | :job | true | :success
+ 'PUBLIC' | nil | :user | true | :success
+ 'PUBLIC' | nil | :user | false | :success # Anonymous User - fallback
+ 'PUBLIC' | nil | :job | true | :success
+ 'PUBLIC' | nil | nil | nil | :success # Anonymous User
+ 'PRIVATE' | :developer | :user | true | :success
+ 'PRIVATE' | :developer | :user | false | :unauthorized
+ 'PRIVATE' | :developer | :job | true | :success
+ 'PRIVATE' | :guest | :user | true | :forbidden
+ 'PRIVATE' | :guest | :user | false | :unauthorized
+ 'PRIVATE' | :guest | :job | true | :forbidden
+ 'PRIVATE' | nil | :user | true | :not_found
+ 'PRIVATE' | nil | :user | false | :unauthorized
+ 'PRIVATE' | nil | :job | true | :not_found
+ 'PRIVATE' | nil | nil | nil | :unauthorized # Anonymous User
+ end
- with_them do
- let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
+ with_them do
+ include_context 'Composer api project access', auth_method: :basic, project_visibility_level: params[:project_visibility_level], token_type: params[:token_type], valid_token: params[:valid_token] do
+ it_behaves_like 'process Composer api request', member_role: params[:member_role], expected_status: params[:expected_status] do
+ if params[:expected_status] == :success
+ let(:snowplow_gitlab_standard_context) do
+ if valid_token && (member_role || project_visibility_level == 'PUBLIC')
+ { project: project, namespace: project.namespace, property: 'i_package_composer_user', user: user }
+ else
+ { project: project, namespace: project.namespace, property: 'i_package_composer_user' }
+ end
+ end
+
+ it_behaves_like 'a package tracking event', described_class.name, 'pull_package'
+ else
+ it_behaves_like 'not a package tracking event'
+ end
+ end
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
+ context 'with another project' do
+ include Ci::JobTokenScopeHelpers
+
+ let_it_be(:project_two) { create(:project, group: group) }
+ let_it_be(:job) { create(:ci_build, :running, user: user, project: project_two) }
+
+ before do
+ add_inbound_accessible_linkage(project_two, project)
+ end
+
+ it_behaves_like 'process Composer api request', member_role: params[:member_role], expected_status: params[:expected_status]
+ end
+ end
end
+ end
- it_behaves_like 'process Composer api request', params[:user_role], params[:expected_status], params[:member]
+ context 'with token auth' do
+ where(:project_visibility_level, :member_role, :token_type, :valid_token, :expected_status) do
+ 'PUBLIC' | :developer | :user | true | :success
+ 'PUBLIC' | :developer | :user | false | :unauthorized
+ 'PUBLIC' | :developer | :job | true | :success # Anonymous User - fallback
+ 'PUBLIC' | :guest | :user | true | :success
+ 'PUBLIC' | :guest | :user | false | :unauthorized
+ 'PUBLIC' | :guest | :job | true | :success # Anonymous User - fallback
+ 'PUBLIC' | nil | :user | true | :success
+ 'PUBLIC' | nil | :user | false | :unauthorized
+ 'PUBLIC' | nil | :job | true | :success # Anonymous User - fallback
+ 'PUBLIC' | nil | nil | nil | :success # Anonymous User
+ 'PRIVATE' | :developer | :user | true | :success
+ 'PRIVATE' | :developer | :user | false | :unauthorized
+ 'PRIVATE' | :developer | :job | true | :unauthorized
+ 'PRIVATE' | :guest | :user | true | :forbidden
+ 'PRIVATE' | :guest | :user | false | :unauthorized
+ 'PRIVATE' | :guest | :job | true | :unauthorized
+ 'PRIVATE' | nil | :user | true | :not_found
+ 'PRIVATE' | nil | :user | false | :unauthorized
+ 'PRIVATE' | nil | :job | true | :unauthorized
+ 'PRIVATE' | nil | nil | nil | :unauthorized # Anonymous User
+ end
- include_context 'Composer user type', params[:user_role], params[:member] do
- if params[:expected_status] == :success
- let(:snowplow_gitlab_standard_context) do
- if user_role == :anonymous || (project_visibility_level == 'PUBLIC' && user_token == false)
- { project: project, namespace: project.namespace, property: 'i_package_composer_user' }
+ with_them do
+ include_context 'Composer api project access', auth_method: :token, project_visibility_level: params[:project_visibility_level], token_type: params[:token_type], valid_token: params[:valid_token] do
+ it_behaves_like 'process Composer api request', member_role: params[:member_role], expected_status: params[:expected_status] do
+ if params[:expected_status] == :success
+ let(:snowplow_gitlab_standard_context) do
+ # Job tokens sent over token auth means current_user is nil
+ if valid_token && token_type != :job && (member_role || project_visibility_level == 'PUBLIC')
+ { project: project, namespace: project.namespace, property: 'i_package_composer_user', user: user }
+ else
+ { project: project, namespace: project.namespace, property: 'i_package_composer_user' }
+ end
+ end
+
+ it_behaves_like 'a package tracking event', described_class.name, 'pull_package'
else
- { project: project, namespace: project.namespace, property: 'i_package_composer_user', user: user }
+ it_behaves_like 'not a package tracking event'
end
end
-
- it_behaves_like 'a package tracking event', described_class.name, 'pull_package'
- else
- it_behaves_like 'not a package tracking event'
end
end
end
diff --git a/spec/requests/api/graphql/achievements/user_achievements_query_spec.rb b/spec/requests/api/graphql/achievements/user_achievements_query_spec.rb
index fa47cf4988a..32048ea1432 100644
--- a/spec/requests/api/graphql/achievements/user_achievements_query_spec.rb
+++ b/spec/requests/api/graphql/achievements/user_achievements_query_spec.rb
@@ -39,7 +39,9 @@ RSpec.describe 'UserAchievements', feature_category: :user_profile do
HEREDOC
end
- let_it_be(:query) do
+ let(:current_user) { user }
+
+ let(:query) do
graphql_query_for('namespace', { full_path: group.full_path }, fields)
end
@@ -48,7 +50,7 @@ RSpec.describe 'UserAchievements', feature_category: :user_profile do
end
before do
- post_graphql(query, current_user: user)
+ post_graphql(query, current_user: current_user)
end
it_behaves_like 'a working graphql query'
@@ -65,6 +67,27 @@ RSpec.describe 'UserAchievements', feature_category: :user_profile do
expect(graphql_data_at(:namespace, :achievements, :nodes, :userAchievements, :count)).to contain_exactly(1)
end
+ context 'when user_achievement has priority set' do
+ let_it_be(:achievement_with_priority) do
+ create(:user_achievement, achievement: achievement, user: user, priority: 0)
+ end
+
+ let(:userquery_fields) do
+ "userAchievements { nodes { id } }"
+ end
+
+ let(:query) do
+ graphql_query_for('user', { username: user.username }, userquery_fields)
+ end
+
+ it 'returns achievements in correct order' do
+ expect(graphql_data_at(:user, :userAchievements, :nodes).pluck('id')).to eq([
+ achievement_with_priority.to_global_id.to_s,
+ non_revoked_achievement1.to_global_id.to_s
+ ])
+ end
+ end
+
it 'can lookahead to eliminate N+1 queries', :use_clean_rails_memory_store_caching do
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
post_graphql(query, current_user: user)
diff --git a/spec/requests/api/graphql/ci/ci_cd_setting_spec.rb b/spec/requests/api/graphql/ci/ci_cd_setting_spec.rb
index 0437a30eccd..db9b6bfbf5c 100644
--- a/spec/requests/api/graphql/ci/ci_cd_setting_spec.rb
+++ b/spec/requests/api/graphql/ci/ci_cd_setting_spec.rb
@@ -45,11 +45,16 @@ RSpec.describe 'Getting Ci Cd Setting', feature_category: :continuous_integratio
it 'fetches the settings data' do
expect(settings_data['mergePipelinesEnabled']).to eql project.ci_cd_settings.merge_pipelines_enabled?
- expect(settings_data['mergeTrainsEnabled']).to eql project.ci_cd_settings.merge_trains_enabled?
expect(settings_data['keepLatestArtifact']).to eql project.keep_latest_artifacts_available?
expect(settings_data['jobTokenScopeEnabled']).to eql project.ci_cd_settings.job_token_scope_enabled?
expect(settings_data['inboundJobTokenScopeEnabled']).to eql(
project.ci_cd_settings.inbound_job_token_scope_enabled?)
+
+ if Gitlab.ee?
+ expect(settings_data['mergeTrainsEnabled']).to eql project.ci_cd_settings.merge_trains_enabled?
+ else
+ expect(settings_data['mergeTrainsEnabled']).to be_nil
+ end
end
end
end
diff --git a/spec/requests/api/graphql/ci/config_spec.rb b/spec/requests/api/graphql/ci/config_spec.rb
index 5f43a0806f3..e7e32ac5531 100644
--- a/spec/requests/api/graphql/ci/config_spec.rb
+++ b/spec/requests/api/graphql/ci/config_spec.rb
@@ -455,4 +455,109 @@ RSpec.describe 'Query.ciConfig', feature_category: :continuous_integration do
end
# rubocop:enable Layout/LineLength
end
+
+ describe 'skip_verify_project_sha' do
+ let(:user) { project.owner }
+ let(:sha) { project.commit.sha }
+ let(:skip_verify_project_sha) { nil }
+ let(:content) { YAML.dump(build: { script: 'echo' }) }
+ let(:required_args) { { projectPath: project.full_path, content: content } }
+ let(:optional_args) { { sha: sha, skip_verify_project_sha: skip_verify_project_sha }.compact }
+
+ let(:query) do
+ graphql_query_for(
+ 'ciConfig',
+ required_args.merge(optional_args),
+ %w[errors mergedYaml]
+ )
+ end
+
+ before do
+ post_graphql_query
+ end
+
+ shared_examples 'content is valid' do
+ it 'returns the expected data without validation errors' do
+ expect(graphql_data_at(:ciConfig)).to eq(
+ 'errors' => [],
+ 'mergedYaml' => "---\nbuild:\n script: echo\n"
+ )
+ end
+ end
+
+ shared_examples 'returning error' do
+ it 'returns an error' do
+ expect(graphql_data_at(:ciConfig, :errors)).to include(
+ /configuration originates from an external project or a commit not associated with a Git reference/)
+ end
+ end
+
+ shared_examples 'when the sha exists in the main project' do
+ context 'when skip_verify_project_sha is not provided' do
+ let(:skip_verify_project_sha) { nil }
+
+ it_behaves_like 'content is valid'
+ end
+
+ context 'when skip_verify_project_sha is false' do
+ let(:skip_verify_project_sha) { false }
+
+ it_behaves_like 'content is valid'
+ end
+
+ context 'when skip_verify_project_sha is true' do
+ let(:skip_verify_project_sha) { true }
+
+ it_behaves_like 'content is valid'
+ end
+ end
+
+ context 'when the sha is from the main project' do
+ it_behaves_like 'when the sha exists in the main project'
+ end
+
+ context 'when the sha is from a fork project' do
+ include_context 'when a project repository contains a forked commit'
+
+ let(:sha) { forked_commit_sha }
+
+ context 'when the sha is associated with a main project ref' do
+ before_all do
+ repository.add_branch(project.owner, 'branch1', forked_commit_sha)
+ end
+
+ after(:all) do
+ repository.rm_branch(project.owner, 'branch1')
+ end
+
+ it_behaves_like 'when the sha exists in the main project'
+ end
+
+ context 'when the sha is not associated with a main project ref' do
+ context 'when skip_verify_project_sha is not provided' do
+ let(:skip_verify_project_sha) { nil }
+
+ it_behaves_like 'returning error'
+ end
+
+ context 'when skip_verify_project_sha is false' do
+ let(:skip_verify_project_sha) { false }
+
+ it_behaves_like 'returning error'
+ end
+
+ context 'when skip_verify_project_sha is true' do
+ let(:skip_verify_project_sha) { true }
+
+ it_behaves_like 'content is valid'
+ end
+ end
+ end
+
+ context 'when the sha is invalid' do
+ let(:sha) { 'invalid-sha' }
+
+ it_behaves_like 'when the sha exists in the main project'
+ end
+ end
end
diff --git a/spec/requests/api/graphql/ci/job_spec.rb b/spec/requests/api/graphql/ci/job_spec.rb
index 960697db239..c402873833a 100644
--- a/spec/requests/api/graphql/ci/job_spec.rb
+++ b/spec/requests/api/graphql/ci/job_spec.rb
@@ -79,7 +79,7 @@ RSpec.describe 'Query.project(fullPath).pipelines.job(id)', feature_category: :c
post_graphql(query, current_user: user)
expect(graphql_data_at(*path)).to match a_hash_including(
- 'text' => 'pending',
+ 'text' => 'Pending',
'label' => 'pending',
'action' => a_hash_including('buttonTitle' => 'Cancel this job', 'icon' => 'cancel')
)
diff --git a/spec/requests/api/graphql/ci/pipeline_schedules_spec.rb b/spec/requests/api/graphql/ci/pipeline_schedules_spec.rb
index 76adce6ff1b..8219cdcd673 100644
--- a/spec/requests/api/graphql/ci/pipeline_schedules_spec.rb
+++ b/spec/requests/api/graphql/ci/pipeline_schedules_spec.rb
@@ -115,6 +115,31 @@ RSpec.describe 'Query.project.pipelineSchedules', feature_category: :continuous_
expect(edit_path).to be nil
end
+
+ it 'returns the pipeline schedules data' do
+ expect(pipeline_schedule_graphql_data['id']).to eq(pipeline_schedule.to_global_id.to_s)
+ end
+
+ context 'when public pipelines are disabled' do
+ before do
+ project.update!(public_builds: false)
+ post_graphql(query, current_user: another_user)
+ end
+
+ it 'does not return any data' do
+ expect(pipeline_schedule_graphql_data).to be_nil
+ end
+
+ context 'when the user is authorized' do
+ before_all do
+ project.add_developer(another_user)
+ end
+
+ it 'returns the pipeline schedules data' do
+ expect(pipeline_schedule_graphql_data['id']).to eq(pipeline_schedule.to_global_id.to_s)
+ end
+ end
+ end
end
it 'avoids N+1 queries' do
diff --git a/spec/requests/api/graphql/gitlab_schema_spec.rb b/spec/requests/api/graphql/gitlab_schema_spec.rb
index ad21006f99a..d55a70f503c 100644
--- a/spec/requests/api/graphql/gitlab_schema_spec.rb
+++ b/spec/requests/api/graphql/gitlab_schema_spec.rb
@@ -239,6 +239,7 @@ RSpec.describe 'GitlabSchema configurations', feature_category: :integrations do
graphql_name 'BarEnum'
value 'FOOBAR', value: 'foobar', deprecated: { milestone: '0.1', reason: :renamed }
+ value 'FOOBARNEW', value: 'foobarnew'
end)
field :baz, GraphQL::Types::Boolean do
diff --git a/spec/requests/api/graphql/merge_request/merge_request_spec.rb b/spec/requests/api/graphql/merge_request/merge_request_spec.rb
index 02ea7bac920..83b25a7c692 100644
--- a/spec/requests/api/graphql/merge_request/merge_request_spec.rb
+++ b/spec/requests/api/graphql/merge_request/merge_request_spec.rb
@@ -17,37 +17,27 @@ RSpec.describe 'Query.merge_request(id)', feature_category: :code_review_workflo
graphql_query_for('mergeRequest', merge_request_params, merge_request_fields)
end
- it_behaves_like 'a working graphql query' do
- before do
- post_graphql(query, current_user: current_user)
+ context 'when the user does not have access to the merge request' do
+ it_behaves_like 'a working graphql query that returns no data' do
+ before do
+ post_graphql(query, current_user: current_user)
+ end
end
end
- it_behaves_like 'a noteable graphql type we can query' do
- let(:noteable) { merge_request }
- let(:project) { merge_request.project }
- let(:path_to_noteable) { [:merge_request] }
-
+ context 'when the user does have access' do
before do
project.add_reporter(current_user)
end
- def query(fields)
- graphql_query_for('mergeRequest', merge_request_params, fields)
- end
- end
-
- context 'when the user does not have access to the merge request' do
- it 'returns nil' do
- post_graphql(query)
-
- expect(merge_request_data).to be nil
- end
- end
+ it_behaves_like 'a noteable graphql type we can query' do
+ let(:noteable) { merge_request }
+ let(:project) { merge_request.project }
+ let(:path_to_noteable) { [:merge_request] }
- context 'when the user does have access' do
- before do
- project.add_reporter(current_user)
+ def query(fields)
+ graphql_query_for('mergeRequest', merge_request_params, fields)
+ end
end
it 'returns the merge request' do
@@ -65,7 +55,7 @@ RSpec.describe 'Query.merge_request(id)', feature_category: :code_review_workflo
end
with_them do
- it_behaves_like 'a working graphql query' do
+ it_behaves_like 'a working graphql query that returns data' do
let(:merge_request_fields) do
field
end
diff --git a/spec/requests/api/graphql/mutations/achievements/update_user_achievement_priorities_spec.rb b/spec/requests/api/graphql/mutations/achievements/update_user_achievement_priorities_spec.rb
new file mode 100644
index 00000000000..815fda50d77
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/achievements/update_user_achievement_priorities_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Achievements::UpdateUserAchievementPriorities, feature_category: :user_profile do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:achievement) { create(:achievement, namespace: group) }
+
+ let_it_be(:user_achievement1) do
+ create(:user_achievement, achievement: achievement, user: user, priority: 0)
+ end
+
+ let_it_be(:user_achievement2) { create(:user_achievement, achievement: achievement, user: user) }
+ let_it_be(:user_achievement3) { create(:user_achievement, achievement: achievement, user: user) }
+
+ let(:mutation) { graphql_mutation(:user_achievement_priorities_update, params) }
+ let(:user_achievement_ids) { [user_achievement3, user_achievement1].map(&:to_global_id) }
+ let(:params) { { user_achievement_ids: user_achievement_ids } }
+
+ subject { post_graphql_mutation(mutation, current_user: current_user) }
+
+ def mutation_response
+ graphql_mutation_response(:user_achievement_priorities_update)
+ end
+
+ context 'when the user is not the user achievement owner' do
+ let(:current_user) { create(:user) }
+
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: ["The resource that you are attempting to access does not exist " \
+ "or you don't have permission to perform this action"]
+
+ it 'does not update any achievements', :aggregate_failures do
+ subject
+
+ expect(user_achievement1.reload.priority).to be_zero
+ expect(user_achievement2.reload.priority).to be_nil
+ expect(user_achievement3.reload.priority).to be_nil
+ end
+ end
+
+ context 'when the user is the user achievement owner' do
+ let(:current_user) { user }
+
+ context 'when the params are invalid' do
+ let(:user_achievement_ids) { nil }
+
+ it 'returns the validation error' do
+ subject
+
+ expect(graphql_errors.to_s).to include('invalid value for userAchievementIds (Expected value to not be null)')
+ end
+ end
+
+ context 'when a user_achievement_id is invalid' do
+ let(:user_achievement_ids) { ["gid://gitlab/Achievements::UserAchievement/#{non_existing_record_id}"] }
+
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: ["The resource that you are attempting to access does not exist " \
+ "or you don't have permission to perform this action"]
+ end
+
+ context 'when updating priorities' do
+ it 'updates only the given user achievements', :aggregate_failures do
+ subject
+
+ expect(graphql_data_at(:user_achievement_priorities_update, :user_achievements))
+ .to contain_exactly(a_graphql_entity_for(user_achievement3), a_graphql_entity_for(user_achievement1))
+
+ expect(user_achievement3.reload.priority).to eq(0)
+ expect(user_achievement1.reload.priority).to eq(1)
+ expect(user_achievement2.reload.priority).to be_nil
+ end
+ end
+
+ context 'when no achievement ids are given' do
+ let(:user_achievement_ids) { [] }
+
+ it 'removes all priorities', :aggregate_failures do
+ subject
+
+ expect(graphql_data_at(:user_achievement_priorities_update, :user_achievements))
+ .to contain_exactly(a_graphql_entity_for(user_achievement1)) # user_achievement1 was prioritized before
+
+ [user_achievement1, user_achievement2, user_achievement3].each do |ua|
+ expect(ua.reload.priority).to be_nil
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/ci/job/retry_spec.rb b/spec/requests/api/graphql/mutations/ci/job/retry_spec.rb
index 4114c77491b..82988854719 100644
--- a/spec/requests/api/graphql/mutations/ci/job/retry_spec.rb
+++ b/spec/requests/api/graphql/mutations/ci/job/retry_spec.rb
@@ -37,14 +37,30 @@ RSpec.describe 'JobRetry', feature_category: :continuous_integration do
expect(graphql_errors).not_to be_empty
end
- it 'retries a job' do
- post_graphql_mutation(mutation, current_user: user)
+ context 'when the job is a Ci::Build' do
+ it 'retries the build' do
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ new_job_id = GitlabSchema.object_from_id(mutation_response['job']['id']).sync.id
+
+ new_job = ::Ci::Build.find(new_job_id)
+ expect(new_job).not_to be_retried
+ end
+ end
+
+ context 'when the job is a Ci::Bridge' do
+ let(:job) { create(:ci_bridge, :success, pipeline: pipeline, name: 'puente') }
- expect(response).to have_gitlab_http_status(:success)
- new_job_id = GitlabSchema.object_from_id(mutation_response['job']['id']).sync.id
+ it 'retries the bridge' do
+ post_graphql_mutation(mutation, current_user: user)
- new_job = ::Ci::Build.find(new_job_id)
- expect(new_job).not_to be_retried
+ expect(response).to have_gitlab_http_status(:success)
+ new_job_id = GitlabSchema.object_from_id(mutation_response['job']['id']).sync.id
+
+ new_job = ::Ci::Bridge.find(new_job_id)
+ expect(new_job).not_to be_retried
+ end
end
context 'when given CI variables' do
diff --git a/spec/requests/api/graphql/mutations/container_repository/destroy_spec.rb b/spec/requests/api/graphql/mutations/container_repository/destroy_spec.rb
index ef159e41d3d..8d980a9e8ea 100644
--- a/spec/requests/api/graphql/mutations/container_repository/destroy_spec.rb
+++ b/spec/requests/api/graphql/mutations/container_repository/destroy_spec.rb
@@ -36,8 +36,6 @@ RSpec.describe 'Destroying a container repository', feature_category: :container
it 'marks the container repository as delete_scheduled' do
expect(::Packages::CreateEventService)
.to receive(:new).with(nil, user, event_name: :delete_repository, scope: :container).and_call_original
- expect(DeleteContainerRepositoryWorker)
- .not_to receive(:perform_async)
subject
@@ -50,9 +48,6 @@ RSpec.describe 'Destroying a container repository', feature_category: :container
shared_examples 'denying the mutation request' do
it 'does not destroy the container repository' do
- expect(DeleteContainerRepositoryWorker)
- .not_to receive(:perform_async).with(user.id, container_repository.id)
-
subject
expect(mutation_response).to be_nil
diff --git a/spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb b/spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb
index 9fce5f8497f..497ae1cc13f 100644
--- a/spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb
+++ b/spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb
@@ -61,6 +61,17 @@ RSpec.describe 'Setting issues crm contacts', feature_category: :service_desk do
expect(graphql_data_at(:issue_set_crm_contacts, :issue, :customer_relations_contacts, :nodes))
.to match_array(expected_contacts(mutation_contacts))
end
+
+ context 'with an empty list of contacts' do
+ let(:mutation_contacts) { [] }
+
+ it 'removes all contacts' do
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(graphql_data_at(:issue_set_crm_contacts, :issue, :customer_relations_contacts, :nodes))
+ .to be_empty
+ end
+ end
end
context 'append' do
diff --git a/spec/requests/api/graphql/mutations/merge_requests/accept_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/accept_spec.rb
index 19a7c72ba80..9f6f8dff051 100644
--- a/spec/requests/api/graphql/mutations/merge_requests/accept_spec.rb
+++ b/spec/requests/api/graphql/mutations/merge_requests/accept_spec.rb
@@ -33,12 +33,19 @@ RSpec.describe 'accepting a merge request', :request_store do
project.add_maintainer(current_user)
end
- it 'merges the merge request' do
+ it 'merges the merge request asynchronously' do
+ expect_next_found_instance_of(MergeRequest) do |instance|
+ expect(instance).to receive(:merge_async).with(current_user.id, {
+ 'sha' => merge_request.diff_head_sha,
+ 'squash' => false
+ }).and_call_original
+ end
+
post_graphql_mutation(mutation, current_user: current_user)
expect(response).to have_gitlab_http_status(:success)
expect(mutation_response['mergeRequest']).to include(
- 'state' => 'merged'
+ 'state' => merge_request.state
)
end
end
diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb
index d41628704a1..4a7d1083f2e 100644
--- a/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb
+++ b/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb
@@ -125,7 +125,7 @@ RSpec.describe 'Setting assignees of a merge request', :assume_throttled, featur
context 'when passing append as true' do
let(:mode) { Types::MutationOperationModeEnum.enum[:append] }
let(:input) { { assignee_usernames: [assignee2.username], operation_mode: mode } }
- let(:db_query_limit) { 22 }
+ let(:db_query_limit) { 23 }
before do
# In CE, APPEND is a NOOP as you can't have multiple assignees
diff --git a/spec/requests/api/graphql/mutations/packages/protection/rule/create_spec.rb b/spec/requests/api/graphql/mutations/packages/protection/rule/create_spec.rb
new file mode 100644
index 00000000000..b0c8526fa1c
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/packages/protection/rule/create_spec.rb
@@ -0,0 +1,165 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Creating the packages protection rule', :aggregate_failures, feature_category: :package_registry do
+ include GraphqlHelpers
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user, maintainer_projects: [project]) }
+
+ let(:package_protection_rule_attributes) { build_stubbed(:package_protection_rule, project: project) }
+
+ let(:kwargs) do
+ {
+ project_path: project.full_path,
+ package_name_pattern: package_protection_rule_attributes.package_name_pattern,
+ package_type: "NPM",
+ push_protected_up_to_access_level: "MAINTAINER"
+ }
+ end
+
+ let(:mutation) do
+ graphql_mutation(:create_packages_protection_rule, kwargs,
+ <<~QUERY
+ clientMutationId
+ errors
+ QUERY
+ )
+ end
+
+ let(:mutation_response) { graphql_mutation_response(:create_packages_protection_rule) }
+
+ describe 'post graphql mutation' do
+ subject { post_graphql_mutation(mutation, current_user: user) }
+
+ context 'without existing packages protection rule' do
+ it 'returns without error' do
+ subject
+
+ expect_graphql_errors_to_be_empty
+ end
+
+ it 'returns the created packages protection rule' do
+ expect { subject }.to change { ::Packages::Protection::Rule.count }.by(1)
+
+ expect_graphql_errors_to_be_empty
+ expect(Packages::Protection::Rule.where(project: project).count).to eq 1
+
+ expect(Packages::Protection::Rule.where(project: project,
+ package_name_pattern: kwargs[:package_name_pattern])).to exist
+ end
+
+ context 'when invalid fields are given' do
+ let(:kwargs) do
+ {
+ project_path: project.full_path,
+ package_name_pattern: '',
+ package_type: 'UNKNOWN_PACKAGE_TYPE',
+ push_protected_up_to_access_level: 'UNKNOWN_ACCESS_LEVEL'
+ }
+ end
+
+ it 'returns error about required argument' do
+ subject
+
+ expect_graphql_errors_to_include(/was provided invalid value for packageType/)
+ expect_graphql_errors_to_include(/pushProtectedUpToAccessLevel/)
+ end
+ end
+ end
+
+ context 'when user does not have permission' do
+ let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
+ let_it_be(:reporter) { create(:user).tap { |u| project.add_reporter(u) } }
+ let_it_be(:guest) { create(:user).tap { |u| project.add_guest(u) } }
+ let_it_be(:anonymous) { create(:user) }
+
+ where(:user) do
+ [ref(:developer), ref(:reporter), ref(:guest), ref(:anonymous)]
+ end
+
+ with_them do
+ it 'returns an error' do
+ expect { subject }.not_to change { ::Packages::Protection::Rule.count }
+
+ expect_graphql_errors_to_include(/you don't have permission to perform this action/)
+ end
+ end
+ end
+
+ context 'with existing packages protection rule' do
+ let_it_be(:existing_package_protection_rule) do
+ create(:package_protection_rule, project: project, push_protected_up_to_access_level: Gitlab::Access::DEVELOPER)
+ end
+
+ context 'when package name pattern is slightly different' do
+ let(:kwargs) do
+ {
+ project_path: project.full_path,
+ # The field `package_name_pattern` is unique; this is why we change the value in a minimum way
+ package_name_pattern: "#{existing_package_protection_rule.package_name_pattern}-unique",
+ package_type: "NPM",
+ push_protected_up_to_access_level: "DEVELOPER"
+ }
+ end
+
+ it 'returns the created packages protection rule' do
+ expect { subject }.to change { ::Packages::Protection::Rule.count }.by(1)
+
+ expect(Packages::Protection::Rule.where(project: project).count).to eq 2
+ expect(Packages::Protection::Rule.where(project: project,
+ package_name_pattern: kwargs[:package_name_pattern])).to exist
+ end
+
+ it 'returns without error' do
+ subject
+
+ expect_graphql_errors_to_be_empty
+ end
+ end
+
+ context 'when field `package_name_pattern` is taken' do
+ let(:kwargs) do
+ {
+ project_path: project.full_path,
+ package_name_pattern: existing_package_protection_rule.package_name_pattern,
+ package_type: 'NPM',
+ push_protected_up_to_access_level: 'MAINTAINER'
+ }
+ end
+
+ it 'returns without error' do
+ subject
+
+ expect(mutation_response).to include 'errors' => ['Package name pattern has already been taken']
+ end
+
+ it 'does not create new package protection rules' do
+ expect { subject }.to change { Packages::Protection::Rule.count }.by(0)
+
+ expect(Packages::Protection::Rule.where(project: project,
+ package_name_pattern: kwargs[:package_name_pattern],
+ push_protected_up_to_access_level: Gitlab::Access::MAINTAINER)).not_to exist
+ end
+ end
+ end
+
+ context "when feature flag ':packages_protected_packages' disabled" do
+ before do
+ stub_feature_flags(packages_protected_packages: false)
+ end
+
+ it 'does not create any package protection rules' do
+ expect { subject }.to change { Packages::Protection::Rule.count }.by(0)
+
+ expect(Packages::Protection::Rule.where(project: project)).not_to exist
+ end
+
+ it 'returns error of disabled feature flag' do
+ subject.tap { expect_graphql_errors_to_include(/'packages_protected_packages' feature flag is disabled/) }
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/snippets/update_spec.rb b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
index 06594d89338..78df78cb2a0 100644
--- a/spec/requests/api/graphql/mutations/snippets/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
@@ -189,7 +189,7 @@ RSpec.describe 'Updating a Snippet', feature_category: :source_code_management d
end
it_behaves_like 'internal event tracking' do
- let(:action) { ::Gitlab::UsageDataCounters::EditorUniqueCounter::EDIT_BY_SNIPPET_EDITOR }
+ let(:event) { ::Gitlab::UsageDataCounters::EditorUniqueCounter::EDIT_BY_SNIPPET_EDITOR }
let(:user) { current_user }
let(:namespace) { project.namespace }
end
diff --git a/spec/requests/api/graphql/mutations/work_items/linked_items/add_spec.rb b/spec/requests/api/graphql/mutations/work_items/linked_items/add_spec.rb
index f30b7d0ea73..ea584d1b683 100644
--- a/spec/requests/api/graphql/mutations/work_items/linked_items/add_spec.rb
+++ b/spec/requests/api/graphql/mutations/work_items/linked_items/add_spec.rb
@@ -5,15 +5,17 @@ require 'spec_helper'
RSpec.describe "Add linked items to a work item", feature_category: :portfolio_management do
include GraphqlHelpers
- let_it_be(:project) { create(:project, :private) }
- let_it_be(:reporter) { create(:user).tap { |user| project.add_reporter(user) } }
- let_it_be(:work_item) { create(:work_item, project: project) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :private, group: group) }
+ let_it_be(:reporter) { create(:user).tap { |user| group.add_reporter(user) } }
+ let_it_be(:project_work_item) { create(:work_item, :issue, project: project) }
let_it_be(:related1) { create(:work_item, project: project) }
let_it_be(:related2) { create(:work_item, project: project) }
let(:mutation_response) { graphql_mutation_response(:work_item_add_linked_items) }
let(:mutation) { graphql_mutation(:workItemAddLinkedItems, input, fields) }
+ let(:work_item) { project_work_item }
let(:ids_to_link) { [related1.to_global_id.to_s, related2.to_global_id.to_s] }
let(:input) { { 'id' => work_item.to_global_id.to_s, 'workItemsIds' => ids_to_link } }
@@ -70,6 +72,18 @@ RSpec.describe "Add linked items to a work item", feature_category: :portfolio_m
)
end
+ context 'when work item is created at the group level' do
+ let(:work_item) { create(:work_item, :group_level, namespace: group) }
+
+ it 'links the work item' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.to change { WorkItems::RelatedWorkItemLink.count }.by(2)
+
+ expect(mutation_response['message']).to eq("Successfully linked ID(s): #{related1.id} and #{related2.id}.")
+ end
+ end
+
context 'when linking a work item fails' do
let_it_be(:private_project) { create(:project, :private) }
let_it_be(:related2) { create(:work_item, project: private_project) }
@@ -102,6 +116,20 @@ RSpec.describe "Add linked items to a work item", feature_category: :portfolio_m
end
end
+ context 'when type cannot be linked' do
+ let_it_be(:req) { create(:work_item, :requirement, project: project) }
+
+ let(:input) { { 'id' => work_item.to_global_id.to_s, 'workItemsIds' => [req.to_global_id.to_s] } }
+
+ it 'returns an error message' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response["errors"]).to eq([
+ "#{req.to_reference} cannot be added: issues cannot be related to requirements"
+ ])
+ end
+ end
+
context 'when there are more than the max allowed items to link' do
let(:max_work_items) { Mutations::WorkItems::LinkedItems::Base::MAX_WORK_ITEMS }
let(:ids_to_link) { (0..max_work_items).map { |i| "gid://gitlab/WorkItem/#{i}" } }
diff --git a/spec/requests/api/graphql/mutations/work_items/update_spec.rb b/spec/requests/api/graphql/mutations/work_items/update_spec.rb
index c7c68696888..cb6571c2c93 100644
--- a/spec/requests/api/graphql/mutations/work_items/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/work_items/update_spec.rb
@@ -7,14 +7,13 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
- let_it_be(:author) { create(:user).tap { |user| project.add_reporter(user) } }
- let_it_be(:developer) { create(:user).tap { |user| project.add_developer(user) } }
- let_it_be(:reporter) { create(:user).tap { |user| project.add_reporter(user) } }
- let_it_be(:guest) { create(:user).tap { |user| project.add_guest(user) } }
+ let_it_be(:author) { create(:user).tap { |user| group.add_reporter(user) } }
+ let_it_be(:developer) { create(:user).tap { |user| group.add_developer(user) } }
+ let_it_be(:reporter) { create(:user).tap { |user| group.add_reporter(user) } }
+ let_it_be(:guest) { create(:user).tap { |user| group.add_guest(user) } }
let_it_be(:work_item, refind: true) { create(:work_item, project: project, author: author) }
- let(:work_item_event) { 'CLOSE' }
- let(:input) { { 'stateEvent' => work_item_event, 'title' => 'updated title' } }
+ let(:input) { { 'stateEvent' => 'CLOSE', 'title' => 'updated title' } }
let(:fields) do
<<~FIELDS
workItem {
@@ -25,7 +24,8 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do
FIELDS
end
- let(:mutation) { graphql_mutation(:workItemUpdate, input.merge('id' => work_item.to_global_id.to_s), fields) }
+ let(:mutation_work_item) { work_item }
+ let(:mutation) { graphql_mutation(:workItemUpdate, input.merge('id' => mutation_work_item.to_gid.to_s), fields) }
let(:mutation_response) { graphql_mutation_response(:work_item_update) }
@@ -60,7 +60,7 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do
end
context 'when the work item is closed' do
- let(:work_item_event) { 'REOPEN' }
+ let(:input) { { 'stateEvent' => 'REOPEN' } }
before do
work_item.close!
@@ -155,10 +155,10 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do
it 'updates labels' do
expect do
post_graphql_mutation(mutation, current_user: current_user)
- work_item.reload
- end.to change { work_item.labels.count }.to(expected_labels.count)
+ mutation_work_item.reload
+ end.to change { mutation_work_item.labels.count }.to(expected_labels.count)
- expect(work_item.labels).to match_array(expected_labels)
+ expect(mutation_work_item.labels).to match_array(expected_labels)
expect(mutation_response['workItem']['widgets']).to include(
'labels' => {
'nodes' => match_array(expected_labels.map { |l| { 'id' => l.to_gid.to_s } })
@@ -168,9 +168,9 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do
end
end
- let_it_be(:existing_label) { create(:label, project: project) }
- let_it_be(:label1) { create(:label, project: project) }
- let_it_be(:label2) { create(:label, project: project) }
+ let_it_be(:existing_label) { create(:group_label, group: group) }
+ let_it_be(:label1) { create(:group_label, group: group) }
+ let_it_be(:label2) { create(:group_label, group: group) }
let(:fields) do
<<~FIELDS
@@ -197,9 +197,11 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do
let(:add_label_ids) { [] }
let(:remove_label_ids) { [] }
+ let_it_be(:group_work_item) { create(:work_item, :task, :group_level, namespace: group) }
before_all do
work_item.update!(labels: [existing_label])
+ group_work_item.update!(labels: [existing_label])
end
context 'when only removing labels' do
@@ -213,6 +215,12 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do
it_behaves_like 'mutation updating work item labels'
end
+
+ context 'when work item belongs directly to the group' do
+ let(:mutation_work_item) { group_work_item }
+
+ it_behaves_like 'mutation updating work item labels'
+ end
end
context 'when only adding labels' do
@@ -228,6 +236,12 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do
it_behaves_like 'mutation updating work item labels'
end
+
+ context 'when work item belongs directly to the group' do
+ let(:mutation_work_item) { group_work_item }
+
+ it_behaves_like 'mutation updating work item labels'
+ end
end
context 'when adding and removing labels' do
@@ -245,6 +259,12 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do
it_behaves_like 'mutation updating work item labels'
end
+
+ context 'when work item belongs directly to the group' do
+ let(:mutation_work_item) { group_work_item }
+
+ it_behaves_like 'mutation updating work item labels'
+ end
end
context 'when the work item type does not support labels widget' do
@@ -1025,7 +1045,7 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do
end
context 'when updating notifications subscription' do
- let_it_be(:current_user) { reporter }
+ let_it_be(:current_user) { guest }
let(:input) { { 'notificationsWidget' => { 'subscribed' => desired_state } } }
let(:fields) do
@@ -1059,7 +1079,7 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do
update_work_item
subscription.reload
end.to change(subscription, :subscribed).to(desired_state)
- .and(change { work_item.reload.subscribed?(reporter, project) }.to(desired_state))
+ .and(change { work_item.reload.subscribed?(guest, project) }.to(desired_state))
expect(response).to have_gitlab_http_status(:success)
expect(mutation_response['workItem']['widgets']).to include(
@@ -1159,7 +1179,7 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do
end
context 'when updating currentUserTodos' do
- let_it_be(:current_user) { reporter }
+ let_it_be(:current_user) { guest }
let(:fields) do
<<~FIELDS
@@ -1185,7 +1205,7 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do
context 'when adding a new todo' do
let(:input) { { 'currentUserTodosWidget' => { 'action' => 'ADD' } } }
- context 'when user has access to the work item' do
+ context 'when user can create todos' do
it 'adds a new todo for the user on the work item' do
expect { update_work_item }.to change { current_user.todos.count }.by(1)
@@ -1203,6 +1223,17 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do
}
)
end
+
+ context 'when a base attribute is present' do
+ before do
+ input.merge!('title' => 'new title')
+ end
+
+ it_behaves_like 'a mutation that returns top-level errors', errors: [
+ 'The resource that you are attempting to access does not exist or you don\'t have permission to ' \
+ 'perform this action'
+ ]
+ end
end
context 'when user has no access' do
diff --git a/spec/requests/api/graphql/project/error_tracking/sentry_errors_request_spec.rb b/spec/requests/api/graphql/project/error_tracking/sentry_errors_request_spec.rb
index 2abb1f62ea9..6cbc70022ed 100644
--- a/spec/requests/api/graphql/project/error_tracking/sentry_errors_request_spec.rb
+++ b/spec/requests/api/graphql/project/error_tracking/sentry_errors_request_spec.rb
@@ -139,9 +139,7 @@ RSpec.describe 'sentry errors requests', feature_category: :error_tracking do
context 'when reactive cache returns data' do
before do
- stub_setting_for(:list_sentry_issues,
- issues: [sentry_error],
- pagination: pagination)
+ stub_setting_for(:list_sentry_issues, issues: [sentry_error], pagination: pagination)
post_graphql(query, current_user: current_user)
end
diff --git a/spec/requests/api/graphql/project/issue/design_collection/version_spec.rb b/spec/requests/api/graphql/project/issue/design_collection/version_spec.rb
index 5ccf5c1999a..391ca2332bc 100644
--- a/spec/requests/api/graphql/project/issue/design_collection/version_spec.rb
+++ b/spec/requests/api/graphql/project/issue/design_collection/version_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Query.project(fullPath).issue(iid).designCollection.version(sha)',
-feature_category: :design_management do
+ feature_category: :design_management do
include GraphqlHelpers
include DesignManagementTestHelpers
diff --git a/spec/requests/api/graphql/project/merge_request/pipelines_spec.rb b/spec/requests/api/graphql/project/merge_request/pipelines_spec.rb
index fb7e46cff8e..ca4da8d41d7 100644
--- a/spec/requests/api/graphql/project/merge_request/pipelines_spec.rb
+++ b/spec/requests/api/graphql/project/merge_request/pipelines_spec.rb
@@ -109,8 +109,10 @@ RSpec.describe 'Query.project.mergeRequests.pipelines', feature_category: :conti
end
def run_query(first = nil)
- run_with_clean_state(query,
- context: { current_user: author },
- variables: { path: project.full_path, first: first })
+ run_with_clean_state(
+ query,
+ context: { current_user: author },
+ variables: { path: project.full_path, first: first }
+ )
end
end
diff --git a/spec/requests/api/graphql/project/merge_request_spec.rb b/spec/requests/api/graphql/project/merge_request_spec.rb
index 9ca5df95d30..c274199e65b 100644
--- a/spec/requests/api/graphql/project/merge_request_spec.rb
+++ b/spec/requests/api/graphql/project/merge_request_spec.rb
@@ -321,9 +321,11 @@ RSpec.describe 'getting merge request information nested in a project', feature_
end
it 'does not error' do
- post_graphql(query,
- current_user: current_user,
- variables: { path: project.full_path })
+ post_graphql(
+ query,
+ current_user: current_user,
+ variables: { path: project.full_path }
+ )
expect(graphql_data_at(:project, :mrs, :nodes, :notes, :pageInfo)).to contain_exactly a_hash_including(
'endCursor' => String,
diff --git a/spec/requests/api/graphql/project/merge_requests_spec.rb b/spec/requests/api/graphql/project/merge_requests_spec.rb
index 05ed0ed8729..543de43bcf3 100644
--- a/spec/requests/api/graphql/project/merge_requests_spec.rb
+++ b/spec/requests/api/graphql/project/merge_requests_spec.rb
@@ -363,27 +363,6 @@ RSpec.describe 'getting merge request listings nested in a project', feature_cat
include_examples 'N+1 query check'
end
- context 'when requesting participants' do
- let(:requested_fields) { 'participants { nodes { name } }' }
-
- before do
- create(:award_emoji, :upvote, awardable: merge_request_a)
- create(:award_emoji, :upvote, awardable: merge_request_b)
- create(:award_emoji, :upvote, awardable: merge_request_c)
-
- note_with_emoji_a = create(:note_on_merge_request, noteable: merge_request_a, project: project)
- note_with_emoji_b = create(:note_on_merge_request, noteable: merge_request_b, project: project)
- note_with_emoji_c = create(:note_on_merge_request, noteable: merge_request_c, project: project)
-
- create(:award_emoji, :upvote, awardable: note_with_emoji_a)
- create(:award_emoji, :upvote, awardable: note_with_emoji_b)
- create(:award_emoji, :upvote, awardable: note_with_emoji_c)
- end
-
- # Executes 3 extra queries to fetch participant_attrs
- include_examples 'N+1 query check', threshold: 3
- end
-
context 'when requesting labels' do
let(:requested_fields) { ['labels { nodes { id } }'] }
@@ -425,7 +404,6 @@ RSpec.describe 'getting merge request listings nested in a project', feature_cat
<<~SELECT
assignees { nodes { username } }
reviewers { nodes { username } }
- participants { nodes { username } }
headPipeline { status }
timelogs { nodes { timeSpent } }
SELECT
@@ -445,9 +423,14 @@ RSpec.describe 'getting merge request listings nested in a project', feature_cat
before_all do
project.add_developer(current_user)
- mrs = create_list(:merge_request, 10, :closed, :with_head_pipeline,
- source_project: project,
- author: current_user)
+ mrs = create_list(
+ :merge_request,
+ 10,
+ :closed,
+ :with_head_pipeline,
+ source_project: project,
+ author: current_user
+ )
mrs.each do |mr|
mr.assignees << create(:user)
mr.assignees << current_user
@@ -487,7 +470,6 @@ RSpec.describe 'getting merge request listings nested in a project', feature_cat
a_hash_including(
'assignees' => user_collection,
'reviewers' => user_collection,
- 'participants' => user_collection,
'headPipeline' => { 'status' => be_present },
'timelogs' => { 'nodes' => be_one }
)))
diff --git a/spec/requests/api/graphql/project/packages_protection_rules_spec.rb b/spec/requests/api/graphql/project/packages_protection_rules_spec.rb
new file mode 100644
index 00000000000..0159f8a13e6
--- /dev/null
+++ b/spec/requests/api/graphql/project/packages_protection_rules_spec.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'getting the packages protection rules linked to a project', :aggregate_failures, feature_category: :package_registry do
+ include GraphqlHelpers
+
+ let_it_be_with_reload(:project) { create(:project) }
+ let_it_be(:user) { project.owner }
+
+ let(:query) do
+ graphql_query_for(
+ :project,
+ { full_path: project.full_path },
+ query_nodes(:packagesProtectionRules, of: 'PackagesProtectionRule')
+ )
+ end
+
+ subject { post_graphql(query, current_user: user) }
+
+ context 'with authorized user owner' do
+ before do
+ subject
+ end
+
+ context 'with package protection rule' do
+ let_it_be(:package_protection_rule) { create(:package_protection_rule, project: project) }
+
+ it_behaves_like 'a working graphql query'
+
+ it 'returns only one PackagesProtectionRule' do
+ expect(graphql_data_at(:project, :packagesProtectionRules, :nodes).count).to eq 1
+ end
+
+ it 'returns all packages protection rule fields' do
+ expect(graphql_data_at(:project, :packagesProtectionRules, :nodes)).to include(
+ hash_including(
+ 'packageNamePattern' => package_protection_rule.package_name_pattern,
+ 'packageType' => 'NPM',
+ 'pushProtectedUpToAccessLevel' => 'DEVELOPER'
+ )
+ )
+ end
+ end
+
+ context 'without package protection rule' do
+ it_behaves_like 'a working graphql query'
+
+ it 'returns no PackagesProtectionRule' do
+ expect(graphql_data_at(:project, :packagesProtectionRules, :nodes)).to eq []
+ end
+ end
+ end
+
+ context 'with unauthorized user' do
+ let_it_be(:user) { create(:user).tap { |u| project.add_developer(u) } }
+
+ before do
+ subject
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it 'returns no package protection rules' do
+ expect(graphql_data_at(:project, :packagesProtectionRules, :nodes)).to eq []
+ end
+ end
+
+ context "when feature flag ':packages_protected_packages' is disabled" do
+ let_it_be(:package_protection_rule) { create(:package_protection_rule, project: project) }
+
+ before do
+ stub_feature_flags(packages_protected_packages: false)
+
+ subject
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it 'returns no package protection rules' do
+ expect(graphql_data_at(:project, :packagesProtectionRules, :nodes)).to eq []
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/project/pipeline_spec.rb b/spec/requests/api/graphql/project/pipeline_spec.rb
index d20ee5bfdff..114072bf1f9 100644
--- a/spec/requests/api/graphql/project/pipeline_spec.rb
+++ b/spec/requests/api/graphql/project/pipeline_spec.rb
@@ -111,11 +111,14 @@ RSpec.describe 'getting pipeline information nested in a project', feature_categ
context 'when a job has been retried' do
let_it_be(:retried) do
- create(:ci_build, :retried,
- name: build_job.name,
- pipeline: pipeline,
- stage_idx: 0,
- stage: build_job.stage_name)
+ create(
+ :ci_build,
+ :retried,
+ name: build_job.name,
+ pipeline: pipeline,
+ stage_idx: 0,
+ stage: build_job.stage_name
+ )
end
let(:fields) do
diff --git a/spec/requests/api/graphql/project/project_members_spec.rb b/spec/requests/api/graphql/project/project_members_spec.rb
index faeb3ddd693..f061d422c9a 100644
--- a/spec/requests/api/graphql/project/project_members_spec.rb
+++ b/spec/requests/api/graphql/project/project_members_spec.rb
@@ -106,9 +106,10 @@ RSpec.describe 'getting project members information', feature_category: :groups_
it 'returns an error for an invalid member relation' do
fetch_members(project: child_project, args: { relations: [:OBLIQUE] })
- expect(graphql_errors.first)
- .to include('path' => %w[query project projectMembers relations],
- 'message' => a_string_including('invalid value ([OBLIQUE])'))
+ expect(graphql_errors.first).to include(
+ 'path' => %w[query project projectMembers relations],
+ 'message' => a_string_including('invalid value ([OBLIQUE])')
+ )
end
context 'when project is owned by a member' do
@@ -170,13 +171,19 @@ RSpec.describe 'getting project members information', feature_category: :groups_
it 'avoids N+1 queries, when requesting multiple MRs' do
control_query = with_signature(
[project_path, mr_a],
- graphql_query_for(:project, { full_path: project_path },
- query_graphql_field(:project_members, nil, interaction_query))
+ graphql_query_for(
+ :project,
+ { full_path: project_path },
+ query_graphql_field(:project_members, nil, interaction_query)
+ )
)
query_two = with_signature(
[project_path, mr_a, mr_b],
- graphql_query_for(:project, { full_path: project_path },
- query_graphql_field(:project_members, nil, interaction_b_query))
+ graphql_query_for(
+ :project,
+ { full_path: project_path },
+ query_graphql_field(:project_members, nil, interaction_b_query)
+ )
)
control_count = ActiveRecord::QueryRecorder.new do
@@ -199,8 +206,11 @@ RSpec.describe 'getting project members information', feature_category: :groups_
query = with_signature(
[project_path, mr_a],
- graphql_query_for(:project, { full_path: project_path },
- query_graphql_field(:project_members, nil, interaction_query))
+ graphql_query_for(
+ :project,
+ { full_path: project_path },
+ query_graphql_field(:project_members, nil, interaction_query)
+ )
)
control_count = ActiveRecord::QueryRecorder.new do
diff --git a/spec/requests/api/graphql/project/project_pipeline_statistics_spec.rb b/spec/requests/api/graphql/project/project_pipeline_statistics_spec.rb
index a13e96eb9d3..a43ad3f30ee 100644
--- a/spec/requests/api/graphql/project/project_pipeline_statistics_spec.rb
+++ b/spec/requests/api/graphql/project/project_pipeline_statistics_spec.rb
@@ -21,9 +21,11 @@ RSpec.describe 'rendering project pipeline statistics', feature_category: :conti
end
let(:query) do
- graphql_query_for('project',
- { 'fullPath' => project.full_path },
- query_graphql_field('pipelineAnalytics', {}, fields))
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => project.full_path },
+ query_graphql_field('pipelineAnalytics', {}, fields)
+ )
end
before do
diff --git a/spec/requests/api/graphql/project/project_statistics_spec.rb b/spec/requests/api/graphql/project/project_statistics_spec.rb
index 444738cbc81..e16a9f1d4d1 100644
--- a/spec/requests/api/graphql/project/project_statistics_spec.rb
+++ b/spec/requests/api/graphql/project/project_statistics_spec.rb
@@ -10,9 +10,11 @@ RSpec.describe 'rendering project statistics', feature_category: :shared do
let(:user) { create(:user) }
let(:query) do
- graphql_query_for('project',
- { 'fullPath' => project.full_path },
- "statistics { #{all_graphql_fields_for('ProjectStatistics')} }")
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => project.full_path },
+ "statistics { #{all_graphql_fields_for('ProjectStatistics')} }"
+ )
end
before do
diff --git a/spec/requests/api/graphql/project/terraform/states_spec.rb b/spec/requests/api/graphql/project/terraform/states_spec.rb
index 25fc07ef509..7a789a5d481 100644
--- a/spec/requests/api/graphql/project/terraform/states_spec.rb
+++ b/spec/requests/api/graphql/project/terraform/states_spec.rb
@@ -11,39 +11,42 @@ RSpec.describe 'query terraform states', feature_category: :infrastructure_as_co
let_it_be(:latest_version) { terraform_state.latest_version }
let(:query) do
- graphql_query_for(:project, { fullPath: project.full_path },
- %{
- terraformStates {
- count
- nodes {
- id
- name
- lockedAt
- createdAt
- updatedAt
-
- latestVersion {
+ graphql_query_for(
+ :project,
+ { fullPath: project.full_path },
+ %{
+ terraformStates {
+ count
+ nodes {
id
- downloadPath
- serial
+ name
+ lockedAt
createdAt
updatedAt
- createdByUser {
+ latestVersion {
id
- }
+ downloadPath
+ serial
+ createdAt
+ updatedAt
- job {
- name
+ createdByUser {
+ id
+ }
+
+ job {
+ name
+ }
}
- }
- lockedByUser {
- id
+ lockedByUser {
+ id
+ }
}
}
}
- })
+ )
end
let(:current_user) { project.creator }
diff --git a/spec/requests/api/graphql/work_item_spec.rb b/spec/requests/api/graphql/work_item_spec.rb
index 3691e023a53..b8575b25e0a 100644
--- a/spec/requests/api/graphql/work_item_spec.rb
+++ b/spec/requests/api/graphql/work_item_spec.rb
@@ -5,9 +5,10 @@ require 'spec_helper'
RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do
include GraphqlHelpers
- let_it_be(:developer) { create(:user) }
- let_it_be(:guest) { create(:user) }
- let_it_be(:project) { create(:project, :private) }
+ let_it_be(:group) { create(:group) }
+ let_it_be_with_reload(:project) { create(:project, :private, group: group) }
+ let_it_be(:developer) { create(:user).tap { |u| group.add_developer(u) } }
+ let_it_be(:guest) { create(:user).tap { |u| group.add_guest(u) } }
let_it_be(:work_item) do
create(
:work_item,
@@ -35,6 +36,21 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do
graphql_query_for('workItem', { 'id' => global_id }, work_item_fields)
end
+ context 'when project is archived' do
+ before do
+ project.update!(archived: true)
+ post_graphql(query, current_user: current_user)
+ end
+
+ it 'returns the correct value in the archived field' do
+ expect(work_item_data).to include(
+ 'id' => work_item.to_gid.to_s,
+ 'iid' => work_item.iid.to_s,
+ 'archived' => true
+ )
+ end
+ end
+
context 'when the user can read the work item' do
let(:incoming_email_token) { current_user.incoming_email_token }
let(:work_item_email) do
@@ -42,10 +58,7 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do
end
before do
- project.add_developer(developer)
- project.add_guest(guest)
stub_incoming_email_setting(enabled: true, address: "p+%{key}@gl.ab")
-
post_graphql(query, current_user: current_user)
end
@@ -63,6 +76,7 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do
'workItemType' => hash_including('id' => work_item.work_item_type.to_gid.to_s),
'reference' => work_item.to_reference,
'createNoteEmail' => work_item_email,
+ 'archived' => false,
'userPermissions' => {
'readWorkItem' => true,
'updateWorkItem' => true,
@@ -77,6 +91,19 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do
)
end
+ context 'when work item is created at the group level' do
+ let_it_be(:group_work_item) { create(:work_item, :group_level, namespace: group) }
+ let(:global_id) { group_work_item.to_gid.to_s }
+
+ it 'always returns false in the archived field' do
+ expect(work_item_data).to include(
+ 'id' => group_work_item.to_gid.to_s,
+ 'iid' => group_work_item.iid.to_s,
+ 'archived' => false
+ )
+ end
+ end
+
context 'when querying widgets' do
describe 'description widget' do
let(:work_item_fields) do
@@ -684,7 +711,7 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do
GRAPHQL
end
- let_it_be(:note) { create(:note, project: work_item.project, noteable: work_item) }
+ let_it_be(:note) { create(:note, project: work_item.project, noteable: work_item, author: developer) }
before_all do
create(:award_emoji, awardable: note, name: 'rocket', user: developer)
@@ -717,7 +744,7 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do
notes = graphql_dig_at(notes_widget['discussions'], :nodes).flat_map { |d| d['notes']['nodes'] }
expect(notes).to contain_exactly(
- hash_including('maxAccessLevelOfAuthor' => 'Owner', 'authorIsContributor' => false)
+ hash_including('maxAccessLevelOfAuthor' => 'Developer', 'authorIsContributor' => false)
)
end
@@ -738,7 +765,7 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do
create(:award_emoji, awardable: note_with_different_user, name: 'star', user: developer)
# TODO: Fix existing N+1 queries in https://gitlab.com/gitlab-org/gitlab/-/issues/414747
- expect { post_graphql(query, current_user: developer) }.not_to exceed_query_limit(control).with_threshold(3)
+ expect { post_graphql(query, current_user: developer) }.not_to exceed_query_limit(control).with_threshold(4)
expect_graphql_errors_to_be_empty
end
end
@@ -772,7 +799,6 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do
let(:current_user) { guest }
before do
- project.add_guest(guest)
post_graphql(query, current_user: current_user)
end
diff --git a/spec/requests/api/graphql_spec.rb b/spec/requests/api/graphql_spec.rb
index 8a3c5261eb6..2e6ec6d02e7 100644
--- a/spec/requests/api/graphql_spec.rb
+++ b/spec/requests/api/graphql_spec.rb
@@ -42,13 +42,6 @@ RSpec.describe 'GraphQL', feature_category: :shared do
post_graphql(query, variables: variables)
end
-
- it 'does not instantiate any query analyzers' do # they are static and re-used
- expect(GraphQL::Analysis::QueryComplexity).not_to receive(:new)
- expect(GraphQL::Analysis::QueryDepth).not_to receive(:new)
-
- 2.times { post_graphql(query, variables: variables) }
- end
end
context 'with no variables' do
@@ -282,9 +275,9 @@ RSpec.describe 'GraphQL', feature_category: :shared do
it 'does not authenticate user' do
post_graphql(query, headers: { 'PRIVATE-TOKEN' => token.token })
- expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to have_gitlab_http_status(:unauthorized)
- expect(graphql_data['echo']).to eq('nil says: Hello world')
+ expect_graphql_errors_to_include('Invalid token')
end
end
@@ -308,9 +301,9 @@ RSpec.describe 'GraphQL', feature_category: :shared do
post_graphql(query, headers: { 'PRIVATE-TOKEN' => token.token })
- expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to have_gitlab_http_status(:unauthorized)
- expect(graphql_data['echo']).to eq('nil says: Hello world')
+ expect_graphql_errors_to_include('Invalid token')
end
end
end
diff --git a/spec/requests/api/group_clusters_spec.rb b/spec/requests/api/group_clusters_spec.rb
index 7c194627f82..58d0e6a1eb5 100644
--- a/spec/requests/api/group_clusters_spec.rb
+++ b/spec/requests/api/group_clusters_spec.rb
@@ -453,7 +453,7 @@ RSpec.describe API::GroupClusters, feature_category: :deployment_management do
end
it 'returns validation error' do
- expect(json_response['message']['platform_kubernetes'].first).to eq(_('Cannot modify managed Kubernetes cluster'))
+ expect(json_response['message']['platform_kubernetes.base'].first).to eq(_('Cannot modify managed Kubernetes cluster'))
end
end
diff --git a/spec/requests/api/group_export_spec.rb b/spec/requests/api/group_export_spec.rb
index b4add2494b0..d0f7c000544 100644
--- a/spec/requests/api/group_export_spec.rb
+++ b/spec/requests/api/group_export_spec.rb
@@ -325,8 +325,32 @@ RSpec.describe API::GroupExport, feature_category: :importers do
end
context 'when bulk import is disabled' do
+ subject(:request) { post api(path, user) }
+
+ before do
+ stub_application_setting(bulk_import_enabled: false)
+ stub_feature_flags(override_bulk_import_disabled: false)
+ end
+
it_behaves_like '404 response' do
- let(:request) { get api(path, user) }
+ let(:message) { '404 Not Found' }
+ end
+
+ it 'enables the feature when override flag is enabled for the user' do
+ stub_feature_flags(override_bulk_import_disabled: user)
+
+ request
+
+ expect(response).to have_gitlab_http_status(:accepted)
+ end
+
+ it 'does not enable the feature when override flag is enabled for another user' do
+ other_user = create(:user)
+ stub_feature_flags(override_bulk_import_disabled: other_user)
+
+ request
+
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index 7b1da1c691d..662e11f7cfb 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -1249,19 +1249,23 @@ RSpec.describe API::Groups, feature_category: :groups_and_projects do
expect(json_response.length).to eq(6)
end
- it 'avoids N+1 queries', :aggregate_failures, :use_sql_query_cache, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/383788' do
- get api("/groups/#{group1.id}/projects", user1), params: { include_subgroups: true }
- expect(respone).to have_gitlab_http_status(:ok)
+ it 'avoids N+1 queries', :aggregate_failures do
+ get api("/groups/#{group1.id}/projects", user1), params: { include_subgroups: true } # warm-up
+
+ expect(response).to have_gitlab_http_status(:ok)
control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get api("/groups/#{group1.id}/projects", user1), params: { include_subgroups: true }
end
- create_list(:project, 2, :public, namespace: group1)
+ create(:project, :public, namespace: group1)
+ # A threshold of 2 allows for the additional queries that are executed.
+ # This permits some N+1 queries that may already exist but are not obvious.
+ # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/132246#note_1581106553
expect do
get api("/groups/#{group1.id}/projects", user1), params: { include_subgroups: true }
- end.not_to exceed_all_query_limit(control.count)
+ end.to issue_same_number_of_queries_as(control).with_threshold(2)
end
end
diff --git a/spec/requests/api/import_bitbucket_server_spec.rb b/spec/requests/api/import_bitbucket_server_spec.rb
index 7c2df52fdf3..9a9ccc867a3 100644
--- a/spec/requests/api/import_bitbucket_server_spec.rb
+++ b/spec/requests/api/import_bitbucket_server_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe API::ImportBitbucketServer, feature_category: :importers do
let(:secret) { "sekrettt" }
let(:project_key) { 'TES' }
let(:repo_slug) { 'vim' }
+ let(:timeout_strategy) { 'pessimistic' }
let(:repo) do
double('repo',
name: repo_slug,
@@ -52,7 +53,7 @@ RSpec.describe API::ImportBitbucketServer, feature_category: :importers do
it 'returns 201 response when the project is imported successfully' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
- .to receive(:new).with(project_key, repo_slug, anything, repo_slug, user.namespace, user, anything)
+ .to receive(:new).with(project_key, repo_slug, anything, repo_slug, user.namespace, user, anything, timeout_strategy)
.and_return(double(execute: project))
post api("/import/bitbucket_server", user), params: {
@@ -87,7 +88,7 @@ RSpec.describe API::ImportBitbucketServer, feature_category: :importers do
it 'returns 201 response when the project is imported successfully with a new project name' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
- .to receive(:new).with(project_key, repo_slug, anything, project.name, user.namespace, user, anything)
+ .to receive(:new).with(project_key, repo_slug, anything, project.name, user.namespace, user, anything, 'pessimistic')
.and_return(double(execute: project))
post api("/import/bitbucket_server", user), params: {
@@ -96,7 +97,8 @@ RSpec.describe API::ImportBitbucketServer, feature_category: :importers do
personal_access_token: token,
bitbucket_server_project: project_key,
bitbucket_server_repo: repo_slug,
- new_name: 'new-name'
+ new_name: 'new-name',
+ timeout_strategy: 'pessimistic'
}
expect(response).to have_gitlab_http_status(:created)
@@ -123,7 +125,7 @@ RSpec.describe API::ImportBitbucketServer, feature_category: :importers do
it 'returns 400 response due to a blocked URL' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
- .to receive(:new).with(project_key, repo_slug, anything, project.name, user.namespace, user, anything)
+ .to receive(:new).with(project_key, repo_slug, anything, project.name, user.namespace, user, anything, timeout_strategy)
.and_return(double(execute: project))
allow(Gitlab::UrlBlocker)
@@ -142,6 +144,24 @@ RSpec.describe API::ImportBitbucketServer, feature_category: :importers do
end
end
+ context 'with an invalid timeout strategy' do
+ let_it_be(:project) { create(:project, name: 'new-name') }
+
+ it 'returns 400 response due to an invalid timeout strategy' do
+ post api("/import/bitbucket_server", user), params: {
+ bitbucket_server_url: base_uri,
+ bitbucket_server_username: user,
+ personal_access_token: token,
+ bitbucket_server_project: project_key,
+ bitbucket_server_repo: repo_slug,
+ timeout_strategy: 'no-strategy'
+ }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response["error"]).to eq("timeout_strategy does not have a valid value")
+ end
+ end
+
context 'with a new namespace' do
let(:bitbucket_client) { instance_double(BitbucketServer::Client) }
@@ -159,7 +179,7 @@ RSpec.describe API::ImportBitbucketServer, feature_category: :importers do
it 'returns 201 response when the project is imported successfully to a new namespace' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
- .to receive(:new).with(project_key, repo_slug, anything, repo_slug, an_instance_of(Group), user, anything)
+ .to receive(:new).with(project_key, repo_slug, anything, repo_slug, an_instance_of(Group), user, anything, timeout_strategy)
.and_return(double(execute: create(:project, name: repo_slug)))
post api("/import/bitbucket_server", user), params: {
@@ -195,7 +215,7 @@ RSpec.describe API::ImportBitbucketServer, feature_category: :importers do
it 'returns 401 response when user can not create projects in the chosen namespace' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
- .to receive(:new).with(project_key, repo_slug, anything, repo_slug, an_instance_of(Group), user, anything)
+ .to receive(:new).with(project_key, repo_slug, anything, repo_slug, an_instance_of(Group), user, anything, timeout_strategy)
.and_return(double(execute: build(:project)))
other_namespace = create(:group, :private, name: 'private-group')
diff --git a/spec/requests/api/import_github_spec.rb b/spec/requests/api/import_github_spec.rb
index e394b92c0a2..9a42b11dc76 100644
--- a/spec/requests/api/import_github_spec.rb
+++ b/spec/requests/api/import_github_spec.rb
@@ -122,6 +122,19 @@ RSpec.describe API::ImportGithub, feature_category: :importers do
end
end
+ context 'with invalid timeout strategy' do
+ it 'returns 400 response' do
+ post api("/import/github", user), params: {
+ target_namespace: user.namespace_path,
+ personal_access_token: token,
+ repo_id: non_existing_record_id,
+ timeout_strategy: "invalid_strategy"
+ }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
context 'when additional access tokens are provided' do
let(:additional_access_tokens) { 'token1,token2' }
diff --git a/spec/requests/api/integrations_spec.rb b/spec/requests/api/integrations_spec.rb
index 4922a07cd6c..d8ac9d5abf7 100644
--- a/spec/requests/api/integrations_spec.rb
+++ b/spec/requests/api/integrations_spec.rb
@@ -63,25 +63,8 @@ RSpec.describe API::Integrations, feature_category: :integrations do
describe "PUT /projects/:id/#{endpoint}/#{integration.dasherize}" do
include_context 'with integration'
- # NOTE: Some attributes are not supported for PUT requests, even though they probably should be.
- # We can fix these manually, or with a generic approach like https://gitlab.com/gitlab-org/gitlab/-/issues/348208
- let(:missing_attributes) do
- {
- datadog: %i[archive_trace_events],
- hangouts_chat: %i[notify_only_broken_pipelines],
- jira: %i[issues_enabled project_key jira_issue_regex jira_issue_prefix vulnerabilities_enabled vulnerabilities_issuetype],
- mattermost: %i[labels_to_be_notified],
- mock_ci: %i[enable_ssl_verification],
- prometheus: %i[manual_configuration],
- pumble: %i[branches_to_be_notified notify_only_broken_pipelines],
- slack: %i[labels_to_be_notified],
- unify_circuit: %i[branches_to_be_notified notify_only_broken_pipelines],
- webex_teams: %i[branches_to_be_notified notify_only_broken_pipelines]
- }
- end
-
it "updates #{integration} settings and returns the correct fields" do
- supported_attrs = integration_attrs.without(missing_attributes.fetch(integration.to_sym, []))
+ supported_attrs = attributes_for(integration_factory).without(:active, :type)
put api("/projects/#{project.id}/#{endpoint}/#{dashed_integration}", user), params: supported_attrs
@@ -112,6 +95,8 @@ RSpec.describe API::Integrations, feature_category: :integrations do
end
end
+ integration_attrs = attributes_for(integration_factory).without(:active, :type)
+
if required_attributes.empty?
expected_code = :ok
else
@@ -129,7 +114,7 @@ RSpec.describe API::Integrations, feature_category: :integrations do
include_context 'with integration'
before do
- initialize_integration(integration)
+ create(integration_factory, project: project)
end
it "deletes #{integration}" do
@@ -144,7 +129,7 @@ RSpec.describe API::Integrations, feature_category: :integrations do
describe "GET /projects/:id/#{endpoint}/#{integration.dasherize}" do
include_context 'with integration'
- let!(:initialized_integration) { initialize_integration(integration, active: true) }
+ let!(:initialized_integration) { create(integration_factory, project: project) }
let_it_be(:project2) do
create(:project, creator_id: user.id, namespace: user.namespace)
diff --git a/spec/requests/api/internal/kubernetes_spec.rb b/spec/requests/api/internal/kubernetes_spec.rb
index 1e8397773be..551ed0babf1 100644
--- a/spec/requests/api/internal/kubernetes_spec.rb
+++ b/spec/requests/api/internal/kubernetes_spec.rb
@@ -507,15 +507,6 @@ RSpec.describe API::Internal::Kubernetes, feature_category: :deployment_manageme
expect(response).to have_gitlab_http_status(:success)
end
- it 'returns 400 when the feature flag is disabled' do
- deployment_project.add_member(user, :developer)
- stub_feature_flags(k8s_proxy_pat: false)
-
- send_request(params: { agent_id: agent.id, access_type: 'personal_access_token', access_key: personal_access_token.token })
-
- expect(response).to have_gitlab_http_status(:bad_request)
- end
-
it 'returns 403 when user has no access' do
send_request(params: { agent_id: agent.id, access_type: 'personal_access_token', access_key: personal_access_token.token })
diff --git a/spec/requests/api/internal/pages_spec.rb b/spec/requests/api/internal/pages_spec.rb
index 65aa2326af5..1eeb3404157 100644
--- a/spec/requests/api/internal/pages_spec.rb
+++ b/spec/requests/api/internal/pages_spec.rb
@@ -69,6 +69,15 @@ RSpec.describe API::Internal::Pages, feature_category: :pages do
context 'when querying a custom domain' do
let_it_be(:pages_domain) { create(:pages_domain, domain: 'pages.io', project: project) }
+ # We need to ensure not to return the unique domain when requesting a custom domain
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/426435
+ before_all do
+ project.project_setting.update!(
+ pages_unique_domain: 'unique-domain',
+ pages_unique_domain_enabled: true
+ )
+ end
+
context 'when there are no pages deployed for the related project' do
before do
project.mark_pages_as_not_deployed
diff --git a/spec/requests/api/invitations_spec.rb b/spec/requests/api/invitations_spec.rb
index bb0f557cfee..dc02e830027 100644
--- a/spec/requests/api/invitations_spec.rb
+++ b/spec/requests/api/invitations_spec.rb
@@ -130,24 +130,24 @@ RSpec.describe API::Invitations, feature_category: :user_profile do
end.to change { source.members.non_invite.count }.by(1)
end
- it 'adds a new member by unconfirmed primary email' do
+ it 'adds a new member by confirmed secondary email' do
+ secondary_email = create(:email, :confirmed, email: 'secondary@example.com', user: stranger)
+
expect do
post invitations_url(source, maintainer),
- params: { email: unconfirmed_stranger.email, access_level: Member::DEVELOPER }
+ params: { email: secondary_email.email, access_level: Member::DEVELOPER }
expect(response).to have_gitlab_http_status(:created)
end.to change { source.members.non_invite.count }.by(1)
end
- it 'adds a new member by confirmed secondary email' do
- secondary_email = create(:email, :confirmed, email: 'secondary@example.com', user: stranger)
-
+ it 'adds a new member as an invite for unconfirmed primary email' do
expect do
post invitations_url(source, maintainer),
- params: { email: secondary_email.email, access_level: Member::DEVELOPER }
+ params: { email: unconfirmed_stranger.email, access_level: Member::DEVELOPER }
expect(response).to have_gitlab_http_status(:created)
- end.to change { source.members.non_invite.count }.by(1)
+ end.to change { source.members.invite.count }.by(1).and change { source.members.non_invite.count }.by(0)
end
it 'adds a new member as an invite for unconfirmed secondary email' do
@@ -266,34 +266,6 @@ RSpec.describe API::Invitations, feature_category: :user_profile do
end
end
- context 'with tasks_to_be_done and tasks_project_id in the params' do
- let(:project_id) { source_type == 'project' ? source.id : create(:project, namespace: source).id }
-
- context 'when there is 1 invitation' do
- it 'creates a member_task with the tasks_to_be_done and the project' do
- post invitations_url(source, maintainer),
- params: { email: email, access_level: Member::DEVELOPER, tasks_to_be_done: %w(code ci), tasks_project_id: project_id }
-
- member = source.members.find_by(invite_email: email)
- expect(member.tasks_to_be_done).to match_array([:code, :ci])
- expect(member.member_task.project_id).to eq(project_id)
- end
- end
-
- context 'when there are multiple invitations' do
- it 'creates a member_task with the tasks_to_be_done and the project' do
- post invitations_url(source, maintainer),
- params: { email: [email, email2].join(','), access_level: Member::DEVELOPER, tasks_to_be_done: %w(code ci), tasks_project_id: project_id }
-
- members = source.members.where(invite_email: [email, email2])
- members.each do |member|
- expect(member.tasks_to_be_done).to match_array([:code, :ci])
- expect(member.member_task.project_id).to eq(project_id)
- end
- end
- end
- end
-
context 'with invite_source considerations', :snowplow do
let(:params) { { email: email, access_level: Member::DEVELOPER } }
@@ -407,8 +379,24 @@ RSpec.describe API::Invitations, feature_category: :user_profile do
end
describe 'POST /projects/:id/invitations' do
- it_behaves_like 'POST /:source_type/:id/invitations', 'project' do
- let(:source) { project }
+ context 'with admin_group_member FF disabled' do
+ before do
+ stub_feature_flags(admin_group_member: false)
+ end
+
+ it_behaves_like 'POST /:source_type/:id/invitations', 'project' do
+ let(:source) { project }
+ end
+ end
+
+ context 'with admin_group_member FF enabled' do
+ before do
+ stub_feature_flags(admin_group_member: true)
+ end
+
+ it_behaves_like 'POST /:source_type/:id/invitations', 'project' do
+ let(:source) { project }
+ end
end
it 'does not exceed expected queries count for emails', :request_store, :use_sql_query_cache do
@@ -470,8 +458,24 @@ RSpec.describe API::Invitations, feature_category: :user_profile do
end
describe 'POST /groups/:id/invitations' do
- it_behaves_like 'POST /:source_type/:id/invitations', 'group' do
- let(:source) { group }
+ context 'with admin_group_member FF disabled' do
+ before do
+ stub_feature_flags(admin_group_member: false)
+ end
+
+ it_behaves_like 'POST /:source_type/:id/invitations', 'group' do
+ let(:source) { group }
+ end
+ end
+
+ context 'with admin_group_member FF enabled' do
+ before do
+ stub_feature_flags(admin_group_member: true)
+ end
+
+ it_behaves_like 'POST /:source_type/:id/invitations', 'group' do
+ let(:source) { group }
+ end
end
it 'does not exceed expected queries count for emails', :request_store, :use_sql_query_cache do
@@ -583,14 +587,46 @@ RSpec.describe API::Invitations, feature_category: :user_profile do
end
describe 'GET /projects/:id/invitations' do
- it_behaves_like 'GET /:source_type/:id/invitations', 'project' do
- let(:source) { project }
+ context 'with admin_group_member FF disabled' do
+ before do
+ stub_feature_flags(admin_group_member: false)
+ end
+
+ it_behaves_like 'GET /:source_type/:id/invitations', 'project' do
+ let(:source) { project }
+ end
+ end
+
+ context 'with admin_group_member FF enabled' do
+ before do
+ stub_feature_flags(admin_group_member: true)
+ end
+
+ it_behaves_like 'GET /:source_type/:id/invitations', 'project' do
+ let(:source) { project }
+ end
end
end
describe 'GET /groups/:id/invitations' do
- it_behaves_like 'GET /:source_type/:id/invitations', 'group' do
- let(:source) { group }
+ context 'with admin_group_member FF disabled' do
+ before do
+ stub_feature_flags(admin_group_member: false)
+ end
+
+ it_behaves_like 'GET /:source_type/:id/invitations', 'group' do
+ let(:source) { group }
+ end
+ end
+
+ context 'with admin_group_member FF enabled' do
+ before do
+ stub_feature_flags(admin_group_member: true)
+ end
+
+ it_behaves_like 'GET /:source_type/:id/invitations', 'group' do
+ let(:source) { group }
+ end
end
end
@@ -676,14 +712,46 @@ RSpec.describe API::Invitations, feature_category: :user_profile do
end
describe 'DELETE /projects/:id/inviations/:email' do
- it_behaves_like 'DELETE /:source_type/:id/invitations/:email', 'project' do
- let(:source) { project }
+ context 'with admin_group_member FF disabled' do
+ before do
+ stub_feature_flags(admin_group_member: false)
+ end
+
+ it_behaves_like 'DELETE /:source_type/:id/invitations/:email', 'project' do
+ let(:source) { project }
+ end
+ end
+
+ context 'with admin_group_member FF enabled' do
+ before do
+ stub_feature_flags(admin_group_member: true)
+ end
+
+ it_behaves_like 'DELETE /:source_type/:id/invitations/:email', 'project' do
+ let(:source) { project }
+ end
end
end
describe 'DELETE /groups/:id/inviations/:email' do
- it_behaves_like 'DELETE /:source_type/:id/invitations/:email', 'group' do
- let(:source) { group }
+ context 'with admin_group_member FF disabled' do
+ before do
+ stub_feature_flags(admin_group_member: false)
+ end
+
+ it_behaves_like 'DELETE /:source_type/:id/invitations/:email', 'group' do
+ let(:source) { group }
+ end
+ end
+
+ context 'with admin_group_member FF enabled' do
+ before do
+ stub_feature_flags(admin_group_member: true)
+ end
+
+ it_behaves_like 'DELETE /:source_type/:id/invitations/:email', 'group' do
+ let(:source) { group }
+ end
end
end
@@ -792,14 +860,26 @@ RSpec.describe API::Invitations, feature_category: :user_profile do
end
describe 'PUT /projects/:id/invitations' do
- it_behaves_like 'PUT /:source_type/:id/invitations/:email', 'project' do
- let(:source) { project }
+ context 'with admin_group_member FF disabled' do
+ before do
+ stub_feature_flags(admin_group_member: false)
+ end
+
+ it_behaves_like 'PUT /:source_type/:id/invitations/:email', 'project' do
+ let(:source) { project }
+ end
end
end
describe 'PUT /groups/:id/invitations' do
- it_behaves_like 'PUT /:source_type/:id/invitations/:email', 'group' do
- let(:source) { group }
+ context 'with admin_group_member FF enabled' do
+ before do
+ stub_feature_flags(admin_group_member: true)
+ end
+
+ it_behaves_like 'PUT /:source_type/:id/invitations/:email', 'group' do
+ let(:source) { group }
+ end
end
end
end
diff --git a/spec/requests/api/issues/put_projects_issues_spec.rb b/spec/requests/api/issues/put_projects_issues_spec.rb
index 217788c519f..dbba31cd4d6 100644
--- a/spec/requests/api/issues/put_projects_issues_spec.rb
+++ b/spec/requests/api/issues/put_projects_issues_spec.rb
@@ -334,7 +334,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to eq([])
- expect(json_response['updated_at']).to be > Time.current
+ expect(Time.parse(json_response['updated_at'])).to be_future
end
it 'removes all labels and touches the record with labels param as array', :aggregate_failures do
@@ -344,7 +344,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to eq([])
- expect(json_response['updated_at']).to be > Time.current
+ expect(Time.parse(json_response['updated_at'])).to be_future
end
it 'updates labels and touches the record', :aggregate_failures do
@@ -354,7 +354,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to contain_exactly('foo', 'bar')
- expect(json_response['updated_at']).to be > Time.current
+ expect(Time.parse(json_response['updated_at'])).to be_future
end
it 'updates labels and touches the record with labels param as array', :aggregate_failures do
@@ -365,7 +365,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to include 'foo'
expect(json_response['labels']).to include 'bar'
- expect(json_response['updated_at']).to be > Time.current
+ expect(Time.parse(json_response['updated_at'])).to be_future
end
it 'allows special label names', :aggregate_failures do
diff --git a/spec/requests/api/lint_spec.rb b/spec/requests/api/lint_spec.rb
index 7fe17760220..5842bd1c716 100644
--- a/spec/requests/api/lint_spec.rb
+++ b/spec/requests/api/lint_spec.rb
@@ -4,9 +4,10 @@ require 'spec_helper'
RSpec.describe API::Lint, feature_category: :pipeline_composition do
describe 'GET /projects/:id/ci/lint' do
- subject(:ci_lint) { get api("/projects/#{project.id}/ci/lint", api_user), params: { dry_run: dry_run, include_jobs: include_jobs } }
+ subject(:ci_lint) { get api("/projects/#{project.id}/ci/lint", api_user), params: { sha: sha, dry_run: dry_run, include_jobs: include_jobs } }
let(:project) { create(:project, :repository) }
+ let(:sha) { nil }
let(:dry_run) { nil }
let(:include_jobs) { nil }
@@ -291,6 +292,102 @@ RSpec.describe API::Lint, feature_category: :pipeline_composition do
end
end
end
+
+ context 'with different sha values' do
+ let(:original_content) do
+ { test: { stage: 'test', script: 'echo 1' } }.deep_stringify_keys.to_yaml
+ end
+
+ let(:first_edit) do
+ { image: 'image:1.0', services: ['postgres'] }.deep_stringify_keys.to_yaml
+ end
+
+ let(:second_edit) do
+ { new_test: { stage: 'test', script: 'echo 0' } }.deep_stringify_keys.to_yaml
+ end
+
+ before do
+ project.repository.create_file(
+ project.creator,
+ '.gitlab-ci.yml',
+ original_content,
+ message: 'Automatically created .gitlab-ci.yml',
+ branch_name: 'master'
+ )
+
+ project.repository.update_file(
+ project.creator,
+ '.gitlab-ci.yml',
+ first_edit,
+ message: 'Automatically edited .gitlab-ci.yml',
+ branch_name: 'master'
+ )
+
+ project.repository.update_file(
+ project.creator,
+ '.gitlab-ci.yml',
+ second_edit,
+ message: 'Automatically edited .gitlab-ci.yml again',
+ branch_name: 'master'
+ )
+ end
+
+ context 'when latest .gitlab-ci.yml is valid' do
+ # check with explicit sha
+ let(:sha) { project.repository.commit.sha }
+
+ it 'passes validation' do
+ ci_lint
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_a Hash
+ expect(json_response['merged_yaml']).to eq(second_edit)
+ expect(json_response['valid']).to eq(true)
+ expect(json_response['warnings']).to eq([])
+ expect(json_response['errors']).to eq([])
+ end
+ end
+
+ context 'when previous .gitlab-ci.yml is invalid' do
+ let(:sha) { project.repository.commit.parent.sha }
+
+ it 'fails validation' do
+ ci_lint
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_a Hash
+ expect(json_response['merged_yaml']).to eq(first_edit)
+ expect(json_response['valid']).to eq(false)
+ expect(json_response['warnings']).to eq([])
+ expect(json_response['errors']).to eq(["jobs config should contain at least one visible job"])
+ end
+ end
+
+ context 'when first .gitlab-ci.yml is valid' do
+ let(:sha) { project.repository.commit.parent.parent.sha }
+
+ it 'passes validation' do
+ ci_lint
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_a Hash
+ expect(json_response['merged_yaml']).to eq(original_content)
+ expect(json_response['valid']).to eq(true)
+ expect(json_response['warnings']).to eq([])
+ expect(json_response['errors']).to eq([])
+ end
+ end
+
+ context 'when sha is not found' do
+ let(:sha) { "unknown" }
+
+ it 'returns 404 response' do
+ ci_lint
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
end
end
diff --git a/spec/requests/api/maven_packages_spec.rb b/spec/requests/api/maven_packages_spec.rb
index 4e746802500..1f841eefff2 100644
--- a/spec/requests/api/maven_packages_spec.rb
+++ b/spec/requests/api/maven_packages_spec.rb
@@ -377,6 +377,20 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do
end
end
+ shared_examples 'rejecting request with invalid params' do
+ context 'with invalid maven path' do
+ subject { download_file(file_name: package_file.file_name, path: 'foo/bar/%0d%0ahttp:/%2fexample.com') }
+
+ it_behaves_like 'returning response status with error', status: :bad_request, error: 'path should be a valid file path'
+ end
+
+ context 'with invalid file name' do
+ subject { download_file(file_name: '%0d%0ahttp:/%2fexample.com') }
+
+ it_behaves_like 'returning response status with error', status: :bad_request, error: 'file_name should be a valid file path'
+ end
+ end
+
describe 'GET /api/v4/packages/maven/*path/:file_name' do
context 'a public project' do
let(:snowplow_gitlab_standard_context) { { project: project, namespace: project.namespace, property: 'i_package_maven_user' } }
@@ -403,6 +417,8 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do
it_behaves_like 'returning response status', :forbidden
end
+ it_behaves_like 'rejecting request with invalid params'
+
it 'returns not found when a package is not found' do
finder = double('finder', execute: nil)
expect(::Packages::Maven::PackageFinder).to receive(:new).and_return(finder)
@@ -444,6 +460,8 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do
end
end
+ it_behaves_like 'rejecting request with invalid params'
+
it_behaves_like 'handling groups, subgroups and user namespaces for', 'getting a file', visibilities: { public: :redirect, internal: :not_found }
end
@@ -501,6 +519,8 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do
end
end
+ it_behaves_like 'rejecting request with invalid params'
+
it_behaves_like 'handling groups, subgroups and user namespaces for', 'getting a file', visibilities: { public: :redirect, internal: :not_found, private: :not_found }
end
@@ -566,6 +586,8 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do
end
end
+ it_behaves_like 'rejecting request with invalid params'
+
it_behaves_like 'handling groups and subgroups for', 'getting a file for a group'
end
@@ -597,6 +619,8 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do
end
end
+ it_behaves_like 'rejecting request with invalid params'
+
it_behaves_like 'handling groups and subgroups for', 'getting a file for a group', visibilities: { internal: :unauthorized, public: :redirect }
end
@@ -634,6 +658,8 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do
it_behaves_like 'returning response status', :redirect
end
+ it_behaves_like 'rejecting request with invalid params'
+
context 'with group deploy token' do
subject { download_file_with_token(file_name: package_file.file_name, request_headers: group_deploy_token_headers) }
@@ -786,6 +812,8 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do
it_behaves_like 'returning response status', :redirect
end
+
+ it_behaves_like 'rejecting request with invalid params'
end
context 'private project' do
@@ -830,6 +858,8 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do
it_behaves_like 'returning response status', :redirect
end
+
+ it_behaves_like 'rejecting request with invalid params'
end
it_behaves_like 'forwarding package requests'
diff --git a/spec/requests/api/members_spec.rb b/spec/requests/api/members_spec.rb
index f3e5f3ab891..8dab9d555cf 100644
--- a/spec/requests/api/members_spec.rb
+++ b/spec/requests/api/members_spec.rb
@@ -470,34 +470,6 @@ RSpec.describe API::Members, feature_category: :groups_and_projects do
end
end
- context 'with tasks_to_be_done and tasks_project_id in the params' do
- let(:project_id) { source_type == 'project' ? source.id : create(:project, namespace: source).id }
-
- context 'when there is 1 user to add' do
- it 'creates a member_task with the correct attributes' do
- post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
- params: { user_id: stranger.id, access_level: Member::DEVELOPER, tasks_to_be_done: %w(code ci), tasks_project_id: project_id }
-
- member = source.members.find_by(user_id: stranger.id)
- expect(member.tasks_to_be_done).to match_array([:code, :ci])
- expect(member.member_task.project_id).to eq(project_id)
- end
- end
-
- context 'when there are multiple users to add' do
- it 'creates a member_task with the correct attributes' do
- post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
- params: { user_id: [developer.id, stranger.id].join(','), access_level: Member::DEVELOPER, tasks_to_be_done: %w(code ci), tasks_project_id: project_id }
-
- members = source.members.where(user_id: [developer.id, stranger.id])
- members.each do |member|
- expect(member.tasks_to_be_done).to match_array([:code, :ci])
- expect(member.member_task.project_id).to eq(project_id)
- end
- end
- end
- end
-
it "returns 409 if member already exists" do
source.add_guest(stranger)
@@ -831,10 +803,6 @@ RSpec.describe API::Members, feature_category: :groups_and_projects do
end
describe 'POST /projects/:id/members' do
- it_behaves_like 'POST /:source_type/:id/members', 'project' do
- let(:source) { project }
- end
-
context 'adding owner to project' do
it_behaves_like 'a 403 response when user does not have rights to manage members of a specific access level' do
let(:route) do
@@ -858,16 +826,48 @@ RSpec.describe API::Members, feature_category: :groups_and_projects do
end
end
- it_behaves_like 'POST /:source_type/:id/members', 'group' do
- let(:source) { group }
- end
+ context 'with admin_group_member FF disabled' do
+ before do
+ stub_feature_flags(admin_group_member: false)
+ end
- it_behaves_like 'PUT /:source_type/:id/members/:user_id', 'project' do
- let(:source) { project }
+ it_behaves_like 'POST /:source_type/:id/members', 'project' do
+ let(:source) { project }
+ end
+
+ it_behaves_like 'POST /:source_type/:id/members', 'group' do
+ let(:source) { group }
+ end
+
+ it_behaves_like 'PUT /:source_type/:id/members/:user_id', 'project' do
+ let(:source) { project }
+ end
+
+ it_behaves_like 'PUT /:source_type/:id/members/:user_id', 'group' do
+ let(:source) { group }
+ end
end
- it_behaves_like 'PUT /:source_type/:id/members/:user_id', 'group' do
- let(:source) { group }
+ context 'with admin_group_member FF enabled' do
+ before do
+ stub_feature_flags(admin_group_member: true)
+ end
+
+ it_behaves_like 'POST /:source_type/:id/members', 'project' do
+ let(:source) { project }
+ end
+
+ it_behaves_like 'POST /:source_type/:id/members', 'group' do
+ let(:source) { group }
+ end
+
+ it_behaves_like 'PUT /:source_type/:id/members/:user_id', 'project' do
+ let(:source) { project }
+ end
+
+ it_behaves_like 'PUT /:source_type/:id/members/:user_id', 'group' do
+ let(:source) { group }
+ end
end
it_behaves_like 'DELETE /:source_type/:id/members/:user_id', 'project' do
diff --git a/spec/requests/api/merge_request_diffs_spec.rb b/spec/requests/api/merge_request_diffs_spec.rb
index 4f812e5d8eb..53cef226ad8 100644
--- a/spec/requests/api/merge_request_diffs_spec.rb
+++ b/spec/requests/api/merge_request_diffs_spec.rb
@@ -55,6 +55,15 @@ RSpec.describe API::MergeRequestDiffs, 'MergeRequestDiffs', feature_category: :s
expect(json_response['diffs'].size).to eq(merge_request_diff.diffs.size)
end
+ context 'when unidiff format is requested' do
+ it 'returns a diff in Unified format' do
+ get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/versions/#{merge_request_diff.id}", user), params: { unidiff: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.dig('diffs', 0, 'diff')).to eq(merge_request_diff.diffs.diffs.first.unidiff)
+ end
+ end
+
it 'returns a 404 when merge_request id is used instead of the iid' do
get api("/projects/#{project.id}/merge_requests/#{merge_request.id}/versions/#{merge_request_diff.id}", user)
expect(response).to have_gitlab_http_status(:not_found)
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index d3f8aeb3e76..2cf8872cd40 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -27,10 +27,10 @@ RSpec.describe API::MergeRequests, :aggregate_failures, feature_category: :sourc
shared_context 'with merge requests' do
let_it_be(:milestone1) { create(:milestone, title: '0.9', project: project) }
+ let_it_be(:merge_request_merged) { create(:merge_request, state: "merged", author: user, assignees: [user], source_project: project, target_project: project, title: "Merged test", created_at: base_time + 2.seconds, updated_at: base_time + 1.hour, merge_commit_sha: '9999999999999999999999999999999999999999') }
let_it_be(:merge_request) { create(:merge_request, :simple, milestone: milestone1, author: user, assignees: [user], source_project: project, target_project: project, source_branch: 'markdown', title: "Test", created_at: base_time, updated_at: base_time + 3.hours) }
let_it_be(:merge_request_closed) { create(:merge_request, state: "closed", milestone: milestone1, author: user, assignees: [user], source_project: project, target_project: project, title: "Closed test", created_at: base_time + 1.second, updated_at: base_time) }
let_it_be(:merge_request_locked) { create(:merge_request, state: "locked", milestone: milestone1, author: user, assignees: [user], source_project: project, target_project: project, title: "Locked test", created_at: base_time + 1.second, updated_at: base_time + 2.hours) }
- let_it_be(:merge_request_merged) { create(:merge_request, state: "merged", author: user, assignees: [user], source_project: project, target_project: project, title: "Merged test", created_at: base_time + 2.seconds, updated_at: base_time + 1.hour, merge_commit_sha: '9999999999999999999999999999999999999999') }
let_it_be(:note) { create(:note_on_merge_request, author: user, project: project, noteable: merge_request, note: "a comment on a MR") }
let_it_be(:note2) { create(:note_on_merge_request, author: user, project: project, noteable: merge_request, note: "another comment on a MR") }
end
@@ -1829,6 +1829,15 @@ RSpec.describe API::MergeRequests, :aggregate_failures, feature_category: :sourc
expect(json_response['overflow']).to be_falsy
end
+ context 'when unidiff format is requested' do
+ it 'returns the diff in Unified format' do
+ get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/changes", user), params: { unidiff: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.dig('changes', 0, 'diff')).to eq(merge_request.diffs.diffs.first.unidiff)
+ end
+ end
+
context 'when using DB-backed diffs' do
it_behaves_like 'find an existing merge request'
@@ -1902,6 +1911,15 @@ RSpec.describe API::MergeRequests, :aggregate_failures, feature_category: :sourc
expect(json_response.size).to eq(merge_request.diffs.size)
end
+ context 'when unidiff format is requested' do
+ it 'returns the diff in Unified format' do
+ get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/diffs", user), params: { unidiff: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.dig(0, 'diff')).to eq(merge_request.diffs.diffs.first.unidiff)
+ end
+ end
+
context 'when pagination params are present' do
it 'returns limited diffs' do
get(
diff --git a/spec/requests/api/ml/mlflow/runs_spec.rb b/spec/requests/api/ml/mlflow/runs_spec.rb
index af04c387830..75b70dd867a 100644
--- a/spec/requests/api/ml/mlflow/runs_spec.rb
+++ b/spec/requests/api/ml/mlflow/runs_spec.rb
@@ -185,7 +185,7 @@ RSpec.describe API::Ml::Mlflow::Runs, feature_category: :mlops do
end
end
- describe 'GET /projects/:id/ml/mlflow/api/2.0/mlflow/runs/search' do
+ describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/runs/search' do
let_it_be(:search_experiment) { create(:ml_experiments, user: nil, project: project) }
let_it_be(:first_candidate) do
create(:ml_candidates, experiment: search_experiment, name: 'c', user: nil).tap do |c|
@@ -215,6 +215,8 @@ RSpec.describe API::Ml::Mlflow::Runs, feature_category: :mlops do
}
end
+ let(:request) { post api(route), params: params, headers: headers }
+
it 'searches runs for a project', :aggregate_failures do
is_expected.to have_gitlab_http_status(:ok)
is_expected.to match_response_schema('ml/search_runs')
@@ -231,7 +233,7 @@ RSpec.describe API::Ml::Mlflow::Runs, feature_category: :mlops do
params = default_params.merge(page_token: json_response['next_page_token'])
- get api(route), params: params, headers: headers
+ post api(route), params: params, headers: headers
second_page_response = Gitlab::Json.parse(response.body)
second_page_runs = second_page_response['runs']
diff --git a/spec/requests/api/namespaces_spec.rb b/spec/requests/api/namespaces_spec.rb
index f796edfb20e..5fd41013b25 100644
--- a/spec/requests/api/namespaces_spec.rb
+++ b/spec/requests/api/namespaces_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe API::Namespaces, :aggregate_failures, feature_category: :groups_a
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(group_kind_json_response.keys).to include('id', 'kind', 'name', 'path', 'full_path',
- 'parent_id', 'members_count_with_descendants', 'root_repository_size')
+ 'parent_id', 'members_count_with_descendants', 'root_repository_size', 'projects_count')
expect(user_kind_json_response.keys).to include('id', 'kind', 'name', 'path', 'full_path', 'parent_id')
end
@@ -66,7 +66,7 @@ RSpec.describe API::Namespaces, :aggregate_failures, feature_category: :groups_a
owned_group_response = json_response.find { |resource| resource['id'] == group1.id }
expect(owned_group_response.keys).to include('id', 'kind', 'name', 'path', 'full_path',
- 'parent_id', 'members_count_with_descendants', 'root_repository_size')
+ 'parent_id', 'members_count_with_descendants', 'root_repository_size', 'projects_count')
end
it "returns correct attributes when user cannot admin group" do
diff --git a/spec/requests/api/npm_group_packages_spec.rb b/spec/requests/api/npm_group_packages_spec.rb
index 7fba75b0630..12b2ccd1bf7 100644
--- a/spec/requests/api/npm_group_packages_spec.rb
+++ b/spec/requests/api/npm_group_packages_spec.rb
@@ -22,11 +22,11 @@ RSpec.describe API::NpmGroupPackages, feature_category: :package_registry do
where(:auth, :group_visibility, :project_visibility, :user_role, :expected_status) do
nil | :public | :public | nil | :ok
- nil | :public | :internal | nil | :not_found
- nil | :public | :private | nil | :not_found
- nil | :internal | :internal | nil | :not_found
- nil | :internal | :private | nil | :not_found
- nil | :private | :private | nil | :not_found
+ nil | :public | :internal | nil | :unauthorized
+ nil | :public | :private | nil | :unauthorized
+ nil | :internal | :internal | nil | :unauthorized
+ nil | :internal | :private | nil | :unauthorized
+ nil | :private | :private | nil | :unauthorized
:oauth | :public | :public | :guest | :ok
:oauth | :public | :internal | :guest | :ok
diff --git a/spec/requests/api/nuget_project_packages_spec.rb b/spec/requests/api/nuget_project_packages_spec.rb
index b55d992c1e4..a116be84b3e 100644
--- a/spec/requests/api/nuget_project_packages_spec.rb
+++ b/spec/requests/api/nuget_project_packages_spec.rb
@@ -6,7 +6,6 @@ RSpec.describe API::NugetProjectPackages, feature_category: :package_registry do
using RSpec::Parameterized::TableSyntax
- let_it_be_with_reload(:project) { create(:project, :public) }
let_it_be(:deploy_token) { create(:deploy_token, read_package_registry: true, write_package_registry: true) }
let_it_be(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token, project: project) }
let_it_be(:package_name) { 'Dummy.Package' }
@@ -15,11 +14,9 @@ RSpec.describe API::NugetProjectPackages, feature_category: :package_registry do
let(:target_type) { 'projects' }
let(:snowplow_gitlab_standard_context) { snowplow_context }
- def snowplow_context(user_role: :developer)
- if user_role == :anonymous
- { project: target, namespace: target.namespace, property: 'i_package_nuget_user' }
- else
- { project: target, namespace: target.namespace, property: 'i_package_nuget_user', user: user }
+ def snowplow_context(user_role: :developer, event_user: user)
+ { project: target, namespace: target.namespace, property: 'i_package_nuget_user' }.tap do |context|
+ context[:user] = event_user unless user_role == :anonymous
end
end
@@ -203,11 +200,12 @@ RSpec.describe API::NugetProjectPackages, feature_category: :package_registry do
describe 'GET /api/v4/projects/:id/packages/nuget/download/*package_name/*package_version/*package_filename' do
let_it_be(:package) { create(:nuget_package, :with_symbol_package, :with_metadatum, project: project, name: package_name, version: '0.1') }
+ let_it_be(:package_version) { package.version }
let(:format) { 'nupkg' }
- let(:url) { "/projects/#{target.id}/packages/nuget/download/#{package.name}/#{package.version}/#{package.name}.#{package.version}.#{format}" }
+ let(:url) { "/projects/#{target.id}/packages/nuget/download/#{package.name}/#{package_version}/#{package.name}.#{package_version}.#{format}" }
- subject { get api(url) }
+ subject { get api(url), headers: headers }
context 'with valid target' do
where(:visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
@@ -236,8 +234,6 @@ RSpec.describe API::NugetProjectPackages, feature_category: :package_registry do
let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
let(:snowplow_gitlab_standard_context) { snowplow_context(user_role: user_role) }
- subject { get api(url), headers: headers }
-
before do
update_visibility_to(Gitlab::VisibilityLevel.const_get(visibility_level, false))
end
@@ -320,6 +316,97 @@ RSpec.describe API::NugetProjectPackages, feature_category: :package_registry do
end
end
+ describe 'DELETE /api/v4/projects/:id/packages/nuget/*package_name/*package_version' do
+ let_it_be(:package) { create(:nuget_package, project: project, name: package_name) }
+
+ let(:url) { "/projects/#{target.id}/packages/nuget/#{package_name}/#{package.version}" }
+
+ subject { delete api(url), headers: headers }
+
+ it { is_expected.to have_request_urgency(:low) }
+
+ context 'with valid target' do
+ where(:auth, :visibility, :user_role, :shared_examples_name, :expected_status) do
+ nil | :public | :anonymous | 'rejects nuget packages access' | :unauthorized
+ nil | :private | :anonymous | 'rejects nuget packages access' | :unauthorized
+ nil | :internal | :anonymous | 'rejects nuget packages access' | :unauthorized
+
+ :personal_access_token | :public | :guest | 'rejects nuget packages access' | :forbidden
+ :personal_access_token | :public | :developer | 'rejects nuget packages access' | :forbidden
+ :personal_access_token | :public | :maintainer | 'process nuget delete request' | :no_content
+ :personal_access_token | :private | :guest | 'rejects nuget packages access' | :forbidden
+ :personal_access_token | :private | :developer | 'rejects nuget packages access' | :forbidden
+ :personal_access_token | :private | :maintainer | 'process nuget delete request' | :no_content
+ :personal_access_token | :internal | :guest | 'rejects nuget packages access' | :forbidden
+ :personal_access_token | :internal | :developer | 'rejects nuget packages access' | :forbidden
+ :personal_access_token | :internal | :maintainer | 'process nuget delete request' | :no_content
+
+ :job_token | :public | :guest | 'rejects nuget packages access' | :forbidden
+ :job_token | :public | :developer | 'rejects nuget packages access' | :forbidden
+ :job_token | :public | :maintainer | 'process nuget delete request' | :no_content
+ :job_token | :private | :guest | 'rejects nuget packages access' | :forbidden
+ :job_token | :private | :developer | 'rejects nuget packages access' | :forbidden
+ :job_token | :private | :maintainer | 'process nuget delete request' | :no_content
+ :job_token | :internal | :guest | 'rejects nuget packages access' | :forbidden
+ :job_token | :internal | :developer | 'rejects nuget packages access' | :forbidden
+ :job_token | :internal | :maintainer | 'process nuget delete request' | :no_content
+
+ :deploy_token | :public | nil | 'process nuget delete request' | :no_content
+ :deploy_token | :private | nil | 'process nuget delete request' | :no_content
+ :deploy_token | :internal | nil | 'process nuget delete request' | :no_content
+
+ :api_key | :public | :guest | 'rejects nuget packages access' | :forbidden
+ :api_key | :public | :developer | 'rejects nuget packages access' | :forbidden
+ :api_key | :public | :maintainer | 'process nuget delete request' | :no_content
+ :api_key | :private | :guest | 'rejects nuget packages access' | :forbidden
+ :api_key | :private | :developer | 'rejects nuget packages access' | :forbidden
+ :api_key | :private | :maintainer | 'process nuget delete request' | :no_content
+ :api_key | :internal | :guest | 'rejects nuget packages access' | :forbidden
+ :api_key | :internal | :developer | 'rejects nuget packages access' | :forbidden
+ :api_key | :internal | :maintainer | 'process nuget delete request' | :no_content
+ end
+
+ with_them do
+ let(:snowplow_gitlab_standard_context) do
+ snowplow_context(user_role: user_role, event_user: auth == :deploy_token ? deploy_token : user)
+ end
+
+ let(:headers) do
+ case auth
+ when :personal_access_token
+ basic_auth_header(user.username, personal_access_token.token)
+ when :job_token
+ basic_auth_header(::Gitlab::Auth::CI_JOB_USER, job.token)
+ when :deploy_token
+ basic_auth_header(deploy_token.username, deploy_token.token)
+ when :api_key
+ { 'X-NuGet-ApiKey' => personal_access_token.token }
+ else
+ {}
+ end
+ end
+
+ before do
+ update_visibility_to(Gitlab::VisibilityLevel.const_get(visibility.to_s.upcase, false))
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status]
+ end
+ end
+
+ it_behaves_like 'rejects nuget access with unknown target id'
+
+ it_behaves_like 'rejects nuget access with invalid target id'
+
+ ['%20', '..%2F..', '../..'].each do |value|
+ context "with invalid package name #{value}" do
+ let(:package_name) { value }
+
+ it_behaves_like 'returning response status', :bad_request
+ end
+ end
+ end
+
describe 'PUT /api/v4/projects/:id/packages/nuget/v2/authorize' do
it_behaves_like 'nuget authorize upload endpoint' do
let(:url) { "/projects/#{target.id}/packages/nuget/v2/authorize" }
diff --git a/spec/requests/api/oauth_tokens_spec.rb b/spec/requests/api/oauth_tokens_spec.rb
index 19a943477d2..f08af75a03d 100644
--- a/spec/requests/api/oauth_tokens_spec.rb
+++ b/spec/requests/api/oauth_tokens_spec.rb
@@ -6,9 +6,9 @@ RSpec.describe 'OAuth tokens', feature_category: :system_access do
include HttpBasicAuthHelpers
context 'Resource Owner Password Credentials' do
- def request_oauth_token(user, headers = {})
+ def request_oauth_token(user, headers = {}, password = user.password)
post '/oauth/token',
- params: { username: user.username, password: user.password, grant_type: 'password' },
+ params: { username: user.username, password: password, grant_type: 'password' },
headers: headers
end
@@ -61,13 +61,28 @@ RSpec.describe 'OAuth tokens', feature_category: :system_access do
context 'when user does not have 2FA enabled' do
context 'when no client credentials provided' do
- it 'creates an access token' do
- user = create(:user)
+ context 'with valid credentials' do
+ it 'creates an access token' do
+ user = create(:user)
- request_oauth_token(user)
+ request_oauth_token(user)
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['access_token']).to be_present
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['access_token']).to be_present
+ end
+ end
+
+ context 'with invalid user password' do
+ it 'does not create an access token' do
+ user = create(:user)
+
+ expect do
+ request_oauth_token(user, {}, 'not-my-password')
+ end.to change { user.reload.failed_attempts }.from(0).to(1)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('invalid_grant')
+ end
end
end
@@ -83,7 +98,7 @@ RSpec.describe 'OAuth tokens', feature_category: :system_access do
end
end
- context 'with invalid credentials' do
+ context 'with invalid client secret' do
it 'does not create an access token' do
user = create(:user)
diff --git a/spec/requests/api/project_attributes.yml b/spec/requests/api/project_attributes.yml
index d95f96c25d6..ec98df22af7 100644
--- a/spec/requests/api/project_attributes.yml
+++ b/spec/requests/api/project_attributes.yml
@@ -132,7 +132,6 @@ project_feature:
- project_id
- updated_at
- operations_access_level
- - model_experiments_access_level
computed_attributes:
- issues_enabled
- jobs_enabled
@@ -164,7 +163,6 @@ project_setting:
- selective_code_owner_removals
- show_diff_preview_in_email
- suggested_reviewers_enabled
- - jitsu_key
- mirror_branch_regex
- allow_pipeline_trigger_approve_deployment
- pages_unique_domain_enabled
@@ -172,16 +170,7 @@ project_setting:
- pages_multiple_versions_enabled
- runner_registration_enabled
- product_analytics_instrumentation_key
- - jitsu_host
- - jitsu_project_xid
- - jitsu_administrator_email
- - jitsu_administrator_password
- - encrypted_jitsu_administrator_password
- - encrypted_jitsu_administrator_password_iv
- product_analytics_data_collector_host
- - product_analytics_clickhouse_connection_string
- - encrypted_product_analytics_clickhouse_connection_string
- - encrypted_product_analytics_clickhouse_connection_string_iv
- cube_api_base_url
- cube_api_key
- encrypted_cube_api_key
@@ -204,5 +193,6 @@ build_service_desk_setting: # service_desk_setting
- encrypted_custom_email_smtp_password
- encrypted_custom_email_smtp_password_iv
- custom_email_smtp_password
+ - add_external_participants_from_cc
remapped_attributes:
project_key: service_desk_address
diff --git a/spec/requests/api/project_clusters_spec.rb b/spec/requests/api/project_clusters_spec.rb
index 99c190757ca..c52948a4cb0 100644
--- a/spec/requests/api/project_clusters_spec.rb
+++ b/spec/requests/api/project_clusters_spec.rb
@@ -443,7 +443,7 @@ RSpec.describe API::ProjectClusters, feature_category: :deployment_management do
it 'returns validation error' do
expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']['platform_kubernetes'].first)
+ expect(json_response['message']['platform_kubernetes.base'].first)
.to eq(_('Cannot modify managed Kubernetes cluster'))
end
end
diff --git a/spec/requests/api/project_container_repositories_spec.rb b/spec/requests/api/project_container_repositories_spec.rb
index a2e1a1c1721..f51b94bb78e 100644
--- a/spec/requests/api/project_container_repositories_spec.rb
+++ b/spec/requests/api/project_container_repositories_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::ProjectContainerRepositories, feature_category: :package_registry do
+RSpec.describe API::ProjectContainerRepositories, feature_category: :container_registry do
include ExclusiveLeaseHelpers
let_it_be(:project) { create(:project, :private) }
@@ -142,7 +142,6 @@ RSpec.describe API::ProjectContainerRepositories, feature_category: :package_reg
let(:api_user) { maintainer }
it 'marks the repository as delete_scheduled' do
- expect(DeleteContainerRepositoryWorker).not_to receive(:perform_async)
expect { subject }.to change { root_repository.reload.status }.from(nil).to('delete_scheduled')
expect(response).to have_gitlab_http_status(:accepted)
diff --git a/spec/requests/api/project_export_spec.rb b/spec/requests/api/project_export_spec.rb
index 3603a71151e..22729e068da 100644
--- a/spec/requests/api/project_export_spec.rb
+++ b/spec/requests/api/project_export_spec.rb
@@ -704,31 +704,63 @@ RSpec.describe API::ProjectExport, :aggregate_failures, :clean_gitlab_redis_cach
context 'with bulk_import is disabled' do
before do
stub_application_setting(bulk_import_enabled: false)
+ stub_feature_flags(override_bulk_import_disabled: false)
+ end
+
+ shared_examples 'flag override' do |expected_http_status:|
+ it 'enables the feature when override flag is enabled for the user' do
+ stub_feature_flags(override_bulk_import_disabled: user)
+
+ request
+
+ expect(response).to have_gitlab_http_status(expected_http_status)
+ end
+
+ it 'does not enable the feature when override flag is enabled for another user' do
+ other_user = create(:user)
+ stub_feature_flags(override_bulk_import_disabled: other_user)
+
+ request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
end
describe 'POST /projects/:id/export_relations' do
+ subject(:request) { post api(path, user) }
+
it_behaves_like '404 response' do
- subject(:request) { post api(path, user) }
+ let(:message) { '404 Not Found' }
end
+
+ it_behaves_like 'flag override', expected_http_status: :accepted
end
describe 'GET /projects/:id/export_relations/download' do
let_it_be(:export) { create(:bulk_import_export, project: project, relation: 'labels') }
let_it_be(:upload) { create(:bulk_import_export_upload, export: export) }
+ subject(:request) { get api(download_path, user) }
+
before do
upload.update!(export_file: fixture_file_upload('spec/fixtures/bulk_imports/gz/labels.ndjson.gz'))
end
it_behaves_like '404 response' do
- subject(:request) { post api(path, user) }
+ let(:message) { '404 Not Found' }
end
+
+ it_behaves_like 'flag override', expected_http_status: :ok
end
describe 'GET /projects/:id/export_relations/status' do
+ subject(:request) { get api(status_path, user) }
+
it_behaves_like '404 response' do
- subject(:request) { get api(status_path, user) }
+ let(:message) { '404 Not Found' }
end
+
+ it_behaves_like 'flag override', expected_http_status: :ok
end
end
end
@@ -758,11 +790,5 @@ RSpec.describe API::ProjectExport, :aggregate_failures, :clean_gitlab_redis_cach
end
end
end
-
- context 'when bulk import is disabled' do
- it_behaves_like '404 response' do
- subject(:request) { get api(path, user) }
- end
- end
end
end
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 12898060e22..aa7120e482a 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -823,6 +823,21 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
let(:admin_mode) { true }
let(:projects) { Project.all }
end
+
+ it 'returns a project with user namespace that has a missing owner' do
+ project.namespace.update_column(:owner_id, non_existing_record_id)
+ project.route.update_column(:name, nil)
+
+ get api(path, admin, admin_mode: true), params: { search: project.path }
+ expect(response).to have_gitlab_http_status(:ok)
+
+ project_response = json_response.find { |p| p['id'] == project.id }
+ expect(project_response).to be_present
+ expect(project_response['path']).to eq(project.path)
+
+ namespace_response = project_response['namespace']
+ expect(project_response['web_url']).to include(namespace_response['web_url'])
+ end
end
context 'with default created_at desc order' do
@@ -1271,6 +1286,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
attrs[:builds_access_level] = 'disabled'
attrs[:merge_requests_access_level] = 'disabled'
attrs[:issues_access_level] = 'disabled'
+ attrs[:model_experiments_access_level] = 'disabled'
end
post api(path, user), params: project
@@ -1281,7 +1297,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
next if %i[
has_external_issue_tracker has_external_wiki issues_enabled merge_requests_enabled wiki_enabled storage_version
container_registry_access_level releases_access_level environments_access_level feature_flags_access_level
- infrastructure_access_level monitor_access_level
+ infrastructure_access_level monitor_access_level model_experiments_access_level
].include?(k)
expect(json_response[k.to_s]).to eq(v)
@@ -1384,13 +1400,14 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
it 'disallows creating a project with an import_url that is not reachable' do
url = 'http://example.com'
endpoint_url = "#{url}/info/refs?service=git-upload-pack"
- stub_full_request(endpoint_url, method: :get).to_return({ status: 301, body: '', headers: nil })
+ error_response = { status: 301, body: '', headers: nil }
+ stub_full_request(endpoint_url, method: :get).to_return(error_response)
project_params = { import_url: url, path: 'path-project-Foo', name: 'Foo Project' }
expect { post api(path, user), params: project_params }.not_to change { Project.count }
expect(response).to have_gitlab_http_status(:unprocessable_entity)
- expect(json_response['message']).to eq("#{url} is not a valid HTTP Git repository")
+ expect(json_response['message']).to eq("#{url} endpoint error: #{error_response[:status]}")
end
it 'creates a project with an import_url that is valid' do
@@ -3877,7 +3894,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
expect(Project.find_by(path: project[:path]).analytics_access_level).to eq(ProjectFeature::PRIVATE)
end
- %i(releases_access_level environments_access_level feature_flags_access_level infrastructure_access_level monitor_access_level).each do |field|
+ %i(releases_access_level environments_access_level feature_flags_access_level infrastructure_access_level monitor_access_level model_experiments_access_level).each do |field|
it "sets #{field}" do
put api(path, user), params: { field => 'private' }
diff --git a/spec/requests/api/repositories_spec.rb b/spec/requests/api/repositories_spec.rb
index a94ed63bf47..22239f1d23f 100644
--- a/spec/requests/api/repositories_spec.rb
+++ b/spec/requests/api/repositories_spec.rb
@@ -538,6 +538,18 @@ RSpec.describe API::Repositories, feature_category: :source_code_management do
expect(json_response['compare_same_ref']).to be_truthy
end
+ context 'when unidiff format is requested' do
+ let(:commit) { project.repository.commit('feature') }
+ let(:diff) { commit.diffs.diffs.first }
+
+ it 'returns a diff in Unified format' do
+ get api(route, current_user), params: { from: 'master', to: 'feature', unidiff: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.dig('diffs', 0, 'diff')).to eq(diff.unidiff)
+ end
+ end
+
it "returns an empty string when the diff overflows" do
allow(Gitlab::Git::DiffCollection)
.to receive(:default_limits)
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index ad52076523c..2fdcf710471 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -85,6 +85,8 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response['gitlab_shell_operation_limit']).to eq(600)
expect(json_response['namespace_aggregation_schedule_lease_duration_in_seconds']).to eq(300)
expect(json_response['default_branch_protection_defaults']).to be_kind_of(Hash)
+ expect(json_response['max_login_attempts']).to be_nil
+ expect(json_response['failed_login_attempts_unlock_period_in_minutes']).to be_nil
end
end
@@ -1046,5 +1048,19 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
end
end
end
+
+ context 'login attempts lock settings' do
+ it 'updates the settings' do
+ put(
+ api("/application/settings", admin),
+ params: { max_login_attempts: 3,
+ failed_login_attempts_unlock_period_in_minutes: 30 }
+ )
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['max_login_attempts']).to eq(3)
+ expect(json_response['failed_login_attempts_unlock_period_in_minutes']).to eq(30)
+ end
+ end
end
end
diff --git a/spec/requests/api/usage_data_queries_spec.rb b/spec/requests/api/usage_data_queries_spec.rb
index fdd186439a6..7d8b16b217c 100644
--- a/spec/requests/api/usage_data_queries_spec.rb
+++ b/spec/requests/api/usage_data_queries_spec.rb
@@ -1,7 +1,6 @@
# frozen_string_literal: true
require 'spec_helper'
-require 'rake_helper'
RSpec.describe API::UsageDataQueries, :aggregate_failures, feature_category: :service_ping do
include UsageDataHelpers
@@ -91,8 +90,8 @@ RSpec.describe API::UsageDataQueries, :aggregate_failures, feature_category: :se
data = Gitlab::Json.parse(File.read(file))
expect(
- json_response['counts_monthly'].except('aggregated_metrics')
- ).to eq(data['counts_monthly'].except('aggregated_metrics'))
+ json_response['counts_weekly'].except('aggregated_metrics')
+ ).to eq(data['counts_weekly'].except('aggregated_metrics'))
expect(json_response['counts']).to eq(data['counts'])
expect(json_response['active_user_count']).to eq(data['active_user_count'])
diff --git a/spec/requests/api/usage_data_spec.rb b/spec/requests/api/usage_data_spec.rb
index c8f1e8d6973..37fa75a812c 100644
--- a/spec/requests/api/usage_data_spec.rb
+++ b/spec/requests/api/usage_data_spec.rb
@@ -200,6 +200,9 @@ RSpec.describe API::UsageData, feature_category: :service_ping do
end
context 'with authentication' do
+ let_it_be(:namespace) { create(:namespace) }
+ let_it_be(:project) { create(:project) }
+
before do
stub_application_setting(usage_ping_enabled: true)
allow(Gitlab::RequestForgeryProtection).to receive(:verified?).and_return(true)
@@ -207,11 +210,10 @@ RSpec.describe API::UsageData, feature_category: :service_ping do
context 'with correct params' do
it 'returns status ok' do
- expect(Gitlab::InternalEvents).to receive(:track_event).with(known_event, anything)
- # allow other events to also get triggered
- allow(Gitlab::InternalEvents).to receive(:track_event)
+ expect(Gitlab::InternalEvents).to receive(:track_event)
+ .with(known_event, send_snowplow_event: false, user: user, namespace: namespace, project: project)
- post api(endpoint, user), params: { event: known_event, namespace_id: namespace_id, project_id: project_id }
+ post api(endpoint, user), params: { event: known_event, namespace_id: namespace.id, project_id: project.id }
expect(response).to have_gitlab_http_status(:ok)
end
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index 5973649a9d7..7da44266064 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -260,46 +260,16 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
end
end
- context 'when api_keyset_pagination_multi_order FF is enabled' do
- before do
- stub_feature_flags(api_keyset_pagination_multi_order: true)
- end
-
- it_behaves_like 'an endpoint with keyset pagination', invalid_order: nil do
- let(:first_record) { user }
- let(:second_record) { admin }
- let(:api_call) { api(path, user) }
- end
-
- it 'still supports offset pagination when keyset pagination params are not provided' do
- get api(path, user)
-
- expect(response).to include_pagination_headers
- end
+ it_behaves_like 'an endpoint with keyset pagination', invalid_order: nil do
+ let(:first_record) { user }
+ let(:second_record) { admin }
+ let(:api_call) { api(path, user) }
end
- context 'when api_keyset_pagination_multi_order FF is disabled' do
- before do
- stub_feature_flags(api_keyset_pagination_multi_order: false)
- end
-
- it 'paginates the records correctly using offset pagination' do
- get api(path, user), params: { pagination: 'keyset', per_page: 1 }
-
- params_for_next_page = pagination_params_from_next_url(response)
- expect(response).to include_pagination_headers
- expect(params_for_next_page).not_to include('cursor')
- end
-
- context 'on making requests with unsupported ordering structure' do
- it 'does not return error' do
- get api(path, user),
- params: { pagination: 'keyset', per_page: 1, order_by: 'created_at', sort: 'asc' }
+ it 'still supports offset pagination when keyset pagination params are not provided' do
+ get api(path, user)
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
- end
- end
+ expect(response).to include_pagination_headers
end
end
end
@@ -4619,6 +4589,143 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
end
end
+ describe 'POST /user/personal_access_tokens' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:name) { 'new pat' }
+ let(:scopes) { %w[k8s_proxy] }
+ let(:path) { "/user/personal_access_tokens" }
+ let(:params) { { name: name, scopes: scopes } }
+
+ let(:all_scopes) do
+ ::Gitlab::Auth::API_SCOPES + ::Gitlab::Auth::AI_FEATURES_SCOPES + ::Gitlab::Auth::OPENID_SCOPES +
+ ::Gitlab::Auth::PROFILE_SCOPES + ::Gitlab::Auth::REPOSITORY_SCOPES + ::Gitlab::Auth::REGISTRY_SCOPES +
+ ::Gitlab::Auth::OBSERVABILITY_SCOPES + ::Gitlab::Auth::ADMIN_SCOPES
+ end
+
+ it 'returns error if required attributes are missing' do
+ post api(path, user)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('name is missing, scopes is missing')
+ end
+
+ context 'when scope is not allowed' do
+ where(:disallowed_scopes) do
+ all_scopes - [::Gitlab::Auth::K8S_PROXY_SCOPE]
+ end
+
+ with_them do
+ it 'returns error' do
+ post api(path, user), params: params.merge({ scopes: [disallowed_scopes] })
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('scopes does not have a valid value')
+ end
+ end
+ end
+
+ it 'returns error if one of the scopes is not allowed' do
+ post api(path, user), params: params.merge({ scopes: [::Gitlab::Auth::K8S_PROXY_SCOPE, ::Gitlab::Auth::API_SCOPE] })
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('scopes does not have a valid value')
+ end
+
+ it 'returns a 401 error when not authenticated' do
+ post api(path), params: params
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ expect(json_response['message']).to eq('401 Unauthorized')
+ end
+
+ it 'returns a 403 error when called with a read_api-scoped PAT' do
+ read_only_pat = create(:personal_access_token, scopes: ['read_api'], user: user)
+ post api(path, personal_access_token: read_only_pat), params: params
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ context 'when scopes are empty' do
+ let(:scopes) { [] }
+
+ it 'returns an error when no scopes are given' do
+ post api(path, user), params: params
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(json_response['message']).to eq("Scopes can't be blank")
+ end
+ end
+
+ it 'creates a personal access token' do
+ post api(path, user), params: params
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['name']).to eq(name)
+ expect(json_response['scopes']).to eq(scopes)
+ expect(json_response['expires_at']).to eq(1.day.from_now.to_date.to_s)
+ expect(json_response['id']).to be_present
+ expect(json_response['created_at']).to be_present
+ expect(json_response['active']).to be_truthy
+ expect(json_response['revoked']).to be_falsey
+ expect(json_response['token']).to be_present
+ end
+
+ context 'when expires_at is given' do
+ let(:params) { { name: name, scopes: scopes, expires_at: expires_at } }
+
+ context 'when expires_at is in the past' do
+ let(:expires_at) { 1.day.ago }
+
+ it 'creates an inactive personal access token' do
+ post api(path, user), params: params
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['active']).to be_falsey
+ end
+ end
+
+ context 'when expires_at is in the future' do
+ let(:expires_at) { 1.month.from_now.to_date }
+
+ it 'creates a personal access token' do
+ post api(path, user), params: params
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['name']).to eq(name)
+ expect(json_response['scopes']).to eq(scopes)
+ expect(json_response['expires_at']).to eq(1.month.from_now.to_date.to_s)
+ expect(json_response['id']).to be_present
+ expect(json_response['created_at']).to be_present
+ expect(json_response['active']).to be_truthy
+ expect(json_response['revoked']).to be_falsey
+ expect(json_response['token']).to be_present
+ end
+ end
+ end
+
+ context 'when an error is thrown by the model' do
+ let!(:admin_personal_access_token) { create(:personal_access_token, :admin_mode, user: admin) }
+ let(:error_message) { 'error message' }
+
+ before do
+ allow_next_instance_of(PersonalAccessToken) do |personal_access_token|
+ allow(personal_access_token).to receive_message_chain(:errors, :full_messages)
+ .and_return([error_message])
+
+ allow(personal_access_token).to receive(:save).and_return(false)
+ end
+ end
+
+ it 'returns the error' do
+ post api(path, personal_access_token: admin_personal_access_token), params: params
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(json_response['message']).to eq(error_message)
+ end
+ end
+ end
+
describe 'GET /users/:user_id/impersonation_tokens' do
let_it_be(:active_personal_access_token) { create(:personal_access_token, user: user) }
let_it_be(:revoked_personal_access_token) { create(:personal_access_token, :revoked, user: user) }
@@ -4675,7 +4782,7 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
describe 'POST /users/:user_id/impersonation_tokens' do
let(:name) { 'my new pat' }
let(:expires_at) { '2016-12-28' }
- let(:scopes) { %w(api read_user) }
+ let(:scopes) { %w[api read_user] }
let(:impersonation) { true }
let(:path) { "/users/#{user.id}/impersonation_tokens" }
let(:params) { { name: name, expires_at: expires_at, scopes: scopes, impersonation: impersonation } }
diff --git a/spec/requests/api/vs_code/settings/vs_code_settings_sync_spec.rb b/spec/requests/api/vs_code/settings/vs_code_settings_sync_spec.rb
new file mode 100644
index 00000000000..1055a8efded
--- /dev/null
+++ b/spec/requests/api/vs_code/settings/vs_code_settings_sync_spec.rb
@@ -0,0 +1,141 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::VsCode::Settings::VsCodeSettingsSync, :aggregate_failures, factory_default: :keep, feature_category: :web_ide do
+ let_it_be(:user) { create_default(:user) }
+ let_it_be(:user_token) { create(:personal_access_token) }
+
+ shared_examples "returns unauthorized when not authenticated" do
+ it 'returns 401 for non-authenticated' do
+ get api(path)
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ shared_examples "returns 20x when authenticated" do |http_status|
+ it "returns #{http_status || :ok} when authenticated" do
+ get api(path, personal_access_token: user_token)
+ expect(response).to have_gitlab_http_status(http_status || :ok)
+ end
+ end
+
+ describe 'GET /vscode/settings_sync/v1/manifest' do
+ let(:path) { "/vscode/settings_sync/v1/manifest" }
+
+ it_behaves_like "returns unauthorized when not authenticated"
+ it_behaves_like "returns 20x when authenticated"
+
+ context 'when no settings record is present' do
+ it 'returns a session id' do
+ get api(path, personal_access_token: user_token)
+ expect(json_response).to have_key('latest')
+ expect(json_response).to have_key('session')
+ end
+
+ it 'returns no latest keys' do
+ get api(path, personal_access_token: user_token)
+ expect(json_response).to have_key('latest')
+ expect(json_response['latest']).not_to have_key('settings')
+ end
+
+ it 'includes default machine id' do
+ get api(path, personal_access_token: user_token)
+ expect(json_response['latest']).to have_key('machines')
+ end
+ end
+
+ context 'when settings record is present' do
+ let_it_be(:settings) { create(:vscode_setting) }
+
+ it 'returns the latest keys' do
+ get api(path, personal_access_token: user_token)
+ expect(json_response).to have_key('latest')
+ expect(json_response).to have_key('session')
+ expect(json_response['latest']).to have_key('settings')
+ expect(json_response.dig('latest', 'settings')).to eq settings.uuid
+ end
+ end
+ end
+
+ describe 'GET /vscode/settings_sync/v1/resource/machines/latest' do
+ let(:path) { "/vscode/settings_sync/v1/resource/machines/latest" }
+
+ it_behaves_like "returns unauthorized when not authenticated"
+ it_behaves_like "returns 20x when authenticated"
+
+ it 'returns a list of machines' do
+ get api(path, personal_access_token: user_token)
+ expect(json_response).to have_key('version')
+ expect(json_response).to have_key('machines')
+ expect(json_response['machines']).to be_an Array
+ expect(json_response['machines'].first).to have_key('id')
+ end
+ end
+
+ describe 'GET /vscode/settings_sync/v1/resource/:resource_name/:id' do
+ let(:path) { "/vscode/settings_sync/v1/resource/settings/1" }
+
+ it_behaves_like "returns 20x when authenticated", :no_content
+ it_behaves_like "returns unauthorized when not authenticated"
+
+ context 'when settings with that type are not present' do
+ it 'returns 204 no content and no content ETag header' do
+ get api(path, personal_access_token: user_token)
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(response.header['ETag']).to eq(::VsCode::Settings::NO_CONTENT_ETAG)
+ end
+ end
+
+ context 'when settings with that type are present' do
+ let_it_be(:settings) { create(:vscode_setting, content: '{ "key": "value" }') }
+
+ it 'returns settings with the correct json content' do
+ get api(path, personal_access_token: user_token)
+ expect(json_response).to have_key('content')
+ expect(json_response).to have_key('version')
+ expect(json_response).to have_key('machineId')
+ expect(json_response['content']).to eq('{ "key": "value" }')
+ end
+ end
+ end
+
+ describe 'POST /vscode/settings_sync/v1/resource/:resource_name' do
+ let(:path) { "/vscode/settings_sync/v1/resource/settings" }
+
+ subject(:request) do
+ post api(path, personal_access_token: user_token), params: { content: '{ "editor.fontSize": 12 }', version: 1 }
+ end
+
+ it 'returns unauthorized when not authenticated' do
+ post api(path)
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+
+ it 'returns 201 when a valid request is sent' do
+ request
+
+ expect(response).to have_gitlab_http_status(:created)
+ end
+
+ it 'creates a new record for the setting when the setting is not present' do
+ expect { request }.to change { User.find(user.id).vscode_settings.count }.from(0).to(1)
+ record = User.find(user.id).vscode_settings.by_setting_type('settings').first
+ expect(record.content).to eq('{ "editor.fontSize": 12 }')
+ end
+
+ it 'updates a record if the setting is already present' do
+ create(:vscode_setting)
+ expect { request }.not_to change { User.find(user.id).vscode_settings.count }
+ record = User.find(user.id).vscode_settings.by_setting_type('settings').first
+ expect(record.content).to eq('{ "editor.fontSize": 12 }')
+ end
+
+ it 'fails if required fields not passed' do
+ post api(path, personal_access_token: user_token), params: {}
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+end
diff --git a/spec/requests/application_controller_spec.rb b/spec/requests/application_controller_spec.rb
new file mode 100644
index 00000000000..52fdf6bc69e
--- /dev/null
+++ b/spec/requests/application_controller_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ApplicationController, type: :request, feature_category: :shared do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ it_behaves_like 'Base action controller' do
+ subject(:request) { get root_path }
+ end
+end
diff --git a/spec/requests/chaos_controller_spec.rb b/spec/requests/chaos_controller_spec.rb
new file mode 100644
index 00000000000..d2ce618b041
--- /dev/null
+++ b/spec/requests/chaos_controller_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ChaosController, type: :request, feature_category: :tooling do
+ it_behaves_like 'Base action controller' do
+ before do
+ # Stub leak_mem so we don't actually leak memory for the base action controller tests.
+ allow(Gitlab::Chaos).to receive(:leak_mem).with(100, 30.seconds)
+ end
+
+ subject(:request) { get leakmem_chaos_path }
+ end
+end
diff --git a/spec/requests/concerns/planning_hierarchy_spec.rb b/spec/requests/concerns/planning_hierarchy_spec.rb
index 97b987fa770..265fcde35ad 100644
--- a/spec/requests/concerns/planning_hierarchy_spec.rb
+++ b/spec/requests/concerns/planning_hierarchy_spec.rb
@@ -16,8 +16,7 @@ RSpec.describe PlanningHierarchy, type: :request, feature_category: :groups_and_
it 'renders planning hierarchy' do
get project_planning_hierarchy_path(project)
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.body).to match(/id="js-work-items-hierarchy"/)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/requests/groups/custom_emoji_controller_spec.rb b/spec/requests/groups/custom_emoji_controller_spec.rb
new file mode 100644
index 00000000000..d12cd8e42ac
--- /dev/null
+++ b/spec/requests/groups/custom_emoji_controller_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Groups::CustomEmojiController, feature_category: :code_review_workflow do
+ let_it_be(:group) { create(:group) }
+
+ describe 'GET #index' do
+ context 'with custom_emoji feature flag disabled' do
+ before do
+ stub_feature_flags(custom_emoji: false)
+
+ get group_custom_emoji_index_url(group)
+ end
+
+ it { expect(response).to have_gitlab_http_status(:not_found) }
+ end
+
+ context 'with custom_emoji feature flag enabled' do
+ before do
+ get group_custom_emoji_index_url(group)
+ end
+
+ it { expect(response).to have_gitlab_http_status(:ok) }
+ end
+ end
+end
diff --git a/spec/requests/groups/observability_controller_spec.rb b/spec/requests/groups/observability_controller_spec.rb
deleted file mode 100644
index 247535bc990..00000000000
--- a/spec/requests/groups/observability_controller_spec.rb
+++ /dev/null
@@ -1,99 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Groups::ObservabilityController, feature_category: :tracing do
- let_it_be(:group) { create(:group) }
- let_it_be(:user) { create(:user) }
-
- let(:observability_url) { Gitlab::Observability.observability_url }
- let(:path) { nil }
- let(:expected_observability_path) { nil }
-
- shared_examples 'observability route request' do
- subject do
- get path
- response
- end
-
- it_behaves_like 'observability csp policy' do
- before_all do
- group.add_developer(user)
- end
-
- let(:tested_path) { path }
- end
-
- context 'when user is not authenticated' do
- it 'returns 404' do
- expect(subject).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when user is a guest' do
- before do
- sign_in(user)
- end
-
- it 'returns 404' do
- expect(subject).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when user has the correct permissions' do
- before do
- sign_in(user)
- set_permissions
- end
-
- context 'when observability url is missing' do
- before do
- allow(Gitlab::Observability).to receive(:observability_url).and_return("")
- end
-
- it 'returns 404' do
- expect(subject).to have_gitlab_http_status(:not_found)
- end
- end
-
- it 'returns 200' do
- expect(subject).to have_gitlab_http_status(:ok)
- end
-
- it 'renders the proper layout' do
- expect(subject).to render_template("layouts/group")
- expect(subject).to render_template("layouts/fullscreen")
- expect(subject).not_to render_template('layouts/nav/breadcrumbs')
- expect(subject).to render_template("nav/sidebar/_group")
- expect(subject).to render_template("groups/observability/observability")
- end
-
- it 'renders the js-observability-app element correctly' do
- element = Nokogiri::HTML.parse(subject.body).at_css('#js-observability-app')
- expect(element.attributes['data-observability-iframe-src'].value).to eq(expected_observability_path)
- end
- end
- end
-
- describe 'GET #explore' do
- let(:path) { group_observability_explore_path(group) }
- let(:expected_observability_path) { "#{observability_url}/-/#{group.id}/explore" }
-
- it_behaves_like 'observability route request' do
- let(:set_permissions) do
- group.add_developer(user)
- end
- end
- end
-
- describe 'GET #datasources' do
- let(:path) { group_observability_datasources_path(group) }
- let(:expected_observability_path) { "#{observability_url}/-/#{group.id}/datasources" }
-
- it_behaves_like 'observability route request' do
- let(:set_permissions) do
- group.add_maintainer(user)
- end
- end
- end
-end
diff --git a/spec/requests/groups/settings/access_tokens_controller_spec.rb b/spec/requests/groups/settings/access_tokens_controller_spec.rb
index 8d386d8c1b7..a09feeea786 100644
--- a/spec/requests/groups/settings/access_tokens_controller_spec.rb
+++ b/spec/requests/groups/settings/access_tokens_controller_spec.rb
@@ -116,23 +116,5 @@ RSpec.describe Groups::Settings::AccessTokensController, feature_category: :syst
it 'sets available scopes' do
expect(assigns(:scopes)).to include(Gitlab::Auth::K8S_PROXY_SCOPE)
end
-
- context 'with feature flag k8s_proxy_pat disabled' do
- before do
- stub_feature_flags(k8s_proxy_pat: false)
- get group_settings_access_tokens_path(resource)
- end
-
- it 'includes details of the active group access tokens' do
- active_access_tokens =
- ::GroupAccessTokenSerializer.new.represent(resource_access_tokens.reverse, group: resource)
-
- expect(assigns(:active_access_tokens).to_json).to eq(active_access_tokens.to_json)
- end
-
- it 'sets available scopes' do
- expect(assigns(:scopes)).not_to include(Gitlab::Auth::K8S_PROXY_SCOPE)
- end
- end
end
end
diff --git a/spec/requests/health_controller_spec.rb b/spec/requests/health_controller_spec.rb
index 639f6194af9..3ad1d8a75b4 100644
--- a/spec/requests/health_controller_spec.rb
+++ b/spec/requests/health_controller_spec.rb
@@ -73,7 +73,9 @@ RSpec.describe HealthController, feature_category: :database do
end
describe 'GET /-/readiness' do
- subject { get '/-/readiness', params: params, headers: headers }
+ subject(:request) { get readiness_path, params: params, headers: headers }
+
+ it_behaves_like 'Base action controller'
shared_context 'endpoint responding with readiness data' do
context 'when requesting instance-checks' do
diff --git a/spec/requests/jwt_controller_spec.rb b/spec/requests/jwt_controller_spec.rb
index 69127a7526e..965bead4068 100644
--- a/spec/requests/jwt_controller_spec.rb
+++ b/spec/requests/jwt_controller_spec.rb
@@ -61,7 +61,7 @@ RSpec.describe JwtController, feature_category: :system_access do
end
end
- context 'authenticating against container registry' do
+ shared_examples 'container registry authenticator' do
context 'existing service' do
subject! { get '/jwt/auth', params: parameters }
@@ -254,6 +254,40 @@ RSpec.describe JwtController, feature_category: :system_access do
end
end
+ shared_examples 'parses a space-delimited list of scopes' do |output|
+ let(:user) { create(:user) }
+ let(:headers) { { authorization: credentials(user.username, user.password) } }
+
+ subject! { get '/jwt/auth', params: parameters, headers: headers }
+
+ let(:parameters) do
+ {
+ service: service_name,
+ scope: 'scope1 scope2'
+ }
+ end
+
+ let(:service_parameters) do
+ ActionController::Parameters.new({ service: service_name, scopes: output }).permit!
+ end
+
+ it { expect(service_class).to have_received(:new).with(nil, user, service_parameters.merge(auth_type: :gitlab_or_ldap)) }
+ end
+
+ context 'authenticating against container registry' do
+ it_behaves_like 'container registry authenticator'
+ it_behaves_like 'parses a space-delimited list of scopes', %w[scope1 scope2]
+
+ context 'when jwt_auth_space_delimited_scopes feature flag is disabled' do
+ before do
+ stub_feature_flags(jwt_auth_space_delimited_scopes: false)
+ end
+
+ it_behaves_like 'container registry authenticator'
+ it_behaves_like 'parses a space-delimited list of scopes', ['scope1 scope2']
+ end
+ end
+
context 'authenticating against dependency proxy' do
let_it_be(:user) { create(:user) }
let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index b07296a0df2..bc1ba3357a4 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -355,19 +355,6 @@ RSpec.describe 'Git LFS API and storage', feature_category: :source_code_managem
expect(json_response['objects'].first['actions']['download']['href']).to start_with("https://lfs-objects.s3.amazonaws.com/")
expect(json_response['objects'].first['actions']['download']['href']).to include("X-Amz-Expires=3600&")
end
-
- context 'when feature flag "lfs_batch_direct_downloads" is "false"' do
- before do
- stub_feature_flags(lfs_batch_direct_downloads: false)
- end
-
- it_behaves_like 'LFS http 200 response'
-
- it 'does return proxied address URL' do
- expect(json_response['objects'].first).to include(sample_object)
- expect(json_response['objects'].first['actions']['download']['href']).to eq(objects_url(project, sample_oid))
- end
- end
end
end
diff --git a/spec/requests/metrics_controller_spec.rb b/spec/requests/metrics_controller_spec.rb
new file mode 100644
index 00000000000..ce96906e020
--- /dev/null
+++ b/spec/requests/metrics_controller_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MetricsController, type: :request, feature_category: :metrics do
+ it_behaves_like 'Base action controller' do
+ subject(:request) { get metrics_path }
+ end
+end
diff --git a/spec/requests/oauth/authorizations_controller_spec.rb b/spec/requests/oauth/authorizations_controller_spec.rb
index 257f238d9ef..7887bf52542 100644
--- a/spec/requests/oauth/authorizations_controller_spec.rb
+++ b/spec/requests/oauth/authorizations_controller_spec.rb
@@ -20,6 +20,10 @@ RSpec.describe Oauth::AuthorizationsController, feature_category: :system_access
end
describe 'GET #new' do
+ it_behaves_like 'Base action controller' do
+ subject(:request) { get oauth_authorization_path }
+ end
+
context 'when application redirect URI has a custom scheme' do
context 'when CSP is disabled' do
before do
diff --git a/spec/requests/oauth/tokens_controller_spec.rb b/spec/requests/oauth/tokens_controller_spec.rb
index 58203a81bac..aaacfce0ce8 100644
--- a/spec/requests/oauth/tokens_controller_spec.rb
+++ b/spec/requests/oauth/tokens_controller_spec.rb
@@ -3,73 +3,131 @@
require 'spec_helper'
RSpec.describe Oauth::TokensController, feature_category: :system_access do
- let(:cors_request_headers) { { 'Origin' => 'http://notgitlab.com' } }
- let(:other_headers) { {} }
- let(:headers) { cors_request_headers.merge(other_headers) }
- let(:allowed_methods) { 'POST, OPTIONS' }
- let(:authorization_methods) { %w[Authorization X-CSRF-Token X-Requested-With] }
-
- shared_examples 'cross-origin POST request' do
- it 'allows cross-origin requests' do
- expect(response.headers['Access-Control-Allow-Origin']).to eq '*'
- expect(response.headers['Access-Control-Allow-Methods']).to eq allowed_methods
- expect(response.headers['Access-Control-Allow-Headers']).to be_nil
- expect(response.headers['Access-Control-Allow-Credentials']).to be_nil
+ describe 'POST /oauth/token' do
+ context 'for resource owner password credential flow', :aggregate_failures do
+ let_it_be(:password) { User.random_password }
+
+ def authenticate(with_password)
+ post '/oauth/token', params: { grant_type: 'password', username: user.username, password: with_password }
+ end
+
+ context 'when user does not have two factor enabled' do
+ let_it_be(:user) { create(:user, password: password) }
+
+ it 'authenticates successfully' do
+ expect(::Gitlab::Auth).to receive(:find_with_user_password).and_call_original
+
+ authenticate(password)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(user.reload.failed_attempts).to eq(0)
+ end
+
+ it 'fails to authenticate and increments failed attempts when using the incorrect password' do
+ authenticate('incorrect_password')
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(user.reload.failed_attempts).to eq(1)
+ end
+ end
+
+ context 'when the user has two factor enabled' do
+ let_it_be(:user) { create(:user, :two_factor, password: password) }
+
+ it 'fails to authenticate and does not call GitLab::Auth even when using the correct password' do
+ expect(::Gitlab::Auth).not_to receive(:find_with_user_password)
+
+ authenticate(password)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(user.reload.failed_attempts).to eq(0)
+ end
+ end
+
+ context "when the user's password is automatically set" do
+ let_it_be(:user) { create(:user, password_automatically_set: true) }
+
+ it 'fails to authenticate and does not call GitLab::Auth even when using the correct password' do
+ expect(::Gitlab::Auth).not_to receive(:find_with_user_password)
+
+ authenticate(password)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(user.reload.failed_attempts).to eq(0)
+ end
+ end
end
end
- shared_examples 'CORS preflight OPTIONS request' do
- it 'returns 200' do
- expect(response).to have_gitlab_http_status(:ok)
+ context 'for CORS requests' do
+ let(:cors_request_headers) { { 'Origin' => 'http://notgitlab.com' } }
+ let(:other_headers) { {} }
+ let(:headers) { cors_request_headers.merge(other_headers) }
+ let(:allowed_methods) { 'POST, OPTIONS' }
+ let(:authorization_methods) { %w[Authorization X-CSRF-Token X-Requested-With] }
+
+ shared_examples 'cross-origin POST request' do
+ it 'allows cross-origin requests' do
+ expect(response.headers['Access-Control-Allow-Origin']).to eq '*'
+ expect(response.headers['Access-Control-Allow-Methods']).to eq allowed_methods
+ expect(response.headers['Access-Control-Allow-Headers']).to be_nil
+ expect(response.headers['Access-Control-Allow-Credentials']).to be_nil
+ end
end
- it 'allows cross-origin requests' do
- expect(response.headers['Access-Control-Allow-Origin']).to eq '*'
- expect(response.headers['Access-Control-Allow-Methods']).to eq allowed_methods
- expect(response.headers['Access-Control-Allow-Headers']).to eq authorization_methods
- expect(response.headers['Access-Control-Allow-Credentials']).to be_nil
+ shared_examples 'CORS preflight OPTIONS request' do
+ it 'returns 200' do
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'allows cross-origin requests' do
+ expect(response.headers['Access-Control-Allow-Origin']).to eq '*'
+ expect(response.headers['Access-Control-Allow-Methods']).to eq allowed_methods
+ expect(response.headers['Access-Control-Allow-Headers']).to eq authorization_methods
+ expect(response.headers['Access-Control-Allow-Credentials']).to be_nil
+ end
end
- end
- describe 'POST /oauth/token' do
- before do
- post '/oauth/token', headers: headers
+ describe 'POST /oauth/token' do
+ before do
+ post '/oauth/token', headers: headers
+ end
+
+ it_behaves_like 'cross-origin POST request'
end
- it_behaves_like 'cross-origin POST request'
- end
+ describe 'OPTIONS /oauth/token' do
+ let(:other_headers) { { 'Access-Control-Request-Headers' => authorization_methods, 'Access-Control-Request-Method' => 'POST' } }
- describe 'OPTIONS /oauth/token' do
- let(:other_headers) { { 'Access-Control-Request-Headers' => authorization_methods, 'Access-Control-Request-Method' => 'POST' } }
+ before do
+ options '/oauth/token', headers: headers
+ end
- before do
- options '/oauth/token', headers: headers
+ it_behaves_like 'CORS preflight OPTIONS request'
end
- it_behaves_like 'CORS preflight OPTIONS request'
- end
+ describe 'POST /oauth/revoke' do
+ let(:other_headers) { { 'Content-Type' => 'application/x-www-form-urlencoded' } }
- describe 'POST /oauth/revoke' do
- let(:other_headers) { { 'Content-Type' => 'application/x-www-form-urlencoded' } }
+ before do
+ post '/oauth/revoke', headers: headers, params: { token: '12345' }
+ end
- before do
- post '/oauth/revoke', headers: headers, params: { token: '12345' }
- end
+ it 'returns 200' do
+ expect(response).to have_gitlab_http_status(:ok)
+ end
- it 'returns 200' do
- expect(response).to have_gitlab_http_status(:ok)
+ it_behaves_like 'cross-origin POST request'
end
- it_behaves_like 'cross-origin POST request'
- end
+ describe 'OPTIONS /oauth/revoke' do
+ let(:other_headers) { { 'Access-Control-Request-Headers' => authorization_methods, 'Access-Control-Request-Method' => 'POST' } }
- describe 'OPTIONS /oauth/revoke' do
- let(:other_headers) { { 'Access-Control-Request-Headers' => authorization_methods, 'Access-Control-Request-Method' => 'POST' } }
+ before do
+ options '/oauth/revoke', headers: headers
+ end
- before do
- options '/oauth/revoke', headers: headers
+ it_behaves_like 'CORS preflight OPTIONS request'
end
-
- it_behaves_like 'CORS preflight OPTIONS request'
end
end
diff --git a/spec/requests/organizations/organizations_controller_spec.rb b/spec/requests/organizations/organizations_controller_spec.rb
index 953adb2cbf6..fdfeb367739 100644
--- a/spec/requests/organizations/organizations_controller_spec.rb
+++ b/spec/requests/organizations/organizations_controller_spec.rb
@@ -5,36 +5,6 @@ require 'spec_helper'
RSpec.describe Organizations::OrganizationsController, feature_category: :cell do
let_it_be(:organization) { create(:organization) }
- shared_examples 'successful response' do
- it 'renders 200 OK' do
- gitlab_request
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- shared_examples 'redirects to sign in page' do
- it 'redirects to sign in page' do
- gitlab_request
-
- expect(response).to redirect_to(new_user_session_path)
- end
- end
-
- shared_examples 'action disabled by `ui_for_organizations` feature flag' do
- context 'when `ui_for_organizations` feature flag is disabled' do
- before do
- stub_feature_flags(ui_for_organizations: false)
- end
-
- it 'renders 404' do
- gitlab_request
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-
shared_examples 'when the user is signed in' do
context 'when the user is signed in' do
before do
@@ -44,15 +14,15 @@ RSpec.describe Organizations::OrganizationsController, feature_category: :cell d
context 'with no association to an organization' do
let_it_be(:user) { create(:user) }
- it_behaves_like 'successful response'
- it_behaves_like 'action disabled by `ui_for_organizations` feature flag'
+ it_behaves_like 'organization - successful response'
+ it_behaves_like 'organization - action disabled by `ui_for_organizations` feature flag'
end
context 'as as admin', :enable_admin_mode do
let_it_be(:user) { create(:admin) }
- it_behaves_like 'successful response'
- it_behaves_like 'action disabled by `ui_for_organizations` feature flag'
+ it_behaves_like 'organization - successful response'
+ it_behaves_like 'organization - action disabled by `ui_for_organizations` feature flag'
end
context 'as an organization user' do
@@ -62,22 +32,22 @@ RSpec.describe Organizations::OrganizationsController, feature_category: :cell d
create :organization_user, organization: organization, user: user
end
- it_behaves_like 'successful response'
- it_behaves_like 'action disabled by `ui_for_organizations` feature flag'
+ it_behaves_like 'organization - successful response'
+ it_behaves_like 'organization - action disabled by `ui_for_organizations` feature flag'
end
end
end
shared_examples 'controller action that requires authentication' do
context 'when the user is not signed in' do
- it_behaves_like 'redirects to sign in page'
+ it_behaves_like 'organization - redirects to sign in page'
context 'when `ui_for_organizations` feature flag is disabled' do
before do
stub_feature_flags(ui_for_organizations: false)
end
- it_behaves_like 'redirects to sign in page'
+ it_behaves_like 'organization - redirects to sign in page'
end
end
@@ -86,8 +56,8 @@ RSpec.describe Organizations::OrganizationsController, feature_category: :cell d
shared_examples 'controller action that does not require authentication' do
context 'when the user is not logged in' do
- it_behaves_like 'successful response'
- it_behaves_like 'action disabled by `ui_for_organizations` feature flag'
+ it_behaves_like 'organization - successful response'
+ it_behaves_like 'organization - action disabled by `ui_for_organizations` feature flag'
end
it_behaves_like 'when the user is signed in'
diff --git a/spec/requests/organizations/settings_controller_spec.rb b/spec/requests/organizations/settings_controller_spec.rb
new file mode 100644
index 00000000000..77048b04b0c
--- /dev/null
+++ b/spec/requests/organizations/settings_controller_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Organizations::SettingsController, feature_category: :cell do
+ let_it_be(:organization) { create(:organization) }
+
+ describe 'GET #general' do
+ subject(:gitlab_request) { get general_settings_organization_path(organization) }
+
+ context 'when the user is not signed in' do
+ it_behaves_like 'organization - redirects to sign in page'
+
+ context 'when `ui_for_organizations` feature flag is disabled' do
+ before do
+ stub_feature_flags(ui_for_organizations: false)
+ end
+
+ it_behaves_like 'organization - redirects to sign in page'
+ end
+ end
+
+ context 'when the user is signed in' do
+ before do
+ sign_in(user)
+ end
+
+ context 'with no association to an organization' do
+ let_it_be(:user) { create(:user) }
+
+ it_behaves_like 'organization - not found response'
+ it_behaves_like 'organization - action disabled by `ui_for_organizations` feature flag'
+ end
+
+ context 'as as admin', :enable_admin_mode do
+ let_it_be(:user) { create(:admin) }
+
+ it_behaves_like 'organization - successful response'
+ it_behaves_like 'organization - action disabled by `ui_for_organizations` feature flag'
+ end
+
+ context 'as an organization user' do
+ let_it_be(:user) { create :user }
+
+ before do
+ create :organization_user, organization: organization, user: user
+ end
+
+ it_behaves_like 'organization - not found response'
+ it_behaves_like 'organization - action disabled by `ui_for_organizations` feature flag'
+ end
+ end
+ end
+end
diff --git a/spec/requests/projects/issue_links_controller_spec.rb b/spec/requests/projects/issue_links_controller_spec.rb
index c242f762cde..ea73b733285 100644
--- a/spec/requests/projects/issue_links_controller_spec.rb
+++ b/spec/requests/projects/issue_links_controller_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe Projects::IssueLinksController, feature_category: :team_planning
expect(json_response.count).to eq(1)
expect(json_response.first).to include(
- 'path' => project_work_items_path(issue_b.project, issue_b.iid),
+ 'path' => project_work_item_path(issue_b.project, issue_b.iid),
'type' => 'TASK'
)
end
diff --git a/spec/requests/projects/issues_controller_spec.rb b/spec/requests/projects/issues_controller_spec.rb
index 1ae65939c86..6c9b1966149 100644
--- a/spec/requests/projects/issues_controller_spec.rb
+++ b/spec/requests/projects/issues_controller_spec.rb
@@ -14,35 +14,12 @@ RSpec.describe Projects::IssuesController, feature_category: :team_planning do
let_it_be(:user) { create(:user) }
end
- describe 'GET #new' do
- include_context 'group project issue'
-
- before do
- group.add_developer(user)
- login_as(user)
- end
-
- it_behaves_like "observability csp policy", described_class do
- let(:tested_path) do
- new_project_issue_path(project)
- end
- end
- end
-
describe 'GET #show' do
before do
group.add_developer(user)
login_as(user)
end
- it_behaves_like "observability csp policy", described_class do
- include_context 'group project issue'
-
- let(:tested_path) do
- project_issue_path(project, issue)
- end
- end
-
describe 'incident tabs' do
let_it_be(:incident) { create(:incident, project: project) }
diff --git a/spec/requests/projects/merge_requests/creations_spec.rb b/spec/requests/projects/merge_requests/creations_spec.rb
index 8f55aa90bee..41246d419a1 100644
--- a/spec/requests/projects/merge_requests/creations_spec.rb
+++ b/spec/requests/projects/merge_requests/creations_spec.rb
@@ -76,17 +76,5 @@ RSpec.describe 'merge requests creations', feature_category: :code_review_workfl
end
end
end
-
- it_behaves_like "observability csp policy", Projects::MergeRequests::CreationsController do
- let(:tested_path) do
- project_new_merge_request_path(project, merge_request: {
- title: 'Some feature',
- source_branch: 'fix',
- target_branch: 'feature',
- target_project: project,
- source_project: project
- })
- end
- end
end
end
diff --git a/spec/requests/projects/merge_requests_controller_spec.rb b/spec/requests/projects/merge_requests_controller_spec.rb
index e6a281d8d59..4af8f4fac7f 100644
--- a/spec/requests/projects/merge_requests_controller_spec.rb
+++ b/spec/requests/projects/merge_requests_controller_spec.rb
@@ -14,19 +14,6 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :source_code
let(:merge_request) { create :merge_request, source_project: project, author: user }
- context 'when logged in' do
- before do
- group.add_developer(user)
- login_as(user)
- end
-
- it_behaves_like "observability csp policy", described_class do
- let(:tested_path) do
- project_merge_request_path(project, merge_request)
- end
- end
- end
-
context 'when the author of the merge request is banned', feature_category: :insider_threat do
let_it_be(:user) { create(:user, :banned) }
@@ -161,6 +148,62 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :source_code
expect(Gitlab::Json.parse(response.body)['count']['all']).to eq(2)
end
+ context 'when there are pipelines with failed builds' do
+ before do
+ pipeline = create_pipeline
+
+ create(:ci_build, :failed, pipeline: pipeline)
+ create(:ci_build, :failed, pipeline: pipeline)
+ end
+
+ it 'returns the failed build count but not the failed builds' do
+ get pipelines_project_merge_request_path(project, merge_request, format: :json)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(Gitlab::Json.parse(response.body)['pipelines'].size).to eq(1)
+ expect(Gitlab::Json.parse(response.body)['pipelines'][0]['failed_builds_count']).to eq(2)
+ expect(Gitlab::Json.parse(response.body)['pipelines'][0]).not_to have_key('failed_builds')
+ end
+
+ it 'avoids N+1 queries', :use_sql_query_cache do
+ # warm up
+ get pipelines_project_merge_request_path(project, merge_request, format: :json)
+
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ get pipelines_project_merge_request_path(project, merge_request, format: :json)
+ end
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(Gitlab::Json.parse(response.body)['count']['all']).to eq(1)
+
+ pipeline_2 = create_pipeline
+ create(:ci_build, :failed, pipeline: pipeline_2)
+ create(:ci_build, :failed, pipeline: pipeline_2)
+
+ expect do
+ get pipelines_project_merge_request_path(project, merge_request, format: :json)
+ end.to issue_same_number_of_queries_as(control)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(Gitlab::Json.parse(response.body)['count']['all']).to eq(2)
+ end
+
+ context 'when the FF ci_fix_performance_pipelines_json_endpoint is disabled' do
+ before do
+ stub_feature_flags(ci_fix_performance_pipelines_json_endpoint: false)
+ end
+
+ it 'returns the failed builds' do
+ get pipelines_project_merge_request_path(project, merge_request, format: :json)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(Gitlab::Json.parse(response.body)['pipelines'].size).to eq(1)
+ expect(Gitlab::Json.parse(response.body)['pipelines'][0]['failed_builds_count']).to eq(2)
+ expect(Gitlab::Json.parse(response.body)['pipelines'][0]['failed_builds'].size).to eq(2)
+ end
+ end
+ end
+
private
def create_pipeline
diff --git a/spec/requests/projects/ml/models_controller_spec.rb b/spec/requests/projects/ml/models_controller_spec.rb
index 8569f2396d3..b4402ad9a27 100644
--- a/spec/requests/projects/ml/models_controller_spec.rb
+++ b/spec/requests/projects/ml/models_controller_spec.rb
@@ -3,13 +3,15 @@
require 'spec_helper'
RSpec.describe Projects::Ml::ModelsController, feature_category: :mlops do
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project) }
let_it_be(:user) { project.first_owner }
let_it_be(:model1) { create(:ml_models, :with_versions, project: project) }
let_it_be(:model2) { create(:ml_models, project: project) }
+ let_it_be(:model3) { create(:ml_models, project: project) }
let_it_be(:model_in_different_project) { create(:ml_models) }
let(:model_registry_enabled) { true }
+ let(:params) { {} }
before do
allow(Ability).to receive(:allowed?).and_call_original
@@ -39,7 +41,7 @@ RSpec.describe Projects::Ml::ModelsController, feature_category: :mlops do
it 'fetches the correct models' do
index_request
- expect(assigns(:models)).to match_array([model1, model2])
+ expect(assigns(:paginator).records).to match_array([model3, model2, model1])
end
it 'does not perform N+1 sql queries' do
@@ -58,11 +60,75 @@ RSpec.describe Projects::Ml::ModelsController, feature_category: :mlops do
is_expected.to have_gitlab_http_status(:not_found)
end
end
+
+ describe 'pagination' do
+ before do
+ stub_const("Projects::Ml::ModelsController::MAX_MODELS_PER_PAGE", 2)
+ end
+
+ it 'paginates', :aggregate_failures do
+ list_models
+
+ paginator = assigns(:paginator)
+
+ expect(paginator.records).to match_array([model3, model2])
+
+ list_models({ cursor: paginator.cursor_for_next_page })
+
+ expect(assigns(:paginator).records.first).to eq(model1)
+ end
+ end
+ end
+
+ describe 'show' do
+ let(:model_id) { model1.id }
+ let(:request_project) { model1.project }
+
+ subject(:show_request) do
+ show_model
+ response
+ end
+
+ before do
+ show_request
+ end
+
+ it 'renders the template' do
+ is_expected.to render_template('projects/ml/models/show')
+ end
+
+ it 'fetches the correct model' do
+ show_request
+
+ expect(assigns(:model)).to eq(model1)
+ end
+
+ context 'when model id does not exist' do
+ let(:model_id) { non_existing_record_id }
+
+ it { is_expected.to have_gitlab_http_status(:not_found) }
+ end
+
+ context 'when model project does not match project id' do
+ let(:request_project) { model_in_different_project.project }
+
+ it { is_expected.to have_gitlab_http_status(:not_found) }
+ end
+
+ context 'when user does not have access' do
+ let(:model_registry_enabled) { false }
+
+ it { is_expected.to have_gitlab_http_status(:not_found) }
+ end
end
private
- def list_models
- get project_ml_models_path(project)
+ def list_models(new_params = nil)
+ get project_ml_models_path(project), params: new_params || params
+ end
+
+ def show_model
+ get project_ml_model_path(request_project, model_id)
end
end
diff --git a/spec/requests/projects/settings/access_tokens_controller_spec.rb b/spec/requests/projects/settings/access_tokens_controller_spec.rb
index b4cfa964ac8..8e0d3fed3d3 100644
--- a/spec/requests/projects/settings/access_tokens_controller_spec.rb
+++ b/spec/requests/projects/settings/access_tokens_controller_spec.rb
@@ -117,23 +117,5 @@ RSpec.describe Projects::Settings::AccessTokensController, feature_category: :sy
it 'sets available scopes' do
expect(assigns(:scopes)).to include(Gitlab::Auth::K8S_PROXY_SCOPE)
end
-
- context 'with feature flag k8s_proxy_pat disabled' do
- before do
- stub_feature_flags(k8s_proxy_pat: false)
- get project_settings_access_tokens_path(resource)
- end
-
- it 'includes details of the active project access tokens' do
- active_access_tokens =
- ::ProjectAccessTokenSerializer.new.represent(resource_access_tokens.reverse, project: resource)
-
- expect(assigns(:active_access_tokens).to_json).to eq(active_access_tokens.to_json)
- end
-
- it 'sets available scopes' do
- expect(assigns(:scopes)).not_to include(Gitlab::Auth::K8S_PROXY_SCOPE)
- end
- end
end
end
diff --git a/spec/requests/projects/work_items_spec.rb b/spec/requests/projects/work_items_spec.rb
index ee9a0ff0a4e..5e9d340de64 100644
--- a/spec/requests/projects/work_items_spec.rb
+++ b/spec/requests/projects/work_items_spec.rb
@@ -40,8 +40,8 @@ RSpec.describe 'Work Items', feature_category: :team_planning do
sign_in(current_user)
end
- it 'renders index' do
- get project_work_items_url(work_item.project, work_items_path: work_item.iid)
+ it 'renders show' do
+ get project_work_item_url(work_item.project, work_item.iid)
expect(response).to have_gitlab_http_status(:ok)
end
diff --git a/spec/requests/registrations_controller_spec.rb b/spec/requests/registrations_controller_spec.rb
index 8b857046a4d..71f2f347f0d 100644
--- a/spec/requests/registrations_controller_spec.rb
+++ b/spec/requests/registrations_controller_spec.rb
@@ -6,7 +6,9 @@ RSpec.describe RegistrationsController, type: :request, feature_category: :syste
describe 'POST #create' do
let_it_be(:user_attrs) { build_stubbed(:user).slice(:first_name, :last_name, :username, :email, :password) }
- subject(:create_user) { post user_registration_path, params: { user: user_attrs } }
+ subject(:request) { post user_registration_path, params: { user: user_attrs } }
+
+ it_behaves_like 'Base action controller'
context 'when email confirmation is required' do
before do
@@ -15,7 +17,7 @@ RSpec.describe RegistrationsController, type: :request, feature_category: :syste
end
it 'redirects to the `users_almost_there_path`', unless: Gitlab.ee? do
- create_user
+ request
expect(response).to redirect_to(users_almost_there_path(email: user_attrs[:email]))
end
diff --git a/spec/requests/sessions_spec.rb b/spec/requests/sessions_spec.rb
index 3428e607305..1a925969c5a 100644
--- a/spec/requests/sessions_spec.rb
+++ b/spec/requests/sessions_spec.rb
@@ -7,6 +7,10 @@ RSpec.describe 'Sessions', feature_category: :system_access do
let(:user) { create(:user) }
+ it_behaves_like 'Base action controller' do
+ subject(:request) { get new_user_session_path }
+ end
+
context 'for authentication', :allow_forgery_protection do
it 'logout does not require a csrf token' do
login_as(user)
diff --git a/spec/requests/users/namespace_visits_controller_spec.rb b/spec/requests/users/namespace_visits_controller_spec.rb
index eeeffcce67d..899d84192c6 100644
--- a/spec/requests/users/namespace_visits_controller_spec.rb
+++ b/spec/requests/users/namespace_visits_controller_spec.rb
@@ -19,23 +19,11 @@ RSpec.describe Users::NamespaceVisitsController, type: :request, feature_categor
context "when user is signed-in" do
let_it_be(:user) { create(:user) }
- let(:server_side_frecent_namespaces) { true }
before do
- stub_feature_flags(server_side_frecent_namespaces: server_side_frecent_namespaces)
sign_in(user)
end
- context "when the server_side_frecent_namespaces feature flag is disabled" do
- let(:server_side_frecent_namespaces) { false }
-
- it 'throws an error 302' do
- subject
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
context "when entity type is not provided" do
let_it_be(:request_params) { { id: '1' } }
diff --git a/spec/requests/users_controller_spec.rb b/spec/requests/users_controller_spec.rb
index f96d7864782..d4e7dc1542a 100644
--- a/spec/requests/users_controller_spec.rb
+++ b/spec/requests/users_controller_spec.rb
@@ -528,7 +528,7 @@ RSpec.describe UsersController, feature_category: :user_management do
get user_calendar_activities_url public_user.username
- expect(response.body).to include(project_work_items_path(project, work_item.iid))
+ expect(response.body).to include(project_work_item_path(project, work_item.iid))
expect(response.body).to include(project_issue_path(project, issue))
end
diff --git a/spec/routing/environments_spec.rb b/spec/routing/environments_spec.rb
index 5ba02c384e2..08353390483 100644
--- a/spec/routing/environments_spec.rb
+++ b/spec/routing/environments_spec.rb
@@ -6,8 +6,7 @@ RSpec.describe 'environments routing' do
let(:project) { create(:project) }
let(:environment) do
- create(:environment, project: project,
- name: 'staging-1.0/review')
+ create(:environment, project: project, name: 'staging-1.0/review')
end
let(:environments_route) do
@@ -42,8 +41,7 @@ RSpec.describe 'environments routing' do
end
def folder_action(**opts)
- options = { namespace_id: project.namespace.path,
- project_id: project.path }
+ options = { namespace_id: project.namespace.path, project_id: project.path }
['projects/environments#folder', options.merge(opts)]
end
diff --git a/spec/routing/group_routing_spec.rb b/spec/routing/group_routing_spec.rb
index 3ba7d5ad871..ba50e8eccc1 100644
--- a/spec/routing/group_routing_spec.rb
+++ b/spec/routing/group_routing_spec.rb
@@ -49,9 +49,7 @@ RSpec.shared_examples 'groups routing' do
it 'routes to the avatars controller' do
expect(delete("/groups/#{group_path}/-/avatar"))
- .to route_to(group_id: group_path,
- controller: 'groups/avatars',
- action: 'destroy')
+ .to route_to(group_id: group_path, controller: 'groups/avatars', action: 'destroy')
end
it 'routes to the boards controller' do
@@ -72,14 +70,6 @@ RSpec.shared_examples 'groups routing' do
expect(get("groups/#{group_path}/-/harbor/repositories/test/artifacts/test/tags")).to route_to('groups/harbor/tags#index', group_id: group_path, repository_id: 'test', artifact_id: 'test')
end
- it 'routes to the observability controller explore method' do
- expect(get("groups/#{group_path}/-/observability/explore")).to route_to('groups/observability#explore', group_id: group_path)
- end
-
- it 'routes to the observability controller datasources method' do
- expect(get("groups/#{group_path}/-/observability/datasources")).to route_to('groups/observability#datasources', group_id: group_path)
- end
-
it 'routes to the usage quotas controller' do
expect(get("groups/#{group_path}/-/usage_quotas")).to route_to("groups/usage_quotas#index", group_id: group_path)
end
diff --git a/spec/routing/organizations/settings_controller_routing_spec.rb b/spec/routing/organizations/settings_controller_routing_spec.rb
new file mode 100644
index 00000000000..99d482f3d47
--- /dev/null
+++ b/spec/routing/organizations/settings_controller_routing_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Organizations::SettingsController, :routing, feature_category: :cell do
+ let_it_be(:organization) { build(:organization) }
+
+ it 'routes to settings#general' do
+ expect(get("/-/organizations/#{organization.path}/settings/general"))
+ .to route_to('organizations/settings#general', organization_path: organization.path)
+ end
+end
diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb
index abc42d11c63..82d58d12607 100644
--- a/spec/routing/project_routing_spec.rb
+++ b/spec/routing/project_routing_spec.rb
@@ -211,11 +211,20 @@ RSpec.describe 'project routing' do
expect(get('/gitlab/gitlabhq/-/refs/feature%2B45/logs_tree/foo/bar/baz')).to route_to('projects/refs#logs_tree', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'feature+45', path: 'foo/bar/baz')
expect(get('/gitlab/gitlabhq/-/refs/feature@45/logs_tree/foo/bar/baz')).to route_to('projects/refs#logs_tree', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'feature@45', path: 'foo/bar/baz')
expect(get('/gitlab/gitlabhq/-/refs/stable/logs_tree/files.scss')).to route_to('projects/refs#logs_tree', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'stable', path: 'files.scss')
- assert_routing({ path: "/gitlab/gitlabhq/-/refs/stable/logs_tree/new%0A%0Aline.txt",
- method: :get },
- { controller: 'projects/refs', action: 'logs_tree',
- namespace_id: 'gitlab', project_id: 'gitlabhq',
- id: "stable", path: "new\n\nline.txt" })
+ assert_routing(
+ {
+ path: "/gitlab/gitlabhq/-/refs/stable/logs_tree/new%0A%0Aline.txt",
+ method: :get
+ },
+ {
+ controller: 'projects/refs',
+ action: 'logs_tree',
+ namespace_id: 'gitlab',
+ project_id: 'gitlabhq',
+ id: "stable",
+ path: "new\n\nline.txt"
+ }
+ )
end
it_behaves_like 'redirecting a legacy path', '/gitlab/gitlabhq/refs/switch', '/gitlab/gitlabhq/-/refs/switch'
@@ -498,11 +507,19 @@ RSpec.describe 'project routing' do
expect(get('/gitlab/gitlabhq/-/blame/master/files.scss')).to route_to('projects/blame#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/files.scss')
newline_file = "new\n\nline.txt"
url_encoded_newline_file = ERB::Util.url_encode(newline_file)
- assert_routing({ path: "/gitlab/gitlabhq/-/blame/master/#{url_encoded_newline_file}",
- method: :get },
- { controller: 'projects/blame', action: 'show',
- namespace_id: 'gitlab', project_id: 'gitlabhq',
- id: "master/#{newline_file}" })
+ assert_routing(
+ {
+ path: "/gitlab/gitlabhq/-/blame/master/#{url_encoded_newline_file}",
+ method: :get
+ },
+ {
+ controller: 'projects/blame',
+ action: 'show',
+ namespace_id: 'gitlab',
+ project_id: 'gitlabhq',
+ id: "master/#{newline_file}"
+ }
+ )
end
it 'to #streaming' do
@@ -525,11 +542,19 @@ RSpec.describe 'project routing' do
expect(get('/gitlab/gitlabhq/-/blob/blob/master/blob/index.js')).to route_to('projects/blob#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'blob/master/blob/index.js')
newline_file = "new\n\nline.txt"
url_encoded_newline_file = ERB::Util.url_encode(newline_file)
- assert_routing({ path: "/gitlab/gitlabhq/-/blob/blob/master/blob/#{url_encoded_newline_file}",
- method: :get },
- { controller: 'projects/blob', action: 'show',
- namespace_id: 'gitlab', project_id: 'gitlabhq',
- id: "blob/master/blob/#{newline_file}" })
+ assert_routing(
+ {
+ path: "/gitlab/gitlabhq/-/blob/blob/master/blob/#{url_encoded_newline_file}",
+ method: :get
+ },
+ {
+ controller: 'projects/blob',
+ action: 'show',
+ namespace_id: 'gitlab',
+ project_id: 'gitlabhq',
+ id: "blob/master/blob/#{newline_file}"
+ }
+ )
end
it 'to #show from unscoped routing' do
@@ -546,11 +571,19 @@ RSpec.describe 'project routing' do
expect(get('/gitlab/gitlabhq/-/tree/tree/master/tree/files')).to route_to('projects/tree#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'tree/master/tree/files')
newline_file = "new\n\nline.txt"
url_encoded_newline_file = ERB::Util.url_encode(newline_file)
- assert_routing({ path: "/gitlab/gitlabhq/-/tree/master/#{url_encoded_newline_file}",
- method: :get },
- { controller: 'projects/tree', action: 'show',
- namespace_id: 'gitlab', project_id: 'gitlabhq',
- id: "master/#{newline_file}" })
+ assert_routing(
+ {
+ path: "/gitlab/gitlabhq/-/tree/master/#{url_encoded_newline_file}",
+ method: :get
+ },
+ {
+ controller: 'projects/tree',
+ action: 'show',
+ namespace_id: 'gitlab',
+ project_id: 'gitlabhq',
+ id: "master/#{newline_file}"
+ }
+ )
end
it 'to #show from unscoped routing' do
@@ -566,22 +599,38 @@ RSpec.describe 'project routing' do
expect(get('/gitlab/gitlabhq/-/find_file/master')).to route_to('projects/find_file#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master')
newline_file = "new\n\nline.txt"
url_encoded_newline_file = ERB::Util.url_encode(newline_file)
- assert_routing({ path: "/gitlab/gitlabhq/-/find_file/#{url_encoded_newline_file}",
- method: :get },
- { controller: 'projects/find_file', action: 'show',
- namespace_id: 'gitlab', project_id: 'gitlabhq',
- id: newline_file.to_s })
+ assert_routing(
+ {
+ path: "/gitlab/gitlabhq/-/find_file/#{url_encoded_newline_file}",
+ method: :get
+ },
+ {
+ controller: 'projects/find_file',
+ action: 'show',
+ namespace_id: 'gitlab',
+ project_id: 'gitlabhq',
+ id: newline_file.to_s
+ }
+ )
end
it 'to #list' do
expect(get('/gitlab/gitlabhq/-/files/master.json')).to route_to('projects/find_file#list', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master.json')
newline_file = "new\n\nline.txt"
url_encoded_newline_file = ERB::Util.url_encode(newline_file)
- assert_routing({ path: "/gitlab/gitlabhq/-/files/#{url_encoded_newline_file}",
- method: :get },
- { controller: 'projects/find_file', action: 'list',
- namespace_id: 'gitlab', project_id: 'gitlabhq',
- id: newline_file.to_s })
+ assert_routing(
+ {
+ path: "/gitlab/gitlabhq/-/files/#{url_encoded_newline_file}",
+ method: :get
+ },
+ {
+ controller: 'projects/find_file',
+ action: 'list',
+ namespace_id: 'gitlab',
+ project_id: 'gitlabhq',
+ id: newline_file.to_s
+ }
+ )
end
it_behaves_like 'redirecting a legacy path', "/gitlab/gitlabhq/find_file", "/gitlab/gitlabhq/-/find_file"
@@ -591,30 +640,44 @@ RSpec.describe 'project routing' do
describe Projects::BlobController, 'routing' do
it 'to #edit' do
expect(get('/gitlab/gitlabhq/-/edit/master/app/models/project.rb')).to(
- route_to('projects/blob#edit',
- namespace_id: 'gitlab', project_id: 'gitlabhq',
- id: 'master/app/models/project.rb'))
+ route_to('projects/blob#edit', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/app/models/project.rb')
+ )
newline_file = "new\n\nline.txt"
url_encoded_newline_file = ERB::Util.url_encode(newline_file)
- assert_routing({ path: "/gitlab/gitlabhq/-/edit/master/docs/#{url_encoded_newline_file}",
- method: :get },
- { controller: 'projects/blob', action: 'edit',
- namespace_id: 'gitlab', project_id: 'gitlabhq',
- id: "master/docs/#{newline_file}" })
+ assert_routing(
+ {
+ path: "/gitlab/gitlabhq/-/edit/master/docs/#{url_encoded_newline_file}",
+ method: :get
+ },
+ {
+ controller: 'projects/blob',
+ action: 'edit',
+ namespace_id: 'gitlab',
+ project_id: 'gitlabhq',
+ id: "master/docs/#{newline_file}"
+ }
+ )
end
it 'to #preview' do
expect(post('/gitlab/gitlabhq/-/preview/master/app/models/project.rb')).to(
- route_to('projects/blob#preview',
- namespace_id: 'gitlab', project_id: 'gitlabhq',
- id: 'master/app/models/project.rb'))
+ route_to('projects/blob#preview', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/app/models/project.rb')
+ )
newline_file = "new\n\nline.txt"
url_encoded_newline_file = ERB::Util.url_encode(newline_file)
- assert_routing({ path: "/gitlab/gitlabhq/-/edit/master/docs/#{url_encoded_newline_file}",
- method: :get },
- { controller: 'projects/blob', action: 'edit',
- namespace_id: 'gitlab', project_id: 'gitlabhq',
- id: "master/docs/#{newline_file}" })
+ assert_routing(
+ {
+ path: "/gitlab/gitlabhq/-/edit/master/docs/#{url_encoded_newline_file}",
+ method: :get
+ },
+ {
+ controller: 'projects/blob',
+ action: 'edit',
+ namespace_id: 'gitlab',
+ project_id: 'gitlabhq',
+ id: "master/docs/#{newline_file}"
+ }
+ )
end
it_behaves_like 'redirecting a legacy path', "/gitlab/gitlabhq/new/master", "/gitlab/gitlabhq/-/new/master"
@@ -626,11 +689,19 @@ RSpec.describe 'project routing' do
it 'to #show' do
newline_file = "new\n\nline.txt"
url_encoded_newline_file = ERB::Util.url_encode(newline_file)
- assert_routing({ path: "/gitlab/gitlabhq/-/raw/master/#{url_encoded_newline_file}",
- method: :get },
- { controller: 'projects/raw', action: 'show',
- namespace_id: 'gitlab', project_id: 'gitlabhq',
- id: "master/#{newline_file}" })
+ assert_routing(
+ {
+ path: "/gitlab/gitlabhq/-/raw/master/#{url_encoded_newline_file}",
+ method: :get
+ },
+ {
+ controller: 'projects/raw',
+ action: 'show',
+ namespace_id: 'gitlab',
+ project_id: 'gitlabhq',
+ id: "master/#{newline_file}"
+ }
+ )
end
it 'to #show from unscoped routing' do
@@ -743,20 +814,24 @@ RSpec.describe 'project routing' do
describe '#destroy' do
it 'correctly routes to a destroy action' do
expect(delete('/gitlab/gitlabhq/registry/repository/1/tags/rc1'))
- .to route_to('projects/registry/tags#destroy',
- namespace_id: 'gitlab',
- project_id: 'gitlabhq',
- repository_id: '1',
- id: 'rc1')
+ .to route_to(
+ 'projects/registry/tags#destroy',
+ namespace_id: 'gitlab',
+ project_id: 'gitlabhq',
+ repository_id: '1',
+ id: 'rc1'
+ )
end
it 'takes registry tag name constrains into account' do
expect(delete('/gitlab/gitlabhq/registry/repository/1/tags/-rc1'))
- .not_to route_to('projects/registry/tags#destroy',
- namespace_id: 'gitlab',
- project_id: 'gitlabhq',
- repository_id: '1',
- id: '-rc1')
+ .not_to route_to(
+ 'projects/registry/tags#destroy',
+ namespace_id: 'gitlab',
+ project_id: 'gitlabhq',
+ repository_id: '1',
+ id: '-rc1'
+ )
end
end
end
diff --git a/spec/routing/routing_spec.rb b/spec/routing/routing_spec.rb
index a59ca5211ed..7c4f040266e 100644
--- a/spec/routing/routing_spec.rb
+++ b/spec/routing/routing_spec.rb
@@ -113,14 +113,10 @@ RSpec.describe HelpController, "routing" do
it 'to #show' do
path = '/help/user/markdown.md'
- expect(get(path)).to route_to('help#show',
- path: 'user/markdown',
- format: 'md')
+ expect(get(path)).to route_to('help#show', path: 'user/markdown', format: 'md')
path = '/help/user/markdown/markdown_logo.png'
- expect(get(path)).to route_to('help#show',
- path: 'user/markdown/markdown_logo',
- format: 'png')
+ expect(get(path)).to route_to('help#show', path: 'user/markdown/markdown_logo', format: 'png')
end
end
diff --git a/spec/rubocop/batched_background_migrations_spec.rb b/spec/rubocop/batched_background_migrations_spec.rb
new file mode 100644
index 00000000000..a9b99bb466b
--- /dev/null
+++ b/spec/rubocop/batched_background_migrations_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+
+require_relative '../../rubocop/batched_background_migrations'
+
+RSpec.describe RuboCop::BatchedBackgroundMigrations, feature_category: :database do
+ let(:bbm_dictionary_file_name) { "#{described_class::DICTIONARY_BASE_DIR}/test_migration.yml" }
+ let(:migration_version) { 20230307160250 }
+ let(:finalized_by_version) { 20230307160255 }
+ let(:bbm_dictionary_data) do
+ {
+ migration_job_name: 'TestMigration',
+ feature_category: :database,
+ introduced_by_url: 'https://test_url',
+ milestone: 16.5,
+ queued_migration_version: migration_version,
+ finalized_by: finalized_by_version
+ }
+ end
+
+ before do
+ File.open(bbm_dictionary_file_name, 'w') do |file|
+ file.write(bbm_dictionary_data.stringify_keys.to_yaml)
+ end
+ end
+
+ after do
+ FileUtils.rm(bbm_dictionary_file_name)
+ end
+
+ subject(:batched_background_migration) { described_class.new(migration_version) }
+
+ describe '#finalized_by' do
+ it 'returns the finalized_by version of the bbm with given version' do
+ expect(batched_background_migration.finalized_by).to eq(finalized_by_version.to_s)
+ end
+
+ it 'returns nothing for non-existing bbm dictionary' do
+ expect(described_class.new('random').finalized_by).to be_nil
+ end
+ end
+end
diff --git a/spec/rubocop/check_graceful_task_spec.rb b/spec/rubocop/check_graceful_task_spec.rb
index 38c2d68a593..aa66643dd8e 100644
--- a/spec/rubocop/check_graceful_task_spec.rb
+++ b/spec/rubocop/check_graceful_task_spec.rb
@@ -1,14 +1,9 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
-require 'stringio'
-
-require_relative '../support/helpers/next_instance_of'
+require 'rubocop_spec_helper'
require_relative '../../rubocop/check_graceful_task'
RSpec.describe RuboCop::CheckGracefulTask do
- include NextInstanceOf
-
let(:output) { StringIO.new }
subject(:task) { described_class.new(output) }
@@ -119,9 +114,9 @@ RSpec.describe RuboCop::CheckGracefulTask do
end
context 'with args' do
- let(:args) { %w[a.rb Lint/EmptyFile b.rb Lint/Syntax] }
+ let(:args) { %w[Lint/EmptyFile Lint/Syntax] }
- it_behaves_like 'rubocop scan', rubocop_args: %w[--only Lint/EmptyFile,Lint/Syntax a.rb b.rb]
+ it_behaves_like 'rubocop scan', rubocop_args: %w[--only Lint/EmptyFile,Lint/Syntax]
it 'does not notify slack' do
expect(Gitlab::Popen).not_to receive(:popen)
diff --git a/spec/rubocop/cop/background_migration/feature_category_spec.rb b/spec/rubocop/cop/background_migration/feature_category_spec.rb
index 1d1b6cfad5a..12794de4f38 100644
--- a/spec/rubocop/cop/background_migration/feature_category_spec.rb
+++ b/spec/rubocop/cop/background_migration/feature_category_spec.rb
@@ -66,4 +66,10 @@ RSpec.describe RuboCop::Cop::BackgroundMigration::FeatureCategory, feature_categ
RUBY
end
end
+
+ describe '#external_dependency_checksum' do
+ it 'returns a SHA256 digest used by RuboCop to invalid cache' do
+ expect(cop.external_dependency_checksum).to match(/^\h{64}$/)
+ end
+ end
end
diff --git a/spec/rubocop/cop/experiments_test_coverage_spec.rb b/spec/rubocop/cop/experiments_test_coverage_spec.rb
index eb1e672ef40..8221d0d6720 100644
--- a/spec/rubocop/cop/experiments_test_coverage_spec.rb
+++ b/spec/rubocop/cop/experiments_test_coverage_spec.rb
@@ -4,7 +4,7 @@ require 'rubocop_spec_helper'
require_relative '../../../rubocop/cop/experiments_test_coverage'
-RSpec.describe RuboCop::Cop::ExperimentsTestCoverage, feature_category: :experimentation_conversion do
+RSpec.describe RuboCop::Cop::ExperimentsTestCoverage, feature_category: :acquisition do
let(:class_offense) { described_class::CLASS_OFFENSE }
let(:block_offense) { described_class::BLOCK_OFFENSE }
diff --git a/spec/rubocop/cop/gemfile/missing_feature_category_spec.rb b/spec/rubocop/cop/gemfile/missing_feature_category_spec.rb
new file mode 100644
index 00000000000..5f8e32f0c03
--- /dev/null
+++ b/spec/rubocop/cop/gemfile/missing_feature_category_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+
+require_relative '../../../../rubocop/cop/gemfile/missing_feature_category'
+
+RSpec.describe RuboCop::Cop::Gemfile::MissingFeatureCategory, feature_category: :tooling do
+ let(:valid_category) { RuboCop::FeatureCategories.available.first }
+ let(:invalid_category) { :invalid_category }
+
+ it 'flags missing feature category in gem method without keyword argument' do
+ expect_offense(<<~RUBY)
+ gem 'foo', '~> 1.0'
+ ^^^^^^^^^^^^^^^^^^^ Please use a valid feature category. See https://docs.gitlab.com/ee/development/feature_categorization/#gemfile
+ RUBY
+ end
+
+ it 'flags missing feature category in gem method with keyword argument' do
+ expect_offense(<<~RUBY)
+ gem 'foo', '~> 1.0', require: false
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Please use a valid feature category. See https://docs.gitlab.com/ee/development/feature_categorization/#gemfile
+ RUBY
+ end
+
+ it 'flags invalid feature category in gem method as the only keyword argument' do
+ expect_offense(<<~RUBY, invalid: invalid_category)
+ gem 'foo', '~> 1.0', feature_category: :%{invalid}
+ ^^{invalid} Please use a valid feature category. See https://docs.gitlab.com/ee/development/feature_categorization/#gemfile
+ RUBY
+ end
+
+ it 'flags invalid feature category in gem method as the last keyword argument' do
+ expect_offense(<<~RUBY, invalid: invalid_category)
+ gem 'foo', '~> 1.0', require: false, feature_category: :%{invalid}
+ ^^{invalid} Please use a valid feature category. See https://docs.gitlab.com/ee/development/feature_categorization/#gemfile
+ RUBY
+ end
+
+ it 'flags invalid feature category in gem method as the first keyword argument' do
+ expect_offense(<<~RUBY, invalid: invalid_category)
+ gem 'foo', '~> 1.0', feature_category: :%{invalid}, require: false
+ ^^{invalid} Please use a valid feature category. See https://docs.gitlab.com/ee/development/feature_categorization/#gemfile
+ RUBY
+ end
+
+ it 'does not flag in gem method if feature category is valid as the only keyword argument' do
+ expect_no_offenses(<<~RUBY)
+ gem 'foo', '~> 1.0', feature_category: :#{valid_category}
+ RUBY
+ end
+
+ it 'does not flag in gem method if feature category is valid as the last keyword argument' do
+ expect_no_offenses(<<~RUBY)
+ gem 'foo', '~> 1.0', require: false, feature_category: :#{valid_category}
+ RUBY
+ end
+
+ describe '#external_dependency_checksum' do
+ it 'returns a SHA256 digest used by RuboCop to invalid cache' do
+ expect(cop.external_dependency_checksum).to match(/^\h{64}$/)
+ end
+ end
+end
diff --git a/spec/rubocop/cop/gitlab/avoid_gitlab_instance_checks_spec.rb b/spec/rubocop/cop/gitlab/avoid_gitlab_instance_checks_spec.rb
new file mode 100644
index 00000000000..2dba6194d44
--- /dev/null
+++ b/spec/rubocop/cop/gitlab/avoid_gitlab_instance_checks_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+require 'rspec-parameterized'
+require_relative '../../../../rubocop/cop/gitlab/avoid_gitlab_instance_checks'
+
+RSpec.describe RuboCop::Cop::Gitlab::AvoidGitlabInstanceChecks, feature_category: :shared do
+ let(:msg) { described_class::MSG }
+
+ describe 'bad examples' do
+ where(:code) do
+ %w[
+ Gitlab.com?
+ Gitlab.com_except_jh?
+ Gitlab.com_and_canary?
+ Gitlab.com_but_not_canary?
+ Gitlab.org_or_com?
+ ::Gitlab.com?
+ Gitlab::CurrentSettings.should_check_namespace_plan?
+ ::Gitlab::CurrentSettings.should_check_namespace_plan?
+ ]
+ end
+
+ with_them do
+ it 'registers an offense' do
+ expect_offense(<<~CODE, node: code)
+ return if %{node}
+ ^{node} Avoid the use of [...]
+ CODE
+ end
+ end
+ end
+
+ describe 'good examples' do
+ where(:code) do
+ %w[com? com Gitlab.com Gitlab::CurrentSettings.check_namespace_plan?]
+ end
+
+ with_them do
+ it 'does not register an offense' do
+ expect_no_offenses(code)
+ end
+ end
+ end
+end
diff --git a/spec/rubocop/cop/gitlab/feature_available_usage_spec.rb b/spec/rubocop/cop/gitlab/feature_available_usage_spec.rb
index b15c298099d..184f2c3ee92 100644
--- a/spec/rubocop/cop/gitlab/feature_available_usage_spec.rb
+++ b/spec/rubocop/cop/gitlab/feature_available_usage_spec.rb
@@ -16,6 +16,10 @@ RSpec.describe RuboCop::Cop::Gitlab::FeatureAvailableUsage do
expect_no_offenses('License.feature_available?(:push_rules)')
end
+ it 'does not flag the use of Gitlab::Saas.feature_available?' do
+ expect_no_offenses('Gitlab::Saas.feature_available?("some/feature")')
+ end
+
it 'flags the use with a dynamic feature as nil' do
expect_offense(<<~SOURCE)
feature_available?(nil)
diff --git a/spec/rubocop/cop/migration/prevent_index_creation_spec.rb b/spec/rubocop/cop/migration/prevent_index_creation_spec.rb
index 9d886467a48..088edfedfc9 100644
--- a/spec/rubocop/cop/migration/prevent_index_creation_spec.rb
+++ b/spec/rubocop/cop/migration/prevent_index_creation_spec.rb
@@ -4,7 +4,7 @@ require 'rubocop_spec_helper'
require_relative '../../../../rubocop/cop/migration/prevent_index_creation'
RSpec.describe RuboCop::Cop::Migration::PreventIndexCreation do
- let(:forbidden_tables) { %w(ci_builds) }
+ let(:forbidden_tables) { %w(ci_builds namespaces) }
let(:forbidden_tables_list) { forbidden_tables.join(', ') }
context 'when in migration' do
@@ -12,14 +12,26 @@ RSpec.describe RuboCop::Cop::Migration::PreventIndexCreation do
allow(cop).to receive(:in_migration?).and_return(true)
end
+ let(:offense) { "Adding new index to #{forbidden_tables_list} is forbidden. [...]" }
+
context 'when adding an index to a forbidden table' do
+ it 'does not register an offense when direction is down' do
+ forbidden_tables.each do |table_name|
+ expect_no_offenses(<<~RUBY)
+ def down
+ add_concurrent_index :#{table_name}, :runners_token, unique: true, name: INDEX_NAME
+ end
+ RUBY
+ end
+ end
+
context 'when table_name is a symbol' do
it "registers an offense when add_index is used", :aggregate_failures do
forbidden_tables.each do |table_name|
expect_offense(<<~RUBY)
def change
add_index :#{table_name}, :protected
- ^^^^^^^^^ Adding new index to #{forbidden_tables_list} is forbidden, see https://gitlab.com/gitlab-org/gitlab/-/issues/332886
+ ^^^^^^^^^ #{offense}
end
RUBY
end
@@ -30,7 +42,7 @@ RSpec.describe RuboCop::Cop::Migration::PreventIndexCreation do
expect_offense(<<~RUBY)
def change
add_concurrent_index :#{table_name}, :protected
- ^^^^^^^^^^^^^^^^^^^^ Adding new index to #{forbidden_tables_list} is forbidden, see https://gitlab.com/gitlab-org/gitlab/-/issues/332886
+ ^^^^^^^^^^^^^^^^^^^^ #{offense}
end
RUBY
end
@@ -43,7 +55,7 @@ RSpec.describe RuboCop::Cop::Migration::PreventIndexCreation do
expect_offense(<<~RUBY)
def change
add_index "#{table_name}", :protected
- ^^^^^^^^^ Adding new index to #{forbidden_tables_list} is forbidden, see https://gitlab.com/gitlab-org/gitlab/-/issues/332886
+ ^^^^^^^^^ #{offense}
end
RUBY
end
@@ -54,7 +66,7 @@ RSpec.describe RuboCop::Cop::Migration::PreventIndexCreation do
expect_offense(<<~RUBY)
def change
add_concurrent_index "#{table_name}", :protected
- ^^^^^^^^^^^^^^^^^^^^ Adding new index to #{forbidden_tables_list} is forbidden, see https://gitlab.com/gitlab-org/gitlab/-/issues/332886
+ ^^^^^^^^^^^^^^^^^^^^ #{offense}
end
RUBY
end
@@ -70,7 +82,7 @@ RSpec.describe RuboCop::Cop::Migration::PreventIndexCreation do
def change
add_concurrent_index TABLE_NAME, :protected
- ^^^^^^^^^^^^^^^^^^^^ Adding new index to #{forbidden_tables_list} is forbidden, see https://gitlab.com/gitlab-org/gitlab/-/issues/332886
+ ^^^^^^^^^^^^^^^^^^^^ #{offense}
end
RUBY
end
diff --git a/spec/rubocop/cop/migration/unfinished_dependencies_spec.rb b/spec/rubocop/cop/migration/unfinished_dependencies_spec.rb
new file mode 100644
index 00000000000..cac48871856
--- /dev/null
+++ b/spec/rubocop/cop/migration/unfinished_dependencies_spec.rb
@@ -0,0 +1,118 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+require_relative '../../../../rubocop/cop/migration/unfinished_dependencies'
+
+RSpec.describe RuboCop::Cop::Migration::UnfinishedDependencies, feature_category: :database do
+ let(:version) { 20230307160250 }
+
+ let(:migration) do
+ <<~RUBY
+ class TestMigration < Gitlab::Database::Migration[2.1]
+ def perform; end
+ end
+ RUBY
+ end
+
+ before do
+ allow(cop).to receive(:in_migration?).and_return(true)
+
+ allow(cop).to receive(:version).and_return(version)
+ end
+
+ shared_examples 'migration with rubocop offense' do
+ it 'registers an offense' do
+ expect_offense(migration)
+ end
+ end
+
+ shared_examples 'migration without any rubocop offense' do
+ it 'does not register any offense' do
+ expect_no_offenses(migration)
+ end
+ end
+
+ context 'without any dependent batched background migrations' do
+ it_behaves_like 'migration without any rubocop offense'
+ end
+
+ context 'with dependent batched background migrations' do
+ let(:dependent_migration_versions) { [20230307160240] }
+
+ let(:migration) do
+ <<~RUBY
+ class TestMigration < Gitlab::Database::Migration[2.1]
+ DEPENDENT_BATCHED_BACKGROUND_MIGRATIONS = #{dependent_migration_versions}
+
+ def perform; end
+ end
+ RUBY
+ end
+
+ context 'with unfinished dependent migration' do
+ before do
+ allow(cop).to receive(:fetch_finalized_by)
+ .with(dependent_migration_versions.first)
+ .and_return(nil)
+ end
+
+ it_behaves_like 'migration with rubocop offense' do
+ let(:migration) do
+ <<~RUBY
+ class TestMigration < Gitlab::Database::Migration[2.1]
+ DEPENDENT_BATCHED_BACKGROUND_MIGRATIONS = #{dependent_migration_versions}
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{format(described_class::NOT_FINALIZED_MSG, version: dependent_migration_versions.first)}
+
+ def perform; end
+ end
+ RUBY
+ end
+ end
+ end
+
+ context 'with incorrectly finalized dependent migration' do
+ let(:dependent_migration_versions) { [20230307160240, 20230307160230] }
+
+ before do
+ allow(cop).to receive(:fetch_finalized_by)
+ .with(dependent_migration_versions.first)
+ .and_return(version - 10)
+
+ allow(cop).to receive(:fetch_finalized_by)
+ .with(dependent_migration_versions.last)
+ .and_return(version + 10)
+ end
+
+ it_behaves_like 'migration with rubocop offense' do
+ let(:migration) do
+ <<~RUBY
+ class TestMigration < Gitlab::Database::Migration[2.1]
+ DEPENDENT_BATCHED_BACKGROUND_MIGRATIONS = #{dependent_migration_versions}
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{format(described_class::FINALIZED_BY_LATER_MIGRATION_MSG, version: dependent_migration_versions.last)}
+
+ def perform; end
+ end
+ RUBY
+ end
+ end
+ end
+
+ context 'with properly finalized dependent background migrations' do
+ before do
+ allow_next_instance_of(RuboCop::BatchedBackgroundMigrations) do |bbms|
+ allow(bbms).to receive(:finalized_by).and_return(version - 5)
+ end
+ end
+
+ it_behaves_like 'migration without any rubocop offense'
+ end
+ end
+
+ context 'for non migrations' do
+ before do
+ allow(cop).to receive(:in_migration?).and_return(false)
+ end
+
+ it_behaves_like 'migration without any rubocop offense'
+ end
+end
diff --git a/spec/rubocop/cop/qa/fabricate_usage_spec.rb b/spec/rubocop/cop/qa/fabricate_usage_spec.rb
new file mode 100644
index 00000000000..7c4b42b91e0
--- /dev/null
+++ b/spec/rubocop/cop/qa/fabricate_usage_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+
+require_relative '../../../../rubocop/cop/qa/fabricate_usage'
+
+RSpec.describe RuboCop::Cop::QA::FabricateUsage, feature_category: :quality_management do
+ let(:source_file) { 'qa/qa/specs/spec.rb' }
+
+ it 'registers an offense when using fabricate_via_api! for a valid resource' do
+ expect_offense(<<~RUBY)
+ Resource::Project.fabricate_via_api! do |project|
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Prefer create(:project[, ...]) here.
+ project.name = 'test'
+ end
+ RUBY
+ end
+
+ it 'registers an offense for groups' do
+ expect_offense(<<~RUBY)
+ Resource::Group.fabricate_via_api! do |group|
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Prefer create(:group[, ...]) here.
+ group.path = 'test'
+ end
+ RUBY
+ end
+
+ it 'does not register an offense when using fabricate_via_api! for an unenforced resource' do
+ expect_no_offenses(<<~RUBY)
+ Resource::Invalid.fabricate_via_api! do |project|
+ project.name = 'test'
+ end
+ RUBY
+ end
+end
diff --git a/spec/rubocop/cop/rspec/env_mocking_spec.rb b/spec/rubocop/cop/rspec/env_mocking_spec.rb
index 189fccf483a..fec2000c88b 100644
--- a/spec/rubocop/cop/rspec/env_mocking_spec.rb
+++ b/spec/rubocop/cop/rspec/env_mocking_spec.rb
@@ -34,23 +34,23 @@ RSpec.describe RuboCop::Cop::RSpec::EnvMocking, feature_category: :tooling do
context 'with mocking bracket calls ' do
it_behaves_like 'cop offense mocking the ENV constant correctable with stub_env',
- offense_call_brackets_string_quotes, %(stub_env('FOO', 'bar'))
+ offense_call_brackets_string_quotes, %(stub_env('FOO', 'bar'))
it_behaves_like 'cop offense mocking the ENV constant correctable with stub_env',
- offense_call_brackets_variables, %(stub_env(key, value))
+ offense_call_brackets_variables, %(stub_env(key, value))
end
context 'with mocking fetch calls' do
it_behaves_like 'cop offense mocking the ENV constant correctable with stub_env',
- offense_call_fetch_string_quotes, %(stub_env('FOO', 'bar'))
+ offense_call_fetch_string_quotes, %(stub_env('FOO', 'bar'))
it_behaves_like 'cop offense mocking the ENV constant correctable with stub_env',
- offense_call_fetch_variables, %(stub_env(key, value))
+ offense_call_fetch_variables, %(stub_env(key, value))
end
context 'with other special cases and variations' do
it_behaves_like 'cop offense mocking the ENV constant correctable with stub_env',
- offense_call_root_env_variables, %(stub_env(key, value))
+ offense_call_root_env_variables, %(stub_env(key, value))
it_behaves_like 'cop offense mocking the ENV constant correctable with stub_env',
- offense_call_key_value_method_calls, %(stub_env(fetch_key(object), fetch_value(object)))
+ offense_call_key_value_method_calls, %(stub_env(fetch_key(object), fetch_value(object)))
end
context 'with acceptable cases' do
diff --git a/spec/rubocop/cop/rspec/invalid_feature_category_spec.rb b/spec/rubocop/cop/rspec/feature_category_spec.rb
index e5287f7105e..05e3cae012e 100644
--- a/spec/rubocop/cop/rspec/invalid_feature_category_spec.rb
+++ b/spec/rubocop/cop/rspec/feature_category_spec.rb
@@ -3,14 +3,15 @@
require 'rubocop_spec_helper'
require 'rspec-parameterized'
-require_relative '../../../../rubocop/cop/rspec/invalid_feature_category'
+require_relative '../../../../rubocop/feature_categories'
+require_relative '../../../../rubocop/cop/rspec/feature_category'
-RSpec.describe RuboCop::Cop::RSpec::InvalidFeatureCategory, feature_category: :tooling do
+RSpec.describe RuboCop::Cop::RSpec::FeatureCategory, feature_category: :tooling do
shared_examples 'feature category validation' do |valid_category|
it 'flags invalid feature category in top level example group' do
expect_offense(<<~RUBY, invalid: invalid_category)
RSpec.describe 'foo', feature_category: :%{invalid}, foo: :bar do
- ^^{invalid} Please use a valid feature category. See https://docs.gitlab.com/ee/development/feature_categorization/#rspec-examples.
+ ^^{invalid} Please use a valid feature category. See https://docs.gitlab.com/ee/development/feature_categorization/#rspec-examples
end
RUBY
end
@@ -19,7 +20,7 @@ RSpec.describe RuboCop::Cop::RSpec::InvalidFeatureCategory, feature_category: :t
expect_offense(<<~RUBY, valid: valid_category, invalid: invalid_category)
RSpec.describe 'foo', feature_category: :"%{valid}" do
context 'bar', foo: :bar, feature_category: :%{invalid} do
- ^^{invalid} Please use a valid feature category. See https://docs.gitlab.com/ee/development/feature_categorization/#rspec-examples.
+ ^^{invalid} Please use a valid feature category. See https://docs.gitlab.com/ee/development/feature_categorization/#rspec-examples
end
end
RUBY
@@ -29,7 +30,7 @@ RSpec.describe RuboCop::Cop::RSpec::InvalidFeatureCategory, feature_category: :t
expect_offense(<<~RUBY, valid: valid_category, invalid: invalid_category)
RSpec.describe 'foo', feature_category: :"%{valid}" do
it 'bar', feature_category: :%{invalid} do
- ^^{invalid} Please use a valid feature category. See https://docs.gitlab.com/ee/development/feature_categorization/#rspec-examples.
+ ^^{invalid} Please use a valid feature category. See https://docs.gitlab.com/ee/development/feature_categorization/#rspec-examples
end
end
RUBY
@@ -63,21 +64,14 @@ RSpec.describe RuboCop::Cop::RSpec::InvalidFeatureCategory, feature_category: :t
let(:invalid_category) { :invalid_category }
- context 'with categories defined in config/feature_categories.yml' do
- where(:valid_category) do
- YAML.load_file(rails_root_join('config/feature_categories.yml'))
- end
+ context 'with defined in config/feature_categories.yml and custom categories' do
+ where(:valid_category) { RuboCop::FeatureCategories.available_with_custom.to_a }
with_them do
it_behaves_like 'feature category validation', params[:valid_category]
end
end
- context 'with custom categories' do
- it_behaves_like 'feature category validation', 'tooling'
- it_behaves_like 'feature category validation', 'shared'
- end
-
it 'flags invalid feature category for non-symbols' do
expect_offense(<<~RUBY, invalid: invalid_category)
RSpec.describe 'foo', feature_category: "%{invalid}" do
@@ -91,9 +85,11 @@ RSpec.describe RuboCop::Cop::RSpec::InvalidFeatureCategory, feature_category: :t
end
it 'does not flag use of invalid categories in non-example code' do
+ valid_category = RuboCop::FeatureCategories.available.first
+
# See https://gitlab.com/gitlab-org/gitlab/-/issues/381882#note_1265865125
expect_no_offenses(<<~RUBY)
- RSpec.describe 'A spec' do
+ RSpec.describe 'A spec', feature_category: :#{valid_category} do
let(:api_handler) do
Class.new(described_class) do
namespace '/test' do
@@ -112,6 +108,18 @@ RSpec.describe RuboCop::Cop::RSpec::InvalidFeatureCategory, feature_category: :t
RUBY
end
+ it 'flags missing feature category in top level example group' do
+ expect_offense(<<~RUBY)
+ RSpec.describe 'foo' do
+ ^^^^^^^^^^^^^^^^^^^^^^^ Please use a valid feature category. See https://docs.gitlab.com/ee/development/feature_categorization/#rspec-examples
+ end
+
+ RSpec.describe 'foo', some: :tag do
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Please use a valid feature category. See https://docs.gitlab.com/ee/development/feature_categorization/#rspec-examples
+ end
+ RUBY
+ end
+
describe '#external_dependency_checksum' do
it 'returns a SHA256 digest used by RuboCop to invalid cache' do
expect(cop.external_dependency_checksum).to match(/^\h{64}$/)
diff --git a/spec/rubocop/cop/rspec/missing_feature_category_spec.rb b/spec/rubocop/cop/rspec/missing_feature_category_spec.rb
deleted file mode 100644
index 41b1d2b8580..00000000000
--- a/spec/rubocop/cop/rspec/missing_feature_category_spec.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-require 'rubocop_spec_helper'
-require_relative '../../../../rubocop/cop/rspec/missing_feature_category'
-
-RSpec.describe RuboCop::Cop::RSpec::MissingFeatureCategory, feature_category: :tooling do
- it 'flags missing feature category in top level example group' do
- expect_offense(<<~RUBY)
- RSpec.describe 'foo' do
- ^^^^^^^^^^^^^^^^^^^^ Please add missing feature category. See https://docs.gitlab.com/ee/development/feature_categorization/#rspec-examples.
- end
-
- RSpec.describe 'foo', some: :tag do
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Please add missing feature category. See https://docs.gitlab.com/ee/development/feature_categorization/#rspec-examples.
- end
- RUBY
- end
-
- it 'does not flag if feature category is defined' do
- expect_no_offenses(<<~RUBY)
- RSpec.describe 'foo', feature_category: :foo do
- end
-
- RSpec.describe 'foo', some: :tag, feature_category: :foo do
- end
-
- RSpec.describe 'foo', feature_category: :foo, some: :tag do
- end
- RUBY
- end
-end
diff --git a/spec/rubocop/cop/style/regexp_literal_mixed_preserve_spec.rb b/spec/rubocop/cop/style/regexp_literal_mixed_preserve_spec.rb
index 1d1c0852db2..d2ccd504fcd 100644
--- a/spec/rubocop/cop/style/regexp_literal_mixed_preserve_spec.rb
+++ b/spec/rubocop/cop/style/regexp_literal_mixed_preserve_spec.rb
@@ -10,10 +10,10 @@ require_relative '../../../../rubocop/cop/style/regexp_literal_mixed_preserve'
RSpec.describe RuboCop::Cop::Style::RegexpLiteralMixedPreserve, :config do
let(:config) do
supported_styles = { 'SupportedStyles' => %w[slashes percent_r mixed mixed_preserve] }
- RuboCop::Config.new('Style/PercentLiteralDelimiters' =>
- percent_literal_delimiters_config,
- 'Style/RegexpLiteralMixedPreserve' =>
- cop_config.merge(supported_styles))
+ RuboCop::Config.new(
+ 'Style/PercentLiteralDelimiters' => percent_literal_delimiters_config,
+ 'Style/RegexpLiteralMixedPreserve' => cop_config.merge(supported_styles)
+ )
end
let(:percent_literal_delimiters_config) { { 'PreferredDelimiters' => { '%r' => '{}' } } }
diff --git a/spec/rubocop/feature_categories_spec.rb b/spec/rubocop/feature_categories_spec.rb
new file mode 100644
index 00000000000..ffe7ade82e2
--- /dev/null
+++ b/spec/rubocop/feature_categories_spec.rb
@@ -0,0 +1,102 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+
+require_relative '../../rubocop/feature_categories'
+
+RSpec.describe RuboCop::FeatureCategories, feature_category: :tooling do
+ subject(:feature_categories) { described_class.new(categories) }
+
+ let(:categories) { ['valid_category'] }
+
+ describe '.available' do
+ it 'returns a list of available feature categories in a set of strings' do
+ expect(described_class.available).to be_a(Set)
+ expect(described_class.available).to all(be_a(String))
+ end
+ end
+
+ describe '.available_with_custom' do
+ it 'returns a list of available feature categories' do
+ expect(described_class.available_with_custom).to include(described_class.available)
+ end
+
+ it 'returns a list containing the custom feature categories' do
+ expect(described_class.available_with_custom).to include(described_class::CUSTOM_CATEGORIES)
+ end
+ end
+
+ describe '.config_checksum' do
+ it 'returns a SHA256 digest used by RuboCop to invalid cache' do
+ expect(described_class.config_checksum).to match(/^\h{64}$/)
+ end
+ end
+
+ describe '#check' do
+ let(:value_node) { instance_double(RuboCop::AST::SymbolNode, sym_type?: true) }
+ let(:document_link) { 'https://example.com' }
+
+ def check
+ expect do |block|
+ feature_categories.check(
+ value_node: value_node,
+ document_link: document_link,
+ &block)
+ end
+ end
+
+ context 'when value_node is nil' do
+ let(:value_node) { nil }
+
+ it 'yields a message asking for a feature category with document link only' do
+ check.to yield_with_args(<<~MARKDOWN.chomp)
+ Please use a valid feature category. See https://example.com
+ MARKDOWN
+ end
+ end
+
+ context 'when value_node is not a symbol node' do
+ before do
+ allow(value_node).to receive(:sym_type?).and_return(false)
+ end
+
+ it 'yields a message asking for a symbol value' do
+ check.to yield_with_args(described_class::MSG_SYMBOL)
+ end
+ end
+
+ context 'when category is found' do
+ before do
+ allow(value_node).to receive(:value).and_return(categories.first)
+ end
+
+ it 'returns nil without yielding anything' do
+ check.not_to yield_with_args
+ end
+ end
+
+ context 'when a similar category is found' do
+ before do
+ allow(value_node).to receive(:value).and_return('invalid_category')
+ end
+
+ it 'yields a message asking for a feature category with suggestion and document link' do
+ check.to yield_with_args(<<~MARKDOWN.chomp)
+ Please use a valid feature category. Did you mean `:valid_category`? See https://example.com
+ MARKDOWN
+ end
+ end
+
+ context 'when no similar category is found' do
+ before do
+ allow(value_node).to receive(:value).and_return('something_completely_different')
+ end
+
+ it 'yields a message asking for a feature category with document link only' do
+ check.to yield_with_args(<<~MARKDOWN.chomp)
+ Please use a valid feature category. See https://example.com
+ MARKDOWN
+ end
+ end
+ end
+end
diff --git a/spec/rubocop/formatter/graceful_formatter_spec.rb b/spec/rubocop/formatter/graceful_formatter_spec.rb
index d76e566e2b4..b9a56bec115 100644
--- a/spec/rubocop/formatter/graceful_formatter_spec.rb
+++ b/spec/rubocop/formatter/graceful_formatter_spec.rb
@@ -220,19 +220,20 @@ RSpec.describe RuboCop::Formatter::GracefulFormatter, :isolated_environment do
def fake_offense(cop_name)
# rubocop:disable RSpec/VerifiedDoubles
- double(:offense,
- cop_name: cop_name,
- corrected?: false,
- correctable?: false,
- severity: double(:severity, name: :convention, code: :C),
- line: 5,
- column: 23,
- real_column: 23,
- corrected_with_todo?: false,
- message: "#{cop_name} message",
- location: double(:location, source_line: 'line', first_line: 1, last_line: 1, single_line?: true),
- highlighted_area: double(:highlighted_area, begin_pos: 1, size: 2, source_buffer: 'line', source: 'i')
- )
+ double(
+ :offense,
+ cop_name: cop_name,
+ corrected?: false,
+ correctable?: false,
+ severity: double(:severity, name: :convention, code: :C),
+ line: 5,
+ column: 23,
+ real_column: 23,
+ corrected_with_todo?: false,
+ message: "#{cop_name} message",
+ location: double(:location, source_line: 'line', first_line: 1, last_line: 1, single_line?: true),
+ highlighted_area: double(:highlighted_area, begin_pos: 1, size: 2, source_buffer: 'line', source: 'i')
+ )
# rubocop:enable RSpec/VerifiedDoubles
end
end
diff --git a/spec/rubocop_spec_helper.rb b/spec/rubocop_spec_helper.rb
index 9884cdd0272..2f1dc2843be 100644
--- a/spec/rubocop_spec_helper.rb
+++ b/spec/rubocop_spec_helper.rb
@@ -8,6 +8,7 @@ require 'fast_spec_helper'
require 'rubocop'
require 'rubocop/rspec/shared_contexts/default_rspec_language_config_context'
+require_relative 'support/helpers/next_instance_of'
require_relative 'rubocop/support_workaround'
RSpec.configure do |config|
@@ -21,6 +22,7 @@ RSpec.configure do |config|
config.include RuboCop::RSpec::ExpectOffense, type: :rubocop
config.include RuboCop::RSpec::ExpectOffense, type: :rubocop_rspec
+ config.include NextInstanceOf
config.include_context 'config', type: :rubocop
config.include_context 'with default RSpec/Language config', type: :rubocop_rspec
diff --git a/spec/scripts/changed-feature-flags_spec.rb b/spec/scripts/changed-feature-flags_spec.rb
deleted file mode 100644
index f1e381b0656..00000000000
--- a/spec/scripts/changed-feature-flags_spec.rb
+++ /dev/null
@@ -1,168 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-require 'tmpdir'
-
-load File.expand_path('../../scripts/changed-feature-flags', __dir__)
-
-RSpec.describe 'scripts/changed-feature-flags' do
- describe GetFeatureFlagsFromFiles do
- let!(:feature_flag_definition1) do
- file = File.open(File.join(ff_dir, "#{file_name1}.yml"), 'w+')
- file.write(<<~YAML)
- ---
- name: foo_flag
- default_enabled: true
- YAML
- file.rewind
- file
- end
-
- let!(:feature_flag_definition2) do
- file = File.open(File.join(ff_dir, "#{file_name2}.yml"), 'w+')
- file.write(<<~YAML)
- ---
- name: bar_flag
- default_enabled: false
- YAML
- file.rewind
- file
- end
-
- let!(:feature_flag_diff1) do
- FileUtils.mkdir_p(File.join(diffs_dir, ff_sub_dir))
- file = File.open(File.join(diffs_dir, ff_sub_dir, "#{file_name1}.yml.diff"), 'w+')
- file.write(<<~YAML)
- @@ -5,4 +5,4 @@
- name: foo_flag
- -default_enabled: false
- +default_enabled: true
- YAML
- file.rewind
- file
- end
-
- let!(:feature_flag_diff2) do
- FileUtils.mkdir_p(File.join(diffs_dir, ff_sub_dir))
- file = File.open(File.join(diffs_dir, ff_sub_dir, "#{file_name2}.yml.diff"), 'w+')
- file.write(<<~YAML)
- @@ -0,0 +0,0 @@
- name: bar_flag
- -default_enabled: true
- +default_enabled: false
- YAML
- file.rewind
- file
- end
-
- let!(:deleted_feature_flag_diff) do
- FileUtils.mkdir_p(File.join(diffs_dir, ff_sub_dir))
- file = File.open(File.join(diffs_dir, ff_sub_dir, "foobar_ff_#{SecureRandom.hex(8)}.yml.deleted.diff"), 'w+')
- file.write(<<~YAML)
- @@ -0,0 +0,0 @@
- -name: foobar_flag
- -default_enabled: true
- YAML
- file.rewind
- file
- end
-
- before do
- allow(Dir).to receive(:pwd).and_return(Dir.tmpdir)
- end
-
- after do
- feature_flag_definition1.close
- feature_flag_definition2.close
- feature_flag_diff1.close
- feature_flag_diff2.close
- deleted_feature_flag_diff.close
- FileUtils.rm_r(ff_dir)
- FileUtils.rm_r(diffs_dir)
- end
-
- describe '.extracted_flags' do
- let(:file_name1) { "foo_ff_#{SecureRandom.hex(8)}" }
- let(:file_name2) { "bar_ff_#{SecureRandom.hex(8)}" }
- let(:ff_dir) { FileUtils.mkdir_p(File.join(Dir.tmpdir, ff_sub_dir)) }
- let(:diffs_dir) { FileUtils.mkdir_p(File.join(Dir.tmpdir, 'diffs')).first }
-
- shared_examples 'extract feature flags' do
- it 'returns feature flags on their own' do
- subject = described_class.new({ files: diffs_dir })
-
- expect(subject.extracted_flags.split(',')).to include('foo_flag', 'bar_flag')
- end
-
- it 'returns feature flags and their state as enabled' do
- subject = described_class.new({ files: diffs_dir, state: 'enabled' })
-
- expect(subject.extracted_flags.split(',')).to include('foo_flag=enabled', 'bar_flag=enabled')
- end
-
- it 'returns feature flags and their state as disabled' do
- subject = described_class.new({ files: diffs_dir, state: 'disabled' })
-
- expect(subject.extracted_flags.split(',')).to include('foo_flag=disabled', 'bar_flag=disabled')
- end
-
- it 'does not return feature flags when there are mixed deleted and non-deleted definition files' do
- subject = described_class.new({ files: diffs_dir, state: 'deleted' })
-
- expect(subject.extracted_flags).to eq('')
- end
- end
-
- context 'with definition files in the development directory' do
- let(:ff_sub_dir) { %w[feature_flags development] }
-
- it_behaves_like 'extract feature flags'
- end
-
- context 'with definition files in the ops directory' do
- let(:ff_sub_dir) { %w[feature_flags ops] }
-
- it_behaves_like 'extract feature flags'
- end
-
- context 'with definition files in the experiment directory' do
- let(:ff_sub_dir) { %w[feature_flags experiment] }
-
- it 'ignores the files' do
- subject = described_class.new({ files: diffs_dir })
-
- expect(subject.extracted_flags).to eq('')
- end
- end
-
- context 'with only deleted definition files' do
- let(:ff_sub_dir) { %w[feature_flags development] }
-
- before do
- feature_flag_diff1.close
- feature_flag_diff2.close
- FileUtils.rm_r(feature_flag_diff1)
- FileUtils.rm_r(feature_flag_diff2)
- end
-
- it 'returns feature flags and their state as deleted' do
- subject = described_class.new({ files: diffs_dir, state: 'deleted' })
-
- expect(subject.extracted_flags).to eq('foobar_flag=deleted')
- end
-
- it 'does not return feature flags when the desired state is enabled' do
- subject = described_class.new({ files: diffs_dir, state: 'enabled' })
-
- expect(subject.extracted_flags).to eq('')
- end
-
- it 'does not return feature flags when the desired state is disabled' do
- subject = described_class.new({ files: diffs_dir, state: 'disabled' })
-
- expect(subject.extracted_flags).to eq('')
- end
- end
- end
- end
-end
diff --git a/spec/scripts/generate_rspec_pipeline_spec.rb b/spec/scripts/generate_rspec_pipeline_spec.rb
index 91b5739cf63..894c33968b8 100644
--- a/spec/scripts/generate_rspec_pipeline_spec.rb
+++ b/spec/scripts/generate_rspec_pipeline_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe GenerateRspecPipeline, :silence_stdout, feature_category: :toolin
describe '#generate!' do
let!(:rspec_files) { Tempfile.new(['rspec_files_path', '.txt']) }
let(:rspec_files_content) do
- "spec/migrations/a_spec.rb spec/migrations/b_spec.rb " \
+ "spec/migrations/a_spec.rb spec/migrations/b_spec.rb spec/migrations/c_spec.rb spec/migrations/d_spec.rb " \
"spec/lib/gitlab/background_migration/a_spec.rb spec/lib/gitlab/background_migration/b_spec.rb " \
"spec/models/a_spec.rb spec/models/b_spec.rb " \
"spec/controllers/a_spec.rb spec/controllers/b_spec.rb " \
@@ -63,8 +63,13 @@ RSpec.describe GenerateRspecPipeline, :silence_stdout, feature_category: :toolin
let(:knapsack_report_content) do
<<~JSON
{
- "spec/migrations/a_spec.rb": 360.3,
- "spec/migrations/b_spec.rb": 180.1,
+ "spec/migrations/a_spec.rb": 620.3,
+ "spec/migrations/b_spec.rb": 610.1,
+ "spec/migrations/c_spec.rb": 20.1,
+ "spec/migrations/d_spec.rb": 20.1,
+ "spec/migrations/e_spec.rb": 20.1,
+ "spec/migrations/f_spec.rb": 20.1,
+ "spec/migrations/g_spec.rb": 20.1,
"spec/lib/gitlab/background_migration/a_spec.rb": 60.5,
"spec/lib/gitlab/background_migration/b_spec.rb": 180.3,
"spec/models/a_spec.rb": 360.2,
@@ -123,7 +128,7 @@ RSpec.describe GenerateRspecPipeline, :silence_stdout, feature_category: :toolin
expect(File.read("#{pipeline_template.path}.yml"))
.to eq(
- "rspec migration:\n parallel: 2\nrspec background_migration:\n parallel: 2\n" \
+ "rspec migration:\n parallel: 4\nrspec background_migration:\n parallel: 2\n" \
"rspec unit:\n parallel: 2\nrspec integration:\n parallel: 2\n" \
"rspec system:\n parallel: 2"
)
@@ -164,12 +169,27 @@ RSpec.describe GenerateRspecPipeline, :silence_stdout, feature_category: :toolin
expect(File.read("#{pipeline_template.path}.yml"))
.to eq(
- "rspec migration:\n parallel: 2\nrspec background_migration:\n" \
+ "rspec migration:\n parallel: 4\nrspec background_migration:\n" \
"rspec unit:\n parallel: 2\nrspec integration:\n" \
"rspec system:\n parallel: 2"
)
end
+ context 'and RSpec files have a high duration' do
+ let(:rspec_files_content) do
+ "spec/migrations/a_spec.rb spec/migrations/b_spec.rb"
+ end
+
+ it 'generates the pipeline config with parallelization based on Knapsack' do
+ subject.generate!
+
+ expect(File.read("#{pipeline_template.path}.yml"))
+ .to eq(
+ "rspec migration:\n parallel: 2"
+ )
+ end
+ end
+
context 'and Knapsack report does not contain valid JSON' do
let(:knapsack_report_content) { "#{super()}," }
diff --git a/spec/scripts/pipeline/average_reports_spec.rb b/spec/scripts/pipeline/average_reports_spec.rb
new file mode 100644
index 00000000000..2eee8d34fd5
--- /dev/null
+++ b/spec/scripts/pipeline/average_reports_spec.rb
@@ -0,0 +1,140 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'tempfile'
+require 'json'
+require_relative '../../../scripts/pipeline/average_reports'
+
+RSpec.describe AverageReports, feature_category: :tooling do
+ let(:initial_report) do
+ {
+ 'spec/frontend/fixtures/analytics.rb' => 1,
+ 'spec/frontend/fixtures/runner_instructions.rb' => 0.8074841039997409,
+ 'ee/spec/frontend/fixtures/analytics/value_streams_test_stage.rb' => 50.35115972699987,
+ 'ee/spec/frontend/fixtures/merge_requests.rb' => 19.16644390500005,
+ 'old' => 123
+ }
+ end
+
+ let(:new_report) do
+ {
+ 'spec/frontend/fixtures/analytics.rb' => 2,
+ 'spec/frontend/fixtures/runner_instructions.rb' => 0,
+ 'ee/spec/frontend/fixtures/analytics/value_streams_test_stage.rb' => 0,
+ 'ee/spec/frontend/fixtures/merge_requests.rb' => 0,
+ 'new' => 234
+ }
+ end
+
+ let(:new_report_2) do
+ {
+ 'spec/frontend/fixtures/analytics.rb' => 3,
+ 'new' => 468
+ }
+ end
+
+ let(:initial_report_file) do
+ Tempfile.new('temp_initial_report.json').tap do |f|
+ # rubocop:disable Gitlab/Json
+ f.write(JSON.dump(initial_report))
+ # rubocop:enable Gitlab/Json
+ f.close
+ end
+ end
+
+ let(:new_report_file_1) do |_f|
+ Tempfile.new('temp_new_report1.json').tap do |f|
+ # rubocop:disable Gitlab/Json
+ f.write(JSON.dump(new_report))
+ # rubocop:enable Gitlab/Json
+ f.close
+ end
+ end
+
+ let(:new_report_file_2) do |_f|
+ Tempfile.new('temp_new_report2.json').tap do |f|
+ # rubocop:disable Gitlab/Json
+ f.write(JSON.dump(new_report_2))
+ # rubocop:enable Gitlab/Json
+ f.close
+ end
+ end
+
+ before do
+ allow(subject).to receive(:puts)
+ end
+
+ after do
+ initial_report_file.unlink
+ new_report_file_1.unlink
+ new_report_file_2.unlink
+ end
+
+ describe 'execute' do
+ context 'with 1 new report' do
+ subject do
+ described_class.new(
+ initial_report_file: initial_report_file.path,
+ new_report_files: [new_report_file_1.path]
+ )
+ end
+
+ it 'returns average durations' do
+ results = subject.execute
+
+ expect(results['spec/frontend/fixtures/analytics.rb']).to be_within(0.01).of(1.5)
+ expect(results['spec/frontend/fixtures/runner_instructions.rb']).to be_within(0.01).of(0.4)
+ expect(results['ee/spec/frontend/fixtures/analytics/value_streams_test_stage.rb']).to be_within(0.01).of(25.17)
+ expect(results['ee/spec/frontend/fixtures/merge_requests.rb']).to be_within(0.01).of(9.58)
+ expect(results['new']).to be_within(0.01).of(234)
+
+ # excludes entry missing from the new report
+ expect(results['old']).to be_nil
+ end
+ end
+
+ context 'with 2 new reports' do
+ subject do
+ described_class.new(
+ initial_report_file: initial_report_file.path,
+ new_report_files: [new_report_file_1.path, new_report_file_2.path]
+ )
+ end
+
+ it 'returns average durations' do
+ results = subject.execute
+
+ expect(subject).to have_received(:puts).with("Updating #{initial_report_file.path} with 2 new reports...")
+ expect(subject).to have_received(:puts).with("Updated 5 data points from #{new_report_file_1.path}")
+ expect(subject).to have_received(:puts).with("Updated 2 data points from #{new_report_file_2.path}")
+
+ expect(results['spec/frontend/fixtures/analytics.rb']).to be_within(0.01).of(2)
+ expect(results['new']).to be_within(0.01).of(351)
+
+ # retains entry present in one of the new reports
+ expect(results['spec/frontend/fixtures/runner_instructions.rb']).to be_within(0.01).of(0.4)
+ expect(results['ee/spec/frontend/fixtures/analytics/value_streams_test_stage.rb']).to be_within(0.01).of(25.17)
+ expect(results['ee/spec/frontend/fixtures/merge_requests.rb']).to be_within(0.01).of(9.58)
+
+ # excludes entry missing from both of the new reports
+ expect(results['old']).to be_nil
+ end
+ end
+
+ context 'when some of the new report files do not exist' do
+ subject do
+ described_class.new(
+ initial_report_file: initial_report_file.path,
+ new_report_files: [new_report_file_1.path, 'file_does_not_exist.json']
+ )
+ end
+
+ it 'ignores the nil file and only process 1 new report' do
+ subject.execute
+
+ expect(subject).to have_received(:puts).with("Updating #{initial_report_file.path} with 1 new reports...")
+ expect(subject).to have_received(:puts).with("Updated 5 data points from #{new_report_file_1.path}")
+ end
+ end
+ end
+end
diff --git a/spec/scripts/pipeline/create_test_failure_issues_spec.rb b/spec/scripts/pipeline/create_test_failure_issues_spec.rb
deleted file mode 100644
index 2a5910f5238..00000000000
--- a/spec/scripts/pipeline/create_test_failure_issues_spec.rb
+++ /dev/null
@@ -1,188 +0,0 @@
-# frozen_string_literal: true
-
-# rubocop:disable RSpec/VerifiedDoubles
-
-require 'fast_spec_helper'
-require 'active_support/testing/time_helpers'
-require 'rspec-parameterized'
-
-require_relative '../../../scripts/pipeline/create_test_failure_issues'
-
-RSpec.describe CreateTestFailureIssues, feature_category: :tooling do
- describe CreateTestFailureIssue do
- include ActiveSupport::Testing::TimeHelpers
-
- let(:server_host) { 'example.com' }
- let(:project_path) { 'group/project' }
-
- let(:env) do
- {
- 'CI_SERVER_HOST' => server_host,
- 'CI_PROJECT_PATH' => project_path,
- 'CI_PIPELINE_URL' => "https://#{server_host}/#{project_path}/-/pipelines/1234"
- }
- end
-
- let(:api_token) { 'api_token' }
- let(:creator) { described_class.new(project: project_path, api_token: api_token) }
- let(:test_name) { 'The test description' }
- let(:test_file) { 'spec/path/to/file_spec.rb' }
- let(:test_file_content) do
- <<~CONTENT
- # comment
-
- RSpec.describe Foo, feature_category: :source_code_management do
- end
-
- CONTENT
- end
-
- let(:test_file_stub) { double(read: test_file_content) }
- let(:failed_test) do
- {
- 'name' => test_name,
- 'file' => test_file,
- 'job_url' => "https://#{server_host}/#{project_path}/-/jobs/5678"
- }
- end
-
- let(:categories_mapping) do
- {
- 'source_code_management' => {
- 'group' => 'source_code',
- 'label' => 'Category:Source Code Management'
- }
- }
- end
-
- let(:groups_mapping) do
- {
- 'source_code' => {
- 'label' => 'group::source_code'
- }
- }
- end
-
- let(:test_hash) { Digest::SHA256.hexdigest(failed_test['file'] + failed_test['name'])[0...12] }
- let(:latest_format_issue_title) { "#{failed_test['file']} [test-hash:#{test_hash}]" }
- let(:latest_format_issue_description) do
- <<~DESCRIPTION
- ### Test description
-
- `#{failed_test['name']}`
-
- ### Test file path
-
- [`#{failed_test['file']}`](https://#{server_host}/#{project_path}/-/blob/master/#{failed_test['file']})
-
- <!-- Don't add anything after the report list since it's updated automatically -->
- ### Reports (1)
-
- #{failed_test_report_line}
- DESCRIPTION
- end
-
- around do |example|
- freeze_time { example.run }
- end
-
- before do
- stub_env(env)
- allow(creator).to receive(:puts)
- end
-
- describe '#upsert' do
- let(:expected_search_payload) do
- {
- state: :opened,
- search: test_hash,
- in: :title,
- per_page: 1
- }
- end
-
- let(:find_issue_stub) { double('FindIssues') }
- let(:issue_stub) { double('Issue', title: latest_format_issue_title, web_url: 'issue_web_url') }
-
- let(:failed_test_report_line) do
- "1. #{Time.new.utc.strftime('%F')}: #{failed_test['job_url']} (#{env['CI_PIPELINE_URL']})"
- end
-
- before do
- allow(File).to receive(:open).and_call_original
- allow(File).to receive(:open).with(File.expand_path(File.join('..', '..', '..', test_file), __dir__))
- .and_return(test_file_stub)
-
- allow(FindIssues).to receive(:new).with(project: project_path, api_token: api_token).and_return(find_issue_stub)
-
- allow(creator).to receive(:categories_mapping).and_return(categories_mapping)
- allow(creator).to receive(:groups_mapping).and_return(groups_mapping)
- end
-
- context 'when no issues are found' do
- let(:create_issue_stub) { double('CreateIssue') }
- let(:expected_create_payload) do
- {
- title: latest_format_issue_title,
- description: latest_format_issue_description,
- labels: described_class::DEFAULT_LABELS.map { |label| "wip-#{label}" } + [
- "wip-#{categories_mapping['source_code_management']['label']}",
- "wip-#{groups_mapping['source_code']['label']}"
- ],
- weight: 1
- }
- end
-
- before do
- allow(find_issue_stub).to receive(:execute).with(expected_search_payload).and_return([])
- end
-
- it 'calls CreateIssue#execute(payload)' do
- expect(CreateIssue).to receive(:new).with(project: project_path, api_token: api_token)
- .and_return(create_issue_stub)
- expect(create_issue_stub).to receive(:execute).with(expected_create_payload).and_return(issue_stub)
-
- creator.upsert(failed_test)
- end
- end
-
- context 'when issues are found' do
- let(:issue_stub) do
- double('Issue', iid: 42, title: issue_title, description: issue_description, web_url: 'issue_web_url')
- end
-
- before do
- allow(find_issue_stub).to receive(:execute).with(expected_search_payload).and_return([issue_stub])
- end
-
- # This shared example can be useful if we want to test migration to a new format in the future
- shared_examples 'existing issue update' do
- let(:update_issue_stub) { double('UpdateIssue') }
- let(:expected_update_payload) do
- {
- description: latest_format_issue_description.sub(/^### Reports.*$/, '### Reports (2)') +
- "\n#{failed_test_report_line}",
- weight: 2
- }
- end
-
- it 'calls UpdateIssue#execute(payload)' do
- expect(UpdateIssue).to receive(:new).with(project: project_path, api_token: api_token)
- .and_return(update_issue_stub)
- expect(update_issue_stub).to receive(:execute).with(42, **expected_update_payload)
-
- creator.upsert(failed_test)
- end
- end
-
- context 'when issue already has the latest format' do
- let(:issue_description) { latest_format_issue_description }
- let(:issue_title) { latest_format_issue_title }
-
- it_behaves_like 'existing issue update'
- end
- end
- end
- end
-end
-# rubocop:enable RSpec/VerifiedDoubles
diff --git a/spec/serializers/admin/abuse_report_details_entity_spec.rb b/spec/serializers/admin/abuse_report_details_entity_spec.rb
index bed9775ac8c..47904a4e7e5 100644
--- a/spec/serializers/admin/abuse_report_details_entity_spec.rb
+++ b/spec/serializers/admin/abuse_report_details_entity_spec.rb
@@ -158,7 +158,6 @@ RSpec.describe Admin::AbuseReportDetailsEntity, feature_category: :insider_threa
it 'exposes the credit card' do
expect(credit_card_hash.keys).to match_array([
- :name,
:similar_records_count,
:card_matches_link
])
diff --git a/spec/serializers/ci/pipeline_entity_spec.rb b/spec/serializers/ci/pipeline_entity_spec.rb
index 7f232a08622..0fd9a12440f 100644
--- a/spec/serializers/ci/pipeline_entity_spec.rb
+++ b/spec/serializers/ci/pipeline_entity_spec.rb
@@ -2,14 +2,15 @@
require 'spec_helper'
-RSpec.describe Ci::PipelineEntity do
+RSpec.describe Ci::PipelineEntity, feature_category: :continuous_integration do
include Gitlab::Routing
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let(:request) { double('request', current_user: user) }
- let(:entity) { described_class.represent(pipeline, request: request) }
+ let(:options) { {} }
+ let(:entity) { described_class.represent(pipeline, request: request, **options) }
describe '#as_json' do
subject { entity.as_json }
@@ -255,8 +256,30 @@ RSpec.describe Ci::PipelineEntity do
project.add_maintainer(user)
end
- it 'exposes these failed builds' do
- expect(subject[:failed_builds].map { |b| b[:id] }).to contain_exactly(failed_1.id, failed_2.id)
+ # Remove with `ci_fix_performance_pipelines_json_endpoint`.
+ context 'when disable_failed_builds is true' do
+ let(:options) { { disable_failed_builds: true } }
+
+ it 'exposes the failed builds count but not the failed builds' do
+ expect(subject[:failed_builds_count]).to eq(2)
+ expect(subject).not_to have_key(:failed_builds)
+ end
+ end
+
+ context 'when disable_failed_builds is false' do
+ let(:options) { { disable_failed_builds: false } }
+
+ it 'exposes the failed builds count but not the failed builds' do
+ expect(subject[:failed_builds_count]).to eq(2)
+ expect(subject[:failed_builds].map { |b| b[:id] }).to contain_exactly(failed_1.id, failed_2.id)
+ end
+ end
+
+ context 'when disable_failed_builds is nil' do
+ it 'exposes the failed builds count and the failed builds' do
+ expect(subject[:failed_builds_count]).to eq(2)
+ expect(subject[:failed_builds].map { |b| b[:id] }).to contain_exactly(failed_1.id, failed_2.id)
+ end
end
end
diff --git a/spec/serializers/integrations/field_entity_spec.rb b/spec/serializers/integrations/field_entity_spec.rb
index 25ac0aa4911..aa503bdfcc8 100644
--- a/spec/serializers/integrations/field_entity_spec.rb
+++ b/spec/serializers/integrations/field_entity_spec.rb
@@ -103,7 +103,7 @@ RSpec.describe Integrations::FieldEntity, feature_category: :integrations do
['Default branch and protected branches', 'default_and_protected']
],
help: nil,
- value: nil,
+ value: 'all',
checkbox_label: nil
}
diff --git a/spec/serializers/issue_board_entity_spec.rb b/spec/serializers/issue_board_entity_spec.rb
index 6042dea249f..04c283ddc2c 100644
--- a/spec/serializers/issue_board_entity_spec.rb
+++ b/spec/serializers/issue_board_entity_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe IssueBoardEntity do
let(:resource) { create(:issue, :task, project: project) }
it 'has a work item path with iid' do
- expect(subject[:real_path]).to eq(project_work_items_path(project, resource.iid))
+ expect(subject[:real_path]).to eq(project_work_item_path(project, resource.iid))
end
end
end
diff --git a/spec/serializers/issue_entity_spec.rb b/spec/serializers/issue_entity_spec.rb
index 38c81257a7d..a8fd96a03bb 100644
--- a/spec/serializers/issue_entity_spec.rb
+++ b/spec/serializers/issue_entity_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe IssueEntity do
# This was already a path and not a url when the work items change was introduced
it 'has a work item path with iid' do
- expect(subject[:web_url]).to eq(project_work_items_path(project, resource.iid))
+ expect(subject[:web_url]).to eq(project_work_item_path(project, resource.iid))
end
end
end
diff --git a/spec/serializers/linked_project_issue_entity_spec.rb b/spec/serializers/linked_project_issue_entity_spec.rb
index 2f7fb912115..070ddda2a8b 100644
--- a/spec/serializers/linked_project_issue_entity_spec.rb
+++ b/spec/serializers/linked_project_issue_entity_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe LinkedProjectIssueEntity do
it 'returns a work items path using iid' do
expect(serialized_entity).to include(
- path: project_work_items_path(related_issue.project, related_issue.iid)
+ path: project_work_item_path(related_issue.project, related_issue.iid)
)
end
end
diff --git a/spec/serializers/project_import_entity_spec.rb b/spec/serializers/project_import_entity_spec.rb
index 521d0127dbb..a2f895219be 100644
--- a/spec/serializers/project_import_entity_spec.rb
+++ b/spec/serializers/project_import_entity_spec.rb
@@ -39,16 +39,6 @@ RSpec.describe ProjectImportEntity, feature_category: :importers do
it 'includes relation_type' do
expect(subject[:relation_type]).to eq('owned')
end
-
- context 'with remove_legacy_github_client FF is disabled' do
- before do
- stub_feature_flags(remove_legacy_github_client: false)
- end
-
- it "doesn't include relation_type" do
- expect(subject[:relation_type]).to eq(nil)
- end
- end
end
context 'when import is failed' do
diff --git a/spec/services/achievements/update_user_achievement_priorities_service_spec.rb b/spec/services/achievements/update_user_achievement_priorities_service_spec.rb
new file mode 100644
index 00000000000..a020bf9770e
--- /dev/null
+++ b/spec/services/achievements/update_user_achievement_priorities_service_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Achievements::UpdateUserAchievementPrioritiesService, feature_category: :user_profile do
+ describe '#execute' do
+ let_it_be(:achievement_owner) { create(:user) }
+ let_it_be(:group) { create(:group) }
+
+ let_it_be(:achievement) { create(:achievement, namespace: group) }
+
+ let!(:user_achievement1) do
+ create(:user_achievement, achievement: achievement, user: achievement_owner, priority: 0)
+ end
+
+ let_it_be(:user_achievement2) { create(:user_achievement, achievement: achievement, user: achievement_owner) }
+ let_it_be(:user_achievement3) { create(:user_achievement, achievement: achievement, user: achievement_owner) }
+
+ subject(:response) { described_class.new(current_user, user_achievements).execute }
+
+ context 'when user does not have permission' do
+ let(:current_user) { create(:user) }
+ let(:user_achievements) { [user_achievement1] }
+
+ it 'returns an error', :aggregate_failures do
+ expect(response).to be_error
+ expect(response.message).to match_array(["You can't update at least one of the given user achievements."])
+ end
+ end
+
+ context 'when user has permission' do
+ let_it_be_with_reload(:current_user) { achievement_owner }
+
+ context 'with empty input' do
+ let(:user_achievements) { [] }
+
+ it 'removes all priorities', :aggregate_failures do
+ expect(response).to be_success
+
+ [user_achievement1, user_achievement2, user_achievement3].each do |ua|
+ expect(ua.reload.priority).to be_nil
+ end
+ end
+ end
+
+ context 'with prioritised achievements' do
+ let(:user_achievements) { [user_achievement3, user_achievement1] }
+
+ it 're-orders the achievements correctly', :aggregate_failures do
+ expect(response).to be_success
+
+ expect(user_achievement1.reload.priority).to eq(1)
+ expect(user_achievement2.reload.priority).to be_nil
+ expect(user_achievement3.reload.priority).to be_zero
+ end
+ end
+
+ context 'when no achievement is prioritized and no prioritizations are made' do
+ let!(:user_achievement1) { create(:user_achievement, achievement: achievement, user: achievement_owner) }
+
+ let(:user_achievements) { [] }
+
+ it 'works without errors', :aggregate_failures do
+ expect(response).to be_success
+
+ [user_achievement1, user_achievement2, user_achievement3].each do |ua|
+ expect(ua.reload.priority).to be_nil
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/admin/abuse_reports/moderate_user_service_spec.rb b/spec/services/admin/abuse_reports/moderate_user_service_spec.rb
index 7e08db2b612..3b80d3276a2 100644
--- a/spec/services/admin/abuse_reports/moderate_user_service_spec.rb
+++ b/spec/services/admin/abuse_reports/moderate_user_service_spec.rb
@@ -210,6 +210,43 @@ RSpec.describe Admin::AbuseReports::ModerateUserService, feature_category: :inst
end
end
+ describe 'when trusting the user' do
+ let(:action) { 'trust_user' }
+
+ it 'calls the Users::TrustService method' do
+ expect_next_instance_of(Users::TrustService, admin) do |service|
+ expect(service).to receive(:execute).with(abuse_report.user).and_return(status: :success)
+ end
+
+ subject
+ end
+
+ context 'when not closing the report' do
+ let(:close) { false }
+
+ it_behaves_like 'does not close the report'
+ it_behaves_like 'records an event', action: 'trust_user'
+ end
+
+ context 'when closing the report' do
+ it_behaves_like 'closes the report'
+ it_behaves_like 'records an event', action: 'trust_user_and_close_report'
+ end
+
+ context 'when trusting the user fails' do
+ before do
+ allow_next_instance_of(Users::TrustService) do |service|
+ allow(service).to receive(:execute).with(abuse_report.user)
+ .and_return(status: :error, message: 'Trusting the user failed')
+ end
+ end
+
+ it_behaves_like 'returns an error response', 'Trusting the user failed'
+ it_behaves_like 'does not close the report'
+ it_behaves_like 'does not record an event'
+ end
+ end
+
describe 'when only closing the report' do
let(:action) { '' }
diff --git a/spec/services/audit_events/build_service_spec.rb b/spec/services/audit_events/build_service_spec.rb
index 575ec9e58b8..d5a3d1bbaf7 100644
--- a/spec/services/audit_events/build_service_spec.rb
+++ b/spec/services/audit_events/build_service_spec.rb
@@ -129,25 +129,25 @@ RSpec.describe AuditEvents::BuildService, feature_category: :audit_events do
context 'when author is missing' do
let(:author) { nil }
- it { expect { service }.to raise_error(described_class::MissingAttributeError) }
+ it { expect { service }.to raise_error(described_class::MissingAttributeError, "author") }
end
context 'when scope is missing' do
let(:scope) { nil }
- it { expect { service }.to raise_error(described_class::MissingAttributeError) }
+ it { expect { service }.to raise_error(described_class::MissingAttributeError, "scope") }
end
context 'when target is missing' do
let(:target) { nil }
- it { expect { service }.to raise_error(described_class::MissingAttributeError) }
+ it { expect { service }.to raise_error(described_class::MissingAttributeError, "target") }
end
context 'when message is missing' do
let(:message) { nil }
- it { expect { service }.to raise_error(described_class::MissingAttributeError) }
+ it { expect { service }.to raise_error(described_class::MissingAttributeError, "message") }
end
end
end
diff --git a/spec/services/auto_merge/base_service_spec.rb b/spec/services/auto_merge/base_service_spec.rb
index be5b753f484..8cd33f8ff1e 100644
--- a/spec/services/auto_merge/base_service_spec.rb
+++ b/spec/services/auto_merge/base_service_spec.rb
@@ -301,4 +301,45 @@ RSpec.describe AutoMerge::BaseService, feature_category: :code_review_workflow d
specify { expect(service).to respond_to :process }
specify { expect { service.process(nil) }.to raise_error NotImplementedError }
end
+
+ describe '#available_for?' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject(:available_for) { service.available_for?(merge_request) { true } }
+
+ let(:merge_request) { create(:merge_request) }
+
+ where(:can_be_merged, :open, :broken, :discussions, :blocked, :draft, :skip_draft, :skip_blocked,
+ :skip_discussions, :result) do
+ true | true | false | true | false | false | false | false | false | true
+ true | true | false | true | false | false | true | true | false | true
+ true | true | false | true | false | true | true | false | false | true
+ true | true | false | true | true | false | false | true | false | true
+ true | true | false | false | false | false | false | false | true | true
+ true | true | false | true | false | true | false | false | false | false
+ false | true | false | true | false | false | false | false | false | false
+ true | false | false | true | false | false | false | false | false | false
+ true | true | true | true | false | false | false | false | false | false
+ true | true | false | false | false | false | false | false | false | false
+ true | true | false | true | true | false | false | false | false | false
+ end
+
+ with_them do
+ before do
+ allow(service).to receive(:skip_draft_check).and_return(skip_draft)
+ allow(service).to receive(:skip_blocked_check).and_return(skip_blocked)
+ allow(service).to receive(:skip_discussions_check).and_return(skip_discussions)
+ allow(merge_request).to receive(:can_be_merged_by?).and_return(can_be_merged)
+ allow(merge_request).to receive(:open?).and_return(open)
+ allow(merge_request).to receive(:broken?).and_return(broken)
+ allow(merge_request).to receive(:draft?).and_return(draft)
+ allow(merge_request).to receive(:mergeable_discussions_state?).and_return(discussions)
+ allow(merge_request).to receive(:merge_blocked_by_other_mrs?).and_return(blocked)
+ end
+
+ it 'returns the expected results' do
+ expect(available_for).to eq(result)
+ end
+ end
+ end
end
diff --git a/spec/services/bulk_imports/file_download_service_spec.rb b/spec/services/bulk_imports/file_download_service_spec.rb
index 2197b0b4fac..1734ea45507 100644
--- a/spec/services/bulk_imports/file_download_service_spec.rb
+++ b/spec/services/bulk_imports/file_download_service_spec.rb
@@ -76,7 +76,7 @@ RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
allowed_content_types: allowed_content_types
)
- expect { service.execute }.to raise_error(Gitlab::UrlBlocker::BlockedUrlError)
+ expect { service.execute }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
end
end
diff --git a/spec/services/bulk_imports/process_service_spec.rb b/spec/services/bulk_imports/process_service_spec.rb
new file mode 100644
index 00000000000..5398e76cb67
--- /dev/null
+++ b/spec/services/bulk_imports/process_service_spec.rb
@@ -0,0 +1,325 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::ProcessService, feature_category: :importers do
+ describe '#execute' do
+ let_it_be_with_reload(:bulk_import) { create(:bulk_import) }
+
+ subject { described_class.new(bulk_import) }
+
+ context 'when no bulk import is found' do
+ let(:bulk_import) { nil }
+
+ it 'does nothing' do
+ expect(described_class).not_to receive(:process_bulk_import)
+ subject.execute
+ end
+ end
+
+ context 'when bulk import is finished' do
+ it 'does nothing' do
+ bulk_import.update!(status: 2)
+
+ expect(described_class).not_to receive(:process_bulk_import)
+ subject.execute
+ end
+ end
+
+ context 'when bulk import is failed' do
+ it 'does nothing' do
+ bulk_import.update!(status: -1)
+
+ expect(described_class).not_to receive(:process_bulk_import)
+ subject.execute
+ end
+ end
+
+ context 'when bulk import has timed out' do
+ it 'does nothing' do
+ bulk_import.update!(status: 3)
+
+ expect(described_class).not_to receive(:process_bulk_import)
+ subject.execute
+ end
+ end
+
+ context 'when all entities are processed' do
+ it 'marks bulk import as finished' do
+ bulk_import.update!(status: 1)
+ create(:bulk_import_entity, :finished, bulk_import: bulk_import)
+ create(:bulk_import_entity, :failed, bulk_import: bulk_import)
+
+ subject.execute
+
+ expect(bulk_import.reload.finished?).to eq(true)
+ end
+ end
+
+ context 'when all entities are failed' do
+ it 'marks bulk import as failed' do
+ bulk_import.update!(status: 1)
+ create(:bulk_import_entity, :failed, bulk_import: bulk_import)
+ create(:bulk_import_entity, :failed, bulk_import: bulk_import)
+
+ subject.execute
+
+ expect(bulk_import.reload.failed?).to eq(true)
+ end
+ end
+
+ context 'when maximum allowed number of import entities in progress' do
+ it 're-enqueues itself' do
+ bulk_import.update!(status: 1)
+ create(:bulk_import_entity, :created, bulk_import: bulk_import)
+ (described_class::DEFAULT_BATCH_SIZE + 1).times do
+ create(:bulk_import_entity, :started, bulk_import: bulk_import)
+ end
+
+ expect(BulkImportWorker).to receive(:perform_in).with(described_class::PERFORM_DELAY, bulk_import.id)
+ expect(BulkImports::ExportRequestWorker).not_to receive(:perform_async)
+
+ subject.execute
+ end
+ end
+
+ context 'when bulk import is created' do
+ it 'marks bulk import as started' do
+ create(:bulk_import_entity, :created, bulk_import: bulk_import)
+
+ subject.execute
+
+ expect(bulk_import.reload.started?).to eq(true)
+ end
+
+ it 'creates all the required pipeline trackers' do
+ entity_1 = create(:bulk_import_entity, :created, bulk_import: bulk_import)
+ entity_2 = create(:bulk_import_entity, :created, bulk_import: bulk_import)
+
+ expect { subject.execute }
+ .to change { BulkImports::Tracker.count }
+ .by(BulkImports::Groups::Stage.new(entity_1).pipelines.size * 2)
+
+ expect(entity_1.trackers).not_to be_empty
+ expect(entity_2.trackers).not_to be_empty
+ end
+
+ context 'when there are created entities to process' do
+ before do
+ stub_const("#{described_class}::DEFAULT_BATCH_SIZE", 1)
+ end
+
+ it 'marks a batch of entities as started, enqueues EntityWorker, ExportRequestWorker and reenqueues' do
+ create(:bulk_import_entity, :created, bulk_import: bulk_import)
+ create(:bulk_import_entity, :created, bulk_import: bulk_import)
+
+ expect(BulkImportWorker).to receive(:perform_in).with(described_class::PERFORM_DELAY, bulk_import.id)
+ expect(BulkImports::ExportRequestWorker).to receive(:perform_async).once
+
+ subject.execute
+
+ bulk_import.reload
+
+ expect(bulk_import.entities.map(&:status_name)).to contain_exactly(:created, :started)
+ end
+
+ context 'when there are project entities to process' do
+ it 'enqueues ExportRequestWorker' do
+ create(:bulk_import_entity, :created, :project_entity, bulk_import: bulk_import)
+
+ expect(BulkImports::ExportRequestWorker).to receive(:perform_async).once
+
+ subject.execute
+ end
+ end
+ end
+
+ context 'when exception occurs' do
+ it 'tracks the exception & marks import as failed' do
+ create(:bulk_import_entity, :created, bulk_import: bulk_import)
+
+ allow(BulkImports::ExportRequestWorker).to receive(:perform_async).and_raise(StandardError)
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ kind_of(StandardError),
+ bulk_import_id: bulk_import.id
+ )
+
+ subject.execute
+
+ expect(bulk_import.reload.failed?).to eq(true)
+ end
+ end
+ end
+
+ context 'when importing a group' do
+ it 'creates trackers for group entity' do
+ entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
+
+ subject.execute
+
+ expect(entity.trackers.to_a).to include(
+ have_attributes(
+ stage: 0, status_name: :created, relation: BulkImports::Groups::Pipelines::GroupPipeline.to_s
+ ),
+ have_attributes(
+ stage: 1, status_name: :created, relation: BulkImports::Groups::Pipelines::GroupAttributesPipeline.to_s
+ )
+ )
+ end
+ end
+
+ context 'when importing a project' do
+ it 'creates trackers for project entity' do
+ entity = create(:bulk_import_entity, :project_entity, bulk_import: bulk_import)
+
+ subject.execute
+
+ expect(entity.trackers.to_a).to include(
+ have_attributes(
+ stage: 0, status_name: :created, relation: BulkImports::Projects::Pipelines::ProjectPipeline.to_s
+ ),
+ have_attributes(
+ stage: 1, status_name: :created, relation: BulkImports::Projects::Pipelines::RepositoryPipeline.to_s
+ )
+ )
+ end
+ end
+
+ context 'when tracker configuration has a minimum version defined' do
+ before do
+ allow_next_instance_of(BulkImports::Groups::Stage) do |stage|
+ allow(stage).to receive(:config).and_return(
+ {
+ pipeline1: { pipeline: 'PipelineClass1', stage: 0 },
+ pipeline2: { pipeline: 'PipelineClass2', stage: 1, minimum_source_version: '14.10.0' },
+ pipeline3: { pipeline: 'PipelineClass3', stage: 1, minimum_source_version: '15.0.0' },
+ pipeline5: { pipeline: 'PipelineClass4', stage: 1, minimum_source_version: '15.1.0' },
+ pipeline6: { pipeline: 'PipelineClass5', stage: 1, minimum_source_version: '16.0.0' }
+ }
+ )
+ end
+ end
+
+ context 'when the source instance version is older than the tracker minimum version' do
+ let_it_be(:entity) { create(:bulk_import_entity, :group_entity, bulk_import: bulk_import) }
+
+ before do
+ bulk_import.update!(source_version: '15.0.0')
+ end
+
+ it 'creates trackers as skipped if version requirement is not met' do
+ subject.execute
+
+ expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
+ [:created, 'PipelineClass1'],
+ [:created, 'PipelineClass2'],
+ [:created, 'PipelineClass3'],
+ [:skipped, 'PipelineClass4'],
+ [:skipped, 'PipelineClass5']
+ )
+ end
+
+ it 'logs an info message for the skipped pipelines' do
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger).to receive(:info).with(
+ message: 'Pipeline skipped as source instance version not compatible with pipeline',
+ bulk_import_entity_id: entity.id,
+ bulk_import_id: entity.bulk_import_id,
+ bulk_import_entity_type: entity.source_type,
+ source_full_path: entity.source_full_path,
+ importer: 'gitlab_migration',
+ pipeline_name: 'PipelineClass4',
+ minimum_source_version: '15.1.0',
+ maximum_source_version: nil,
+ source_version: '15.0.0'
+ )
+
+ expect(logger).to receive(:info).with(
+ message: 'Pipeline skipped as source instance version not compatible with pipeline',
+ bulk_import_entity_id: entity.id,
+ bulk_import_id: entity.bulk_import_id,
+ bulk_import_entity_type: entity.source_type,
+ source_full_path: entity.source_full_path,
+ importer: 'gitlab_migration',
+ pipeline_name: 'PipelineClass5',
+ minimum_source_version: '16.0.0',
+ maximum_source_version: nil,
+ source_version: '15.0.0'
+ )
+ end
+
+ subject.execute
+ end
+ end
+
+ context 'when the source instance version is undefined' do
+ it 'creates trackers as created' do
+ bulk_import.update!(source_version: nil)
+ entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
+
+ subject.execute
+
+ expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
+ [:created, 'PipelineClass1'],
+ [:created, 'PipelineClass2'],
+ [:created, 'PipelineClass3'],
+ [:created, 'PipelineClass4'],
+ [:created, 'PipelineClass5']
+ )
+ end
+ end
+ end
+
+ context 'when tracker configuration has a maximum version defined' do
+ before do
+ allow_next_instance_of(BulkImports::Groups::Stage) do |stage|
+ allow(stage).to receive(:config).and_return(
+ {
+ pipeline1: { pipeline: 'PipelineClass1', stage: 0 },
+ pipeline2: { pipeline: 'PipelineClass2', stage: 1, maximum_source_version: '14.10.0' },
+ pipeline3: { pipeline: 'PipelineClass3', stage: 1, maximum_source_version: '15.0.0' },
+ pipeline5: { pipeline: 'PipelineClass4', stage: 1, maximum_source_version: '15.1.0' },
+ pipeline6: { pipeline: 'PipelineClass5', stage: 1, maximum_source_version: '16.0.0' }
+ }
+ )
+ end
+ end
+
+ context 'when the source instance version is newer than the tracker maximum version' do
+ it 'creates trackers as skipped if version requirement is not met' do
+ bulk_import.update!(source_version: '15.0.0')
+ entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
+
+ subject.execute
+
+ expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
+ [:created, 'PipelineClass1'],
+ [:skipped, 'PipelineClass2'],
+ [:created, 'PipelineClass3'],
+ [:created, 'PipelineClass4'],
+ [:created, 'PipelineClass5']
+ )
+ end
+ end
+
+ context 'when the source instance version is a patch version' do
+ it 'creates trackers with the same status as the non-patch source version' do
+ bulk_import_1 = create(:bulk_import, source_version: '15.0.1')
+ entity_1 = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import_1)
+
+ bulk_import_2 = create(:bulk_import, source_version: '15.0.0')
+ entity_2 = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import_2)
+
+ described_class.new(bulk_import_1).execute
+ described_class.new(bulk_import_2).execute
+
+ trackers_1 = entity_1.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }
+ trackers_2 = entity_2.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }
+
+ expect(trackers_1).to eq(trackers_2)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/bulk_imports/relation_batch_export_service_spec.rb b/spec/services/bulk_imports/relation_batch_export_service_spec.rb
index 09f55f14a96..8548e01a6aa 100644
--- a/spec/services/bulk_imports/relation_batch_export_service_spec.rb
+++ b/spec/services/bulk_imports/relation_batch_export_service_spec.rb
@@ -45,6 +45,20 @@ RSpec.describe BulkImports::RelationBatchExportService, feature_category: :impor
service.execute
end
+ context 'when relation is empty and there is nothing to export' do
+ let_it_be(:export) { create(:bulk_import_export, :batched, project: project, relation: 'milestones') }
+ let_it_be(:batch) { create(:bulk_import_export_batch, export: export) }
+
+ it 'creates empty file on disk' do
+ allow(subject).to receive(:export_path).and_return('foo')
+ allow(FileUtils).to receive(:remove_entry)
+
+ expect(FileUtils).to receive(:touch).with('foo/milestones.ndjson')
+
+ subject.execute
+ end
+ end
+
context 'when exception occurs' do
before do
allow(service).to receive(:gzip).and_raise(StandardError, 'Error!')
diff --git a/spec/services/bulk_imports/relation_export_service_spec.rb b/spec/services/bulk_imports/relation_export_service_spec.rb
index 1c050fe4143..bd8447e3d97 100644
--- a/spec/services/bulk_imports/relation_export_service_spec.rb
+++ b/spec/services/bulk_imports/relation_export_service_spec.rb
@@ -13,10 +13,12 @@ RSpec.describe BulkImports::RelationExportService, feature_category: :importers
let_it_be_with_reload(:export) { create(:bulk_import_export, group: group, relation: relation) }
before do
+ FileUtils.mkdir_p(export_path)
+
group.add_owner(user)
project.add_maintainer(user)
- allow(export).to receive(:export_path).and_return(export_path)
+ allow(subject).to receive(:export_path).and_return(export_path)
end
after :all do
@@ -53,6 +55,16 @@ RSpec.describe BulkImports::RelationExportService, feature_category: :importers
expect(export.upload.export_file).to be_present
end
+ context 'when relation is empty and there is nothing to export' do
+ let(:relation) { 'milestones' }
+
+ it 'creates empty file on disk' do
+ expect(FileUtils).to receive(:touch).with("#{export_path}/#{relation}.ndjson")
+
+ subject.execute
+ end
+ end
+
context 'when exporting a file relation' do
it 'uses file export service' do
service = described_class.new(user, project, 'uploads', jid)
diff --git a/spec/services/chat_names/find_user_service_spec.rb b/spec/services/chat_names/find_user_service_spec.rb
index 14bece4efb4..94a56553983 100644
--- a/spec/services/chat_names/find_user_service_spec.rb
+++ b/spec/services/chat_names/find_user_service_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe ChatNames::FindUserService, :clean_gitlab_redis_shared_state, fea
context 'find user mapping' do
let_it_be(:user) { create(:user) }
- let_it_be(:chat_name) { create(:chat_name, user: user) }
+ let(:chat_name) { create(:chat_name, user: user) }
let(:team_id) { chat_name.team_id }
let(:user_id) { chat_name.chat_id }
@@ -19,26 +19,20 @@ RSpec.describe ChatNames::FindUserService, :clean_gitlab_redis_shared_state, fea
end
it 'updates the last used timestamp if one is not already set' do
- expect(chat_name.last_used_at).to be_nil
-
- subject
-
- expect(chat_name.reload.last_used_at).to be_present
+ expect { subject }.to change { chat_name.reload.last_used_at }.from(nil)
end
it 'only updates an existing timestamp once within a certain time frame' do
- chat_name = create(:chat_name, user: user)
- service = described_class.new(team_id, user_id)
-
- expect(chat_name.last_used_at).to be_nil
-
- service.execute
-
- time = chat_name.reload.last_used_at
+ expect { described_class.new(team_id, user_id).execute }.to change { chat_name.reload.last_used_at }.from(nil)
+ expect { described_class.new(team_id, user_id).execute }.not_to change { chat_name.reload.last_used_at }
+ end
- service.execute
+ it 'records activity for the related user' do
+ expect_next_instance_of(Users::ActivityService, author: user) do |activity_service|
+ expect(activity_service).to receive(:execute)
+ end
- expect(chat_name.reload.last_used_at).to eq(time)
+ subject
end
end
diff --git a/spec/services/ci/catalog/validate_resource_service_spec.rb b/spec/services/ci/catalog/resources/validate_service_spec.rb
index 3bee37b7e55..b43d98788e2 100644
--- a/spec/services/ci/catalog/validate_resource_service_spec.rb
+++ b/spec/services/ci/catalog/resources/validate_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::Catalog::ValidateResourceService, feature_category: :pipeline_composition do
+RSpec.describe Ci::Catalog::Resources::ValidateService, feature_category: :pipeline_composition do
describe '#execute' do
context 'with a project that has a README and a description' do
it 'is valid' do
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index a28dd9e7a55..11f9708f9f3 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -1953,6 +1953,32 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
expect(pipeline.statuses.count).to eq 2
expect(pipeline.statuses.map(&:name)).to match_array %w[test-1 test-my-job]
end
+
+ context 'when inputs have a description' do
+ let(:template) do
+ <<~YAML
+ spec:
+ inputs:
+ stage:
+ suffix:
+ default: my-job
+ description: description
+ ---
+ test-$[[ inputs.suffix ]]:
+ stage: $[[ inputs.stage ]]
+ script: run tests
+ YAML
+ end
+
+ it 'creates a pipeline' do
+ response = execute_service(save_on_errors: true)
+
+ pipeline = response.payload
+
+ expect(pipeline).to be_persisted
+ expect(pipeline.yaml_errors).to be_blank
+ end
+ end
end
context 'when interpolation is invalid' do
diff --git a/spec/services/ci/delete_objects_service_spec.rb b/spec/services/ci/delete_objects_service_spec.rb
index 939b72cef3b..f9fc2316595 100644
--- a/spec/services/ci/delete_objects_service_spec.rb
+++ b/spec/services/ci/delete_objects_service_spec.rb
@@ -47,8 +47,8 @@ RSpec.describe Ci::DeleteObjectsService, :aggregate_failures, feature_category:
context 'with artifacts both ready and not ready for deletion' do
let(:data) { [] }
- let_it_be(:past_ready) { create(:ci_deleted_object, pick_up_at: 2.days.ago) }
- let_it_be(:ready) { create(:ci_deleted_object, pick_up_at: 1.day.ago) }
+ let!(:past_ready) { create(:ci_deleted_object, pick_up_at: 2.days.ago) }
+ let!(:ready) { create(:ci_deleted_object, pick_up_at: 1.day.ago) }
it 'skips records with pick_up_at in the future' do
not_ready = create(:ci_deleted_object, pick_up_at: 1.day.from_now)
diff --git a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
index cdbb0c0f8ce..c060c72ffb2 100644
--- a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
@@ -78,7 +78,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
end
- context 'when the project in which the arfifact belongs to is undergoing stats refresh' do
+ context 'when the project in which the artifact belongs to is undergoing stats refresh' do
before do
create(:project_build_artifacts_size_refresh, :pending, project: artifact.project)
end
diff --git a/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb b/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
index fffac0fd64b..a5dda1d13aa 100644
--- a/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
+++ b/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
@@ -267,230 +267,6 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
end
end
- context 'when the use_offset_pagination_for_canceling_redundant_pipelines FF is off' do
- # copy-paste from above
-
- before do
- stub_feature_flags(use_offset_pagination_for_canceling_redundant_pipelines: false)
- end
-
- describe '#execute!' do
- subject(:execute) { service.execute }
-
- context 'when build statuses are set up correctly' do
- it 'has builds of all statuses' do
- expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
- expect(build_statuses(pipeline)).to contain_exactly('pending')
- end
- end
-
- context 'when auto-cancel is enabled' do
- before do
- project.update!(auto_cancel_pending_pipelines: 'enabled')
- end
-
- it 'cancels only previous interruptible builds' do
- execute
-
- expect(build_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'canceled')
- expect(build_statuses(pipeline)).to contain_exactly('pending')
- end
-
- it 'logs canceled pipelines' do
- allow(Gitlab::AppLogger).to receive(:info)
-
- execute
-
- expect(Gitlab::AppLogger).to have_received(:info).with(
- class: described_class.name,
- message: "Pipeline #{pipeline.id} auto-canceling pipeline #{prev_pipeline.id}",
- canceled_pipeline_id: prev_pipeline.id,
- canceled_by_pipeline_id: pipeline.id,
- canceled_by_pipeline_source: pipeline.source
- )
- end
-
- context 'when the previous pipeline has a child pipeline' do
- let(:child_pipeline) { create(:ci_pipeline, child_of: prev_pipeline) }
-
- context 'with another nested child pipeline' do
- let(:another_child_pipeline) { create(:ci_pipeline, child_of: child_pipeline) }
-
- before do
- create(:ci_build, :interruptible, :running, pipeline: another_child_pipeline)
- create(:ci_build, :interruptible, :running, pipeline: another_child_pipeline)
- end
-
- it 'cancels all nested child pipeline builds' do
- expect(build_statuses(another_child_pipeline)).to contain_exactly('running', 'running')
-
- execute
-
- expect(build_statuses(another_child_pipeline)).to contain_exactly('canceled', 'canceled')
- end
- end
-
- context 'when started after pipeline was finished' do
- before do
- create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
- prev_pipeline.update!(status: "success")
- end
-
- it 'cancels child pipeline builds' do
- expect(build_statuses(child_pipeline)).to contain_exactly('running')
-
- execute
-
- expect(build_statuses(child_pipeline)).to contain_exactly('canceled')
- end
- end
-
- context 'when the child pipeline has interruptible running jobs' do
- before do
- create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
- create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
- end
-
- it 'cancels all child pipeline builds' do
- expect(build_statuses(child_pipeline)).to contain_exactly('running', 'running')
-
- execute
-
- expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'canceled')
- end
-
- context 'when the child pipeline includes completed interruptible jobs' do
- before do
- create(:ci_build, :interruptible, :failed, pipeline: child_pipeline)
- create(:ci_build, :interruptible, :success, pipeline: child_pipeline)
- end
-
- it 'cancels all child pipeline builds with a cancelable_status' do
- expect(build_statuses(child_pipeline)).to contain_exactly('running', 'running', 'failed', 'success')
-
- execute
-
- expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'canceled', 'failed', 'success')
- end
- end
- end
-
- context 'when the child pipeline has started non-interruptible job' do
- before do
- create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
- # non-interruptible started
- create(:ci_build, :success, pipeline: child_pipeline)
- end
-
- it 'does not cancel any child pipeline builds' do
- expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
-
- execute
-
- expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
- end
- end
-
- context 'when the child pipeline has non-interruptible non-started job' do
- before do
- create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
- end
-
- not_started_statuses = Ci::HasStatus::AVAILABLE_STATUSES - Ci::HasStatus::STARTED_STATUSES
- context 'when the jobs are cancelable' do
- cancelable_not_started_statuses =
- Set.new(not_started_statuses).intersection(Ci::HasStatus::CANCELABLE_STATUSES)
- cancelable_not_started_statuses.each do |status|
- it "cancels all child pipeline builds when build status #{status} included" do
- # non-interruptible but non-started
- create(:ci_build, status.to_sym, pipeline: child_pipeline)
-
- expect(build_statuses(child_pipeline)).to contain_exactly('running', status)
-
- execute
-
- expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'canceled')
- end
- end
- end
-
- context 'when the jobs are not cancelable' do
- not_cancelable_not_started_statuses = not_started_statuses - Ci::HasStatus::CANCELABLE_STATUSES
- not_cancelable_not_started_statuses.each do |status|
- it "does not cancel child pipeline builds when build status #{status} included" do
- # non-interruptible but non-started
- create(:ci_build, status.to_sym, pipeline: child_pipeline)
-
- expect(build_statuses(child_pipeline)).to contain_exactly('running', status)
-
- execute
-
- expect(build_statuses(child_pipeline)).to contain_exactly('canceled', status)
- end
- end
- end
- end
- end
-
- context 'when the pipeline is a child pipeline' do
- let!(:parent_pipeline) { create(:ci_pipeline, project: project, sha: new_commit.sha) }
- let(:pipeline) { create(:ci_pipeline, child_of: parent_pipeline) }
-
- before do
- create(:ci_build, :interruptible, :running, pipeline: parent_pipeline)
- create(:ci_build, :interruptible, :running, pipeline: parent_pipeline)
- end
-
- it 'does not cancel any builds' do
- expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
- expect(build_statuses(parent_pipeline)).to contain_exactly('running', 'running')
-
- execute
-
- expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
- expect(build_statuses(parent_pipeline)).to contain_exactly('running', 'running')
- end
- end
-
- context 'when the previous pipeline source is webide' do
- let(:prev_pipeline) { create(:ci_pipeline, :webide, project: project) }
-
- it 'does not cancel builds of the previous pipeline' do
- execute
-
- expect(build_statuses(prev_pipeline)).to contain_exactly('created', 'running', 'success')
- expect(build_statuses(pipeline)).to contain_exactly('pending')
- end
- end
-
- it 'does not cancel future pipelines' do
- expect(prev_pipeline.id).to be < pipeline.id
- expect(build_statuses(pipeline)).to contain_exactly('pending')
- expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
-
- described_class.new(prev_pipeline).execute
-
- expect(build_statuses(pipeline.reload)).to contain_exactly('pending')
- end
-
- it_behaves_like 'time limits pipeline cancellation'
- end
-
- context 'when auto-cancel is disabled' do
- before do
- project.update!(auto_cancel_pending_pipelines: 'disabled')
- end
-
- it 'does not cancel any build' do
- subject
-
- expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
- expect(build_statuses(pipeline)).to contain_exactly('pending')
- end
- end
- end
- end
-
private
def build_statuses(pipeline)
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
index 93dc9481bf0..88ccda90df0 100644
--- a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
@@ -1247,6 +1247,124 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService, feature_category
end
end
+ describe 'deployments creation' do
+ let(:config) do
+ <<-YAML
+ stages: [stage-0, stage-1, stage-2, stage-3, stage-4]
+
+ test:
+ stage: stage-0
+ script: exit 0
+
+ review:
+ stage: stage-1
+ environment:
+ name: review
+ action: start
+ script: exit 0
+
+ staging:
+ stage: stage-2
+ environment:
+ name: staging
+ action: start
+ script: exit 0
+ when: manual
+ allow_failure: false
+
+ canary:
+ stage: stage-3
+ environment:
+ name: canary
+ action: start
+ script: exit 0
+ when: manual
+
+ production-a:
+ stage: stage-4
+ environment:
+ name: production-a
+ action: start
+ script: exit 0
+ when: manual
+
+ production-b:
+ stage: stage-4
+ environment:
+ name: production-b
+ action: start
+ script: exit 0
+ when: manual
+ needs: [canary]
+ YAML
+ end
+
+ let(:pipeline) do
+ Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload
+ end
+
+ let(:test_job) { all_builds.find_by(name: 'test') }
+ let(:review_deploy_job) { all_builds.find_by(name: 'review') }
+ let(:staging_deploy_job) { all_builds.find_by(name: 'staging') }
+ let(:canary_deploy_job) { all_builds.find_by(name: 'canary') }
+ let(:production_a_deploy_job) { all_builds.find_by(name: 'production-a') }
+ let(:production_b_deploy_job) { all_builds.find_by(name: 'production-b') }
+
+ before do
+ create(:environment, name: 'review', project: project)
+ create(:environment, name: 'staging', project: project)
+ create(:environment, name: 'canary', project: project)
+ create(:environment, name: 'production-a', project: project)
+ create(:environment, name: 'production-b', project: project)
+
+ stub_ci_pipeline_yaml_file(config)
+ pipeline # create the pipeline
+ end
+
+ it 'creates deployment records for the deploy jobs', :aggregate_failures do
+ # processes the 'test' job, not creating a Deployment record
+ expect { process_pipeline }.not_to change { Deployment.count }
+ succeed_pending
+ expect(test_job.status).to eq 'success'
+
+ # processes automatic 'review' deploy job, creating a Deployment record
+ expect { process_pipeline }.to change { Deployment.count }.by(1)
+ succeed_pending
+ expect(review_deploy_job.status).to eq 'success'
+
+ # processes manual 'staging' deploy job, creating a Deployment record
+ # the subsequent manual deploy jobs ('canary', 'production-a', 'production-b')
+ # are not yet processed because 'staging' is set as `allow_failure: false`
+ expect { process_pipeline }.to change { Deployment.count }.by(1)
+ play_manual_action('staging')
+ succeed_pending
+ expect(staging_deploy_job.reload.status).to eq 'success'
+
+ # processes manual 'canary' deployment job
+ # the subsequent manual deploy jobs ('production-a' and 'production-b')
+ # are also processed because 'canary' is set by default as `allow_failure: true`
+ # the 'production-b' is set as `needs: [canary]`, but it is still processed
+ # overall, 3 Deployment records are created
+ expect { process_pipeline }.to change { Deployment.count }.by(3)
+ expect(canary_deploy_job.status).to eq 'manual'
+ expect(production_a_deploy_job.status).to eq 'manual'
+ expect(production_b_deploy_job.status).to eq 'skipped'
+
+ # play and succeed the manual 'canary' and 'production-a' jobs
+ play_manual_action('canary')
+ play_manual_action('production-a')
+ succeed_pending
+ expect(canary_deploy_job.reload.status).to eq 'success'
+ expect(production_a_deploy_job.reload.status).to eq 'success'
+ expect(production_b_deploy_job.reload.status).to eq 'created'
+
+ # process the manual 'production-b' job again, no Deployment record is created
+ # because it has already been created when 'production-b' was first processed
+ expect { process_pipeline }.not_to change { Deployment.count }
+ expect(production_b_deploy_job.reload.status).to eq 'manual'
+ end
+ end
+
private
def all_builds
diff --git a/spec/services/ci/process_sync_events_service_spec.rb b/spec/services/ci/process_sync_events_service_spec.rb
index ff9bcd0f8e9..c58d73815b0 100644
--- a/spec/services/ci/process_sync_events_service_spec.rb
+++ b/spec/services/ci/process_sync_events_service_spec.rb
@@ -145,14 +145,6 @@ RSpec.describe Ci::ProcessSyncEventsService, feature_category: :continuous_integ
end
end
- context 'when the use_traversal_ids FF is disabled' do
- before do
- stub_feature_flags(use_traversal_ids: false)
- end
-
- it_behaves_like 'event consuming'
- end
-
it_behaves_like 'event consuming'
it 'enqueues Namespaces::ProcessSyncEventsWorker if any left' do
diff --git a/spec/services/ci/refs/enqueue_pipelines_to_unlock_service_spec.rb b/spec/services/ci/refs/enqueue_pipelines_to_unlock_service_spec.rb
new file mode 100644
index 00000000000..468302cb689
--- /dev/null
+++ b/spec/services/ci/refs/enqueue_pipelines_to_unlock_service_spec.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Refs::EnqueuePipelinesToUnlockService, :unlock_pipelines, :clean_gitlab_redis_shared_state, feature_category: :build_artifacts do
+ describe '#execute' do
+ let_it_be(:ref) { 'master' }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:tag_ref_path) { "#{::Gitlab::Git::TAG_REF_PREFIX}#{ref}" }
+ let_it_be(:ci_ref_tag) { create(:ci_ref, ref_path: tag_ref_path, project: project) }
+ let_it_be(:branch_ref_path) { "#{::Gitlab::Git::BRANCH_REF_PREFIX}#{ref}" }
+ let_it_be(:ci_ref_branch) { create(:ci_ref, ref_path: branch_ref_path, project: project) }
+ let_it_be(:other_ref) { 'other_ref' }
+ let_it_be(:other_ref_path) { "#{::Gitlab::Git::BRANCH_REF_PREFIX}#{other_ref}" }
+ let_it_be(:other_ci_ref) { create(:ci_ref, ref_path: other_ref_path, project: project) }
+
+ let(:service) { described_class.new }
+
+ subject(:execute) { service.execute(target_ref, before_pipeline: before_pipeline) }
+
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+ stub_const("#{described_class}::ENQUEUE_INTERVAL_SECONDS", 0)
+ end
+
+ shared_examples_for 'unlocking pipelines' do
+ let(:is_tag) { target_ref.ref_path.include?(::Gitlab::Git::TAG_REF_PREFIX) }
+
+ let!(:other_ref_pipeline) { create_pipeline(:locked, other_ref, tag: false) }
+ let!(:old_unlocked_pipeline) { create_pipeline(:unlocked, ref) }
+ let!(:older_locked_pipeline_1) { create_pipeline(:locked, ref) }
+ let!(:older_locked_pipeline_2) { create_pipeline(:locked, ref) }
+ let!(:older_locked_pipeline_3) { create_pipeline(:locked, ref) }
+ let!(:older_child_pipeline) { create_pipeline(:locked, ref, child_of: older_locked_pipeline_3) }
+ let!(:pipeline) { create_pipeline(:locked, ref) }
+ let!(:child_pipeline) { create_pipeline(:locked, ref, child_of: pipeline) }
+ let!(:newer_pipeline) { create_pipeline(:locked, ref) }
+
+ context 'when before_pipeline is given' do
+ let(:before_pipeline) { pipeline }
+
+ it 'only enqueues older locked pipelines within the ref' do
+ expect { execute }
+ .to change { pipeline_ids_waiting_to_be_unlocked }
+ .from([])
+ .to([
+ older_locked_pipeline_1.id,
+ older_locked_pipeline_2.id,
+ older_locked_pipeline_3.id,
+ older_child_pipeline.id
+ ])
+
+ expect(execute).to include(
+ status: :success,
+ total_pending_entries: 4,
+ total_new_entries: 4
+ )
+ end
+ end
+
+ context 'when before_pipeline is not given' do
+ let(:before_pipeline) { nil }
+
+ it 'enqueues all locked pipelines within the ref' do
+ expect { execute }
+ .to change { pipeline_ids_waiting_to_be_unlocked }
+ .from([])
+ .to([
+ older_locked_pipeline_1.id,
+ older_locked_pipeline_2.id,
+ older_locked_pipeline_3.id,
+ older_child_pipeline.id,
+ pipeline.id,
+ child_pipeline.id,
+ newer_pipeline.id
+ ])
+
+ expect(execute).to include(
+ status: :success,
+ total_pending_entries: 7,
+ total_new_entries: 7
+ )
+ end
+ end
+ end
+
+ context 'when ref is a tag' do
+ let(:target_ref) { ci_ref_tag }
+
+ it_behaves_like 'unlocking pipelines'
+ end
+
+ context 'when ref is a branch' do
+ let(:target_ref) { ci_ref_branch }
+
+ it_behaves_like 'unlocking pipelines'
+ end
+
+ def create_pipeline(type, ref, tag: is_tag, child_of: nil)
+ trait = type == :locked ? :artifacts_locked : :unlocked
+ create(:ci_pipeline, trait, ref: ref, tag: tag, project: project, child_of: child_of).tap do |p|
+ if child_of
+ build = create(:ci_build, pipeline: child_of)
+ create(:ci_sources_pipeline, source_job: build, source_project: project, pipeline: p, project: project)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/retry_job_service_spec.rb b/spec/services/ci/retry_job_service_spec.rb
index caed9815fb3..80fbfc04f9b 100644
--- a/spec/services/ci/retry_job_service_spec.rb
+++ b/spec/services/ci/retry_job_service_spec.rb
@@ -248,7 +248,8 @@ RSpec.describe Ci::RetryJobService, feature_category: :continuous_integration do
end
describe '#clone!' do
- let(:new_job) { service.clone!(job) }
+ let(:start_pipeline_on_clone) { false }
+ let(:new_job) { service.clone!(job, start_pipeline: start_pipeline_on_clone) }
it 'raises an error when an unexpected class is passed' do
expect { service.clone!(create(:ci_build).present) }.to raise_error(TypeError)
@@ -258,7 +259,24 @@ RSpec.describe Ci::RetryJobService, feature_category: :continuous_integration do
include_context 'retryable bridge'
it_behaves_like 'clones the job'
- it_behaves_like 'creates associations for a deployable job', :ci_bridge
+
+ it 'does not create a new deployment' do
+ expect { new_job }.not_to change { Deployment.count }
+ end
+
+ context 'when the pipeline is started automatically' do
+ let(:start_pipeline_on_clone) { true }
+
+ it_behaves_like 'creates associations for a deployable job', :ci_bridge
+ end
+
+ context 'when `create_deployment_only_for_processable_jobs` FF is disabled' do
+ before do
+ stub_feature_flags(create_deployment_only_for_processable_jobs: false)
+ end
+
+ it_behaves_like 'creates associations for a deployable job', :ci_bridge
+ end
context 'when given variables' do
let(:new_job) { service.clone!(job, variables: job_variables_attributes) }
@@ -272,10 +290,25 @@ RSpec.describe Ci::RetryJobService, feature_category: :continuous_integration do
context 'when the job to be cloned is a build' do
include_context 'retryable build'
- let(:job) { job_to_clone }
-
it_behaves_like 'clones the job'
- it_behaves_like 'creates associations for a deployable job', :ci_build
+
+ it 'does not create a new deployment' do
+ expect { new_job }.not_to change { Deployment.count }
+ end
+
+ context 'when the pipeline is started automatically' do
+ let(:start_pipeline_on_clone) { true }
+
+ it_behaves_like 'creates associations for a deployable job', :ci_build
+ end
+
+ context 'when `create_deployment_only_for_processable_jobs` FF is disabled' do
+ before do
+ stub_feature_flags(create_deployment_only_for_processable_jobs: false)
+ end
+
+ it_behaves_like 'creates associations for a deployable job', :ci_build
+ end
context 'when given variables' do
let(:new_job) { service.clone!(job, variables: job_variables_attributes) }
diff --git a/spec/services/ci/runners/register_runner_service_spec.rb b/spec/services/ci/runners/register_runner_service_spec.rb
index 7252763c13e..b5921773364 100644
--- a/spec/services/ci/runners/register_runner_service_spec.rb
+++ b/spec/services/ci/runners/register_runner_service_spec.rb
@@ -173,7 +173,7 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute', feature_categor
expect(runner).to be_an_instance_of(::Ci::Runner)
expect(runner.persisted?).to be_falsey
expect(runner.errors.messages).to eq(
- runner_projects: ['Maximum number of ci registered project runners (1) exceeded']
+ 'runner_projects.base': ['Maximum number of ci registered project runners (1) exceeded']
)
expect(project.runners.reload.size).to eq(1)
end
@@ -252,7 +252,7 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute', feature_categor
expect(runner).to be_an_instance_of(::Ci::Runner)
expect(runner.persisted?).to be_falsey
expect(runner.errors.messages).to eq(
- runner_namespaces: ['Maximum number of ci registered group runners (1) exceeded']
+ 'runner_namespaces.base': ['Maximum number of ci registered group runners (1) exceeded']
)
expect(group.runners.reload.size).to eq(1)
end
diff --git a/spec/services/ci/unlock_pipeline_service_spec.rb b/spec/services/ci/unlock_pipeline_service_spec.rb
new file mode 100644
index 00000000000..1a1150dca9e
--- /dev/null
+++ b/spec/services/ci/unlock_pipeline_service_spec.rb
@@ -0,0 +1,160 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::UnlockPipelineService, :unlock_pipelines, :clean_gitlab_redis_shared_state, feature_category: :build_artifacts do
+ describe '#execute', :aggregate_failures do
+ let(:service) { described_class.new(pipeline) }
+
+ let!(:pipeline) do
+ create(
+ :ci_pipeline,
+ :with_coverage_report_artifact,
+ :with_codequality_mr_diff_report,
+ :with_persisted_artifacts,
+ locked: :artifacts_locked
+ )
+ end
+
+ subject(:execute) { service.execute }
+
+ context 'when pipeline is not yet exclusively leased' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ end
+
+ it 'unlocks the pipeline and all its artifacts' do
+ expect { execute }
+ .to change { pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
+ .and change { pipeline.reload.job_artifacts.all?(&:artifact_unlocked?) }.to(true)
+ .and change { pipeline.reload.pipeline_artifacts.all?(&:artifact_unlocked?) }.to(true)
+
+ expect(execute).to eq(
+ status: :success,
+ skipped_already_leased: false,
+ skipped_already_unlocked: false,
+ exec_timeout: false,
+ unlocked_job_artifacts: pipeline.job_artifacts.count,
+ unlocked_pipeline_artifacts: pipeline.pipeline_artifacts.count
+ )
+ end
+
+ context 'and pipeline is already unlocked' do
+ before do
+ described_class.new(pipeline).execute
+ end
+
+ it 'skips the pipeline' do
+ expect(Ci::JobArtifact).not_to receive(:where)
+
+ expect(execute).to eq(
+ status: :success,
+ skipped_already_leased: false,
+ skipped_already_unlocked: true,
+ exec_timeout: false,
+ unlocked_job_artifacts: 0,
+ unlocked_pipeline_artifacts: 0
+ )
+ end
+ end
+
+ context 'and max execution duration was reached' do
+ let!(:first_artifact) { pipeline.job_artifacts.order(:id).first }
+ let!(:last_artifact) { pipeline.job_artifacts.order(:id).last }
+
+ before do
+ stub_const("#{described_class}::MAX_EXEC_DURATION", 0.seconds)
+ end
+
+ it 'keeps the unlocked state of job artifacts already processed and re-enqueues the pipeline' do
+ expect { execute }
+ .to change { first_artifact.reload.artifact_unlocked? }.to(true)
+ .and not_change { last_artifact.reload.artifact_unlocked? }
+ .and not_change { pipeline.reload.locked }
+ .and not_change { pipeline.reload.pipeline_artifacts.all?(&:artifact_unlocked?) }
+ .and change { pipeline_ids_waiting_to_be_unlocked }.from([]).to([pipeline.id])
+
+ expect(execute).to eq(
+ status: :success,
+ skipped_already_leased: false,
+ skipped_already_unlocked: false,
+ exec_timeout: true,
+ unlocked_job_artifacts: 1,
+ unlocked_pipeline_artifacts: 0
+ )
+ end
+ end
+
+ context 'and an error happened' do
+ context 'and was raised in the middle batches of job artifacts being unlocked' do
+ let!(:first_artifact) { pipeline.job_artifacts.order(:id).first }
+ let!(:last_artifact) { pipeline.job_artifacts.order(:id).last }
+
+ before do
+ mock_relation = instance_double('Ci::JobArtifact::ActiveRecord_Relation')
+ allow(Ci::JobArtifact).to receive(:where).and_call_original
+ allow(Ci::JobArtifact).to receive(:where).with(id: [last_artifact.id]).and_return(mock_relation)
+ allow(mock_relation).to receive(:update_all).and_raise('An error')
+ end
+
+ it 'keeps the unlocked state of job artifacts already processed and re-enqueues the pipeline' do
+ expect { execute }
+ .to raise_error('An error')
+ .and change { first_artifact.reload.artifact_unlocked? }.to(true)
+ .and not_change { last_artifact.reload.artifact_unlocked? }
+ .and not_change { pipeline.reload.locked }
+ .and not_change { pipeline.reload.pipeline_artifacts.all?(&:artifact_unlocked?) }
+ .and change { pipeline_ids_waiting_to_be_unlocked }.from([]).to([pipeline.id])
+ end
+ end
+
+ context 'and was raised while unlocking pipeline artifacts' do
+ before do
+ allow(pipeline).to receive_message_chain(:pipeline_artifacts, :update_all).and_raise('An error')
+ end
+
+ it 'keeps the unlocked state of job artifacts and re-enqueues the pipeline' do
+ expect { execute }
+ .to raise_error('An error')
+ .and change { pipeline.reload.job_artifacts.all?(&:artifact_unlocked?) }.to(true)
+ .and not_change { Ci::PipelineArtifact.where(pipeline_id: pipeline.id).all?(&:artifact_unlocked?) }
+ .and not_change { pipeline.reload.locked }.from('artifacts_locked')
+ .and change { pipeline_ids_waiting_to_be_unlocked }.from([]).to([pipeline.id])
+ end
+ end
+
+ context 'and was raised while unlocking pipeline' do
+ before do
+ allow(pipeline).to receive(:update_column).and_raise('An error')
+ end
+
+ it 'keeps the unlocked state of job artifacts and pipeline artifacts and re-enqueues the pipeline' do
+ expect { execute }
+ .to raise_error('An error')
+ .and change { pipeline.reload.job_artifacts.all?(&:artifact_unlocked?) }.to(true)
+ .and change { pipeline.reload.pipeline_artifacts.all?(&:artifact_unlocked?) }.to(true)
+ .and not_change { pipeline.reload.locked }.from('artifacts_locked')
+ .and change { pipeline_ids_waiting_to_be_unlocked }.from([]).to([pipeline.id])
+ end
+ end
+ end
+ end
+
+ context 'when pipeline is already exclusively leased' do
+ before do
+ allow(service).to receive(:in_lock).and_raise(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
+ end
+
+ it 'does nothing and returns success' do
+ expect { execute }.not_to change { pipeline.reload.locked }
+
+ expect(execute).to include(
+ status: :success,
+ skipped_already_leased: true,
+ unlocked_job_artifacts: 0,
+ unlocked_pipeline_artifacts: 0
+ )
+ end
+ end
+ end
+end
diff --git a/spec/services/deployments/create_service_spec.rb b/spec/services/deployments/create_service_spec.rb
index 2a70d450575..77dcad35f70 100644
--- a/spec/services/deployments/create_service_spec.rb
+++ b/spec/services/deployments/create_service_spec.rb
@@ -86,7 +86,6 @@ RSpec.describe Deployments::CreateService, feature_category: :continuous_deliver
)
expect(service.deployment_attributes).to eq(
- cluster_id: 1,
project_id: 2,
environment_id: 3,
ref: 'master',
diff --git a/spec/services/design_management/delete_designs_service_spec.rb b/spec/services/design_management/delete_designs_service_spec.rb
index b6a80cf26cc..5534dea85b2 100644
--- a/spec/services/design_management/delete_designs_service_spec.rb
+++ b/spec/services/design_management/delete_designs_service_spec.rb
@@ -174,7 +174,7 @@ RSpec.describe DesignManagement::DeleteDesignsService, feature_category: :design
end
it_behaves_like 'internal event tracking' do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_REMOVED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_REMOVED }
let(:namespace) { project.namespace }
subject(:service_action) { run_service }
diff --git a/spec/services/design_management/save_designs_service_spec.rb b/spec/services/design_management/save_designs_service_spec.rb
index 8e5065184ca..8a4dd8b5fc2 100644
--- a/spec/services/design_management/save_designs_service_spec.rb
+++ b/spec/services/design_management/save_designs_service_spec.rb
@@ -120,7 +120,7 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
end
it_behaves_like 'internal event tracking' do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_ADDED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_ADDED }
let(:namespace) { project.namespace }
subject(:service_action) { run_service }
end
@@ -219,7 +219,7 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
end
it_behaves_like 'internal event tracking' do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_MODIFIED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_MODIFIED }
let(:namespace) { project.namespace }
subject(:service_action) { run_service }
end
diff --git a/spec/services/draft_notes/publish_service_spec.rb b/spec/services/draft_notes/publish_service_spec.rb
index 48959baeaa5..e087f2ffc7e 100644
--- a/spec/services/draft_notes/publish_service_spec.rb
+++ b/spec/services/draft_notes/publish_service_spec.rb
@@ -181,7 +181,7 @@ RSpec.describe DraftNotes::PublishService, feature_category: :code_review_workfl
# NOTE: This should be reduced as we work on reducing Gitaly calls.
# Gitaly requests shouldn't go above this threshold as much as possible
# as it may add more to the Gitaly N+1 issue we are experiencing.
- expect { publish }.to change { Gitlab::GitalyClient.get_request_count }.by(20)
+ expect { publish }.to change { Gitlab::GitalyClient.get_request_count }.by(19)
end
end
diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb
index 74f1f4bc7ac..fe54663b983 100644
--- a/spec/services/git/branch_push_service_spec.rb
+++ b/spec/services/git/branch_push_service_spec.rb
@@ -685,44 +685,21 @@ RSpec.describe Git::BranchPushService, :use_clean_rails_redis_caching, services:
let(:commits_to_sync) { [] }
shared_examples 'enqueues Jira sync worker' do
- context "batch_delay_jira_branch_sync_worker feature flag is enabled" do
- before do
- stub_feature_flags(batch_delay_jira_branch_sync_worker: true)
- end
-
- specify :aggregate_failures do
- Sidekiq::Testing.fake! do
- if commits_to_sync.any?
- expect(JiraConnect::SyncBranchWorker)
- .to receive(:perform_in)
- .with(kind_of(Numeric), project.id, branch_to_sync, commits_to_sync, kind_of(Numeric))
- .and_call_original
- else
- expect(JiraConnect::SyncBranchWorker)
- .to receive(:perform_async)
- .with(project.id, branch_to_sync, commits_to_sync, kind_of(Numeric))
- .and_call_original
- end
-
- expect { subject }.to change(JiraConnect::SyncBranchWorker.jobs, :size).by(1)
- end
- end
- end
-
- context "batch_delay_jira_branch_sync_worker feature flag is disabled" do
- before do
- stub_feature_flags(batch_delay_jira_branch_sync_worker: false)
- end
-
- specify :aggregate_failures do
- Sidekiq::Testing.fake! do
+ specify :aggregate_failures do
+ Sidekiq::Testing.fake! do
+ if commits_to_sync.any?
+ expect(JiraConnect::SyncBranchWorker)
+ .to receive(:perform_in)
+ .with(kind_of(Numeric), project.id, branch_to_sync, commits_to_sync, kind_of(Numeric))
+ .and_call_original
+ else
expect(JiraConnect::SyncBranchWorker)
.to receive(:perform_async)
.with(project.id, branch_to_sync, commits_to_sync, kind_of(Numeric))
.and_call_original
-
- expect { subject }.to change(JiraConnect::SyncBranchWorker.jobs, :size).by(1)
end
+
+ expect { subject }.to change(JiraConnect::SyncBranchWorker.jobs, :size).by(1)
end
end
end
diff --git a/spec/services/groups/update_service_spec.rb b/spec/services/groups/update_service_spec.rb
index 5e37f33e4f2..78deb3cf254 100644
--- a/spec/services/groups/update_service_spec.rb
+++ b/spec/services/groups/update_service_spec.rb
@@ -491,30 +491,6 @@ RSpec.describe Groups::UpdateService, feature_category: :groups_and_projects do
it 'returns true' do
expect(service.execute).to eq(true)
end
-
- context 'error moving group' do
- before do
- allow(internal_group).to receive(:move_dir).and_raise(Gitlab::UpdatePathError)
- end
-
- it 'does not raise an error' do
- expect { service.execute }.not_to raise_error
- end
-
- it 'returns false' do
- expect(service.execute).to eq(false)
- end
-
- it 'has the right error' do
- service.execute
-
- expect(internal_group.errors.full_messages.first).to eq('Gitlab::UpdatePathError')
- end
-
- it "hasn't changed the path" do
- expect { service.execute }.not_to change { internal_group.reload.path }
- end
- end
end
context 'for a subgroup' do
diff --git a/spec/services/import/github_service_spec.rb b/spec/services/import/github_service_spec.rb
index 982b8b11383..39832ee4b13 100644
--- a/spec/services/import/github_service_spec.rb
+++ b/spec/services/import/github_service_spec.rb
@@ -15,115 +15,157 @@ RSpec.describe Import::GithubService, feature_category: :importers do
let(:settings) { instance_double(Gitlab::GithubImport::Settings) }
let(:user_namespace_path) { user.namespace_path }
let(:optional_stages) { nil }
+ let(:timeout_strategy) { "optimistic" }
let(:params) do
{
repo_id: 123,
new_name: 'new_repo',
target_namespace: user_namespace_path,
- optional_stages: optional_stages
+ optional_stages: optional_stages,
+ timeout_strategy: timeout_strategy
}
end
+ let(:client) { Gitlab::GithubImport::Client.new(token) }
+ let(:project_double) { instance_double(Project, persisted?: true) }
+
subject(:github_importer) { described_class.new(client, user, params) }
- shared_examples 'handles errors' do |klass|
- let(:client) { klass.new(token) }
- let(:project_double) { instance_double(Project, persisted?: true) }
+ before do
+ allow(Gitlab::GithubImport::Settings).to receive(:new).with(project_double).and_return(settings)
+ allow(settings)
+ .to receive(:write)
+ .with(
+ optional_stages: optional_stages,
+ additional_access_tokens: access_params[:additional_access_tokens],
+ timeout_strategy: timeout_strategy
+ )
+ end
+
+ context 'do not raise an exception on input error' do
+ let(:exception) { Octokit::ClientError.new(status: 404, body: 'Not Found') }
before do
- allow(Gitlab::GithubImport::Settings).to receive(:new).with(project_double).and_return(settings)
- allow(settings)
- .to receive(:write)
- .with(
- optional_stages: optional_stages,
- additional_access_tokens: access_params[:additional_access_tokens]
- )
+ expect(client).to receive(:repository).and_raise(exception)
end
- context 'do not raise an exception on input error' do
- let(:exception) { Octokit::ClientError.new(status: 404, body: 'Not Found') }
+ it 'logs the original error' do
+ expect(Gitlab::Import::Logger).to receive(:error).with({
+ message: 'Import failed due to a GitHub error',
+ status: 404,
+ error: 'Not Found'
+ }).and_call_original
- before do
- expect(client).to receive(:repository).and_raise(exception)
- end
+ subject.execute(access_params, :github)
+ end
- it 'logs the original error' do
- expect(Gitlab::Import::Logger).to receive(:error).with({
- message: 'Import failed due to a GitHub error',
- status: 404,
- error: 'Not Found'
- }).and_call_original
+ it 'returns an error with message and code' do
+ result = subject.execute(access_params, :github)
- subject.execute(access_params, :github)
- end
+ expect(result).to include(
+ message: 'Import failed due to a GitHub error: Not Found (HTTP 404)',
+ status: :error,
+ http_status: :unprocessable_entity
+ )
+ end
+ end
- it 'returns an error with message and code' do
- result = subject.execute(access_params, :github)
+ it 'raises an exception for unknown error causes' do
+ exception = StandardError.new('Not Implemented')
- expect(result).to include(
- message: 'Import failed due to a GitHub error: Not Found (HTTP 404)',
- status: :error,
- http_status: :unprocessable_entity
- )
- end
- end
+ expect(client).to receive(:repository).and_raise(exception)
- it 'raises an exception for unknown error causes' do
- exception = StandardError.new('Not Implemented')
+ expect(Gitlab::Import::Logger).not_to receive(:error)
- expect(client).to receive(:repository).and_raise(exception)
+ expect { subject.execute(access_params, :github) }.to raise_error(exception)
+ end
+
+ context 'repository size validation' do
+ let(:repository_double) { { name: 'repository', size: 99 } }
- expect(Gitlab::Import::Logger).not_to receive(:error)
+ before do
+ allow(subject).to receive(:authorized?).and_return(true)
+ expect(client).to receive(:repository).and_return(repository_double)
- expect { subject.execute(access_params, :github) }.to raise_error(exception)
+ allow_next_instance_of(Gitlab::LegacyGithubImport::ProjectCreator) do |creator|
+ allow(creator).to receive(:execute).and_return(project_double)
+ end
end
- context 'repository size validation' do
- let(:repository_double) { { name: 'repository', size: 99 } }
+ context 'when there is no repository size limit defined' do
+ it 'skips the check, succeeds, and tracks an access level' do
+ expect(subject.execute(access_params, :github)).to include(status: :success)
+ expect(settings)
+ .to have_received(:write)
+ .with(optional_stages: nil,
+ additional_access_tokens: access_params[:additional_access_tokens],
+ timeout_strategy: timeout_strategy
+ )
+ expect_snowplow_event(
+ category: 'Import::GithubService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { import_type: 'github', user_role: 'Owner' }
+ )
+ end
+ end
- before do
- allow(subject).to receive(:authorized?).and_return(true)
- expect(client).to receive(:repository).and_return(repository_double)
+ context 'when the target namespace repository size limit is defined' do
+ let_it_be(:group) { create(:group, repository_size_limit: 100) }
- allow_next_instance_of(Gitlab::LegacyGithubImport::ProjectCreator) do |creator|
- allow(creator).to receive(:execute).and_return(project_double)
- end
+ before do
+ params[:target_namespace] = group.full_path
end
- context 'when there is no repository size limit defined' do
- it 'skips the check, succeeds, and tracks an access level' do
- expect(subject.execute(access_params, :github)).to include(status: :success)
- expect(settings)
- .to have_received(:write)
- .with(optional_stages: nil, additional_access_tokens: access_params[:additional_access_tokens])
- expect_snowplow_event(
- category: 'Import::GithubService',
- action: 'create',
- label: 'import_access_level',
- user: user,
- extra: { import_type: 'github', user_role: 'Owner' }
+ it 'succeeds when the repository is smaller than the limit' do
+ expect(subject.execute(access_params, :github)).to include(status: :success)
+ expect(settings)
+ .to have_received(:write)
+ .with(
+ optional_stages: nil,
+ additional_access_tokens: access_params[:additional_access_tokens],
+ timeout_strategy: timeout_strategy
)
- end
+ expect_snowplow_event(
+ category: 'Import::GithubService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { import_type: 'github', user_role: 'Not a member' }
+ )
end
- context 'when the target namespace repository size limit is defined' do
- let_it_be(:group) { create(:group, repository_size_limit: 100) }
+ it 'returns error when the repository is larger than the limit' do
+ repository_double[:size] = 101
- before do
- params[:target_namespace] = group.full_path
- end
+ expect(subject.execute(access_params, :github)).to include(size_limit_error)
+ end
+ end
+
+ context 'when target namespace repository limit is not defined' do
+ let_it_be(:group) { create(:group) }
+ before do
+ stub_application_setting(repository_size_limit: 100)
+ end
+
+ context 'when application size limit is defined' do
it 'succeeds when the repository is smaller than the limit' do
expect(subject.execute(access_params, :github)).to include(status: :success)
expect(settings)
.to have_received(:write)
- .with(optional_stages: nil, additional_access_tokens: access_params[:additional_access_tokens])
+ .with(
+ optional_stages: nil,
+ additional_access_tokens: access_params[:additional_access_tokens],
+ timeout_strategy: timeout_strategy
+ )
expect_snowplow_event(
category: 'Import::GithubService',
action: 'create',
label: 'import_access_level',
user: user,
- extra: { import_type: 'github', user_role: 'Not a member' }
+ extra: { import_type: 'github', user_role: 'Owner' }
)
end
@@ -133,167 +175,142 @@ RSpec.describe Import::GithubService, feature_category: :importers do
expect(subject.execute(access_params, :github)).to include(size_limit_error)
end
end
-
- context 'when target namespace repository limit is not defined' do
- let_it_be(:group) { create(:group) }
-
- before do
- stub_application_setting(repository_size_limit: 100)
- end
-
- context 'when application size limit is defined' do
- it 'succeeds when the repository is smaller than the limit' do
- expect(subject.execute(access_params, :github)).to include(status: :success)
- expect(settings)
- .to have_received(:write)
- .with(optional_stages: nil, additional_access_tokens: access_params[:additional_access_tokens])
- expect_snowplow_event(
- category: 'Import::GithubService',
- action: 'create',
- label: 'import_access_level',
- user: user,
- extra: { import_type: 'github', user_role: 'Owner' }
- )
- end
-
- it 'returns error when the repository is larger than the limit' do
- repository_double[:size] = 101
-
- expect(subject.execute(access_params, :github)).to include(size_limit_error)
- end
- end
- end
-
- context 'when optional stages params present' do
- let(:optional_stages) do
- {
- single_endpoint_issue_events_import: true,
- single_endpoint_notes_import: 'false',
- attachments_import: false,
- collaborators_import: true
- }
- end
-
- it 'saves optional stages choice to import_data' do
- subject.execute(access_params, :github)
-
- expect(settings)
- .to have_received(:write)
- .with(
- optional_stages: optional_stages,
- additional_access_tokens: access_params[:additional_access_tokens]
- )
- end
- end
-
- context 'when additional access tokens are present' do
- it 'saves additional access tokens to import_data' do
- subject.execute(access_params, :github)
-
- expect(settings)
- .to have_received(:write)
- .with(optional_stages: optional_stages, additional_access_tokens: %w[foo bar])
- end
- end
end
- context 'when import source is disabled' do
- let(:repository_double) do
+ context 'when optional stages params present' do
+ let(:optional_stages) do
{
- name: 'vim',
- description: 'test',
- full_name: 'test/vim',
- clone_url: 'http://repo.com/repo/repo.git',
- private: false,
- has_wiki?: false
+ single_endpoint_issue_events_import: true,
+ single_endpoint_notes_import: 'false',
+ attachments_import: false,
+ collaborators_import: true
}
end
- before do
- stub_application_setting(import_sources: nil)
- allow(client).to receive(:repository).and_return(repository_double)
+ it 'saves optional stages choice to import_data' do
+ subject.execute(access_params, :github)
+
+ expect(settings)
+ .to have_received(:write)
+ .with(
+ optional_stages: optional_stages,
+ additional_access_tokens: access_params[:additional_access_tokens],
+ timeout_strategy: timeout_strategy
+ )
end
+ end
- it 'returns forbidden' do
- result = subject.execute(access_params, :github)
+ context 'when timeout strategy param is present' do
+ let(:timeout_strategy) { 'pessimistic' }
- expect(result).to include(
- status: :error,
- http_status: :forbidden
- )
+ it 'saves timeout strategy to import_data' do
+ subject.execute(access_params, :github)
+
+ expect(settings)
+ .to have_received(:write)
+ .with(
+ optional_stages: optional_stages,
+ additional_access_tokens: access_params[:additional_access_tokens],
+ timeout_strategy: timeout_strategy
+ )
end
end
- context 'when a blocked/local URL is used as github_hostname' do
- let(:message) { 'Error while attempting to import from GitHub' }
- let(:error) { "Invalid URL: #{url}" }
+ context 'when additional access tokens are present' do
+ it 'saves additional access tokens to import_data' do
+ subject.execute(access_params, :github)
- before do
- stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
+ expect(settings)
+ .to have_received(:write)
+ .with(
+ optional_stages: optional_stages,
+ additional_access_tokens: %w[foo bar],
+ timeout_strategy: timeout_strategy
+ )
end
+ end
+ end
- where(url: %w[https://localhost https://10.0.0.1])
-
- with_them do
- it 'returns and logs an error' do
- allow(github_importer).to receive(:url).and_return(url)
+ context 'when import source is disabled' do
+ let(:repository_double) do
+ {
+ name: 'vim',
+ description: 'test',
+ full_name: 'test/vim',
+ clone_url: 'http://repo.com/repo/repo.git',
+ private: false,
+ has_wiki?: false
+ }
+ end
- expect(Gitlab::Import::Logger).to receive(:error).with({
- message: message,
- error: error
- }).and_call_original
- expect(github_importer.execute(access_params, :github)).to include(blocked_url_error(url))
- end
- end
+ before do
+ stub_application_setting(import_sources: nil)
+ allow(client).to receive(:repository).and_return(repository_double)
end
- context 'when target_namespace is blank' do
- before do
- params[:target_namespace] = ''
- end
+ it 'returns forbidden' do
+ result = subject.execute(access_params, :github)
- it 'raises an exception' do
- expect { subject.execute(access_params, :github) }.to raise_error(ArgumentError, 'Target namespace is required')
- end
+ expect(result).to include(
+ status: :error,
+ http_status: :forbidden
+ )
end
+ end
- context 'when namespace to import repository into does not exist' do
- before do
- params[:target_namespace] = 'unknown_path'
- end
+ context 'when a blocked/local URL is used as github_hostname' do
+ let(:message) { 'Error while attempting to import from GitHub' }
+ let(:error) { "Invalid URL: #{url}" }
- it 'returns an error' do
- expect(github_importer.execute(access_params, :github)).to include(not_existed_namespace_error)
- end
+ before do
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
end
- context 'when user has no permissions to import repository into the specified namespace' do
- let_it_be(:group) { create(:group) }
+ where(url: %w[https://localhost https://10.0.0.1])
- before do
- params[:target_namespace] = group.full_path
- end
+ with_them do
+ it 'returns and logs an error' do
+ allow(github_importer).to receive(:url).and_return(url)
- it 'returns an error' do
- expect(github_importer.execute(access_params, :github)).to include(taken_namespace_error)
+ expect(Gitlab::Import::Logger).to receive(:error).with({
+ message: message,
+ error: error
+ }).and_call_original
+ expect(github_importer.execute(access_params, :github)).to include(blocked_url_error(url))
end
end
end
- context 'when remove_legacy_github_client feature flag is enabled' do
+ context 'when target_namespace is blank' do
+ before do
+ params[:target_namespace] = ''
+ end
+
+ it 'raises an exception' do
+ expect { subject.execute(access_params, :github) }.to raise_error(ArgumentError, 'Target namespace is required')
+ end
+ end
+
+ context 'when namespace to import repository into does not exist' do
before do
- stub_feature_flags(remove_legacy_github_client: true)
+ params[:target_namespace] = 'unknown_path'
end
- include_examples 'handles errors', Gitlab::GithubImport::Client
+ it 'returns an error' do
+ expect(github_importer.execute(access_params, :github)).to include(not_existed_namespace_error)
+ end
end
- context 'when remove_legacy_github_client feature flag is disabled' do
+ context 'when user has no permissions to import repository into the specified namespace' do
+ let_it_be(:group) { create(:group) }
+
before do
- stub_feature_flags(remove_legacy_github_client: false)
+ params[:target_namespace] = group.full_path
end
- include_examples 'handles errors', Gitlab::LegacyGithubImport::Client
+ it 'returns an error' do
+ expect(github_importer.execute(access_params, :github)).to include(taken_namespace_error)
+ end
end
def size_limit_error
diff --git a/spec/services/import/validate_remote_git_endpoint_service_spec.rb b/spec/services/import/validate_remote_git_endpoint_service_spec.rb
index 1d2b3975832..15e80f2c85d 100644
--- a/spec/services/import/validate_remote_git_endpoint_service_spec.rb
+++ b/spec/services/import/validate_remote_git_endpoint_service_spec.rb
@@ -7,7 +7,9 @@ RSpec.describe Import::ValidateRemoteGitEndpointService, feature_category: :impo
let_it_be(:base_url) { 'http://demo.host/path' }
let_it_be(:endpoint_url) { "#{base_url}/info/refs?service=git-upload-pack" }
- let_it_be(:error_message) { "#{base_url} is not a valid HTTP Git repository" }
+ let_it_be(:endpoint_error_message) { "#{base_url} endpoint error:" }
+ let_it_be(:body_error_message) { described_class::INVALID_BODY_MESSAGE }
+ let_it_be(:content_type_error_message) { described_class::INVALID_CONTENT_TYPE_MESSAGE }
describe '#execute' do
let(:valid_response) do
@@ -70,13 +72,14 @@ RSpec.describe Import::ValidateRemoteGitEndpointService, feature_category: :impo
end
it 'reports error when status code is not 200' do
- stub_full_request(endpoint_url, method: :get).to_return(valid_response.merge({ status: 301 }))
+ error_response = { status: 401 }
+ stub_full_request(endpoint_url, method: :get).to_return(error_response)
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result.error?).to be(true)
- expect(result.message).to eq(error_message)
+ expect(result.message).to eq("#{endpoint_error_message} #{error_response[:status]}")
end
it 'reports error when invalid URL is provided' do
@@ -94,27 +97,49 @@ RSpec.describe Import::ValidateRemoteGitEndpointService, feature_category: :impo
expect(result).to be_a(ServiceResponse)
expect(result.error?).to be(true)
- expect(result.message).to eq(error_message)
+ expect(result.message).to eq(content_type_error_message)
end
- it 'reports error when body is in invalid format' do
+ it 'reports error when body is too short' do
stub_full_request(endpoint_url, method: :get).to_return(valid_response.merge({ body: 'invalid content' }))
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result.error?).to be(true)
- expect(result.message).to eq(error_message)
+ expect(result.message).to eq(body_error_message)
+ end
+
+ it 'reports error when body is in invalid format' do
+ stub_full_request(endpoint_url, method: :get).to_return(valid_response.merge({ body: 'invalid long content with no git respons whatshowever' }))
+
+ result = subject.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result.error?).to be(true)
+ expect(result.message).to eq(body_error_message)
+ end
+
+ it 'reports error when http exceptions are raised' do
+ err = SocketError.new('dummy message')
+ stub_full_request(endpoint_url, method: :get).to_raise(err)
+
+ result = subject.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result.error?).to be(true)
+ expect(result.message).to eq("HTTP #{err.class.name.underscore} error: #{err.message}")
end
- it 'reports error when exception is raised' do
- stub_full_request(endpoint_url, method: :get).to_raise(SocketError.new('dummy message'))
+ it 'reports error when other exceptions are raised' do
+ err = StandardError.new('internal dummy message')
+ stub_full_request(endpoint_url, method: :get).to_raise(err)
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result.error?).to be(true)
- expect(result.message).to eq(error_message)
+ expect(result.message).to eq("Internal #{err.class.name.underscore} error: #{err.message}")
end
end
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index dabbd4bfa84..009f68594d7 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -194,7 +194,7 @@ RSpec.describe Issues::CloseService, feature_category: :team_planning do
end
end
- context "closed by a merge request", :sidekiq_might_not_need_inline do
+ context "closed by a merge request" do
subject(:close_issue) do
perform_enqueued_jobs do
described_class.new(container: project, current_user: user).close_issue(issue, closed_via: closing_merge_request)
diff --git a/spec/services/issues/set_crm_contacts_service_spec.rb b/spec/services/issues/set_crm_contacts_service_spec.rb
index aa5dec20a13..7d709bbd9c8 100644
--- a/spec/services/issues/set_crm_contacts_service_spec.rb
+++ b/spec/services/issues/set_crm_contacts_service_spec.rb
@@ -106,6 +106,14 @@ RSpec.describe Issues::SetCrmContactsService, feature_category: :team_planning d
it_behaves_like 'setting contacts'
it_behaves_like 'adds system note', 1, 1
+
+ context 'with empty list' do
+ let(:params) { { replace_ids: [] } }
+ let(:expected_contacts) { [] }
+
+ it_behaves_like 'setting contacts'
+ it_behaves_like 'adds system note', 0, 2
+ end
end
context 'add' do
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index eb9fe2b4ed7..c4012e2a98f 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning
let_it_be(:label3) { create(:label, title: 'c', project: project) }
let_it_be(:milestone) { create(:milestone, project: project) }
+ let(:container) { project }
let(:issue) do
create(
:issue,
@@ -49,7 +50,7 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning
end
def update_issue(opts)
- described_class.new(container: project, current_user: user, params: opts).execute(issue)
+ described_class.new(container: container, current_user: user, params: opts).execute(issue)
end
it_behaves_like 'issuable update service updating last_edited_at values' do
@@ -825,7 +826,7 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning
end
it 'updates updated_at' do
- expect(issue.reload.updated_at).to be > Time.current
+ expect(issue.reload.updated_at).to be_future
end
end
end
@@ -1006,6 +1007,12 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning
it_behaves_like 'keeps issuable labels sorted after update'
it_behaves_like 'broadcasting issuable labels updates'
+ context 'when the issue belongs directly to a group' do
+ let(:container) { group }
+
+ it_behaves_like 'updating issuable labels'
+ end
+
def update_issuable(update_params)
update_issue(update_params)
end
diff --git a/spec/services/jira_connect/sync_service_spec.rb b/spec/services/jira_connect/sync_service_spec.rb
index 7457cdca13c..019370ce87f 100644
--- a/spec/services/jira_connect/sync_service_spec.rb
+++ b/spec/services/jira_connect/sync_service_spec.rb
@@ -7,9 +7,11 @@ RSpec.describe JiraConnect::SyncService, feature_category: :integrations do
describe '#execute' do
let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+ let_it_be(:merge_request_reviewer) { create(:merge_request_reviewer, merge_request: merge_request) }
let(:client) { Atlassian::JiraConnect::Client }
- let(:info) { { a: 'Some', b: 'Info' } }
+ let(:info) { { a: 'Some', b: 'Info', merge_requests: [merge_request] } }
subject do
described_class.new(project).execute(**info)
@@ -44,6 +46,20 @@ RSpec.describe JiraConnect::SyncService, feature_category: :integrations do
subject
end
+ it 'does not execute any queries for preloaded reviewers' do
+ expect_next(client).to store_info
+
+ expect_log(:info, { 'status': 'success' })
+
+ amount = ActiveRecord::QueryRecorder
+ .new { info[:merge_requests].flat_map(&:merge_request_reviewers).map(&:reviewer) }
+ .count
+
+ expect(amount).to be_zero
+
+ subject
+ end
+
context 'when a request returns errors' do
it 'logs each response as an error' do
expect_next(client).to store_info(
diff --git a/spec/services/members/create_service_spec.rb b/spec/services/members/create_service_spec.rb
index 96fa8ab278d..b977292bcf4 100644
--- a/spec/services/members/create_service_spec.rb
+++ b/spec/services/members/create_service_spec.rb
@@ -167,12 +167,15 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
let(:user_id) { '' }
it 'does not add a member' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:log_exception)
+ .with(an_instance_of(described_class::BlankInvitesError), class: described_class.to_s, user_id: user.id)
expect(Gitlab::EventStore)
.not_to receive(:publish)
.with(an_instance_of(Members::MembersAddedEvent))
expect(execute_service[:status]).to eq(:error)
- expect(execute_service[:message]).to be_present
+ expect(execute_service[:message]).to eq(s_('AddMember|No users specified.'))
expect(source.users).not_to include member
expect(Onboarding::Progress.completed?(source.namespace, :user_added)).to be(false)
end
@@ -182,6 +185,10 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
let(:user_id) { 1.upto(101).to_a.join(',') }
it 'limits the number of users to 100' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:log_exception)
+ .with(an_instance_of(described_class::TooManyInvitesError), class: described_class.to_s, user_id: user.id)
+
expect(execute_service[:status]).to eq(:error)
expect(execute_service[:message]).to be_present
expect(source.users).not_to include member
@@ -297,113 +304,4 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
end
end
end
-
- context 'when assigning tasks to be done' do
- let(:additional_params) do
- { invite_source: '_invite_source_', tasks_to_be_done: %w(ci code), tasks_project_id: source.id }
- end
-
- it 'creates 2 task issues', :aggregate_failures do
- expect(TasksToBeDone::CreateWorker)
- .to receive(:perform_async)
- .with(anything, user.id, [member.id])
- .once
- .and_call_original
- expect { execute_service }.to change { source.issues.count }.by(2)
-
- expect(source.issues).to all have_attributes(
- project: source,
- author: user
- )
- end
-
- context 'when it is an invite by email passed to user_id' do
- let(:user_id) { 'email@example.org' }
-
- it 'does not create task issues' do
- expect(TasksToBeDone::CreateWorker).not_to receive(:perform_async)
- execute_service
- end
- end
-
- context 'when passing many user ids' do
- before do
- stub_licensed_features(multiple_issue_assignees: false)
- end
-
- let(:another_user) { create(:user) }
- let(:user_id) { [member.id, another_user.id].join(',') }
-
- it 'still creates 2 task issues', :aggregate_failures do
- expect(TasksToBeDone::CreateWorker)
- .to receive(:perform_async)
- .with(anything, user.id, array_including(member.id, another_user.id))
- .once
- .and_call_original
- expect { execute_service }.to change { source.issues.count }.by(2)
-
- expect(source.issues).to all have_attributes(
- project: source,
- author: user
- )
- end
- end
-
- context 'when a `tasks_project_id` is missing' do
- let(:additional_params) do
- { invite_source: '_invite_source_', tasks_to_be_done: %w(ci code) }
- end
-
- it 'does not create task issues' do
- expect(TasksToBeDone::CreateWorker).not_to receive(:perform_async)
- execute_service
- end
- end
-
- context 'when `tasks_to_be_done` are missing' do
- let(:additional_params) do
- { invite_source: '_invite_source_', tasks_project_id: source.id }
- end
-
- it 'does not create task issues' do
- expect(TasksToBeDone::CreateWorker).not_to receive(:perform_async)
- execute_service
- end
- end
-
- context 'when invalid `tasks_to_be_done` are passed' do
- let(:additional_params) do
- { invite_source: '_invite_source_', tasks_project_id: source.id, tasks_to_be_done: %w(invalid_task) }
- end
-
- it 'does not create task issues' do
- expect(TasksToBeDone::CreateWorker).not_to receive(:perform_async)
- execute_service
- end
- end
-
- context 'when invalid `tasks_project_id` is passed' do
- let(:another_project) { create(:project) }
- let(:additional_params) do
- { invite_source: '_invite_source_', tasks_project_id: another_project.id, tasks_to_be_done: %w(ci code) }
- end
-
- it 'does not create task issues' do
- expect(TasksToBeDone::CreateWorker).not_to receive(:perform_async)
- execute_service
- end
- end
-
- context 'when a member was already invited' do
- let(:user_id) { create(:project_member, :invited, project: source).invite_email }
- let(:additional_params) do
- { invite_source: '_invite_source_', tasks_project_id: source.id, tasks_to_be_done: %w(ci code) }
- end
-
- it 'does not create task issues' do
- expect(TasksToBeDone::CreateWorker).not_to receive(:perform_async)
- execute_service
- end
- end
- end
end
diff --git a/spec/services/members/invite_service_spec.rb b/spec/services/members/invite_service_spec.rb
index 76cd5d6c89e..bf81388357f 100644
--- a/spec/services/members/invite_service_spec.rb
+++ b/spec/services/members/invite_service_spec.rb
@@ -24,11 +24,6 @@ RSpec.describe Members::InviteService, :aggregate_failures, :clean_gitlab_redis_
end
it_behaves_like 'records an onboarding progress action', :user_added
-
- it 'does not create task issues' do
- expect(TasksToBeDone::CreateWorker).not_to receive(:perform_async)
- expect { result }.not_to change { project.issues.count }
- end
end
context 'when email belongs to an existing user as a confirmed secondary email' do
@@ -321,11 +316,11 @@ RSpec.describe Members::InviteService, :aggregate_failures, :clean_gitlab_redis_
let(:params) { { email: unconfirmed_user.email } }
- it 'adds an existing user to members' do
+ it 'adds a new member as an invite for unconfirmed primary email' do
expect_to_create_members(count: 1)
expect(result[:status]).to eq(:success)
- expect(project.users).to include unconfirmed_user
- expect(project.members.last).not_to be_invite
+ expect(project.users).not_to include unconfirmed_user
+ expect(project.members.last).to be_invite
end
end
@@ -339,23 +334,6 @@ RSpec.describe Members::InviteService, :aggregate_failures, :clean_gitlab_redis_
expect(result[:status]).to eq(:success)
expect(project.users).to include project_user
end
-
- context 'when assigning tasks to be done' do
- let(:params) do
- { user_id: project_user.id, tasks_to_be_done: %w(ci code), tasks_project_id: project.id }
- end
-
- it 'creates 2 task issues', :aggregate_failures do
- expect(TasksToBeDone::CreateWorker)
- .to receive(:perform_async)
- .with(anything, user.id, [project_user.id])
- .once
- .and_call_original
- expect { result }.to change { project.issues.count }.by(2)
-
- expect(project.issues).to all have_attributes(project: project, author: user)
- end
- end
end
end
diff --git a/spec/services/merge_requests/approval_service_spec.rb b/spec/services/merge_requests/approval_service_spec.rb
index 81fc5661032..e7fe5c19fa3 100644
--- a/spec/services/merge_requests/approval_service_spec.rb
+++ b/spec/services/merge_requests/approval_service_spec.rb
@@ -82,39 +82,12 @@ RSpec.describe MergeRequests::ApprovalService, feature_category: :code_review_wo
it 'records a value' do
service.execute(merge_request)
- expect(merge_request.approvals.last.patch_id_sha).not_to be_nil
+ expect(merge_request.approvals.last.patch_id_sha).to eq(merge_request.current_patch_id_sha)
end
- context 'when base_sha is nil' do
+ context 'when MergeRequest#current_patch_id_sha is nil' do
it 'records patch_id_sha as nil' do
- expect_next_instance_of(Gitlab::Diff::DiffRefs) do |diff_ref|
- expect(diff_ref).to receive(:base_sha).at_least(:once).and_return(nil)
- end
-
- service.execute(merge_request)
-
- expect(merge_request.approvals.last.patch_id_sha).to be_nil
- end
- end
-
- context 'when head_sha is nil' do
- it 'records patch_id_sha as nil' do
- expect_next_instance_of(Gitlab::Diff::DiffRefs) do |diff_ref|
- expect(diff_ref).to receive(:head_sha).at_least(:once).and_return(nil)
- end
-
- service.execute(merge_request)
-
- expect(merge_request.approvals.last.patch_id_sha).to be_nil
- end
- end
-
- context 'when base_sha and head_sha match' do
- it 'records patch_id_sha as nil' do
- expect_next_instance_of(Gitlab::Diff::DiffRefs) do |diff_ref|
- expect(diff_ref).to receive(:base_sha).at_least(:once).and_return("abc123")
- expect(diff_ref).to receive(:head_sha).at_least(:once).and_return("abc123")
- end
+ expect(merge_request).to receive(:current_patch_id_sha).and_return(nil)
service.execute(merge_request)
diff --git a/spec/services/merge_requests/create_ref_service_spec.rb b/spec/services/merge_requests/create_ref_service_spec.rb
index 5f7b9430416..b99187f9a56 100644
--- a/spec/services/merge_requests/create_ref_service_spec.rb
+++ b/spec/services/merge_requests/create_ref_service_spec.rb
@@ -246,13 +246,13 @@ RSpec.describe MergeRequests::CreateRefService, feature_category: :merge_trains
expect_next_instance_of(described_class) do |instance|
original = instance.method(:maybe_merge!)
- expect(instance).to receive(:maybe_merge!) do |*args|
+ expect(instance).to receive(:maybe_merge!) do |*args, **kwargs|
# Corrupt target_ref before the merge, simulating a race with
# another instance of the service for the same MR. source_sha is
# just an arbitrary valid commit that differs from what was just
# written.
project.repository.write_ref(target_ref, source_sha)
- original.call(*args)
+ original.call(*args, **kwargs)
end
end
diff --git a/spec/services/merge_requests/delete_non_latest_diffs_service_spec.rb b/spec/services/merge_requests/delete_non_latest_diffs_service_spec.rb
index d9e60911ada..7ce2317918d 100644
--- a/spec/services/merge_requests/delete_non_latest_diffs_service_spec.rb
+++ b/spec/services/merge_requests/delete_non_latest_diffs_service_spec.rb
@@ -17,7 +17,8 @@ RSpec.describe MergeRequests::DeleteNonLatestDiffsService, :clean_gitlab_redis_s
merge_request.reset
end
- it 'schedules non-latest merge request diffs removal' do
+ it 'schedules non-latest merge request diffs removal',
+ quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/426807' do
diffs = merge_request.merge_request_diffs
expect(diffs.count).to eq(4)
diff --git a/spec/services/merge_requests/mergeability/check_conflict_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_conflict_status_service_spec.rb
new file mode 100644
index 00000000000..14173c19bfb
--- /dev/null
+++ b/spec/services/merge_requests/mergeability/check_conflict_status_service_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::Mergeability::CheckConflictStatusService, feature_category: :code_review_workflow do
+ subject(:check_conflict_status) { described_class.new(merge_request: merge_request, params: {}) }
+
+ let(:merge_request) { build(:merge_request) }
+
+ describe '#execute' do
+ let(:result) { check_conflict_status.execute }
+
+ before do
+ allow(merge_request).to receive(:can_be_merged?).and_return(can_be_merged)
+ end
+
+ context 'when MergeRequest#can_be_merged is true' do
+ let(:can_be_merged) { true }
+
+ it 'returns a check result with status success' do
+ expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::SUCCESS_STATUS
+ end
+ end
+
+ context 'when MergeRequest#can_be_merged is false' do
+ let(:can_be_merged) { false }
+
+ it 'returns a check result with status failed' do
+ expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
+ expect(result.payload[:reason]).to eq(:conflict)
+ end
+ end
+ end
+
+ describe '#skip?' do
+ it 'returns false' do
+ expect(check_conflict_status.skip?).to eq false
+ end
+ end
+
+ describe '#cacheable?' do
+ it 'returns false' do
+ expect(check_conflict_status.cacheable?).to eq false
+ end
+ end
+end
diff --git a/spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb
index cb624705a02..3837022232d 100644
--- a/spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb
@@ -3,9 +3,12 @@
require 'spec_helper'
RSpec.describe MergeRequests::Mergeability::CheckDraftStatusService, feature_category: :code_review_workflow do
- subject(:check_draft_status) { described_class.new(merge_request: merge_request, params: {}) }
+ subject(:check_draft_status) { described_class.new(merge_request: merge_request, params: params) }
- let(:merge_request) { build(:merge_request) }
+ let_it_be(:merge_request) { build(:merge_request) }
+
+ let(:params) { { skip_draft_check: skip_check } }
+ let(:skip_check) { false }
describe '#execute' do
let(:result) { check_draft_status.execute }
@@ -33,8 +36,20 @@ RSpec.describe MergeRequests::Mergeability::CheckDraftStatusService, feature_cat
end
describe '#skip?' do
- it 'returns false' do
- expect(check_draft_status.skip?).to eq false
+ context 'when skip check param is true' do
+ let(:skip_check) { true }
+
+ it 'returns true' do
+ expect(check_draft_status.skip?).to eq true
+ end
+ end
+
+ context 'when skip check param is false' do
+ let(:skip_check) { false }
+
+ it 'returns false' do
+ expect(check_draft_status.skip?).to eq false
+ end
end
end
diff --git a/spec/services/merge_requests/mergeability/check_rebase_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_rebase_status_service_spec.rb
new file mode 100644
index 00000000000..31ec44856b1
--- /dev/null
+++ b/spec/services/merge_requests/mergeability/check_rebase_status_service_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::Mergeability::CheckRebaseStatusService, feature_category: :code_review_workflow do
+ subject(:check_rebase_status) { described_class.new(merge_request: merge_request, params: params) }
+
+ let(:merge_request) { build(:merge_request) }
+ let(:params) { { skip_rebase_check: skip_check } }
+ let(:skip_check) { false }
+
+ describe '#execute' do
+ let(:result) { check_rebase_status.execute }
+
+ before do
+ allow(merge_request).to receive(:should_be_rebased?).and_return(should_be_rebased)
+ end
+
+ context 'when the merge request should be rebased' do
+ let(:should_be_rebased) { true }
+
+ it 'returns a check result with status failed' do
+ expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
+ expect(result.payload[:reason]).to eq :need_rebase
+ end
+ end
+
+ context 'when the merge request should not be rebased' do
+ let(:should_be_rebased) { false }
+
+ it 'returns a check result with status success' do
+ expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::SUCCESS_STATUS
+ end
+ end
+ end
+
+ describe '#skip?' do
+ context 'when skip check is true' do
+ let(:skip_check) { true }
+
+ it 'returns true' do
+ expect(check_rebase_status.skip?).to eq true
+ end
+ end
+
+ context 'when skip check is false' do
+ let(:skip_check) { false }
+
+ it 'returns false' do
+ expect(check_rebase_status.skip?).to eq false
+ end
+ end
+ end
+
+ describe '#cacheable?' do
+ it 'returns false' do
+ expect(check_rebase_status.cacheable?).to eq false
+ end
+ end
+end
diff --git a/spec/services/merge_requests/mergeability/run_checks_service_spec.rb b/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
index bfff582994b..546d583a2fb 100644
--- a/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
@@ -3,16 +3,32 @@
require 'spec_helper'
RSpec.describe MergeRequests::Mergeability::RunChecksService, :clean_gitlab_redis_cache, feature_category: :code_review_workflow do
+ let(:checks) { MergeRequest.all_mergeability_checks }
+ let(:execute_all) { false }
+
subject(:run_checks) { described_class.new(merge_request: merge_request, params: {}) }
describe '#execute' do
- subject(:execute) { run_checks.execute }
+ subject(:execute) { run_checks.execute(checks, execute_all: execute_all) }
let_it_be(:merge_request) { create(:merge_request) }
let(:params) { {} }
let(:success_result) { Gitlab::MergeRequests::Mergeability::CheckResult.success }
+ shared_examples 'checks are all executed' do
+ context 'when all checks are set to be executed' do
+ let(:execute_all) { true }
+
+ specify do
+ result = execute
+
+ expect(result.success?).to eq(success?)
+ expect(result.payload[:results].count).to eq(expected_count)
+ end
+ end
+ end
+
context 'when every check is skipped', :eager_load do
before do
MergeRequests::Mergeability::CheckBaseService.subclasses.each do |subclass|
@@ -25,17 +41,28 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService, :clean_gitlab_redi
it 'is still a success' do
expect(execute.success?).to eq(true)
end
+
+ it_behaves_like 'checks are all executed' do
+ let(:success?) { true }
+ let(:expected_count) { 0 }
+ end
end
context 'when a check is skipped' do
- it 'does not execute the check' do
- merge_request.mergeability_checks.each do |check|
+ before do
+ checks.each do |check|
allow_next_instance_of(check) do |service|
allow(service).to receive(:skip?).and_return(false)
allow(service).to receive(:execute).and_return(success_result)
end
end
+ allow_next_instance_of(MergeRequests::Mergeability::CheckCiStatusService) do |service|
+ allow(service).to receive(:skip?).and_return(true)
+ end
+ end
+
+ it 'does not execute the check' do
expect_next_instance_of(MergeRequests::Mergeability::CheckCiStatusService) do |service|
expect(service).to receive(:skip?).and_return(true)
expect(service).not_to receive(:execute)
@@ -43,6 +70,34 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService, :clean_gitlab_redi
expect(execute.success?).to eq(true)
end
+
+ it_behaves_like 'checks are all executed' do
+ let(:success?) { true }
+ let(:expected_count) { checks.count - 1 }
+ end
+
+ context 'when one check fails' do
+ let(:failed_result) { Gitlab::MergeRequests::Mergeability::CheckResult.failed(payload: { reason: 'failed' }) }
+
+ before do
+ allow_next_instance_of(MergeRequests::Mergeability::CheckOpenStatusService) do |service|
+ allow(service).to receive(:skip?).and_return(false)
+ allow(service).to receive(:execute).and_return(failed_result)
+ end
+ end
+
+ it 'returns the failure reason' do
+ result = execute
+
+ expect(result.success?).to eq(false)
+ expect(execute.payload[:failure_reason]).to eq(:failed)
+ end
+
+ it_behaves_like 'checks are all executed' do
+ let(:success?) { false }
+ let(:expected_count) { checks.count - 1 }
+ end
+ end
end
context 'when a check is not skipped' do
@@ -50,7 +105,7 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService, :clean_gitlab_redi
let(:merge_check) { instance_double(MergeRequests::Mergeability::CheckCiStatusService) }
before do
- merge_request.mergeability_checks.each do |check|
+ checks.each do |check|
allow_next_instance_of(check) do |service|
allow(service).to receive(:skip?).and_return(true)
end
@@ -64,11 +119,13 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService, :clean_gitlab_redi
context 'when the check is cacheable' do
context 'when the check is cached' do
- it 'returns the cached result' do
+ before do
expect_next_instance_of(Gitlab::MergeRequests::Mergeability::ResultsStore) do |service|
expect(service).to receive(:read).with(merge_check: merge_check).and_return(success_result)
end
+ end
+ it 'returns the cached result' do
expect_next_instance_of(MergeRequests::Mergeability::Logger, merge_request: merge_request) do |logger|
expect(logger).to receive(:instrument).with(mergeability_name: 'check_ci_status_service').and_call_original
expect(logger).to receive(:commit)
@@ -76,15 +133,22 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService, :clean_gitlab_redi
expect(execute.success?).to eq(true)
end
+
+ it_behaves_like 'checks are all executed' do
+ let(:success?) { true }
+ let(:expected_count) { 1 }
+ end
end
context 'when the check is not cached' do
- it 'writes and returns the result' do
+ before do
expect_next_instance_of(Gitlab::MergeRequests::Mergeability::ResultsStore) do |service|
expect(service).to receive(:read).with(merge_check: merge_check).and_return(nil)
expect(service).to receive(:write).with(merge_check: merge_check, result_hash: success_result.to_hash).and_return(true)
end
+ end
+ it 'writes and returns the result' do
expect_next_instance_of(MergeRequests::Mergeability::Logger, merge_request: merge_request) do |logger|
expect(logger).to receive(:instrument).with(mergeability_name: 'check_ci_status_service').and_call_original
expect(logger).to receive(:commit)
@@ -92,6 +156,11 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService, :clean_gitlab_redi
expect(execute.success?).to eq(true)
end
+
+ it_behaves_like 'checks are all executed' do
+ let(:success?) { true }
+ let(:expected_count) { 1 }
+ end
end
end
@@ -106,76 +175,4 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService, :clean_gitlab_redi
end
end
end
-
- describe '#success?' do
- subject(:success) { run_checks.success? }
-
- let_it_be(:merge_request) { create(:merge_request) }
-
- context 'when the execute method has been executed' do
- before do
- run_checks.execute
- end
-
- context 'when all the checks succeed' do
- it 'returns true' do
- expect(success).to eq(true)
- end
- end
-
- context 'when one check fails' do
- before do
- allow(merge_request).to receive(:open?).and_return(false)
- run_checks.execute
- end
-
- it 'returns false' do
- expect(success).to eq(false)
- end
- end
- end
-
- context 'when execute has not been exectued' do
- it 'raises an error' do
- expect { subject }
- .to raise_error(/Execute needs to be called before/)
- end
- end
- end
-
- describe '#failure_reason' do
- subject(:failure_reason) { run_checks.failure_reason }
-
- let_it_be(:merge_request) { create(:merge_request) }
-
- context 'when the execute method has been executed' do
- context 'when all the checks succeed' do
- before do
- run_checks.execute
- end
-
- it 'returns nil' do
- expect(failure_reason).to eq(nil)
- end
- end
-
- context 'when one check fails' do
- before do
- allow(merge_request).to receive(:open?).and_return(false)
- run_checks.execute
- end
-
- it 'returns the open reason' do
- expect(failure_reason).to eq(:not_open)
- end
- end
- end
-
- context 'when execute has not been exectued' do
- it 'raises an error' do
- expect { subject }
- .to raise_error(/Execute needs to be called before/)
- end
- end
- end
end
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index 72e41f7b814..f5494f429c3 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -788,7 +788,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re
update_merge_request({ label_ids: [label.id] })
end
- expect(merge_request.reload.updated_at).to be > Time.current
+ expect(merge_request.reload.updated_at).to be_future
end
end
@@ -897,6 +897,27 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re
update_merge_request(title: 'New title')
end
+ context 'when additional_merge_when_checks_ready is enabled' do
+ it 'publishes a DraftStateChangeEvent' do
+ expected_data = {
+ current_user_id: user.id,
+ merge_request_id: merge_request.id
+ }
+
+ expect { update_merge_request(title: 'New title') }.to publish_event(MergeRequests::DraftStateChangeEvent).with(expected_data)
+ end
+ end
+
+ context 'when additional_merge_when_checks_ready is disabled' do
+ before do
+ stub_feature_flags(additional_merge_when_checks_ready: false)
+ end
+
+ it 'does not publish a DraftStateChangeEvent' do
+ expect { update_merge_request(title: 'New title') }.not_to publish_event(MergeRequests::DraftStateChangeEvent)
+ end
+ end
+
context 'when removing through wip_event param' do
it 'removes Draft from the title' do
expect { update_merge_request({ wip_event: "ready" }) }
@@ -923,6 +944,27 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re
should_not_email(non_subscriber)
end
+ context 'when additional_merge_when_checks_ready is enabled' do
+ it 'publishes a DraftStateChangeEvent' do
+ expected_data = {
+ current_user_id: user.id,
+ merge_request_id: merge_request.id
+ }
+
+ expect { update_merge_request(title: 'Draft: New title') }.to publish_event(MergeRequests::DraftStateChangeEvent).with(expected_data)
+ end
+ end
+
+ context 'when additional_merge_when_checks_ready is disabled' do
+ before do
+ stub_feature_flags(additional_merge_when_checks_ready: false)
+ end
+
+ it 'does not publish a DraftStateChangeEvent' do
+ expect { update_merge_request(title: 'Draft: New title') }.not_to publish_event(MergeRequests::DraftStateChangeEvent)
+ end
+ end
+
it 'triggers GraphQL subscription mergeRequestMergeStatusUpdated' do
expect(GraphqlTriggers).to receive(:merge_request_merge_status_updated).with(merge_request)
diff --git a/spec/services/ml/experiment_tracking/candidate_repository_spec.rb b/spec/services/ml/experiment_tracking/candidate_repository_spec.rb
index 9b46675a08e..c68a581c8ff 100644
--- a/spec/services/ml/experiment_tracking/candidate_repository_spec.rb
+++ b/spec/services/ml/experiment_tracking/candidate_repository_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ml::ExperimentTracking::CandidateRepository, feature_category: :experimentation_activation do
+RSpec.describe ::Ml::ExperimentTracking::CandidateRepository, feature_category: :activation do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:experiment) { create(:ml_experiments, user: user, project: project) }
diff --git a/spec/services/ml/experiment_tracking/experiment_repository_spec.rb b/spec/services/ml/experiment_tracking/experiment_repository_spec.rb
index 3c645fa84b4..f1afc4d66c2 100644
--- a/spec/services/ml/experiment_tracking/experiment_repository_spec.rb
+++ b/spec/services/ml/experiment_tracking/experiment_repository_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ml::ExperimentTracking::ExperimentRepository, feature_category: :experimentation_activation do
+RSpec.describe ::Ml::ExperimentTracking::ExperimentRepository, feature_category: :activation do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:experiment) { create(:ml_experiments, user: user, project: project) }
diff --git a/spec/services/ml/experiment_tracking/handle_candidate_gitlab_metadata_service_spec.rb b/spec/services/ml/experiment_tracking/handle_candidate_gitlab_metadata_service_spec.rb
index f0e7c241d5d..a3a7d538bcc 100644
--- a/spec/services/ml/experiment_tracking/handle_candidate_gitlab_metadata_service_spec.rb
+++ b/spec/services/ml/experiment_tracking/handle_candidate_gitlab_metadata_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ml::ExperimentTracking::HandleCandidateGitlabMetadataService, feature_category: :experimentation_activation do
+RSpec.describe ::Ml::ExperimentTracking::HandleCandidateGitlabMetadataService, feature_category: :activation do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.owner }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index b5eb5f8037a..0cc66696184 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -181,7 +181,7 @@ RSpec.describe Notes::CreateService, feature_category: :team_planning do
end
it_behaves_like 'internal event tracking' do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_ADDED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_ADDED }
let(:namespace) { project.namespace }
subject(:service_action) { execute_create_service }
end
diff --git a/spec/services/notes/destroy_service_spec.rb b/spec/services/notes/destroy_service_spec.rb
index 54782774b4e..33c973a2431 100644
--- a/spec/services/notes/destroy_service_spec.rb
+++ b/spec/services/notes/destroy_service_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe Notes::DestroyService, feature_category: :team_planning do
end
describe 'comment removed event tracking', :snowplow do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_REMOVED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_REMOVED }
let(:note) { create(:note, project: project, noteable: issue) }
let(:service_action) { described_class.new(project, user).execute(note) }
@@ -39,7 +39,7 @@ RSpec.describe Notes::DestroyService, feature_category: :team_planning do
expect do
service_action
end.to change {
- counter.unique_events(event_names: action, start_date: Date.today.beginning_of_week, end_date: 1.week.from_now)
+ counter.unique_events(event_names: event, start_date: Date.today.beginning_of_week, end_date: 1.week.from_now)
}.by(1)
end
diff --git a/spec/services/notes/quick_actions_service_spec.rb b/spec/services/notes/quick_actions_service_spec.rb
index b6e29299fdd..0a16037c976 100644
--- a/spec/services/notes/quick_actions_service_spec.rb
+++ b/spec/services/notes/quick_actions_service_spec.rb
@@ -334,6 +334,85 @@ RSpec.describe Notes::QuickActionsService, feature_category: :team_planning do
end
end
+ describe '/add_child' do
+ let_it_be_with_reload(:noteable) { create(:work_item, :objective, project: project) }
+ let_it_be_with_reload(:child) { create(:work_item, :objective, project: project) }
+ let_it_be_with_reload(:second_child) { create(:work_item, :objective, project: project) }
+ let_it_be(:note_text) { "/add_child #{child.to_reference}, #{second_child.to_reference}" }
+ let_it_be(:note) { create(:note, noteable: noteable, project: project, note: note_text) }
+ let_it_be(:children) { [child, second_child] }
+
+ shared_examples 'adds child work items' do
+ it 'leaves the note empty' do
+ expect(execute(note)).to be_empty
+ end
+
+ it 'adds child work items' do
+ execute(note)
+
+ expect(noteable.valid?).to be_truthy
+ expect(noteable.work_item_children).to eq(children)
+ end
+ end
+
+ context 'when using work item reference' do
+ let_it_be(:note_text) { "/add_child #{child.to_reference(full: true)},#{second_child.to_reference(full: true)}" }
+
+ it_behaves_like 'adds child work items'
+ end
+
+ context 'when using work item iid' do
+ it_behaves_like 'adds child work items'
+ end
+
+ context 'when using work item URL' do
+ let_it_be(:project_path) { "#{Gitlab.config.gitlab.url}/#{project.full_path}" }
+ let_it_be(:url) { "#{project_path}/work_items/#{child.iid},#{project_path}/work_items/#{second_child.iid}" }
+ let_it_be(:note_text) { "/add_child #{url}" }
+
+ it_behaves_like 'adds child work items'
+ end
+ end
+
+ describe '/set_parent' do
+ let_it_be_with_reload(:noteable) { create(:work_item, :objective, project: project) }
+ let_it_be_with_reload(:parent) { create(:work_item, :objective, project: project) }
+ let_it_be(:note_text) { "/set_parent #{parent.to_reference}" }
+ let_it_be(:note) { create(:note, noteable: noteable, project: project, note: note_text) }
+
+ shared_examples 'sets work item parent' do
+ it 'leaves the note empty' do
+ expect(execute(note)).to be_empty
+ end
+
+ it 'sets work item parent' do
+ execute(note)
+
+ expect(parent.valid?).to be_truthy
+ expect(noteable.work_item_parent).to eq(parent)
+ end
+ end
+
+ context 'when using work item reference' do
+ let_it_be(:note_text) { "/set_parent #{project.full_path}#{parent.to_reference}" }
+
+ it_behaves_like 'sets work item parent'
+ end
+
+ context 'when using work item iid' do
+ let_it_be(:note_text) { "/set_parent #{parent.to_reference}" }
+
+ it_behaves_like 'sets work item parent'
+ end
+
+ context 'when using work item URL' do
+ let_it_be(:url) { "#{Gitlab.config.gitlab.url}/#{project.full_path}/work_items/#{parent.iid}" }
+ let_it_be(:note_text) { "/set_parent #{url}" }
+
+ it_behaves_like 'sets work item parent'
+ end
+ end
+
describe '/promote_to' do
shared_examples 'promotes work item' do |from:, to:|
it 'leaves the note empty' do
diff --git a/spec/services/notes/update_service_spec.rb b/spec/services/notes/update_service_spec.rb
index 8389db000b8..908f348c68b 100644
--- a/spec/services/notes/update_service_spec.rb
+++ b/spec/services/notes/update_service_spec.rb
@@ -76,7 +76,7 @@ RSpec.describe Notes::UpdateService, feature_category: :team_planning do
end
it_behaves_like 'internal event tracking' do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_EDITED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_EDITED }
let(:namespace) { project.namespace }
subject(:service_action) { update_note(note: 'new text') }
diff --git a/spec/services/packages/npm/create_package_service_spec.rb b/spec/services/packages/npm/create_package_service_spec.rb
index 8b94bce6650..1c935c27d7f 100644
--- a/spec/services/packages/npm/create_package_service_spec.rb
+++ b/spec/services/packages/npm/create_package_service_spec.rb
@@ -235,7 +235,7 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
# TODO (technical debt): Extract the package size calculation outside the service and add separate specs for it.
# Right now we have several contexts here to test the calculation's different scenarios.
- context "when encoded package data is not padded" do
+ context 'when encoded package data is not padded' do
# 'Hello!' (size = 6 bytes) => 'SGVsbG8h'
let(:encoded_package_data) { 'SGVsbG8h' }
@@ -260,18 +260,18 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
end
end
- [
- '@inv@lid_scope/package',
- '@scope/sub/group',
- '@scope/../../package',
- '@scope%2e%2e%2fpackage'
- ].each do |invalid_package_name|
- context "with invalid name #{invalid_package_name}" do
- let(:package_name) { invalid_package_name }
-
- it 'raises a RecordInvalid error' do
- expect { subject }.to raise_error(ActiveRecord::RecordInvalid)
- end
+ context 'with invalid name' do
+ where(:package_name) do
+ [
+ '@inv@lid_scope/package',
+ '@scope/sub/group',
+ '@scope/../../package',
+ '@scope%2e%2e%2fpackage'
+ ]
+ end
+
+ with_them do
+ it { expect { subject }.to raise_error(ActiveRecord::RecordInvalid) }
end
end
@@ -283,8 +283,6 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
end
context 'with invalid versions' do
- using RSpec::Parameterized::TableSyntax
-
where(:version) do
[
'1',
diff --git a/spec/services/packages/nuget/extract_metadata_file_service_spec.rb b/spec/services/packages/nuget/extract_metadata_file_service_spec.rb
index 57b08f8773c..4c761826b53 100644
--- a/spec/services/packages/nuget/extract_metadata_file_service_spec.rb
+++ b/spec/services/packages/nuget/extract_metadata_file_service_spec.rb
@@ -3,9 +3,10 @@
require 'spec_helper'
RSpec.describe Packages::Nuget::ExtractMetadataFileService, feature_category: :package_registry do
- let_it_be_with_reload(:package_file) { create(:nuget_package).package_files.first }
+ let_it_be(:package_file) { build(:package_file, :nuget) }
+ let_it_be(:package_zip_file) { Zip::File.new(package_file.file) }
- let(:service) { described_class.new(package_file) }
+ let(:service) { described_class.new(package_zip_file) }
describe '#execute' do
subject { service.execute }
@@ -39,35 +40,9 @@ RSpec.describe Packages::Nuget::ExtractMetadataFileService, feature_category: :p
end
end
- context 'with invalid package file' do
- let(:package_file) { nil }
-
- it_behaves_like 'raises an error', 'invalid package file'
- end
-
- context 'when linked to a non nuget package' do
- before do
- package_file.package.maven!
- end
-
- it_behaves_like 'raises an error', 'invalid package file'
- end
-
- context 'with a 0 byte package file' do
- before do
- allow_next_instance_of(Packages::PackageFileUploader) do |instance|
- allow(instance).to receive(:size).and_return(0)
- end
- end
-
- it_behaves_like 'raises an error', 'invalid package file'
- end
-
context 'without the nuspec file' do
before do
- allow_next_instance_of(Zip::File) do |instance|
- allow(instance).to receive(:glob).and_return([])
- end
+ allow(package_zip_file).to receive(:glob).and_return([])
end
it_behaves_like 'raises an error', 'nuspec file not found'
@@ -75,9 +50,9 @@ RSpec.describe Packages::Nuget::ExtractMetadataFileService, feature_category: :p
context 'with a too big nuspec file' do
before do
- allow_next_instance_of(Zip::File) do |instance|
- allow(instance).to receive(:glob).and_return([instance_double(File, size: 6.megabytes)])
- end
+ allow(package_zip_file).to receive(:glob).and_return(
+ [instance_double(File, size: described_class::MAX_FILE_SIZE + 1)]
+ )
end
it_behaves_like 'raises an error', 'nuspec file too big'
@@ -85,10 +60,7 @@ RSpec.describe Packages::Nuget::ExtractMetadataFileService, feature_category: :p
context 'with a corrupted nupkg file with a wrong entry size' do
let(:nupkg_fixture_path) { expand_fixture_path('packages/nuget/corrupted_package.nupkg') }
-
- before do
- allow(Zip::File).to receive(:new).and_return(Zip::File.new(nupkg_fixture_path, false, false))
- end
+ let(:package_zip_file) { Zip::File.new(nupkg_fixture_path) }
it_behaves_like 'raises an error',
<<~ERROR.squish
diff --git a/spec/services/packages/nuget/metadata_extraction_service_spec.rb b/spec/services/packages/nuget/metadata_extraction_service_spec.rb
index ea7557b6d64..81a4e4a430b 100644
--- a/spec/services/packages/nuget/metadata_extraction_service_spec.rb
+++ b/spec/services/packages/nuget/metadata_extraction_service_spec.rb
@@ -3,13 +3,14 @@
require 'spec_helper'
RSpec.describe Packages::Nuget::MetadataExtractionService, feature_category: :package_registry do
- let_it_be(:package_file) { create(:nuget_package).package_files.first }
-
- subject { described_class.new(package_file) }
+ let_it_be(:package_file) { build(:package_file, :nuget) }
+ let(:service) { described_class.new(package_file) }
describe '#execute' do
+ subject { service.execute }
+
let(:nuspec_file_content) do
- <<~XML.squish
+ <<~XML
<?xml version="1.0" encoding="utf-8"?>
<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd">
<metadata>
@@ -49,18 +50,15 @@ RSpec.describe Packages::Nuget::MetadataExtractionService, feature_category: :pa
end
it 'calls the necessary services and executes the metadata extraction' do
- expect(::Packages::Nuget::ExtractMetadataFileService).to receive(:new).with(package_file) do
- double.tap do |service|
- expect(service).to receive(:execute).and_return(double(payload: nuspec_file_content))
- end
+ expect_next_instance_of(Packages::Nuget::ProcessPackageFileService, package_file) do |service|
+ expect(service).to receive(:execute).and_return(ServiceResponse.success(payload: { nuspec_file_content: nuspec_file_content }))
end
- expect(::Packages::Nuget::ExtractMetadataContentService).to receive_message_chain(:new, :execute)
- .with(nuspec_file_content).with(no_args).and_return(double(payload: expected_metadata))
-
- metadata = subject.execute.payload
+ expect_next_instance_of(Packages::Nuget::ExtractMetadataContentService, nuspec_file_content) do |service|
+ expect(service).to receive(:execute).and_call_original
+ end
- expect(metadata).to eq(expected_metadata)
+ expect(subject.payload).to eq(expected_metadata)
end
end
end
diff --git a/spec/services/packages/nuget/odata_package_entry_service_spec.rb b/spec/services/packages/nuget/odata_package_entry_service_spec.rb
index d4c47538ce2..b4a22fef32b 100644
--- a/spec/services/packages/nuget/odata_package_entry_service_spec.rb
+++ b/spec/services/packages/nuget/odata_package_entry_service_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Packages::Nuget::OdataPackageEntryService, feature_category: :package_registry do
+ include GrapePathHelpers::NamedRouteMatcher
+
let_it_be(:project) { build_stubbed(:project) }
let_it_be(:params) { { package_name: 'dummy', package_version: '1.0.0' } }
let(:doc) { Nokogiri::XML(subject.payload) }
@@ -10,7 +12,7 @@ RSpec.describe Packages::Nuget::OdataPackageEntryService, feature_category: :pac
subject { described_class.new(project, params).execute }
describe '#execute' do
- shared_examples 'returning a package entry with the correct attributes' do |pkg_version, content_url_pkg_version|
+ shared_examples 'returning a package entry with the correct attributes' do |pkg_version = ''|
it 'returns a package entry with the correct attributes' do
expect(doc.root.name).to eq('entry')
expect(doc_node('id').text).to include(
@@ -18,7 +20,7 @@ RSpec.describe Packages::Nuget::OdataPackageEntryService, feature_category: :pac
)
expect(doc_node('title').text).to eq(params[:package_name])
expect(doc_node('content').attr('src')).to include(
- content_url(project.id, params[:package_name], content_url_pkg_version)
+ content_url(project.id, params[:package_name], pkg_version)
)
expect(doc_node('Version').text).to eq(pkg_version)
end
@@ -29,29 +31,17 @@ RSpec.describe Packages::Nuget::OdataPackageEntryService, feature_category: :pac
expect(subject).to be_success
end
- it_behaves_like 'returning a package entry with the correct attributes', '1.0.0', '1.0.0'
+ it_behaves_like 'returning a package entry with the correct attributes', '1.0.0'
end
- context 'when package_version is nil' do
+ context 'when package_version is not present' do
let(:params) { { package_name: 'dummy', package_version: nil } }
it 'returns a success ServiceResponse' do
expect(subject).to be_success
end
- it_behaves_like 'returning a package entry with the correct attributes',
- described_class::SEMVER_LATEST_VERSION_PLACEHOLDER, described_class::LATEST_VERSION_FOR_V2_DOWNLOAD_ENDPOINT
- end
-
- context 'when package_version is 0.0.0-latest-version' do
- let(:params) { { package_name: 'dummy', package_version: described_class::SEMVER_LATEST_VERSION_PLACEHOLDER } }
-
- it 'returns a success ServiceResponse' do
- expect(subject).to be_success
- end
-
- it_behaves_like 'returning a package entry with the correct attributes',
- described_class::SEMVER_LATEST_VERSION_PLACEHOLDER, described_class::LATEST_VERSION_FOR_V2_DOWNLOAD_ENDPOINT
+ it_behaves_like 'returning a package entry with the correct attributes'
end
end
@@ -64,6 +54,13 @@ RSpec.describe Packages::Nuget::OdataPackageEntryService, feature_category: :pac
end
def content_url(id, package_name, package_version)
- "api/v4/projects/#{id}/packages/nuget/v2/download/#{package_name}/#{package_version}"
+ if package_version.present?
+ filename = "#{package_name}.#{package_version}.nupkg"
+ api_v4_projects_packages_nuget_download_package_name_package_version_package_filename_path(
+ { id: id, package_name: package_name, package_version: package_version, package_filename: filename }, true
+ )
+ else
+ api_v4_projects_packages_nuget_v2_path(id: id)
+ end
end
end
diff --git a/spec/services/packages/nuget/process_package_file_service_spec.rb b/spec/services/packages/nuget/process_package_file_service_spec.rb
new file mode 100644
index 00000000000..cdeb5b32737
--- /dev/null
+++ b/spec/services/packages/nuget/process_package_file_service_spec.rb
@@ -0,0 +1,83 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Nuget::ProcessPackageFileService, feature_category: :package_registry do
+ let_it_be(:package_file) { build(:package_file, :nuget) }
+
+ let(:service) { described_class.new(package_file) }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ shared_examples 'raises an error' do |error_message|
+ it { expect { subject }.to raise_error(described_class::ExtractionError, error_message) }
+ end
+
+ shared_examples 'not creating a symbol file' do
+ it 'does not call the CreateSymbolFilesService' do
+ expect(Packages::Nuget::Symbols::CreateSymbolFilesService).not_to receive(:new)
+
+ expect(subject).to be_success
+ end
+ end
+
+ context 'with valid package file' do
+ it 'calls the ExtractMetadataFileService' do
+ expect_next_instance_of(Packages::Nuget::ExtractMetadataFileService, instance_of(Zip::File)) do |service|
+ expect(service).to receive(:execute) do
+ instance_double(ServiceResponse).tap do |response|
+ expect(response).to receive(:payload).and_return(instance_of(String))
+ end
+ end
+ end
+
+ expect(subject).to be_success
+ end
+ end
+
+ context 'with invalid package file' do
+ let(:package_file) { nil }
+
+ it_behaves_like 'raises an error', 'invalid package file'
+ end
+
+ context 'when linked to a non nuget package' do
+ before do
+ package_file.package.maven!
+ end
+
+ it_behaves_like 'raises an error', 'invalid package file'
+ end
+
+ context 'with a 0 byte package file' do
+ before do
+ allow_next_instance_of(Packages::PackageFileUploader) do |instance|
+ allow(instance).to receive(:size).and_return(0)
+ end
+ end
+
+ it_behaves_like 'raises an error', 'invalid package file'
+ end
+
+ context 'with a symbol package file' do
+ let(:package_file) { build(:package_file, :snupkg) }
+
+ it 'calls the CreateSymbolFilesService' do
+ expect_next_instance_of(
+ Packages::Nuget::Symbols::CreateSymbolFilesService, package_file.package, instance_of(Zip::File)
+ ) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ expect(subject).to be_success
+ end
+ end
+
+ context 'with a non symbol package file' do
+ let(:package_file) { build(:package_file, :nuget) }
+
+ it_behaves_like 'not creating a symbol file'
+ end
+ end
+end
diff --git a/spec/services/packages/nuget/symbols/create_symbol_files_service_spec.rb b/spec/services/packages/nuget/symbols/create_symbol_files_service_spec.rb
new file mode 100644
index 00000000000..97bfc3e06a8
--- /dev/null
+++ b/spec/services/packages/nuget/symbols/create_symbol_files_service_spec.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Nuget::Symbols::CreateSymbolFilesService, feature_category: :package_registry do
+ let_it_be(:package) { create(:nuget_package) }
+ let_it_be(:package_file) do
+ create(:package_file, :snupkg, package: package,
+ file_fixture: expand_fixture_path('packages/nuget/package_with_symbols.snupkg'))
+ end
+
+ let(:package_zip_file) { Zip::File.new(package_file.file) }
+ let(:service) { described_class.new(package, package_zip_file) }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ shared_examples 'logs an error' do |error_class|
+ it 'logs an error' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ an_instance_of(error_class),
+ class: described_class.name,
+ package_id: package.id
+ )
+
+ subject
+ end
+ end
+
+ context 'when symbol files are found' do
+ it 'creates a symbol record and extracts the signature' do
+ expect_next_instance_of(Packages::Nuget::Symbols::ExtractSymbolSignatureService,
+ instance_of(String)) do |service|
+ expect(service).to receive(:execute).and_call_original
+ end
+
+ expect { subject }.to change { package.nuget_symbols.count }.by(1)
+ end
+ end
+
+ context 'when symbol files hit the limit' do
+ before do
+ stub_const("#{described_class}::SYMBOL_ENTRIES_LIMIT", 0)
+ end
+
+ it 'does not create a symbol record' do
+ expect { subject }.not_to change { package.nuget_symbols.count }
+ end
+
+ it_behaves_like 'logs an error', described_class::ExtractionError
+ end
+
+ context 'when creating a symbol record without a signature' do
+ before do
+ allow_next_instance_of(Packages::Nuget::Symbols::ExtractSymbolSignatureService) do |instance|
+ allow(instance).to receive(:execute).and_return(ServiceResponse.success(payload: nil))
+ end
+ end
+
+ it 'does not call create! on the symbol record' do
+ expect(::Packages::Nuget::Symbol).not_to receive(:create!)
+
+ subject
+ end
+ end
+
+ context 'when creating duplicate symbol records' do
+ let_it_be(:symbol) { create(:nuget_symbol, package: package) }
+
+ before do
+ allow_next_instance_of(Packages::Nuget::Symbols::ExtractSymbolSignatureService) do |instance|
+ allow(instance).to receive(:execute).and_return(ServiceResponse.success(payload: symbol.signature))
+ end
+ end
+
+ it 'does not create a symbol record' do
+ expect { subject }.not_to change { package.nuget_symbols.count }
+ end
+
+ it_behaves_like 'logs an error', ActiveRecord::RecordInvalid
+ end
+
+ context 'when a symbol file has the wrong entry size' do
+ before do
+ allow_next_instance_of(Zip::Entry) do |instance|
+ allow(instance).to receive(:extract).and_raise(Zip::EntrySizeError)
+ end
+ end
+
+ it_behaves_like 'logs an error', described_class::ExtractionError
+ end
+
+ context 'when a symbol file has the wrong entry name' do
+ before do
+ allow_next_instance_of(Zip::Entry) do |instance|
+ allow(instance).to receive(:extract).and_raise(Zip::EntryNameError)
+ end
+ end
+
+ it_behaves_like 'logs an error', described_class::ExtractionError
+ end
+ end
+end
diff --git a/spec/services/packages/nuget/symbols/extract_symbol_signature_service_spec.rb b/spec/services/packages/nuget/symbols/extract_symbol_signature_service_spec.rb
new file mode 100644
index 00000000000..87b0d00a0a7
--- /dev/null
+++ b/spec/services/packages/nuget/symbols/extract_symbol_signature_service_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Nuget::Symbols::ExtractSymbolSignatureService, feature_category: :package_registry do
+ let_it_be(:symbol_file) { fixture_file('packages/nuget/symbol/package.pdb') }
+
+ let(:service) { described_class.new(symbol_file) }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ context 'with a valid symbol file' do
+ it { expect(subject.payload).to eq('b91a152048fc4b3883bf3cf73fbc03f1FFFFFFFF') }
+ end
+
+ context 'with corrupted data' do
+ let(:symbol_file) { 'corrupted data' }
+
+ it { expect(subject).to be_error }
+ end
+ end
+end
diff --git a/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb b/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
index 0459588bf8d..cb70176ee61 100644
--- a/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
+++ b/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_redis_shared_state, feature_category: :package_registry do
include ExclusiveLeaseHelpers
- let!(:package) { create(:nuget_package, :processing, :with_symbol_package) }
+ let!(:package) { create(:nuget_package, :processing, :with_symbol_package, :with_build) }
let(:package_file) { package.package_files.first }
let(:service) { described_class.new(package_file) }
let(:package_name) { 'DummyProject.DummyPackage' }
@@ -101,6 +101,7 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
.and change { Packages::DependencyLink.count }.by(0)
.and change { Packages::Nuget::DependencyLinkMetadatum.count }.by(0)
.and change { ::Packages::Nuget::Metadatum.count }.by(1)
+ .and change { existing_package.build_infos.count }.by(1)
expect(package_file.reload.file_name).to eq(package_file_name)
expect(package_file.package).to eq(existing_package)
end
@@ -260,6 +261,16 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
expect(package_file.package).to eq(existing_package)
end
+ context 'with packages_nuget_symbols records' do
+ before do
+ create_list(:nuget_symbol, 2, package: package)
+ end
+
+ it 'links the symbol records to the existing package' do
+ expect { subject }.to change { existing_package.nuget_symbols.count }.by(2)
+ end
+ end
+
it_behaves_like 'taking the lease'
it_behaves_like 'not updating the package if the lease is taken'
diff --git a/spec/services/packages/protection/create_rule_service_spec.rb b/spec/services/packages/protection/create_rule_service_spec.rb
new file mode 100644
index 00000000000..67835479473
--- /dev/null
+++ b/spec/services/packages/protection/create_rule_service_spec.rb
@@ -0,0 +1,139 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Protection::CreateRuleService, '#execute', feature_category: :environment_management do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:maintainer) { create(:user).tap { |u| project.add_maintainer(u) } }
+
+ let(:service) { described_class.new(project: project, current_user: current_user, params: params) }
+ let(:current_user) { maintainer }
+ let(:params) { attributes_for(:package_protection_rule) }
+
+ subject { service.execute }
+
+ shared_examples 'a successful service response' do
+ let(:package_protection_rule_count_expected) { 1 }
+ it { is_expected.to be_success }
+
+ it do
+ is_expected.to have_attributes(
+ payload: include(
+ package_protection_rule: be_a(Packages::Protection::Rule)
+ )
+ )
+ end
+
+ it { expect(subject.payload).to include(package_protection_rule: be_a(Packages::Protection::Rule)) }
+
+ it do
+ expect { subject }.to change { Packages::Protection::Rule.count }.by(1)
+
+ expect(Packages::Protection::Rule.where(project: project).count).to eq package_protection_rule_count_expected
+ expect(Packages::Protection::Rule.where(project: project,
+ package_name_pattern: params[:package_name_pattern])).to exist
+ end
+ end
+
+ shared_examples 'an erroneous service response' do
+ let(:package_protection_rule_count_expected) { 0 }
+ it { is_expected.to be_error }
+ it { is_expected.to have_attributes(payload: include(package_protection_rule: nil)) }
+
+ it do
+ expect { subject }.to change { Packages::Protection::Rule.count }.by(0)
+
+ expect(Packages::Protection::Rule.where(project: project).count).to eq package_protection_rule_count_expected
+ expect(Packages::Protection::Rule.where(project: project,
+ package_name_pattern: params[:package_name_pattern])).not_to exist
+ end
+ end
+
+ context 'without existing PackageProtectionRules' do
+ context 'when fields are valid' do
+ it_behaves_like 'a successful service response'
+ end
+
+ context 'when fields are invalid' do
+ let(:params) do
+ {
+ package_name_pattern: '',
+ package_type: 'unknown_package_type',
+ push_protected_up_to_access_level: 1000
+ }
+ end
+
+ it_behaves_like 'an erroneous service response'
+ end
+ end
+
+ context 'with existing PackageProtectionRule' do
+ let_it_be(:existing_package_protection_rule) { create(:package_protection_rule, project: project) }
+
+ context 'when package name pattern is slightly different' do
+ let(:params) do
+ attributes_for(
+ :package_protection_rule,
+ # The field `package_name_pattern` is unique; this is why we change the value in a minimum way
+ package_name_pattern: "#{existing_package_protection_rule.package_name_pattern}-unique",
+ package_type: existing_package_protection_rule.package_type,
+ push_protected_up_to_access_level: existing_package_protection_rule.push_protected_up_to_access_level
+ )
+ end
+
+ it_behaves_like 'a successful service response' do
+ let(:package_protection_rule_count_expected) { 2 }
+ end
+ end
+
+ context 'when field `package_name_pattern` is taken' do
+ let(:params) do
+ attributes_for(
+ :package_protection_rule,
+ package_name_pattern: existing_package_protection_rule.package_name_pattern,
+ package_type: existing_package_protection_rule.package_type,
+ push_protected_up_to_access_level: existing_package_protection_rule.push_protected_up_to_access_level
+ )
+ end
+
+ it { is_expected.to be_error }
+
+ it do
+ expect { subject }.to change { Packages::Protection::Rule.count }.by(0)
+
+ expect(Packages::Protection::Rule.where(project: project).count).to eq 1
+ expect(
+ Packages::Protection::Rule.where(
+ project: project,
+ package_name_pattern: params[:package_name_pattern]
+ )
+ ).to exist
+ end
+ end
+ end
+
+ context 'when disallowed params are passed' do
+ let(:params) do
+ attributes_for(:package_protection_rule)
+ .merge(
+ project_id: 1,
+ unsupported_param: 'unsupported_param_value'
+ )
+ end
+
+ it_behaves_like 'a successful service response'
+ end
+
+ context 'with forbidden user access level (project developer role)' do
+ # Because of the access level hierarchy, we can assume that
+ # other access levels below developer role will also not be able to
+ # create package protection rules.
+ let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
+
+ let(:current_user) { developer }
+
+ it_behaves_like 'an erroneous service response'
+
+ it { is_expected.to have_attributes(message: match(/Unauthorized/)) }
+ end
+end
diff --git a/spec/services/pages/migrate_from_legacy_storage_service_spec.rb b/spec/services/pages/migrate_from_legacy_storage_service_spec.rb
deleted file mode 100644
index 48690a035f5..00000000000
--- a/spec/services/pages/migrate_from_legacy_storage_service_spec.rb
+++ /dev/null
@@ -1,137 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Pages::MigrateFromLegacyStorageService, feature_category: :pages do
- let(:batch_size) { 10 }
- let(:mark_projects_as_not_deployed) { false }
- let(:service) { described_class.new(Rails.logger, ignore_invalid_entries: false, mark_projects_as_not_deployed: mark_projects_as_not_deployed) }
-
- shared_examples "migrates projects properly" do
- it 'does not try to migrate pages if pages are not deployed' do
- expect(::Pages::MigrateLegacyStorageToDeploymentService).not_to receive(:new)
-
- is_expected.to eq(migrated: 0, errored: 0)
- end
-
- context 'when pages are marked as deployed' do
- let(:project) { create(:project) }
-
- before do
- project.mark_pages_as_deployed
- end
-
- context 'when pages directory does not exist' do
- context 'when mark_projects_as_not_deployed is set' do
- let(:mark_projects_as_not_deployed) { true }
-
- it 'counts project as migrated' do
- expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project, ignore_invalid_entries: false, mark_projects_as_not_deployed: true) do |service|
- expect(service).to receive(:execute).and_call_original
- end
-
- is_expected.to eq(migrated: 1, errored: 0)
- end
- end
-
- it 'counts project as errored' do
- expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project, ignore_invalid_entries: false, mark_projects_as_not_deployed: false) do |service|
- expect(service).to receive(:execute).and_call_original
- end
-
- is_expected.to eq(migrated: 0, errored: 1)
- end
- end
-
- context 'when pages directory exists on disk' do
- before do
- FileUtils.mkdir_p File.join(project.pages_path, "public")
- File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
- f.write("Hello!")
- end
- end
-
- it 'migrates pages projects without deployments' do
- expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project, ignore_invalid_entries: false, mark_projects_as_not_deployed: false) do |service|
- expect(service).to receive(:execute).and_call_original
- end
-
- expect(project.pages_metadatum.reload.pages_deployment).to eq(nil)
- expect(subject).to eq(migrated: 1, errored: 0)
- expect(project.pages_metadatum.reload.pages_deployment).to be_present
- end
-
- context 'when deployed already exists for the project' do
- before do
- deployment = create(:pages_deployment, project: project)
- project.set_first_pages_deployment!(deployment)
- end
-
- it 'does not try to migrate project' do
- expect(::Pages::MigrateLegacyStorageToDeploymentService).not_to receive(:new)
-
- is_expected.to eq(migrated: 0, errored: 0)
- end
- end
- end
- end
- end
-
- describe '#execute_with_threads' do
- subject { service.execute_with_threads(threads: 3, batch_size: batch_size) }
-
- include_examples "migrates projects properly"
-
- context 'when there is work for multiple threads' do
- let(:batch_size) { 2 } # override to force usage of multiple threads
-
- it 'uses multiple threads' do
- projects = create_list(:project, 20)
- projects.each do |project|
- project.mark_pages_as_deployed
-
- FileUtils.mkdir_p File.join(project.pages_path, "public")
- File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
- f.write("Hello!")
- end
- end
-
- threads = Concurrent::Set.new
-
- expect(service).to receive(:migrate_project).exactly(20).times.and_wrap_original do |m, *args|
- threads.add(Thread.current)
-
- # sleep to be 100% certain that once thread can't consume all the queue
- # it works without it, but I want to avoid making this test flaky
- sleep(0.01)
-
- m.call(*args)
- end
-
- is_expected.to eq(migrated: 20, errored: 0)
- expect(threads.length).to eq(3)
- end
- end
- end
-
- describe "#execute_for_batch" do
- subject { service.execute_for_batch(Project.ids) }
-
- include_examples "migrates projects properly"
-
- it 'only tries to migrate projects with passed ids' do
- projects = create_list(:project, 5)
-
- projects.each(&:mark_pages_as_deployed)
- projects_to_migrate = projects.first(3)
-
- projects_to_migrate.each do |project|
- expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project, ignore_invalid_entries: false, mark_projects_as_not_deployed: false) do |service|
- expect(service).to receive(:execute).and_call_original
- end
- end
-
- expect(service.execute_for_batch(projects_to_migrate.pluck(:id))).to eq(migrated: 0, errored: 3)
- end
- end
-end
diff --git a/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb b/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb
deleted file mode 100644
index e1cce2c87eb..00000000000
--- a/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb
+++ /dev/null
@@ -1,118 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Pages::MigrateLegacyStorageToDeploymentService, feature_category: :pages do
- let(:project) { create(:project, :repository) }
- let(:service) { described_class.new(project) }
-
- it 'calls ::Pages::ZipDirectoryService' do
- expect_next_instance_of(::Pages::ZipDirectoryService, project.pages_path, ignore_invalid_entries: true) do |zip_service|
- expect(zip_service).to receive(:execute).and_call_original
- end
-
- expect(described_class.new(project, ignore_invalid_entries: true).execute[:status]).to eq(:error)
- end
-
- context 'when mark_projects_as_not_deployed is passed' do
- let(:service) { described_class.new(project, mark_projects_as_not_deployed: true) }
-
- it 'marks pages as not deployed if public directory is absent and invalid entries are ignored' do
- project.mark_pages_as_deployed
- expect(project.pages_metadatum.reload.deployed).to eq(true)
-
- expect(service.execute).to eq(
- status: :success,
- message: "Archive not created. Missing public directory in #{project.pages_path}? Marked project as not deployed"
- )
-
- expect(project.pages_metadatum.reload.deployed).to eq(false)
- end
-
- it 'does not mark pages as not deployed if public directory is absent but pages_deployment exists' do
- deployment = create(:pages_deployment, project: project)
- project.update_pages_deployment!(deployment)
- project.mark_pages_as_deployed
- expect(project.pages_metadatum.reload.deployed).to eq(true)
-
- expect(service.execute).to eq(
- status: :success,
- message: "Archive not created. Missing public directory in #{project.pages_path}? Marked project as not deployed"
- )
-
- expect(project.pages_metadatum.reload.deployed).to eq(true)
- end
- end
-
- it 'does not mark pages as not deployed if public directory is absent but invalid entries are not ignored' do
- project.mark_pages_as_deployed
-
- expect(project.pages_metadatum.reload.deployed).to eq(true)
-
- expect(service.execute).to eq(
- status: :error,
- message: "Archive not created. Missing public directory in #{project.pages_path}"
- )
-
- expect(project.pages_metadatum.reload.deployed).to eq(true)
- end
-
- it 'removes pages archive when can not save deployment' do
- archive = fixture_file_upload("spec/fixtures/pages.zip")
- expect_next_instance_of(::Pages::ZipDirectoryService) do |zip_service|
- expect(zip_service).to receive(:execute).and_return(
- status: :success, archive_path: archive.path, entries_count: 3
- )
- end
-
- expect_next_instance_of(PagesDeployment) do |deployment|
- expect(deployment).to receive(:save!).and_raise("Something")
- end
-
- expect do
- service.execute
- end.to raise_error("Something")
-
- expect(File.exist?(archive.path)).to eq(false)
- end
-
- context 'when pages site is deployed to legacy storage' do
- before do
- FileUtils.mkdir_p File.join(project.pages_path, "public")
- File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
- f.write("Hello!")
- end
- end
-
- it 'creates pages deployment' do
- expect do
- expect(described_class.new(project).execute).to eq(status: :success)
- end.to change { project.reload.pages_deployments.count }.by(1)
-
- deployment = project.pages_metadatum.pages_deployment
-
- Zip::File.open(deployment.file.path) do |zip_file|
- expect(zip_file.glob("public").first.ftype).to eq(:directory)
- expect(zip_file.glob("public/index.html").first.get_input_stream.read).to eq("Hello!")
- end
-
- expect(deployment.file_count).to eq(2)
- expect(deployment.file_sha256).to eq(Digest::SHA256.file(deployment.file.path).hexdigest)
- end
-
- it 'removes tmp pages archive' do
- described_class.new(project).execute
-
- expect(File.exist?(File.join(project.pages_path, '@migrated.zip'))).to eq(false)
- end
-
- it 'does not change pages deployment if it is set' do
- old_deployment = create(:pages_deployment, project: project)
- project.update_pages_deployment!(old_deployment)
-
- expect do
- described_class.new(project).execute
- end.not_to change { project.pages_metadatum.reload.pages_deployment_id }.from(old_deployment.id)
- end
- end
-end
diff --git a/spec/services/pages/zip_directory_service_spec.rb b/spec/services/pages/zip_directory_service_spec.rb
deleted file mode 100644
index 4917bc65a02..00000000000
--- a/spec/services/pages/zip_directory_service_spec.rb
+++ /dev/null
@@ -1,280 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Pages::ZipDirectoryService, feature_category: :pages do
- around do |example|
- Dir.mktmpdir do |dir|
- @work_dir = dir
- example.run
- end
- end
-
- let(:ignore_invalid_entries) { false }
-
- let(:service_directory) { @work_dir }
-
- let(:service) do
- described_class.new(service_directory, ignore_invalid_entries: ignore_invalid_entries)
- end
-
- let(:result) do
- service.execute
- end
-
- let(:status) { result[:status] }
- let(:message) { result[:message] }
- let(:archive) { result[:archive_path] }
- let(:entries_count) { result[:entries_count] }
-
- it 'returns true if ZIP64 is enabled' do
- expect(::Zip.write_zip64_support).to be true
- end
-
- shared_examples 'handles invalid public directory' do
- it 'returns success' do
- expect(status).to eq(:success)
- expect(archive).to be_nil
- expect(entries_count).to be_nil
- end
- end
-
- context "when work directory doesn't exist" do
- let(:service_directory) { "/tmp/not/existing/dir" }
-
- include_examples 'handles invalid public directory'
- end
-
- context 'when public directory is absent' do
- include_examples 'handles invalid public directory'
- end
-
- context 'when public directory is a symlink' do
- before do
- create_dir('target')
- create_file('./target/index.html', 'hello')
- create_link("public", "./target")
- end
-
- include_examples 'handles invalid public directory'
- end
-
- context 'when there is a public directory' do
- before do
- create_dir('public')
- end
-
- it 'creates the file next the public directory' do
- expect(archive).to eq(File.join(@work_dir, "@migrated.zip"))
- end
-
- it 'includes public directory' do
- with_zip_file do |zip_file|
- entry = zip_file.get_entry("public/")
- expect(entry.ftype).to eq(:directory)
- end
- end
-
- it 'returns number of entries' do
- create_file("public/index.html", "hello")
- create_link("public/link.html", "./index.html")
- expect(entries_count).to eq(3) # + 'public' directory
- end
-
- it 'removes the old file if it exists' do
- # simulate the old run
- described_class.new(@work_dir).execute
-
- with_zip_file do |zip_file|
- expect(zip_file.entries.count).to eq(1)
- end
- end
-
- it 'ignores other top level files and directories' do
- create_file("top_level.html", "hello")
- create_dir("public2")
-
- with_zip_file do |zip_file|
- expect { zip_file.get_entry("top_level.html") }.to raise_error(Errno::ENOENT)
- expect { zip_file.get_entry("public2/") }.to raise_error(Errno::ENOENT)
- end
- end
-
- it 'includes index.html file' do
- create_file("public/index.html", "Hello!")
-
- with_zip_file do |zip_file|
- entry = zip_file.get_entry("public/index.html")
- expect(zip_file.read(entry)).to eq("Hello!")
- end
- end
-
- it 'includes hidden file' do
- create_file("public/.hidden.html", "Hello!")
-
- with_zip_file do |zip_file|
- entry = zip_file.get_entry("public/.hidden.html")
- expect(zip_file.read(entry)).to eq("Hello!")
- end
- end
-
- it 'includes nested directories and files' do
- create_dir("public/nested")
- create_dir("public/nested/nested2")
- create_file("public/nested/nested2/nested.html", "Hello nested")
-
- with_zip_file do |zip_file|
- entry = zip_file.get_entry("public/nested")
- expect(entry.ftype).to eq(:directory)
-
- entry = zip_file.get_entry("public/nested/nested2")
- expect(entry.ftype).to eq(:directory)
-
- entry = zip_file.get_entry("public/nested/nested2/nested.html")
- expect(zip_file.read(entry)).to eq("Hello nested")
- end
- end
-
- it 'adds a valid symlink' do
- create_file("public/target.html", "hello")
- create_link("public/link.html", "./target.html")
-
- with_zip_file do |zip_file|
- entry = zip_file.get_entry("public/link.html")
- expect(entry.ftype).to eq(:symlink)
- expect(zip_file.read(entry)).to eq("./target.html")
- end
- end
-
- shared_examples "raises or ignores file" do |raised_exception, file|
- it 'raises error' do
- expect do
- result
- end.to raise_error(raised_exception)
- end
-
- context 'when errors are ignored' do
- let(:ignore_invalid_entries) { true }
-
- it 'does not create entry' do
- with_zip_file do |zip_file|
- expect { zip_file.get_entry(file) }.to raise_error(Errno::ENOENT)
- end
- end
- end
- end
-
- context 'when symlink points outside of public directory' do
- before do
- create_file("target.html", "hello")
- create_link("public/link.html", "../target.html")
- end
-
- include_examples "raises or ignores file", described_class::InvalidEntryError, "public/link.html"
- end
-
- context 'when target of the symlink is absent' do
- before do
- create_link("public/link.html", "./target.html")
- end
-
- include_examples "raises or ignores file", Errno::ENOENT, "public/link.html"
- end
-
- context 'when targets itself' do
- before do
- create_link("public/link.html", "./link.html")
- end
-
- include_examples "raises or ignores file", Errno::ELOOP, "public/link.html"
- end
-
- context 'when symlink is absolute and points to outside of directory' do
- before do
- target = File.join(@work_dir, "target")
- FileUtils.touch(target)
-
- create_link("public/link.html", target)
- end
-
- include_examples "raises or ignores file", described_class::InvalidEntryError, "public/link.html"
- end
-
- context 'when entry has unknown ftype' do
- before do
- file = create_file("public/index.html", "hello")
-
- allow(File).to receive(:lstat).and_call_original
- expect(File).to receive(:lstat).with(file) { double("lstat", ftype: "unknown") }
- end
-
- include_examples "raises or ignores file", described_class::InvalidEntryError, "public/index.html"
- end
-
- it "includes raw symlink if it's target is a valid directory" do
- create_dir("public/target")
- create_file("public/target/index.html", "hello")
- create_link("public/link", "./target")
-
- with_zip_file do |zip_file|
- expect(zip_file.entries.count).to eq(4) # /public and 3 created above
-
- entry = zip_file.get_entry("public/link")
- expect(entry.ftype).to eq(:symlink)
- expect(zip_file.read(entry)).to eq("./target")
- end
- end
- end
-
- context "validating fixtures pages archives" do
- using RSpec::Parameterized::TableSyntax
-
- where(:fixture_path) do
- ["spec/fixtures/pages.zip", "spec/fixtures/pages_non_writeable.zip"]
- end
-
- with_them do
- let(:full_fixture_path) { Rails.root.join(fixture_path) }
-
- it 'a created archives contains exactly the same entries' do
- SafeZip::Extract.new(full_fixture_path).extract(directories: ['public'], to: @work_dir)
-
- with_zip_file do |created_archive|
- Zip::File.open(full_fixture_path) do |original_archive|
- original_archive.entries do |original_entry|
- created_entry = created_archive.get_entry(original_entry.name)
-
- expect(created_entry.name).to eq(original_entry.name)
- expect(created_entry.ftype).to eq(original_entry.ftype)
- expect(created_archive.read(created_entry)).to eq(original_archive.read(original_entry))
- end
- end
- end
- end
- end
- end
-
- def create_file(name, content)
- file_path = File.join(@work_dir, name)
-
- File.open(file_path, "w") do |f|
- f.write(content)
- end
-
- file_path
- end
-
- def create_dir(dir)
- Dir.mkdir(File.join(@work_dir, dir))
- end
-
- def create_link(new_name, target)
- File.symlink(target, File.join(@work_dir, new_name))
- end
-
- def with_zip_file
- Zip::File.open(archive) do |zip_file|
- yield zip_file
- end
- end
-end
diff --git a/spec/services/projects/after_rename_service_spec.rb b/spec/services/projects/after_rename_service_spec.rb
index 411ff5662d4..4b2569f6b2d 100644
--- a/spec/services/projects/after_rename_service_spec.rb
+++ b/spec/services/projects/after_rename_service_spec.rb
@@ -21,183 +21,88 @@ RSpec.describe Projects::AfterRenameService, feature_category: :groups_and_proje
end
describe '#execute' do
- context 'using legacy storage' do
- let(:project) { create(:project, :repository, :wiki_repo, :legacy_storage) }
- let(:project_storage) { project.send(:storage) }
- let(:gitlab_shell) { Gitlab::Shell.new }
-
- before do
- # Project#gitlab_shell returns a new instance of Gitlab::Shell on every
- # call. This makes testing a bit easier.
- allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
-
- stub_application_setting(hashed_storage_enabled: false)
- end
-
- it 'renames a repository' do
- stub_container_registry_config(enabled: false)
-
- expect_any_instance_of(SystemHooksService)
- .to receive(:execute_hooks_for)
- .with(project, :rename)
-
- expect_any_instance_of(Gitlab::UploadsTransfer)
- .to receive(:rename_project)
- .with(path_before_rename, path_after_rename, project.namespace.full_path)
-
- expect(repo_before_rename).to exist
- expect(wiki_repo_before_rename).to exist
-
- service_execute
-
- expect(repo_before_rename).not_to exist
- expect(wiki_repo_before_rename).not_to exist
- expect(repo_after_rename).to exist
- expect(wiki_repo_after_rename).to exist
- end
-
- context 'container registry with images' do
- let(:container_repository) { create(:container_repository) }
-
- before do
- stub_container_registry_config(enabled: true)
- stub_container_registry_tags(repository: :any, tags: ['tag'])
- project.container_repositories << container_repository
- end
-
- it 'raises a RenameFailedError' do
- expect { service_execute }.to raise_error(described_class::RenameFailedError)
- end
- end
-
- context 'attachments' do
- before do
- expect(project_storage).to receive(:rename_repo) { true }
- end
-
- it 'moves uploads folder to new location' do
- expect_any_instance_of(Gitlab::UploadsTransfer).to receive(:rename_project)
-
- service_execute
- end
- end
-
- it 'updates project full path in gitaly' do
- service_execute
-
- expect(project.repository.full_path).to eq(project.full_path)
- end
-
- it 'updates storage location' do
- allow(project_storage).to receive(:rename_repo).and_return(true)
-
- service_execute
+ let(:project) { create(:project, :repository, skip_disk_validation: true) }
+ let(:gitlab_shell) { Gitlab::Shell.new }
+ let(:hash) { Digest::SHA2.hexdigest(project.id.to_s) }
+ let(:hashed_prefix) { File.join('@hashed', hash[0..1], hash[2..3]) }
+ let(:hashed_path) { File.join(hashed_prefix, hash) }
+
+ before do
+ # Project#gitlab_shell returns a new instance of Gitlab::Shell on every
+ # call. This makes testing a bit easier.
+ allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
+
+ stub_application_setting(hashed_storage_enabled: true)
+ end
- expect(project.project_repository).to have_attributes(
- disk_path: project.disk_path,
- shard_name: project.repository_storage
- )
- end
+ it 'renames a repository' do
+ stub_container_registry_config(enabled: false)
- context 'with hashed storage upgrade when renaming enabled' do
- it 'calls HashedStorage::MigrationService with correct options' do
- stub_application_setting(hashed_storage_enabled: true)
+ expect_any_instance_of(SystemHooksService)
+ .to receive(:execute_hooks_for)
+ .with(project, :rename)
- expect_next_instance_of(::Projects::HashedStorage::MigrationService) do |service|
- expect(service).to receive(:execute).and_return(true)
- end
+ expect(project).to receive(:expire_caches_before_rename)
- service_execute
- end
- end
+ service_execute
end
- context 'using hashed storage' do
- let(:project) { create(:project, :repository, skip_disk_validation: true) }
- let(:gitlab_shell) { Gitlab::Shell.new }
- let(:hash) { Digest::SHA2.hexdigest(project.id.to_s) }
- let(:hashed_prefix) { File.join('@hashed', hash[0..1], hash[2..3]) }
- let(:hashed_path) { File.join(hashed_prefix, hash) }
+ context 'container registry with images' do
+ let(:container_repository) { create(:container_repository) }
before do
- # Project#gitlab_shell returns a new instance of Gitlab::Shell on every
- # call. This makes testing a bit easier.
- allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
-
- stub_application_setting(hashed_storage_enabled: true)
+ stub_container_registry_config(enabled: true)
+ stub_container_registry_tags(repository: :any, tags: ['tag'])
+ project.container_repositories << container_repository
end
- it 'renames a repository' do
- stub_container_registry_config(enabled: false)
-
- expect(gitlab_shell).not_to receive(:mv_repository)
-
- expect_any_instance_of(SystemHooksService)
- .to receive(:execute_hooks_for)
- .with(project, :rename)
-
- expect(project).to receive(:expire_caches_before_rename)
-
- service_execute
+ it 'raises a RenameFailedError' do
+ expect { service_execute }
+ .to raise_error(described_class::RenameFailedError)
end
+ end
- context 'container registry with images' do
- let(:container_repository) { create(:container_repository) }
+ context 'attachments' do
+ let(:uploader) { create(:upload, :issuable_upload, :with_file, model: project) }
+ let(:file_uploader) { build(:file_uploader, project: project) }
+ let(:legacy_storage_path) { File.join(file_uploader.root, legacy_storage.disk_path) }
+ let(:hashed_storage_path) { File.join(file_uploader.root, hashed_storage.disk_path) }
- before do
- stub_container_registry_config(enabled: true)
- stub_container_registry_tags(repository: :any, tags: ['tag'])
- project.container_repositories << container_repository
- end
+ it 'keeps uploads folder location unchanged' do
+ expect_any_instance_of(Gitlab::UploadsTransfer).not_to receive(:rename_project)
- it 'raises a RenameFailedError' do
- expect { service_execute }
- .to raise_error(described_class::RenameFailedError)
- end
+ service_execute
end
- context 'attachments' do
- let(:uploader) { create(:upload, :issuable_upload, :with_file, model: project) }
- let(:file_uploader) { build(:file_uploader, project: project) }
- let(:legacy_storage_path) { File.join(file_uploader.root, legacy_storage.disk_path) }
- let(:hashed_storage_path) { File.join(file_uploader.root, hashed_storage.disk_path) }
+ context 'when not rolled out' do
+ let(:project) { create(:project, :repository, storage_version: 1, skip_disk_validation: true) }
- it 'keeps uploads folder location unchanged' do
- expect_any_instance_of(Gitlab::UploadsTransfer).not_to receive(:rename_project)
+ it 'moves attachments folder to hashed storage' do
+ expect(File.directory?(legacy_storage_path)).to be_truthy
+ expect(File.directory?(hashed_storage_path)).to be_falsey
service_execute
- end
-
- context 'when not rolled out' do
- let(:project) { create(:project, :repository, storage_version: 1, skip_disk_validation: true) }
-
- it 'moves attachments folder to hashed storage' do
- expect(File.directory?(legacy_storage_path)).to be_truthy
- expect(File.directory?(hashed_storage_path)).to be_falsey
+ expect(project.reload.hashed_storage?(:attachments)).to be_truthy
- service_execute
- expect(project.reload.hashed_storage?(:attachments)).to be_truthy
-
- expect(File.directory?(legacy_storage_path)).to be_falsey
- expect(File.directory?(hashed_storage_path)).to be_truthy
- end
+ expect(File.directory?(legacy_storage_path)).to be_falsey
+ expect(File.directory?(hashed_storage_path)).to be_truthy
end
end
+ end
- it 'updates project full path in gitaly' do
- service_execute
+ it 'updates project full path in gitaly' do
+ service_execute
- expect(project.repository.full_path).to eq(project.full_path)
- end
+ expect(project.repository.full_path).to eq(project.full_path)
+ end
- it 'updates storage location' do
- service_execute
+ it 'updates storage location' do
+ service_execute
- expect(project.project_repository).to have_attributes(
- disk_path: project.disk_path,
- shard_name: project.repository_storage
- )
- end
+ expect(project.project_repository).to have_attributes(
+ disk_path: project.disk_path,
+ shard_name: project.repository_storage
+ )
end
context 'EventStore' do
diff --git a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
deleted file mode 100644
index e21d8b6fa83..00000000000
--- a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
+++ /dev/null
@@ -1,152 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::HashedStorage::MigrateRepositoryService, feature_category: :groups_and_projects do
- let(:gitlab_shell) { Gitlab::Shell.new }
- let(:project) { create(:project, :legacy_storage, :repository, :wiki_repo, :design_repo) }
- let(:legacy_storage) { Storage::LegacyProject.new(project) }
- let(:hashed_storage) { Storage::Hashed.new(project) }
-
- subject(:service) { described_class.new(project: project, old_disk_path: project.disk_path) }
-
- describe '#execute' do
- let(:old_disk_path) { legacy_storage.disk_path }
- let(:new_disk_path) { hashed_storage.disk_path }
-
- before do
- allow(service).to receive(:gitlab_shell) { gitlab_shell }
- end
-
- context 'repository lock' do
- it 'tries to lock the repository' do
- expect(service).to receive(:try_to_set_repository_read_only!)
-
- service.execute
- end
-
- it 'fails when a git operation is in progress' do
- allow(project).to receive(:git_transfer_in_progress?) { true }
-
- expect { service.execute }.to raise_error(Projects::HashedStorage::RepositoryInUseError)
- end
- end
-
- context 'when repository doesnt exist on disk' do
- let(:project) { create(:project, :legacy_storage) }
-
- it 'skips the disk change but increase the version' do
- service.execute
-
- expect(project.hashed_storage?(:repository)).to be_truthy
- end
- end
-
- context 'when succeeds' do
- it 'renames project, wiki and design repositories' do
- service.execute
-
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.git")).to be_truthy
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.wiki.git")).to be_truthy
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.design.git")).to be_truthy
- end
-
- it 'updates project to be hashed and not read-only' do
- service.execute
-
- expect(project.hashed_storage?(:repository)).to be_truthy
- expect(project.repository_read_only).to be_falsey
- end
-
- it 'move operation is called for all repositories' do
- expect_move_repository(old_disk_path, new_disk_path)
- expect_move_repository("#{old_disk_path}.wiki", "#{new_disk_path}.wiki")
- expect_move_repository("#{old_disk_path}.design", "#{new_disk_path}.design")
-
- service.execute
- end
-
- it 'writes project full path to gitaly' do
- service.execute
-
- expect(project.repository.full_path).to eq project.full_path
- end
- end
-
- context 'when exception happens' do
- it 'handles OpenSSL::Cipher::CipherError' do
- expect(project).to receive(:ensure_runners_token).and_raise(OpenSSL::Cipher::CipherError)
-
- expect { service.execute }.not_to raise_exception
- end
-
- it 'ensures rollback when OpenSSL::Cipher::CipherError' do
- expect(project).to receive(:ensure_runners_token).and_raise(OpenSSL::Cipher::CipherError)
- expect(service).to receive(:rollback_folder_move).and_call_original
-
- service.execute
- project.reload
-
- expect(project.legacy_storage?).to be_truthy
- expect(project.repository_read_only?).to be_falsey
- end
-
- it 'handles Gitlab::Git::CommandError' do
- expect(project).to receive(:set_full_path).and_raise(Gitlab::Git::CommandError)
-
- expect { service.execute }.not_to raise_exception
- end
-
- it 'ensures rollback when Gitlab::Git::CommandError' do
- expect(project).to receive(:set_full_path).and_raise(Gitlab::Git::CommandError)
- expect(service).to receive(:rollback_folder_move).and_call_original
-
- service.execute
- project.reload
-
- expect(project.legacy_storage?).to be_truthy
- expect(project.repository_read_only?).to be_falsey
- end
- end
-
- context 'when one move fails' do
- it 'rollsback repositories to original name' do
- allow(service).to receive(:move_repository).and_call_original
- allow(service).to receive(:move_repository).with(old_disk_path, new_disk_path).once { false } # will disable first move only
-
- expect(service).to receive(:rollback_folder_move).and_call_original
-
- service.execute
-
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.git")).to be_falsey
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.wiki.git")).to be_falsey
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.design.git")).to be_falsey
- expect(project.repository_read_only?).to be_falsey
- end
-
- context 'when rollback fails' do
- before do
- gitlab_shell.mv_repository(project.repository_storage, old_disk_path, new_disk_path)
- end
-
- it 'does not try to move nil repository over existing' do
- expect(gitlab_shell).not_to receive(:mv_repository).with(project.repository_storage, old_disk_path, new_disk_path)
- expect_move_repository("#{old_disk_path}.wiki", "#{new_disk_path}.wiki")
- expect_move_repository("#{old_disk_path}.design", "#{new_disk_path}.design")
-
- service.execute
- end
- end
- end
-
- it 'works even when project validation fails' do
- allow(project).to receive(:valid?) { false }
-
- expect { service.execute }.to change { project.hashed_storage?(:repository) }.to(true)
- end
-
- def expect_move_repository(from_name, to_name)
- expect(gitlab_shell).to receive(:mv_repository).with(project.repository_storage, from_name, to_name).and_call_original
- end
- end
-end
diff --git a/spec/services/projects/hashed_storage/migration_service_spec.rb b/spec/services/projects/hashed_storage/migration_service_spec.rb
index ffbd5c2500a..d5b04688322 100644
--- a/spec/services/projects/hashed_storage/migration_service_spec.rb
+++ b/spec/services/projects/hashed_storage/migration_service_spec.rb
@@ -14,43 +14,6 @@ RSpec.describe Projects::HashedStorage::MigrationService, feature_category: :gro
subject(:service) { described_class.new(project, project.full_path, logger: logger) }
describe '#execute' do
- context 'repository migration' do
- let(:repository_service) do
- Projects::HashedStorage::MigrateRepositoryService.new(
- project: project,
- old_disk_path: project.full_path,
- logger: logger
- )
- end
-
- it 'delegates migration to Projects::HashedStorage::MigrateRepositoryService' do
- expect(service).to receive(:migrate_repository_service).and_return(repository_service)
- expect(repository_service).to receive(:execute)
-
- service.execute
- end
-
- it 'does not delegate migration if repository is already migrated' do
- project.storage_version = ::Project::LATEST_STORAGE_VERSION
- expect(Projects::HashedStorage::MigrateRepositoryService).not_to receive(:new)
-
- service.execute
- end
-
- it 'migrates legacy repositories to hashed storage' do
- legacy_attachments_path = FileUploader.absolute_base_dir(project)
- hashed_project = project.dup.tap { |p| p.id = project.id }
- hashed_project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:attachments]
- hashed_attachments_path = FileUploader.absolute_base_dir(hashed_project)
-
- expect(logger).to receive(:info).with(/Repository moved from '#{project_legacy_path}' to '#{project_hashed_path}'/)
- expect(logger).to receive(:info).with(/Repository moved from '#{wiki_legacy_path}' to '#{wiki_hashed_path}'/)
- expect(logger).to receive(:info).with(/Project attachments moved from '#{legacy_attachments_path}' to '#{hashed_attachments_path}'/)
-
- expect { service.execute }.to change { project.storage_version }.from(nil).to(2)
- end
- end
-
context 'attachments migration' do
let(:project) { create(:project, :empty_repo, :wiki_repo, storage_version: ::Project::HASHED_STORAGE_FEATURES[:repository]) }
@@ -62,13 +25,6 @@ RSpec.describe Projects::HashedStorage::MigrationService, feature_category: :gro
)
end
- it 'delegates migration to Projects::HashedStorage::MigrateRepositoryService' do
- expect(service).to receive(:migrate_attachments_service).and_return(attachments_service)
- expect(attachments_service).to receive(:execute)
-
- service.execute
- end
-
it 'does not delegate migration if attachments are already migrated' do
project.storage_version = ::Project::LATEST_STORAGE_VERSION
expect(Projects::HashedStorage::MigrateAttachmentsService).not_to receive(:new)
diff --git a/spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb b/spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb
deleted file mode 100644
index d1a68503fa3..00000000000
--- a/spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb
+++ /dev/null
@@ -1,106 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::HashedStorage::RollbackAttachmentsService, feature_category: :groups_and_projects do
- subject(:service) { described_class.new(project: project, old_disk_path: project.disk_path, logger: nil) }
-
- let(:project) { create(:project, :repository, skip_disk_validation: true) }
- let(:legacy_storage) { Storage::LegacyProject.new(project) }
- let(:hashed_storage) { Storage::Hashed.new(project) }
-
- let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
- let(:file_uploader) { build(:file_uploader, project: project) }
- let(:old_disk_path) { File.join(base_path(hashed_storage), upload.path) }
- let(:new_disk_path) { File.join(base_path(legacy_storage), upload.path) }
-
- describe '#execute' do
- context 'when succeeds' do
- it 'moves attachments to legacy storage layout' do
- expect(File.file?(old_disk_path)).to be_truthy
- expect(File.file?(new_disk_path)).to be_falsey
- expect(File.exist?(base_path(hashed_storage))).to be_truthy
- expect(File.exist?(base_path(legacy_storage))).to be_falsey
- expect(FileUtils).to receive(:mv).with(base_path(hashed_storage), base_path(legacy_storage)).and_call_original
-
- service.execute
-
- expect(File.exist?(base_path(legacy_storage))).to be_truthy
- expect(File.exist?(base_path(hashed_storage))).to be_falsey
- expect(File.file?(old_disk_path)).to be_falsey
- expect(File.file?(new_disk_path)).to be_truthy
- end
-
- it 'returns true' do
- expect(service.execute).to be_truthy
- end
-
- it 'sets skipped to false' do
- service.execute
-
- expect(service.skipped?).to be_falsey
- end
- end
-
- context 'when original folder does not exist anymore' do
- before do
- FileUtils.rm_rf(base_path(hashed_storage))
- end
-
- it 'skips moving folders and go to next' do
- expect(FileUtils).not_to receive(:mv).with(base_path(hashed_storage), base_path(legacy_storage))
-
- service.execute
-
- expect(File.exist?(base_path(legacy_storage))).to be_falsey
- expect(File.file?(new_disk_path)).to be_falsey
- end
-
- it 'returns true' do
- expect(service.execute).to be_truthy
- end
-
- it 'sets skipped to true' do
- service.execute
-
- expect(service.skipped?).to be_truthy
- end
- end
-
- context 'when target folder already exists' do
- before do
- FileUtils.mkdir_p(base_path(legacy_storage))
- end
-
- it 'raises AttachmentCannotMoveError' do
- expect(FileUtils).not_to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage))
-
- expect { service.execute }.to raise_error(Projects::HashedStorage::AttachmentCannotMoveError)
- end
- end
-
- it 'works even when project validation fails' do
- allow(project).to receive(:valid?) { false }
-
- expect { service.execute }.to change { project.hashed_storage?(:attachments) }.to(false)
- end
- end
-
- describe '#old_disk_path' do
- it 'returns old disk_path for project' do
- expect(service.old_disk_path).to eq(project.disk_path)
- end
- end
-
- describe '#new_disk_path' do
- it 'returns new disk_path for project' do
- service.execute
-
- expect(service.new_disk_path).to eq(project.full_path)
- end
- end
-
- def base_path(storage)
- File.join(FileUploader.root, storage.disk_path)
- end
-end
diff --git a/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb b/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
deleted file mode 100644
index 1e5d4ae4d20..00000000000
--- a/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
+++ /dev/null
@@ -1,152 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::HashedStorage::RollbackRepositoryService, :clean_gitlab_redis_shared_state, feature_category: :groups_and_projects do
- let(:gitlab_shell) { Gitlab::Shell.new }
- let(:project) { create(:project, :repository, :wiki_repo, :design_repo, storage_version: ::Project::HASHED_STORAGE_FEATURES[:repository]) }
- let(:legacy_storage) { Storage::LegacyProject.new(project) }
- let(:hashed_storage) { Storage::Hashed.new(project) }
-
- subject(:service) { described_class.new(project: project, old_disk_path: project.disk_path) }
-
- describe '#execute' do
- let(:old_disk_path) { hashed_storage.disk_path }
- let(:new_disk_path) { legacy_storage.disk_path }
-
- before do
- allow(service).to receive(:gitlab_shell) { gitlab_shell }
- end
-
- context 'repository lock' do
- it 'tries to lock the repository' do
- expect(service).to receive(:try_to_set_repository_read_only!)
-
- service.execute
- end
-
- it 'fails when a git operation is in progress' do
- allow(project).to receive(:git_transfer_in_progress?) { true }
-
- expect { service.execute }.to raise_error(Projects::HashedStorage::RepositoryInUseError)
- end
- end
-
- context 'when repository doesnt exist on disk' do
- let(:project) { create(:project) }
-
- it 'skips the disk change but decrease the version' do
- service.execute
-
- expect(project.legacy_storage?).to be_truthy
- end
- end
-
- context 'when succeeds' do
- it 'renames project, wiki and design repositories' do
- service.execute
-
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.git")).to be_truthy
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.wiki.git")).to be_truthy
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.design.git")).to be_truthy
- end
-
- it 'updates project to be legacy and not read-only' do
- service.execute
-
- expect(project.legacy_storage?).to be_truthy
- expect(project.repository_read_only).to be_falsey
- end
-
- it 'move operation is called for both repositories' do
- expect_move_repository(old_disk_path, new_disk_path)
- expect_move_repository("#{old_disk_path}.wiki", "#{new_disk_path}.wiki")
- expect_move_repository("#{old_disk_path}.design", "#{new_disk_path}.design")
-
- service.execute
- end
-
- it 'writes project full path to gitaly' do
- service.execute
-
- expect(project.repository.full_path).to eq project.full_path
- end
- end
-
- context 'when exception happens' do
- it 'handles OpenSSL::Cipher::CipherError' do
- expect(project).to receive(:ensure_runners_token).and_raise(OpenSSL::Cipher::CipherError)
-
- expect { service.execute }.not_to raise_exception
- end
-
- it 'ensures rollback when OpenSSL::Cipher::CipherError' do
- expect(project).to receive(:ensure_runners_token).and_raise(OpenSSL::Cipher::CipherError)
- expect(service).to receive(:rollback_folder_move).and_call_original
-
- service.execute
- project.reload
-
- expect(project.hashed_storage?(:repository)).to be_truthy
- expect(project.repository_read_only?).to be_falsey
- end
-
- it 'handles Gitlab::Git::CommandError' do
- expect(project).to receive(:set_full_path).and_raise(Gitlab::Git::CommandError)
-
- expect { service.execute }.not_to raise_exception
- end
-
- it 'ensures rollback when Gitlab::Git::CommandError' do
- expect(project).to receive(:set_full_path).and_raise(Gitlab::Git::CommandError)
- expect(service).to receive(:rollback_folder_move).and_call_original
-
- service.execute
- project.reload
-
- expect(project.hashed_storage?(:repository)).to be_truthy
- expect(project.repository_read_only?).to be_falsey
- end
- end
-
- context 'when one move fails' do
- it 'rolls repositories back to original name' do
- allow(service).to receive(:move_repository).and_call_original
- allow(service).to receive(:move_repository).with(old_disk_path, new_disk_path).once { false } # will disable first move only
-
- expect(service).to receive(:rollback_folder_move).and_call_original
-
- service.execute
-
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.git")).to be_falsey
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.wiki.git")).to be_falsey
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.design.git")).to be_falsey
- expect(project.repository_read_only?).to be_falsey
- end
-
- context 'when rollback fails' do
- before do
- gitlab_shell.mv_repository(project.repository_storage, old_disk_path, new_disk_path)
- end
-
- it 'does not try to move nil repository over existing' do
- expect(gitlab_shell).not_to receive(:mv_repository).with(project.repository_storage, old_disk_path, new_disk_path)
- expect_move_repository("#{old_disk_path}.wiki", "#{new_disk_path}.wiki")
- expect_move_repository("#{old_disk_path}.design", "#{new_disk_path}.design")
-
- service.execute
- end
- end
- end
-
- it 'works even when project validation fails' do
- allow(project).to receive(:valid?) { false }
-
- expect { service.execute }.to change { project.legacy_storage? }.to(true)
- end
-
- def expect_move_repository(from_name, to_name)
- expect(gitlab_shell).to receive(:mv_repository).with(project.repository_storage, from_name, to_name).and_call_original
- end
- end
-end
diff --git a/spec/services/projects/hashed_storage/rollback_service_spec.rb b/spec/services/projects/hashed_storage/rollback_service_spec.rb
deleted file mode 100644
index 088eb9d2734..00000000000
--- a/spec/services/projects/hashed_storage/rollback_service_spec.rb
+++ /dev/null
@@ -1,78 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::HashedStorage::RollbackService, feature_category: :groups_and_projects do
- let(:project) { create(:project, :empty_repo, :wiki_repo) }
- let(:logger) { double }
- let!(:project_attachment) { build(:file_uploader, project: project) }
- let(:project_hashed_path) { Storage::Hashed.new(project).disk_path }
- let(:project_legacy_path) { Storage::LegacyProject.new(project).disk_path }
- let(:wiki_hashed_path) { "#{project_hashed_path}.wiki" }
- let(:wiki_legacy_path) { "#{project_legacy_path}.wiki" }
-
- subject(:service) { described_class.new(project, project.disk_path, logger: logger) }
-
- describe '#execute' do
- context 'attachments rollback' do
- let(:attachments_service_class) { Projects::HashedStorage::RollbackAttachmentsService }
- let(:attachments_service) { attachments_service_class.new(project: project, old_disk_path: project.disk_path, logger: logger) }
-
- it 'delegates rollback to Projects::HashedStorage::RollbackAttachmentsService' do
- expect(service).to receive(:rollback_attachments_service).and_return(attachments_service)
- expect(attachments_service).to receive(:execute)
-
- service.execute
- end
-
- it 'does not delegate rollback if repository is in legacy storage already' do
- project.storage_version = nil
- expect(attachments_service_class).not_to receive(:new)
-
- service.execute
- end
-
- it 'rollbacks to legacy storage' do
- hashed_attachments_path = FileUploader.absolute_base_dir(project)
- legacy_project = project.dup
- legacy_project.storage_version = nil
- legacy_attachments_path = FileUploader.absolute_base_dir(legacy_project)
-
- expect(logger).to receive(:info).with(/Project attachments moved from '#{hashed_attachments_path}' to '#{legacy_attachments_path}'/)
-
- expect(logger).to receive(:info).with(/Repository moved from '#{project_hashed_path}' to '#{project_legacy_path}'/)
- expect(logger).to receive(:info).with(/Repository moved from '#{wiki_hashed_path}' to '#{wiki_legacy_path}'/)
-
- expect { service.execute }.to change { project.storage_version }.from(2).to(nil)
- end
- end
-
- context 'repository rollback' do
- let(:project) { create(:project, :empty_repo, :wiki_repo, storage_version: ::Project::HASHED_STORAGE_FEATURES[:repository]) }
- let(:repository_service_class) { Projects::HashedStorage::RollbackRepositoryService }
- let(:repository_service) { repository_service_class.new(project: project, old_disk_path: project.disk_path, logger: logger) }
-
- it 'delegates rollback to RollbackRepositoryService' do
- expect(service).to receive(:rollback_repository_service).and_return(repository_service)
- expect(repository_service).to receive(:execute)
-
- service.execute
- end
-
- it 'does not delegate rollback if repository is in legacy storage already' do
- project.storage_version = nil
-
- expect(repository_service_class).not_to receive(:new)
-
- service.execute
- end
-
- it 'rollbacks to legacy storage' do
- expect(logger).to receive(:info).with(/Repository moved from '#{project_hashed_path}' to '#{project_legacy_path}'/)
- expect(logger).to receive(:info).with(/Repository moved from '#{wiki_hashed_path}' to '#{wiki_legacy_path}'/)
-
- expect { service.execute }.to change { project.storage_version }.from(1).to(nil)
- end
- end
- end
-end
diff --git a/spec/services/projects/in_product_marketing_campaign_emails_service_spec.rb b/spec/services/projects/in_product_marketing_campaign_emails_service_spec.rb
deleted file mode 100644
index fab8cafd1a0..00000000000
--- a/spec/services/projects/in_product_marketing_campaign_emails_service_spec.rb
+++ /dev/null
@@ -1,136 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::InProductMarketingCampaignEmailsService, feature_category: :experimentation_adoption do
- describe '#execute' do
- let(:user) { create(:user) }
- let(:project) { create(:project) }
- let(:campaign) { Users::InProductMarketingEmail::BUILD_IOS_APP_GUIDE }
-
- before do
- allow(Notify)
- .to receive(:build_ios_app_guide_email)
- .and_return(instance_double(ActionMailer::MessageDelivery, deliver_later: true))
- end
-
- subject(:execute) do
- described_class.new(project, campaign).execute
- end
-
- context 'users can receive marketing emails' do
- let(:maintainer) { create(:user) }
- let(:developer) { create(:user) }
-
- before do
- project.add_developer(developer)
- project.add_maintainer(maintainer)
- end
-
- it 'sends the email to all project members with access_level >= Developer', :aggregate_failures do
- [project.owner, maintainer, developer].each do |user|
- email = user.notification_email_or_default
-
- expect(Notify).to receive(:build_ios_app_guide_email).with(email)
- end
-
- execute
- end
-
- it 'records sent emails', :aggregate_failures do
- expect { execute }.to change { Users::InProductMarketingEmail.count }.from(0).to(3)
-
- [project.owner, maintainer, developer].each do |user|
- expect(
- Users::InProductMarketingEmail.where(
- user: user,
- campaign: campaign
- )
- ).to exist
- end
- end
-
- it 'tracks experiment :email_sent event', :experiment do
- expect(experiment(:build_ios_app_guide_email)).to track(:email_sent)
- .on_next_instance
- .with_context(project: project)
-
- execute
- end
- end
-
- shared_examples 'does not send the email' do
- it do
- email = user.notification_email_or_default
- expect(Notify).not_to receive(:build_ios_app_guide_email).with(email)
- execute
- end
- end
-
- shared_examples 'does not create a record of the sent email' do
- it do
- expect(
- Users::InProductMarketingEmail.where(
- user: user,
- campaign: campaign
- )
- ).not_to exist
-
- execute
- end
- end
-
- context "when user can't receive marketing emails" do
- before do
- project.add_developer(user)
- end
-
- context 'when user.can?(:receive_notifications) is false' do
- it 'does not send the email' do
- allow_next_found_instance_of(User) do |user|
- allow(user).to receive(:can?).with(:receive_notifications) { false }
-
- email = user.notification_email_or_default
- expect(Notify).not_to receive(:build_ios_app_guide_email).with(email)
-
- expect(
- Users::InProductMarketingEmail.where(
- user: user,
- campaign: campaign
- )
- ).not_to exist
- end
-
- execute
- end
- end
- end
-
- context 'when campaign email has already been sent to the user' do
- before do
- project.add_developer(user)
- create(:in_product_marketing_email, :campaign, user: user, campaign: campaign)
- end
-
- it_behaves_like 'does not send the email'
- end
-
- context "when user is a reporter" do
- before do
- project.add_reporter(user)
- end
-
- it_behaves_like 'does not send the email'
- it_behaves_like 'does not create a record of the sent email'
- end
-
- context "when user is a guest" do
- before do
- project.add_guest(user)
- end
-
- it_behaves_like 'does not send the email'
- it_behaves_like 'does not create a record of the sent email'
- end
- end
-end
diff --git a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
index 00c156ba538..ef2a89a15b1 100644
--- a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
@@ -94,7 +94,7 @@ RSpec.describe Projects::LfsPointers::LfsDownloadService, feature_category: :sou
it 'streams the download' do
expected_options = { headers: anything, stream_body: true }
- expect(Gitlab::HTTP).to receive(:perform_request).with(Net::HTTP::Get, anything, expected_options)
+ expect(Gitlab::HTTP).to receive(:get).with(anything, expected_options)
subject.execute
end
diff --git a/spec/services/projects/participants_service_spec.rb b/spec/services/projects/participants_service_spec.rb
index b01e64439ec..692f43eb205 100644
--- a/spec/services/projects/participants_service_spec.rb
+++ b/spec/services/projects/participants_service_spec.rb
@@ -18,6 +18,14 @@ RSpec.describe Projects::ParticipantsService, feature_category: :groups_and_proj
described_class.new(project, user).execute(noteable)
end
+ it 'returns results in correct order' do
+ group = create(:group).tap { |g| g.add_owner(user) }
+
+ expect(run_service.pluck(:username)).to eq([
+ noteable.author.username, 'all', user.username, group.full_path
+ ])
+ end
+
it 'includes `All Project and Group Members`' do
expect(run_service).to include(a_hash_including({ username: "all", name: "All Project and Group Members" }))
end
@@ -104,6 +112,24 @@ RSpec.describe Projects::ParticipantsService, feature_category: :groups_and_proj
expect(group_items.first[:avatar_url]).to eq("/gitlab/uploads/-/system/group/avatar/#{group.id}/dk.png")
end
end
+
+ context 'with subgroups' do
+ let(:group_1) { create(:group, path: 'bb') }
+ let(:group_2) { create(:group, path: 'zz') }
+ let(:subgroup) { create(:group, path: 'aa', parent: group_1) }
+
+ before do
+ group_1.add_owner(user)
+ group_2.add_owner(user)
+ subgroup.add_owner(user)
+ end
+
+ it 'returns results ordered by full path' do
+ expect(group_items.pluck(:username)).to eq([
+ group_1.full_path, subgroup.full_path, group_2.full_path
+ ])
+ end
+ end
end
context 'when `disable_all_mention` FF is enabled' do
diff --git a/spec/services/projects/record_target_platforms_service_spec.rb b/spec/services/projects/record_target_platforms_service_spec.rb
index 7c6907c7a95..bf87b763341 100644
--- a/spec/services/projects/record_target_platforms_service_spec.rb
+++ b/spec/services/projects/record_target_platforms_service_spec.rb
@@ -51,52 +51,6 @@ RSpec.describe Projects::RecordTargetPlatformsService, '#execute', feature_categ
end
end
end
-
- describe 'Build iOS guide email experiment' do
- shared_examples 'tracks experiment assignment event' do
- it 'tracks the assignment event', :experiment do
- expect(experiment(:build_ios_app_guide_email))
- .to track(:assignment)
- .with_context(project: project)
- .on_next_instance
-
- execute
- end
- end
-
- context 'experiment candidate' do
- before do
- stub_experiments(build_ios_app_guide_email: :candidate)
- end
-
- it 'executes a Projects::InProductMarketingCampaignEmailsService' do
- service_double = instance_double(Projects::InProductMarketingCampaignEmailsService, execute: true)
-
- expect(Projects::InProductMarketingCampaignEmailsService)
- .to receive(:new).with(project, Users::InProductMarketingEmail::BUILD_IOS_APP_GUIDE)
- .and_return service_double
- expect(service_double).to receive(:execute)
-
- execute
- end
-
- it_behaves_like 'tracks experiment assignment event'
- end
-
- context 'experiment control' do
- before do
- stub_experiments(build_ios_app_guide_email: :control)
- end
-
- it 'does not execute a Projects::InProductMarketingCampaignEmailsService' do
- expect(Projects::InProductMarketingCampaignEmailsService).not_to receive(:new)
-
- execute
- end
-
- it_behaves_like 'tracks experiment assignment event'
- end
- end
end
context 'when project is not an XCode project' do
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index 1ddf6168c07..22264819e3b 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -425,28 +425,6 @@ RSpec.describe Projects::TransferService, feature_category: :groups_and_projects
end
end
- context 'namespace which contains orphan repository with same projects path name' do
- let(:raw_fake_repo) { Gitlab::Git::Repository.new('default', File.join(group.full_path, "#{project.path}.git"), nil, nil) }
-
- before do
- group.add_owner(user)
-
- raw_fake_repo.create_repository
- end
-
- after do
- raw_fake_repo.remove
- end
-
- it 'does not allow the project transfer' do
- transfer_result = execute_transfer
-
- expect(transfer_result).to eq false
- expect(project.namespace).to eq(user.namespace)
- expect(project.errors[:new_namespace]).to include('Cannot move project')
- end
- end
-
context 'target namespace containing the same project name' do
before do
group.add_owner(user)
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index 6c767876d05..0ad7693a047 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -22,6 +22,20 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
subject(:service) { described_class.new(project, build) }
+ RSpec.shared_examples 'old deployments' do
+ it 'deactivates old deployments from the same project with the same path prefix', :freeze_time do
+ other_project = create(:pages_deployment)
+ same_project_other_path_prefix = create(:pages_deployment, project: project, path_prefix: 'other')
+ same_project = create(:pages_deployment, project: project)
+
+ expect { expect(service.execute[:status]).to eq(:success) }
+ .to not_change { other_project.reload.deleted_at }
+ .and not_change { same_project_other_path_prefix.reload.deleted_at }
+ .and change { same_project.reload.deleted_at }
+ .from(nil).to(described_class::OLD_DEPLOYMENTS_DESTRUCTION_DELAY.from_now)
+ end
+ end
+
RSpec.shared_examples 'pages size limit is' do |size_limit|
context "when size is below the limit" do
before do
@@ -36,6 +50,8 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
expect(deploy_status.description).not_to be_present
expect(project.pages_metadatum).to be_deployed
end
+
+ it_behaves_like 'old deployments'
end
context "when size is above the limit" do
@@ -95,6 +111,8 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
build.reload
end
+ it_behaves_like 'old deployments'
+
it "doesn't delete artifacts after deploying" do
expect(service.execute[:status]).to eq(:success)
@@ -146,31 +164,6 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
expect(project.pages_metadatum.reload.pages_deployment).to eq(project.pages_deployments.last)
end
- context 'when there is an old pages deployment' do
- let!(:old_deployment_from_another_project) { create(:pages_deployment) }
- let!(:old_deployment) { create(:pages_deployment, project: project) }
-
- it 'schedules a destruction of older deployments' do
- expect(DestroyPagesDeploymentsWorker).to(
- receive(:perform_in).with(
- described_class::OLD_DEPLOYMENTS_DESTRUCTION_DELAY,
- project.id,
- instance_of(Integer)
- )
- )
-
- service.execute
- end
-
- it 'removes older deployments', :sidekiq_inline do
- expect do
- service.execute
- end.not_to change { PagesDeployment.count } # it creates one and deletes one
-
- expect(PagesDeployment.find_by_id(old_deployment.id)).to be_nil
- end
- end
-
context 'when archive does not have pages directory' do
let(:file) { empty_file }
let(:metadata_filename) { empty_metadata_filename }
@@ -291,20 +284,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
expect(deployment.ci_build_id).to eq(build.id)
end
- context 'when old deployment present' do
- let!(:old_build) { create(:ci_build, name: 'pages', pipeline: old_pipeline, ref: 'HEAD') }
- let!(:old_deployment) { create(:pages_deployment, ci_build: old_build, project: project) }
-
- before do
- project.update_pages_deployment!(old_deployment)
- end
-
- it 'deactivates old deployments' do
- expect(service.execute[:status]).to eq(:success)
-
- expect(old_deployment.reload.deleted_at).not_to be_nil
- end
- end
+ it_behaves_like 'old deployments'
context 'when newer deployment present' do
before do
diff --git a/spec/services/projects/update_repository_storage_service_spec.rb b/spec/services/projects/update_repository_storage_service_spec.rb
index b30c1d30044..d173d23a1d6 100644
--- a/spec/services/projects/update_repository_storage_service_spec.rb
+++ b/spec/services/projects/update_repository_storage_service_spec.rb
@@ -103,6 +103,7 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour
expect(project_repository_double).to receive(:replicate)
.with(project.repository.raw)
.and_raise(Gitlab::Git::CommandError)
+ expect(project_repository_double).to receive(:remove)
expect do
subject.execute
@@ -140,10 +141,11 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour
.with(project.repository.raw)
expect(project_repository_double).to receive(:checksum)
.and_return('not matching checksum')
+ expect(project_repository_double).to receive(:remove)
expect do
subject.execute
- end.to raise_error(UpdateRepositoryStorageMethods::Error, /Failed to verify project repository checksum/)
+ end.to raise_error(Repositories::ReplicateService::Error, /Failed to verify project repository checksum/)
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('default')
@@ -316,10 +318,14 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour
context 'when object pool checksum does not match' do
let(:new_object_pool_checksum) { 'not_match' }
- it 'raises an error and does not change state' do
+ it 'raises an error and removes the new object pool repository' do
+ expect(object_pool_repository_double).to receive(:remove)
+
original_count = PoolRepository.count
- expect { subject.execute }.to raise_error(UpdateRepositoryStorageMethods::Error)
+ expect do
+ subject.execute
+ end.to raise_error(Repositories::ReplicateService::Error, /Failed to verify object_pool repository/)
project.reload
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index 195cfe78b3f..7ab85d8253a 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -356,7 +356,7 @@ RSpec.describe Projects::UpdateService, feature_category: :groups_and_projects d
context 'when changes project features' do
# Using some sample features for testing.
# Not using all the features because some of them must be enabled/disabled together
- %w[issues wiki forking].each do |feature_name|
+ %w[issues wiki forking model_experiments].each do |feature_name|
context "with feature_name:#{feature_name}" do
let(:feature) { "#{feature_name}_access_level" }
let(:params) do
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 5e7fb8397e3..2c34d6a59be 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -2478,6 +2478,26 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
end
it_behaves_like 'quick actions that change work item type'
+
+ context '/set_parent command' do
+ let_it_be(:parent) { create(:work_item, :issue, project: project) }
+ let_it_be(:work_item) { create(:work_item, :task, project: project) }
+ let_it_be(:parent_ref) { parent.to_reference(project) }
+
+ let(:content) { "/set_parent #{parent_ref}" }
+
+ it 'returns success message' do
+ _, _, message = service.execute(content, work_item)
+
+ expect(message).to eq('Work item parent set successfully')
+ end
+
+ it 'sets correct update params' do
+ _, updates, _ = service.execute(content, work_item)
+
+ expect(updates).to eq(set_parent: parent)
+ end
+ end
end
describe '#explain' do
@@ -3022,6 +3042,104 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
end
end
end
+
+ describe '/set_parent command' do
+ let_it_be(:parent) { create(:work_item, :issue, project: project) }
+ let_it_be(:work_item) { create(:work_item, :task, project: project) }
+ let_it_be(:parent_ref) { parent.to_reference(project) }
+
+ let(:command) { "/set_parent #{parent_ref}" }
+
+ shared_examples 'command is available' do
+ it 'explanation contains correct message' do
+ _, explanations = service.explain(command, work_item)
+
+ expect(explanations)
+ .to contain_exactly("Change work item's parent to #{parent_ref}.")
+ end
+
+ it 'contains command' do
+ expect(service.available_commands(work_item)).to include(a_hash_including(name: :set_parent))
+ end
+ end
+
+ shared_examples 'command is not available' do
+ it 'explanation is empty' do
+ _, explanations = service.explain(command, work_item)
+
+ expect(explanations).to eq([])
+ end
+
+ it 'does not contain command' do
+ expect(service.available_commands(work_item)).not_to include(a_hash_including(name: :set_parent))
+ end
+ end
+
+ context 'when user can admin link' do
+ it_behaves_like 'command is available'
+
+ context 'when work item type does not support a parent' do
+ let_it_be(:work_item) { build(:work_item, :incident, project: project) }
+
+ it_behaves_like 'command is not available'
+ end
+ end
+
+ context 'when user cannot admin link' do
+ subject(:service) { described_class.new(project, create(:user)) }
+
+ it_behaves_like 'command is not available'
+ end
+ end
+
+ describe '/add_child command' do
+ let_it_be(:child) { create(:work_item, :issue, project: project) }
+ let_it_be(:work_item) { create(:work_item, :objective, project: project) }
+ let_it_be(:child_ref) { child.to_reference(project) }
+
+ let(:command) { "/add_child #{child_ref}" }
+
+ shared_examples 'command is available' do
+ it 'explanation contains correct message' do
+ _, explanations = service.explain(command, work_item)
+
+ expect(explanations)
+ .to contain_exactly("Add #{child_ref} to this work item as child(ren).")
+ end
+
+ it 'contains command' do
+ expect(service.available_commands(work_item)).to include(a_hash_including(name: :add_child))
+ end
+ end
+
+ shared_examples 'command is not available' do
+ it 'explanation is empty' do
+ _, explanations = service.explain(command, work_item)
+
+ expect(explanations).to eq([])
+ end
+
+ it 'does not contain command' do
+ expect(service.available_commands(work_item)).not_to include(a_hash_including(name: :add_child))
+ end
+ end
+
+ context 'when user can admin link' do
+ it_behaves_like 'command is available'
+
+ context 'when work item type does not support children' do
+ let_it_be(:work_item) { build(:work_item, :key_result, project: project) }
+
+ it_behaves_like 'command is not available'
+ end
+ end
+
+ context 'when user cannot admin link' do
+ subject(:service) { described_class.new(project, create(:user)) }
+
+ it_behaves_like 'command is not available'
+ end
+ end
end
describe '#available_commands' do
diff --git a/spec/services/releases/destroy_service_spec.rb b/spec/services/releases/destroy_service_spec.rb
index 2b6e96a781e..de3ce2b6206 100644
--- a/spec/services/releases/destroy_service_spec.rb
+++ b/spec/services/releases/destroy_service_spec.rb
@@ -83,5 +83,11 @@ RSpec.describe Releases::DestroyService, feature_category: :release_orchestratio
expect(milestone.reload).to be_persisted
end
end
+
+ it 'executes hooks' do
+ expect(service.release).to receive(:execute_hooks).with('delete')
+
+ service.execute
+ end
end
end
diff --git a/spec/services/repositories/replicate_service_spec.rb b/spec/services/repositories/replicate_service_spec.rb
new file mode 100644
index 00000000000..b4fbc478d2f
--- /dev/null
+++ b/spec/services/repositories/replicate_service_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Repositories::ReplicateService, feature_category: :source_code_management do
+ let(:new_checksum) { 'match' }
+ let(:repository) { instance_double('Gitlab::Git::Repository', checksum: 'match') }
+ let(:new_repository) { instance_double('Gitlab::Git::Repository', checksum: new_checksum) }
+
+ subject { described_class.new(repository) }
+
+ it 'replicates repository' do
+ expect(new_repository).to receive(:replicate).with(repository)
+ expect(new_repository).not_to receive(:remove)
+
+ expect { subject.execute(new_repository, :project) }.not_to raise_error
+ end
+
+ context 'when checksum does not match' do
+ let(:new_checksum) { 'does not match' }
+
+ it 'raises an error and removes new repository' do
+ expect(new_repository).to receive(:replicate).with(repository)
+ expect(new_repository).to receive(:remove)
+
+ expect do
+ subject.execute(new_repository, :project)
+ end.to raise_error(described_class::Error, /Failed to verify project repository/)
+ end
+ end
+
+ context 'when an error is raised during checksum calculation' do
+ it 'raises the error and removes new repository' do
+ error = StandardError.new
+
+ expect(new_repository).to receive(:replicate).with(repository)
+ expect(new_repository).to receive(:checksum).and_raise(error)
+ expect(new_repository).to receive(:remove)
+
+ expect do
+ subject.execute(new_repository, :project)
+ end.to raise_error(error)
+ end
+ end
+end
diff --git a/spec/services/resource_events/change_labels_service_spec.rb b/spec/services/resource_events/change_labels_service_spec.rb
index 28b345f8191..89974360154 100644
--- a/spec/services/resource_events/change_labels_service_spec.rb
+++ b/spec/services/resource_events/change_labels_service_spec.rb
@@ -125,7 +125,7 @@ RSpec.describe ResourceEvents::ChangeLabelsService, feature_category: :team_plan
end
it_behaves_like 'internal event tracking' do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_LABEL_CHANGED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_LABEL_CHANGED }
let(:user) { author }
let(:namespace) { project.namespace }
diff --git a/spec/services/snippets/destroy_service_spec.rb b/spec/services/snippets/destroy_service_spec.rb
index ace9847185e..29898e3ab09 100644
--- a/spec/services/snippets/destroy_service_spec.rb
+++ b/spec/services/snippets/destroy_service_spec.rb
@@ -70,7 +70,6 @@ RSpec.describe Snippets::DestroyService, feature_category: :source_code_manageme
it 'does not schedule anything and return success' do
allow(snippet).to receive(:repository).and_return(nil)
- expect(GitlabShellWorker).not_to receive(:perform_in)
expect_next_instance_of(Repositories::DestroyService) do |instance|
expect(instance).to receive(:execute).and_call_original
end
@@ -151,7 +150,6 @@ RSpec.describe Snippets::DestroyService, feature_category: :source_code_manageme
expect(snippet.repository).not_to be_nil
expect(snippet.repository.exists?).to be_falsey
- expect(GitlabShellWorker).not_to receive(:perform_in)
expect_next_instance_of(Repositories::DestroyService) do |instance|
expect(instance).to receive(:execute).and_call_original
end
diff --git a/spec/services/snippets/update_repository_storage_service_spec.rb b/spec/services/snippets/update_repository_storage_service_spec.rb
index c417fbfd8b1..66847a43335 100644
--- a/spec/services/snippets/update_repository_storage_service_spec.rb
+++ b/spec/services/snippets/update_repository_storage_service_spec.rb
@@ -67,6 +67,7 @@ RSpec.describe Snippets::UpdateRepositoryStorageService, feature_category: :sour
expect(snippet_repository_double).to receive(:replicate)
.with(snippet.repository.raw)
.and_raise(Gitlab::Git::CommandError)
+ expect(snippet_repository_double).to receive(:remove)
expect do
subject.execute
@@ -101,10 +102,11 @@ RSpec.describe Snippets::UpdateRepositoryStorageService, feature_category: :sour
.with(snippet.repository.raw)
expect(snippet_repository_double).to receive(:checksum)
.and_return('not matching checksum')
+ expect(snippet_repository_double).to receive(:remove)
expect do
subject.execute
- end.to raise_error(UpdateRepositoryStorageMethods::Error, /Failed to verify snippet repository checksum from \w+ to not matching checksum/)
+ end.to raise_error(Repositories::ReplicateService::Error, /Failed to verify snippet repository checksum from \w+ to not matching checksum/)
expect(snippet).not_to be_repository_read_only
expect(snippet.repository_storage).to eq('default')
diff --git a/spec/services/spam/spam_verdict_service_spec.rb b/spec/services/spam/spam_verdict_service_spec.rb
index 70f43d82ead..361742699b0 100644
--- a/spec/services/spam/spam_verdict_service_spec.rb
+++ b/spec/services/spam/spam_verdict_service_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Spam::SpamVerdictService, feature_category: :instance_resiliency
end
let(:check_for_spam) { true }
- let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:user) { create(:user) }
let_it_be(:issue) { create(:issue, author: user) }
let_it_be(:snippet) { create(:personal_snippet, :public, author: user) }
@@ -136,15 +136,9 @@ RSpec.describe Spam::SpamVerdictService, feature_category: :instance_resiliency
end
end
- context 'if allow_possible_spam user custom attribute is set' do
+ context 'if user is trusted to create possible spam' do
before do
- UserCustomAttribute.upsert_custom_attributes(
- [{
- user_id: user.id,
- key: 'allow_possible_spam',
- value: 'does not matter'
- }]
- )
+ user.custom_attributes.create!(key: 'trusted_by', value: 'does not matter')
end
context 'and a service returns a verdict that should be overridden' do
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index 4a795f2db20..bcca1ed0b23 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -15,17 +15,34 @@ RSpec.describe ::SystemNotes::IssuablesService, feature_category: :team_planning
let(:service) { described_class.new(noteable: noteable, project: project, author: author) }
describe '#relate_issuable' do
- let(:noteable_ref) { create(:issue) }
+ let_it_be(:issue1) { create(:issue, project: project) }
+ let_it_be(:issue2) { create(:issue, project: project) }
- subject { service.relate_issuable(noteable_ref) }
+ let(:noteable_ref) { issue1 }
- it_behaves_like 'a system note' do
- let(:action) { 'relate' }
- end
+ subject(:system_note) { service.relate_issuable(noteable_ref) }
context 'when issue marks another as related' do
+ it_behaves_like 'a system note' do
+ let(:action) { 'relate' }
+ end
+
it 'sets the note text' do
- expect(subject.note).to eq "marked this issue as related to #{noteable_ref.to_reference(project)}"
+ expect(system_note.note).to eq "marked this issue as related to #{issue1.to_reference(project)}"
+ end
+ end
+
+ context 'when issue marks several other issues as related' do
+ let(:noteable_ref) { [issue1, issue2] }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'relate' }
+ end
+
+ it 'sets the note text' do
+ expect(system_note.note).to eq(
+ "marked this issue as related to #{issue1.to_reference(project)} and #{issue2.to_reference(project)}"
+ )
end
end
@@ -695,7 +712,7 @@ RSpec.describe ::SystemNotes::IssuablesService, feature_category: :team_planning
end
it_behaves_like 'internal event tracking' do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_CLONED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_CLONED }
let(:user) { author }
let(:namespace) { project.namespace }
end
diff --git a/spec/services/system_notes/time_tracking_service_spec.rb b/spec/services/system_notes/time_tracking_service_spec.rb
index 52b99a6976d..3242ae9e533 100644
--- a/spec/services/system_notes/time_tracking_service_spec.rb
+++ b/spec/services/system_notes/time_tracking_service_spec.rb
@@ -119,7 +119,7 @@ RSpec.describe ::SystemNotes::TimeTrackingService, feature_category: :team_plann
end
it_behaves_like 'internal event tracking' do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DUE_DATE_CHANGED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DUE_DATE_CHANGED }
let(:user) { author }
let(:namespace) { project.namespace }
end
@@ -232,7 +232,7 @@ RSpec.describe ::SystemNotes::TimeTrackingService, feature_category: :team_plann
end
it_behaves_like 'internal event tracking' do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TIME_ESTIMATE_CHANGED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TIME_ESTIMATE_CHANGED }
let(:user) { author }
let(:namespace) { project.namespace }
end
@@ -364,7 +364,7 @@ RSpec.describe ::SystemNotes::TimeTrackingService, feature_category: :team_plann
end
it_behaves_like 'internal event tracking' do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TIME_SPENT_CHANGED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TIME_SPENT_CHANGED }
let(:user) { author }
let(:namespace) { project.namespace }
end
diff --git a/spec/services/tasks_to_be_done/base_service_spec.rb b/spec/services/tasks_to_be_done/base_service_spec.rb
deleted file mode 100644
index 32b07cab095..00000000000
--- a/spec/services/tasks_to_be_done/base_service_spec.rb
+++ /dev/null
@@ -1,69 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe TasksToBeDone::BaseService, feature_category: :team_planning do
- let_it_be(:project) { create(:project) }
- let_it_be(:current_user) { create(:user) }
- let_it_be(:assignee_one) { create(:user) }
- let_it_be(:assignee_two) { create(:user) }
- let_it_be(:assignee_ids) { [assignee_one.id] }
- let_it_be(:label) { create(:label, title: 'tasks to be done:ci', project: project) }
-
- before do
- project.add_maintainer(current_user)
- project.add_developer(assignee_one)
- project.add_developer(assignee_two)
- end
-
- subject(:service) do
- TasksToBeDone::CreateCiTaskService.new(
- container: project,
- current_user: current_user,
- assignee_ids: assignee_ids
- )
- end
-
- context 'no existing task issue', :aggregate_failures do
- it 'creates an issue' do
- params = {
- assignee_ids: assignee_ids,
- title: 'Set up CI/CD',
- description: anything,
- add_labels: label.title
- }
-
- expect(Issues::CreateService)
- .to receive(:new)
- .with(container: project, current_user: current_user, params: params, perform_spam_check: false)
- .and_call_original
-
- expect { service.execute }.to change(Issue, :count).by(1)
-
- expect(project.issues.last).to have_attributes(
- author: current_user,
- title: params[:title],
- assignees: [assignee_one],
- labels: [label]
- )
- end
- end
-
- context 'an open issue with the same label already exists', :aggregate_failures do
- let_it_be(:assignee_ids) { [assignee_two.id] }
-
- it 'assigns the user to the existing issue' do
- issue = create(:labeled_issue, project: project, labels: [label], assignees: [assignee_one])
- params = { add_assignee_ids: assignee_ids }
-
- expect(Issues::UpdateService)
- .to receive(:new)
- .with(container: project, current_user: current_user, params: params)
- .and_call_original
-
- expect { service.execute }.not_to change(Issue, :count)
-
- expect(issue.reload.assignees).to match_array([assignee_one, assignee_two])
- end
- end
-end
diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb
index 0888c27aab2..0b4cf9e53db 100644
--- a/spec/services/todo_service_spec.rb
+++ b/spec/services/todo_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe TodoService, feature_category: :team_planning do
include AfterNextHelpers
+ let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:author) { create(:user) }
let_it_be(:assignee) { create(:user) }
@@ -31,11 +32,18 @@ RSpec.describe TodoService, feature_category: :team_planning do
end
shared_examples 'reassigned target' do
+ let(:additional_todo_attributes) { {} }
+
it 'creates a pending todo for new assignee' do
target_unassigned.assignees = [john_doe]
service.send(described_method, target_unassigned, author)
- should_create_todo(user: john_doe, target: target_unassigned, action: Todo::ASSIGNED)
+ should_create_todo(
+ user: john_doe,
+ target: target_unassigned,
+ action: Todo::ASSIGNED,
+ **additional_todo_attributes
+ )
end
it 'does not create a todo if unassigned' do
@@ -48,7 +56,13 @@ RSpec.describe TodoService, feature_category: :team_planning do
target_assigned.assignees = [john_doe]
service.send(described_method, target_assigned, john_doe)
- should_create_todo(user: john_doe, target: target_assigned, author: john_doe, action: Todo::ASSIGNED)
+ should_create_todo(
+ user: john_doe,
+ target: target_assigned,
+ author: john_doe,
+ action: Todo::ASSIGNED,
+ **additional_todo_attributes
+ )
end
it 'does not create a todo for guests' do
@@ -657,11 +671,27 @@ RSpec.describe TodoService, feature_category: :team_planning do
end
describe '#mark_todo' do
- it 'creates a todo from a issue' do
+ it 'creates a todo from an issue' do
service.mark_todo(unassigned_issue, author)
should_create_todo(user: author, target: unassigned_issue, action: Todo::MARKED)
end
+
+ context 'when issue belongs to a group' do
+ it 'creates a todo from an issue' do
+ group_issue = create(:issue, :group_level, namespace: group)
+ service.mark_todo(group_issue, group_issue.author)
+
+ should_create_todo(
+ user: group_issue.author,
+ author: group_issue.author,
+ target: group_issue,
+ action: Todo::MARKED,
+ project: nil,
+ group: group
+ )
+ end
+ end
end
describe '#todo_exists?' do
@@ -726,6 +756,22 @@ RSpec.describe TodoService, feature_category: :team_planning do
should_create_todo(user: author, target: work_item, action: Todo::MARKED)
end
+
+ context 'when work item belongs to a group' do
+ it 'creates a todo from a work item' do
+ group_work_item = create(:work_item, :group_level, namespace: group)
+ service.mark_todo(group_work_item, group_work_item.author)
+
+ should_create_todo(
+ user: group_work_item.author,
+ author: group_work_item.author,
+ target: group_work_item,
+ action: Todo::MARKED,
+ project: nil,
+ group: group
+ )
+ end
+ end
end
describe '#todo_exists?' do
@@ -779,7 +825,7 @@ RSpec.describe TodoService, feature_category: :team_planning do
end
end
- context 'assignable is an issue' do
+ context 'assignable is a project level issue' do
it_behaves_like 'reassigned target' do
let(:target_assigned) { create(:issue, project: project, author: author, assignees: [john_doe], description: "- [ ] Task 1\n- [ ] Task 2 #{mentions}") }
let(:addressed_target_assigned) { create(:issue, project: project, author: author, assignees: [john_doe], description: "#{directly_addressed}\n- [ ] Task 1\n- [ ] Task 2") }
@@ -787,6 +833,32 @@ RSpec.describe TodoService, feature_category: :team_planning do
end
end
+ context 'assignable is a project level work_item' do
+ it_behaves_like 'reassigned target' do
+ let(:target_assigned) { create(:work_item, project: project, author: author, assignees: [john_doe], description: "- [ ] Task 1\n- [ ] Task 2 #{mentions}") }
+ let(:addressed_target_assigned) { create(:work_item, project: project, author: author, assignees: [john_doe], description: "#{directly_addressed}\n- [ ] Task 1\n- [ ] Task 2") }
+ let(:target_unassigned) { create(:work_item, project: project, author: author, assignees: []) }
+ end
+ end
+
+ context 'assignable is a group level issue' do
+ it_behaves_like 'reassigned target' do
+ let(:additional_todo_attributes) { { project: nil, group: group } }
+ let(:target_assigned) { create(:issue, :group_level, namespace: group, author: author, assignees: [john_doe], description: "- [ ] Task 1\n- [ ] Task 2 #{mentions}") }
+ let(:addressed_target_assigned) { create(:issue, :group_level, namespace: group, author: author, assignees: [john_doe], description: "#{directly_addressed}\n- [ ] Task 1\n- [ ] Task 2") }
+ let(:target_unassigned) { create(:issue, :group_level, namespace: group, author: author, assignees: []) }
+ end
+ end
+
+ context 'assignable is a group level work item' do
+ it_behaves_like 'reassigned target' do
+ let(:additional_todo_attributes) { { project: nil, group: group } }
+ let(:target_assigned) { create(:work_item, :group_level, namespace: group, author: author, assignees: [john_doe], description: "- [ ] Task 1\n- [ ] Task 2 #{mentions}") }
+ let(:addressed_target_assigned) { create(:work_item, :group_level, namespace: group, author: author, assignees: [john_doe], description: "#{directly_addressed}\n- [ ] Task 1\n- [ ] Task 2") }
+ let(:target_unassigned) { create(:work_item, :group_level, namespace: group, author: author, assignees: []) }
+ end
+ end
+
context 'assignable is an alert' do
it_behaves_like 'reassigned target' do
let(:target_assigned) { create(:alert_management_alert, project: project, assignees: [john_doe]) }
diff --git a/spec/services/update_container_registry_info_service_spec.rb b/spec/services/update_container_registry_info_service_spec.rb
index 416b08bd04b..b21e3f4bd13 100644
--- a/spec/services/update_container_registry_info_service_spec.rb
+++ b/spec/services/update_container_registry_info_service_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe UpdateContainerRegistryInfoService, feature_category: :container_
it 'uses a token with no access permissions' do
expect(Auth::ContainerRegistryAuthenticationService)
- .to receive(:access_token).with([], []).and_return(token)
+ .to receive(:access_token).with({}).and_return(token)
expect(ContainerRegistry::Client)
.to receive(:new).with(api_url, token: token).and_return(client)
@@ -72,13 +72,14 @@ RSpec.describe UpdateContainerRegistryInfoService, feature_category: :container_
expect(application_settings.container_registry_vendor).to be_blank
expect(application_settings.container_registry_version).to be_blank
expect(application_settings.container_registry_features).to eq([])
+ expect(application_settings.container_registry_db_enabled).to be_falsey
end
end
context 'when able to detect the container registry type' do
context 'when using the GitLab container registry' do
it 'updates application settings accordingly' do
- stub_registry_info(vendor: 'gitlab', version: '2.9.1-gitlab', features: %w[a b c])
+ stub_registry_info(vendor: 'gitlab', version: '2.9.1-gitlab', features: %w[a b c], db_enabled: true)
stub_supports_gitlab_api(true)
subject
@@ -88,12 +89,13 @@ RSpec.describe UpdateContainerRegistryInfoService, feature_category: :container_
expect(application_settings.container_registry_version).to eq('2.9.1-gitlab')
expect(application_settings.container_registry_features)
.to match_array(%W[a b c #{ContainerRegistry::GitlabApiClient::REGISTRY_GITLAB_V1_API_FEATURE}])
+ expect(application_settings.container_registry_db_enabled).to be_truthy
end
end
context 'when using a third-party container registry' do
it 'updates application settings accordingly' do
- stub_registry_info(vendor: 'other', version: nil, features: nil)
+ stub_registry_info(vendor: 'other', version: nil, features: nil, db_enabled: false)
stub_supports_gitlab_api(false)
subject
@@ -102,6 +104,7 @@ RSpec.describe UpdateContainerRegistryInfoService, feature_category: :container_
expect(application_settings.container_registry_vendor).to eq('other')
expect(application_settings.container_registry_version).to be_blank
expect(application_settings.container_registry_features).to eq([])
+ expect(application_settings.container_registry_db_enabled).to be_falsey
end
end
end
@@ -109,7 +112,7 @@ RSpec.describe UpdateContainerRegistryInfoService, feature_category: :container_
def stub_access_token
allow(Auth::ContainerRegistryAuthenticationService)
- .to receive(:access_token).with([], []).and_return('foo')
+ .to receive(:access_token).with({}).and_return('foo')
end
def stub_registry_info(output)
diff --git a/spec/services/users/auto_ban_service_spec.rb b/spec/services/users/auto_ban_service_spec.rb
new file mode 100644
index 00000000000..b989cec6a9d
--- /dev/null
+++ b/spec/services/users/auto_ban_service_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::AutoBanService, feature_category: :instance_resiliency do
+ let_it_be_with_reload(:user) { create(:user) }
+ let(:reason) { :auto_ban_reason }
+
+ context 'when auto banning a user', :aggregate_failures do
+ subject(:auto_ban_user) { described_class.new(user: user, reason: reason).execute }
+
+ context 'when successful' do
+ it 'returns success status' do
+ response = auto_ban_user
+
+ expect(response[:status]).to eq(:success)
+ end
+
+ it 'bans the user' do
+ expect { auto_ban_user }.to change { user.state }.from('active').to('banned')
+ end
+
+ it 'creates a BannedUser' do
+ expect { auto_ban_user }.to change { Users::BannedUser.count }.by(1)
+ expect(Users::BannedUser.last.user_id).to eq(user.id)
+ end
+
+ describe 'recording a custom attribute' do
+ it 'records a custom attribute' do
+ expect { auto_ban_user }.to change { UserCustomAttribute.count }.by(1)
+ expect(user.custom_attributes.by_key(UserCustomAttribute::AUTO_BANNED_BY).first.value).to eq(reason.to_s)
+ end
+ end
+ end
+
+ context 'when failed' do
+ context 'when user is blocked' do
+ before do
+ user.block!
+ end
+
+ it 'returns state error message' do
+ response = auto_ban_user
+
+ expect(response[:status]).to eq(:error)
+ expect(response[:message]).to match('State cannot transition via \"ban\"')
+ end
+
+ it 'does not modify the BannedUser record or user state' do
+ expect { auto_ban_user }.not_to change { Users::BannedUser.count }
+ expect { auto_ban_user }.not_to change { user.state }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/users/in_product_marketing_email_records_spec.rb b/spec/services/users/in_product_marketing_email_records_spec.rb
index 059f0890b53..d214560b2a6 100644
--- a/spec/services/users/in_product_marketing_email_records_spec.rb
+++ b/spec/services/users/in_product_marketing_email_records_spec.rb
@@ -17,7 +17,6 @@ RSpec.describe Users::InProductMarketingEmailRecords, feature_category: :onboard
records.add(user, track: :team_short, series: 0)
records.add(user, track: :create, series: 1)
- records.add(user, campaign: Users::InProductMarketingEmail::BUILD_IOS_APP_GUIDE)
end
it 'bulk inserts added records' do
@@ -36,30 +35,20 @@ RSpec.describe Users::InProductMarketingEmailRecords, feature_category: :onboard
freeze_time do
records.add(user, track: :team_short, series: 0)
records.add(user, track: :create, series: 1)
- records.add(user, campaign: Users::InProductMarketingEmail::BUILD_IOS_APP_GUIDE)
- first, second, third = records.records
+ first, second = records.records
expect(first).to be_a Users::InProductMarketingEmail
- expect(first.campaign).to be_nil
expect(first.track.to_sym).to eq :team_short
expect(first.series).to eq 0
expect(first.created_at).to eq Time.zone.now
expect(first.updated_at).to eq Time.zone.now
expect(second).to be_a Users::InProductMarketingEmail
- expect(second.campaign).to be_nil
expect(second.track.to_sym).to eq :create
expect(second.series).to eq 1
expect(second.created_at).to eq Time.zone.now
expect(second.updated_at).to eq Time.zone.now
-
- expect(third).to be_a Users::InProductMarketingEmail
- expect(third.campaign).to eq Users::InProductMarketingEmail::BUILD_IOS_APP_GUIDE
- expect(third.track).to be_nil
- expect(third.series).to be_nil
- expect(third.created_at).to eq Time.zone.now
- expect(third.updated_at).to eq Time.zone.now
end
end
end
diff --git a/spec/services/users/signup_service_spec.rb b/spec/services/users/signup_service_spec.rb
deleted file mode 100644
index 29663411346..00000000000
--- a/spec/services/users/signup_service_spec.rb
+++ /dev/null
@@ -1,75 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Users::SignupService, feature_category: :system_access do
- let(:user) { create(:user, setup_for_company: true) }
-
- describe '#execute' do
- context 'when updating name' do
- it 'updates the name attribute' do
- result = update_user(user, name: 'New Name')
-
- expect(result.success?).to be(true)
- expect(user.reload.name).to eq('New Name')
- end
-
- it 'returns an error result when name is missing' do
- result = update_user(user, name: '')
-
- expect(user.reload.name).not_to be_blank
- expect(result.success?).to be(false)
- expect(result.message).to include("Name can't be blank")
- end
- end
-
- context 'when updating role' do
- it 'updates the role attribute' do
- result = update_user(user, role: 'development_team_lead')
-
- expect(result.success?).to be(true)
- expect(user.reload.role).to eq('development_team_lead')
- end
-
- it 'returns an error result when role is missing' do
- result = update_user(user, role: '')
-
- expect(user.reload.role).not_to be_blank
- expect(result.success?).to be(false)
- expect(result.message).to eq("Role can't be blank")
- end
- end
-
- context 'when updating setup_for_company' do
- it 'updates the setup_for_company attribute' do
- result = update_user(user, setup_for_company: 'false')
-
- expect(result.success?).to be(true)
- expect(user.reload.setup_for_company).to be(false)
- end
-
- context 'when on SaaS', :saas do
- it 'returns an error result when setup_for_company is missing' do
- result = update_user(user, setup_for_company: '')
-
- expect(user.reload.setup_for_company).not_to be_blank
- expect(result.success?).to be(false)
- expect(result.message).to eq("Setup for company can't be blank")
- end
- end
-
- context 'when not on .com' do
- it 'returns success when setup_for_company is blank' do
- result = update_user(user, setup_for_company: '')
-
- expect(result.success?).to be(true)
- expect(user.reload.setup_for_company).to be(nil)
- end
- end
- end
-
- def update_user(user, opts)
- described_class.new(user, opts).execute
- end
- end
-end
diff --git a/spec/services/users/allow_possible_spam_service_spec.rb b/spec/services/users/trust_service_spec.rb
index 53618f0c8e9..1f71992ce9b 100644
--- a/spec/services/users/allow_possible_spam_service_spec.rb
+++ b/spec/services/users/trust_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Users::AllowPossibleSpamService, feature_category: :user_management do
+RSpec.describe Users::TrustService, feature_category: :user_management do
let_it_be(:current_user) { create(:admin) }
subject(:service) { described_class.new(current_user) }
@@ -18,7 +18,7 @@ RSpec.describe Users::AllowPossibleSpamService, feature_category: :user_manageme
operation
user.reload
- expect(user.custom_attributes.by_key(UserCustomAttribute::ALLOW_POSSIBLE_SPAM)).to be_present
+ expect(user.custom_attributes.by_key(UserCustomAttribute::TRUSTED_BY)).to be_present
end
end
end
diff --git a/spec/services/users/disallow_possible_spam_service_spec.rb b/spec/services/users/untrust_service_spec.rb
index 32a47e05525..054cb9b82dc 100644
--- a/spec/services/users/disallow_possible_spam_service_spec.rb
+++ b/spec/services/users/untrust_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Users::DisallowPossibleSpamService, feature_category: :user_management do
+RSpec.describe Users::UntrustService, feature_category: :user_management do
let_it_be(:current_user) { create(:admin) }
subject(:service) { described_class.new(current_user) }
@@ -16,19 +16,19 @@ RSpec.describe Users::DisallowPossibleSpamService, feature_category: :user_manag
UserCustomAttribute.upsert_custom_attributes(
[{
user_id: user.id,
- key: :allow_possible_spam,
+ key: UserCustomAttribute::TRUSTED_BY,
value: 'not important'
}]
)
end
it 'updates the custom attributes', :aggregate_failures do
- expect(user.custom_attributes.by_key(UserCustomAttribute::ALLOW_POSSIBLE_SPAM)).to be_present
+ expect(user.trusted_with_spam_attribute).to be_present
operation
user.reload
- expect(user.custom_attributes).to be_empty
+ expect(user.trusted_with_spam_attribute).to be nil
end
end
end
diff --git a/spec/services/verify_pages_domain_service_spec.rb b/spec/services/verify_pages_domain_service_spec.rb
index d66d584d3d0..9f6e37ec10d 100644
--- a/spec/services/verify_pages_domain_service_spec.rb
+++ b/spec/services/verify_pages_domain_service_spec.rb
@@ -312,20 +312,4 @@ RSpec.describe VerifyPagesDomainService, feature_category: :pages do
def disallow_resolver!
expect(Resolv::DNS).not_to receive(:open)
end
-
- def stub_resolver(stubbed_lookups = {})
- resolver = instance_double('Resolv::DNS')
- allow(resolver).to receive(:timeouts=)
-
- expect(Resolv::DNS).to receive(:open).and_yield(resolver)
-
- allow(resolver).to receive(:getresources) { [] }
- stubbed_lookups.each do |domain, records|
- records = Array(records).map { |txt| Resolv::DNS::Resource::IN::TXT.new(txt) }
- # Append '.' to domain_name, indicating absolute FQDN
- allow(resolver).to receive(:getresources).with(domain + '.', Resolv::DNS::Resource::IN::TXT) { records }
- end
-
- resolver
- end
end
diff --git a/spec/services/vs_code/settings/create_or_update_service_spec.rb b/spec/services/vs_code/settings/create_or_update_service_spec.rb
new file mode 100644
index 00000000000..aab8b2c95c6
--- /dev/null
+++ b/spec/services/vs_code/settings/create_or_update_service_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe VsCode::Settings::CreateOrUpdateService, feature_category: :web_ide do
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
+
+ let(:opts) do
+ {
+ setting_type: "settings",
+ content: '{ "editor.fontSize": 12 }'
+ }
+ end
+
+ subject { described_class.new(current_user: user, params: opts).execute }
+
+ context 'when setting_type is machines' do
+ it 'returns default machine as a successful response' do
+ opts = { setting_type: "machines", machines: '[]' }
+ result = described_class.new(current_user: user, params: opts).execute
+
+ expect(result.payload).to eq(VsCode::Settings::DEFAULT_MACHINE)
+ end
+ end
+
+ it 'creates a new record when a record with the setting does not exist' do
+ expect { subject }.to change { User.find(user.id).vscode_settings.count }.from(0).to(1)
+ record = User.find(user.id).vscode_settings.by_setting_type('settings').first
+ expect(record.content).to eq('{ "editor.fontSize": 12 }')
+ end
+
+ it 'updates the existing record if setting exists' do
+ setting = create(:vscode_setting, user: user)
+
+ expect { subject }.to change {
+ VsCode::Settings::VsCodeSetting.find(setting.id).content
+ }.from(setting.content).to(opts[:content])
+ end
+
+ it 'fails if an invalid value is passed' do
+ invalid_opts = { setting_type: nil, content: nil }
+ result = described_class.new(current_user: user, params: invalid_opts).execute
+
+ expect(result.status).to eq(:error)
+ end
+ end
+end
diff --git a/spec/services/web_hook_service_spec.rb b/spec/services/web_hook_service_spec.rb
index 259f5156d42..89346353db2 100644
--- a/spec/services/web_hook_service_spec.rb
+++ b/spec/services/web_hook_service_spec.rb
@@ -171,6 +171,23 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state,
end
end
+ context 'with SystemHook' do
+ let_it_be(:system_hook) { create(:system_hook) }
+ let(:service_instance) { described_class.new(system_hook, data, :push_hooks) }
+
+ before do
+ stub_full_request(system_hook.url, method: :post)
+ end
+
+ it 'POSTs to the webhook URL with correct headers' do
+ service_instance.execute
+
+ expect(WebMock).to have_requested(:post, stubbed_hostname(system_hook.url)).with(
+ headers: headers.merge({ 'X-Gitlab-Event' => 'System Hook' })
+ ).once
+ end
+ end
+
it 'POSTs the data as JSON and returns expected headers' do
stub_full_request(project_hook.url, method: :post)
diff --git a/spec/services/work_items/parent_links/create_service_spec.rb b/spec/services/work_items/parent_links/create_service_spec.rb
index 41ae6398614..1ff48f4e269 100644
--- a/spec/services/work_items/parent_links/create_service_spec.rb
+++ b/spec/services/work_items/parent_links/create_service_spec.rb
@@ -200,7 +200,7 @@ RSpec.describe WorkItems::ParentLinks::CreateService, feature_category: :portfol
it 'returns error status' do
error = "#{issue.to_reference} cannot be added: is not allowed to add this type of parent. " \
- "#{other_project_task.to_reference} cannot be added: parent must be in the same project as child."
+ "#{other_project_task.to_reference} cannot be added: parent must be in the same project or group as child."
is_expected.to eq(service_error(error, http_status: 422))
end
diff --git a/spec/services/work_items/related_work_item_links/create_service_spec.rb b/spec/services/work_items/related_work_item_links/create_service_spec.rb
index 992beb705aa..62d60280902 100644
--- a/spec/services/work_items/related_work_item_links/create_service_spec.rb
+++ b/spec/services/work_items/related_work_item_links/create_service_spec.rb
@@ -28,7 +28,8 @@ RSpec.describe WorkItems::RelatedWorkItemLinks::CreateService, feature_category:
it_behaves_like 'issuable link creation', use_references: false do
let(:response_keys) { [:status, :created_references, :message] }
- let(:already_assigned_error_msg) { "Work items are already linked" }
+ let(:async_notes) { true }
+ let(:already_assigned_error_msg) { "Items are already linked" }
let(:no_found_error_msg) do
'No matching work item found. Make sure you are adding a valid ID and you have access to the item.'
end
diff --git a/spec/services/work_items/update_service_spec.rb b/spec/services/work_items/update_service_spec.rb
index 38e5d4dc153..557617f61bb 100644
--- a/spec/services/work_items/update_service_spec.rb
+++ b/spec/services/work_items/update_service_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
end
it_behaves_like 'internal event tracking' do
- let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TITLE_CHANGED }
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TITLE_CHANGED }
let(:user) { current_user }
let(:namespace) { project.namespace }
subject(:service_action) { update_work_item[:status] }
diff --git a/spec/services/work_items/widgets/labels_service/update_service_spec.rb b/spec/services/work_items/widgets/labels_service/update_service_spec.rb
index 17daec2b1ea..43d9d46a268 100644
--- a/spec/services/work_items/widgets/labels_service/update_service_spec.rb
+++ b/spec/services/work_items/widgets/labels_service/update_service_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe WorkItems::Widgets::LabelsService::UpdateService, feature_categor
let_it_be(:label1) { create(:label, project: project) }
let_it_be(:label2) { create(:label, project: project) }
let_it_be(:label3) { create(:label, project: project) }
- let_it_be(:current_user) { create(:user) }
+ let_it_be(:current_user) { create(:user).tap { |user| project.add_reporter(user) } }
let(:work_item) { create(:work_item, project: project, labels: [label1, label2]) }
let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::Labels) } }
@@ -26,6 +26,14 @@ RSpec.describe WorkItems::Widgets::LabelsService::UpdateService, feature_categor
}
)
end
+
+ context "and user doesn't have permissions to update labels" do
+ let_it_be(:current_user) { create(:user) }
+
+ it 'removes label params' do
+ expect(service.prepare_update_params(params: params)).to be_nil
+ end
+ end
end
context 'when widget does not exist in new type' do
diff --git a/spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb b/spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb
index 0196e7c2b02..f9708afd313 100644
--- a/spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb
+++ b/spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe WorkItems::Widgets::StartAndDueDateService::UpdateService, feature_category: :portfolio_management do
- let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user).tap { |user| project.add_reporter(user) } }
let_it_be_with_reload(:work_item) { create(:work_item, project: project) }
let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::StartAndDueDate) } }
@@ -26,6 +26,14 @@ RSpec.describe WorkItems::Widgets::StartAndDueDateService::UpdateService, featur
change(work_item, :due_date).from(nil).to(due_date)
)
end
+
+ context "and user doesn't have permissions to update start and due date" do
+ let_it_be(:user) { create(:user) }
+
+    it 'removes start and due date params' do
+ expect(update_params).to be_nil
+ end
+ end
end
context 'when date params are not present' do
diff --git a/spec/sidekiq/cron/job_gem_dependency_spec.rb b/spec/sidekiq/cron/job_gem_dependency_spec.rb
index 38c658feba6..c6103ed6231 100644
--- a/spec/sidekiq/cron/job_gem_dependency_spec.rb
+++ b/spec/sidekiq/cron/job_gem_dependency_spec.rb
@@ -6,10 +6,11 @@ RSpec.describe Sidekiq::Cron::Job do
describe 'cron jobs' do
context 'when Fugit depends on ZoTime or EoTime' do
before do
- described_class
- .create(name: 'TestCronWorker', # rubocop:disable Rails/SaveBang
- cron: Settings.cron_jobs[:pipeline_schedule_worker]['cron'],
- class: Settings.cron_jobs[:pipeline_schedule_worker]['job_class'])
+ described_class.create( # rubocop:disable Rails/SaveBang
+ name: 'TestCronWorker',
+ cron: Settings.cron_jobs[:pipeline_schedule_worker]['cron'],
+ class: Settings.cron_jobs[:pipeline_schedule_worker]['job_class']
+ )
end
it 'does not get any errors' do
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index f53e930f529..02db905b8b1 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -145,12 +145,6 @@ RSpec.configure do |config|
metadata[:schema] = :latest if metadata[:level] == :background_migration
end
- # Do not overwrite type if it's already set
- unless metadata.key?(:type)
- match = location.match(%r{/spec/([^/]+)/})
- metadata[:type] = match[1].singularize.to_sym if match
- end
-
# Admin controller specs get auto admin mode enabled since they are
# protected by the 'EnforcesAdminAuthentication' concern
metadata[:enable_admin_mode] = true if %r{(ee)?/spec/controllers/admin/}.match?(location)
@@ -214,10 +208,12 @@ RSpec.configure do |config|
config.include Capybara::RSpecMatchers, type: :request
config.include PendingDirectUploadHelpers, :direct_uploads
config.include LabelsHelper, type: :feature
+ config.include UnlockPipelinesHelpers, :unlock_pipelines
config.include_context 'when rendered has no HTML escapes', type: :view
include StubFeatureFlags
+ include StubSaasFeatures
include StubSnowplow
include StubMember
@@ -329,10 +325,6 @@ RSpec.configure do |config|
stub_feature_flags(disable_anonymous_project_search: false)
stub_feature_flags(disable_cancel_redundant_pipelines_service: false)
- # Specs should not get a CAPTCHA challenge by default, this makes the sign-in flow simpler in
- # most cases. We do test the CAPTCHA flow in the appropriate specs.
- stub_feature_flags(arkose_labs_login_challenge: false)
-
# Specs should not require email verification by default, this makes the sign-in flow simpler in
# most cases. We do test the email verification flow in the appropriate specs.
stub_feature_flags(require_email_verification: false)
diff --git a/spec/support/ability_check_todo.yml b/spec/support/ability_check_todo.yml
index eafd595b137..a317f49ea94 100644
--- a/spec/support/ability_check_todo.yml
+++ b/spec/support/ability_check_todo.yml
@@ -66,8 +66,6 @@ ProjectPolicy:
- create_test_case
- read_group_saml_identity
UserPolicy:
-- admin_observability
- admin_terraform_state
-- read_observability
# Permanent excludes (please provide a reason):
diff --git a/spec/support/capybara.rb b/spec/support/capybara.rb
index 65abbe12621..78d7e57c208 100644
--- a/spec/support/capybara.rb
+++ b/spec/support/capybara.rb
@@ -51,15 +51,6 @@ Capybara.register_server :puma_via_workhorse do |app, port, host, **options|
file.close! # We just want the filename
TestEnv.with_workhorse(host, port, socket_path) do
- # In cases of multiple installations of chromedriver, prioritize the version installed by SeleniumManager
- # selenium-manager doesn't work with Linux arm64 yet:
- # https://github.com/SeleniumHQ/selenium/issues/11357
- if RUBY_PLATFORM.include?('x86_64-linux') || RUBY_PLATFORM.include?('darwin')
- chrome_options = Selenium::WebDriver::Chrome::Options.chrome
- chromedriver_path = File.dirname(Selenium::WebDriver::SeleniumManager.driver_path(chrome_options))
- ENV['PATH'] = "#{chromedriver_path}:#{ENV['PATH']}" # rubocop:disable RSpec/EnvAssignment
- end
-
Capybara.servers[:puma].call(app, nil, socket_path, **options)
end
end
diff --git a/spec/support/database/prevent_cross_joins.rb b/spec/support/database/prevent_cross_joins.rb
index 443216ba9df..3ff83e685ba 100644
--- a/spec/support/database/prevent_cross_joins.rb
+++ b/spec/support/database/prevent_cross_joins.rb
@@ -41,7 +41,7 @@ module Database
schemas = ::Gitlab::Database::GitlabSchema.table_schemas!(tables)
- unless ::Gitlab::Database::GitlabSchema.cross_joins_allowed?(schemas)
+ unless ::Gitlab::Database::GitlabSchema.cross_joins_allowed?(schemas, tables)
Thread.current[:has_cross_join_exception] = true
raise CrossJoinAcrossUnsupportedTablesError,
"Unsupported cross-join across '#{tables.join(", ")}' querying '#{schemas.to_a.join(", ")}' discovered " \
diff --git a/spec/support/db_cleaner.rb b/spec/support/db_cleaner.rb
index 3131a22a20b..a1579ad1685 100644
--- a/spec/support/db_cleaner.rb
+++ b/spec/support/db_cleaner.rb
@@ -12,7 +12,10 @@ module DbCleaner
end
def deletion_except_tables
- %w[work_item_types work_item_hierarchy_restrictions work_item_widget_definitions]
+ %w[
+ work_item_types work_item_hierarchy_restrictions
+ work_item_widget_definitions work_item_related_link_restrictions
+ ]
end
def setup_database_cleaner
diff --git a/spec/support/finder_collection_allowlist.yml b/spec/support/finder_collection_allowlist.yml
index 860045c6ce6..0af4de11d51 100644
--- a/spec/support/finder_collection_allowlist.yml
+++ b/spec/support/finder_collection_allowlist.yml
@@ -58,6 +58,7 @@
- Repositories::BranchNamesFinder
- Repositories::ChangelogTagFinder
- Repositories::TreeFinder
+- Sbom::DependencyLicensesFinder
- Security::FindingsFinder
- Security::PipelineVulnerabilitiesFinder
- Security::ScanExecutionPoliciesFinder
diff --git a/spec/support/helpers/content_editor_helpers.rb b/spec/support/helpers/content_editor_helpers.rb
index 7597a13e475..7e7ecc197fc 100644
--- a/spec/support/helpers/content_editor_helpers.rb
+++ b/spec/support/helpers/content_editor_helpers.rb
@@ -1,14 +1,6 @@
# frozen_string_literal: true
module ContentEditorHelpers
- def close_rich_text_promo_popover_if_present
- return unless page.has_css?("[data-testid='rich-text-promo-popover']")
-
- page.within("[data-testid='rich-text-promo-popover']") do
- click_button "Close"
- end
- end
-
def switch_to_markdown_editor
click_button("Switch to plain text editing")
end
diff --git a/spec/support/helpers/content_security_policy_helpers.rb b/spec/support/helpers/content_security_policy_helpers.rb
index 50a1bb62bc5..b12ebcbd4b9 100644
--- a/spec/support/helpers/content_security_policy_helpers.rb
+++ b/spec/support/helpers/content_security_policy_helpers.rb
@@ -24,8 +24,8 @@ any_time: false)
# ```
# find_csp_directive('connect-src')
# ```
- def find_csp_directive(key)
- csp = response.headers['Content-Security-Policy']
+ def find_csp_directive(key, header: nil)
+ csp = header || response.headers['Content-Security-Policy']
# Transform "default-src foo bar; connect-src foo bar; script-src ..."
# into array of values for a single directive based on the given key
diff --git a/spec/support/helpers/dns_helpers.rb b/spec/support/helpers/dns_helpers.rb
index c60c14f10a3..be26c80d217 100644
--- a/spec/support/helpers/dns_helpers.rb
+++ b/spec/support/helpers/dns_helpers.rb
@@ -52,4 +52,20 @@ module DnsHelpers
def db_hosts
ActiveRecord::Base.configurations.configs_for(env_name: Rails.env).map(&:host).compact.uniq
end
+
+ def stub_resolver(stubbed_lookups = {})
+ resolver = instance_double('Resolv::DNS')
+ allow(resolver).to receive(:timeouts=)
+
+ expect(Resolv::DNS).to receive(:open).and_yield(resolver)
+
+ allow(resolver).to receive(:getresources).and_return([])
+ stubbed_lookups.each do |domain, records|
+ records = Array(records).map { |txt| Resolv::DNS::Resource::IN::TXT.new(txt) }
+ # Append '.' to domain_name, indicating absolute FQDN
+ allow(resolver).to receive(:getresources).with("#{domain}.", Resolv::DNS::Resource::IN::TXT) { records }
+ end
+
+ resolver
+ end
end
diff --git a/spec/support/helpers/fake_migration_classes.rb b/spec/support/helpers/fake_migration_classes.rb
deleted file mode 100644
index 6c066b3b199..00000000000
--- a/spec/support/helpers/fake_migration_classes.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-
-class FakeRenameReservedPathMigrationV1 < ActiveRecord::Migration[4.2]
- include Gitlab::Database::RenameReservedPathsMigration::V1
-
- def version
- '20170316163845'
- end
-
- def name
- "FakeRenameReservedPathMigrationV1"
- end
-end
diff --git a/spec/support/helpers/features/dom_helpers.rb b/spec/support/helpers/features/dom_helpers.rb
index ac6523f3360..cbbb80dde36 100644
--- a/spec/support/helpers/features/dom_helpers.rb
+++ b/spec/support/helpers/features/dom_helpers.rb
@@ -2,12 +2,12 @@
module Features
module DomHelpers
- def find_by_testid(testid)
- page.find("[data-testid='#{testid}']")
+ def find_by_testid(testid, **kwargs)
+ page.find("[data-testid='#{testid}']", **kwargs)
end
- def within_testid(testid, &block)
- page.within("[data-testid='#{testid}']", &block)
+ def within_testid(testid, **kwargs, &block)
+ page.within("[data-testid='#{testid}']", **kwargs, &block)
end
end
end
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index 19a637d4893..5eba982e3da 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -80,11 +80,11 @@ module GraphqlHelpers
# All resolution goes through fields, so we need to create one here that
# uses our resolver. Thankfully, apart from the field name, resolvers
# contain all the configuration needed to define one.
- field_options = resolver_class.field_options.merge(
+ field = ::Types::BaseField.new(
+ resolver_class: resolver_class,
owner: resolver_parent,
name: 'field_value'
)
- field = ::Types::BaseField.new(**field_options)
# All mutations accept a single `:input` argument. Wrap arguments here.
args = { input: args } if resolver_class <= ::Mutations::BaseMutation && !args.key?(:input)
@@ -221,6 +221,7 @@ module GraphqlHelpers
def resolver_instance(resolver_class, obj: nil, ctx: {}, field: nil, schema: GitlabSchema, subscription_update: false)
if ctx.is_a?(Hash)
q = double('Query', schema: schema, subscription_update?: subscription_update, warden: GraphQL::Schema::Warden::PassThruWarden)
+ allow(q).to receive(:after_lazy) { |value, &block| schema.after_lazy(value, &block) }
ctx = GraphQL::Query::Context.new(query: q, object: obj, values: ctx)
end
diff --git a/spec/support/helpers/integrations/test_helpers.rb b/spec/support/helpers/integrations/test_helpers.rb
new file mode 100644
index 00000000000..c7fde957316
--- /dev/null
+++ b/spec/support/helpers/integrations/test_helpers.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module Integrations
+ module TestHelpers
+ def factory_for(integration)
+ return :integrations_slack if integration.is_a?(Integrations::Slack)
+
+ "#{integration.to_param}_integration".to_sym
+ end
+ end
+end
diff --git a/spec/support/helpers/javascript_fixtures_helpers.rb b/spec/support/helpers/javascript_fixtures_helpers.rb
index 417bf4366c5..191defc09ef 100644
--- a/spec/support/helpers/javascript_fixtures_helpers.rb
+++ b/spec/support/helpers/javascript_fixtures_helpers.rb
@@ -39,7 +39,7 @@ module JavaScriptFixturesHelpers
end
def remove_repository(project)
- Gitlab::Shell.new.remove_repository(project.repository_storage, project.disk_path)
+ project.repository.remove
end
# Public: Reads a GraphQL query from the filesystem as a string
diff --git a/spec/support/helpers/listbox_helpers.rb b/spec/support/helpers/listbox_helpers.rb
index e943790fc65..7a734d2b097 100644
--- a/spec/support/helpers/listbox_helpers.rb
+++ b/spec/support/helpers/listbox_helpers.rb
@@ -10,6 +10,10 @@ module ListboxHelpers
find('.gl-new-dropdown-item[role="option"]', text: text, exact_text: exact_text).click
end
+ def select_disclosure_dropdown_item(text, exact_text: false)
+ find('.gl-new-dropdown-item', text: text, exact_text: exact_text).click
+ end
+
def expect_listbox_item(text)
expect(page).to have_css('.gl-new-dropdown-item[role="option"]', text: text)
end
diff --git a/spec/support/helpers/migrations_helpers/work_item_types_helper.rb b/spec/support/helpers/migrations_helpers/work_item_types_helper.rb
index 40f84486537..9d114ae82b1 100644
--- a/spec/support/helpers/migrations_helpers/work_item_types_helper.rb
+++ b/spec/support/helpers/migrations_helpers/work_item_types_helper.rb
@@ -4,7 +4,11 @@ module MigrationHelpers
module WorkItemTypesHelper
def reset_work_item_types
Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter.upsert_types
+ WorkItems::HierarchyRestriction.reset_column_information
Gitlab::DatabaseImporters::WorkItems::HierarchyRestrictionsImporter.upsert_restrictions
+ return unless WorkItems::RelatedLinkRestriction.table_exists?
+
+ Gitlab::DatabaseImporters::WorkItems::RelatedLinksRestrictionsImporter.upsert_restrictions
end
end
end
diff --git a/spec/support/helpers/navbar_structure_helper.rb b/spec/support/helpers/navbar_structure_helper.rb
index 9a6af5fb8ae..fe39968b002 100644
--- a/spec/support/helpers/navbar_structure_helper.rb
+++ b/spec/support/helpers/navbar_structure_helper.rb
@@ -85,19 +85,6 @@ module NavbarStructureHelper
)
end
- def insert_observability_nav
- insert_after_nav_item(
- _('Kubernetes'),
- new_nav_item: {
- nav_item: _('Observability'),
- nav_sub_items: [
- _('Explore telemetry data'),
- _('Data sources')
- ]
- }
- )
- end
-
def insert_infrastructure_google_cloud_nav
insert_after_sub_nav_item(
s_('Terraform|Terraform states'),
diff --git a/spec/support/helpers/prometheus/metric_builders.rb b/spec/support/helpers/prometheus/metric_builders.rb
deleted file mode 100644
index 53329ee8dce..00000000000
--- a/spec/support/helpers/prometheus/metric_builders.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-module Prometheus
- module MetricBuilders
- def simple_query(suffix = 'a', **opts)
- { query_range: "query_range_#{suffix}" }.merge(opts)
- end
-
- def simple_queries
- [simple_query, simple_query('b', label: 'label', unit: 'unit')]
- end
-
- def simple_metric(title: 'title', required_metrics: [], queries: [simple_query])
- Gitlab::Prometheus::Metric.new(title: title, required_metrics: required_metrics, weight: 1, queries: queries)
- end
-
- def simple_metrics(added_metric_name: 'metric_a')
- [
- simple_metric(required_metrics: %W[#{added_metric_name} metric_b], queries: simple_queries),
- simple_metric(required_metrics: [added_metric_name], queries: [simple_query('empty')]),
- simple_metric(required_metrics: %w[metric_c])
- ]
- end
-
- def simple_metric_group(name: 'name', metrics: simple_metrics)
- Gitlab::Prometheus::MetricGroup.new(name: name, priority: 1, metrics: metrics)
- end
- end
-end
diff --git a/spec/support/helpers/stub_configuration.rb b/spec/support/helpers/stub_configuration.rb
index 4c997aceeee..562805cec3d 100644
--- a/spec/support/helpers/stub_configuration.rb
+++ b/spec/support/helpers/stub_configuration.rb
@@ -154,6 +154,11 @@ module StubConfiguration
stub_application_setting(maintenance_mode: value)
end
+ def stub_usage_ping_features(value)
+ stub_application_setting(usage_ping_enabled: value)
+ stub_application_setting(usage_ping_features_enabled: value)
+ end
+
private
# Modifies stubbed messages to also stub possible predicate versions
diff --git a/spec/support/helpers/stub_feature_flags.rb b/spec/support/helpers/stub_feature_flags.rb
index 7cebda700d3..42bb9982144 100644
--- a/spec/support/helpers/stub_feature_flags.rb
+++ b/spec/support/helpers/stub_feature_flags.rb
@@ -25,6 +25,15 @@ module StubFeatureFlags
Feature.reset_flipper
end
+ def stub_with_new_feature_current_request
+ return unless Gitlab::SafeRequestStore.active?
+
+ new_request = Feature::FlipperRequest.new
+ allow(new_request).to receive(:id).and_return(SecureRandom.uuid)
+
+ allow(Feature).to receive(:current_request).and_return(new_request)
+ end
+
# Stub Feature flags with `flag_name: true/false`
#
# @param [Hash] features where key is feature name and value is boolean whether enabled or not.
diff --git a/spec/support/helpers/stub_saas_features.rb b/spec/support/helpers/stub_saas_features.rb
new file mode 100644
index 00000000000..e344888cb8c
--- /dev/null
+++ b/spec/support/helpers/stub_saas_features.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+module StubSaasFeatures
+ # Stub SaaS feature with `feature_name: true/false`
+ #
+ # @param [Hash] features where key is feature name and value is boolean whether enabled or not.
+ #
+ # Examples
+ # - `stub_saas_features('onboarding' => false)` ... Disable `onboarding`
+ # SaaS feature globally.
+ # - `stub_saas_features('onboarding' => true)` ... Enable `onboarding`
+ # SaaS feature globally.
+ def stub_saas_features(features)
+ features.each do |feature_name, value|
+ raise ArgumentError, 'value must be boolean' unless value.in? [true, false]
+
+ allow(::Gitlab::Saas).to receive(:feature_available?).with(feature_name).and_return(value)
+ end
+ end
+end
diff --git a/spec/support/helpers/test_env.rb b/spec/support/helpers/test_env.rb
index b95adb3fe4d..740abdb6cfa 100644
--- a/spec/support/helpers/test_env.rb
+++ b/spec/support/helpers/test_env.rb
@@ -394,8 +394,11 @@ module TestEnv
end
def seed_db
+ # Adjust `deletion_except_tables` method to exclude seeded tables from
+ # record deletions.
Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter.upsert_types
Gitlab::DatabaseImporters::WorkItems::HierarchyRestrictionsImporter.upsert_restrictions
+ Gitlab::DatabaseImporters::WorkItems::RelatedLinksRestrictionsImporter.upsert_restrictions
end
private
diff --git a/spec/support/helpers/unlock_pipelines_helpers.rb b/spec/support/helpers/unlock_pipelines_helpers.rb
new file mode 100644
index 00000000000..342c2d72980
--- /dev/null
+++ b/spec/support/helpers/unlock_pipelines_helpers.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module UnlockPipelinesHelpers
+ def pipeline_ids_waiting_to_be_unlocked
+ Ci::UnlockPipelineRequest.with_redis do |redis|
+ redis.zrange(Ci::UnlockPipelineRequest::QUEUE_REDIS_KEY, 0, -1).map(&:to_i)
+ end
+ end
+
+ def expect_to_have_pending_unlock_pipeline_request(pipeline_id, timestamp)
+ Ci::UnlockPipelineRequest.with_redis do |redis|
+ timestamp_stored = redis.zscore(Ci::UnlockPipelineRequest::QUEUE_REDIS_KEY, pipeline_id)
+ expect(timestamp_stored).not_to be_nil
+ expect(timestamp_stored.to_i).to eq(timestamp)
+ end
+ end
+
+ def timestamp_of_pending_unlock_pipeline_request(pipeline_id)
+ Ci::UnlockPipelineRequest.with_redis do |redis|
+ redis.zscore(Ci::UnlockPipelineRequest::QUEUE_REDIS_KEY, pipeline_id).to_i
+ end
+ end
+end
diff --git a/spec/support/helpers/usage_data_helpers.rb b/spec/support/helpers/usage_data_helpers.rb
index 8ac3b0c134b..42e599c7510 100644
--- a/spec/support/helpers/usage_data_helpers.rb
+++ b/spec/support/helpers/usage_data_helpers.rb
@@ -76,7 +76,6 @@ module UsageDataHelpers
USAGE_DATA_KEYS = %i(
counts
- counts_monthly
recorded_at
mattermost_enabled
signup_enabled
diff --git a/spec/support/matchers/pushed_licensed_features_matcher.rb b/spec/support/matchers/pushed_licensed_features_matcher.rb
new file mode 100644
index 00000000000..b02863983bc
--- /dev/null
+++ b/spec/support/matchers/pushed_licensed_features_matcher.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+RSpec::Matchers.define :have_pushed_licensed_features do |expected|
+ def to_js(key, value)
+ "\"#{key}\":#{value}"
+ end
+
+ def html(actual)
+ actual.try(:html) || actual
+ end
+
+ match do |actual|
+ expected.all? do |licensed_feature_name, enabled|
+ html(actual).include?(to_js(licensed_feature_name, enabled))
+ end
+ end
+
+ failure_message do |actual|
+ missing = expected.select do |licensed_feature_name, enabled|
+ html(actual).exclude?(to_js(licensed_feature_name, enabled))
+ end
+
+ missing_licensed_features = missing.map do |licensed_feature_name, enabled|
+ to_js(licensed_feature_name, enabled)
+ end.join("\n")
+
+ "The following licensed feature(s) cannot be found in the frontend HTML source: #{missing_licensed_features}"
+ end
+end
diff --git a/spec/support/protected_branch_helpers.rb b/spec/support/protected_branch_helpers.rb
index db5118d6f88..49ede865876 100644
--- a/spec/support/protected_branch_helpers.rb
+++ b/spec/support/protected_branch_helpers.rb
@@ -3,7 +3,7 @@
module ProtectedBranchHelpers
def set_allowed_to(operation, option = 'Maintainers', form: '.js-new-protected-branch')
within(form) do
- within_select(".js-allowed-to-#{operation}") do
+ within_select(".js-allowed-to-#{operation}:not([disabled])") do
Array(option).each { |opt| click_on(opt) }
end
end
diff --git a/spec/support/rake.rb b/spec/support/rake.rb
new file mode 100644
index 00000000000..73590046f13
--- /dev/null
+++ b/spec/support/rake.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require_relative 'helpers/rake_helpers'
+
+RSpec.configure do |config|
+ config.include RakeHelpers, type: :task
+
+ config.before(:all, type: :task) do
+ require 'rake'
+
+ Rake.application.rake_require 'tasks/gitlab/helpers'
+ Rake::Task.define_task :environment
+ end
+
+ config.after(:all, type: :task) do
+ # Fast specs cannot load `spec/support/database_cleaner` and its RSpec
+ # helper DbCleaner.
+ delete_from_all_tables!(except: deletion_except_tables) if defined?(DbCleaner)
+ end
+end
diff --git a/spec/support/rspec.rb b/spec/support/rspec.rb
index 7f3aa55fb1d..f2f93fff07e 100644
--- a/spec/support/rspec.rb
+++ b/spec/support/rspec.rb
@@ -1,11 +1,12 @@
# frozen_string_literal: true
-require_relative "rspec_order"
-require_relative "system_exit_detected"
-require_relative "helpers/stub_configuration"
-require_relative "helpers/stub_metrics"
-require_relative "helpers/stub_object_storage"
-require_relative "helpers/fast_rails_root"
+require_relative 'rake'
+require_relative 'rspec_order'
+require_relative 'system_exit_detected'
+require_relative 'helpers/stub_configuration'
+require_relative 'helpers/stub_metrics'
+require_relative 'helpers/stub_object_storage'
+require_relative 'helpers/fast_rails_root'
require 'gitlab/rspec/all'
require 'gitlab/utils/all'
@@ -19,6 +20,15 @@ RSpec.configure do |config|
# Re-run failures locally with `--only-failures`
config.example_status_persistence_file_path = ENV.fetch('RSPEC_LAST_RUN_RESULTS_FILE', './spec/examples.txt')
+ config.define_derived_metadata(file_path: %r{(ee)?/spec/.+_spec\.rb\z}) do |metadata|
+ # Infer metadata tag `type` if not already inferred by
+ # `infer_spec_type_from_file_location!`.
+ unless metadata.key?(:type)
+ match = %r{/spec/([^/]+)/}.match(metadata[:location])
+ metadata[:type] = match[1].singularize.to_sym if match
+ end
+ end
+
# Makes diffs show entire non-truncated values.
config.around(:each, :unlimited_max_formatted_output_length) do |example|
old_max_formatted_output_length = RSpec::Support::ObjectFormatter.default_instance.max_formatted_output_length
diff --git a/spec/support/rspec_order_todo.yml b/spec/support/rspec_order_todo.yml
index 298f4006c3b..51f3ff2c077 100644
--- a/spec/support/rspec_order_todo.yml
+++ b/spec/support/rspec_order_todo.yml
@@ -26,7 +26,6 @@
- './ee/spec/controllers/admin/licenses/usage_exports_controller_spec.rb'
- './ee/spec/controllers/admin/projects_controller_spec.rb'
- './ee/spec/controllers/admin/push_rules_controller_spec.rb'
-- './ee/spec/controllers/admin/runners_controller_spec.rb'
- './ee/spec/controllers/admin/users_controller_spec.rb'
- './ee/spec/controllers/autocomplete_controller_spec.rb'
- './ee/spec/controllers/concerns/ee/routable_actions/sso_enforcement_redirect_spec.rb'
@@ -87,7 +86,6 @@
- './ee/spec/controllers/groups/omniauth_callbacks_controller_spec.rb'
- './ee/spec/controllers/groups/push_rules_controller_spec.rb'
- './ee/spec/controllers/groups/roadmap_controller_spec.rb'
-- './ee/spec/controllers/groups/runners_controller_spec.rb'
- './ee/spec/controllers/groups/saml_group_links_controller_spec.rb'
- './ee/spec/controllers/groups/saml_providers_controller_spec.rb'
- './ee/spec/controllers/groups/scim_oauth_controller_spec.rb'
@@ -142,7 +140,6 @@
- './ee/spec/controllers/projects/quality/test_cases_controller_spec.rb'
- './ee/spec/controllers/projects/repositories_controller_spec.rb'
- './ee/spec/controllers/projects/requirements_management/requirements_controller_spec.rb'
-- './ee/spec/controllers/projects/runners_controller_spec.rb'
- './ee/spec/controllers/projects/security/api_fuzzing_configuration_controller_spec.rb'
- './ee/spec/controllers/projects/security/configuration_controller_spec.rb'
- './ee/spec/controllers/projects/security/dashboard_controller_spec.rb'
@@ -2355,7 +2352,6 @@
- './ee/spec/serializers/evidences/build_artifact_entity_spec.rb'
- './ee/spec/serializers/evidences/evidence_entity_spec.rb'
- './ee/spec/serializers/fork_namespace_entity_spec.rb'
-- './ee/spec/serializers/geo_project_registry_entity_spec.rb'
- './ee/spec/serializers/group_vulnerability_autocomplete_entity_spec.rb'
- './ee/spec/serializers/integrations/field_entity_spec.rb'
- './ee/spec/serializers/integrations/jira_serializers/issue_detail_entity_spec.rb'
@@ -2742,7 +2738,7 @@
- './ee/spec/services/gitlab_subscriptions/activate_service_spec.rb'
- './ee/spec/services/gitlab_subscriptions/check_future_renewal_service_spec.rb'
- './ee/spec/services/gitlab_subscriptions/create_service_spec.rb'
-- './ee/spec/services/gitlab_subscriptions/create_trial_or_lead_service_spec.rb'
+- './ee/spec/services/gitlab_subscriptions/create_company_lead_service_spec.rb'
- './ee/spec/services/gitlab_subscriptions/fetch_purchase_eligible_namespaces_service_spec.rb'
- './ee/spec/services/gitlab_subscriptions/fetch_subscription_plans_service_spec.rb'
- './ee/spec/services/gitlab_subscriptions/plan_upgrade_service_spec.rb'
@@ -3154,18 +3150,7 @@
- './ee/spec/workers/geo/prune_event_log_worker_spec.rb'
- './ee/spec/workers/geo/registry_sync_worker_spec.rb'
- './ee/spec/workers/geo/repositories_clean_up_worker_spec.rb'
-- './ee/spec/workers/geo/repository_cleanup_worker_spec.rb'
-- './ee/spec/workers/geo_repository_destroy_worker_spec.rb'
-- './ee/spec/workers/geo/repository_shard_sync_worker_spec.rb'
-- './ee/spec/workers/geo/repository_sync_worker_spec.rb'
-- './ee/spec/workers/geo/repository_verification/primary/batch_worker_spec.rb'
-- './ee/spec/workers/geo/repository_verification/primary/shard_worker_spec.rb'
-- './ee/spec/workers/geo/repository_verification/primary/single_worker_spec.rb'
-- './ee/spec/workers/geo/repository_verification/secondary/scheduler_worker_spec.rb'
-- './ee/spec/workers/geo/repository_verification/secondary/shard_worker_spec.rb'
-- './ee/spec/workers/geo/repository_verification/secondary/single_worker_spec.rb'
- './ee/spec/workers/geo/reverification_batch_worker_spec.rb'
-- './ee/spec/workers/geo/scheduler/per_shard_scheduler_worker_spec.rb'
- './ee/spec/workers/geo/scheduler/scheduler_worker_spec.rb'
- './ee/spec/workers/geo/secondary/registry_consistency_worker_spec.rb'
- './ee/spec/workers/geo/secondary_usage_data_cron_worker_spec.rb'
@@ -3277,7 +3262,6 @@
- './spec/controllers/admin/jobs_controller_spec.rb'
- './spec/controllers/admin/plan_limits_controller_spec.rb'
- './spec/controllers/admin/projects_controller_spec.rb'
-- './spec/controllers/admin/runners_controller_spec.rb'
- './spec/controllers/admin/sessions_controller_spec.rb'
- './spec/controllers/admin/spam_logs_controller_spec.rb'
- './spec/controllers/admin/topics/avatars_controller_spec.rb'
@@ -3348,7 +3332,6 @@
- './spec/controllers/groups/packages_controller_spec.rb'
- './spec/controllers/groups/registry/repositories_controller_spec.rb'
- './spec/controllers/groups/releases_controller_spec.rb'
-- './spec/controllers/groups/runners_controller_spec.rb'
- './spec/controllers/groups/settings/applications_controller_spec.rb'
- './spec/controllers/groups/settings/ci_cd_controller_spec.rb'
- './spec/controllers/groups/settings/integrations_controller_spec.rb'
@@ -3474,7 +3457,6 @@
- './spec/controllers/projects/releases_controller_spec.rb'
- './spec/controllers/projects/releases/evidences_controller_spec.rb'
- './spec/controllers/projects/repositories_controller_spec.rb'
-- './spec/controllers/projects/runners_controller_spec.rb'
- './spec/controllers/projects/security/configuration_controller_spec.rb'
- './spec/controllers/projects/service_desk_controller_spec.rb'
- './spec/controllers/projects/service_ping_controller_spec.rb'
@@ -4989,8 +4971,6 @@
- './spec/helpers/breadcrumbs_helper_spec.rb'
- './spec/helpers/button_helper_spec.rb'
- './spec/helpers/calendar_helper_spec.rb'
-- './spec/helpers/ci/builds_helper_spec.rb'
-- './spec/helpers/ci/jobs_helper_spec.rb'
- './spec/helpers/ci/pipeline_editor_helper_spec.rb'
- './spec/helpers/ci/pipelines_helper_spec.rb'
- './spec/helpers/ci/secure_files_helper_spec.rb'
@@ -6250,17 +6230,6 @@
- './spec/lib/gitlab/email/hook/delivery_metrics_observer_spec.rb'
- './spec/lib/gitlab/email/hook/disable_email_interceptor_spec.rb'
- './spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb'
-- './spec/lib/gitlab/email/message/build_ios_app_guide_spec.rb'
-- './spec/lib/gitlab/email/message/in_product_marketing/admin_verify_spec.rb'
-- './spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb'
-- './spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb'
-- './spec/lib/gitlab/email/message/in_product_marketing/helper_spec.rb'
-- './spec/lib/gitlab/email/message/in_product_marketing_spec.rb'
-- './spec/lib/gitlab/email/message/in_product_marketing/team_short_spec.rb'
-- './spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb'
-- './spec/lib/gitlab/email/message/in_product_marketing/trial_short_spec.rb'
-- './spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb'
-- './spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb'
- './spec/lib/gitlab/email/message/repository_push_spec.rb'
- './spec/lib/gitlab/email/receiver_spec.rb'
- './spec/lib/gitlab/email/reply_parser_spec.rb'
@@ -7274,7 +7243,6 @@
- './spec/mailers/emails/admin_notification_spec.rb'
- './spec/mailers/emails/auto_devops_spec.rb'
- './spec/mailers/emails/groups_spec.rb'
-- './spec/mailers/emails/in_product_marketing_spec.rb'
- './spec/mailers/emails/issues_spec.rb'
- './spec/mailers/emails/merge_requests_spec.rb'
- './spec/mailers/emails/pages_domains_spec.rb'
@@ -7763,7 +7731,6 @@
- './spec/models/loose_foreign_keys/modification_tracker_spec.rb'
- './spec/models/members/group_member_spec.rb'
- './spec/models/members/last_group_owner_assigner_spec.rb'
-- './spec/models/members/member_task_spec.rb'
- './spec/models/member_spec.rb'
- './spec/models/members/project_member_spec.rb'
- './spec/models/merge_request/approval_removal_settings_spec.rb'
@@ -7975,7 +7942,6 @@
- './spec/models/users/callout_spec.rb'
- './spec/models/users/credit_card_validation_spec.rb'
- './spec/models/users/group_callout_spec.rb'
-- './spec/models/users/in_product_marketing_email_spec.rb'
- './spec/models/users/merge_request_interaction_spec.rb'
- './spec/models/user_spec.rb'
- './spec/models/users/project_callout_spec.rb'
@@ -9387,8 +9353,6 @@
- './spec/services/pages_domains/create_acme_order_service_spec.rb'
- './spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb'
- './spec/services/pages_domains/retry_acme_order_service_spec.rb'
-- './spec/services/pages/migrate_from_legacy_storage_service_spec.rb'
-- './spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb'
- './spec/services/pages/zip_directory_service_spec.rb'
- './spec/services/personal_access_tokens/create_service_spec.rb'
- './spec/services/personal_access_tokens/last_used_service_spec.rb'
@@ -9433,14 +9397,10 @@
- './spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb'
- './spec/services/projects/hashed_storage/migrate_repository_service_spec.rb'
- './spec/services/projects/hashed_storage/migration_service_spec.rb'
-- './spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb'
-- './spec/services/projects/hashed_storage/rollback_repository_service_spec.rb'
-- './spec/services/projects/hashed_storage/rollback_service_spec.rb'
- './spec/services/projects/import_error_filter_spec.rb'
- './spec/services/projects/import_export/export_service_spec.rb'
- './spec/services/projects/import_export/relation_export_service_spec.rb'
- './spec/services/projects/import_service_spec.rb'
-- './spec/services/projects/in_product_marketing_campaign_emails_service_spec.rb'
- './spec/services/projects/lfs_pointers/lfs_download_link_list_service_spec.rb'
- './spec/services/projects/lfs_pointers/lfs_download_service_spec.rb'
- './spec/services/projects/lfs_pointers/lfs_import_service_spec.rb'
@@ -9550,7 +9510,6 @@
- './spec/services/tags/create_service_spec.rb'
- './spec/services/tags/destroy_service_spec.rb'
- './spec/services/task_list_toggle_service_spec.rb'
-- './spec/services/tasks_to_be_done/base_service_spec.rb'
- './spec/services/terraform/remote_state_handler_spec.rb'
- './spec/services/terraform/states/destroy_service_spec.rb'
- './spec/services/terraform/states/trigger_destroy_service_spec.rb'
@@ -9590,7 +9549,6 @@
- './spec/services/users/dismiss_project_callout_service_spec.rb'
- './spec/services/users/email_verification/generate_token_service_spec.rb'
- './spec/services/users/email_verification/validate_token_service_spec.rb'
-- './spec/services/users/in_product_marketing_email_records_spec.rb'
- './spec/services/users/keys_count_service_spec.rb'
- './spec/services/users/last_push_event_service_spec.rb'
- './spec/services/users/refresh_authorized_projects_service_spec.rb'
@@ -9690,7 +9648,6 @@
- './spec/tasks/gitlab/sidekiq_rake_spec.rb'
- './spec/tasks/gitlab/smtp_rake_spec.rb'
- './spec/tasks/gitlab/snippets_rake_spec.rb'
-- './spec/tasks/gitlab/storage_rake_spec.rb'
- './spec/tasks/gitlab/task_helpers_spec.rb'
- './spec/tasks/gitlab/terraform/migrate_rake_spec.rb'
- './spec/tasks/gitlab/update_templates_rake_spec.rb'
@@ -10066,10 +10023,6 @@
- './spec/workers/group_export_worker_spec.rb'
- './spec/workers/group_import_worker_spec.rb'
- './spec/workers/groups/update_statistics_worker_spec.rb'
-- './spec/workers/hashed_storage/migrator_worker_spec.rb'
-- './spec/workers/hashed_storage/project_migrate_worker_spec.rb'
-- './spec/workers/hashed_storage/project_rollback_worker_spec.rb'
-- './spec/workers/hashed_storage/rollbacker_worker_spec.rb'
- './spec/workers/import_issues_csv_worker_spec.rb'
- './spec/workers/integrations/create_external_cross_reference_worker_spec.rb'
- './spec/workers/integrations/execute_worker_spec.rb'
diff --git a/spec/support/shared_contexts/bulk_imports_requests_shared_context.rb b/spec/support/shared_contexts/bulk_imports_requests_shared_context.rb
index 7074b073a0c..997ed448d4d 100644
--- a/spec/support/shared_contexts/bulk_imports_requests_shared_context.rb
+++ b/spec/support/shared_contexts/bulk_imports_requests_shared_context.rb
@@ -23,7 +23,6 @@ RSpec.shared_context 'bulk imports requests context' do |url|
headers: { 'Content-Type' => 'application/json' })
stub_request(:get, "https://gitlab.example.com/api/v4/groups?min_access_level=50&page=1&per_page=20&private_token=demo-pat&search=test&top_level_only=true")
- .with(headers: request_headers)
.to_return(
status: 200,
body: [{
diff --git a/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb b/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb
index 848e333d88b..e4c97fa1143 100644
--- a/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb
+++ b/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb
@@ -5,12 +5,14 @@
# The following let binding should be defined:
# - `integration`: Integration name. See `Integration.available_integration_names`.
RSpec.shared_context 'with integration' do
+ include Integrations::TestHelpers
include JiraIntegrationHelpers
let(:dashed_integration) { integration.dasherize }
let(:integration_method) { Project.integration_association_name(integration) }
let(:integration_klass) { Integration.integration_name_to_model(integration) }
let(:integration_instance) { integration_klass.new }
+ let(:integration_factory) { factory_for(integration_instance) }
# Build a list of all attributes that an integration supports.
let(:integration_attrs_list) do
@@ -44,62 +46,6 @@ RSpec.shared_context 'with integration' do
}
end
- let(:integration_attrs) do
- integration_attrs_list.inject({}) do |hash, k|
- if k =~ /^(token*|.*_token|.*_key)/ && !integration.in?(%w[apple_app_store google_play])
- hash.merge!(k => 'secrettoken')
- elsif integration == 'confluence' && k == :confluence_url
- hash.merge!(k => 'https://example.atlassian.net/wiki')
- elsif integration == 'datadog' && k == :datadog_site
- hash.merge!(k => 'datadoghq.com')
- elsif integration == 'datadog' && k == :datadog_tags
- hash.merge!(k => 'key:value')
- elsif integration == 'packagist' && k == :server
- hash.merge!(k => 'https://packagist.example.com')
- elsif /^(.*_url|url|webhook)/.match?(k)
- hash.merge!(k => "http://example.com")
- elsif integration_klass.method_defined?("#{k}?")
- hash.merge!(k => true)
- elsif integration == 'irker' && k == :recipients
- hash.merge!(k => 'irc://irc.network.net:666/#channel')
- elsif integration == 'irker' && k == :server_port
- hash.merge!(k => 1234)
- elsif integration == 'jira' && k == :jira_issue_transition_id
- hash.merge!(k => '1,2,3')
- elsif integration == 'jira' && k == :jira_issue_transition_automatic # rubocop:disable Lint/DuplicateBranch
- hash.merge!(k => true)
- elsif integration == 'jira' && k == :jira_auth_type # rubocop:disable Lint/DuplicateBranch
- hash.merge!(k => 0)
- elsif integration == 'emails_on_push' && k == :recipients
- hash.merge!(k => 'foo@bar.com')
- elsif (integration == 'slack' || integration == 'mattermost') && k == :labels_to_be_notified_behavior
- hash.merge!(k => "match_any")
- elsif integration == 'campfire' && k == :room
- hash.merge!(k => '1234')
- elsif integration == 'apple_app_store' && k == :app_store_issuer_id
- hash.merge!(k => 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee')
- elsif integration == 'apple_app_store' && k == :app_store_private_key
- hash.merge!(k => File.read('spec/fixtures/ssl_key.pem'))
- elsif integration == 'apple_app_store' && k == :app_store_key_id
- hash.merge!(k => 'ABC1')
- elsif integration == 'apple_app_store' && k == :app_store_private_key_file_name
- hash.merge!(k => 'ssl_key.pem')
- elsif integration == 'apple_app_store' && k == :app_store_protected_refs # rubocop:disable Lint/DuplicateBranch
- hash.merge!(k => true)
- elsif integration == 'google_play' && k == :package_name
- hash.merge!(k => 'com.gitlab.foo.bar')
- elsif integration == 'google_play' && k == :service_account_key
- hash.merge!(k => File.read('spec/fixtures/service_account.json'))
- elsif integration == 'google_play' && k == :service_account_key_file_name
- hash.merge!(k => 'service_account.json')
- elsif integration == 'google_play' && k == :google_play_protected_refs # rubocop:disable Lint/DuplicateBranch
- hash.merge!(k => true)
- else
- hash.merge!(k => "someword")
- end
- end
- end
-
let(:licensed_features) do
{
'github' => :github_integration
@@ -111,15 +57,6 @@ RSpec.shared_context 'with integration' do
stub_jira_integration_test if integration == 'jira'
end
- def initialize_integration(integration, attrs = {})
- record = project.find_or_initialize_integration(integration)
- record.reset_updated_properties if integration == 'datadog'
- record.attributes = attrs
- record.properties = integration_attrs
- record.save!
- record
- end
-
private
def enable_license_for_integration(integration)
diff --git a/spec/support/shared_contexts/merge_request_create_shared_context.rb b/spec/support/shared_contexts/merge_request_create_shared_context.rb
index bf8eeeb7ab6..fc9a3767365 100644
--- a/spec/support/shared_contexts/merge_request_create_shared_context.rb
+++ b/spec/support/shared_contexts/merge_request_create_shared_context.rb
@@ -1,8 +1,6 @@
# frozen_string_literal: true
RSpec.shared_context 'merge request create context' do
- include ContentEditorHelpers
-
let(:user) { create(:user) }
let(:user2) { create(:user) }
let(:target_project) { create(:project, :public, :repository) }
@@ -25,7 +23,5 @@ RSpec.shared_context 'merge request create context' do
source_branch: 'fix',
target_branch: 'master'
})
-
- close_rich_text_promo_popover_if_present
end
end
diff --git a/spec/support/shared_contexts/merge_request_edit_shared_context.rb b/spec/support/shared_contexts/merge_request_edit_shared_context.rb
index 8fe0174b13e..f0e89b0c5f9 100644
--- a/spec/support/shared_contexts/merge_request_edit_shared_context.rb
+++ b/spec/support/shared_contexts/merge_request_edit_shared_context.rb
@@ -1,8 +1,6 @@
# frozen_string_literal: true
RSpec.shared_context 'merge request edit context' do
- include ContentEditorHelpers
-
let(:user) { create(:user) }
let(:user2) { create(:user) }
let!(:milestone) { create(:milestone, project: target_project) }
@@ -27,6 +25,5 @@ RSpec.shared_context 'merge request edit context' do
sign_in(user)
visit edit_project_merge_request_path(target_project, merge_request)
- close_rich_text_promo_popover_if_present
end
end
diff --git a/spec/support/shared_contexts/policies/group_policy_shared_context.rb b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
index 70b48322efd..4564fa23236 100644
--- a/spec/support/shared_contexts/policies/group_policy_shared_context.rb
+++ b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
@@ -19,7 +19,7 @@ RSpec.shared_context 'GroupPolicy context' do
let(:guest_permissions) do
%i[
- read_label read_group upload_file read_namespace read_group_activity
+ read_label read_group upload_file read_namespace read_namespace_via_membership read_group_activity
read_group_issues read_group_boards read_group_labels read_group_milestones
read_group_merge_requests
]
diff --git a/spec/support/shared_contexts/policies/project_policy_table_shared_context.rb b/spec/support/shared_contexts/policies/project_policy_table_shared_context.rb
index d9ea7bc7f82..11f6d816fc1 100644
--- a/spec/support/shared_contexts/policies/project_policy_table_shared_context.rb
+++ b/spec/support/shared_contexts/policies/project_policy_table_shared_context.rb
@@ -72,6 +72,31 @@ RSpec.shared_context 'ProjectPolicyTable context' do
:private | :disabled | :anonymous | nil | 0
end
+ # group_level, :membership, :admin_mode, :expected_count
+ # We need a new table because epics are at a group level only.
+ def permission_table_for_epics_access
+ :public | :admin | true | 1
+ :public | :admin | false | 1
+ :public | :reporter | nil | 1
+ :public | :guest | nil | 1
+ :public | :non_member | nil | 1
+ :public | :anonymous | nil | 1
+
+ :internal | :admin | true | 1
+ :internal | :admin | false | 0
+ :internal | :reporter | nil | 0
+ :internal | :guest | nil | 0
+ :internal | :non_member | nil | 0
+ :internal | :anonymous | nil | 0
+
+ :private | :admin | true | 1
+ :private | :admin | false | 0
+ :private | :reporter | nil | 0
+ :private | :guest | nil | 0
+ :private | :non_member | nil | 0
+ :private | :anonymous | nil | 0
+ end
+
# project_level, :feature_access_level, :membership, :admin_mode, :expected_count
def permission_table_for_guest_feature_access
:public | :enabled | :admin | true | 1
diff --git a/spec/support/shared_contexts/requests/api/nuget_packages_shared_context.rb b/spec/support/shared_contexts/requests/api/nuget_packages_shared_context.rb
index f877d6299bd..2543195e779 100644
--- a/spec/support/shared_contexts/requests/api/nuget_packages_shared_context.rb
+++ b/spec/support/shared_contexts/requests/api/nuget_packages_shared_context.rb
@@ -6,5 +6,7 @@ RSpec.shared_context 'nuget api setup' do
include HttpBasicAuthHelpers
let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:project) { create(:project, :public) }
let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
+ let_it_be_with_reload(:job) { create(:ci_build, user: user, status: :running, project: project) }
end
diff --git a/spec/support/shared_examples/analytics/cycle_analytics/request_params_examples.rb b/spec/support/shared_examples/analytics/cycle_analytics/request_params_examples.rb
index ef9830fbce8..0e7b909fce9 100644
--- a/spec/support/shared_examples/analytics/cycle_analytics/request_params_examples.rb
+++ b/spec/support/shared_examples/analytics/cycle_analytics/request_params_examples.rb
@@ -11,7 +11,9 @@ RSpec.shared_examples 'unlicensed cycle analytics request params' do
}
end
- subject { described_class.new(params) }
+ let(:request_params) { described_class.new(params) }
+
+ subject { request_params }
before do
root_group.add_owner(user)
@@ -114,13 +116,13 @@ RSpec.shared_examples 'unlicensed cycle analytics request params' do
end
describe 'use_aggregated_data_collector param' do
- subject(:value) { described_class.new(params).to_data_collector_params[:use_aggregated_data_collector] }
+ subject(:value) { request_params.to_data_collector_params[:use_aggregated_data_collector] }
it { is_expected.to eq(false) }
end
describe 'feature availablity data attributes' do
- subject(:value) { described_class.new(params).to_data_attributes }
+ subject(:value) { request_params.to_data_attributes }
it 'disables all paid features' do
is_expected.to match(a_hash_including(enable_tasks_by_type_chart: 'false',
@@ -128,4 +130,28 @@ RSpec.shared_examples 'unlicensed cycle analytics request params' do
enable_projects_filter: 'false'))
end
end
+
+ describe '#to_data_collector_params' do
+ context 'when adding licensed parameters' do
+ subject(:data_collector_params) { request_params.to_data_collector_params }
+
+ before do
+ params.merge!(
+ weight: 1,
+ epic_id: 2,
+ iteration_id: 3,
+ my_reaction_emoji: 'thumbsup',
+ not: { assignee_username: 'test' }
+ )
+ end
+
+ it 'excludes the attributes from the data collector params' do
+ expect(data_collector_params).to exclude(:weight)
+ expect(data_collector_params).to exclude(:epic_id)
+ expect(data_collector_params).to exclude(:iteration_id)
+ expect(data_collector_params).to exclude(:my_reaction_emoji)
+ expect(data_collector_params).to exclude(:not)
+ end
+ end
+ end
end
diff --git a/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb b/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
index ddd3bbd636a..c86fcf5ae20 100644
--- a/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
+++ b/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
RSpec.shared_examples 'multiple issue boards' do
+ include ListboxHelpers
+
context 'authorized user' do
before do
stub_feature_flags(apollo_boards: false)
@@ -27,7 +29,7 @@ RSpec.shared_examples 'multiple issue boards' do
it 'switches current board' do
in_boards_switcher_dropdown do
- click_button board2.name
+ select_listbox_item(board2.name)
end
wait_for_requests
@@ -67,7 +69,7 @@ RSpec.shared_examples 'multiple issue boards' do
it 'adds a list to the none default board' do
in_boards_switcher_dropdown do
- click_button board2.name
+ select_listbox_item(board2.name)
end
wait_for_requests
@@ -89,7 +91,7 @@ RSpec.shared_examples 'multiple issue boards' do
expect(page).to have_selector('.board', count: 3)
in_boards_switcher_dropdown do
- click_button board.name
+ select_listbox_item(board.name)
end
wait_for_requests
@@ -101,7 +103,7 @@ RSpec.shared_examples 'multiple issue boards' do
assert_boards_nav_active
in_boards_switcher_dropdown do
- click_button board2.name
+ select_listbox_item(board2.name)
end
assert_boards_nav_active
@@ -109,7 +111,7 @@ RSpec.shared_examples 'multiple issue boards' do
it 'switches current board back' do
in_boards_switcher_dropdown do
- click_button board.name
+ select_listbox_item(board.name)
end
wait_for_requests
@@ -142,7 +144,7 @@ RSpec.shared_examples 'multiple issue boards' do
it 'switches current board' do
in_boards_switcher_dropdown do
- click_button board2.name
+ select_listbox_item(board2.name)
end
wait_for_requests
@@ -165,7 +167,7 @@ RSpec.shared_examples 'multiple issue boards' do
wait_for_requests
- dropdown_selector = '[data-testid="boards-selector"] .dropdown-menu'
+ dropdown_selector = '[data-testid="boards-selector"] .gl-new-dropdown'
page.within(dropdown_selector) do
yield
end
diff --git a/spec/support/shared_examples/bulk_imports/common/pipelines/wiki_pipeline_examples.rb b/spec/support/shared_examples/bulk_imports/common/pipelines/wiki_pipeline_examples.rb
index a9edf18d562..5c1f505d300 100644
--- a/spec/support/shared_examples/bulk_imports/common/pipelines/wiki_pipeline_examples.rb
+++ b/spec/support/shared_examples/bulk_imports/common/pipelines/wiki_pipeline_examples.rb
@@ -52,7 +52,6 @@ RSpec.shared_examples 'wiki pipeline imports a wiki for an entity' do
subject.run
- expect(tracker.failed?).to eq(true)
expect(tracker.entity.failures.first).to be_present
expect(tracker.entity.failures.first.exception_message).to eq('Only allowed schemes are http, https')
end
@@ -97,11 +96,7 @@ RSpec.shared_examples 'wiki pipeline imports a wiki for an entity' do
context 'when response is not 403' do
let(:response_double) { instance_double(HTTParty::Response, forbidden?: false, not_found?: false, code: 301) }
- it 'marks tracker as failed' do
- subject.run
-
- expect(tracker.failed?).to eq(true)
- end
+ include_examples 'does not raise an error'
end
end
end
diff --git a/spec/support/shared_examples/ci/deployable_shared_examples.rb b/spec/support/shared_examples/ci/deployable_shared_examples.rb
index 4f43d38e604..0781eec1b4b 100644
--- a/spec/support/shared_examples/ci/deployable_shared_examples.rb
+++ b/spec/support/shared_examples/ci/deployable_shared_examples.rb
@@ -166,6 +166,28 @@ RSpec.shared_examples 'a deployable job' do
expect(deployment).to be_failed
end
+
+ context 'when the job is a stop job' do
+ before do
+ job.update!(environment: 'review', options: { environment: { action: 'stop' } })
+ end
+
+ it 'enqueues Environments::StopJobFailedWorker' do
+ expect(Environments::StopJobFailedWorker)
+ .to receive(:perform_async)
+
+ subject
+ end
+ end
+
+ context 'when the job is not a stop job' do
+ it 'does not enqueue Environments::StopJobFailedWorker' do
+ expect(Environments::StopJobFailedWorker)
+ .not_to receive(:perform_async)
+
+ subject
+ end
+ end
end
context 'when transits to skipped' do
diff --git a/spec/support/shared_examples/config/metrics/every_metric_definition_shared_examples.rb b/spec/support/shared_examples/config/metrics/every_metric_definition_shared_examples.rb
index 14d0ac81250..53d80c64827 100644
--- a/spec/support/shared_examples/config/metrics/every_metric_definition_shared_examples.rb
+++ b/spec/support/shared_examples/config/metrics/every_metric_definition_shared_examples.rb
@@ -79,7 +79,7 @@ RSpec.shared_examples 'every metric definition' do
end
it 'is included in the Usage Ping hash structure' do
- msg = "see https://docs.gitlab.com/ee/development/service_ping/metrics_dictionary.html#metrics-added-dynamic-to-service-ping-payload"
+ msg = "see https://docs.gitlab.com/ee/development/internal_analytics/metrics/metrics_dictionary.html#metrics-added-dynamic-to-service-ping-payload"
expect(expected_metric_files_key_paths).to match_array(usage_ping_key_paths), msg
end
@@ -114,7 +114,8 @@ RSpec.shared_examples 'every metric definition' do
Gitlab::Usage::Metrics::Instrumentations::RedisMetric,
Gitlab::Usage::Metrics::Instrumentations::RedisHLLMetric,
Gitlab::Usage::Metrics::Instrumentations::NumbersMetric,
- Gitlab::Usage::Metrics::Instrumentations::PrometheusMetric
+ Gitlab::Usage::Metrics::Instrumentations::PrometheusMetric,
+ Gitlab::Usage::Metrics::Instrumentations::TotalCountMetric
]
end
@@ -125,10 +126,23 @@ RSpec.shared_examples 'every metric definition' do
].freeze
end
+ let(:removed_classes) do
+ [
+ Gitlab::Usage::Metrics::Instrumentations::InProductMarketingEmailCtaClickedMetric,
+ Gitlab::Usage::Metrics::Instrumentations::InProductMarketingEmailSentMetric
+ ].freeze
+ end
+
+ def metric_not_used?(constant)
+ parent_metric_classes.include?(constant) ||
+ ignored_classes.include?(constant) ||
+ removed_classes.include?(constant)
+ end
+
def assert_uses_all_nested_classes(parent_module)
parent_module.constants(false).each do |const_name|
constant = parent_module.const_get(const_name, false)
- next if parent_metric_classes.include?(constant) || ignored_classes.include?(constant)
+ next if metric_not_used?(constant)
case constant
when Class
diff --git a/spec/support/shared_examples/controllers/base_action_controller_shared_examples.rb b/spec/support/shared_examples/controllers/base_action_controller_shared_examples.rb
new file mode 100644
index 00000000000..5f236f25d35
--- /dev/null
+++ b/spec/support/shared_examples/controllers/base_action_controller_shared_examples.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+# Requires `request` subject to be defined
+#
+# subject(:request) { get root_path }
+RSpec.shared_examples 'Base action controller' do
+ describe 'security headers' do
+ describe 'Cross-Origin-Opener-Policy' do
+ it 'sets the header' do
+ request
+
+ expect(response.headers['Cross-Origin-Opener-Policy']).to eq('same-origin')
+ end
+
+ context 'when coop_header feature flag is disabled' do
+ it 'does not set the header' do
+ stub_feature_flags(coop_header: false)
+
+ request
+
+ expect(response.headers['Cross-Origin-Opener-Policy']).to be_nil
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/controllers/concerns/onboarding/redirectable_shared_examples.rb b/spec/support/shared_examples/controllers/concerns/onboarding/redirectable_shared_examples.rb
new file mode 100644
index 00000000000..b448ea16128
--- /dev/null
+++ b/spec/support/shared_examples/controllers/concerns/onboarding/redirectable_shared_examples.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples Onboarding::Redirectable do
+ it { is_expected.to redirect_to dashboard_projects_path }
+
+ context 'when the new user already has any accepted group membership' do
+ let!(:single_member) { create(:group_member, invite_email: email) }
+
+ it 'redirects to activity group path with a flash message' do
+ post_create
+
+ expect(response).to redirect_to activity_group_path(single_member.source)
+ expect(controller).to set_flash[:notice].to(/You have been granted/)
+ end
+
+ context 'when the new user already has more than 1 accepted group membership' do
+ let!(:last_member) { create(:group_member, invite_email: email) }
+
+ it 'redirects to the last member activity group path without a flash message' do
+ post_create
+
+ expect(response).to redirect_to activity_group_path(last_member.source)
+ expect(controller).not_to set_flash[:notice].to(/You have been granted/)
+ end
+ end
+
+ context 'when the member has an orphaned source at the time of registering' do
+ before do
+ single_member.source.delete
+ end
+
+ it { is_expected.to redirect_to dashboard_projects_path }
+ end
+ end
+end
diff --git a/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb b/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb
index af1843bae28..c921da10347 100644
--- a/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb
@@ -161,8 +161,6 @@ RSpec.shared_examples 'a GitHub-ish import controller: GET status' do
group.add_owner(user)
client = stub_client(repos: repos, orgs: [org], org_repos: [org_repo])
allow(client).to receive(:each_page).and_return([double('client', objects: repos)].to_enum)
- # GitHub controller has filtering done using GitHub Search API
- stub_feature_flags(remove_legacy_github_client: false)
end
it 'filters list of repositories by name' do
diff --git a/spec/support/shared_examples/controllers/internal_event_tracking_examples.rb b/spec/support/shared_examples/controllers/internal_event_tracking_examples.rb
index 05068cd60af..0c19865999f 100644
--- a/spec/support/shared_examples/controllers/internal_event_tracking_examples.rb
+++ b/spec/support/shared_examples/controllers/internal_event_tracking_examples.rb
@@ -2,7 +2,7 @@
# Requires a context containing:
# - subject
-# - action
+# - event
# - user
# Optionally, the context can contain:
# - project
@@ -36,13 +36,13 @@ RSpec.shared_examples 'internal event tracking' do
expect(Gitlab::Tracking::ServicePingContext)
.to have_received(:new)
- .with(data_source: :redis_hll, event: action)
+ .with(data_source: :redis_hll, event: event)
.at_least(:once)
expect(fake_tracker).to have_received(:event)
.with(
'InternalEventTracking',
- action,
+ event,
context: [
an_instance_of(SnowplowTracker::SelfDescribingJson),
an_instance_of(SnowplowTracker::SelfDescribingJson)
diff --git a/spec/support/shared_examples/features/discussion_comments_shared_example.rb b/spec/support/shared_examples/features/discussion_comments_shared_example.rb
index 82bddb9f5a4..867981297ab 100644
--- a/spec/support/shared_examples/features/discussion_comments_shared_example.rb
+++ b/spec/support/shared_examples/features/discussion_comments_shared_example.rb
@@ -150,8 +150,6 @@ RSpec.shared_examples 'thread comments for commit and snippet' do |resource_name
end
RSpec.shared_examples 'thread comments for issue, epic and merge request' do |resource_name|
- include ContentEditorHelpers
-
let(:form_selector) { '.js-main-target-form' }
let(:dropdown_selector) { "#{form_selector} .comment-type-dropdown" }
let(:toggle_selector) { "#{dropdown_selector} .gl-new-dropdown-toggle" }
@@ -161,10 +159,6 @@ RSpec.shared_examples 'thread comments for issue, epic and merge request' do |re
let(:comments_selector) { '.timeline > .note.timeline-entry:not(.being-posted)' }
let(:comment) { 'My comment' }
- before do
- close_rich_text_promo_popover_if_present
- end
-
it 'clicking "Comment" will post a comment' do
expect(page).to have_selector toggle_selector
diff --git a/spec/support/shared_examples/features/variable_list_drawer_shared_examples.rb b/spec/support/shared_examples/features/variable_list_drawer_shared_examples.rb
index 9f01c69608d..b438a23aafd 100644
--- a/spec/support/shared_examples/features/variable_list_drawer_shared_examples.rb
+++ b/spec/support/shared_examples/features/variable_list_drawer_shared_examples.rb
@@ -1,23 +1,237 @@
# frozen_string_literal: true
RSpec.shared_examples 'variable list drawer' do
- it 'adds a new CI variable' do
- click_button('Add variable')
+ it 'renders the list drawer' do
+ open_drawer
- # For now, we just check that the drawer is displayed
expect(page).to have_selector('[data-testid="ci-variable-drawer"]')
+ end
+
+ it 'adds a new CI variable' do
+ open_drawer
+
+ fill_variable('NEW_KEY', 'NEW_VALUE')
+ click_add_variable
+
+ wait_for_requests
+
+ page.within('[data-testid="ci-variable-table"]') do
+ expect(first(".js-ci-variable-row td[data-label='#{s_('CiVariables|Key')}']")).to have_content('NEW_KEY')
+
+ click_button('Reveal values')
+
+ expect(first(".js-ci-variable-row td[data-label='#{s_('CiVariables|Value')}']")).to have_content('NEW_VALUE')
+ end
+ end
+
+ it 'allows variable with empty value to be created' do
+ open_drawer
+
+ fill_variable('NEW_KEY')
+
+ page.within('[data-testid="ci-variable-drawer"]') do
+ expect(find_button('Add variable', disabled: false)).to be_present
+ end
+ end
+
+ it 'defaults to unmasked, expanded' do
+ open_drawer
+
+ fill_variable('NEW_KEY')
+ click_add_variable
+
+ wait_for_requests
+
+ page.within('[data-testid="ci-variable-table"]') do
+ key_column = first(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Key')}']")
+
+ expect(key_column).not_to have_content(s_('CiVariables|Masked'))
+ expect(key_column).to have_content(s_('CiVariables|Expanded'))
+ end
+ end
+
+ context 'with application setting for protected attribute' do
+ context 'when application setting is true' do
+ before do
+ stub_application_setting(protected_ci_variables: true)
+
+ visit page_path
+ end
+
+ it 'defaults to protected' do
+ open_drawer
+
+ page.within('[data-testid="ci-variable-drawer"]') do
+ expect(find('[data-testid="ci-variable-protected-checkbox"]')).to be_checked
+ end
+ end
+ end
+
+ context 'when application setting is false' do
+ before do
+ stub_application_setting(protected_ci_variables: false)
+
+ visit page_path
+ end
- # TODO: Add tests for ADDING a variable via drawer when feature is available
+ it 'defaults to unprotected' do
+ open_drawer
+
+ page.within('[data-testid="ci-variable-drawer"]') do
+ expect(find('[data-testid="ci-variable-protected-checkbox"]')).not_to be_checked
+ end
+ end
+ end
end
it 'edits a variable' do
+ key_column = first(".js-ci-variable-row td[data-label='#{s_('CiVariables|Key')}']")
+ value_column = first(".js-ci-variable-row td[data-label='#{s_('CiVariables|Value')}']")
+
+ expect(key_column).to have_content('test_key')
+ expect(key_column).not_to have_content(s_('CiVariables|Protected'))
+ expect(key_column).to have_content(s_('CiVariables|Masked'))
+ expect(key_column).to have_content(s_('CiVariables|Expanded'))
+
+ click_button('Edit')
+
+ fill_variable('EDITED_KEY', 'EDITED_VALUE')
+ toggle_protected
+ toggle_masked
+ toggle_expanded
+ click_button('Edit variable')
+
+ wait_for_requests
+
page.within('[data-testid="ci-variable-table"]') do
- click_button('Edit')
+ expect(key_column).to have_content('EDITED_KEY')
+ expect(key_column).to have_content(s_('CiVariables|Protected'))
+ expect(key_column).not_to have_content(s_('CiVariables|Masked'))
+ expect(key_column).not_to have_content(s_('CiVariables|Expanded'))
+
+ click_button('Reveal values')
+
+ expect(value_column).to have_content('EDITED_VALUE')
end
+ end
- # For now, we just check that the drawer is displayed
- expect(page).to have_selector('[data-testid="ci-variable-drawer"]')
+ it 'shows validation error for duplicate keys' do
+ open_drawer
+
+ fill_variable('NEW_KEY', 'NEW_VALUE')
+ click_add_variable
+
+ wait_for_requests
+
+ open_drawer
- # TODO: Add tests for EDITING a variable via drawer when feature is available
+ fill_variable('NEW_KEY', 'NEW_VALUE')
+ click_add_variable
+
+ wait_for_requests
+
+ expect(find('.flash-container')).to be_present
+ expect(find('[data-testid="alert-danger"]').text).to have_content('(NEW_KEY) has already been taken')
+ end
+
+ it 'shows validation error for unmaskable values' do
+ open_drawer
+
+ toggle_masked
+ fill_variable('EMPTY_MASK_KEY', '???')
+
+ expect(page).to have_content('This variable value does not meet the masking requirements.')
+ page.within('[data-testid="ci-variable-drawer"]') do
+ expect(find_button('Add variable', disabled: true)).to be_present
+ end
+ end
+
+ it 'handles multiple edits and a deletion' do
+ # Create two variables
+ open_drawer
+ fill_variable('akey', 'akeyvalue')
+ click_add_variable
+
+ wait_for_requests
+
+ open_drawer
+ fill_variable('zkey', 'zkeyvalue')
+ click_add_variable
+
+ wait_for_requests
+
+ expect(page).to have_selector('.js-ci-variable-row', count: 3)
+
+ # Remove the `akey` variable
+ page.within('[data-testid="ci-variable-table"]') do
+ page.within('.js-ci-variable-row:first-child') do
+ click_button('Edit')
+ end
+ end
+
+ page.within('[data-testid="ci-variable-drawer"]') do
+ click_button('Delete variable') # opens confirmation modal
+ end
+
+ page.within('[data-testid="ci-variable-drawer-confirm-delete-modal"]') do
+ click_button('Delete')
+ end
+
+ wait_for_requests
+
+ # Add another variable
+ open_drawer
+ fill_variable('ckey', 'ckeyvalue')
+ click_add_variable
+
+ wait_for_requests
+
+ # expect to find 3 rows of variables in alphabetical order
+ expect(page).to have_selector('.js-ci-variable-row', count: 3)
+ rows = all('.js-ci-variable-row')
+ expect(rows[0].find('td[data-label="Key"]')).to have_content('ckey')
+ expect(rows[1].find('td[data-label="Key"]')).to have_content('test_key')
+ expect(rows[2].find('td[data-label="Key"]')).to have_content('zkey')
+ end
+
+ private
+
+ def open_drawer
+ page.within('[data-testid="ci-variable-table"]') do
+ click_button('Add variable')
+ end
+ end
+
+ def click_add_variable
+ page.within('[data-testid="ci-variable-drawer"]') do
+ click_button('Add variable')
+ end
+ end
+
+ def fill_variable(key, value = '')
+ wait_for_requests
+
+ page.within('[data-testid="ci-variable-drawer"]') do
+ find('[data-testid="ci-variable-key"] input').set(key)
+ find('[data-testid="ci-variable-value"]').set(value) if value.present?
+ end
+ end
+
+ def toggle_protected
+ page.within('[data-testid="ci-variable-drawer"]') do
+ find('[data-testid="ci-variable-protected-checkbox"]').click
+ end
+ end
+
+ def toggle_masked
+ page.within('[data-testid="ci-variable-drawer"]') do
+ find('[data-testid="ci-variable-masked-checkbox"]').click
+ end
+ end
+
+ def toggle_expanded
+ page.within('[data-testid="ci-variable-drawer"]') do
+ find('[data-testid="ci-variable-expanded-checkbox"]').click
+ end
end
end
diff --git a/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
index c3df89c8002..ed885d7a226 100644
--- a/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
@@ -6,7 +6,6 @@
RSpec.shared_examples 'User creates wiki page' do
include WikiHelpers
- include ContentEditorHelpers
before do
sign_in(user)
@@ -19,7 +18,6 @@ RSpec.shared_examples 'User creates wiki page' do
wait_for_svg_to_be_loaded(example)
click_link "Create your first page"
- close_rich_text_promo_popover_if_present
end
it 'shows all available formats in the dropdown' do
@@ -192,7 +190,6 @@ RSpec.shared_examples 'User creates wiki page' do
context "via the `new wiki page` page", :js do
it "creates a page with a single word" do
click_link("New page")
- close_rich_text_promo_popover_if_present
page.within(".wiki-form") do
fill_in(:wiki_title, with: "foo")
@@ -211,7 +208,6 @@ RSpec.shared_examples 'User creates wiki page' do
it "creates a page with spaces in the name", :js do
click_link("New page")
- close_rich_text_promo_popover_if_present
page.within(".wiki-form") do
fill_in(:wiki_title, with: "Spaces in the name")
@@ -230,7 +226,6 @@ RSpec.shared_examples 'User creates wiki page' do
it "creates a page with hyphens in the name", :js do
click_link("New page")
- close_rich_text_promo_popover_if_present
page.within(".wiki-form") do
fill_in(:wiki_title, with: "hyphens-in-the-name")
@@ -254,7 +249,6 @@ RSpec.shared_examples 'User creates wiki page' do
context 'when a server side validation error is returned' do
it "still displays edit form", :js do
click_link("New page")
- close_rich_text_promo_popover_if_present
page.within(".wiki-form") do
fill_in(:wiki_title, with: "home")
@@ -272,7 +266,6 @@ RSpec.shared_examples 'User creates wiki page' do
it "shows the emoji autocompletion dropdown", :js do
click_link("New page")
- close_rich_text_promo_popover_if_present
page.within(".wiki-form") do
find("#wiki_content").native.send_keys("")
diff --git a/spec/support/shared_examples/features/wiki/user_previews_wiki_changes_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_previews_wiki_changes_shared_examples.rb
index 827c875494a..ca68df9a89b 100644
--- a/spec/support/shared_examples/features/wiki/user_previews_wiki_changes_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_previews_wiki_changes_shared_examples.rb
@@ -5,8 +5,6 @@
# user
RSpec.shared_examples 'User previews wiki changes' do
- include ContentEditorHelpers
-
let(:wiki_page) { build(:wiki_page, wiki: wiki) }
before do
@@ -76,7 +74,6 @@ RSpec.shared_examples 'User previews wiki changes' do
before do
wiki_page.create # rubocop:disable Rails/SaveBang
visit wiki_page_path(wiki, wiki_page, action: :edit)
- close_rich_text_promo_popover_if_present
end
it_behaves_like 'relative links' do
diff --git a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
index d06f04db1ce..784de102f4f 100644
--- a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
@@ -6,7 +6,6 @@
RSpec.shared_examples 'User updates wiki page' do
include WikiHelpers
- include ContentEditorHelpers
let(:diagramsnet_url) { 'https://embed.diagrams.net' }
@@ -23,7 +22,6 @@ RSpec.shared_examples 'User updates wiki page' do
wait_for_svg_to_be_loaded(example)
click_link "Create your first page"
- close_rich_text_promo_popover_if_present
end
it 'redirects back to the home edit page' do
@@ -47,7 +45,7 @@ RSpec.shared_examples 'User updates wiki page' do
first(:link, text: 'three').click
- expect(find('[data-testid="wiki_page_title"]')).to have_content('three')
+ expect(find('[data-testid="wiki-page-title"]')).to have_content('three')
click_on('Edit')
@@ -70,7 +68,6 @@ RSpec.shared_examples 'User updates wiki page' do
visit(wiki_path(wiki))
click_link('Edit')
- close_rich_text_promo_popover_if_present
end
it 'updates a page', :js do
@@ -164,7 +161,6 @@ RSpec.shared_examples 'User updates wiki page' do
before do
visit wiki_page_path(wiki, wiki_page, action: :edit)
- close_rich_text_promo_popover_if_present
end
it 'moves the page to the root folder', :js do
@@ -235,7 +231,6 @@ RSpec.shared_examples 'User updates wiki page' do
stub_application_setting(wiki_page_max_content_bytes: 10)
visit wiki_page_path(wiki_page.wiki, wiki_page, action: :edit)
- close_rich_text_promo_popover_if_present
end
it 'allows changing the title if the content does not change', :js do
diff --git a/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb
index 3ee7725305e..254682e1a3a 100644
--- a/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb
@@ -6,7 +6,6 @@
RSpec.shared_examples 'User views a wiki page' do
include WikiHelpers
- include ContentEditorHelpers
let(:path) { 'image.png' }
let(:wiki_page) do
@@ -58,7 +57,7 @@ RSpec.shared_examples 'User views a wiki page' do
first(:link, text: 'three').click
- expect(find('[data-testid="wiki_page_title"]')).to have_content('three')
+ expect(find('[data-testid="wiki-page-title"]')).to have_content('three')
click_on('Edit')
@@ -123,7 +122,7 @@ RSpec.shared_examples 'User views a wiki page' do
it 'shows the page history' do
visit(wiki_page_path(wiki, wiki_page))
- expect(page).to have_selector('[data-testid="wiki_edit_button"]')
+ expect(page).to have_selector('[data-testid="wiki-edit-button"]')
click_on('Page history')
@@ -135,7 +134,7 @@ RSpec.shared_examples 'User views a wiki page' do
it 'does not show the "Edit" button' do
visit(wiki_page_path(wiki, wiki_page, version_id: wiki_page.versions.last.id))
- expect(page).not_to have_selector('[data-testid="wiki_edit_button"]')
+ expect(page).not_to have_selector('[data-testid="wiki-edit-button"]')
end
context 'show the diff' do
@@ -210,7 +209,7 @@ RSpec.shared_examples 'User views a wiki page' do
it 'preserves the special characters' do
visit(wiki_page_path(wiki, wiki_page))
- expect(page).to have_css('[data-testid="wiki_page_title"]', text: title)
+ expect(page).to have_css('[data-testid="wiki-page-title"]', text: title)
expect(page).to have_css('.wiki-pages li', text: title)
end
end
@@ -225,7 +224,7 @@ RSpec.shared_examples 'User views a wiki page' do
it 'safely displays the page' do
visit(wiki_page_path(wiki, wiki_page))
- expect(page).to have_selector('[data-testid="wiki_page_title"]', text: title)
+ expect(page).to have_selector('[data-testid="wiki-page-title"]', text: title)
expect(page).to have_content('foo bar')
end
end
@@ -252,7 +251,7 @@ RSpec.shared_examples 'User views a wiki page' do
end
it 'does not show "Edit" button' do
- expect(page).not_to have_selector('[data-testid="wiki_edit_button"]')
+ expect(page).not_to have_selector('[data-testid="wiki-edit-button"]')
end
it 'shows error' do
@@ -270,7 +269,6 @@ RSpec.shared_examples 'User views a wiki page' do
wait_for_svg_to_be_loaded
click_link "Create your first page"
- close_rich_text_promo_popover_if_present
expect(page).to have_content('Create New Page')
end
diff --git a/spec/support/shared_examples/features/work_items_shared_examples.rb b/spec/support/shared_examples/features/work_items_shared_examples.rb
index 18e0cfdad00..ff79f180c64 100644
--- a/spec/support/shared_examples/features/work_items_shared_examples.rb
+++ b/spec/support/shared_examples/features/work_items_shared_examples.rb
@@ -218,15 +218,33 @@ RSpec.shared_examples 'work items assignees' do
expect(work_item.reload.assignees).not_to include(user)
end
+
+ it 'updates the assignee in real-time' do
+ Capybara::Session.new(:other_session)
+
+ using_session :other_session do
+ visit work_items_path
+ expect(work_item.reload.assignees).not_to include(user)
+ end
+
+ find('[data-testid="work-item-assignees-input"]').hover
+ find('[data-testid="assign-self"]').click
+ wait_for_requests
+
+ expect(work_item.reload.assignees).to include(user)
+
+ using_session :other_session do
+ expect(work_item.reload.assignees).to include(user)
+ end
+ end
end
RSpec.shared_examples 'work items labels' do
let(:label_title_selector) { '[data-testid="labels-title"]' }
+ let(:labels_input_selector) { '[data-testid="work-item-labels-input"]' }
it 'successfully assigns a label' do
- label = create(:label, project: work_item.project, title: "testing-label")
-
- find('[data-testid="work-item-labels-input"]').fill_in(with: label.title)
+ find(labels_input_selector).fill_in(with: label.title)
wait_for_requests
# submit and simulate blur to save
@@ -236,6 +254,88 @@ RSpec.shared_examples 'work items labels' do
expect(work_item.labels).to include(label)
end
+
+ it 'successfully assigns multiple labels' do
+ label2 = create(:label, project: project, title: "testing-label-2")
+
+ find(labels_input_selector).fill_in(with: label.title)
+ wait_for_requests
+ send_keys(:enter)
+
+ find(labels_input_selector).fill_in(with: label2.title)
+ wait_for_requests
+ send_keys(:enter)
+
+ find(label_title_selector).click
+ wait_for_requests
+
+ expect(work_item.labels).to include(label)
+ expect(work_item.labels).to include(label2)
+ end
+
+ it 'removes all labels on clear all button click' do
+ find(labels_input_selector).fill_in(with: label.title)
+ wait_for_requests
+
+ send_keys(:enter)
+ find(label_title_selector).click
+ wait_for_requests
+
+ expect(work_item.labels).to include(label)
+
+ within(labels_input_selector) do
+ find('input').click
+ find('[data-testid="clear-all-button"]').click
+ end
+
+ find(label_title_selector).click
+ wait_for_requests
+
+ expect(work_item.labels).not_to include(label)
+ end
+
+ it 'removes label on clicking badge cross button' do
+ find(labels_input_selector).fill_in(with: label.title)
+ wait_for_requests
+
+ send_keys(:enter)
+ find(label_title_selector).click
+ wait_for_requests
+
+ expect(page).to have_text(label.title)
+
+ within(labels_input_selector) do
+ find('[data-testid="close-icon"]').click
+ end
+
+ find(label_title_selector).click
+ wait_for_requests
+
+ expect(work_item.labels).not_to include(label)
+ end
+
+ it 'updates the labels in real-time' do
+ Capybara::Session.new(:other_session)
+
+ using_session :other_session do
+ visit work_items_path
+ expect(page).not_to have_text(label.title)
+ end
+
+ find(labels_input_selector).fill_in(with: label.title)
+ wait_for_requests
+
+ send_keys(:enter)
+ find(label_title_selector).click
+ wait_for_requests
+
+ expect(page).to have_text(label.title)
+
+ using_session :other_session do
+ wait_for_requests
+ expect(page).to have_text(label.title)
+ end
+ end
end
RSpec.shared_examples 'work items description' do
diff --git a/spec/support/shared_examples/graphql/design_fields_shared_examples.rb b/spec/support/shared_examples/graphql/design_fields_shared_examples.rb
index efbcfaf0e91..aa3a1d78df8 100644
--- a/spec/support/shared_examples/graphql/design_fields_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/design_fields_shared_examples.rb
@@ -32,7 +32,7 @@ RSpec.shared_examples 'a GraphQL type with design fields' do
let(:query) { GraphQL::Query.new(schema) }
let(:context) { query.context }
let(:field) { described_class.fields['image'] }
- let(:args) { GraphQL::Query::Arguments::NO_ARGS }
+ let(:args) { { parent: nil } }
let(:instance) { instantiate(object_id) }
let(:instance_b) { instantiate(object_id_b) }
@@ -42,13 +42,12 @@ RSpec.shared_examples 'a GraphQL type with design fields' do
end
def resolve_image(instance)
- field.resolve_field(instance, args, context)
+ field.resolve(instance, args, context)
end
before do
context[:current_user] = current_user
allow(Ability).to receive(:allowed?).with(current_user, :read_design, anything).and_return(true)
- allow(context).to receive(:parent).and_return(nil)
end
it 'resolves to the design image URL' do
diff --git a/spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb b/spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb
index 64f811771ec..799f82a9ec5 100644
--- a/spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb
@@ -14,9 +14,21 @@ RSpec.shared_context 'exposing regular notes on a noteable in GraphQL' do
let(:user) { note.author }
context 'for regular notes' do
+ let!(:system_note) do
+ create(
+ :note,
+ system: true,
+ noteable: noteable,
+ project: (noteable.project if noteable.respond_to?(:project))
+ )
+ end
+
+ let(:filters) { "" }
+
let(:query) do
note_fields = <<~NOTES
- notes {
+ notes #{filters} {
+ count
edges {
node {
#{all_graphql_fields_for('Note', max_depth: 1)}
@@ -42,11 +54,12 @@ RSpec.shared_context 'exposing regular notes on a noteable in GraphQL' do
end
end
- it 'includes the note' do
+ it 'includes all notes' do
post_graphql(query, current_user: user)
- expect(noteable_data['notes']['edges'].first['node']['body'])
- .to eq(note.note)
+ expect(noteable_data['notes']['count']).to eq(2)
+ expect(noteable_data['notes']['edges'][0]['node']['body']).to eq(system_note.note)
+ expect(noteable_data['notes']['edges'][1]['node']['body']).to eq(note.note)
end
it 'avoids N+1 queries' do
@@ -69,6 +82,42 @@ RSpec.shared_context 'exposing regular notes on a noteable in GraphQL' do
expect { post_graphql(query, current_user: user) }.not_to exceed_query_limit(control)
expect_graphql_errors_to_be_empty
end
+
+ context 'when filter is provided' do
+ context 'when filter is set to ALL_NOTES' do
+ let(:filters) { "(filter: ALL_NOTES)" }
+
+ it 'returns all the notes' do
+ post_graphql(query, current_user: user)
+
+ expect(noteable_data['notes']['count']).to eq(2)
+ expect(noteable_data['notes']['edges'][0]['node']['body']).to eq(system_note.note)
+ expect(noteable_data['notes']['edges'][1]['node']['body']).to eq(note.note)
+ end
+ end
+
+ context 'when filter is set to ONLY_COMMENTS' do
+ let(:filters) { "(filter: ONLY_COMMENTS)" }
+
+ it 'returns only the comments' do
+ post_graphql(query, current_user: user)
+
+ expect(noteable_data['notes']['count']).to eq(1)
+ expect(noteable_data['notes']['edges'][0]['node']['body']).to eq(note.note)
+ end
+ end
+
+ context 'when filter is set to ONLY_ACTIVITY' do
+ let(:filters) { "(filter: ONLY_ACTIVITY)" }
+
+ it 'returns only the activity notes' do
+ post_graphql(query, current_user: user)
+
+ expect(noteable_data['notes']['count']).to eq(1)
+ expect(noteable_data['notes']['edges'][0]['node']['body']).to eq(system_note.note)
+ end
+ end
+ end
end
context "for discussions" do
diff --git a/spec/support/shared_examples/initializers/uses_gitlab_url_blocker_shared_examples.rb b/spec/support/shared_examples/initializers/uses_gitlab_url_blocker_shared_examples.rb
index cef76bd4356..3119a03b1cb 100644
--- a/spec/support/shared_examples/initializers/uses_gitlab_url_blocker_shared_examples.rb
+++ b/spec/support/shared_examples/initializers/uses_gitlab_url_blocker_shared_examples.rb
@@ -31,29 +31,33 @@ RSpec.shared_examples 'a request using Gitlab::UrlBlocker' do
it 'raises error when it is a request that resolves to a local address' do
stub_full_request('https://example.com', method: http_method, ip_address: '172.16.0.0')
- expect { make_request('https://example.com') }
- .to raise_error(url_blocked_error_class,
- "URL is blocked: Requests to the local network are not allowed")
+ expect { make_request('https://example.com') }.to raise_error(
+ url_blocked_error_class,
+ "URL is blocked: Requests to the local network are not allowed"
+ )
end
it 'raises error when it is a request that resolves to a localhost address' do
stub_full_request('https://example.com', method: http_method, ip_address: '127.0.0.1')
- expect { make_request('https://example.com') }
- .to raise_error(url_blocked_error_class,
- "URL is blocked: Requests to localhost are not allowed")
+ expect { make_request('https://example.com') }.to raise_error(
+ url_blocked_error_class,
+ "URL is blocked: Requests to localhost are not allowed"
+ )
end
it 'raises error when it is a request to local address' do
- expect { make_request('http://172.16.0.0') }
- .to raise_error(url_blocked_error_class,
- "URL is blocked: Requests to the local network are not allowed")
+ expect { make_request('http://172.16.0.0') }.to raise_error(
+ url_blocked_error_class,
+ "URL is blocked: Requests to the local network are not allowed"
+ )
end
it 'raises error when it is a request to localhost address' do
- expect { make_request('http://127.0.0.1') }
- .to raise_error(url_blocked_error_class,
- "URL is blocked: Requests to localhost are not allowed")
+ expect { make_request('http://127.0.0.1') }.to raise_error(
+ url_blocked_error_class,
+ "URL is blocked: Requests to localhost are not allowed"
+ )
end
end
@@ -67,15 +71,17 @@ RSpec.shared_examples 'a request using Gitlab::UrlBlocker' do
end
it 'raises error when it is a request to local address' do
- expect { make_request('https://172.16.0.0:8080') }
- .to raise_error(url_blocked_error_class,
- "URL is blocked: Requests to the local network are not allowed")
+ expect { make_request('https://172.16.0.0:8080') }.to raise_error(
+ url_blocked_error_class,
+ "URL is blocked: Requests to the local network are not allowed"
+ )
end
it 'raises error when it is a request to localhost address' do
- expect { make_request('https://127.0.0.1:8080') }
- .to raise_error(url_blocked_error_class,
- "URL is blocked: Requests to localhost are not allowed")
+ expect { make_request('https://127.0.0.1:8080') }.to raise_error(
+ url_blocked_error_class,
+ "URL is blocked: Requests to localhost are not allowed"
+ )
end
end
diff --git a/spec/support/shared_examples/integrations/integration_settings_form.rb b/spec/support/shared_examples/integrations/integration_settings_form.rb
index 1d7f74837f2..c665f6a57f1 100644
--- a/spec/support/shared_examples/integrations/integration_settings_form.rb
+++ b/spec/support/shared_examples/integrations/integration_settings_form.rb
@@ -16,7 +16,7 @@ RSpec.shared_examples 'integration settings form' do
page.within('form.integration-settings-form') do
expect(page).to have_field('Active', type: 'checkbox', wait: 0),
- "#{integration.title} active field not present"
+ "#{integration.title} active field not present"
fields = parse_json(fields_for_integration(integration))
fields.each do |field|
@@ -24,7 +24,7 @@ RSpec.shared_examples 'integration settings form' do
field_name = field[:name]
expect(page).to have_field(field[:title], wait: 0),
- "#{integration.title} field #{field_name} not present"
+ "#{integration.title} field #{field_name} not present"
end
api_only_fields = integration.fields.select { _1[:api_only] }
@@ -43,7 +43,7 @@ RSpec.shared_examples 'integration settings form' do
end
expect(page).to have_field(trigger_title, type: 'checkbox', wait: 0),
- "#{integration.title} field #{trigger_title} checkbox not present"
+ "#{integration.title} field #{trigger_title} checkbox not present"
end
end
end
diff --git a/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
index 7cfab5c8295..0cc525d0575 100644
--- a/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
@@ -113,7 +113,7 @@ RSpec.shared_examples 'common trace features' do
it "returns valid sections" do
expect(sections).not_to be_empty
expect(sections.size).to eq(sections_data.size),
- "expected #{sections_data.size} sections, got #{sections.size}"
+ "expected #{sections_data.size} sections, got #{sections.size}"
buff = StringIO.new(log)
sections.each_with_index do |s, i|
diff --git a/spec/support/shared_examples/lib/gitlab/database/background_migration_job_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/database/background_migration_job_shared_examples.rb
index 286f10a186d..d1367bbe144 100644
--- a/spec/support/shared_examples/lib/gitlab/database/background_migration_job_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/database/background_migration_job_shared_examples.rb
@@ -2,8 +2,11 @@
RSpec.shared_examples 'marks background migration job records' do
it 'marks each job record as succeeded after processing' do
- create(:background_migration_job, class_name: "::#{described_class.name.demodulize}",
- arguments: arguments)
+ create(
+ :background_migration_job,
+ class_name: "::#{described_class.name.demodulize}",
+ arguments: arguments
+ )
expect(::Gitlab::Database::BackgroundMigrationJob).to receive(:mark_all_as_succeeded).and_call_original
@@ -13,8 +16,11 @@ RSpec.shared_examples 'marks background migration job records' do
end
it 'returns the number of job records marked as succeeded' do
- create(:background_migration_job, class_name: "::#{described_class.name.demodulize}",
- arguments: arguments)
+ create(
+ :background_migration_job,
+ class_name: "::#{described_class.name.demodulize}",
+ arguments: arguments
+ )
jobs_updated = subject.perform(*arguments)
diff --git a/spec/support/shared_examples/lib/gitlab/import/advance_stage_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/import/advance_stage_shared_examples.rb
index 0fef5269ab6..effa6a6f6f0 100644
--- a/spec/support/shared_examples/lib/gitlab/import/advance_stage_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/import/advance_stage_shared_examples.rb
@@ -17,16 +17,18 @@ RSpec.shared_examples Gitlab::Import::AdvanceStage do |factory:|
context 'when there are remaining jobs' do
it 'reschedules itself' do
- expect(worker)
- .to receive(:wait_for_jobs)
- .with({ '123' => 2 })
- .and_return({ '123' => 1 })
+ freeze_time do
+ expect(worker)
+ .to receive(:wait_for_jobs)
+ .with({ '123' => 2 })
+ .and_return({ '123' => 1 })
- expect(described_class)
- .to receive(:perform_in)
- .with(described_class::INTERVAL, project.id, { '123' => 1 }, next_stage)
+ expect(described_class)
+ .to receive(:perform_in)
+ .with(described_class::INTERVAL, project.id, { '123' => 1 }, next_stage, Time.zone.now, 1)
- worker.perform(project.id, { '123' => 2 }, next_stage)
+ worker.perform(project.id, { '123' => 2 }, next_stage)
+ end
end
context 'when the project import is not running' do
@@ -74,6 +76,83 @@ RSpec.shared_examples Gitlab::Import::AdvanceStage do |factory:|
.to raise_error(KeyError)
end
end
+
+ context 'on worker timeouts' do
+ it 'refreshes timeout and updates counter if jobs have been processed' do
+ freeze_time do
+ expect(described_class)
+ .to receive(:perform_in)
+ .with(described_class::INTERVAL, project.id, { '123' => 2 }, next_stage, Time.zone.now, 2)
+
+ worker.perform(project.id, { '123' => 2 }, next_stage, 3.hours.ago, 5)
+ end
+ end
+
+ it 'converts string timeout argument to time' do
+ freeze_time do
+ expect_next_instance_of(described_class) do |klass|
+ expect(klass).to receive(:handle_timeout)
+ end
+
+ worker.perform(project.id, { '123' => 2 }, next_stage, 3.hours.ago.to_s, 2)
+ end
+ end
+
+ context 'with an optimistic strategy' do
+ before do
+ project.build_or_assign_import_data(data: { timeout_strategy: "optimistic" })
+ project.save!
+ end
+
+ it 'advances to next stage' do
+ freeze_time do
+ next_worker = described_class::STAGES[next_stage]
+
+ expect(next_worker).to receive(:perform_async).with(project.id)
+
+ stuck_start_time = 3.hours.ago
+
+ worker.perform(project.id, { '123' => 2 }, next_stage, stuck_start_time, 2)
+ end
+ end
+ end
+
+ context 'with a pessimistic strategy' do
+ let(:expected_error_message) { "Failing advance stage, timeout reached with pessimistic strategy" }
+
+ it 'logs error and fails import' do
+ freeze_time do
+ next_worker = described_class::STAGES[next_stage]
+
+ expect(next_worker).not_to receive(:perform_async).with(project.id)
+ expect_next_instance_of(described_class) do |klass|
+ expect(klass).to receive(:find_import_state).and_call_original
+ end
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track)
+ .with(
+ import_state: import_state,
+ exception: Gitlab::Import::AdvanceStage::AdvanceStageTimeoutError,
+ error_source: described_class.name,
+ fail_import: true
+ )
+ .and_call_original
+
+ stuck_start_time = 3.hours.ago
+
+ worker.perform(project.id, { '123' => 2 }, next_stage, stuck_start_time, 2)
+
+ expect(import_state.reload.status).to eq("failed")
+
+ if import_state.is_a?(ProjectImportState)
+ expect(import_state.reload.last_error).to eq(expected_error_message)
+ else
+ expect(import_state.reload.error_message).to eq(expected_error_message)
+ end
+ end
+ end
+ end
+ end
end
describe '#wait_for_jobs' do
diff --git a/spec/support/shared_examples/metrics_instrumentation_shared_examples.rb b/spec/support/shared_examples/metrics_instrumentation_shared_examples.rb
index cef9860fe25..5c2f66e08db 100644
--- a/spec/support/shared_examples/metrics_instrumentation_shared_examples.rb
+++ b/spec/support/shared_examples/metrics_instrumentation_shared_examples.rb
@@ -3,7 +3,8 @@
RSpec.shared_examples 'a correct instrumented metric value' do |params|
let(:time_frame) { params[:time_frame] }
let(:options) { params[:options] }
- let(:metric) { described_class.new(time_frame: time_frame, options: options) }
+ let(:events) { params[:events] }
+ let(:metric) { described_class.new(time_frame: time_frame, options: options, events: events) }
around do |example|
freeze_time { example.run }
diff --git a/spec/support/shared_examples/migrations/swap_conversion_columns_shared_examples.rb b/spec/support/shared_examples/migrations/swap_conversion_columns_shared_examples.rb
new file mode 100644
index 00000000000..d333641b764
--- /dev/null
+++ b/spec/support/shared_examples/migrations/swap_conversion_columns_shared_examples.rb
@@ -0,0 +1,117 @@
+# frozen_string_literal: true
+
+COLUMN_OPTIONS_TO_REMAIN =
+ %i[
+ null
+ serial?
+ collation
+ default
+ default_function
+ ].freeze
+
+SQL_TYPE_OPTIONS_TO_REMAIN =
+ %i[
+ precision
+ scale
+ ].freeze
+
+SQL_TYPE_OPTIONS_TO_CHANGE =
+ %i[
+ type
+ sql_type
+ limit
+ ].freeze
+
+RSpec.shared_examples 'swap conversion columns' do |table_name:, from:, to:|
+ it 'correctly swaps conversion columns' do
+ before_from_column = before_to_column = before_indexes = before_foreign_keys = nil
+ after_from_column = after_to_column = after_indexes = after_foreign_keys = nil
+
+ expect_column_type_is_changed_but_others_remain_unchanged = -> do
+ # SQL type is changed
+ SQL_TYPE_OPTIONS_TO_CHANGE.each do |sql_type_option|
+ expect(
+ after_from_column.sql_type_metadata.public_send(sql_type_option)
+ ).to eq(
+ before_to_column.sql_type_metadata.public_send(sql_type_option)
+ )
+
+ expect(
+ after_to_column.sql_type_metadata.public_send(sql_type_option)
+ ).to eq(
+ before_from_column.sql_type_metadata.public_send(sql_type_option)
+ )
+ end
+
+ # column metadata remains unchanged
+ COLUMN_OPTIONS_TO_REMAIN.each do |column_option|
+ expect(
+ after_from_column.public_send(column_option)
+ ).to eq(
+ before_from_column.public_send(column_option)
+ )
+
+ expect(
+ after_to_column.public_send(column_option)
+ ).to eq(
+ before_to_column.public_send(column_option)
+ )
+ end
+
+ SQL_TYPE_OPTIONS_TO_REMAIN.each do |sql_type_option|
+ expect(
+ after_from_column.sql_type_metadata.public_send(sql_type_option)
+ ).to eq(
+ before_from_column.sql_type_metadata.public_send(sql_type_option)
+ )
+
+ expect(
+ after_to_column.sql_type_metadata.public_send(sql_type_option)
+ ).to eq(
+ before_to_column.sql_type_metadata.public_send(sql_type_option)
+ )
+ end
+
+ # indexes remain unchanged
+ expect(before_indexes).to eq(after_indexes)
+
+ # foreign keys remain unchanged
+ expect(before_foreign_keys).to eq(after_foreign_keys)
+ end
+
+ find_column_by = ->(name) do
+ active_record_base.connection.columns(table_name).find { |c| c.name == name.to_s }
+ end
+
+ find_indexes = -> do
+ active_record_base.connection.indexes(table_name)
+ end
+
+ find_foreign_keys = -> do
+ Gitlab::Database::PostgresForeignKey.by_constrained_table_name(table_name)
+ end
+
+ reversible_migration do |migration|
+ migration.before -> {
+ before_from_column = find_column_by.call(from)
+ before_to_column = find_column_by.call(to)
+ before_indexes = find_indexes
+ before_foreign_keys = find_foreign_keys
+
+ next if after_from_column.nil?
+
+ # For the migrate-down direction
+ expect_column_type_is_changed_but_others_remain_unchanged.call
+ }
+
+ migration.after -> {
+ after_from_column = find_column_by.call(from)
+ after_to_column = find_column_by.call(to)
+ after_indexes = find_indexes
+ after_foreign_keys = find_foreign_keys
+
+ expect_column_type_is_changed_but_others_remain_unchanged.call
+ }
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/chat_integration_shared_examples.rb b/spec/support/shared_examples/models/chat_integration_shared_examples.rb
index 0ce54fbc31f..0ff2c135972 100644
--- a/spec/support/shared_examples/models/chat_integration_shared_examples.rb
+++ b/spec/support/shared_examples/models/chat_integration_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-RSpec.shared_examples "chat integration" do |integration_name|
+RSpec.shared_examples "chat integration" do |integration_name, supports_deployments: false|
describe "Associations" do
it { is_expected.to belong_to :project }
end
@@ -26,8 +26,14 @@ RSpec.shared_examples "chat integration" do |integration_name|
end
describe '.supported_events' do
- it 'does not support deployment_events' do
- expect(described_class.supported_events).not_to include('deployment')
+ if supports_deployments
+ it 'supports deployment_events' do
+ expect(described_class.supported_events).to include('deployment')
+ end
+ else
+ it 'does not support deployment_events' do
+ expect(described_class.supported_events).not_to include('deployment')
+ end
end
end
@@ -375,7 +381,47 @@ RSpec.shared_examples "chat integration" do |integration_name|
let(:sample_data) { Gitlab::DataBuilder::Deployment.build(deployment, deployment.status, Time.now) }
- it_behaves_like "untriggered #{integration_name} integration"
+ if supports_deployments
+ it_behaves_like "triggered #{integration_name} integration"
+ else
+ it_behaves_like "untriggered #{integration_name} integration"
+ end
+ end
+ end
+end
+
+RSpec.shared_examples 'supports group mentions' do |integration_factory|
+ it 'supports group mentions' do
+ allow(subject).to receive(:webhook).and_return('http://example.com')
+ allow(subject).to receive(:group_id).and_return(1)
+ expect(subject).to receive(:notify).with(an_instance_of(Integrations::ChatMessage::GroupMentionMessage), {})
+
+ subject.execute(
+ object_kind: 'group_mention',
+ object_attributes: { action: 'new', object_kind: 'issue' },
+ mentioned: { name: 'John Doe', url: 'http://example.com' }
+ )
+ end
+
+ describe '#supported_events' do
+ context 'when used in a project' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:integration) { build(integration_factory, project: project) }
+
+ it 'does not support group mentions', :aggregate_failures do
+ expect(integration.supported_events).not_to include('group_mention')
+ expect(integration.supported_events).not_to include('group_confidential_mention')
+ end
+ end
+
+ context 'when used in a group' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:integration) { build(integration_factory, group: group) }
+
+ it 'supports group mentions', :aggregate_failures do
+ expect(integration.supported_events).to include('group_mention')
+ expect(integration.supported_events).to include('group_confidential_mention')
+ end
end
end
end
diff --git a/spec/support/shared_examples/models/concerns/protected_ref_access_examples.rb b/spec/support/shared_examples/models/concerns/protected_ref_access_examples.rb
index 0e9200f1fd9..bb438b0082f 100644
--- a/spec/support/shared_examples/models/concerns/protected_ref_access_examples.rb
+++ b/spec/support/shared_examples/models/concerns/protected_ref_access_examples.rb
@@ -52,7 +52,11 @@ RSpec.shared_examples 'protected ref access' do |association|
end
describe '#check_access' do
+ let_it_be(:group) { create(:group) }
+ # Making the project public to avoid false-positive test results
+ let_it_be(:project) { create(:project, :public, group: group) }
let_it_be(:current_user) { create(:user) }
+ let_it_be(:protected_ref) { create(association, project: project) }
let(:access_level) { ::Gitlab::Access::DEVELOPER }
@@ -71,6 +75,47 @@ RSpec.shared_examples 'protected ref access' do |association|
it { expect(subject.check_access(nil)).to eq(false) }
end
+ context 'when current_user access exists without membership' do
+ let(:other_user) { create(:user) }
+ let(:user_access) do
+ described_class.new(association => protected_ref, access_level: access_level, user_id: other_user.id)
+ end
+
+ let(:enable_ff) { false }
+
+ before do
+ stub_feature_flags(check_membership_in_protected_ref_access: enable_ff)
+ end
+
+ it 'does not check membership if check_membership_in_protected_ref_access FF is disabled' do
+ expect(project).not_to receive(:member?).with(other_user)
+
+ user_access.check_access(other_user)
+ end
+
+ context 'when check_membership_in_protected_ref_access FF is enabled' do
+ let(:enable_ff) { true }
+
+ it 'does check membership' do
+ expect(project).to receive(:member?).with(other_user)
+
+ user_access.check_access(other_user)
+ end
+
+ it 'returns false' do
+ expect(user_access.check_access(other_user)).to be_falsey
+ end
+
+ context 'when user has inherited membership' do
+ let!(:inherited_membership) { create(:group_member, group: group, user: other_user) }
+
+ it do
+ expect(user_access.check_access(other_user)).to be_truthy
+ end
+ end
+ end
+ end
+
context 'when access_level is NO_ACCESS' do
let(:access_level) { ::Gitlab::Access::NO_ACCESS }
diff --git a/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb b/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb
index 3f1588c46b3..a9a13ddcd60 100644
--- a/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb
@@ -71,14 +71,21 @@ RSpec.shared_examples 'handles repository moves' do
end
context 'when the transition fails' do
- it 'does not trigger the corresponding repository storage worker and adds an error' do
+ before do
allow(storage_move.container).to receive(:set_repository_read_only!).and_raise(StandardError, 'foobar')
- expect(repository_storage_worker).not_to receive(:perform_async)
+ end
+ it 'does not trigger the corresponding repository storage worker and adds an error' do
+ expect(repository_storage_worker).not_to receive(:perform_async)
storage_move.schedule!
-
expect(storage_move.errors[error_key]).to include('foobar')
end
+
+ it 'sets the state to failed' do
+ expect(storage_move).to receive(:do_fail!).and_call_original
+ storage_move.schedule!
+ expect(storage_move.state_name).to eq(:failed)
+ end
end
end
diff --git a/spec/support/shared_examples/models/issuable_link_shared_examples.rb b/spec/support/shared_examples/models/issuable_link_shared_examples.rb
index af96b77edaf..f28abb35128 100644
--- a/spec/support/shared_examples/models/issuable_link_shared_examples.rb
+++ b/spec/support/shared_examples/models/issuable_link_shared_examples.rb
@@ -52,6 +52,45 @@ RSpec.shared_examples 'issuable link' do
end
end
+ context 'when max number of links is exceeded' do
+ subject(:link) { create_issuable_link(issuable, issuable2) }
+
+ shared_examples 'invalid due to exceeding max number of links' do
+ let(:stubbed_limit) { 1 }
+ let(:issuable_name) { described_class.issuable_name }
+ let(:error_msg) do
+ "This #{issuable_name} would exceed the maximum number of " \
+ "linked #{issuable_name.pluralize} (#{stubbed_limit})."
+ end
+
+ before do
+ create(issuable_link_factory, source: source, target: target)
+ stub_const("IssuableLink::MAX_LINKS_COUNT", stubbed_limit)
+ end
+
+ specify do
+ is_expected.to be_invalid
+ expect(link.errors.messages[error_item]).to include(error_msg)
+ end
+ end
+
+ context 'when source exceeds max' do
+ let(:source) { issuable }
+ let(:target) { issuable3 }
+ let(:error_item) { :source }
+
+ it_behaves_like 'invalid due to exceeding max number of links'
+ end
+
+ context 'when target exceeds max' do
+ let(:source) { issuable2 }
+ let(:target) { issuable3 }
+ let(:error_item) { :target }
+
+ it_behaves_like 'invalid due to exceeding max number of links'
+ end
+ end
+
def create_issuable_link(source, target)
build(issuable_link_factory, source: source, target: target)
end
diff --git a/spec/support/shared_examples/models/member_shared_examples.rb b/spec/support/shared_examples/models/member_shared_examples.rb
index e9e25dee746..731500c4510 100644
--- a/spec/support/shared_examples/models/member_shared_examples.rb
+++ b/spec/support/shared_examples/models/member_shared_examples.rb
@@ -486,59 +486,6 @@ RSpec.shared_examples_for "bulk member creation" do
end.to change { Member.count }.by(2)
end
end
-
- context 'when `tasks_to_be_done` and `tasks_project_id` are passed' do
- let(:task_project) { source.is_a?(Group) ? create(:project, group: source) : source }
-
- it 'creates a member_task with the correct attributes', :aggregate_failures do
- members = described_class.add_members(source, [user1], :developer, tasks_to_be_done: %w(ci code), tasks_project_id: task_project.id)
- member = members.last
-
- expect(member.tasks_to_be_done).to match_array([:ci, :code])
- expect(member.member_task.project).to eq(task_project)
- end
-
- context 'with an already existing member' do
- before do
- source.add_member(user1, :developer)
- end
-
- it 'does not update tasks to be done if tasks already exist', :aggregate_failures do
- member = source.members.find_by(user_id: user1.id)
- create(:member_task, member: member, project: task_project, tasks_to_be_done: %w(code ci))
-
- expect do
- described_class.add_members(
- source,
- [user1.id],
- :developer,
- tasks_to_be_done: %w(issues),
- tasks_project_id: task_project.id
- )
- end.not_to change { MemberTask.count }
-
- member.reset
- expect(member.tasks_to_be_done).to match_array([:code, :ci])
- expect(member.member_task.project).to eq(task_project)
- end
-
- it 'adds tasks to be done if they do not exist', :aggregate_failures do
- expect do
- described_class.add_members(
- source,
- [user1.id],
- :developer,
- tasks_to_be_done: %w(issues),
- tasks_project_id: task_project.id
- )
- end.to change { MemberTask.count }.by(1)
-
- member = source.members.find_by(user_id: user1.id)
- expect(member.tasks_to_be_done).to match_array([:issues])
- expect(member.member_task.project).to eq(task_project)
- end
- end
- end
end
RSpec.shared_examples 'owner management' do
diff --git a/spec/support/shared_examples/namespaces/traversal_examples.rb b/spec/support/shared_examples/namespaces/traversal_examples.rb
index 4dff4f68995..960160395f8 100644
--- a/spec/support/shared_examples/namespaces/traversal_examples.rb
+++ b/spec/support/shared_examples/namespaces/traversal_examples.rb
@@ -240,14 +240,6 @@ RSpec.shared_examples 'namespace traversal' do
describe '#ancestors_upto' do
include_examples '#ancestors_upto'
-
- context 'with use_traversal_ids disabled' do
- before do
- stub_feature_flags(use_traversal_ids: false)
- end
-
- include_examples '#ancestors_upto'
- end
end
describe '#descendants' do
diff --git a/spec/support/shared_examples/namespaces/traversal_scope_examples.rb b/spec/support/shared_examples/namespaces/traversal_scope_examples.rb
index b308295b5fb..637068c5c8a 100644
--- a/spec/support/shared_examples/namespaces/traversal_scope_examples.rb
+++ b/spec/support/shared_examples/namespaces/traversal_scope_examples.rb
@@ -70,28 +70,10 @@ RSpec.shared_examples 'namespace traversal scopes' do
end
describe '.roots' do
- context "use_traversal_ids feature flag is true" do
- before do
- stub_feature_flags(use_traversal_ids: true)
- end
-
- it_behaves_like '.roots'
-
- it 'not make recursive queries' do
- expect { described_class.where(id: [nested_group_1]).roots.load }.not_to make_queries_matching(/WITH RECURSIVE/)
- end
- end
+ it_behaves_like '.roots'
- context "use_traversal_ids feature flag is false" do
- before do
- stub_feature_flags(use_traversal_ids: false)
- end
-
- it_behaves_like '.roots'
-
- it 'makes recursive queries' do
- expect { described_class.where(id: [nested_group_1]).roots.load }.to make_queries_matching(/WITH RECURSIVE/)
- end
+ it 'not make recursive queries' do
+ expect { described_class.where(id: [nested_group_1]).roots.load }.not_to make_queries_matching(/WITH RECURSIVE/)
end
end
@@ -263,7 +245,7 @@ RSpec.shared_examples 'namespace traversal scopes' do
include_examples '.self_and_descendant_ids'
end
- shared_examples '.self_and_hierarchy' do
+ describe '.self_and_hierarchy' do
let(:base_scope) { Group.where(id: base_groups) }
subject { base_scope.self_and_hierarchy }
@@ -292,21 +274,4 @@ RSpec.shared_examples 'namespace traversal scopes' do
it { is_expected.to contain_exactly(group_1, nested_group_1, deep_nested_group_1) }
end
end
-
- describe '.self_and_hierarchy' do
- it_behaves_like '.self_and_hierarchy'
-
- context "use_traversal_ids_for_self_and_hierarchy_scopes feature flag is false" do
- before do
- stub_feature_flags(use_traversal_ids_for_self_and_hierarchy_scopes: false)
- end
-
- it_behaves_like '.self_and_hierarchy'
-
- it 'makes recursive queries' do
- base_groups = Group.where(id: nested_group_1)
- expect { base_groups.self_and_hierarchy.load }.to make_queries_matching(/WITH RECURSIVE/)
- end
- end
- end
end
diff --git a/spec/support/shared_examples/observability/embed_observabilities_examples.rb b/spec/support/shared_examples/observability/embed_observabilities_examples.rb
deleted file mode 100644
index c8d4e9e0d7e..00000000000
--- a/spec/support/shared_examples/observability/embed_observabilities_examples.rb
+++ /dev/null
@@ -1,61 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_examples 'embeds observability' do
- it 'renders iframe in description' do
- page.within('.description') do
- expect_observability_iframe(page.html)
- end
- end
-
- it 'renders iframe in comment' do
- expect(page).not_to have_css('.note-text')
-
- page.within('.js-main-target-form') do
- fill_in('note[note]', with: observable_url)
- click_button('Comment')
- end
-
- wait_for_requests
-
- page.within('.note-text') do
- expect_observability_iframe(page.html)
- end
- end
-end
-
-RSpec.shared_examples 'does not embed observability' do
- it 'does not render iframe in description' do
- page.within('.description') do
- expect_observability_iframe(page.html, to_be_nil: true)
- end
- end
-
- it 'does not render iframe in comment' do
- expect(page).not_to have_css('.note-text')
-
- page.within('.js-main-target-form') do
- fill_in('note[note]', with: observable_url)
- click_button('Comment')
- end
-
- wait_for_requests
-
- page.within('.note-text') do
- expect_observability_iframe(page.html, to_be_nil: true)
- end
- end
-end
-
-def expect_observability_iframe(html, to_be_nil: false)
- iframe = Nokogiri::HTML.parse(html).at_css('#observability-ui-iframe')
-
- expect(html).to include(observable_url)
-
- if to_be_nil
- expect(iframe).to be_nil
- else
- expect(iframe).not_to be_nil
- iframe_src = "#{expected_observable_url}&theme=light&username=#{user.username}&kiosk=inline-embed"
- expect(iframe.attributes['src'].value).to eq(iframe_src)
- end
-end
diff --git a/spec/support/shared_examples/prometheus/additional_metrics_shared_examples.rb b/spec/support/shared_examples/prometheus/additional_metrics_shared_examples.rb
deleted file mode 100644
index d196114b227..00000000000
--- a/spec/support/shared_examples/prometheus/additional_metrics_shared_examples.rb
+++ /dev/null
@@ -1,161 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_examples 'additional metrics query' do
- include Prometheus::MetricBuilders
-
- let(:metric_group_class) { Gitlab::Prometheus::MetricGroup }
- let(:metric_class) { Gitlab::Prometheus::Metric }
-
- let(:metric_names) { %w[metric_a metric_b] }
-
- let(:query_range_result) do
- [{ metric: {}, values: [[1488758662.506, '0.00002996364761904785'], [1488758722.506, '0.00003090239047619091']] }]
- end
-
- let(:client) { instance_double('Gitlab::PrometheusClient') }
- let(:query_result) { described_class.new(client).query(*query_params) }
- let(:project) { create(:project, :repository) }
- let(:environment) { create(:environment, slug: 'environment-slug', project: project) }
-
- before do
- allow(client).to receive(:label_values).and_return(metric_names)
- allow(metric_group_class).to receive(:common_metrics).and_return([simple_metric_group(metrics: [simple_metric])])
- end
-
- describe 'metrics query context' do
- subject! { described_class.new(client) }
-
- shared_examples 'query context containing environment slug and filter' do
- it 'contains ci_environment_slug' do
- expect(subject)
- .to receive(:query_metrics).with(project, environment, hash_including(ci_environment_slug: environment.slug))
-
- subject.query(*query_params)
- end
-
- it 'contains environment filter' do
- expect(subject).to receive(:query_metrics).with(
- project,
- environment,
- hash_including(
- environment_filter: "container_name!=\"POD\",environment=\"#{environment.slug}\""
- )
- )
-
- subject.query(*query_params)
- end
- end
-
- describe 'project has Kubernetes service' do
- context 'when user configured kubernetes from CI/CD > Clusters' do
- let!(:cluster) { create(:cluster, :project, :provided_by_gcp, projects: [project]) }
- let(:environment) { create(:environment, slug: 'environment-slug', project: project) }
- let(:kube_namespace) { environment.deployment_namespace }
-
- it_behaves_like 'query context containing environment slug and filter'
-
- it 'query context contains kube_namespace' do
- expect(subject)
- .to receive(:query_metrics).with(project, environment, hash_including(kube_namespace: kube_namespace))
-
- subject.query(*query_params)
- end
- end
- end
-
- describe 'project without Kubernetes service' do
- it_behaves_like 'query context containing environment slug and filter'
-
- it 'query context contains empty kube_namespace' do
- expect(subject).to receive(:query_metrics).with(project, environment, hash_including(kube_namespace: ''))
-
- subject.query(*query_params)
- end
- end
- end
-
- context 'with one group where two metrics is found' do
- before do
- allow(metric_group_class).to receive(:common_metrics).and_return([simple_metric_group])
- end
-
- context 'when some queries return results' do
- before do
- allow(client).to receive(:query_range).with('query_range_a', any_args).and_return(query_range_result)
- allow(client).to receive(:query_range).with('query_range_b', any_args).and_return(query_range_result)
- allow(client).to receive(:query_range).with('query_range_empty', any_args).and_return([])
- end
-
- it 'return group data only for queries with results' do
- expected = [
- {
- group: 'name',
- priority: 1,
- metrics: [
- {
- title: 'title', weight: 1, y_label: 'Values', queries: [
- { query_range: 'query_range_a', result: query_range_result },
- { query_range: 'query_range_b', label: 'label', unit: 'unit', result: query_range_result }
- ]
- }
- ]
- }
- ]
-
- expect(query_result.to_json).to match_schema('prometheus/additional_metrics_query_result')
- expect(query_result).to eq(expected)
- end
- end
- end
-
- context 'with two groups with one metric each' do
- let(:metrics) { [simple_metric(queries: [simple_query])] }
-
- before do
- allow(metric_group_class).to receive(:common_metrics).and_return(
- [
- simple_metric_group(name: 'group_a', metrics: [simple_metric(queries: [simple_query])]),
- simple_metric_group(name: 'group_b', metrics: [simple_metric(title: 'title_b', queries: [simple_query('b')])])
- ])
- allow(client).to receive(:label_values).and_return(metric_names)
- end
-
- context 'when both queries return results' do
- before do
- allow(client).to receive(:query_range).with('query_range_a', any_args).and_return(query_range_result)
- allow(client).to receive(:query_range).with('query_range_b', any_args).and_return(query_range_result)
- end
-
- it 'return group data both queries' do
- queries_with_result_a = { queries: [{ query_range: 'query_range_a', result: query_range_result }] }
- queries_with_result_b = { queries: [{ query_range: 'query_range_b', result: query_range_result }] }
-
- expect(query_result.to_json).to match_schema('prometheus/additional_metrics_query_result')
-
- expect(query_result.count).to eq(2)
- expect(query_result).to all(satisfy { |r| r[:metrics].count == 1 })
-
- expect(query_result[0][:metrics].first).to include(queries_with_result_a)
- expect(query_result[1][:metrics].first).to include(queries_with_result_b)
- end
- end
-
- context 'when one query returns result' do
- before do
- allow(client).to receive(:query_range).with('query_range_a', any_args).and_return(query_range_result)
- allow(client).to receive(:query_range).with('query_range_b', any_args).and_return([])
- end
-
- it 'return group data only for query with results' do
- queries_with_result = { queries: [{ query_range: 'query_range_a', result: query_range_result }] }
-
- expect(query_result.to_json).to match_schema('prometheus/additional_metrics_query_result')
-
- expect(query_result.count).to eq(1)
- expect(query_result).to all(satisfy { |r| r[:metrics].count == 1 })
-
- expect(query_result.first[:metrics].first).to include(queries_with_result)
- end
- end
- end
-end
diff --git a/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb
index 4b27f1f2520..7cbaf40721a 100644
--- a/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb
@@ -2,7 +2,6 @@
RSpec.shared_examples 'close quick action' do |issuable_type|
include Features::NotesHelpers
- include ContentEditorHelpers
before do
project.add_maintainer(maintainer)
@@ -77,7 +76,6 @@ RSpec.shared_examples 'close quick action' do |issuable_type|
context "preview of note on #{issuable_type}", :js do
it 'explains close quick action' do
visit public_send("project_#{issuable_type}_path", project, issuable)
- close_rich_text_promo_popover_if_present
preview_note("this is done, close\n/close") do
expect(page).not_to have_content '/close'
diff --git a/spec/support/shared_examples/redis/redis_shared_examples.rb b/spec/support/shared_examples/redis/redis_shared_examples.rb
index 23ec4a632b7..1270efd4701 100644
--- a/spec/support/shared_examples/redis/redis_shared_examples.rb
+++ b/spec/support/shared_examples/redis/redis_shared_examples.rb
@@ -379,6 +379,24 @@ RSpec.shared_examples "redis_shared_examples" do
}
end
+ let(:resque_yaml_config_with_only_cert) do
+ {
+ url: 'rediss://localhost:6380',
+ ssl_params: {
+ cert_file: '/tmp/client.crt'
+ }
+ }
+ end
+
+ let(:resque_yaml_config_with_only_key) do
+ {
+ url: 'rediss://localhost:6380',
+ ssl_params: {
+ key_file: '/tmp/client.key'
+ }
+ }
+ end
+
let(:parsed_config_with_tls) do
{
url: 'rediss://localhost:6380',
@@ -389,6 +407,24 @@ RSpec.shared_examples "redis_shared_examples" do
}
end
+ let(:parsed_config_with_only_cert) do
+ {
+ url: 'rediss://localhost:6380',
+ ssl_params: {
+ cert: dummy_certificate
+ }
+ }
+ end
+
+ let(:parsed_config_with_only_key) do
+ {
+ url: 'rediss://localhost:6380',
+ ssl_params: {
+ key: dummy_key
+ }
+ }
+ end
+
before do
allow(::File).to receive(:exist?).and_call_original
allow(::File).to receive(:read).and_call_original
@@ -433,6 +469,34 @@ RSpec.shared_examples "redis_shared_examples" do
end
end
+ context 'when only certificate file is specified' do
+ before do
+ allow(::File).to receive(:exist?).with("/tmp/client.crt").and_return(true)
+ allow(::File).to receive(:read).with("/tmp/client.crt").and_return("DUMMY_CERTIFICATE")
+ allow(OpenSSL::X509::Certificate).to receive(:new).with("DUMMY_CERTIFICATE").and_return(dummy_certificate)
+ allow(::File).to receive(:exist?).with("/tmp/client.key").and_return(false)
+ end
+
+ it 'renders resque.yml correctly' do
+ expect(subject.send(:parse_client_tls_options,
+ resque_yaml_config_with_only_cert)).to eq(parsed_config_with_only_cert)
+ end
+ end
+
+ context 'when only key file is specified' do
+ before do
+ allow(::File).to receive(:exist?).with("/tmp/client.crt").and_return(false)
+ allow(::File).to receive(:exist?).with("/tmp/client.key").and_return(true)
+ allow(::File).to receive(:read).with("/tmp/client.key").and_return("DUMMY_KEY")
+ allow(OpenSSL::PKey).to receive(:read).with("DUMMY_KEY").and_return(dummy_key)
+ end
+
+ it 'renders resque.yml correctly' do
+ expect(subject.send(:parse_client_tls_options,
+ resque_yaml_config_with_only_key)).to eq(parsed_config_with_only_key)
+ end
+ end
+
context 'when configuration valid TLS related options' do
before do
allow(::File).to receive(:exist?).with("/tmp/client.crt").and_return(true)
diff --git a/spec/support/shared_examples/ref_extraction_shared_examples.rb b/spec/support/shared_examples/ref_extraction_shared_examples.rb
new file mode 100644
index 00000000000..f51c3a16406
--- /dev/null
+++ b/spec/support/shared_examples/ref_extraction_shared_examples.rb
@@ -0,0 +1,165 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'extracts ref vars' do
+ describe '#extract!' do
+ context 'when ref contains %20' do
+ let(:ref) { 'foo%20bar' }
+
+ it 'is not converted to a space in @id' do
+ container.repository.add_branch(owner, 'foo%20bar', 'master')
+
+ ref_extractor.extract!
+
+ expect(ref_extractor.id).to start_with('foo%20bar/')
+ end
+ end
+
+ context 'when ref contains trailing space' do
+ let(:ref) { 'master ' }
+
+ it 'strips surrounding space' do
+ ref_extractor.extract!
+
+ expect(ref_extractor.ref).to eq('master')
+ end
+ end
+
+ context 'when ref contains leading space' do
+ let(:ref) { ' master ' }
+
+ it 'strips surrounding space' do
+ ref_extractor.extract!
+
+ expect(ref_extractor.ref).to eq('master')
+ end
+ end
+
+ context 'when path contains space' do
+ let(:ref) { '38008cb17ce1466d8fec2dfa6f6ab8dcfe5cf49e' }
+ let(:path) { 'with space' }
+
+ it 'is not converted to %20 in @path' do
+ ref_extractor.extract!
+
+ expect(ref_extractor.path).to eq(path)
+ end
+ end
+
+ context 'when override_id is given' do
+ let(:ref_extractor) do
+ described_class.new(container, params, override_id: '38008cb17ce1466d8fec2dfa6f6ab8dcfe5cf49e')
+ end
+
+ it 'uses override_id' do
+ ref_extractor.extract!
+
+ expect(ref_extractor.id).to eq('38008cb17ce1466d8fec2dfa6f6ab8dcfe5cf49e')
+ end
+ end
+ end
+end
+
+RSpec.shared_examples 'extracts ref method' do
+ describe '#extract_ref' do
+ it 'returns an empty pair when no repository_container is set' do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:repository_container).and_return(nil)
+ end
+ expect(ref_extractor.extract_ref('master/CHANGELOG')).to eq(['', ''])
+ end
+
+ context 'without a path' do
+ it 'extracts a valid branch' do
+ expect(ref_extractor.extract_ref('master')).to eq(['master', ''])
+ end
+
+ it 'extracts a valid tag' do
+ expect(ref_extractor.extract_ref('v2.0.0')).to eq(['v2.0.0', ''])
+ end
+
+ it 'extracts a valid commit ref' do
+ expect(ref_extractor.extract_ref('f4b14494ef6abf3d144c28e4af0c20143383e062')).to eq(
+ ['f4b14494ef6abf3d144c28e4af0c20143383e062', '']
+ )
+ end
+
+ it 'falls back to a primitive split for an invalid ref' do
+ expect(ref_extractor.extract_ref('stable')).to eq(['stable', ''])
+ end
+
+ it 'does not fetch ref names when there is no slash' do
+ expect(ref_extractor).not_to receive(:ref_names)
+
+ ref_extractor.extract_ref('master')
+ end
+
+ it 'fetches ref names when there is a slash' do
+ expect(ref_extractor).to receive(:ref_names).and_call_original
+
+ ref_extractor.extract_ref('release/app/v1.0.0')
+ end
+ end
+
+ context 'with a path' do
+ it 'extracts a valid branch' do
+ expect(ref_extractor.extract_ref('foo/bar/baz/CHANGELOG')).to eq(
+ ['foo/bar/baz', 'CHANGELOG'])
+ end
+
+ it 'extracts a valid tag' do
+ expect(ref_extractor.extract_ref('v2.0.0/CHANGELOG')).to eq(['v2.0.0', 'CHANGELOG'])
+ end
+
+ it 'extracts a valid commit SHA' do
+ expect(ref_extractor.extract_ref('f4b14494ef6abf3d144c28e4af0c20143383e062/CHANGELOG')).to eq(
+ %w[f4b14494ef6abf3d144c28e4af0c20143383e062 CHANGELOG]
+ )
+ end
+
+ it 'falls back to a primitive split for an invalid ref' do
+ expect(ref_extractor.extract_ref('stable/CHANGELOG')).to eq(%w[stable CHANGELOG])
+ end
+
+ it 'extracts the longest matching ref' do
+ expect(ref_extractor.extract_ref('release/app/v1.0.0/README.md')).to eq(
+ ['release/app/v1.0.0', 'README.md'])
+ end
+
+ context 'when the repository does not have ambiguous refs' do
+ before do
+ allow(container.repository).to receive(:has_ambiguous_refs?).and_return(false)
+ end
+
+ it 'does not fetch all ref names when the first path component is a ref' do
+ expect(ref_extractor).not_to receive(:ref_names)
+ expect(container.repository).to receive(:branch_names_include?).with('v1.0.0').and_return(false)
+ expect(container.repository).to receive(:tag_names_include?).with('v1.0.0').and_return(true)
+
+ expect(ref_extractor.extract_ref('v1.0.0/doc/README.md')).to eq(['v1.0.0', 'doc/README.md'])
+ end
+
+ it 'fetches all ref names when the first path component is not a ref' do
+ expect(ref_extractor).to receive(:ref_names).and_call_original
+ expect(container.repository).to receive(:branch_names_include?).with('release').and_return(false)
+ expect(container.repository).to receive(:tag_names_include?).with('release').and_return(false)
+
+ expect(ref_extractor.extract_ref('release/app/doc/README.md')).to eq(['release/app', 'doc/README.md'])
+ end
+ end
+
+ context 'when the repository has ambiguous refs' do
+ before do
+ allow(container.repository).to receive(:has_ambiguous_refs?).and_return(true)
+ end
+
+ it 'always fetches all ref names' do
+ expect(ref_extractor).to receive(:ref_names).and_call_original
+ expect(container.repository).not_to receive(:branch_names_include?)
+ expect(container.repository).not_to receive(:tag_names_include?)
+
+ expect(ref_extractor.extract_ref('v1.0.0/doc/README.md')).to eq(['v1.0.0', 'doc/README.md'])
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb
index 6a77de4266f..7e0efd05dd7 100644
--- a/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb
@@ -1,42 +1,45 @@
# frozen_string_literal: true
-RSpec.shared_context 'Composer user type' do |user_type, add_member|
+RSpec.shared_context 'Composer user type' do |member_role: nil|
before do
- group.send("add_#{user_type}", user) if add_member && user_type != :anonymous
- project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ if member_role
+ group.send("add_#{member_role}", user)
+ project.send("add_#{member_role}", user)
+ end
end
end
-RSpec.shared_examples 'Composer package index with version' do |schema_path|
+RSpec.shared_examples 'Composer package index with version' do |schema_path, expected_status|
it 'returns the package index' do
subject
- expect(response).to have_gitlab_http_status(status)
+ expect(response).to have_gitlab_http_status(expected_status)
- if status == :success
+ if expected_status == :success
expect(response).to match_response_schema(schema_path)
expect(json_response).to eq presenter.root
end
end
end
-RSpec.shared_examples 'Composer package index' do |user_type, status, add_member, include_package|
- include_context 'Composer user type', user_type, add_member do
- let(:expected_packages) { include_package == :include_package ? [package] : [] }
- let(:presenter) { ::Packages::Composer::PackagesPresenter.new(group, expected_packages ) }
+RSpec.shared_examples 'Composer package index' do |member_role:, expected_status:, package_returned:|
+ include_context 'Composer user type', member_role: member_role do
+ let_it_be(:expected_packages) { package_returned ? [package] : [] }
+ let_it_be(:presenter) { ::Packages::Composer::PackagesPresenter.new(group, expected_packages ) }
- it_behaves_like 'Composer package index with version', 'public_api/v4/packages/composer/index'
+ it_behaves_like 'Composer package index with version', 'public_api/v4/packages/composer/index', expected_status
context 'with version 2' do
+ let_it_be(:presenter) { ::Packages::Composer::PackagesPresenter.new(group, expected_packages, true ) }
let(:headers) { super().merge('User-Agent' => 'Composer/2.0.9 (Darwin; 19.6.0; PHP 7.4.8; cURL 7.71.1)') }
- it_behaves_like 'Composer package index with version', 'public_api/v4/packages/composer/index_v2'
+ it_behaves_like 'Composer package index with version', 'public_api/v4/packages/composer/index_v2', expected_status
end
end
end
-RSpec.shared_examples 'Composer empty provider index' do |user_type, status, add_member = true|
- include_context 'Composer user type', user_type, add_member do
+RSpec.shared_examples 'Composer empty provider index' do |member_role:, expected_status:|
+ include_context 'Composer user type', member_role: member_role do
it 'returns the package index' do
subject
@@ -47,24 +50,24 @@ RSpec.shared_examples 'Composer empty provider index' do |user_type, status, add
end
end
-RSpec.shared_examples 'Composer provider index' do |user_type, status, add_member = true|
- include_context 'Composer user type', user_type, add_member do
+RSpec.shared_examples 'Composer provider index' do |member_role:, expected_status:|
+ include_context 'Composer user type', member_role: member_role do
it 'returns the package index' do
subject
- expect(response).to have_gitlab_http_status(status)
+ expect(response).to have_gitlab_http_status(expected_status)
expect(response).to match_response_schema('public_api/v4/packages/composer/provider')
expect(json_response['providers']).to include(package.name)
end
end
end
-RSpec.shared_examples 'Composer package api request' do |user_type, status, add_member = true|
- include_context 'Composer user type', user_type, add_member do
+RSpec.shared_examples 'Composer package api request' do |member_role:, expected_status:|
+ include_context 'Composer user type', member_role: member_role do
it 'returns the package index' do
subject
- expect(response).to have_gitlab_http_status(status)
+ expect(response).to have_gitlab_http_status(expected_status)
expect(response).to match_response_schema('public_api/v4/packages/composer/package')
expect(json_response['packages']).to include(package.name)
expect(json_response['packages'][package.name]).to include(package.version)
@@ -72,18 +75,13 @@ RSpec.shared_examples 'Composer package api request' do |user_type, status, add_
end
end
-RSpec.shared_examples 'Composer package creation' do |user_type, status, add_member = true|
- context "for user type #{user_type}" do
- before do
- group.send("add_#{user_type}", user) if add_member && user_type != :anonymous
- project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
- end
-
+RSpec.shared_examples 'Composer package creation' do |expected_status:, member_role: nil|
+ include_context 'Composer user type', member_role: member_role do
it 'creates package files' do
expect { subject }
.to change { project.packages.composer.count }.by(1)
- expect(response).to have_gitlab_http_status(status)
+ expect(response).to have_gitlab_http_status(expected_status)
end
it_behaves_like 'a package tracking event', described_class.name, 'push_package'
@@ -100,42 +98,38 @@ RSpec.shared_examples 'Composer package creation' do |user_type, status, add_mem
end
end
-RSpec.shared_examples 'process Composer api request' do |user_type, status, add_member = true|
- context "for user type #{user_type}" do
- before do
- group.send("add_#{user_type}", user) if add_member && user_type != :anonymous
- project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
- end
-
- it_behaves_like 'returning response status', status
- it_behaves_like 'bumping the package last downloaded at field' if status == :success
+RSpec.shared_examples 'process Composer api request' do |expected_status:, member_role: nil, **extra|
+ include_context 'Composer user type', member_role: member_role do
+ it_behaves_like 'returning response status', expected_status
+ it_behaves_like 'bumping the package last downloaded at field' if expected_status == :success
end
end
-RSpec.shared_context 'Composer auth headers' do |user_role, user_token, auth_method = :token|
- let(:token) { user_token ? personal_access_token.token : 'wrong' }
-
+RSpec.shared_context 'Composer auth headers' do |token_type:, valid_token:, auth_method: :token|
let(:headers) do
- if user_role == :anonymous
- {}
- elsif auth_method == :token
- { 'Private-Token' => token }
+ if token_type == :user
+ token = valid_token ? personal_access_token.token : 'wrong'
+ auth_method == :token ? { 'Private-Token' => token } : basic_auth_header(user.username, token)
+ elsif token_type == :job && valid_token
+ auth_method == :token ? { 'Job-Token' => job.token } : job_basic_auth_header(job)
else
- basic_auth_header(user.username, token)
+ {} # Anonymous user
end
end
end
-RSpec.shared_context 'Composer api project access' do |project_visibility_level, user_role, user_token, auth_method|
- include_context 'Composer auth headers', user_role, user_token, auth_method do
+RSpec.shared_context 'Composer api project access' do |auth_method:, project_visibility_level:, token_type:,
+ valid_token: true|
+ include_context 'Composer auth headers', auth_method: auth_method, token_type: token_type, valid_token: valid_token do
before do
project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
end
end
end
-RSpec.shared_context 'Composer api group access' do |project_visibility_level, user_role, user_token|
- include_context 'Composer auth headers', user_role, user_token do
+RSpec.shared_context 'Composer api group access' do |auth_method:, project_visibility_level:, token_type:,
+ valid_token: true|
+ include_context 'Composer auth headers', auth_method: auth_method, token_type: token_type, valid_token: valid_token do
before do
project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
group.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
@@ -148,13 +142,13 @@ RSpec.shared_examples 'rejects Composer access with unknown group id' do
let(:group) { double(id: non_existing_record_id) }
context 'as anonymous' do
- it_behaves_like 'process Composer api request', :anonymous, :not_found
+ it_behaves_like 'process Composer api request', expected_status: :unauthorized
end
context 'as authenticated user' do
subject { get api(url), headers: basic_auth_header(user.username, personal_access_token.token) }
- it_behaves_like 'process Composer api request', :anonymous, :not_found
+ it_behaves_like 'process Composer api request', expected_status: :not_found
end
end
end
@@ -164,13 +158,13 @@ RSpec.shared_examples 'rejects Composer access with unknown project id' do
let(:project) { double(id: non_existing_record_id) }
context 'as anonymous' do
- it_behaves_like 'process Composer api request', :anonymous, :unauthorized
+ it_behaves_like 'process Composer api request', expected_status: :unauthorized
end
context 'as authenticated user' do
subject { get api(url), params: params, headers: basic_auth_header(user.username, personal_access_token.token) }
- it_behaves_like 'process Composer api request', :anonymous, :not_found
+ it_behaves_like 'process Composer api request', expected_status: :not_found
end
end
end
@@ -191,7 +185,7 @@ RSpec.shared_examples 'Composer access with deploy tokens' do
context 'invalid token' do
let(:headers) { basic_auth_header(deploy_token.username, 'bar') }
- it_behaves_like 'returning response status', :not_found
+ it_behaves_like 'returning response status', :unauthorized
end
end
end
diff --git a/spec/support/shared_examples/requests/api/graphql/issue_list_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/issue_list_shared_examples.rb
index 04f340fef37..c6e4aba6968 100644
--- a/spec/support/shared_examples/requests/api/graphql/issue_list_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/graphql/issue_list_shared_examples.rb
@@ -408,28 +408,6 @@ RSpec.shared_examples 'graphql issue list request spec' do
include_examples 'N+1 query check'
end
- context 'when requesting participants' do
- let(:search_params) { { iids: [issue_a.iid.to_s, issue_c.iid.to_s] } }
- let(:requested_fields) { 'participants { nodes { name } }' }
-
- before do
- create(:award_emoji, :upvote, awardable: issue_a)
- create(:award_emoji, :upvote, awardable: issue_b)
- create(:award_emoji, :upvote, awardable: issue_c)
-
- note_with_emoji_a = create(:note_on_issue, noteable: issue_a, project: issue_a.project)
- note_with_emoji_b = create(:note_on_issue, noteable: issue_b, project: issue_b.project)
- note_with_emoji_c = create(:note_on_issue, noteable: issue_c, project: issue_c.project)
-
- create(:award_emoji, :upvote, awardable: note_with_emoji_a)
- create(:award_emoji, :upvote, awardable: note_with_emoji_b)
- create(:award_emoji, :upvote, awardable: note_with_emoji_c)
- end
-
- # Executes 3 extra queries to fetch participant_attrs
- include_examples 'N+1 query check', threshold: 3
- end
-
context 'when requesting labels', :use_sql_query_cache do
let(:requested_fields) { 'labels { nodes { id } }' }
let(:extra_iid_for_second_query) { same_project_issue2.iid.to_s }
diff --git a/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb
index 5f043cdd996..a4091d6bceb 100644
--- a/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb
@@ -68,22 +68,22 @@ RSpec.shared_examples 'handling get metadata requests' do |scope: :project|
nil | :unscoped | false | :public | nil | :accept | :ok
nil | :non_existing | true | :public | nil | :redirect | :redirected
nil | :non_existing | false | :public | nil | :reject | :not_found
- nil | :scoped_naming_convention | true | :private | nil | :reject | :not_found
- nil | :scoped_naming_convention | false | :private | nil | :reject | :not_found
- nil | :scoped_no_naming_convention | true | :private | nil | :reject | :not_found
- nil | :scoped_no_naming_convention | false | :private | nil | :reject | :not_found
- nil | :unscoped | true | :private | nil | :reject | :not_found
- nil | :unscoped | false | :private | nil | :reject | :not_found
+ nil | :scoped_naming_convention | true | :private | nil | :reject | :unauthorized
+ nil | :scoped_naming_convention | false | :private | nil | :reject | :unauthorized
+ nil | :scoped_no_naming_convention | true | :private | nil | :reject | :unauthorized
+ nil | :scoped_no_naming_convention | false | :private | nil | :reject | :unauthorized
+ nil | :unscoped | true | :private | nil | :reject | :unauthorized
+ nil | :unscoped | false | :private | nil | :reject | :unauthorized
nil | :non_existing | true | :private | nil | :redirect | :redirected
- nil | :non_existing | false | :private | nil | :reject | :not_found
- nil | :scoped_naming_convention | true | :internal | nil | :reject | :not_found
- nil | :scoped_naming_convention | false | :internal | nil | :reject | :not_found
- nil | :scoped_no_naming_convention | true | :internal | nil | :reject | :not_found
- nil | :scoped_no_naming_convention | false | :internal | nil | :reject | :not_found
- nil | :unscoped | true | :internal | nil | :reject | :not_found
- nil | :unscoped | false | :internal | nil | :reject | :not_found
+ nil | :non_existing | false | :private | nil | :reject | :unauthorized
+ nil | :scoped_naming_convention | true | :internal | nil | :reject | :unauthorized
+ nil | :scoped_naming_convention | false | :internal | nil | :reject | :unauthorized
+ nil | :scoped_no_naming_convention | true | :internal | nil | :reject | :unauthorized
+ nil | :scoped_no_naming_convention | false | :internal | nil | :reject | :unauthorized
+ nil | :unscoped | true | :internal | nil | :reject | :unauthorized
+ nil | :unscoped | false | :internal | nil | :reject | :unauthorized
nil | :non_existing | true | :internal | nil | :redirect | :redirected
- nil | :non_existing | false | :internal | nil | :reject | :not_found
+ nil | :non_existing | false | :internal | nil | :reject | :unauthorized
:oauth | :scoped_naming_convention | true | :public | :guest | :accept | :ok
:oauth | :scoped_naming_convention | true | :public | :reporter | :accept | :ok
@@ -280,11 +280,15 @@ RSpec.shared_examples 'handling get metadata requests' do |scope: :project|
end
end
- if (scope == :group && params[:package_name_type] == :non_existing) &&
- (!params[:request_forward] || (!params[:auth] && params[:request_forward] && params[:visibility] != :public))
+ if scope == :group && params[:package_name_type] == :non_existing && !params[:request_forward] && params[:auth]
status = :not_found
end
+ if scope == :group && params[:package_name_type] == :non_existing && params[:request_forward] && !params[:auth] && params[:visibility] != :public
+ example_name = 'reject metadata request'
+ status = :unauthorized
+ end
+
# Check the error message for :not_found
example_name = 'returning response status with error' if status == :not_found
@@ -522,14 +526,14 @@ RSpec.shared_examples 'handling get dist tags requests' do |scope: :project|
nil | :scoped_no_naming_convention | :public | nil | :accept | :ok
nil | :unscoped | :public | nil | :accept | :ok
nil | :non_existing | :public | nil | :reject | :not_found
- nil | :scoped_naming_convention | :private | nil | :reject | :not_found
- nil | :scoped_no_naming_convention | :private | nil | :reject | :not_found
- nil | :unscoped | :private | nil | :reject | :not_found
- nil | :non_existing | :private | nil | :reject | :not_found
- nil | :scoped_naming_convention | :internal | nil | :reject | :not_found
- nil | :scoped_no_naming_convention | :internal | nil | :reject | :not_found
- nil | :unscoped | :internal | nil | :reject | :not_found
- nil | :non_existing | :internal | nil | :reject | :not_found
+ nil | :scoped_naming_convention | :private | nil | :reject | :unauthorized
+ nil | :scoped_no_naming_convention | :private | nil | :reject | :unauthorized
+ nil | :unscoped | :private | nil | :reject | :unauthorized
+ nil | :non_existing | :private | nil | :reject | :unauthorized
+ nil | :scoped_naming_convention | :internal | nil | :reject | :unauthorized
+ nil | :scoped_no_naming_convention | :internal | nil | :reject | :unauthorized
+ nil | :unscoped | :internal | nil | :reject | :unauthorized
+ nil | :non_existing | :internal | nil | :reject | :unauthorized
:oauth | :scoped_naming_convention | :public | :guest | :accept | :ok
:oauth | :scoped_naming_convention | :public | :reporter | :accept | :ok
diff --git a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
index 1be99040ae5..f8e78c8c9b1 100644
--- a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
@@ -357,12 +357,7 @@ RSpec.shared_examples 'process nuget download content request' do |user_type, st
end
context 'with normalized package version' do
- let(:normalized_version) { '0.1.0' }
- let(:url) { "/projects/#{target.id}/packages/nuget/download/#{package.name}/#{normalized_version}/#{package.name}.#{package.version}.#{format}" }
-
- before do
- package.nuget_metadatum.update_column(:normalized_version, normalized_version)
- end
+ let(:package_version) { '0.1.0' }
it_behaves_like 'returning response status', status
@@ -737,3 +732,19 @@ RSpec.shared_examples 'nuget upload endpoint' do |symbol_package: false|
end
end
end
+
+RSpec.shared_examples 'process nuget delete request' do |user_type, status|
+ context "for user type #{user_type}" do
+ before do
+ target.send("add_#{user_type}", user) if user_type
+ end
+
+ it_behaves_like 'returning response status', status
+
+ it_behaves_like 'a package tracking event', 'API::NugetPackages', 'delete_package'
+
+ it 'marks package for deletion' do
+ expect { subject }.to change { package.reset.status }.from('default').to('pending_destruction')
+ end
+ end
+end
diff --git a/spec/support/shared_examples/requests/organizations_shared_examples.rb b/spec/support/shared_examples/requests/organizations_shared_examples.rb
new file mode 100644
index 00000000000..78e7c3c6bde
--- /dev/null
+++ b/spec/support/shared_examples/requests/organizations_shared_examples.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'organization - successful response' do
+ it 'renders 200 OK' do
+ gitlab_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+end
+
+RSpec.shared_examples 'organization - not found response' do
+ it 'renders 404 NOT_FOUND' do
+ gitlab_request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+end
+
+RSpec.shared_examples 'organization - redirects to sign in page' do
+ it 'redirects to sign in page' do
+ gitlab_request
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
+end
+
+RSpec.shared_examples 'organization - action disabled by `ui_for_organizations` feature flag' do
+ context 'when `ui_for_organizations` feature flag is disabled' do
+ before do
+ stub_feature_flags(ui_for_organizations: false)
+ end
+
+ it_behaves_like 'organization - not found response'
+ end
+end
diff --git a/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb b/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
index 34188a8d18a..6abf8b242f1 100644
--- a/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
@@ -284,6 +284,48 @@ RSpec.shared_examples 'a container registry auth service' do
end
end
+ describe '.push_pull_nested_repositories_access_token' do
+ let_it_be(:project) { create(:project) }
+
+ let(:token) { described_class.push_pull_nested_repositories_access_token(project.full_path) }
+ let(:access) do
+ [
+ {
+ 'type' => 'repository',
+ 'name' => project.full_path,
+ 'actions' => %w[pull push],
+ 'meta' => { 'project_path' => project.full_path }
+ },
+ {
+ 'type' => 'repository',
+ 'name' => "#{project.full_path}/*",
+ 'actions' => %w[pull],
+ 'meta' => { 'project_path' => project.full_path }
+ }
+ ]
+ end
+
+ subject { { token: token } }
+
+ it 'has the correct scope' do
+ expect(payload).to include('access' => access)
+ end
+
+ it_behaves_like 'a valid token'
+ it_behaves_like 'not a container repository factory'
+
+ context 'with path ending with a slash' do
+ let(:token) { described_class.push_pull_nested_repositories_access_token("#{project.full_path}/") }
+
+ it 'has the correct scope' do
+ expect(payload).to include('access' => access)
+ end
+
+ it_behaves_like 'a valid token'
+ it_behaves_like 'not a container repository factory'
+ end
+ end
+
context 'user authorization' do
let_it_be(:current_user) { create(:user) }
@@ -780,12 +822,12 @@ RSpec.shared_examples 'a container registry auth service' do
context 'for project that disables repository' do
let_it_be(:project) { create(:project, :public, :repository_disabled) }
- context 'disallow when pulling' do
+ context 'allow when pulling' do
let(:current_params) do
{ scopes: ["repository:#{project.full_path}:pull"] }
end
- it_behaves_like 'an inaccessible'
+ it_behaves_like 'a pullable'
it_behaves_like 'not a container repository factory'
end
end
@@ -1301,7 +1343,7 @@ RSpec.shared_examples 'a container registry auth service' do
end
describe '#access_token' do
- let(:token) { described_class.access_token(['pull'], [bad_project.full_path]) }
+ let(:token) { described_class.access_token({ bad_project.full_path => ['pull'] }) }
let(:access) do
[{ 'type' => 'repository',
'name' => bad_project.full_path,
diff --git a/spec/support/shared_examples/services/issuable_links/create_links_shared_examples.rb b/spec/support/shared_examples/services/issuable_links/create_links_shared_examples.rb
index 83a2f3136b4..10dc185157c 100644
--- a/spec/support/shared_examples/services/issuable_links/create_links_shared_examples.rb
+++ b/spec/support/shared_examples/services/issuable_links/create_links_shared_examples.rb
@@ -3,6 +3,7 @@
RSpec.shared_examples 'issuable link creation' do |use_references: true|
let(:items_param) { use_references ? :issuable_references : :target_issuable }
let(:response_keys) { [:status, :created_references] }
+ let(:async_notes) { false }
let(:already_assigned_error_msg) { "#{issuable_type.capitalize}(s) already assigned" }
let(:permission_error_status) { issuable_type == :issue ? 403 : 404 }
let(:permission_error_msg) do
@@ -85,17 +86,27 @@ RSpec.shared_examples 'issuable link creation' do |use_references: true|
end
it 'creates notes' do
- # First two-way relation notes
- expect(SystemNoteService).to receive(:relate_issuable)
- .with(issuable, issuable2, user)
- expect(SystemNoteService).to receive(:relate_issuable)
- .with(issuable2, issuable, user)
-
- # Second two-way relation notes
- expect(SystemNoteService).to receive(:relate_issuable)
- .with(issuable, issuable3, user)
- expect(SystemNoteService).to receive(:relate_issuable)
- .with(issuable3, issuable, user)
+ if async_notes
+ expect(Issuable::RelatedLinksCreateWorker).to receive(:perform_async) do |args|
+ expect(args).to eq(
+ {
+ issuable_class: issuable.class.name,
+ issuable_id: issuable.id,
+ link_ids: issuable_link_class.where(source: issuable).last(2).pluck(:id),
+ link_type: 'relates_to',
+ user_id: user.id
+ }
+ )
+ end
+ else
+ # First two-way relation notes
+ expect(SystemNoteService).to receive(:relate_issuable).with(issuable, issuable2, user)
+ expect(SystemNoteService).to receive(:relate_issuable).with(issuable2, issuable, user)
+
+ # Second two-way relation notes
+ expect(SystemNoteService).to receive(:relate_issuable).with(issuable, issuable3, user)
+ expect(SystemNoteService).to receive(:relate_issuable).with(issuable3, issuable, user)
+ end
subject
end
@@ -105,10 +116,24 @@ RSpec.shared_examples 'issuable link creation' do |use_references: true|
let(:params) { set_params([issuable_a, issuable_b]) }
it 'creates notes only for new relations' do
- expect(SystemNoteService).to receive(:relate_issuable).with(issuable, issuable_a, anything)
- expect(SystemNoteService).to receive(:relate_issuable).with(issuable_a, issuable, anything)
- expect(SystemNoteService).not_to receive(:relate_issuable).with(issuable, issuable_b, anything)
- expect(SystemNoteService).not_to receive(:relate_issuable).with(issuable_b, issuable, anything)
+ if async_notes
+ expect(Issuable::RelatedLinksCreateWorker).to receive(:perform_async) do |args|
+ expect(args).to eq(
+ {
+ issuable_class: issuable.class.name,
+ issuable_id: issuable.id,
+ link_ids: issuable_link_class.where(source: issuable).last(1).pluck(:id),
+ link_type: 'relates_to',
+ user_id: user.id
+ }
+ )
+ end
+ else
+ expect(SystemNoteService).to receive(:relate_issuable).with(issuable, issuable_a, anything)
+ expect(SystemNoteService).to receive(:relate_issuable).with(issuable_a, issuable, anything)
+ expect(SystemNoteService).not_to receive(:relate_issuable).with(issuable, issuable_b, anything)
+ expect(SystemNoteService).not_to receive(:relate_issuable).with(issuable_b, issuable, anything)
+ end
subject
end
diff --git a/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb b/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
index 2070cac24b0..7d786dbeb87 100644
--- a/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
@@ -85,14 +85,15 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
it 'unmarks the repository as read-only without updating the repository storage' do
allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('default').and_call_original
allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('test_second_storage').and_return(SecureRandom.uuid)
- allow(project_repository_double).to receive(:replicate)
+ expect(project_repository_double).to receive(:replicate)
.with(project.repository.raw)
- allow(project_repository_double).to receive(:checksum)
+ expect(project_repository_double).to receive(:checksum)
.and_return(project_repository_checksum)
- allow(repository_double).to receive(:replicate)
+ expect(repository_double).to receive(:replicate)
.with(repository.raw)
.and_raise(Gitlab::Git::CommandError)
+ expect(repository_double).to receive(:remove)
expect do
subject.execute
@@ -138,14 +139,15 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
allow(project_repository_double).to receive(:checksum)
.and_return(project_repository_checksum)
- allow(repository_double).to receive(:replicate)
+ expect(repository_double).to receive(:replicate)
.with(repository.raw)
- allow(repository_double).to receive(:checksum)
+ expect(repository_double).to receive(:checksum)
.and_return('not matching checksum')
+ expect(repository_double).to receive(:remove)
expect do
subject.execute
- end.to raise_error(UpdateRepositoryStorageMethods::Error, /Failed to verify \w+ repository checksum from \w+ to not matching checksum/)
+ end.to raise_error(Repositories::ReplicateService::Error, /Failed to verify \w+ repository checksum from \w+ to not matching checksum/)
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('default')
diff --git a/spec/support/shared_examples/services/protected_branches_shared_examples.rb b/spec/support/shared_examples/services/protected_branches_shared_examples.rb
index ce607a6b956..15c63865720 100644
--- a/spec/support/shared_examples/services/protected_branches_shared_examples.rb
+++ b/spec/support/shared_examples/services/protected_branches_shared_examples.rb
@@ -1,11 +1,24 @@
# frozen_string_literal: true
RSpec.shared_context 'with scan result policy blocking protected branches' do
+ include RepoHelpers
+
+ let(:policy_path) { Security::OrchestrationPolicyConfiguration::POLICY_PATH }
+ let_it_be(:policy_project) { create(:project, :repository) }
+ let(:default_branch) { policy_project.default_branch }
+
+ let(:policy_yaml) do
+ build(:orchestration_policy_yaml, scan_execution_policy: [], scan_result_policy: [scan_result_policy])
+ end
+
+ let(:scan_result_policy) do
+ build(:scan_result_policy, branches: [branch_name], approval_settings: { block_unprotecting_branches: true })
+ end
+
before do
- create(
- :scan_result_policy_read,
- :blocking_protected_branches,
- project: project)
+ policy_configuration.update_attribute(:security_policy_management_project, policy_project)
+
+ create_file_in_repo(policy_project, default_branch, default_branch, policy_path, policy_yaml)
stub_licensed_features(security_orchestration_policies: true)
end
diff --git a/spec/support/shared_examples/work_item_hierarchy_restrictions_importer.rb b/spec/support/shared_examples/work_item_hierarchy_restrictions_importer.rb
index d61458db3b3..0545be7c741 100644
--- a/spec/support/shared_examples/work_item_hierarchy_restrictions_importer.rb
+++ b/spec/support/shared_examples/work_item_hierarchy_restrictions_importer.rb
@@ -56,4 +56,19 @@ RSpec.shared_examples 'work item hierarchy restrictions importer' do
expect(WorkItems::HierarchyRestriction.count).to eq(7)
end
end
+
+ context 'when restrictions contain attributes not present in the table' do
+ before do
+ allow(WorkItems::HierarchyRestriction)
+ .to receive(:column_names).and_return(%w[parent_type_id child_type_id])
+ end
+
+ it 'filters out missing columns' do
+ expect(WorkItems::HierarchyRestriction).to receive(:upsert_all) do |args|
+ expect(args[0].keys).to eq(%i[parent_type_id child_type_id])
+ end
+
+ subject
+ end
+ end
end
diff --git a/spec/support/shared_examples/work_item_related_link_restrictions_importer.rb b/spec/support/shared_examples/work_item_related_link_restrictions_importer.rb
new file mode 100644
index 00000000000..935ad2ba472
--- /dev/null
+++ b/spec/support/shared_examples/work_item_related_link_restrictions_importer.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'work item related links restrictions importer' do
+ shared_examples_for 'adds restrictions' do
+ it "adds all restrictions if they don't exist" do
+ expect { subject }.to change { WorkItems::RelatedLinkRestriction.count }.from(0).to(34)
+ end
+ end
+
+ context 'when restrictions are missing' do
+ before do
+ WorkItems::RelatedLinkRestriction.delete_all
+ end
+
+ it_behaves_like 'adds restrictions'
+ end
+
+ context 'when base types are missing' do
+ before do
+ WorkItems::Type.delete_all
+ end
+
+ it_behaves_like 'adds restrictions'
+ end
+
+ context 'when some restrictions are missing' do
+ before do
+ Gitlab::DatabaseImporters::WorkItems::RelatedLinksRestrictionsImporter.upsert_restrictions
+ WorkItems::RelatedLinkRestriction.limit(1).delete_all
+ end
+
+ it 'inserts missing restrictions and does nothing if some already existed' do
+ expect { subject }.to make_queries_matching(/INSERT/, 1).and(
+ change { WorkItems::RelatedLinkRestriction.count }.by(1)
+ )
+ expect(WorkItems::RelatedLinkRestriction.count).to eq(34)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/workers/gitlab/github_import/stage_methods_shared_examples.rb b/spec/support/shared_examples/workers/gitlab/github_import/stage_methods_shared_examples.rb
new file mode 100644
index 00000000000..af5bf33a9a6
--- /dev/null
+++ b/spec/support/shared_examples/workers/gitlab/github_import/stage_methods_shared_examples.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples Gitlab::GithubImport::StageMethods do
+ describe '.sidekiq_retries_exhausted' do
+ it 'tracks the exception and marks the import as failed' do
+ expect(Gitlab::Import::ImportFailureService).to receive(:track)
+ .with(
+ project_id: 1,
+ exception: StandardError,
+ fail_import: true,
+ error_source: anything
+ )
+
+ described_class.sidekiq_retries_exhausted_block.call({ 'args' => [1] }, StandardError.new)
+ end
+ end
+end
diff --git a/spec/support_specs/helpers/stub_saas_features_spec.rb b/spec/support_specs/helpers/stub_saas_features_spec.rb
new file mode 100644
index 00000000000..ed973071a6d
--- /dev/null
+++ b/spec/support_specs/helpers/stub_saas_features_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe StubSaasFeatures, feature_category: :shared do
+ describe '#stub_saas_features' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:feature_name) { '_some_saas_feature_' }
+
+ context 'when checking global state' do
+ where(:feature_value) do
+ [true, false]
+ end
+
+ with_them do
+ before do
+ stub_saas_features(feature_name => feature_value)
+ end
+
+ it { expect(::Gitlab::Saas.feature_available?(feature_name)).to eq(feature_value) }
+ end
+ end
+
+ context 'when value is not boolean' do
+ it 'raises an error' do
+ expect do
+ stub_saas_features(feature_name => '_not_boolean_')
+ end.to raise_error(ArgumentError, /value must be boolean/)
+ end
+ end
+
+ it 'subsequent run changes state' do
+ # enable FF on all
+ stub_saas_features({ feature_name => true })
+ expect(::Gitlab::Saas.feature_available?(feature_name)).to eq(true)
+
+ # disable FF on all
+ stub_saas_features({ feature_name => false })
+ expect(::Gitlab::Saas.feature_available?(feature_name)).to eq(false)
+ end
+
+ it 'handles multiple features' do
+ stub_saas_features(feature_name => false, '_some_new_feature_' => true)
+
+ expect(::Gitlab::Saas.feature_available?(feature_name)).to eq(false)
+ expect(::Gitlab::Saas.feature_available?('_some_new_feature_')).to eq(true)
+ end
+ end
+end
diff --git a/spec/tasks/admin_mode_spec.rb b/spec/tasks/admin_mode_spec.rb
index 116d47aa503..f7527c0ed88 100644
--- a/spec/tasks/admin_mode_spec.rb
+++ b/spec/tasks/admin_mode_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'admin mode on tasks', :silence_stdout do
before do
diff --git a/spec/tasks/cache_rake_spec.rb b/spec/tasks/cache_rake_spec.rb
index 7e4397ce3f4..470d715bfd7 100644
--- a/spec/tasks/cache_rake_spec.rb
+++ b/spec/tasks/cache_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'clearing redis cache', :clean_gitlab_redis_repository_cache, :clean_gitlab_redis_cache,
:silence_stdout, :use_null_store_as_repository_cache, feature_category: :redis do
diff --git a/spec/tasks/config_lint_rake_spec.rb b/spec/tasks/config_lint_rake_spec.rb
index 34899c84888..0c918239f97 100644
--- a/spec/tasks/config_lint_rake_spec.rb
+++ b/spec/tasks/config_lint_rake_spec.rb
@@ -1,29 +1,33 @@
# frozen_string_literal: true
-require 'rake_helper'
-Rake.application.rake_require 'tasks/config_lint'
+require 'spec_helper'
-RSpec.describe ConfigLint, :silence_stdout do
+RSpec.describe 'ConfigLint', :silence_stdout do
+ let(:config_lint) { ConfigLint }
let(:files) { ['lib/support/fake.sh'] }
+ before(:all) do
+ Rake.application.rake_require 'tasks/config_lint'
+ end
+
it 'errors out if any bash scripts have errors' do
- expect { described_class.run(files) { system('exit 1') } }.to raise_error(SystemExit)
+ expect { config_lint.run(files) { system('exit 1') } }.to raise_error(SystemExit)
end
it 'passes if all scripts are fine' do
- expect { described_class.run(files) { system('exit 0') } }.not_to raise_error
+ expect { config_lint.run(files) { system('exit 0') } }.not_to raise_error
end
-end
-RSpec.describe 'config_lint rake task', :silence_stdout do
- before do
- # Prevent `system` from actually being called
- allow(Kernel).to receive(:system).and_return(true)
- end
+ describe 'config_lint rake task', :silence_stdout do
+ before do
+ # Prevent `system` from actually being called
+ allow(Kernel).to receive(:system).and_return(true)
+ end
- it 'runs lint on shell scripts' do
- expect(Kernel).to receive(:system).with('bash', '-n', 'lib/support/init.d/gitlab')
+ it 'runs lint on shell scripts' do
+ expect(Kernel).to receive(:system).with('bash', '-n', 'lib/support/init.d/gitlab')
- run_rake_task('config_lint')
+ run_rake_task('config_lint')
+ end
end
end
diff --git a/spec/tasks/dev_rake_spec.rb b/spec/tasks/dev_rake_spec.rb
index f5490832982..7005357b3d6 100644
--- a/spec/tasks/dev_rake_spec.rb
+++ b/spec/tasks/dev_rake_spec.rb
@@ -1,9 +1,9 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'dev rake tasks' do
- before do
+ before(:all) do
Rake.application.rake_require 'tasks/gitlab/setup'
Rake.application.rake_require 'tasks/gitlab/shell'
Rake.application.rake_require 'tasks/dev'
diff --git a/spec/tasks/gettext_rake_spec.rb b/spec/tasks/gettext_rake_spec.rb
index c44c1734432..fbf374283dc 100644
--- a/spec/tasks/gettext_rake_spec.rb
+++ b/spec/tasks/gettext_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
require_relative '../../tooling/lib/tooling/gettext_extractor'
require_relative '../support/matchers/abort_matcher'
diff --git a/spec/tasks/gitlab/artifacts/check_rake_spec.rb b/spec/tasks/gitlab/artifacts/check_rake_spec.rb
index e0303170755..c70dd76dab9 100644
--- a/spec/tasks/gitlab/artifacts/check_rake_spec.rb
+++ b/spec/tasks/gitlab/artifacts/check_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:artifacts rake tasks', :silence_stdout do
describe 'check' do
diff --git a/spec/tasks/gitlab/artifacts/migrate_rake_spec.rb b/spec/tasks/gitlab/artifacts/migrate_rake_spec.rb
index 1c8a1c6a171..3fa1b8a0a52 100644
--- a/spec/tasks/gitlab/artifacts/migrate_rake_spec.rb
+++ b/spec/tasks/gitlab/artifacts/migrate_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:artifacts namespace rake task', :silence_stdout do
before(:context) do
diff --git a/spec/tasks/gitlab/audit_event_types/audit_event_types_rake_spec.rb b/spec/tasks/gitlab/audit_event_types/audit_event_types_rake_spec.rb
index 14196ce4c5d..c9c3c83f30b 100644
--- a/spec/tasks/gitlab/audit_event_types/audit_event_types_rake_spec.rb
+++ b/spec/tasks/gitlab/audit_event_types/audit_event_types_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
require_relative '../../../../lib/tasks/gitlab/audit_event_types/check_docs_task'
require_relative '../../../../lib/tasks/gitlab/audit_event_types/compile_docs_task'
diff --git a/spec/tasks/gitlab/audit_event_types/check_docs_task_spec.rb b/spec/tasks/gitlab/audit_event_types/check_docs_task_spec.rb
index b492289e99e..41b045a3a42 100644
--- a/spec/tasks/gitlab/audit_event_types/check_docs_task_spec.rb
+++ b/spec/tasks/gitlab/audit_event_types/check_docs_task_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
require_relative '../../../../lib/tasks/gitlab/audit_event_types/check_docs_task'
require_relative '../../../../lib/tasks/gitlab/audit_event_types/compile_docs_task'
diff --git a/spec/tasks/gitlab/audit_event_types/compile_docs_task_spec.rb b/spec/tasks/gitlab/audit_event_types/compile_docs_task_spec.rb
index 0ee85b1283b..7689029de58 100644
--- a/spec/tasks/gitlab/audit_event_types/compile_docs_task_spec.rb
+++ b/spec/tasks/gitlab/audit_event_types/compile_docs_task_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
require_relative '../../../../lib/tasks/gitlab/audit_event_types/compile_docs_task'
RSpec.describe Tasks::Gitlab::AuditEventTypes::CompileDocsTask, feature_category: :audit_events do
diff --git a/spec/tasks/gitlab/background_migrations_rake_spec.rb b/spec/tasks/gitlab/background_migrations_rake_spec.rb
index 0a7ab214cc1..ba5618e2700 100644
--- a/spec/tasks/gitlab/background_migrations_rake_spec.rb
+++ b/spec/tasks/gitlab/background_migrations_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:background_migrations namespace rake tasks', :suppress_gitlab_schemas_validate_connection,
feature_category: :database do
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index fda27d5827f..56560b06219 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category: :backup_restore do
let(:enable_registry) { true }
diff --git a/spec/tasks/gitlab/check_rake_spec.rb b/spec/tasks/gitlab/check_rake_spec.rb
index 74cc5dd6d7c..4a73ef78022 100644
--- a/spec/tasks/gitlab/check_rake_spec.rb
+++ b/spec/tasks/gitlab/check_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'check.rake', :silence_stdout, feature_category: :gitaly do
before do
diff --git a/spec/tasks/gitlab/ci_secure_files/check_rake_spec.rb b/spec/tasks/gitlab/ci_secure_files/check_rake_spec.rb
index fc9aae3597e..5875dcd9943 100644
--- a/spec/tasks/gitlab/ci_secure_files/check_rake_spec.rb
+++ b/spec/tasks/gitlab/ci_secure_files/check_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:ci_secure_files', factory_default: :keep, feature_category: :mobile_devops do
describe 'check' do
diff --git a/spec/tasks/gitlab/ci_secure_files/migrate_rake_spec.rb b/spec/tasks/gitlab/ci_secure_files/migrate_rake_spec.rb
index f3856969a6e..1a948d82cbe 100644
--- a/spec/tasks/gitlab/ci_secure_files/migrate_rake_spec.rb
+++ b/spec/tasks/gitlab/ci_secure_files/migrate_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:ci_secure_files', feature_category: :mobile_devops do
let!(:local_file) { create(:ci_secure_file) }
diff --git a/spec/tasks/gitlab/cleanup_rake_spec.rb b/spec/tasks/gitlab/cleanup_rake_spec.rb
index bd4d9643433..2e41c4f1219 100644
--- a/spec/tasks/gitlab/cleanup_rake_spec.rb
+++ b/spec/tasks/gitlab/cleanup_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:cleanup rake tasks', :silence_stdout do
before do
@@ -165,6 +165,195 @@ RSpec.describe 'gitlab:cleanup rake tasks', :silence_stdout do
end
end
+ shared_examples 'does not remove any branches' do
+ it 'does not delete any branches' do
+ expect(project.repository.raw.find_branch(delete_branch_name)).not_to be_nil
+ expect(project.repository.raw.find_branch(keep_branch_name)).not_to be_nil
+ expect(project.repository.raw.find_branch('test')).not_to be_nil
+
+ rake_task
+
+ expect(project.repository.raw.find_branch(delete_branch_name)).not_to be_nil
+ expect(project.repository.raw.find_branch(keep_branch_name)).not_to be_nil
+ expect(project.repository.raw.find_branch('test')).not_to be_nil
+ end
+ end
+
+ describe 'gitlab:cleanup:remove_missed_source_branches' do
+ subject(:rake_task) { run_rake_task('gitlab:cleanup:remove_missed_source_branches', project.id, user.id, dry_run) }
+
+ let(:project) { create(:project, :repository) }
+ # Merged merge request with force source branch 1
+ # Merged merge request with force source branch 0
+ # Non merged merge request with force source branch 1
+ # Merged Merge request with delete not in project
+ # When can not delete source branch
+
+ let!(:mr1) do
+ project.repository.raw.create_branch(delete_branch_name, "master")
+
+ create(:merge_request, :merged, :remove_source_branch, source_project: project, target_project: project,
+ source_branch: delete_branch_name, target_branch: 'master')
+ end
+
+ let!(:mr2) do
+ project.repository.raw.create_branch(keep_branch_name, "master")
+
+ create(:merge_request, :merged, source_project: project, target_project: project, source_branch: keep_branch_name,
+ target_branch: 'master')
+ end
+
+ let!(:mr3) do
+ create(:merge_request, :remove_source_branch, source_project: project, target_project: project,
+ source_branch: keep_branch_name, target_branch: 'master')
+ end
+
+ let!(:mr4) do
+ create(:merge_request, :merged, :remove_source_branch, source_branch: keep_branch_name, target_branch: 'master')
+ end
+
+ let!(:mr5) do
+ create(:merge_request, :merged, :remove_source_branch, source_branch: 'test', source_project: project,
+ target_project: project, target_branch: 'master')
+ end
+
+ let!(:protected) do
+ create(:protected_branch, :create_branch_on_repository, project: project, name: mr5.source_branch)
+ end
+
+ let(:user) { create(:user, :admin) }
+ let(:dry_run) { true }
+ let(:delete_branch_name) { "to-be-deleted-soon" }
+ let(:delete_me_not) { "delete_me_not" }
+ let(:keep_branch_name) { "not-to-be-deleted-soon" }
+
+ before do
+ project.add_owner(user)
+ stub_env('USER_ID', user.id)
+ stub_env('PROJECT_ID', project.id)
+ end
+
+ context 'when dry run is true' do
+ it_behaves_like 'does not remove any branches'
+
+ context 'and when a valid batch size is given' do
+ it 'takes into account for the batch size' do
+ run_rake_task('gitlab:cleanup:remove_missed_source_branches', project.id, user.id, dry_run)
+
+ stub_env('BATCH_SIZE', '1')
+ count_1 = ActiveRecord::QueryRecorder.new do
+ run_rake_task('gitlab:cleanup:remove_missed_source_branches', project.id, user.id, dry_run)
+ end.count
+
+ stub_env('BATCH_SIZE', '2')
+ count_2 = ActiveRecord::QueryRecorder.new do
+ run_rake_task('gitlab:cleanup:remove_missed_source_branches', project.id, user.id, dry_run)
+ end.count
+
+ expect(count_1).to be > count_2
+ end
+ end
+ end
+
+ context 'when dry run is false' do
+ let!(:mr6) do
+ project.repository.raw.create_branch(delete_me_not, "master")
+
+ create(:merge_request, :merged, :remove_source_branch, source_project: project, target_project: project,
+ source_branch: delete_me_not, target_branch: 'master')
+ end
+
+ before do
+ stub_env('DRY_RUN', 'false')
+ end
+
+ it 'deletes the branches' do
+ expect(project.repository.raw.find_branch(delete_branch_name)).not_to be_nil
+ expect(project.repository.raw.find_branch(keep_branch_name)).not_to be_nil
+ expect(project.repository.raw.find_branch(delete_me_not)).not_to be_nil
+ expect(project.repository.raw.find_branch('test')).not_to be_nil
+
+ rake_task
+
+ expect(project.repository.raw.find_branch(delete_branch_name)).to be_nil
+ expect(project.repository.raw.find_branch(delete_me_not)).to be_nil
+ expect(project.repository.raw.find_branch(keep_branch_name)).not_to be_nil
+ expect(project.repository.raw.find_branch('test')).not_to be_nil
+ end
+
+ context 'when a limit is set' do
+ before do
+ stub_env('LIMIT_TO_DELETE', 1)
+ end
+
+ it 'deletes only one branch' do
+ expect(project.repository.raw.find_branch(delete_branch_name)).not_to be_nil
+ expect(project.repository.raw.find_branch(keep_branch_name)).not_to be_nil
+ expect(project.repository.raw.find_branch(delete_me_not)).not_to be_nil
+ expect(project.repository.raw.find_branch('test')).not_to be_nil
+
+ rake_task
+
+ expect(project.repository.raw.find_branch(delete_branch_name)).to be_nil
+ expect(project.repository.raw.find_branch(delete_me_not)).not_to be_nil
+ expect(project.repository.raw.find_branch(keep_branch_name)).not_to be_nil
+ expect(project.repository.raw.find_branch('test')).not_to be_nil
+ end
+ end
+
+ context 'when the branch has a merged and opened mr' do
+ let!(:mr7) do
+ project.repository.raw.create_branch(delete_me_not, "master")
+
+ create(:merge_request, :opened, :remove_source_branch, source_project: project, target_project: project,
+ source_branch: delete_me_not, target_branch: 'master')
+ end
+
+ it 'does not delete the branch of the merged/open mr' do
+ expect(project.repository.raw.find_branch(delete_me_not)).not_to be_nil
+
+ rake_task
+
+ expect(project.repository.raw.find_branch(delete_me_not)).not_to be_nil
+ end
+ end
+
+    context 'when a valid batch size is given' do
+ before do
+ stub_env('BATCH_SIZE', '1')
+ end
+
+ it 'deletes the branches' do
+ expect(project.repository.raw.find_branch(delete_branch_name)).not_to be_nil
+ expect(project.repository.raw.find_branch(keep_branch_name)).not_to be_nil
+ expect(project.repository.raw.find_branch('test')).not_to be_nil
+
+ rake_task
+
+ expect(project.repository.raw.find_branch(delete_branch_name)).to be_nil
+ expect(project.repository.raw.find_branch(keep_branch_name)).not_to be_nil
+ expect(project.repository.raw.find_branch('test')).not_to be_nil
+ end
+ end
+
+ context 'when an invalid batch size is given' do
+ before do
+ stub_env('BATCH_SIZE', '-1')
+ end
+
+ it_behaves_like 'does not remove any branches'
+ end
+
+ context 'when an invalid limit to delete is given' do
+ before do
+ stub_env('LIMIT_TO_DELETE', '-1')
+ end
+
+ it_behaves_like 'does not remove any branches'
+ end
+ end
+ end
+
context 'sessions' do
describe 'gitlab:cleanup:sessions:active_sessions_lookup_keys', :clean_gitlab_redis_sessions do
subject(:rake_task) { run_rake_task('gitlab:cleanup:sessions:active_sessions_lookup_keys') }
diff --git a/spec/tasks/gitlab/container_registry_rake_spec.rb b/spec/tasks/gitlab/container_registry_rake_spec.rb
index d0c728bf36d..8154b152710 100644
--- a/spec/tasks/gitlab/container_registry_rake_spec.rb
+++ b/spec/tasks/gitlab/container_registry_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:container_registry namespace rake tasks', :silence_stdout do
let(:api_url) { 'http://registry.gitlab' }
diff --git a/spec/tasks/gitlab/db/cells/bump_cell_sequences_rake_spec.rb b/spec/tasks/gitlab/db/cells/bump_cell_sequences_rake_spec.rb
index a1725d6fed7..ff11e4ee8cb 100644
--- a/spec/tasks/gitlab/db/cells/bump_cell_sequences_rake_spec.rb
+++ b/spec/tasks/gitlab/db/cells/bump_cell_sequences_rake_spec.rb
@@ -1,14 +1,11 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:db:cells:bump_cell_sequences', :silence_stdout,
:suppress_gitlab_schemas_validate_connection, feature_category: :cell, query_analyzers: false do
before(:all) do
Rake.application.rake_require 'tasks/gitlab/db/cells/bump_cell_sequences'
-
- # empty task as env is already loaded
- Rake::Task.define_task :environment
end
let(:main_sequence_name) { 'users_id_seq' }
diff --git a/spec/tasks/gitlab/db/decomposition/connection_status_rake_spec.rb b/spec/tasks/gitlab/db/decomposition/connection_status_rake_spec.rb
index 352e3d944fc..2dba73886b8 100644
--- a/spec/tasks/gitlab/db/decomposition/connection_status_rake_spec.rb
+++ b/spec/tasks/gitlab/db/decomposition/connection_status_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:db:decomposition:connection_status', feature_category: :cell do
let(:max_connections) { 500 }
diff --git a/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb b/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb
index 3d4b977644f..41c9e837ead 100644
--- a/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb
+++ b/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb
@@ -1,14 +1,11 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:db:decomposition:rollback:bump_ci_sequences', :silence_stdout,
:suppress_gitlab_schemas_validate_connection, feature_category: :cell, query_analyzers: false do
before(:all) do
Rake.application.rake_require 'tasks/gitlab/db/decomposition/rollback/bump_ci_sequences'
-
- # empty task as env is already loaded
- Rake::Task.define_task :environment
end
let(:expected_error_message) do
diff --git a/spec/tasks/gitlab/db/lock_writes_rake_spec.rb b/spec/tasks/gitlab/db/lock_writes_rake_spec.rb
index 5baf13b9847..4c5e3d437ae 100644
--- a/spec/tasks/gitlab/db/lock_writes_rake_spec.rb
+++ b/spec/tasks/gitlab/db/lock_writes_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:db:lock_writes', :reestablished_active_record_base, feature_category: :cell do
before(:all) do
@@ -8,9 +8,6 @@ RSpec.describe 'gitlab:db:lock_writes', :reestablished_active_record_base, featu
Rake.application.rake_require 'tasks/seed_fu'
Rake.application.rake_require 'tasks/gitlab/db/validate_config'
Rake.application.rake_require 'tasks/gitlab/db/lock_writes'
-
- # empty task as env is already loaded
- Rake::Task.define_task :environment
end
let(:table_locker) { instance_double(Gitlab::Database::TablesLocker) }
diff --git a/spec/tasks/gitlab/db/migration_fix_15_11_rake_spec.rb b/spec/tasks/gitlab/db/migration_fix_15_11_rake_spec.rb
index 9a101921b68..4ad149ef7d9 100644
--- a/spec/tasks/gitlab/db/migration_fix_15_11_rake_spec.rb
+++ b/spec/tasks/gitlab/db/migration_fix_15_11_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'migration_fix_15_11', :reestablished_active_record_base, feature_category: :database do
let(:db) { ApplicationRecord.connection }
@@ -10,8 +10,6 @@ RSpec.describe 'migration_fix_15_11', :reestablished_active_record_base, feature
before(:all) do
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/gitlab/db/migration_fix_15_11'
-
- Rake::Task.define_task :environment
end
describe 'migration_fix_15_11' do
diff --git a/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb b/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb
index 518acfc5d81..940bb9baa60 100644
--- a/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb
+++ b/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:db:truncate_legacy_tables', :silence_stdout, :reestablished_active_record_base,
:suppress_gitlab_schemas_validate_connection, feature_category: :cell do
@@ -14,9 +14,6 @@ RSpec.describe 'gitlab:db:truncate_legacy_tables', :silence_stdout, :reestablish
Rake.application.rake_require 'tasks/seed_fu'
Rake.application.rake_require 'tasks/gitlab/db/validate_config'
Rake.application.rake_require 'tasks/gitlab/db/truncate_legacy_tables'
-
- # empty task as env is already loaded
- Rake::Task.define_task :environment
end
before do
diff --git a/spec/tasks/gitlab/db/validate_config_rake_spec.rb b/spec/tasks/gitlab/db/validate_config_rake_spec.rb
index e58667578b2..91d4190b815 100644
--- a/spec/tasks/gitlab/db/validate_config_rake_spec.rb
+++ b/spec/tasks/gitlab/db/validate_config_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:db:validate_config', :silence_stdout, :suppress_gitlab_schemas_validate_connection, feature_category: :cell do
# We don't need to delete this data since it only modifies `ar_internal_metadata`
@@ -11,9 +11,6 @@ RSpec.describe 'gitlab:db:validate_config', :silence_stdout, :suppress_gitlab_sc
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/seed_fu'
Rake.application.rake_require 'tasks/gitlab/db/validate_config'
-
- # empty task as env is already loaded
- Rake::Task.define_task :environment
end
context "when validating config" do
diff --git a/spec/tasks/gitlab/db_rake_spec.rb b/spec/tasks/gitlab/db_rake_spec.rb
index c35c162c99a..c2e53da8d4b 100644
--- a/spec/tasks/gitlab/db_rake_spec.rb
+++ b/spec/tasks/gitlab/db_rake_spec.rb
@@ -1,15 +1,12 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_category: :database do
before(:all) do
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/seed_fu'
Rake.application.rake_require 'tasks/gitlab/db'
-
- # empty task as env is already loaded
- Rake::Task.define_task :environment
end
before do
diff --git a/spec/tasks/gitlab/dependency_proxy/migrate_rake_spec.rb b/spec/tasks/gitlab/dependency_proxy/migrate_rake_spec.rb
index 0bda879bd7c..d37bf6ff939 100644
--- a/spec/tasks/gitlab/dependency_proxy/migrate_rake_spec.rb
+++ b/spec/tasks/gitlab/dependency_proxy/migrate_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:dependency_proxy namespace rake task', :silence_stdout do
before(:all) do
diff --git a/spec/tasks/gitlab/doctor/secrets_rake_spec.rb b/spec/tasks/gitlab/doctor/secrets_rake_spec.rb
new file mode 100644
index 00000000000..91ef3c57d73
--- /dev/null
+++ b/spec/tasks/gitlab/doctor/secrets_rake_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'gitlab:doctor:reset_encrypted_tokens', :silence_stdout, feature_category: :runner_fleet do
+ let(:model_names) { 'Project,Group' }
+ let(:token_names) { 'runners_token' }
+
+ let(:project_with_cipher_error) do
+ create(:project).tap do |project|
+ project.update_columns(runners_token_encrypted:
+ '|rXs75DSHXPE9MGAIgyxcut8pZc72gaa/2ojU0GS1+R+cXNqkbUB13Vb5BaMwf47d98980fc1')
+ end
+ end
+
+ before(:context) do
+ Rake.application.rake_require 'tasks/gitlab/doctor/secrets'
+ end
+
+ before do
+ stub_env('MODEL_NAMES', model_names)
+ stub_env('TOKEN_NAMES', token_names)
+ end
+
+ subject(:run!) do
+ run_rake_task('gitlab:doctor:reset_encrypted_tokens')
+ end
+
+  it 'properly parses parameters from the environment variables' do
+ expect_next_instance_of(::Gitlab::Doctor::ResetTokens, anything,
+ model_names: %w[Project Group],
+ token_names: %w[runners_token],
+ dry_run: true) do |service|
+ expect(service).to receive(:run!).and_call_original
+ end
+
+ run!
+ end
+
+ it "doesn't do anything in DRY_RUN mode(default)" do
+ expect do
+ run!
+ end.not_to change { project_with_cipher_error.reload.runners_token_encrypted }
+ end
+
+ it 'regenerates broken token if DRY_RUN is set to false' do
+ stub_env('DRY_RUN', false)
+
+ expect { project_with_cipher_error.runners_token }.to raise_error(OpenSSL::Cipher::CipherError)
+ expect do
+ run!
+ end.to change { project_with_cipher_error.reload.runners_token_encrypted }
+
+ expect { project_with_cipher_error.runners_token }.not_to raise_error
+ end
+end
diff --git a/spec/tasks/gitlab/external_diffs_rake_spec.rb b/spec/tasks/gitlab/external_diffs_rake_spec.rb
index 86242de4b90..10a1253a4a5 100644
--- a/spec/tasks/gitlab/external_diffs_rake_spec.rb
+++ b/spec/tasks/gitlab/external_diffs_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:external_diffs rake tasks', :silence_stdout do
before do
diff --git a/spec/tasks/gitlab/feature_categories_rake_spec.rb b/spec/tasks/gitlab/feature_categories_rake_spec.rb
index f495c7e8911..84558ea7fb7 100644
--- a/spec/tasks/gitlab/feature_categories_rake_spec.rb
+++ b/spec/tasks/gitlab/feature_categories_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:feature_categories:index', :silence_stdout, feature_category: :scalability do
before do
diff --git a/spec/tasks/gitlab/git_rake_spec.rb b/spec/tasks/gitlab/git_rake_spec.rb
index aab927a472e..52cfb50e9bc 100644
--- a/spec/tasks/gitlab/git_rake_spec.rb
+++ b/spec/tasks/gitlab/git_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:git rake tasks', :silence_stdout do
let(:base_path) { 'tmp/tests/default_storage' }
diff --git a/spec/tasks/gitlab/gitaly_rake_spec.rb b/spec/tasks/gitlab/gitaly_rake_spec.rb
index 5a395c8f6ef..e9de7ba5218 100644
--- a/spec/tasks/gitlab/gitaly_rake_spec.rb
+++ b/spec/tasks/gitlab/gitaly_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:gitaly namespace rake task', :silence_stdout do
before(:all) do
diff --git a/spec/tasks/gitlab/incoming_email_rake_spec.rb b/spec/tasks/gitlab/incoming_email_rake_spec.rb
index 3e1cc663ddb..f9109fdf2b8 100644
--- a/spec/tasks/gitlab/incoming_email_rake_spec.rb
+++ b/spec/tasks/gitlab/incoming_email_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:incoming_email:secret rake tasks', :silence_stdout, feature_category: :build do
let(:encrypted_secret_file_dir) { Pathname.new(Dir.mktmpdir) }
diff --git a/spec/tasks/gitlab/ldap_rake_spec.rb b/spec/tasks/gitlab/ldap_rake_spec.rb
index b18c2c88a46..877bed7cacf 100644
--- a/spec/tasks/gitlab/ldap_rake_spec.rb
+++ b/spec/tasks/gitlab/ldap_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:ldap:rename_provider rake task', :silence_stdout do
it 'completes without error' do
diff --git a/spec/tasks/gitlab/lfs/check_rake_spec.rb b/spec/tasks/gitlab/lfs/check_rake_spec.rb
index ce0076826c4..ac15b2cc253 100644
--- a/spec/tasks/gitlab/lfs/check_rake_spec.rb
+++ b/spec/tasks/gitlab/lfs/check_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:lfs rake tasks', :silence_stdout do
describe 'check' do
diff --git a/spec/tasks/gitlab/lfs/migrate_rake_spec.rb b/spec/tasks/gitlab/lfs/migrate_rake_spec.rb
index cbc39c6b093..d662fed9f5d 100644
--- a/spec/tasks/gitlab/lfs/migrate_rake_spec.rb
+++ b/spec/tasks/gitlab/lfs/migrate_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:lfs namespace rake task', :silence_stdout do
before(:all) do
diff --git a/spec/tasks/gitlab/packages/migrate_rake_spec.rb b/spec/tasks/gitlab/packages/migrate_rake_spec.rb
index cdc817cdf38..bb8a4bfdda8 100644
--- a/spec/tasks/gitlab/packages/migrate_rake_spec.rb
+++ b/spec/tasks/gitlab/packages/migrate_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:packages namespace rake task', :silence_stdout do
before(:all) do
diff --git a/spec/tasks/gitlab/pages_rake_spec.rb b/spec/tasks/gitlab/pages_rake_spec.rb
index 9e3d5c3ccf0..6e23f9823b9 100644
--- a/spec/tasks/gitlab/pages_rake_spec.rb
+++ b/spec/tasks/gitlab/pages_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:pages', :silence_stdout do
before(:context) do
diff --git a/spec/tasks/gitlab/password_rake_spec.rb b/spec/tasks/gitlab/password_rake_spec.rb
index 21a6dc102e6..2b7056344d3 100644
--- a/spec/tasks/gitlab/password_rake_spec.rb
+++ b/spec/tasks/gitlab/password_rake_spec.rb
@@ -1,9 +1,9 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:password rake tasks', :silence_stdout do
- let!(:user_1) { create(:user, username: 'foobar', password: User.random_password) }
+ let!(:user_1) { create(:user, username: 'foobar', password: User.random_password, password_automatically_set: true) }
let(:password) { User.random_password }
def stub_username(username)
@@ -26,8 +26,14 @@ RSpec.describe 'gitlab:password rake tasks', :silence_stdout do
describe ':reset' do
context 'when all inputs are correct' do
it 'updates the password properly' do
+ expect(user_1.password_automatically_set?).to eq(true)
+
run_rake_task('gitlab:password:reset', user_1.username)
- expect(user_1.reload.valid_password?(password)).to eq(true)
+
+ user_1.reload
+
+ expect(user_1.valid_password?(password)).to eq(true)
+ expect(user_1.password_automatically_set?).to eq(false)
end
end
diff --git a/spec/tasks/gitlab/praefect_rake_spec.rb b/spec/tasks/gitlab/praefect_rake_spec.rb
index 85e655ed72c..915179d24d9 100644
--- a/spec/tasks/gitlab/praefect_rake_spec.rb
+++ b/spec/tasks/gitlab/praefect_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:praefect:replicas', :silence_stdout do
before do
diff --git a/spec/tasks/gitlab/refresh_project_statistics_build_artifacts_size_rake_spec.rb b/spec/tasks/gitlab/refresh_project_statistics_build_artifacts_size_rake_spec.rb
index 60c0d80223e..fbb2630bd7b 100644
--- a/spec/tasks/gitlab/refresh_project_statistics_build_artifacts_size_rake_spec.rb
+++ b/spec/tasks/gitlab/refresh_project_statistics_build_artifacts_size_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:refresh_project_statistics_build_artifacts_size rake task', :silence_stdout, feature_category: :build_artifacts do
let(:rake_task) { 'gitlab:refresh_project_statistics_build_artifacts_size' }
diff --git a/spec/tasks/gitlab/security/update_banned_ssh_keys_rake_spec.rb b/spec/tasks/gitlab/security/update_banned_ssh_keys_rake_spec.rb
index 264dea815f4..c1964090dc5 100644
--- a/spec/tasks/gitlab/security/update_banned_ssh_keys_rake_spec.rb
+++ b/spec/tasks/gitlab/security/update_banned_ssh_keys_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
# We need to load the constants here, or else stubbed
# constants will be overwritten when `require 'git'`
diff --git a/spec/tasks/gitlab/seed/group_seed_rake_spec.rb b/spec/tasks/gitlab/seed/group_seed_rake_spec.rb
index 85d81103000..a69ddd85095 100644
--- a/spec/tasks/gitlab/seed/group_seed_rake_spec.rb
+++ b/spec/tasks/gitlab/seed/group_seed_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:seed:group_seed rake task', :silence_stdout, feature_category: :groups_and_projects do
let(:username) { 'group_seed' }
diff --git a/spec/tasks/gitlab/seed/runner_fleet_rake_spec.rb b/spec/tasks/gitlab/seed/runner_fleet_rake_spec.rb
index e0390d2aa09..b8503e2bc1b 100644
--- a/spec/tasks/gitlab/seed/runner_fleet_rake_spec.rb
+++ b/spec/tasks/gitlab/seed/runner_fleet_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:seed:runner_fleet rake task', :silence_stdout, feature_category: :runner_fleet do
let(:registration_prefix) { 'rf-' }
diff --git a/spec/tasks/gitlab/service_desk_email_rake_spec.rb b/spec/tasks/gitlab/service_desk_email_rake_spec.rb
index 6a1a7473f4a..af7c1918105 100644
--- a/spec/tasks/gitlab/service_desk_email_rake_spec.rb
+++ b/spec/tasks/gitlab/service_desk_email_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:service_desk_email:secret rake tasks', :silence_stdout, feature_category: :build do
let(:encrypted_secret_file_dir) { Pathname.new(Dir.mktmpdir) }
diff --git a/spec/tasks/gitlab/setup_rake_spec.rb b/spec/tasks/gitlab/setup_rake_spec.rb
index 80e997fcf88..29dc4c1d3ed 100644
--- a/spec/tasks/gitlab/setup_rake_spec.rb
+++ b/spec/tasks/gitlab/setup_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:setup namespace rake tasks', :silence_stdout do
before do
diff --git a/spec/tasks/gitlab/shell_rake_spec.rb b/spec/tasks/gitlab/shell_rake_spec.rb
index 30f512205f9..7bcf88aeea2 100644
--- a/spec/tasks/gitlab/shell_rake_spec.rb
+++ b/spec/tasks/gitlab/shell_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:shell rake tasks', :silence_stdout do
before do
diff --git a/spec/tasks/gitlab/sidekiq_rake_spec.rb b/spec/tasks/gitlab/sidekiq_rake_spec.rb
index 0e5111c90a1..2419449814e 100644
--- a/spec/tasks/gitlab/sidekiq_rake_spec.rb
+++ b/spec/tasks/gitlab/sidekiq_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'sidekiq.rake', :aggregate_failures, :silence_stdout do
before do
diff --git a/spec/tasks/gitlab/smtp_rake_spec.rb b/spec/tasks/gitlab/smtp_rake_spec.rb
index 572df8421d5..7c08ee25af6 100644
--- a/spec/tasks/gitlab/smtp_rake_spec.rb
+++ b/spec/tasks/gitlab/smtp_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:smtp:secret rake tasks' do
let(:smtp_secret_file) { 'tmp/tests/smtpenc/smtp_secret.yaml.enc' }
diff --git a/spec/tasks/gitlab/snippets_rake_spec.rb b/spec/tasks/gitlab/snippets_rake_spec.rb
index 231c2dae006..395eddab0ad 100644
--- a/spec/tasks/gitlab/snippets_rake_spec.rb
+++ b/spec/tasks/gitlab/snippets_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:snippets namespace rake task', :silence_stdout do
let!(:user) { create(:user) }
diff --git a/spec/tasks/gitlab/storage_rake_spec.rb b/spec/tasks/gitlab/storage_rake_spec.rb
deleted file mode 100644
index cd520673143..00000000000
--- a/spec/tasks/gitlab/storage_rake_spec.rb
+++ /dev/null
@@ -1,284 +0,0 @@
-# frozen_string_literal: true
-
-require 'rake_helper'
-
-RSpec.describe 'rake gitlab:storage:*', :silence_stdout, feature_category: :cell do
- before do
- Rake.application.rake_require 'tasks/gitlab/storage'
-
- stub_warn_user_is_not_gitlab
- end
-
- shared_examples "rake listing entities" do |entity_name, storage_type|
- context 'limiting to 2' do
- before do
- stub_env('LIMIT' => 2)
- end
-
- it "lists 2 out of 3 #{storage_type.downcase} #{entity_name}" do
- create_collection
-
- expect { run_rake_task(task) }.to output(/Found 3 #{entity_name} using #{storage_type} Storage.*Displaying first 2 #{entity_name}/m).to_stdout
- end
- end
-
- context "without any #{storage_type.downcase} #{entity_name.singularize}" do
- it 'displays message for empty results' do
- expect { run_rake_task(task) }.to output(/Found 0 #{entity_name} using #{storage_type} Storage/).to_stdout
- end
- end
- end
-
- shared_examples "rake entities summary" do |entity_name, storage_type|
- context "with existing 3 #{storage_type.downcase} #{entity_name}" do
- it "reports 3 #{storage_type.downcase} #{entity_name}" do
- create_collection
-
- expect { run_rake_task(task) }.to output(/Found 3 #{entity_name} using #{storage_type} Storage/).to_stdout
- end
- end
-
- context "without any #{storage_type.downcase} #{entity_name.singularize}" do
- it 'displays message for empty results' do
- expect { run_rake_task(task) }.to output(/Found 0 #{entity_name} using #{storage_type} Storage/).to_stdout
- end
- end
- end
-
- shared_examples "make sure database is writable" do
- context 'read-only database' do
- it 'does nothing' do
- expect(Gitlab::Database).to receive(:read_only?).and_return(true)
-
- expect(Project).not_to receive(:with_unmigrated_storage)
-
- expect { run_rake_task(task) }.to abort_execution.with_message(/This task requires database write access. Exiting./)
- end
- end
- end
-
- shared_examples "handles custom BATCH env var" do |worker_klass|
- context 'in batches of 1' do
- before do
- stub_env('BATCH' => 1)
- end
-
- it "enqueues one #{worker_klass} per project" do
- projects.each do |project|
- expect(worker_klass).to receive(:perform_async).with(project.id, project.id)
- end
-
- run_rake_task(task)
- end
- end
-
- context 'in batches of 2' do
- before do
- stub_env('BATCH' => 2)
- end
-
- it "enqueues one #{worker_klass} per 2 projects" do
- projects.map(&:id).sort.each_slice(2) do |first, last|
- last ||= first
- expect(worker_klass).to receive(:perform_async).with(first, last)
- end
-
- run_rake_task(task)
- end
- end
- end
-
- shared_examples 'wait until database is ready' do
- it 'checks if the database is ready once' do
- expect(ApplicationRecord.database).to receive(:exists?).once
-
- run_rake_task(task)
- end
-
- context 'handles custom env vars' do
- before do
- stub_env('MAX_DATABASE_CONNECTION_CHECKS' => 3)
- stub_env('MAX_DATABASE_CONNECTION_INTERVAL' => 0.1)
- end
-
- it 'tries for 3 times, polling every 0.1 seconds' do
- expect(ApplicationRecord.database).to receive(:exists?).exactly(3).times.and_return(false)
-
- run_rake_task(task)
- end
- end
- end
-
- describe 'gitlab:storage:migrate_to_hashed' do
- let(:task) { 'gitlab:storage:migrate_to_hashed' }
-
- context 'with rollback already scheduled', :redis do
- it 'does nothing' do
- Sidekiq::Testing.disable! do
- ::HashedStorage::RollbackerWorker.perform_async(1, 5)
-
- expect(Project).not_to receive(:with_unmigrated_storage)
-
- expect { run_rake_task(task) }.to abort_execution.with_message(/There is already a rollback operation in progress/)
- end
- end
- end
-
- context 'with 0 legacy projects' do
- it 'does nothing' do
- expect(::HashedStorage::MigratorWorker).not_to receive(:perform_async)
-
- expect { run_rake_task(task) }.to abort_execution.with_message('There are no projects requiring storage migration. Nothing to do!')
- end
- end
-
- context 'with 3 legacy projects' do
- let(:projects) { create_list(:project, 3, :legacy_storage) }
-
- it 'enqueues migrations and count projects correctly' do
- projects.map(&:id).sort.tap do |ids|
- stub_env('ID_FROM', ids[0])
- stub_env('ID_TO', ids[1])
- end
-
- expect { run_rake_task(task) }.to output(/Enqueuing migration of 2 projects in batches/).to_stdout
- end
-
- it_behaves_like 'handles custom BATCH env var', ::HashedStorage::MigratorWorker
- end
-
- context 'with same id in range' do
- it 'displays message when project cant be found' do
- stub_env('ID_FROM', non_existing_record_id)
- stub_env('ID_TO', non_existing_record_id)
-
- expect { run_rake_task(task) }.to abort_execution.with_message(/There are no projects requiring storage migration with ID=#{non_existing_record_id}/)
- end
-
- it 'displays a message when project exists but its already migrated' do
- project = create(:project)
- stub_env('ID_FROM', project.id)
- stub_env('ID_TO', project.id)
-
- expect { run_rake_task(task) }.to abort_execution.with_message(/There are no projects requiring storage migration with ID=#{project.id}/)
- end
-
- it 'enqueues migration when project can be found' do
- project = create(:project, :legacy_storage)
- stub_env('ID_FROM', project.id)
- stub_env('ID_TO', project.id)
-
- expect { run_rake_task(task) }.to output(/Enqueueing storage migration .* \(ID=#{project.id}\)/).to_stdout
- end
- end
- end
-
- describe 'gitlab:storage:rollback_to_legacy' do
- let(:task) { 'gitlab:storage:rollback_to_legacy' }
-
- it_behaves_like 'make sure database is writable'
-
- context 'with migration already scheduled', :redis do
- it 'does nothing' do
- Sidekiq::Testing.disable! do
- ::HashedStorage::MigratorWorker.perform_async(1, 5)
-
- expect(Project).not_to receive(:with_unmigrated_storage)
-
- expect { run_rake_task(task) }.to abort_execution.with_message(/There is already a migration operation in progress/)
- end
- end
- end
-
- context 'with 0 hashed projects' do
- it 'does nothing' do
- expect(::HashedStorage::RollbackerWorker).not_to receive(:perform_async)
-
- expect { run_rake_task(task) }.to abort_execution.with_message('There are no projects that can have storage rolledback. Nothing to do!')
- end
- end
-
- context 'with 3 hashed projects' do
- let(:projects) { create_list(:project, 3) }
-
- it 'enqueues migrations and count projects correctly' do
- projects.map(&:id).sort.tap do |ids|
- stub_env('ID_FROM', ids[0])
- stub_env('ID_TO', ids[1])
- end
-
- expect { run_rake_task(task) }.to output(/Enqueuing rollback of 2 projects in batches/).to_stdout
- end
-
- it_behaves_like "handles custom BATCH env var", ::HashedStorage::RollbackerWorker
- end
- end
-
- describe 'gitlab:storage:legacy_projects' do
- it_behaves_like 'rake entities summary', 'projects', 'Legacy' do
- let(:task) { 'gitlab:storage:legacy_projects' }
- let(:create_collection) { create_list(:project, 3, :legacy_storage) }
- end
-
- it_behaves_like 'wait until database is ready' do
- let(:task) { 'gitlab:storage:legacy_projects' }
- end
- end
-
- describe 'gitlab:storage:list_legacy_projects' do
- it_behaves_like 'rake listing entities', 'projects', 'Legacy' do
- let(:task) { 'gitlab:storage:list_legacy_projects' }
- let(:create_collection) { create_list(:project, 3, :legacy_storage) }
- end
- end
-
- describe 'gitlab:storage:hashed_projects' do
- it_behaves_like 'rake entities summary', 'projects', 'Hashed' do
- let(:task) { 'gitlab:storage:hashed_projects' }
- let(:create_collection) { create_list(:project, 3, storage_version: 1) }
- end
- end
-
- describe 'gitlab:storage:list_hashed_projects' do
- it_behaves_like 'rake listing entities', 'projects', 'Hashed' do
- let(:task) { 'gitlab:storage:list_hashed_projects' }
- let(:create_collection) { create_list(:project, 3, storage_version: 1) }
- end
- end
-
- describe 'gitlab:storage:legacy_attachments' do
- it_behaves_like 'rake entities summary', 'attachments', 'Legacy' do
- let(:task) { 'gitlab:storage:legacy_attachments' }
- let(:project) { create(:project, storage_version: 1) }
- let(:create_collection) { create_list(:upload, 3, model: project) }
- end
-
- it_behaves_like 'wait until database is ready' do
- let(:task) { 'gitlab:storage:legacy_attachments' }
- end
- end
-
- describe 'gitlab:storage:list_legacy_attachments' do
- it_behaves_like 'rake listing entities', 'attachments', 'Legacy' do
- let(:task) { 'gitlab:storage:list_legacy_attachments' }
- let(:project) { create(:project, storage_version: 1) }
- let(:create_collection) { create_list(:upload, 3, model: project) }
- end
- end
-
- describe 'gitlab:storage:hashed_attachments' do
- it_behaves_like 'rake entities summary', 'attachments', 'Hashed' do
- let(:task) { 'gitlab:storage:hashed_attachments' }
- let(:project) { create(:project) }
- let(:create_collection) { create_list(:upload, 3, model: project) }
- end
- end
-
- describe 'gitlab:storage:list_hashed_attachments' do
- it_behaves_like 'rake listing entities', 'attachments', 'Hashed' do
- let(:task) { 'gitlab:storage:list_hashed_attachments' }
- let(:project) { create(:project) }
- let(:create_collection) { create_list(:upload, 3, model: project) }
- end
- end
-end
diff --git a/spec/tasks/gitlab/terraform/migrate_rake_spec.rb b/spec/tasks/gitlab/terraform/migrate_rake_spec.rb
index 3797c01a9cb..ad6db2f345e 100644
--- a/spec/tasks/gitlab/terraform/migrate_rake_spec.rb
+++ b/spec/tasks/gitlab/terraform/migrate_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:terraform_states', :silence_stdout do
let!(:version) { create(:terraform_state_version) }
diff --git a/spec/tasks/gitlab/update_templates_rake_spec.rb b/spec/tasks/gitlab/update_templates_rake_spec.rb
index 47eeea239ea..18ef20fbc78 100644
--- a/spec/tasks/gitlab/update_templates_rake_spec.rb
+++ b/spec/tasks/gitlab/update_templates_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:update_project_templates rake task', :silence_stdout, feature_category: :importers do
let!(:tmpdir) { Dir.mktmpdir }
diff --git a/spec/tasks/gitlab/uploads/check_rake_spec.rb b/spec/tasks/gitlab/uploads/check_rake_spec.rb
index b3efe33c549..6d88917cc40 100644
--- a/spec/tasks/gitlab/uploads/check_rake_spec.rb
+++ b/spec/tasks/gitlab/uploads/check_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:uploads rake tasks', :silence_stdout do
describe 'check' do
diff --git a/spec/tasks/gitlab/uploads/migrate_rake_spec.rb b/spec/tasks/gitlab/uploads/migrate_rake_spec.rb
index 3a368a5011b..7431810d783 100644
--- a/spec/tasks/gitlab/uploads/migrate_rake_spec.rb
+++ b/spec/tasks/gitlab/uploads/migrate_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:uploads:migrate and migrate_to_local rake tasks', :sidekiq_inline, :silence_stdout do
before do
diff --git a/spec/tasks/gitlab/usage_data_rake_spec.rb b/spec/tasks/gitlab/usage_data_rake_spec.rb
index 170b1319154..e0e6de99360 100644
--- a/spec/tasks/gitlab/usage_data_rake_spec.rb
+++ b/spec/tasks/gitlab/usage_data_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:usage data take tasks', :silence_stdout, :with_license, feature_category: :service_ping do
include StubRequests
diff --git a/spec/tasks/gitlab/user_management_rake_spec.rb b/spec/tasks/gitlab/user_management_rake_spec.rb
index e8de4511c1d..afe4ca58252 100644
--- a/spec/tasks/gitlab/user_management_rake_spec.rb
+++ b/spec/tasks/gitlab/user_management_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:user_management tasks', :silence_stdout, feature_category: :groups_and_projects do
before do
diff --git a/spec/tasks/gitlab/web_hook_rake_spec.rb b/spec/tasks/gitlab/web_hook_rake_spec.rb
index 6ad65f55142..d6f341ef58b 100644
--- a/spec/tasks/gitlab/web_hook_rake_spec.rb
+++ b/spec/tasks/gitlab/web_hook_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:web_hook namespace rake tasks', :silence_stdout do
let!(:group) { create(:group) }
diff --git a/spec/tasks/gitlab/workhorse_rake_spec.rb b/spec/tasks/gitlab/workhorse_rake_spec.rb
index e87bef9f01f..2edbdd09fd0 100644
--- a/spec/tasks/gitlab/workhorse_rake_spec.rb
+++ b/spec/tasks/gitlab/workhorse_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:workhorse namespace rake task', :silence_stdout, feature_category: :source_code_management do
before(:all) do
diff --git a/spec/tasks/gitlab/x509/update_rake_spec.rb b/spec/tasks/gitlab/x509/update_rake_spec.rb
index 118b0b2b960..a5bcd0651d3 100644
--- a/spec/tasks/gitlab/x509/update_rake_spec.rb
+++ b/spec/tasks/gitlab/x509/update_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'gitlab:x509 namespace rake task', :silence_stdout do
before(:all) do
diff --git a/spec/tasks/import_rake_spec.rb b/spec/tasks/import_rake_spec.rb
index 31ce9e124c8..284aa09a24d 100644
--- a/spec/tasks/import_rake_spec.rb
+++ b/spec/tasks/import_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'import:github rake tasks', feature_category: :importers do
before do
diff --git a/spec/tasks/migrate/schema_check_rake_spec.rb b/spec/tasks/migrate/schema_check_rake_spec.rb
index 4d0f59295a6..e130ab47021 100644
--- a/spec/tasks/migrate/schema_check_rake_spec.rb
+++ b/spec/tasks/migrate/schema_check_rake_spec.rb
@@ -1,7 +1,6 @@
# frozen_string_literal: true
require 'spec_helper'
-require 'rake'
RSpec.describe 'schema_version_check rake task', :silence_stdout do
include StubENV
@@ -10,9 +9,6 @@ RSpec.describe 'schema_version_check rake task', :silence_stdout do
before(:all) do
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/migrate/schema_check'
-
- # empty task as env is already loaded
- Rake::Task.define_task :environment
end
before do
diff --git a/spec/tasks/rubocop_rake_spec.rb b/spec/tasks/rubocop_rake_spec.rb
index eb360cdff93..33ebd9ed2b0 100644
--- a/spec/tasks/rubocop_rake_spec.rb
+++ b/spec/tasks/rubocop_rake_spec.rb
@@ -2,20 +2,17 @@
# rubocop:disable RSpec/VerifiedDoubles
require 'fast_spec_helper'
-require 'rake'
require 'tmpdir'
require 'fileutils'
require_relative '../support/silence_stdout'
require_relative '../support/helpers/next_instance_of'
-require_relative '../support/helpers/rake_helpers'
require_relative '../support/matchers/abort_matcher'
require_relative '../../rubocop/formatter/todo_formatter'
require_relative '../../rubocop/todo_dir'
require_relative '../../rubocop/check_graceful_task'
RSpec.describe 'rubocop rake tasks', :silence_stdout do
- include RakeHelpers
include NextInstanceOf
before do
diff --git a/spec/tasks/tokens_rake_spec.rb b/spec/tasks/tokens_rake_spec.rb
index 3f7271d4be1..82fe065cb52 100644
--- a/spec/tasks/tokens_rake_spec.rb
+++ b/spec/tasks/tokens_rake_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'rake_helper'
+require 'spec_helper'
RSpec.describe 'tokens rake tasks', :silence_stdout do
let!(:user) { create(:user) }
diff --git a/spec/tooling/danger/config_files_spec.rb b/spec/tooling/danger/config_files_spec.rb
index 65edcabb817..42fc08ad901 100644
--- a/spec/tooling/danger/config_files_spec.rb
+++ b/spec/tooling/danger/config_files_spec.rb
@@ -80,6 +80,8 @@ RSpec.describe Tooling::Danger::ConfigFiles do
config/feature_flags/first.yml
config/events/1234_new_event.yml
config/metrics/count_7d/new_metric.yml
+ ee/config/feature_flags/ee_feature_flag.yml
+ ee/config/saas_features/some_saas_feature.yml
]
end
@@ -91,6 +93,8 @@ RSpec.describe Tooling::Danger::ConfigFiles do
config/events/1234_new_event.yml
config/metrics/count_7d/new_metric.yml
app/assets/index.js
+ ee/config/feature_flags/ee_feature_flag.yml
+ ee/config/saas_features/some_saas_feature.yml
]
allow(config_file.helper).to receive(:added_files).and_return(all_new_files)
diff --git a/spec/tooling/danger/feature_flag_spec.rb b/spec/tooling/danger/feature_flag_spec.rb
index f4df2e1226c..4575d8ca981 100644
--- a/spec/tooling/danger/feature_flag_spec.rb
+++ b/spec/tooling/danger/feature_flag_spec.rb
@@ -83,28 +83,6 @@ RSpec.describe Tooling::Danger::FeatureFlag do
end
end
- describe '#stage_label' do
- before do
- allow(fake_helper).to receive(:mr_labels).and_return(labels)
- end
-
- context 'when there is no stage label' do
- let(:labels) { [] }
-
- it 'returns nil' do
- expect(feature_flag.stage_label).to be_nil
- end
- end
-
- context 'when there is a stage label' do
- let(:labels) { ['devops::verify', 'group::pipeline execution'] }
-
- it 'returns the stage label' do
- expect(feature_flag.stage_label).to eq(labels.first)
- end
- end
- end
-
describe described_class::Found do
let(:feature_flag_path) { 'config/feature_flags/development/entry.yml' }
let(:group) { 'group::source code' }
diff --git a/spec/tooling/danger/rubocop_inline_disable_suggestion_spec.rb b/spec/tooling/danger/rubocop_inline_disable_suggestion_spec.rb
new file mode 100644
index 00000000000..94dd5192d74
--- /dev/null
+++ b/spec/tooling/danger/rubocop_inline_disable_suggestion_spec.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+require 'gitlab/dangerfiles/spec_helper'
+
+require_relative '../../../tooling/danger/rubocop_inline_disable_suggestion'
+require_relative '../../../tooling/danger/project_helper'
+
+RSpec.describe Tooling::Danger::RubocopInlineDisableSuggestion, feature_category: :tooling do
+ include_context "with dangerfile"
+
+ let(:fake_danger) { DangerSpecHelper.fake_danger }
+ let(:fake_project_helper) { instance_double('Tooling::Danger::ProjectHelper') }
+ let(:filename) { 'spec/foo_spec.rb' }
+
+ let(:template) do
+ <<~SUGGESTION_MARKDOWN.chomp
+
+ Consider removing this inline disabling and adhering to the rubocop rule.
+ If that isn't possible, please provide context as a reply for reviewers.
+ See [rubocop best practices](https://docs.gitlab.com/ee/development/rubocop_development_guide.html).
+
+ ----
+
+ [Improve this message](https://gitlab.com/gitlab-org/gitlab/-/blob/master/tooling/danger/rubocop_inline_disable_suggestion.rb)
+ or [have feedback](https://gitlab.com/gitlab-org/gitlab/-/issues/428157)?
+ SUGGESTION_MARKDOWN
+ end
+
+ let(:file_lines) do
+ <<~RUBY.split("\n")
+ def validate_credit_card?(project)
+ !current_user.has_required_credit_card_to_enable_shared_runners?(project)
+ return true if Gitlab.com? # rubocop:disable Some/Cop
+ end
+
+ def show_buy_pipeline_minutes?(project, namespace)
+ return false unless ::Gitlab.com? # rubocop:disable Gitlab/AvoidGitlabInstanceChecks
+
+ show_out_of_pipeline_minutes_notification?(project, namespace)
+ end
+
+ def show_pipeline_minutes_notification_dot?(project, namespace)
+ return false unless ::Gitlab.com? # rubocop:disable Gitlab/AvoidGitlabInstanceChecks
+ return false if notification_dot_acknowledged?
+
+ show_out_of_pipeline_minutes_notification?(project, namespace)
+ end
+
+ def show_dot?(project, namespace)
+ return false unless ::Gitlab.com? # rubocop:disable Gitlab/AvoidGitlabInstanceChecks
+ return false if notification_dot_acknowledged?
+
+ show_out_of_pipeline_minutes_notification?(project, namespace)
+ end
+
+ def show_other_dot?(project, namespace)
+ return false unless ::Gitlab.com? # rubocop: disable Gitlab/AvoidGitlabInstanceChecks
+ return false if notification_dot_acknowledged?
+
+ show_out_of_pipeline_minutes_notification?(project, namespace)
+ end
+
+ def show_my_dot?(project, namespace)
+ return false unless ::Gitlab.com? # rubocop:todo Gitlab/AvoidGitlabInstanceChecks
+ return false if notification_dot_acknowledged?
+
+ show_out_of_pipeline_minutes_notification?(project, namespace)
+ end
+
+ def show_my_other_dot?(project, namespace)
+ return false unless ::Gitlab.com? # rubocop: todo Gitlab/AvoidGitlabInstanceChecks
+ return false if notification_dot_acknowledged?
+
+ show_out_of_pipeline_minutes_notification?(project, namespace)
+ end
+ RUBY
+ end
+
+ let(:changed_lines) do
+ <<~DIFF.split("\n")
+ + return true if Gitlab.com? # rubocop:disable Some/Cop
+ +end
+ + return false unless ::Gitlab.com? # rubocop:disable Gitlab/AvoidGitlabInstanceChecks
+ + return false unless ::Gitlab.com? # rubocop:disable Gitlab/AvoidGitlabInstanceChecks
+ + return false unless ::Gitlab.com? # rubocop:disable Gitlab/AvoidGitlabInstanceChecks
+ + return false unless ::Gitlab.com? # rubocop: disable Gitlab/AvoidGitlabInstanceChecks
+ + return false unless ::Gitlab.com? # rubocop:todo Gitlab/AvoidGitlabInstanceChecks
+ + return false unless ::Gitlab.com? # rubocop: todo Gitlab/AvoidGitlabInstanceChecks
+ DIFF
+ end
+
+ subject(:rubocop) { fake_danger.new(helper: fake_helper) }
+
+ before do
+ allow(rubocop).to receive(:project_helper).and_return(fake_project_helper)
+ allow(rubocop.helper).to receive(:changed_lines).with(filename).and_return(changed_lines)
+ allow(rubocop.project_helper).to receive(:file_lines).and_return(file_lines)
+
+ rubocop.define_singleton_method(:add_suggestions_for) do |filename|
+ Tooling::Danger::RubocopInlineDisableSuggestion.new(filename, context: self).suggest
+ end
+ end
+
+ it 'adds comments at the correct lines', :aggregate_failures do
+ [3, 7, 13, 20, 27, 34, 41].each do |line_number|
+ expect(rubocop).to receive(:markdown).with(template, file: filename, line: line_number)
+ end
+
+ rubocop.add_suggestions_for(filename)
+ end
+end
diff --git a/spec/tooling/danger/saas_feature_spec.rb b/spec/tooling/danger/saas_feature_spec.rb
new file mode 100644
index 00000000000..7ce9116ea5f
--- /dev/null
+++ b/spec/tooling/danger/saas_feature_spec.rb
@@ -0,0 +1,138 @@
+# frozen_string_literal: true
+
+require 'gitlab-dangerfiles'
+require 'gitlab/dangerfiles/spec_helper'
+
+require_relative '../../../tooling/danger/saas_feature'
+
+RSpec.describe Tooling::Danger::SaasFeature, feature_category: :tooling do
+ include_context "with dangerfile"
+
+ let(:fake_danger) { DangerSpecHelper.fake_danger.include(described_class) }
+
+ subject(:saas_feature) { fake_danger.new(helper: fake_helper) }
+
+ describe '#files' do
+ let(:feature_flag_paths) do
+ [
+ 'ee/config/saas_features/entry.yml'
+ ]
+ end
+
+ let(:other_file_paths) do
+ %w[app/models/model.rb app/assets/javascripts/file.js]
+ end
+
+ shared_examples 'an array of Found objects' do |change_type|
+ it 'returns an array of Found objects' do
+ found_files = saas_feature.files(change_type: change_type)
+
+ expect(found_files).to contain_exactly(an_instance_of(described_class::Found))
+ expect(found_files.map(&:path)).to eq(feature_flag_paths)
+ end
+ end
+
+ shared_examples 'an empty array' do |change_type|
+ it 'returns an array of Found objects' do
+ expect(saas_feature.files(change_type: change_type)).to be_empty
+ end
+ end
+
+ describe 'retrieves added files' do
+ context 'when added files contain SaaS feature files' do
+ let(:added_files) { feature_flag_paths + other_file_paths }
+
+ include_examples 'an array of Found objects', :added
+ end
+
+ context 'when added files does not contain SaaS feature files' do
+ let(:added_files) { other_file_paths }
+
+ include_examples 'an empty array', :added
+ end
+ end
+
+ describe 'retrieves modified files' do
+ context 'when modified files contain SaaS feature files' do
+ let(:modified_files) { feature_flag_paths }
+
+ include_examples 'an array of Found objects', :modified
+ end
+
+ context 'when modified files does not contain SaaS feature files' do
+ let(:modified_files) { other_file_paths }
+
+ include_examples 'an empty array', :modified
+ end
+ end
+
+ describe 'retrieves deleted files' do
+ context 'when deleted files contain SaaS feature files' do
+ let(:deleted_files) { feature_flag_paths }
+
+ include_examples 'an array of Found objects', :deleted
+ end
+
+ context 'when deleted files does not contain SaaS feature files' do
+ let(:deleted_files) { other_file_paths }
+
+ include_examples 'an empty array', :deleted
+ end
+ end
+ end
+
+ describe described_class::Found do
+ let(:path) { 'ee/config/saas_features/entry.yml' }
+ let(:group) { 'group::source code' }
+ let(:yaml) do
+ {
+ 'group' => group,
+ 'introduced_by_url' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/2',
+ 'milestone' => '15.9',
+ 'name' => 'entry'
+ }
+ end
+
+ let(:raw_yaml) { YAML.dump(yaml) }
+
+ subject(:found) { described_class.new(path) }
+
+ before do
+ allow(File).to receive(:read).and_call_original
+ expect(File).to receive(:read).with(path).and_return(raw_yaml) # rubocop:disable RSpec/ExpectInHook
+ end
+
+ described_class::ATTRIBUTES.each do |attribute|
+ describe "##{attribute}" do
+ it 'returns value from the YAML' do
+ expect(found.public_send(attribute)).to eq(yaml[attribute])
+ end
+ end
+ end
+
+ describe '#raw' do
+ it 'returns the raw YAML' do
+ expect(found.raw).to eq(raw_yaml)
+ end
+ end
+
+ describe '#group_match_mr_label?' do
+ context 'when group is nil' do
+ let(:group) { nil }
+
+ it 'is true only if MR has no group label' do
+ expect(found.group_match_mr_label?(group)).to eq true
+ expect(found.group_match_mr_label?('group::source code')).to eq false
+ end
+ end
+
+ context 'when group is not nil' do
+ it 'is true only if MR has the same group label' do
+ expect(found.group_match_mr_label?(group)).to eq true
+ expect(found.group_match_mr_label?(nil)).to eq false
+ expect(found.group_match_mr_label?('group::authentication and authorization')).to eq false
+ end
+ end
+ end
+ end
+end
diff --git a/spec/tooling/danger/sidekiq_args_spec.rb b/spec/tooling/danger/sidekiq_args_spec.rb
index bfa9ef169de..29bf32a9a02 100644
--- a/spec/tooling/danger/sidekiq_args_spec.rb
+++ b/spec/tooling/danger/sidekiq_args_spec.rb
@@ -71,6 +71,13 @@ RSpec.describe Tooling::Danger::SidekiqArgs, feature_category: :tooling do
specs.add_comment_for_matched_line(filename)
end
+
+ it 'adds a top level warning' do
+ allow(specs).to receive(:markdown)
+ expect(specs).to receive(:warn).with(described_class::MR_WARNING_COMMENT)
+
+ specs.add_comment_for_matched_line(filename)
+ end
end
context 'when args are not changed' do
@@ -84,6 +91,12 @@ RSpec.describe Tooling::Danger::SidekiqArgs, feature_category: :tooling do
specs.add_comment_for_matched_line(filename)
end
+
+ it 'does not add a top level warning' do
+ expect(specs).not_to receive(:warn)
+
+ specs.add_comment_for_matched_line(filename)
+ end
end
end
diff --git a/spec/tooling/lib/tooling/job_metrics_spec.rb b/spec/tooling/lib/tooling/job_metrics_spec.rb
new file mode 100644
index 00000000000..c7f4d08394c
--- /dev/null
+++ b/spec/tooling/lib/tooling/job_metrics_spec.rb
@@ -0,0 +1,721 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'tempfile'
+require 'time'
+
+require_relative '../../../../tooling/lib/tooling/job_metrics'
+
+RSpec.describe Tooling::JobMetrics, feature_category: :tooling do
+ include StubENV
+
+ attr_accessor :job_metrics_file, :job_metrics_file_path
+
+ around do |example|
+ self.job_metrics_file = Tempfile.new('test-folder/job-metrics.json')
+ self.job_metrics_file_path = job_metrics_file.path
+
+ # See https://ruby-doc.org/stdlib-1.9.3/libdoc/tempfile/rdoc/
+ # Tempfile.html#class-Tempfile-label-Explicit+close
+ begin
+ example.run
+ ensure
+ job_metrics_file.close
+ job_metrics_file.unlink
+ end
+ end
+
+ let(:instance) { described_class.new(metrics_file_path: job_metrics_file_path) }
+ let(:pipeline_created_at) { '2023-05-03T12:35:39.932Z' }
+
+ before do
+ stub_env(
+ 'CI_JOB_ID' => '1234',
+ 'CI_JOB_NAME' => 'rspec unit pg13 1/24',
+ 'CI_JOB_STAGE' => 'test',
+ 'CI_JOB_STARTED_AT' => (Time.now - 3600).iso8601, # 1h ago
+ 'CI_JOB_STATUS' => 'success',
+ 'CI_MERGE_REQUEST_IID' => '23412',
+ 'CI_PIPELINE_CREATED_AT' => pipeline_created_at,
+ 'CI_PIPELINE_ID' => '3393923023',
+ 'CI_PROJECT_ID' => '7489',
+ 'CI_SERVER_HOST' => 'localhost:300',
+ 'JOB_METRICS_FILE_PATH' => job_metrics_file_path
+ )
+ end
+
+ describe '#initialize' do
+ context 'when a path is given' do
+ subject { described_class.new(metrics_file_path: job_metrics_file_path) }
+
+ it 'instantiates the object' do
+ expect(subject).to be_a(described_class)
+ end
+
+ it 'sets the correct path for the metrics file' do
+ expect(subject.metrics_file_path).to eq(job_metrics_file_path)
+ end
+ end
+
+ context 'when a path is not given' do
+ subject { described_class.new }
+
+ context 'when the JOB_METRICS_FILE_PATH env variable is set' do
+ before do
+ stub_env(
+ 'JOB_METRICS_FILE_PATH' => job_metrics_file_path
+ )
+ end
+
+ it 'instantiates the object' do
+ expect(subject).to be_a(described_class)
+ end
+
+ it 'sets the correct path for the metrics file' do
+ expect(subject.metrics_file_path).to eq(ENV['JOB_METRICS_FILE_PATH'])
+ end
+ end
+
+ context 'when the JOB_METRICS_FILE_PATH env variable is not set' do
+ before do
+ stub_env(
+ 'JOB_METRICS_FILE_PATH' => nil
+ )
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error('Please specify a path for the job metrics file.')
+ end
+ end
+ end
+ end
+
+ describe '#create_metrics_file' do
+ subject { instance.create_metrics_file }
+
+ context 'when a valid metrics file exists' do
+ before do
+ allow(instance).to receive(:warn)
+ allow(instance).to receive(:valid_metrics_file?).and_return(true)
+ end
+
+ it 'prints a message to the user' do
+ allow(instance).to receive(:warn).and_call_original
+
+ expect { subject }.to output(
+ "A valid job metrics file already exists. We're not going to overwrite it.\n"
+ ).to_stderr
+ end
+
+ it 'does not overwrite the existing metrics file' do
+ expect(instance).not_to receive(:persist_metrics_file)
+
+ subject
+ end
+ end
+
+ context 'when a valid metrics file does not exist' do
+ before do
+ allow(instance).to receive(:valid_metrics_file?).and_return(false)
+ end
+
+ it 'persists the metrics file' do
+ expect(instance).to receive(:persist_metrics_file).with(instance.default_metrics)
+
+ subject
+ end
+ end
+ end
+
+ describe '#update_field' do
+ subject { instance.update_field(field_name, field_value) }
+
+ let(:field_name) { instance.default_fields.each_key.first }
+ let(:field_value) { 'test_value' }
+
+ context 'when the field to update is not in the default fields list' do
+ let(:field_name) { 'not-in-default-list' }
+
+ before do
+ allow(instance).to receive(:warn)
+ end
+
+ it 'returns a warning to the user' do
+ allow(instance).to receive(:warn).and_call_original
+
+ expect { subject }.to output(
+ "[job-metrics] ERROR: Could not update field #{field_name}, as it is not part of the allowed fields.\n"
+ ).to_stderr
+ end
+
+ it 'does not write to the metrics file' do
+ expect(instance).not_to receive(:persist_metrics_file)
+
+ subject
+ end
+ end
+
+ context 'when the field to update is in the default fields list' do
+ it 'calls the update_file method with the correct arguments' do
+ expect(instance).to receive(:update_file).with(field_name, field_value, type: :field)
+
+ subject
+ end
+ end
+ end
+
+ describe '#update_tag' do
+ subject { instance.update_tag(tag_name, tag_value) }
+
+ let(:tag_name) { instance.default_tags.each_key.first }
+ let(:tag_value) { 'test_value' }
+
+ context 'when the tag to update is not in the default tags list' do
+ let(:tag_name) { 'not-in-default-list' }
+
+ before do
+ allow(instance).to receive(:warn)
+ end
+
+ it 'returns a warning to the user' do
+ allow(instance).to receive(:warn).and_call_original
+
+ expect { subject }.to output(
+ "[job-metrics] ERROR: Could not update tag #{tag_name}, as it is not part of the allowed tags.\n"
+ ).to_stderr
+ end
+
+ it 'does not write to the metrics file' do
+ expect(instance).not_to receive(:persist_metrics_file)
+
+ subject
+ end
+ end
+
+ context 'when the tag to update is in the default tags list' do
+ it 'calls the update_file method with the correct arguments' do
+ expect(instance).to receive(:update_file).with(tag_name, tag_value, type: :tag)
+
+ subject
+ end
+ end
+ end
+
+ describe '#update_file' do
+ subject { instance.update_file(tag_name, tag_value, type: type) }
+
+ let(:type) { :tag }
+ let(:tag_name) { instance.default_tags.each_key.first }
+ let(:tag_value) { 'test_value' }
+
+ context 'when the metrics file is not valid' do
+ before do
+ allow(instance).to receive(:valid_metrics_file?).and_return(false)
+ allow(instance).to receive(:warn)
+ end
+
+ it 'returns a warning to the user' do
+ allow(instance).to receive(:warn).and_call_original
+
+ expect { subject }.to output(
+ "[job-metrics] ERROR: Invalid job metrics file.\n"
+ ).to_stderr
+ end
+
+ it 'does not write to the metrics file' do
+ expect(instance).not_to receive(:persist_metrics_file)
+
+ subject
+ end
+ end
+
+ context 'when the metrics file is valid' do
+ let(:metrics_hash) do
+ {
+ name: 'job-metrics',
+ time: ENV['CI_PIPELINE_CREATED_AT'].to_time,
+ tags: tags_hash,
+ fields: fields_hash
+ }
+ end
+
+ let(:tags_hash) { instance.default_tags }
+ let(:fields_hash) { instance.default_fields }
+
+ before do
+ allow(instance).to receive(:valid_metrics_file?).and_return(true)
+ allow(instance).to receive(:load_metrics_file).and_return(metrics_hash)
+ end
+
+ context 'when updating a tag' do
+ let(:type) { :tag }
+
+ it 'updates the tag value' do
+ expect(instance).to receive(:persist_metrics_file).with(
+ hash_including(
+ tags: hash_including(tag_name)
+ )
+ )
+
+ subject
+ end
+ end
+
+ context 'when updating a field' do
+ let(:type) { :field }
+
+ let(:field_name) { instance.default_fields.each_key.first }
+ let(:field_value) { 'test_value' }
+
+ it 'updates the field value' do
+ expect(instance).to receive(:persist_metrics_file).with(
+ hash_including(
+ fields: hash_including(field_name)
+ )
+ )
+
+ subject
+ end
+ end
+ end
+ end
+
+ describe '#push_metrics' do
+ subject { instance.push_metrics }
+
+ context 'when the metrics file is not valid' do
+ before do
+ allow(instance).to receive(:valid_metrics_file?).and_return(false)
+ allow(instance).to receive(:warn)
+ end
+
+ it 'returns a warning to the user' do
+ allow(instance).to receive(:warn).and_call_original
+
+ expect { subject }.to output(
+ "[job-metrics] ERROR: Invalid job metrics file. We will not push the metrics to InfluxDB\n"
+ ).to_stderr
+ end
+
+ it 'does not write to the metrics file' do
+ expect(instance).not_to receive(:persist_metrics_file)
+
+ subject
+ end
+ end
+
+ context 'when the metrics file is valid' do
+ let(:metrics_hash) do
+ {
+ name: 'job-metrics',
+ time: ENV['CI_PIPELINE_CREATED_AT'].to_time,
+ tags: tags_hash,
+ fields: fields_hash
+ }
+ end
+
+ let(:tags_hash) { instance.default_tags }
+ let(:fields_hash) { instance.default_fields }
+      let(:influx_write_api) { double('influx_write_api') } # rubocop:disable RSpec/VerifiedDoubles
+
+ before do
+ allow(instance).to receive(:influx_write_api).and_return(influx_write_api)
+ allow(instance).to receive(:valid_metrics_file?).and_return(true)
+ allow(instance).to receive(:load_metrics_file).and_return(metrics_hash)
+ allow(instance).to receive(:warn)
+ allow(instance).to receive(:puts)
+ end
+
+      context 'when we are missing ENV variables to push to InfluxDB' do
+ before do
+ stub_env(
+ 'QA_INFLUXDB_URL' => 'https://test.com',
+ 'EP_CI_JOB_METRICS_TOKEN' => nil
+ )
+ end
+
+ it 'displays an error to the user' do
+ allow(instance).to receive(:influx_write_api).and_call_original
+ allow(instance).to receive(:warn).and_call_original
+
+ expect { subject }.to output(
+ "[job-metrics] Failed to push CI job metrics to InfluxDB, " \
+ "error: Missing EP_CI_JOB_METRICS_TOKEN env variable\n"
+ ).to_stderr
+ end
+ end
+
+ context 'when pushing the data to InfluxDB raises an exception' do
+ it 'displays an error to the user' do
+ allow(instance).to receive(:warn).and_call_original
+ expect(influx_write_api).to receive(:write).and_raise("connectivity issues")
+
+ expect { subject }.to output(
+ "[job-metrics] Failed to push CI job metrics to InfluxDB, error: connectivity issues\n"
+ ).to_stderr
+ end
+ end
+
+ context 'when some tags/fields are empty/nil' do
+ before do
+ allow(instance).to receive(:load_metrics_file).and_return({
+ name: 'job-metrics',
+ time: ENV['CI_PIPELINE_CREATED_AT'].to_time,
+ tags: {
+ first_tag: '',
+ third_tag: 'hello'
+ },
+ fields: {
+ second_tag: nil
+ }
+ })
+ end
+
+ it 'removes the metrics with empty/nil values from the metrics list' do
+ expect(influx_write_api).to receive(:write).with(data: {
+ name: 'job-metrics',
+ time: anything,
+ tags: { third_tag: 'hello' },
+ fields: {
+ job_duration_seconds: anything # Added right before pushing to influxDB
+ }
+ })
+
+ subject
+ end
+ end
+
+ it 'pushes the data to InfluxDB' do
+ expect(influx_write_api).to receive(:write).with(data: metrics_hash)
+
+ subject
+ end
+
+ it 'sets the job_duration_seconds field' do
+ # We want the job to last for 10 minutes (600 seconds)
+ allow(Time).to receive(:now).and_return(Time.parse(ENV.fetch('CI_JOB_STARTED_AT')) + 600)
+
+ expect(influx_write_api).to receive(:write).with(
+ data: hash_including(
+ fields: hash_including(
+ job_duration_seconds: 600
+ )
+ )
+ )
+
+ subject
+ end
+ end
+ end
+
+ describe '#load_metrics_file' do
+ subject { instance.load_metrics_file }
+
+ context 'when the metrics file does not exist on disk' do
+ before do
+ allow(File).to receive(:exist?).with(job_metrics_file_path).and_return(false)
+ end
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'when the metrics file exists on disk' do
+ context 'when the metrics file does not contain valid JSON' do
+ before do
+ File.write(job_metrics_file_path, 'THIS IS NOT JSON CONTENT!')
+ end
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'when the metrics file contains valid JSON' do
+ before do
+ File.write(job_metrics_file_path, { 'key' => 'value' }.to_json)
+ end
+
+ it 'returns the content of the file as a hash with symbolized keys' do
+ expect(subject).to eq({ key: 'value' })
+ end
+ end
+ end
+ end
+
+ describe '#valid_metrics_file?' do
+ subject { instance.valid_metrics_file? }
+
+ context 'when the metrics file cannot be loaded in memory' do
+ before do
+ allow(instance).to receive(:load_metrics_file).and_return(nil)
+ end
+
+ it 'returns false' do
+ expect(subject).to be_falsey
+ end
+ end
+
+ context 'when the metrics file can be loaded in memory' do
+ let(:metrics_file_content) do
+ { key: 'value' }
+ end
+
+ before do
+ allow(instance).to receive(:load_metrics_file).and_return(metrics_file_content)
+ end
+
+ context 'when the metrics file validation succeeds' do
+ before do
+ allow(instance).to receive(:valid_metrics?).with(metrics_file_content).and_return(true)
+ end
+
+ it 'returns true' do
+ expect(subject).to be_truthy
+ end
+ end
+
+ context 'when the metrics file validation fails' do
+ before do
+ allow(instance).to receive(:valid_metrics?).with(metrics_file_content).and_return(false)
+ end
+
+ it 'returns false' do
+ expect(subject).to be_falsey
+ end
+ end
+ end
+ end
+
+ describe '#valid_metrics?' do
+ subject { instance.valid_metrics?(metrics_hash) }
+
+ let(:metrics_hash) do
+ {
+ name: 'job-metrics',
+ time: ENV['CI_PIPELINE_CREATED_AT'].to_time,
+ tags: tags_hash,
+ fields: fields_hash
+ }
+ end
+
+ let(:tags_hash) { instance.default_tags }
+ let(:fields_hash) { instance.default_fields }
+
+ describe 'metrics hash keys' do
+ context 'when it is missing a key' do
+ before do
+ metrics_hash.delete(:time)
+ end
+
+ it 'returns false' do
+ expect(subject).to be_falsey
+ end
+ end
+
+ context 'when it has an extra key' do
+ before do
+ metrics_hash[:extra_key] = ''
+ end
+
+ it 'returns false' do
+ expect(subject).to be_falsey
+ end
+ end
+ end
+
+ describe 'metrics hash tags keys' do
+ context 'when it is missing a key' do
+ before do
+ tags_hash.delete(tags_hash.each_key.first)
+ end
+
+ it 'returns false' do
+ expect(subject).to be_falsey
+ end
+ end
+
+ context 'when it has an extra key' do
+ before do
+ tags_hash[:extra_key] = ''
+ end
+
+ it 'returns false' do
+ expect(subject).to be_falsey
+ end
+ end
+ end
+
+ describe 'metrics hash fields keys' do
+ context 'when it is missing a key' do
+ before do
+ fields_hash.delete(fields_hash.each_key.first)
+ end
+
+ it 'returns false' do
+ expect(subject).to be_falsey
+ end
+ end
+
+ context 'when it has an extra key' do
+ before do
+ fields_hash[:extra_key] = ''
+ end
+
+ it 'returns false' do
+ expect(subject).to be_falsey
+ end
+ end
+ end
+
+ context 'when the metrics hash is valid' do
+ it 'returns true' do
+ expect(subject).to be_truthy
+ end
+ end
+ end
+
+ describe '#persist_metrics_file' do
+ let(:metrics_hash) do
+ { key: 'value' }.to_json
+ end
+
+ subject { instance.persist_metrics_file(metrics_hash) }
+
+ context 'when the metrics hash is not valid' do
+ before do
+ allow(instance).to receive(:valid_metrics?).and_return(false)
+ allow(instance).to receive(:warn)
+ end
+
+ it 'returns a warning to the user' do
+ allow(instance).to receive(:warn).and_call_original
+
+ expect { subject }.to output(
+ "cannot persist the metrics, as it doesn't have the correct data structure.\n"
+ ).to_stderr
+ end
+
+ it 'does not write to the metrics file' do
+ expect(File).not_to receive(:write).with(job_metrics_file_path, any_args)
+
+ subject
+ end
+ end
+
+ context 'when the metrics hash is valid' do
+ before do
+ allow(instance).to receive(:valid_metrics?).and_return(true)
+ end
+
+ it 'persists the metrics file' do
+ expect { subject }.to change { File.read(job_metrics_file_path) }.from('').to(metrics_hash.to_json)
+ end
+ end
+ end
+
+ describe '#default_metrics' do
+ subject { instance.default_metrics }
+
+ let(:returned_time) { ENV['CI_PIPELINE_CREATED_AT'].to_time }
+ let(:default_tags) { instance.default_tags }
+ let(:default_fields) { instance.default_fields }
+
+ it 'returns the expected metrics keys' do
+ expect(subject).to eq(
+ name: 'job-metrics',
+ time: returned_time,
+ tags: default_tags,
+ fields: default_fields
+ )
+ end
+ end
+
+ describe '#default_tags' do
+ subject { instance.default_tags }
+
+ it 'returns the expected tags keys' do
+ expect(subject).to eq(
+ job_name: ENV['CI_JOB_NAME'],
+ job_stage: ENV['CI_JOB_STAGE'],
+ job_status: ENV['CI_JOB_STATUS'],
+ project_id: ENV['CI_PROJECT_ID'],
+ rspec_retried_in_new_process: 'false',
+ server_host: ENV['CI_SERVER_HOST']
+ )
+ end
+
+ context 'when an ENV variable is not set' do
+ before do
+ stub_env('CI_JOB_NAME' => nil)
+ end
+
+ it 'replaces the value with nil' do
+ expect(subject).to eq(
+ job_name: nil,
+ job_stage: ENV['CI_JOB_STAGE'],
+ job_status: ENV['CI_JOB_STATUS'],
+ project_id: ENV['CI_PROJECT_ID'],
+ rspec_retried_in_new_process: 'false',
+ server_host: ENV['CI_SERVER_HOST']
+ )
+ end
+ end
+ end
+
+ describe '#default_fields' do
+ subject { instance.default_fields }
+
+ it 'returns the expected fields keys' do
+ expect(subject).to eq(
+ job_id: ENV['CI_JOB_ID'],
+ job_duration_seconds: nil,
+ merge_request_iid: ENV['CI_MERGE_REQUEST_IID'],
+ pipeline_id: ENV['CI_PIPELINE_ID']
+ )
+ end
+
+ context 'when an ENV variable is not set' do
+ before do
+ stub_env('CI_JOB_ID' => nil)
+ end
+
+ it 'replaces the value with nil' do
+ expect(subject).to eq(
+ job_id: nil,
+ job_duration_seconds: nil,
+ merge_request_iid: ENV['CI_MERGE_REQUEST_IID'],
+ pipeline_id: ENV['CI_PIPELINE_ID']
+ )
+ end
+ end
+ end
+
+ describe '#time' do
+ subject { instance.time }
+
+ let(:current_time) { '2011-01-01' }
+
+ before do
+ stub_env('CI_PIPELINE_CREATED_AT' => pipeline_created_at)
+ allow(DateTime).to receive(:now).and_return(current_time)
+ end
+
+ context 'when the CI_PIPELINE_CREATED_AT env variable is set' do
+ let(:pipeline_created_at) { '2000-01-01T00:00:00Z' }
+
+ it 'returns the correct time' do
+ expect(subject).to eq(pipeline_created_at)
+ end
+ end
+
+ context 'when the CI_PIPELINE_CREATED_AT env variable is not set' do
+ let(:pipeline_created_at) { nil }
+
+ it 'returns the current time' do
+ expect(subject).to eq(current_time)
+ end
+ end
+ end
+end
diff --git a/spec/uploaders/packages/nuget/symbol_uploader_spec.rb b/spec/uploaders/packages/nuget/symbol_uploader_spec.rb
index bdcb5245c1c..1a93871d94a 100644
--- a/spec/uploaders/packages/nuget/symbol_uploader_spec.rb
+++ b/spec/uploaders/packages/nuget/symbol_uploader_spec.rb
@@ -3,8 +3,9 @@
require 'spec_helper'
RSpec.describe Packages::Nuget::SymbolUploader, feature_category: :package_registry do
+ let(:file_path) { 'file/Path.pdb' }
let(:object_storage_key) { 'object/storage/key' }
- let(:symbol) { build_stubbed(:nuget_symbol, object_storage_key: object_storage_key) }
+ let(:symbol) { build_stubbed(:nuget_symbol, object_storage_key: object_storage_key, file_path: file_path) }
subject { described_class.new(symbol, :file) }
diff --git a/spec/views/admin/sessions/new.html.haml_spec.rb b/spec/views/admin/sessions/new.html.haml_spec.rb
index c1ed8d4f4ef..81275fa8750 100644
--- a/spec/views/admin/sessions/new.html.haml_spec.rb
+++ b/spec/views/admin/sessions/new.html.haml_spec.rb
@@ -36,14 +36,15 @@ RSpec.describe 'admin/sessions/new.html.haml' do
context 'omniauth authentication enabled' do
before do
allow(view).to receive(:omniauth_enabled?).and_return(true)
- allow(view).to receive(:button_based_providers_enabled?).and_return(true)
+ allow(view).to receive(:password_authentication_enabled_for_web?).and_return(true)
end
it 'shows omniauth form' do
render
expect(rendered).not_to have_content _('No authentication methods configured.')
- expect(rendered).to have_content _('or')
+ expect(rendered).to have_css('.omniauth-divider')
+ expect(rendered).to have_content(_('or sign in with'))
expect(rendered).to have_css('.omniauth-container')
end
end
diff --git a/spec/views/ci/status/_badge.html.haml_spec.rb b/spec/views/ci/status/_badge.html.haml_spec.rb
index ff8cfe2cca0..65497de1608 100644
--- a/spec/views/ci/status/_badge.html.haml_spec.rb
+++ b/spec/views/ci/status/_badge.html.haml_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe 'ci/status/_badge' do
render_status(build)
- expect(rendered).to have_link 'passed', href: details_path
+ expect(rendered).to have_link 'Passed', href: details_path
end
end
@@ -32,11 +32,11 @@ RSpec.describe 'ci/status/_badge' do
end
it 'contains build status text' do
- expect(rendered).to have_content 'passed'
+ expect(rendered).to have_content 'Passed'
end
it 'does not contain links' do
- expect(rendered).not_to have_link 'passed'
+ expect(rendered).not_to have_link 'Passed'
end
end
end
@@ -60,11 +60,11 @@ RSpec.describe 'ci/status/_badge' do
end
it 'contains valid commit status text' do
- expect(rendered).to have_content 'running'
+ expect(rendered).to have_content 'Running'
end
it 'has link to external status page' do
- expect(rendered).to have_link 'running', href: 'http://gitlab.com'
+ expect(rendered).to have_link 'Running', href: 'http://gitlab.com'
end
end
@@ -76,11 +76,11 @@ RSpec.describe 'ci/status/_badge' do
end
it 'contains valid commit status text' do
- expect(rendered).to have_content 'canceled'
+ expect(rendered).to have_content 'Canceled'
end
it 'has link to external status page' do
- expect(rendered).not_to have_link 'canceled'
+ expect(rendered).not_to have_link 'Canceled'
end
end
end
diff --git a/spec/views/devise/sessions/new.html.haml_spec.rb b/spec/views/devise/sessions/new.html.haml_spec.rb
index 70ca0bb2195..5f611ae1d8f 100644
--- a/spec/views/devise/sessions/new.html.haml_spec.rb
+++ b/spec/views/devise/sessions/new.html.haml_spec.rb
@@ -47,8 +47,6 @@ RSpec.describe 'devise/sessions/new' do
disable_captcha
disable_sign_up
disable_other_signin_methods
-
- allow(view).to receive(:experiment_enabled?).and_return(false)
end
it 'is shown when enabled' do
@@ -69,36 +67,6 @@ RSpec.describe 'devise/sessions/new' do
expect(rendered).not_to have_field(_('Username'))
end
end
-
- describe 'Google Tag Manager' do
- let!(:gtm_id) { 'GTM-WWKMTWS' }
-
- subject { rendered }
-
- before do
- stub_devise
- disable_captcha
- stub_config(extra: { google_tag_manager_id: gtm_id, google_tag_manager_nonce_id: gtm_id })
- end
-
- describe 'when Google Tag Manager is enabled' do
- before do
- enable_gtm
- render
- end
-
- it { is_expected.to match /www.googletagmanager.com/ }
- end
-
- describe 'when Google Tag Manager is disabled' do
- before do
- disable_gtm
- render
- end
-
- it { is_expected.not_to match /www.googletagmanager.com/ }
- end
- end
end
end
@@ -133,12 +101,4 @@ RSpec.describe 'devise/sessions/new' do
allow(view).to receive(:captcha_enabled?).and_return(false)
allow(view).to receive(:captcha_on_login_required?).and_return(false)
end
-
- def disable_gtm
- allow(view).to receive(:google_tag_manager_enabled?).and_return(false)
- end
-
- def enable_gtm
- allow(view).to receive(:google_tag_manager_enabled?).and_return(true)
- end
end
diff --git a/spec/views/groups/observability/observability.html.haml_spec.rb b/spec/views/groups/observability/observability.html.haml_spec.rb
deleted file mode 100644
index 0561737cb39..00000000000
--- a/spec/views/groups/observability/observability.html.haml_spec.rb
+++ /dev/null
@@ -1,18 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'groups/observability/observability.html.haml' do
- let(:iframe_url) { "foo.test" }
-
- before do
- allow(view).to receive(:observability_iframe_src).and_return(iframe_url)
- end
-
- it 'renders as expected' do
- render
- page = Capybara.string(rendered)
- div = page.find('#js-observability-app')
- expect(div['data-observability-iframe-src']).to eq(iframe_url)
- end
-end
diff --git a/spec/views/groups/show.html.haml_spec.rb b/spec/views/groups/show.html.haml_spec.rb
index c4142f4a245..e0ee60e9f91 100644
--- a/spec/views/groups/show.html.haml_spec.rb
+++ b/spec/views/groups/show.html.haml_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'groups/show', feature_category: :groups_and_projects do
describe 'group README' do
let_it_be(:group) { build_stubbed(:group) }
- let_it_be(:readme_project) { build_stubbed(:project, :readme) }
+ let_it_be(:readme_project) { build_stubbed(:project, :public, :readme) }
before do
assign(:group, group)
@@ -14,6 +14,7 @@ RSpec.describe 'groups/show', feature_category: :groups_and_projects do
context 'with readme project' do
before do
allow(group).to receive(:group_readme).and_return(readme_project)
+ allow(group).to receive(:readme_project).and_return(readme_project)
end
it 'renders #js-group-readme' do
@@ -21,11 +22,21 @@ RSpec.describe 'groups/show', feature_category: :groups_and_projects do
expect(rendered).to have_selector('#js-group-readme')
end
+
+ context 'with private readme project' do
+ let_it_be(:readme_project) { build_stubbed(:project, :private, :readme) }
+
+ it 'does not render #js-group-readme' do
+ render
+
+ expect(rendered).not_to have_selector('#js-group-readme')
+ end
+ end
end
context 'without readme project' do
before do
- allow(group).to receive(:group_readme).and_return(nil)
+ allow(group).to receive(:readme_project).and_return(nil)
end
it 'does not render #js-group-readme' do
diff --git a/spec/views/layouts/terms.html.haml_spec.rb b/spec/views/layouts/terms.html.haml_spec.rb
index 7bf97debbf9..a22eb8eff57 100644
--- a/spec/views/layouts/terms.html.haml_spec.rb
+++ b/spec/views/layouts/terms.html.haml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'layouts/terms' do
+RSpec.describe 'layouts/terms', feature_category: :user_profile do
let_it_be(:user) { create(:user) }
before do
diff --git a/spec/views/notify/user_deactivated_email.html.haml_spec.rb b/spec/views/notify/user_deactivated_email.html.haml_spec.rb
index 25d18e37cb9..cc005fc0502 100644
--- a/spec/views/notify/user_deactivated_email.html.haml_spec.rb
+++ b/spec/views/notify/user_deactivated_email.html.haml_spec.rb
@@ -28,18 +28,6 @@ RSpec.describe 'notify/user_deactivated_email.html.haml', feature_category: :use
expect(rendered).to have_content(/So long and thanks for all the fish!$/)
end
end
-
- context 'when additional text feature flag is disabled' do
- before do
- stub_feature_flags(deactivation_email_additional_text: false)
- end
-
- it 'does not display the additional text' do
- render
-
- expect(rendered).to have_content(/Please contact your GitLab administrator if you think this is an error\.$/)
- end
- end
end
context 'when additional text setting is not set' do
diff --git a/spec/views/notify/user_deactivated_email.text.erb_spec.rb b/spec/views/notify/user_deactivated_email.text.erb_spec.rb
index 8cf56816b92..60379714ff2 100644
--- a/spec/views/notify/user_deactivated_email.text.erb_spec.rb
+++ b/spec/views/notify/user_deactivated_email.text.erb_spec.rb
@@ -30,18 +30,6 @@ RSpec.describe 'notify/user_deactivated_email.text.erb', feature_category: :user
expect(rendered).to have_content(/So long and thanks for all the fish!$/)
end
end
-
- context 'when additional text feature flag is disabled' do
- before do
- stub_feature_flags(deactivation_email_additional_text: false)
- end
-
- it 'does not display the additional text' do
- render
-
- expect(rendered).to have_content(/Please contact your GitLab administrator if you think this is an error\.$/)
- end
- end
end
context 'when additional text setting is not set' do
diff --git a/spec/views/projects/commits/_commit.html.haml_spec.rb b/spec/views/projects/commits/_commit.html.haml_spec.rb
index 90df0d381ed..d45f1da86e8 100644
--- a/spec/views/projects/commits/_commit.html.haml_spec.rb
+++ b/spec/views/projects/commits/_commit.html.haml_spec.rb
@@ -74,7 +74,7 @@ RSpec.describe 'projects/commits/_commit.html.haml' do
commit: commit
}
- expect(rendered).not_to have_css('.ci-status-link')
+ expect(rendered).not_to have_css("[data-testid='ci-status-badge-legacy']")
end
end
@@ -91,7 +91,7 @@ RSpec.describe 'projects/commits/_commit.html.haml' do
commit: commit
}
- expect(rendered).to have_css('.ci-status-link')
+ expect(rendered).to have_css("[data-testid='ci-status-badge-legacy']")
end
end
@@ -103,7 +103,7 @@ RSpec.describe 'projects/commits/_commit.html.haml' do
commit: commit
}
- expect(rendered).not_to have_css('.ci-status-link')
+ expect(rendered).not_to have_css("[data-testid='ci-status-badge-legacy']")
end
end
end
diff --git a/spec/views/projects/merge_requests/edit.html.haml_spec.rb b/spec/views/projects/merge_requests/edit.html.haml_spec.rb
index bb8a4455775..5affe2dd9b4 100644
--- a/spec/views/projects/merge_requests/edit.html.haml_spec.rb
+++ b/spec/views/projects/merge_requests/edit.html.haml_spec.rb
@@ -46,65 +46,28 @@ RSpec.describe 'projects/merge_requests/edit.html.haml' do
end
end
- context 'with the visible_label_selection_on_metadata feature flag enabled' do
- before do
- stub_feature_flags(visible_label_selection_on_metadata: true)
- end
-
- context 'when a merge request without fork' do
- it_behaves_like 'merge request shows editable fields'
-
- it "shows editable fields" do
- unlink_project.execute
- closed_merge_request.reload
-
- render
+ context 'when a merge request without fork' do
+ it_behaves_like 'merge request shows editable fields'
- expect(rendered).not_to have_selector('#merge_request_target_branch', visible: false)
- expect(rendered).to have_selector('.js-issuable-form-label-selector')
- end
- end
-
- context 'when a merge request with an existing source project is closed' do
- it_behaves_like 'merge request shows editable fields'
+ it "shows editable fields" do
+ unlink_project.execute
+ closed_merge_request.reload
- it "shows editable fields" do
- render
+ render
- expect(rendered).to have_selector('#merge_request_target_branch', visible: false)
- expect(rendered).to have_selector('.js-issuable-form-label-selector')
- end
+ expect(rendered).not_to have_selector('#merge_request_target_branch', visible: false)
+ expect(rendered).to have_selector('.js-issuable-form-label-selector')
end
end
- context 'with the visible_label_selection_on_metadata feature flag disabled' do
- before do
- stub_feature_flags(visible_label_selection_on_metadata: false)
- end
-
- context 'when a merge request without fork' do
- it_behaves_like 'merge request shows editable fields'
-
- it "shows editable fields" do
- unlink_project.execute
- closed_merge_request.reload
-
- render
-
- expect(rendered).not_to have_selector('#merge_request_target_branch', visible: false)
- expect(rendered).not_to have_selector('.js-issuable-form-label-selector')
- end
- end
-
- context 'when a merge request with an existing source project is closed' do
- it_behaves_like 'merge request shows editable fields'
+ context 'when a merge request with an existing source project is closed' do
+ it_behaves_like 'merge request shows editable fields'
- it "shows editable fields" do
- render
+ it "shows editable fields" do
+ render
- expect(rendered).to have_selector('#merge_request_target_branch', visible: false)
- expect(rendered).not_to have_selector('.js-issuable-form-label-selector')
- end
+ expect(rendered).to have_selector('#merge_request_target_branch', visible: false)
+ expect(rendered).to have_selector('.js-issuable-form-label-selector')
end
end
end
diff --git a/spec/views/projects/tags/index.html.haml_spec.rb b/spec/views/projects/tags/index.html.haml_spec.rb
index dfa27afb72f..01e8d23fb9f 100644
--- a/spec/views/projects/tags/index.html.haml_spec.rb
+++ b/spec/views/projects/tags/index.html.haml_spec.rb
@@ -91,14 +91,14 @@ RSpec.describe 'projects/tags/index.html.haml' do
render
- expect(page.find('.tags .content-list li', text: tag)).to have_css 'a.ci-status-icon-success'
- expect(page.all('.tags .content-list li')).to all(have_css('svg.s24'))
+ expect(page.find('.tags .content-list li', text: tag)).to have_css '.gl-badge .ci-status-icon-success'
+ expect(page.all('.tags .content-list li')).to all(have_css('svg.s16'))
end
it 'shows no build status or placeholder when no pipelines present' do
render
- expect(page.all('.tags .content-list li')).not_to have_css 'svg.s24'
+ expect(page.all('.tags .content-list li')).not_to have_css 'svg.s16'
end
it 'shows no build status or placeholder when pipelines are private' do
@@ -107,7 +107,7 @@ RSpec.describe 'projects/tags/index.html.haml' do
render
- expect(page.all('.tags .content-list li')).not_to have_css 'svg.s24'
+ expect(page.all('.tags .content-list li')).not_to have_css 'svg.s16'
end
end
diff --git a/spec/views/projects/tree/show.html.haml_spec.rb b/spec/views/projects/tree/show.html.haml_spec.rb
index 5a1ae715f8f..942c352c6b6 100644
--- a/spec/views/projects/tree/show.html.haml_spec.rb
+++ b/spec/views/projects/tree/show.html.haml_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'projects/tree/show' do
include Devise::Test::ControllerHelpers
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository, create_branch: 'bar') }
let(:repository) { project.repository }
let(:ref) { 'master' }
let(:commit) { repository.commit(ref) }
@@ -38,4 +38,24 @@ RSpec.describe 'projects/tree/show' do
expect(rendered).to have_css('#js-tree-ref-switcher')
end
end
+
+ context 'when on root ref' do
+ let(:ref) { repository.root_ref }
+
+ it 'hides compare button' do
+ render
+
+ expect(rendered).not_to include('Compare')
+ end
+ end
+
+ context 'when not on root ref' do
+ let(:ref) { 'bar' }
+
+ it 'shows a compare button' do
+ render
+
+ expect(rendered).to include('Compare')
+ end
+ end
end
diff --git a/spec/views/registrations/welcome/show.html.haml_spec.rb b/spec/views/registrations/welcome/show.html.haml_spec.rb
deleted file mode 100644
index b652defbd1f..00000000000
--- a/spec/views/registrations/welcome/show.html.haml_spec.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'registrations/welcome/show', feature_category: :onboarding do
- let_it_be(:user) { create(:user) }
-
- before do
- allow(view).to receive(:onboarding_status).and_return(Onboarding::Status.new({}, {}, user))
- allow(view).to receive(:current_user).and_return(user)
- allow(view).to receive(:welcome_update_params).and_return({})
-
- render
- end
-
- subject { rendered }
-
- it { is_expected.not_to have_selector('label[for="user_setup_for_company"]') }
- it { is_expected.to have_button('Get started!') }
-end
diff --git a/spec/views/shared/groups/_dropdown.html.haml_spec.rb b/spec/views/shared/groups/_dropdown.html.haml_spec.rb
index 71fa3a30711..2c6f3b4370e 100644
--- a/spec/views/shared/groups/_dropdown.html.haml_spec.rb
+++ b/spec/views/shared/groups/_dropdown.html.haml_spec.rb
@@ -5,11 +5,37 @@ require 'spec_helper'
RSpec.describe 'shared/groups/_dropdown.html.haml' do
describe 'render' do
describe 'when a sort option is not selected' do
- it 'renders a default sort option' do
+ before do
render 'shared/groups/dropdown'
+ end
+ it 'renders a default sort option' do
expect(rendered).to have_content 'Last created'
end
+
+ it 'renders correct sort by options' do
+ html_rendered = Nokogiri::HTML(rendered)
+ sort_options = Gitlab::Json.parse(html_rendered.css('div.dropdown')[0]['data-items'])
+
+ expect(sort_options.size).to eq(6)
+ expect(sort_options[0]['value']).to eq('name_asc')
+ expect(sort_options[0]['text']).to eq(s_('SortOptions|Name'))
+
+ expect(sort_options[1]['value']).to eq('name_desc')
+ expect(sort_options[1]['text']).to eq(s_('SortOptions|Name, descending'))
+
+ expect(sort_options[2]['value']).to eq('created_desc')
+ expect(sort_options[2]['text']).to eq(s_('SortOptions|Last created'))
+
+ expect(sort_options[3]['value']).to eq('created_asc')
+ expect(sort_options[3]['text']).to eq(s_('SortOptions|Oldest created'))
+
+ expect(sort_options[4]['value']).to eq('latest_activity_desc')
+ expect(sort_options[4]['text']).to eq(_('Updated date'))
+
+ expect(sort_options[5]['value']).to eq('latest_activity_asc')
+ expect(sort_options[5]['text']).to eq(s_('SortOptions|Oldest updated'))
+ end
end
describe 'when a sort option is selected' do
diff --git a/spec/workers/auto_devops/disable_worker_spec.rb b/spec/workers/auto_devops/disable_worker_spec.rb
index 8f7f305b186..1d2b93b9287 100644
--- a/spec/workers/auto_devops/disable_worker_spec.rb
+++ b/spec/workers/auto_devops/disable_worker_spec.rb
@@ -1,4 +1,5 @@
# frozen_string_literal: true
+
require 'spec_helper'
RSpec.describe AutoDevops::DisableWorker, '#perform', feature_category: :auto_devops do
diff --git a/spec/workers/background_migration/ci_database_worker_spec.rb b/spec/workers/background_migration/ci_database_worker_spec.rb
index 496e7830c94..952c9ebfce8 100644
--- a/spec/workers/background_migration/ci_database_worker_spec.rb
+++ b/spec/workers/background_migration/ci_database_worker_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe BackgroundMigration::CiDatabaseWorker, :clean_gitlab_redis_shared_state,
- :clean_gitlab_redis_cluster_shared_state, feature_category: :database do
+RSpec.describe BackgroundMigration::CiDatabaseWorker, :clean_gitlab_redis_cluster_shared_state,
+ feature_category: :database do
before do
skip_if_shared_database(:ci)
end
diff --git a/spec/workers/background_migration_worker_spec.rb b/spec/workers/background_migration_worker_spec.rb
index 4cffbe5be97..76509b4b227 100644
--- a/spec/workers/background_migration_worker_spec.rb
+++ b/spec/workers/background_migration_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BackgroundMigrationWorker, :clean_gitlab_redis_shared_state,
- :clean_gitlab_redis_cluster_shared_state, feature_category: :database do
+RSpec.describe BackgroundMigrationWorker, :clean_gitlab_redis_cluster_shared_state,
+ feature_category: :database do
it_behaves_like 'it runs background migration jobs', 'main'
end
diff --git a/spec/workers/bulk_import_worker_spec.rb b/spec/workers/bulk_import_worker_spec.rb
index c96e5ace124..8b73549e071 100644
--- a/spec/workers/bulk_import_worker_spec.rb
+++ b/spec/workers/bulk_import_worker_spec.rb
@@ -3,306 +3,28 @@
require 'spec_helper'
RSpec.describe BulkImportWorker, feature_category: :importers do
- describe '#perform' do
- context 'when no bulk import is found' do
- it 'does nothing' do
- expect(described_class).not_to receive(:perform_in)
-
- subject.perform(non_existing_record_id)
- end
- end
-
- context 'when bulk import is finished' do
- it 'does nothing' do
- bulk_import = create(:bulk_import, :finished)
-
- expect(described_class).not_to receive(:perform_in)
-
- subject.perform(bulk_import.id)
- end
- end
-
- context 'when bulk import is failed' do
- it 'does nothing' do
- bulk_import = create(:bulk_import, :failed)
-
- expect(described_class).not_to receive(:perform_in)
-
- subject.perform(bulk_import.id)
- end
- end
-
- context 'when all entities are processed' do
- it 'marks bulk import as finished' do
- bulk_import = create(:bulk_import, :started)
- create(:bulk_import_entity, :finished, bulk_import: bulk_import)
- create(:bulk_import_entity, :failed, bulk_import: bulk_import)
-
- subject.perform(bulk_import.id)
-
- expect(bulk_import.reload.finished?).to eq(true)
- end
- end
-
- context 'when all entities are failed' do
- it 'marks bulk import as failed' do
- bulk_import = create(:bulk_import, :started)
- create(:bulk_import_entity, :failed, bulk_import: bulk_import)
- create(:bulk_import_entity, :failed, bulk_import: bulk_import)
-
- subject.perform(bulk_import.id)
-
- expect(bulk_import.reload.failed?).to eq(true)
- end
- end
-
- context 'when maximum allowed number of import entities in progress' do
- it 'reenqueues itself' do
- bulk_import = create(:bulk_import, :started)
- create(:bulk_import_entity, :created, bulk_import: bulk_import)
- (described_class::DEFAULT_BATCH_SIZE + 1).times { |_| create(:bulk_import_entity, :started, bulk_import: bulk_import) }
-
- expect(described_class).to receive(:perform_in).with(described_class::PERFORM_DELAY, bulk_import.id)
- expect(BulkImports::ExportRequestWorker).not_to receive(:perform_async)
-
- subject.perform(bulk_import.id)
- end
- end
-
- context 'when bulk import is created' do
- it 'marks bulk import as started' do
- bulk_import = create(:bulk_import, :created)
- create(:bulk_import_entity, :created, bulk_import: bulk_import)
-
- subject.perform(bulk_import.id)
-
- expect(bulk_import.reload.started?).to eq(true)
- end
-
- it 'creates all the required pipeline trackers' do
- bulk_import = create(:bulk_import, :created)
- entity_1 = create(:bulk_import_entity, :created, bulk_import: bulk_import)
- entity_2 = create(:bulk_import_entity, :created, bulk_import: bulk_import)
-
- expect { subject.perform(bulk_import.id) }
- .to change { BulkImports::Tracker.count }
- .by(BulkImports::Groups::Stage.new(entity_1).pipelines.size * 2)
-
- expect(entity_1.trackers).not_to be_empty
- expect(entity_2.trackers).not_to be_empty
- end
-
- context 'when there are created entities to process' do
- let_it_be(:bulk_import) { create(:bulk_import, :created) }
-
- before do
- stub_const("#{described_class}::DEFAULT_BATCH_SIZE", 1)
- end
-
- it 'marks a batch of entities as started, enqueues EntityWorker, ExportRequestWorker and reenqueues' do
- create(:bulk_import_entity, :created, bulk_import: bulk_import)
- create(:bulk_import_entity, :created, bulk_import: bulk_import)
-
- expect(described_class).to receive(:perform_in).with(described_class::PERFORM_DELAY, bulk_import.id)
- expect(BulkImports::ExportRequestWorker).to receive(:perform_async).once
-
- subject.perform(bulk_import.id)
-
- expect(bulk_import.entities.map(&:status_name)).to contain_exactly(:created, :started)
- end
-
- context 'when there are project entities to process' do
- it 'enqueues ExportRequestWorker' do
- create(:bulk_import_entity, :created, :project_entity, bulk_import: bulk_import)
+ let_it_be(:bulk_import) { create(:bulk_import) }
+ let_it_be(:job_args) { [bulk_import.id] }
- expect(BulkImports::ExportRequestWorker).to receive(:perform_async).once
-
- subject.perform(bulk_import.id)
- end
- end
- end
-
- context 'when exception occurs' do
- it 'tracks the exception & marks import as failed' do
- bulk_import = create(:bulk_import, :created)
- create(:bulk_import_entity, :created, bulk_import: bulk_import)
-
- allow(BulkImports::ExportRequestWorker).to receive(:perform_async).and_raise(StandardError)
-
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(kind_of(StandardError), bulk_import_id: bulk_import.id)
-
- subject.perform(bulk_import.id)
-
- expect(bulk_import.reload.failed?).to eq(true)
- end
- end
- end
-
- context 'when importing a group' do
- it 'creates trackers for group entity' do
- bulk_import = create(:bulk_import)
- entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
-
- subject.perform(bulk_import.id)
-
- expect(entity.trackers.to_a).to include(
- have_attributes(
- stage: 0, status_name: :created, relation: BulkImports::Groups::Pipelines::GroupPipeline.to_s
- ),
- have_attributes(
- stage: 1, status_name: :created, relation: BulkImports::Groups::Pipelines::GroupAttributesPipeline.to_s
- )
- )
- end
- end
-
- context 'when importing a project' do
- it 'creates trackers for project entity' do
- bulk_import = create(:bulk_import)
- entity = create(:bulk_import_entity, :project_entity, bulk_import: bulk_import)
-
- subject.perform(bulk_import.id)
-
- expect(entity.trackers.to_a).to include(
- have_attributes(
- stage: 0, status_name: :created, relation: BulkImports::Projects::Pipelines::ProjectPipeline.to_s
- ),
- have_attributes(
- stage: 1, status_name: :created, relation: BulkImports::Projects::Pipelines::RepositoryPipeline.to_s
- )
- )
- end
- end
-
- context 'when tracker configuration has a minimum version defined' do
- before do
- allow_next_instance_of(BulkImports::Groups::Stage) do |stage|
- allow(stage).to receive(:config).and_return(
- {
- pipeline1: { pipeline: 'PipelineClass1', stage: 0 },
- pipeline2: { pipeline: 'PipelineClass2', stage: 1, minimum_source_version: '14.10.0' },
- pipeline3: { pipeline: 'PipelineClass3', stage: 1, minimum_source_version: '15.0.0' },
- pipeline5: { pipeline: 'PipelineClass4', stage: 1, minimum_source_version: '15.1.0' },
- pipeline6: { pipeline: 'PipelineClass5', stage: 1, minimum_source_version: '16.0.0' }
- }
- )
- end
- end
-
- context 'when the source instance version is older than the tracker mininum version' do
- let_it_be(:bulk_import) { create(:bulk_import, source_version: '15.0.0') }
- let_it_be(:entity) { create(:bulk_import_entity, :group_entity, bulk_import: bulk_import) }
-
- it 'creates trackers as skipped if version requirement does not meet' do
- subject.perform(bulk_import.id)
-
- expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
- [:created, 'PipelineClass1'],
- [:created, 'PipelineClass2'],
- [:created, 'PipelineClass3'],
- [:skipped, 'PipelineClass4'],
- [:skipped, 'PipelineClass5']
- )
- end
-
- it 'logs an info message for the skipped pipelines' do
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger).to receive(:info).with({
- message: 'Pipeline skipped as source instance version not compatible with pipeline',
- bulk_import_entity_id: entity.id,
- bulk_import_id: entity.bulk_import_id,
- bulk_import_entity_type: entity.source_type,
- source_full_path: entity.source_full_path,
- importer: 'gitlab_migration',
- pipeline_name: 'PipelineClass4',
- minimum_source_version: '15.1.0',
- maximum_source_version: nil,
- source_version: '15.0.0'
- })
-
- expect(logger).to receive(:info).with({
- message: 'Pipeline skipped as source instance version not compatible with pipeline',
- bulk_import_entity_id: entity.id,
- bulk_import_id: entity.bulk_import_id,
- bulk_import_entity_type: entity.source_type,
- source_full_path: entity.source_full_path,
- importer: 'gitlab_migration',
- pipeline_name: 'PipelineClass5',
- minimum_source_version: '16.0.0',
- maximum_source_version: nil,
- source_version: '15.0.0'
- })
- end
-
- subject.perform(bulk_import.id)
- end
+ describe '#perform' do
+ it 'executes the BulkImports::ProcessService' do
+ expect_next_instance_of(BulkImports::ProcessService) do |process_service|
+ expect(process_service).to receive(:execute)
end
- context 'when the source instance version is undefined' do
- it 'creates trackers as created' do
- bulk_import = create(:bulk_import, source_version: nil)
- entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
-
- subject.perform(bulk_import.id)
-
- expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
- [:created, 'PipelineClass1'],
- [:created, 'PipelineClass2'],
- [:created, 'PipelineClass3'],
- [:created, 'PipelineClass4'],
- [:created, 'PipelineClass5']
- )
- end
- end
+ described_class.new.perform(bulk_import.id)
end
- context 'when tracker configuration has a maximum version defined' do
- before do
- allow_next_instance_of(BulkImports::Groups::Stage) do |stage|
- allow(stage).to receive(:config).and_return(
- {
- pipeline1: { pipeline: 'PipelineClass1', stage: 0 },
- pipeline2: { pipeline: 'PipelineClass2', stage: 1, maximum_source_version: '14.10.0' },
- pipeline3: { pipeline: 'PipelineClass3', stage: 1, maximum_source_version: '15.0.0' },
- pipeline5: { pipeline: 'PipelineClass4', stage: 1, maximum_source_version: '15.1.0' },
- pipeline6: { pipeline: 'PipelineClass5', stage: 1, maximum_source_version: '16.0.0' }
- }
- )
- end
- end
-
- context 'when the source instance version is older than the tracker maximum version' do
- it 'creates trackers as skipped if version requirement does not meet' do
- bulk_import = create(:bulk_import, source_version: '15.0.0')
- entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
-
- subject.perform(bulk_import.id)
+ context 'when no BulkImport is found' do
+ let(:job_args) { nil }
- expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
- [:created, 'PipelineClass1'],
- [:skipped, 'PipelineClass2'],
- [:created, 'PipelineClass3'],
- [:created, 'PipelineClass4'],
- [:created, 'PipelineClass5']
- )
- end
+ it 'returns without error' do
+ expect { described_class.new.perform(bulk_import.id) }.not_to raise_error
end
- context 'when the source instance version is a patch version' do
- it 'creates trackers with the same status as the non-patch source version' do
- bulk_import_1 = create(:bulk_import, source_version: '15.0.1')
- entity_1 = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import_1)
-
- bulk_import_2 = create(:bulk_import, source_version: '15.0.0')
- entity_2 = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import_2)
-
- described_class.perform_inline(bulk_import_1.id)
- described_class.perform_inline(bulk_import_2.id)
-
- trackers_1 = entity_1.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }
- trackers_2 = entity_2.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }
-
- expect(trackers_1).to eq(trackers_2)
+ it 'does not executes the BulkImports::ProcessService' do
+ expect_any_instance_of(BulkImports::ProcessService) do |process_service|
+ expect(process_service).not_to receive(:execute)
end
end
end
diff --git a/spec/workers/bulk_imports/entity_worker_spec.rb b/spec/workers/bulk_imports/entity_worker_spec.rb
index 8238721df01..5f948906c08 100644
--- a/spec/workers/bulk_imports/entity_worker_spec.rb
+++ b/spec/workers/bulk_imports/entity_worker_spec.rb
@@ -3,9 +3,11 @@
require 'spec_helper'
RSpec.describe BulkImports::EntityWorker, feature_category: :importers do
- let_it_be(:entity) { create(:bulk_import_entity) }
+ subject(:worker) { described_class.new }
- let_it_be(:pipeline_tracker) do
+ let_it_be(:entity) { create(:bulk_import_entity, :started) }
+
+ let_it_be_with_reload(:pipeline_tracker) do
create(
:bulk_import_tracker,
entity: entity,
@@ -14,173 +16,127 @@ RSpec.describe BulkImports::EntityWorker, feature_category: :importers do
)
end
- let(:job_args) { entity.id }
+ let_it_be_with_reload(:pipeline_tracker_2) do
+ create(
+ :bulk_import_tracker,
+ entity: entity,
+ pipeline_name: 'Stage1::Pipeline',
+ stage: 1
+ )
+ end
+
+ include_examples 'an idempotent worker' do
+ let(:job_args) { entity.id }
- it 'updates pipeline trackers to enqueued state when selected' do
- worker = described_class.new
+ before do
+ allow(described_class).to receive(:perform_in)
+ allow(BulkImports::PipelineWorker).to receive(:perform_async)
+ end
- next_tracker = worker.send(:next_pipeline_trackers_for, entity.id).first
+ it 'enqueues the pipeline workers of the first stage and then re-enqueues itself' do
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger).to receive(:info).with(hash_including('message' => 'Stage starting', 'entity_stage' => 0))
+ expect(logger).to receive(:info).with(hash_including('message' => 'Stage running', 'entity_stage' => 0))
+ end
- next_tracker.reload
+ expect(BulkImports::PipelineWorker)
+ .to receive(:perform_async)
+ .with(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
- expect(next_tracker.enqueued?).to be_truthy
+ expect(described_class).to receive(:perform_in).twice.with(described_class::PERFORM_DELAY, entity.id)
- expect(worker.send(:next_pipeline_trackers_for, entity.id))
- .not_to include(next_tracker)
+ expect { subject }.to change { pipeline_tracker.reload.status_name }.from(:created).to(:enqueued)
+ end
end
- include_examples 'an idempotent worker' do
- it 'enqueues the first stage pipelines work' do
+ context 'when pipeline workers from a stage are running' do
+ before do
+ pipeline_tracker.enqueue!
+ end
+
+ it 'does not enqueue the pipeline workers from the next stage and re-enqueues itself' do
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- # the worker runs twice but only executes once
- expect(logger)
- .to receive(:info).twice
- .with(
- hash_including(
- 'bulk_import_entity_id' => entity.id,
- 'bulk_import_id' => entity.bulk_import_id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path,
- 'current_stage' => nil,
- 'message' => 'Stage starting',
- 'source_version' => entity.bulk_import.source_version_info.to_s,
- 'importer' => 'gitlab_migration'
- )
- )
+ expect(logger).to receive(:info).with(hash_including('message' => 'Stage running', 'entity_stage' => 0))
end
- expect(BulkImports::PipelineWorker)
- .to receive(:perform_async)
- .with(
- pipeline_tracker.id,
- pipeline_tracker.stage,
- entity.id
- )
+ expect(BulkImports::PipelineWorker).not_to receive(:perform_async)
+ expect(described_class).to receive(:perform_in).with(described_class::PERFORM_DELAY, entity.id)
- subject
+ worker.perform(entity.id)
end
+ end
- it 'logs and tracks the raised exceptions' do
- exception = StandardError.new('Error!')
-
- expect(BulkImports::PipelineWorker)
- .to receive(:perform_async)
- .and_raise(exception)
+ context 'when there are no pipeline workers from the previous stage running' do
+ before do
+ pipeline_tracker.fail_op!
+ end
+ it 'enqueues the pipeline workers from the next stage and re-enqueues itself' do
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:info).twice
- .with(
- hash_including(
- 'bulk_import_entity_id' => entity.id,
- 'bulk_import_id' => entity.bulk_import_id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path,
- 'current_stage' => nil,
- 'source_version' => entity.bulk_import.source_version_info.to_s,
- 'importer' => 'gitlab_migration'
- )
- )
+ expect(logger).to receive(:info).with(hash_including('message' => 'Stage starting', 'entity_stage' => 1))
+ end
- expect(logger)
- .to receive(:error)
+ expect(BulkImports::PipelineWorker)
+ .to receive(:perform_async)
.with(
- hash_including(
- 'bulk_import_entity_id' => entity.id,
- 'bulk_import_id' => entity.bulk_import_id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path,
- 'current_stage' => nil,
- 'message' => 'Entity failed',
- 'exception.backtrace' => anything,
- 'exception.class' => 'StandardError',
- 'exception.message' => 'Error!',
- 'importer' => 'gitlab_migration',
- 'source_version' => entity.bulk_import.source_version_info.to_s
- )
+ pipeline_tracker_2.id,
+ pipeline_tracker_2.stage,
+ entity.id
)
- end
- expect(Gitlab::ErrorTracking)
- .to receive(:track_exception)
- .with(
- exception,
- bulk_import_entity_id: entity.id,
- bulk_import_id: entity.bulk_import_id,
- bulk_import_entity_type: entity.source_type,
- source_full_path: entity.source_full_path,
- source_version: entity.bulk_import.source_version_info.to_s,
- importer: 'gitlab_migration'
- )
+ expect(described_class).to receive(:perform_in).with(described_class::PERFORM_DELAY, entity.id)
- subject
+ worker.perform(entity.id)
+ end
+ end
- expect(entity.reload.failed?).to eq(true)
+ context 'when there are no next stage to run' do
+ before do
+ pipeline_tracker.fail_op!
+ pipeline_tracker_2.fail_op!
end
- context 'in first stage' do
- let(:job_args) { [entity.id, 0] }
+ it 'does not enqueue any pipeline worker and re-enqueues itself' do
+ expect(BulkImports::PipelineWorker).not_to receive(:perform_async)
+ expect(described_class).to receive(:perform_in).with(described_class::PERFORM_DELAY, entity.id)
- it 'do not enqueue a new pipeline job if the current stage still running' do
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:info).twice
- .with(
- hash_including(
- 'bulk_import_entity_id' => entity.id,
- 'bulk_import_id' => entity.bulk_import_id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path,
- 'current_stage' => 0,
- 'message' => 'Stage running',
- 'source_version' => entity.bulk_import.source_version_info.to_s,
- 'importer' => 'gitlab_migration'
- )
- )
- end
+ worker.perform(entity.id)
+ end
+ end
- expect(BulkImports::PipelineWorker)
- .not_to receive(:perform_async)
+ context 'when entity status is not started' do
+ let(:entity) { create(:bulk_import_entity, :finished) }
- subject
- end
+ it 'does not re-enqueues itself' do
+ expect(described_class).not_to receive(:perform_in)
+
+ worker.perform(entity.id)
+ end
+ end
- it 'enqueues the next stage pipelines when the current stage is finished' do
- next_stage_pipeline_tracker = create(
- :bulk_import_tracker,
- entity: entity,
- pipeline_name: 'Stage1::Pipeline',
- stage: 1
- )
+ it 'logs and tracks the raised exceptions' do
+ exception = StandardError.new('Error!')
- pipeline_tracker.fail_op!
+ expect(BulkImports::PipelineWorker)
+ .to receive(:perform_async)
+ .and_raise(exception)
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:info).twice
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
.with(
+ exception,
hash_including(
- 'bulk_import_entity_id' => entity.id,
- 'bulk_import_id' => entity.bulk_import_id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path,
- 'current_stage' => 0,
- 'source_version' => entity.bulk_import.source_version_info.to_s,
- 'importer' => 'gitlab_migration'
+ bulk_import_entity_id: entity.id,
+ bulk_import_id: entity.bulk_import_id,
+ bulk_import_entity_type: entity.source_type,
+ source_full_path: entity.source_full_path,
+ source_version: entity.bulk_import.source_version_info.to_s,
+ importer: 'gitlab_migration'
)
)
- end
- expect(BulkImports::PipelineWorker)
- .to receive(:perform_async)
- .with(
- next_stage_pipeline_tracker.id,
- next_stage_pipeline_tracker.stage,
- entity.id
- )
+ worker.perform(entity.id)
- subject
- end
- end
+ expect(entity.reload.failed?).to eq(true)
end
end
diff --git a/spec/workers/bulk_imports/finish_batched_pipeline_worker_spec.rb b/spec/workers/bulk_imports/finish_batched_pipeline_worker_spec.rb
index 6fe6b420f2b..5beb11c64aa 100644
--- a/spec/workers/bulk_imports/finish_batched_pipeline_worker_spec.rb
+++ b/spec/workers/bulk_imports/finish_batched_pipeline_worker_spec.rb
@@ -13,18 +13,33 @@ RSpec.describe BulkImports::FinishBatchedPipelineWorker, feature_category: :impo
subject(:worker) { described_class.new }
describe '#perform' do
- it 'finishes pipeline and enqueues entity worker' do
- expect(BulkImports::EntityWorker)
- .to receive(:perform_async)
- .with(entity.id, pipeline_tracker.stage)
+ context 'when job version is nil' do
+ before do
+ allow(subject).to receive(:job_version).and_return(nil)
+ end
+
+ it 'finishes pipeline and enqueues entity worker' do
+ expect(BulkImports::EntityWorker).to receive(:perform_async)
+ .with(entity.id)
- subject.perform(pipeline_tracker.id)
+ subject.perform(pipeline_tracker.id)
+
+ expect(pipeline_tracker.reload.finished?).to eq(true)
+ end
+ end
+
+ context 'when job version is present' do
+ it 'finishes pipeline and does not enqueues entity worker' do
+ expect(BulkImports::EntityWorker).not_to receive(:perform_async)
+
+ subject.perform(pipeline_tracker.id)
- expect(pipeline_tracker.reload.finished?).to eq(true)
+ expect(pipeline_tracker.reload.finished?).to eq(true)
+ end
end
context 'when import is in progress' do
- it 're-enqueues' do
+ it 're-enqueues for any started batches' do
create(:bulk_import_batch_tracker, :started, tracker: pipeline_tracker)
expect(described_class)
@@ -33,6 +48,16 @@ RSpec.describe BulkImports::FinishBatchedPipelineWorker, feature_category: :impo
subject.perform(pipeline_tracker.id)
end
+
+ it 're-enqueues for any created batches' do
+ create(:bulk_import_batch_tracker, :created, tracker: pipeline_tracker)
+
+ expect(described_class)
+ .to receive(:perform_in)
+ .with(described_class::REQUEUE_DELAY, pipeline_tracker.id)
+
+ subject.perform(pipeline_tracker.id)
+ end
end
context 'when pipeline tracker is stale' do
diff --git a/spec/workers/bulk_imports/pipeline_batch_worker_spec.rb b/spec/workers/bulk_imports/pipeline_batch_worker_spec.rb
index 3c33910b62c..78ce52c41b4 100644
--- a/spec/workers/bulk_imports/pipeline_batch_worker_spec.rb
+++ b/spec/workers/bulk_imports/pipeline_batch_worker_spec.rb
@@ -102,6 +102,7 @@ RSpec.describe BulkImports::PipelineBatchWorker, feature_category: :importers do
end
expect(described_class).to receive(:perform_in).with(60, batch.id)
+ expect(BulkImports::FinishBatchedPipelineWorker).not_to receive(:perform_async).with(tracker.id)
subject.perform(batch.id)
diff --git a/spec/workers/bulk_imports/pipeline_worker_spec.rb b/spec/workers/bulk_imports/pipeline_worker_spec.rb
index 6318e925da6..e1259d5666d 100644
--- a/spec/workers/bulk_imports/pipeline_worker_spec.rb
+++ b/spec/workers/bulk_imports/pipeline_worker_spec.rb
@@ -38,38 +38,60 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
end
end
- shared_examples 'successfully runs the pipeline' do
- it 'runs the given pipeline successfully' do
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:info)
- .with(
- hash_including(
- 'pipeline_name' => 'FakePipeline',
- 'bulk_import_id' => entity.bulk_import_id,
- 'bulk_import_entity_id' => entity.id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path
- )
+ it 'runs the given pipeline successfully' do
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:info)
+ .with(
+ hash_including(
+ 'pipeline_name' => 'FakePipeline',
+ 'bulk_import_id' => entity.bulk_import_id,
+ 'bulk_import_entity_id' => entity.id,
+ 'bulk_import_entity_type' => entity.source_type,
+ 'source_full_path' => entity.source_full_path
)
- end
+ )
+ end
- expect(BulkImports::EntityWorker)
- .to receive(:perform_async)
- .with(entity.id, pipeline_tracker.stage)
+ allow(subject).to receive(:jid).and_return('jid')
- allow(subject).to receive(:jid).and_return('jid')
+ subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.status_name).to eq(:finished)
+ expect(pipeline_tracker.jid).to eq('jid')
+ end
+
+ context 'when job version is nil' do
+ before do
+ allow(subject).to receive(:job_version).and_return(nil)
+ end
+
+ it 'runs the given pipeline successfully and enqueues entity worker' do
+ expect(BulkImports::EntityWorker).to receive(:perform_async).with(entity.id)
subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
pipeline_tracker.reload
expect(pipeline_tracker.status_name).to eq(:finished)
- expect(pipeline_tracker.jid).to eq('jid')
end
- end
- it_behaves_like 'successfully runs the pipeline'
+ context 'when an error occurs' do
+ it 'enqueues entity worker' do
+ expect_next_instance_of(pipeline_class) do |pipeline|
+ expect(pipeline)
+ .to receive(:run)
+ .and_raise(StandardError, 'Error!')
+ end
+
+ expect(BulkImports::EntityWorker).to receive(:perform_async).with(entity.id)
+
+ subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+ end
+ end
+ end
context 'when exclusive lease cannot be obtained' do
it 'does not run the pipeline' do
@@ -132,10 +154,6 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
)
)
- expect(BulkImports::EntityWorker)
- .to receive(:perform_async)
- .with(entity.id, pipeline_tracker.stage)
-
expect(BulkImports::Failure)
.to receive(:create)
.with(
@@ -157,37 +175,6 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
expect(pipeline_tracker.jid).to eq('jid')
end
- shared_examples 'successfully runs the pipeline' do
- it 'runs the given pipeline successfully' do
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:info)
- .with(
- hash_including(
- 'pipeline_name' => 'FakePipeline',
- 'bulk_import_id' => entity.bulk_import_id,
- 'bulk_import_entity_id' => entity.id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path
- )
- )
- end
-
- expect(BulkImports::EntityWorker)
- .to receive(:perform_async)
- .with(entity.id, pipeline_tracker.stage)
-
- allow(subject).to receive(:jid).and_return('jid')
-
- subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
-
- pipeline_tracker.reload
-
- expect(pipeline_tracker.status_name).to eq(:finished)
- expect(pipeline_tracker.jid).to eq('jid')
- end
- end
-
context 'when enqueued pipeline cannot be found' do
shared_examples 'logs the error' do
it 'logs the error' do
@@ -212,10 +199,6 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
)
end
- expect(BulkImports::EntityWorker)
- .to receive(:perform_async)
- .with(entity.id, pipeline_tracker.stage)
-
subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
end
end
diff --git a/spec/workers/ci/initial_pipeline_process_worker_spec.rb b/spec/workers/ci/initial_pipeline_process_worker_spec.rb
index c7bbe83433e..9a94f1cbb4c 100644
--- a/spec/workers/ci/initial_pipeline_process_worker_spec.rb
+++ b/spec/workers/ci/initial_pipeline_process_worker_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Ci::InitialPipelineProcessWorker, feature_category: :continuous_integration do
let_it_be(:project) { create(:project, :repository) }
let(:job) { build(:ci_build, project: project) }
- let(:stage) { build(:ci_stage, project: project, statuses: [job]) }
+ let(:stage) { build(:ci_stage, project: project, statuses: [job], position: 1) }
let(:pipeline) { create(:ci_pipeline, stages: [stage], status: :created, project: project, builds: [job]) }
describe '#perform' do
@@ -42,19 +42,16 @@ RSpec.describe Ci::InitialPipelineProcessWorker, feature_category: :continuous_i
end
context 'when a pipeline contains a deployment job' do
- let(:job) { build(:ci_build, :start_review_app, project: project) }
+ before do
+ allow(::Deployments::CreateForJobService).to receive(:new).and_call_original
+ allow(::Ci::PipelineProcessing::AtomicProcessingService).to receive(:new).and_call_original
+ end
+
+ let(:job) { build(:ci_build, :created, :start_review_app, project: project, stage_idx: 1) }
let!(:environment) { create(:environment, project: project, name: job.expanded_environment_name) }
it 'creates a deployment record' do
expect { subject }.to change { Deployment.count }.by(1)
-
- expect(job.deployment).to have_attributes(
- project: job.project,
- ref: job.ref,
- sha: job.sha,
- deployable: job,
- deployable_type: 'CommitStatus',
- environment: job.persisted_environment)
end
context 'when the corresponding environment does not exist' do
@@ -66,6 +63,39 @@ RSpec.describe Ci::InitialPipelineProcessWorker, feature_category: :continuous_i
expect(job.deployment).to be_nil
end
end
+
+ it 'kicks off atomic processing before a deployment is created' do
+ expect(::Ci::PipelineProcessing::AtomicProcessingService).to receive(:new).ordered
+ expect(::Deployments::CreateForJobService).to receive(:new).ordered
+
+ subject
+ end
+
+ context 'when `create_deployment_only_for_processable_jobs` FF is disabled' do
+ before do
+ stub_feature_flags(create_deployment_only_for_processable_jobs: false)
+ end
+
+ it 'creates a deployment record' do
+ expect { subject }.to change { Deployment.count }.by(1)
+
+ expect(job.deployment).to have_attributes(
+ project: job.project,
+ ref: job.ref,
+ sha: job.sha,
+ deployable: job,
+ deployable_type: 'CommitStatus',
+ environment: job.persisted_environment
+ )
+ end
+
+ it 'a deployment is created before atomic processing is kicked off' do
+ expect(::Deployments::CreateForJobService).to receive(:new).ordered
+ expect(::Ci::PipelineProcessing::AtomicProcessingService).to receive(:new).ordered
+
+ subject
+ end
+ end
end
end
end
diff --git a/spec/workers/ci/ref_delete_unlock_artifacts_worker_spec.rb b/spec/workers/ci/ref_delete_unlock_artifacts_worker_spec.rb
index ede4dad1272..dcdb96242c2 100644
--- a/spec/workers/ci/ref_delete_unlock_artifacts_worker_spec.rb
+++ b/spec/workers/ci/ref_delete_unlock_artifacts_worker_spec.rb
@@ -2,31 +2,23 @@
require 'spec_helper'
-RSpec.describe Ci::RefDeleteUnlockArtifactsWorker, feature_category: :build_artifacts do
+RSpec.describe Ci::RefDeleteUnlockArtifactsWorker, :unlock_pipelines, :clean_gitlab_redis_shared_state, feature_category: :build_artifacts do
describe '#perform' do
subject(:perform) { worker.perform(project_id, user_id, ref) }
let(:worker) { described_class.new }
-
let(:ref) { 'refs/heads/master' }
-
let(:project) { create(:project) }
+ let(:enqueue_pipelines_to_unlock_service_class) { Ci::Refs::EnqueuePipelinesToUnlockService }
+ let(:enqueue_pipelines_to_unlock_service_instance_spy) { instance_double(Ci::Refs::EnqueuePipelinesToUnlockService) }
- include_examples 'an idempotent worker' do
- subject(:idempotent_perform) { perform_multiple([project_id, user_id, ref], exec_times: 2) }
-
+ context 'when project exists' do
let(:project_id) { project.id }
- let(:user_id) { project.creator.id }
-
- let(:pipeline) { create(:ci_pipeline, ref: 'master', project: project, locked: :artifacts_locked) }
- it 'unlocks the artifacts from older pipelines' do
- expect { idempotent_perform }.to change { pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
+ before do
+ allow(enqueue_pipelines_to_unlock_service_class)
+ .to receive(:new).and_return(enqueue_pipelines_to_unlock_service_instance_spy)
end
- end
-
- context 'when project exists' do
- let(:project_id) { project.id }
context 'when user exists' do
let(:user_id) { project.creator.id }
@@ -34,24 +26,14 @@ RSpec.describe Ci::RefDeleteUnlockArtifactsWorker, feature_category: :build_arti
context 'when ci ref exists for project' do
let!(:ci_ref) { create(:ci_ref, ref_path: ref, project: project) }
- it 'calls the service' do
- service = spy(Ci::UnlockArtifactsService)
- expect(Ci::UnlockArtifactsService).to receive(:new).and_return(service)
+ it 'calls the enqueue pipelines to unlock service' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:total_pending_entries, 3)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:total_new_entries, 2)
- perform
-
- expect(service).to have_received(:execute).with(ci_ref)
- end
+ expect(enqueue_pipelines_to_unlock_service_instance_spy)
+ .to receive(:execute).with(ci_ref).and_return(total_pending_entries: 3, total_new_entries: 2)
- context 'when a locked pipeline with persisted artifacts exists' do
- let!(:pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: 'master', project: project, locked: :artifacts_locked) }
-
- it 'logs the correct extra metadata' do
- expect(worker).to receive(:log_extra_metadata_on_done).with(:unlocked_pipelines, 1)
- expect(worker).to receive(:log_extra_metadata_on_done).with(:unlocked_job_artifacts, 2)
-
- perform
- end
+ perform
end
end
@@ -59,7 +41,7 @@ RSpec.describe Ci::RefDeleteUnlockArtifactsWorker, feature_category: :build_arti
let!(:another_ci_ref) { create(:ci_ref, ref_path: ref) }
it 'does not call the service' do
- expect(Ci::UnlockArtifactsService).not_to receive(:new)
+ expect(enqueue_pipelines_to_unlock_service_class).not_to receive(:new)
perform
end
@@ -69,13 +51,11 @@ RSpec.describe Ci::RefDeleteUnlockArtifactsWorker, feature_category: :build_arti
let!(:another_ci_ref) { create(:ci_ref, ref_path: ref) }
let!(:ci_ref) { create(:ci_ref, ref_path: ref, project: project) }
- it 'calls the service with the correct ref_id' do
- service = spy(Ci::UnlockArtifactsService)
- expect(Ci::UnlockArtifactsService).to receive(:new).and_return(service)
+ it 'calls the enqueue pipelines to unlock service with the correct ref' do
+ expect(enqueue_pipelines_to_unlock_service_instance_spy)
+ .to receive(:execute).with(ci_ref).and_return(total_pending_entries: 3, total_new_entries: 2)
perform
-
- expect(service).to have_received(:execute).with(ci_ref)
end
end
end
@@ -83,8 +63,8 @@ RSpec.describe Ci::RefDeleteUnlockArtifactsWorker, feature_category: :build_arti
context 'when user does not exist' do
let(:user_id) { non_existing_record_id }
- it 'does not call service' do
- expect(Ci::UnlockArtifactsService).not_to receive(:new)
+ it 'does not call the service' do
+ expect(enqueue_pipelines_to_unlock_service_class).not_to receive(:new)
perform
end
@@ -95,11 +75,27 @@ RSpec.describe Ci::RefDeleteUnlockArtifactsWorker, feature_category: :build_arti
let(:project_id) { non_existing_record_id }
let(:user_id) { project.creator.id }
- it 'does not call service' do
- expect(Ci::UnlockArtifactsService).not_to receive(:new)
+ it 'does not call the service' do
+ expect(enqueue_pipelines_to_unlock_service_class).not_to receive(:new)
perform
end
end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:project_id) { project.id }
+ let(:user_id) { project.creator.id }
+ let(:exec_times) { IdempotentWorkerHelper::WORKER_EXEC_TIMES }
+ let(:job_args) { [project_id, user_id, ref] }
+
+ let!(:ci_ref) { create(:ci_ref, ref_path: ref, project: project) }
+ let!(:pipeline) { create(:ci_pipeline, ci_ref: ci_ref, project: project, locked: :artifacts_locked) }
+
+ it 'enqueues all pipelines for the ref to be unlocked' do
+ subject
+
+ expect(pipeline_ids_waiting_to_be_unlocked).to eq([pipeline.id])
+ end
+ end
end
end
diff --git a/spec/workers/ci/refs/unlock_previous_pipelines_worker_spec.rb b/spec/workers/ci/refs/unlock_previous_pipelines_worker_spec.rb
new file mode 100644
index 00000000000..2f00ea45edc
--- /dev/null
+++ b/spec/workers/ci/refs/unlock_previous_pipelines_worker_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Refs::UnlockPreviousPipelinesWorker, :unlock_pipelines, :clean_gitlab_redis_shared_state, feature_category: :build_artifacts do
+ let(:worker) { described_class.new }
+
+ let!(:older_pipeline) do
+ create(
+ :ci_pipeline,
+ :with_persisted_artifacts,
+ :artifacts_locked
+ )
+ end
+
+ let!(:pipeline) do
+ create(
+ :ci_pipeline,
+ :with_persisted_artifacts,
+ ref: older_pipeline.ref,
+ tag: older_pipeline.tag,
+ project: older_pipeline.project
+ )
+ end
+
+ describe '#perform' do
+ it 'executes a service' do
+ expect_next_instance_of(Ci::Refs::EnqueuePipelinesToUnlockService) do |instance|
+ expect(instance).to receive(:execute).and_call_original
+ end
+
+ worker.perform(pipeline.ci_ref.id)
+ end
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { pipeline.ci_ref.id }
+
+ it 'only enqueues IDs of older pipelines if they are not in the queue' do
+ expect { subject }
+ .to change { pipeline_ids_waiting_to_be_unlocked }
+ .from([])
+ .to([older_pipeline.id])
+ end
+ end
+end
diff --git a/spec/workers/ci/schedule_unlock_pipelines_in_queue_cron_worker_spec.rb b/spec/workers/ci/schedule_unlock_pipelines_in_queue_cron_worker_spec.rb
new file mode 100644
index 00000000000..79132cbfce2
--- /dev/null
+++ b/spec/workers/ci/schedule_unlock_pipelines_in_queue_cron_worker_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::ScheduleUnlockPipelinesInQueueCronWorker, :unlock_pipelines, feature_category: :build_artifacts do
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ it 'enqueues UnlockPipelinesWorker jobs' do
+ expect(Ci::UnlockPipelinesInQueueWorker).to receive(:perform_with_capacity)
+
+ worker.perform
+ end
+ end
+end
diff --git a/spec/workers/ci/unlock_pipelines_in_queue_worker_spec.rb b/spec/workers/ci/unlock_pipelines_in_queue_worker_spec.rb
new file mode 100644
index 00000000000..ca1d234eb5b
--- /dev/null
+++ b/spec/workers/ci/unlock_pipelines_in_queue_worker_spec.rb
@@ -0,0 +1,136 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::UnlockPipelinesInQueueWorker, :unlock_pipelines, :clean_gitlab_redis_shared_state, feature_category: :build_artifacts do
+ let(:worker) { described_class.new }
+
+ it 'is a limited capacity worker' do
+ expect(described_class.new).to be_a(LimitedCapacity::Worker)
+ end
+
+ describe '#perform_work' do
+ let(:service) { instance_double('Ci::UnlockPipelineService') }
+
+ it 'pops the oldest pipeline ID from the queue and unlocks it' do
+ pipeline_1 = create(:ci_pipeline, :artifacts_locked)
+ pipeline_2 = create(:ci_pipeline, :artifacts_locked)
+
+ enqueue_timestamp = nil
+ travel_to(3.minutes.ago) do
+ enqueue_timestamp = Time.current.utc.to_i
+ Ci::UnlockPipelineRequest.enqueue(pipeline_1.id)
+ end
+
+ Ci::UnlockPipelineRequest.enqueue(pipeline_2.id)
+
+ expect(Ci::UnlockPipelineService).to receive(:new).with(pipeline_1).and_return(service)
+ expect(service)
+ .to receive(:execute)
+ .and_return(
+ status: :success,
+ skipped_already_leased: false,
+ skipped_already_unlocked: false,
+ exec_timeout: false,
+ unlocked_job_artifacts: 3,
+ unlocked_pipeline_artifacts: 2
+ )
+
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:pipeline_id, pipeline_1.id)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:project, pipeline_1.project.full_path)
+
+ unlock_timestamp = Time.current.utc
+ unlock_wait_time = unlock_timestamp.to_i - enqueue_timestamp
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:unlock_wait_time, unlock_wait_time)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:remaining_pending, 1)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:skipped_already_leased, false)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:skipped_already_unlocked, false)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:exec_timeout, false)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:unlocked_job_artifacts, 3)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:unlocked_pipeline_artifacts, 2)
+
+ travel_to(unlock_timestamp) do
+ expect { worker.perform_work }
+ .to change { pipeline_ids_waiting_to_be_unlocked }
+ .from([pipeline_1.id, pipeline_2.id])
+ .to([pipeline_2.id])
+ end
+ end
+
+ context 'when queue is empty' do
+ it 'does nothing but still logs information' do
+ expect(Ci::UnlockPipelineService).not_to receive(:new)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:remaining_pending, 0)
+
+ worker.perform_work
+ end
+ end
+
+ context 'when pipeline ID does not exist' do
+ before do
+ Ci::UnlockPipelineRequest.enqueue(9999)
+ end
+
+ it 'does nothing' do
+ expect(Ci::UnlockPipelineService).not_to receive(:new)
+ expect(worker).not_to receive(:log_extra_metadata_on_done)
+
+ worker.perform_work
+ end
+ end
+ end
+
+ describe '#remaining_work_count' do
+ subject { worker.remaining_work_count }
+
+ context 'and there are remaining unlock pipeline requests' do
+ before do
+ Ci::UnlockPipelineRequest.enqueue(123)
+ end
+
+ it { is_expected.to eq(1) }
+ end
+
+ context 'and there are no remaining unlock pipeline requests' do
+ it { is_expected.to eq(0) }
+ end
+ end
+
+ describe '#max_running_jobs' do
+ subject { worker.max_running_jobs }
+
+ before do
+ stub_feature_flags(
+ ci_unlock_pipelines: false,
+ ci_unlock_pipelines_medium: false,
+ ci_unlock_pipelines_high: false
+ )
+ end
+
+ it { is_expected.to eq(0) }
+
+ context 'when ci_unlock_pipelines flag is enabled' do
+ before do
+ stub_feature_flags(ci_unlock_pipelines: true)
+ end
+
+ it { is_expected.to eq(described_class::MAX_RUNNING_LOW) }
+ end
+
+ context 'when ci_unlock_pipelines_medium flag is enabled' do
+ before do
+ stub_feature_flags(ci_unlock_pipelines_medium: true)
+ end
+
+ it { is_expected.to eq(described_class::MAX_RUNNING_MEDIUM) }
+ end
+
+ context 'when ci_unlock_pipelines_high flag is enabled' do
+ before do
+ stub_feature_flags(ci_unlock_pipelines_high: true)
+ end
+
+ it { is_expected.to eq(described_class::MAX_RUNNING_HIGH) }
+ end
+ end
+end
diff --git a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
index 27e1077b138..b2bc502d156 100644
--- a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
end
def representation_class
- MockRepresantation
+ MockRepresentation
end
end.new
end
@@ -62,7 +62,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
let(:stubbed_representation) { representation_class }
before do
- stub_const('MockRepresantation', stubbed_representation)
+ stub_const('MockRepresentation', stubbed_representation)
end
describe '#import', :clean_gitlab_redis_cache do
@@ -76,7 +76,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
it 'imports the object' do
expect(importer_class)
.to receive(:new)
- .with(instance_of(MockRepresantation), project, client)
+ .with(instance_of(MockRepresentation), project, client)
.and_return(importer_instance)
expect(importer_instance)
@@ -86,7 +86,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
.to receive(:info)
.with(
{
- github_identifiers: github_identifiers,
+ external_identifiers: github_identifiers,
message: 'starting importer',
project_id: project.id,
importer: 'klass_name'
@@ -97,7 +97,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
.to receive(:info)
.with(
{
- github_identifiers: github_identifiers,
+ external_identifiers: github_identifiers,
message: 'importer finished',
project_id: project.id,
importer: 'klass_name'
@@ -123,7 +123,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
.to receive(:info)
.with(
{
- github_identifiers: nil,
+ external_identifiers: nil,
message: 'Project import is no longer running. Stopping worker.',
project_id: project.id,
importer: 'klass_name',
@@ -135,12 +135,13 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
end
it 'logs error when the import fails' do
+ exception = StandardError.new('some error')
+
expect(importer_class)
.to receive(:new)
- .with(instance_of(MockRepresantation), project, client)
+ .with(instance_of(MockRepresentation), project, client)
.and_return(importer_instance)
- exception = StandardError.new('some error')
expect(importer_instance)
.to receive(:execute)
.and_raise(exception)
@@ -149,20 +150,46 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
.to receive(:info)
.with(
{
- github_identifiers: github_identifiers,
message: 'starting importer',
+ external_identifiers: github_identifiers,
project_id: project.id,
importer: 'klass_name'
}
)
+ expect(Gitlab::Import::Logger)
+ .to receive(:error)
+ .with(
+ {
+ message: 'importer failed',
+ 'exception.message': 'some error',
+ import_type: project.import_type,
+ project_id: project.id,
+ source: 'klass_name',
+ external_identifiers: github_identifiers
+ }
+ )
+
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(
+ exception,
+ {
+ import_type: project.import_type,
+ project_id: project.id,
+ source: 'klass_name',
+ external_identifiers: github_identifiers
+ }
+ )
+
expect(Gitlab::Import::ImportFailureService)
.to receive(:track)
.with(
project_id: project.id,
exception: exception,
error_source: 'klass_name',
- fail_import: false
+ fail_import: false,
+ external_identifiers: github_identifiers
)
.and_call_original
@@ -188,7 +215,8 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
project_id: project.id,
exception: a_kind_of(NoMethodError),
error_source: 'klass_name',
- fail_import: true
+ fail_import: true,
+ external_identifiers: { object_type: 'dummy' }
)
.and_call_original
@@ -203,7 +231,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
before do
expect(importer_class)
.to receive(:new)
- .with(instance_of(MockRepresantation), project, client)
+ .with(instance_of(MockRepresentation), project, client)
.and_return(importer_instance)
expect(importer_instance)
@@ -216,7 +244,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
.to receive(:info)
.with(
{
- github_identifiers: github_identifiers,
+ external_identifiers: github_identifiers,
message: 'starting importer',
project_id: project.id,
importer: 'klass_name'
@@ -229,7 +257,8 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
project_id: project.id,
exception: exception,
error_source: 'klass_name',
- fail_import: false
+ fail_import: false,
+ external_identifiers: github_identifiers
)
.and_call_original
@@ -296,15 +325,5 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
sidekiq_retries_exhausted
end
end
-
- it 'updates external_identifiers of the correct failure' do
- failure_1, failure_2 = create_list(:import_failure, 2, project: project)
- failure_2.update_column(:correlation_id_value, correlation_id)
-
- sidekiq_retries_exhausted
-
- expect(failure_1.reload.external_identifiers).to be_empty
- expect(failure_2.reload.external_identifiers).to eq(github_identifiers.with_indifferent_access)
- end
end
end
diff --git a/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb b/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
index f65a8cd0d3c..c8f7427d5ae 100644
--- a/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
@@ -92,106 +92,48 @@ RSpec.describe Gitlab::GithubImport::StageMethods, feature_category: :importers
worker.perform(project.id)
end
- context 'when abort_on_failure is false' do
- it 'logs error when import fails' do
- exception = StandardError.new('some error')
+ it 'logs error when import fails' do
+ exception = StandardError.new('some error')
- allow(worker)
- .to receive(:find_project)
- .with(project.id)
- .and_return(project)
-
- expect(worker)
- .to receive(:try_import)
- .and_raise(exception)
-
- expect(Gitlab::GithubImport::Logger)
- .to receive(:info)
- .with(
- {
- message: 'starting stage',
- project_id: project.id,
- import_stage: 'DummyStage'
- }
- )
-
- expect(Gitlab::Import::ImportFailureService)
- .to receive(:track)
- .with(
- {
- project_id: project.id,
- exception: exception,
- error_source: 'DummyStage',
- fail_import: false
- }
- ).and_call_original
-
- expect { worker.perform(project.id) }
- .to raise_error(exception)
-
- expect(project.import_state.reload.status).to eq('started')
-
- expect(project.import_failures).not_to be_empty
- expect(project.import_failures.last.exception_class).to eq('StandardError')
- expect(project.import_failures.last.exception_message).to eq('some error')
- end
- end
-
- context 'when abort_on_failure is true' do
- let(:worker) do
- Class.new do
- def self.name
- 'DummyStage'
- end
-
- def abort_on_failure
- true
- end
-
- include(Gitlab::GithubImport::StageMethods)
- end.new
- end
-
- it 'logs, captures and re-raises the exception and also marks the import as failed' do
- exception = StandardError.new('some error')
-
- allow(worker)
- .to receive(:find_project)
- .with(project.id)
- .and_return(project)
+ allow(worker)
+ .to receive(:find_project)
+ .with(project.id)
+ .and_return(project)
- expect(worker)
- .to receive(:try_import)
- .and_raise(exception)
+ expect(worker)
+ .to receive(:try_import)
+ .and_raise(exception)
- expect(Gitlab::GithubImport::Logger)
- .to receive(:info)
- .with(
- {
- message: 'starting stage',
- project_id: project.id,
- import_stage: 'DummyStage'
- }
- )
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ {
+ message: 'starting stage',
+ project_id: project.id,
+ import_stage: 'DummyStage'
+ }
+ )
- expect(Gitlab::Import::ImportFailureService)
- .to receive(:track)
- .with(
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track)
+ .with(
+ {
project_id: project.id,
exception: exception,
error_source: 'DummyStage',
- fail_import: true
- ).and_call_original
+ fail_import: false,
+ metrics: true
+ }
+ ).and_call_original
- expect { worker.perform(project.id) }.to raise_error(exception)
+ expect { worker.perform(project.id) }
+ .to raise_error(exception)
- expect(project.import_state.reload.status).to eq('failed')
- expect(project.import_state.last_error).to eq('some error')
+ expect(project.import_state.reload.status).to eq('started')
- expect(project.import_failures).not_to be_empty
- expect(project.import_failures.last.exception_class).to eq('StandardError')
- expect(project.import_failures.last.exception_message).to eq('some error')
- end
+ expect(project.import_failures).not_to be_empty
+ expect(project.import_failures.last.exception_class).to eq('StandardError')
+ expect(project.import_failures.last.exception_message).to eq('some error')
end
end
diff --git a/spec/workers/delete_container_repository_worker_spec.rb b/spec/workers/delete_container_repository_worker_spec.rb
deleted file mode 100644
index 6260bea6949..00000000000
--- a/spec/workers/delete_container_repository_worker_spec.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe DeleteContainerRepositoryWorker, feature_category: :container_registry do
- let_it_be(:repository) { create(:container_repository) }
-
- let(:project) { repository.project }
- let(:user) { project.first_owner }
- let(:worker) { described_class.new }
-
- describe '#perform' do
- subject(:perform) { worker.perform(user.id, repository.id) }
-
- it 'is a no op' do
- expect { subject }.to not_change { ContainerRepository.count }
- end
- end
-end
diff --git a/spec/workers/environments/stop_job_failed_worker_spec.rb b/spec/workers/environments/stop_job_failed_worker_spec.rb
new file mode 100644
index 00000000000..21d952dff30
--- /dev/null
+++ b/spec/workers/environments/stop_job_failed_worker_spec.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Environments::StopJobFailedWorker, feature_category: :continuous_delivery do
+ describe '#perform' do
+ let_it_be_with_refind(:environment) { create(:environment, state: :stopping) }
+
+ subject { described_class.new.perform(job.id) }
+
+ shared_examples_for 'recovering a stuck stopping environment' do
+ context 'when the job is not a stop job' do
+ let(:job) { non_stop_job }
+
+ it 'does not recover the environment' do
+ expect { subject }.not_to change { environment.reload.state }
+ end
+ end
+
+ context 'when the stop job is not failed' do
+ let(:job) { stop_job }
+
+ before do
+ job.update!(status: :success)
+ end
+
+ it 'does not recover the environment' do
+ expect { subject }.not_to change { environment.reload.state }
+ end
+ end
+
+ context 'when the stop job is failed' do
+ let(:job) { stop_job }
+
+ it 'recovers the environment' do
+ expect { subject }
+ .to change { environment.reload.state }
+ .from('stopping')
+ .to('available')
+ end
+ end
+
+ context 'when there is no environment' do
+ let(:job) { stop_job }
+
+ before do
+ environment.destroy!
+ end
+
+ it 'does not cause an error' do
+ expect { subject }.not_to raise_error
+ end
+ end
+ end
+
+ context 'with build job' do
+ let!(:stop_job) do
+ create(
+ :ci_build,
+ :stop_review_app,
+ environment: environment.name,
+ project: environment.project,
+ status: :failed
+ )
+ end
+
+ let!(:non_stop_job) do
+ create(
+ :ci_build,
+ :start_review_app,
+ environment: environment.name,
+ project: environment.project,
+ status: :failed
+ )
+ end
+
+ it_behaves_like 'recovering a stuck stopping environment'
+ end
+
+ context 'with bridge job' do
+ let!(:stop_job) do
+ create(
+ :ci_bridge,
+ :stop_review_app,
+ environment: environment.name,
+ project: environment.project,
+ status: :failed
+ )
+ end
+
+ let!(:non_stop_job) do
+ create(
+ :ci_bridge,
+ :start_review_app,
+ environment: environment.name,
+ project: environment.project,
+ status: :failed
+ )
+ end
+
+ it_behaves_like 'recovering a stuck stopping environment'
+ end
+
+ context 'when job does not exist' do
+ it 'does not raise exception' do
+ expect { described_class.new.perform(non_existing_record_id) }
+ .not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 9a94a836d60..4855967d462 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -168,9 +168,11 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'Ci::PipelineBridgeStatusWorker' => 3,
'Ci::PipelineSuccessUnlockArtifactsWorker' => 3,
'Ci::RefDeleteUnlockArtifactsWorker' => 3,
+ 'Ci::Refs::UnlockPreviousPipelinesWorker' => 3,
'Ci::ResourceGroups::AssignResourceFromResourceGroupWorker' => 3,
'Ci::TestFailureHistoryWorker' => 3,
'Ci::TriggerDownstreamSubscriptionsWorker' => 3,
+ 'Ci::UnlockPipelinesInQueueWorker' => 0,
'Ci::SyncReportsToReportApprovalRulesWorker' => 3,
'CleanupContainerRepositoryWorker' => 3,
'ClusterConfigureIstioWorker' => 3,
@@ -198,7 +200,6 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'Database::LockTablesWorker' => false,
'Database::BatchedBackgroundMigration::CiExecutionWorker' => 0,
'Database::BatchedBackgroundMigration::MainExecutionWorker' => 0,
- 'DeleteContainerRepositoryWorker' => 3,
'DeleteDiffFilesWorker' => 3,
'DeleteMergedBranchesWorker' => 3,
'DeleteStoredFilesWorker' => 3,
@@ -258,7 +259,11 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'GeoRepositoryDestroyWorker' => 3,
'Gitlab::BitbucketImport::AdvanceStageWorker' => 3,
'Gitlab::BitbucketImport::Stage::FinishImportWorker' => 3,
+ 'Gitlab::BitbucketImport::Stage::ImportIssuesWorker' => 3,
+ 'Gitlab::BitbucketImport::Stage::ImportIssuesNotesWorker' => 3,
+ 'Gitlab::BitbucketImport::Stage::ImportLfsObjectsWorker' => 3,
'Gitlab::BitbucketImport::Stage::ImportPullRequestsWorker' => 3,
+ 'Gitlab::BitbucketImport::Stage::ImportPullRequestsNotesWorker' => 3,
'Gitlab::BitbucketImport::Stage::ImportRepositoryWorker' => 3,
'Gitlab::BitbucketServerImport::AdvanceStageWorker' => 3,
'Gitlab::BitbucketServerImport::Stage::FinishImportWorker' => 3,
@@ -310,7 +315,7 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'Gitlab::JiraImport::Stage::StartImportWorker' => 5,
'GitlabPerformanceBarStatsWorker' => 3,
'GitlabSubscriptions::RefreshSeatsWorker' => 0,
- 'GitlabShellWorker' => 3,
+ 'GitlabSubscriptions::AddOnPurchases::BulkRefreshUserAssignmentsWorker' => 0,
'GitlabServicePingWorker' => 3,
'GroupDestroyWorker' => 3,
'GroupExportWorker' => false,
@@ -480,7 +485,8 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'WorkItems::ImportWorkItemsCsvWorker' => 3,
'X509CertificateRevokeWorker' => 3,
'ComplianceManagement::MergeRequests::ComplianceViolationsWorker' => 3,
- 'Zoekt::IndexerWorker' => 2
+ 'Zoekt::IndexerWorker' => 2,
+ 'Issuable::RelatedLinksCreateWorker' => 3
}.merge(extra_retry_exceptions)
end
diff --git a/spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb b/spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb
index 16e3a3dc481..c04ccafdcf8 100644
--- a/spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb
+++ b/spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb
@@ -19,28 +19,30 @@ RSpec.describe Gitlab::BitbucketImport::AdvanceStageWorker, :clean_gitlab_redis_
context 'when there are remaining jobs' do
before do
allow(worker)
- .to receive(:find_import_state)
+ .to receive(:find_import_state_jid)
.and_return(import_state)
end
it 'reschedules itself' do
- expect(worker)
- .to receive(:wait_for_jobs)
- .with({ '123' => 2 })
- .and_return({ '123' => 1 })
-
- expect(described_class)
- .to receive(:perform_in)
- .with(described_class::INTERVAL, project.id, { '123' => 1 }, :finish)
-
- worker.perform(project.id, { '123' => 2 }, :finish)
+ freeze_time do
+ expect(worker)
+ .to receive(:wait_for_jobs)
+ .with({ '123' => 2 })
+ .and_return({ '123' => 1 })
+
+ expect(described_class)
+ .to receive(:perform_in)
+ .with(described_class::INTERVAL, project.id, { '123' => 1 }, :finish, Time.zone.now, 1)
+
+ worker.perform(project.id, { '123' => 2 }, :finish)
+ end
end
end
context 'when there are no remaining jobs' do
before do
allow(worker)
- .to receive(:find_import_state)
+ .to receive(:find_import_state_jid)
.and_return(import_state)
allow(worker)
@@ -98,18 +100,30 @@ RSpec.describe Gitlab::BitbucketImport::AdvanceStageWorker, :clean_gitlab_redis_
end
end
- describe '#find_import_state' do
- it 'returns a ProjectImportState' do
+ describe '#find_import_state_jid' do
+ it 'returns a ProjectImportState with only id and jid' do
import_state.update_column(:status, 'started')
- found = worker.find_import_state(project.id)
+ found = worker.find_import_state_jid(project.id)
expect(found).to be_an_instance_of(ProjectImportState)
expect(found.attributes.keys).to match_array(%w[id jid])
end
it 'returns nil if the project import is not running' do
- expect(worker.find_import_state(project.id)).to be_nil
+ expect(worker.find_import_state_jid(project.id)).to be_nil
+ end
+ end
+
+ describe '#find_import_state' do
+ it 'returns a ProjectImportState' do
+ import_state.update_column(:status, 'started')
+
+ found_partial = worker.find_import_state_jid(project.id)
+ found = worker.find_import_state(found_partial.id)
+
+ expect(found).to be_an_instance_of(ProjectImportState)
+ expect(found.attributes.keys).to include('id', 'project_id', 'status', 'last_error')
end
end
end
diff --git a/spec/workers/gitlab/bitbucket_import/import_issue_notes_worker_spec.rb b/spec/workers/gitlab/bitbucket_import/import_issue_notes_worker_spec.rb
new file mode 100644
index 00000000000..0f5df219ab4
--- /dev/null
+++ b/spec/workers/gitlab/bitbucket_import/import_issue_notes_worker_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::ImportIssueNotesWorker, feature_category: :importers do
+ subject(:worker) { described_class.new }
+
+ it_behaves_like Gitlab::BitbucketImport::ObjectImporter
+end
diff --git a/spec/workers/gitlab/bitbucket_import/import_issue_worker_spec.rb b/spec/workers/gitlab/bitbucket_import/import_issue_worker_spec.rb
new file mode 100644
index 00000000000..69d66452a45
--- /dev/null
+++ b/spec/workers/gitlab/bitbucket_import/import_issue_worker_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::ImportIssueWorker, feature_category: :importers do
+ subject(:worker) { described_class.new }
+
+ it_behaves_like Gitlab::BitbucketImport::ObjectImporter
+end
diff --git a/spec/workers/gitlab/bitbucket_import/import_lfs_object_worker_spec.rb b/spec/workers/gitlab/bitbucket_import/import_lfs_object_worker_spec.rb
new file mode 100644
index 00000000000..6c9d084a639
--- /dev/null
+++ b/spec/workers/gitlab/bitbucket_import/import_lfs_object_worker_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::ImportLfsObjectWorker, feature_category: :importers do
+ subject(:worker) { described_class.new }
+
+ it_behaves_like Gitlab::BitbucketImport::ObjectImporter do
+ before do
+ # Stub the LfsDownloadObject for these tests so it can be passed an empty Hash
+ allow(LfsDownloadObject).to receive(:new)
+ end
+ end
+end
diff --git a/spec/workers/gitlab/bitbucket_import/import_pull_request_notes_worker_spec.rb b/spec/workers/gitlab/bitbucket_import/import_pull_request_notes_worker_spec.rb
new file mode 100644
index 00000000000..1e984e6080b
--- /dev/null
+++ b/spec/workers/gitlab/bitbucket_import/import_pull_request_notes_worker_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::ImportPullRequestNotesWorker, feature_category: :importers do
+ subject(:worker) { described_class.new }
+
+ it_behaves_like Gitlab::BitbucketImport::ObjectImporter
+end
diff --git a/spec/workers/gitlab/bitbucket_import/stage/import_issues_notes_worker_spec.rb b/spec/workers/gitlab/bitbucket_import/stage/import_issues_notes_worker_spec.rb
new file mode 100644
index 00000000000..ac504efb300
--- /dev/null
+++ b/spec/workers/gitlab/bitbucket_import/stage/import_issues_notes_worker_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::Stage::ImportIssuesNotesWorker, feature_category: :importers do
+ let_it_be(:project) { create(:project, :import_started) }
+
+ subject(:worker) { described_class.new }
+
+ it_behaves_like Gitlab::BitbucketImport::StageMethods
+
+ describe '#perform' do
+ context 'when the import succeeds' do
+ before do
+ allow_next_instance_of(Gitlab::BitbucketImport::Importers::IssuesNotesImporter) do |importer|
+ allow(importer).to receive(:execute).and_return(Gitlab::JobWaiter.new(2, '123'))
+ end
+ end
+
+ it 'schedules the next stage' do
+ expect(Gitlab::BitbucketImport::AdvanceStageWorker).to receive(:perform_async)
+ .with(project.id, { '123' => 2 }, :lfs_objects)
+
+ worker.perform(project.id)
+ end
+
+ it 'logs stage start and finish' do
+ expect(Gitlab::BitbucketImport::Logger)
+ .to receive(:info).with(hash_including(message: 'starting stage', project_id: project.id))
+ expect(Gitlab::BitbucketImport::Logger)
+ .to receive(:info).with(hash_including(message: 'stage finished', project_id: project.id))
+
+ worker.perform(project.id)
+ end
+ end
+
+ context 'when project does not exist' do
+ it 'does not call the importer' do
+ expect(Gitlab::BitbucketImport::Importers::IssuesNotesImporter).not_to receive(:new)
+
+ worker.perform(-1)
+ end
+ end
+
+ context 'when project import state is not `started`' do
+ it 'does not call the importer' do
+ project = create(:project, :import_canceled)
+
+ expect(Gitlab::BitbucketImport::Importers::IssuesNotesImporter).not_to receive(:new)
+
+ worker.perform(project.id)
+ end
+ end
+
+ context 'when the importer fails' do
+ it 'does not schedule the next stage and raises error' do
+ exception = StandardError.new('Error')
+
+ allow_next_instance_of(Gitlab::BitbucketImport::Importers::IssuesNotesImporter) do |importer|
+ allow(importer).to receive(:execute).and_raise(exception)
+ end
+
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track).with(
+ project_id: project.id,
+ exception: exception,
+ error_source: described_class.name,
+ fail_import: false
+ ).and_call_original
+
+ expect { worker.perform(project.id) }
+ .to change { Gitlab::BitbucketImport::AdvanceStageWorker.jobs.size }.by(0)
+ .and raise_error(exception)
+ end
+ end
+ end
+end
diff --git a/spec/workers/gitlab/bitbucket_import/stage/import_issues_worker_spec.rb b/spec/workers/gitlab/bitbucket_import/stage/import_issues_worker_spec.rb
new file mode 100644
index 00000000000..29decd87d28
--- /dev/null
+++ b/spec/workers/gitlab/bitbucket_import/stage/import_issues_worker_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::Stage::ImportIssuesWorker, feature_category: :importers do
+ let_it_be(:project) { create(:project, :import_started) }
+
+ subject(:worker) { described_class.new }
+
+ it_behaves_like Gitlab::BitbucketImport::StageMethods
+
+ describe '#perform' do
+ context 'when the import succeeds' do
+ before do
+ allow_next_instance_of(Gitlab::BitbucketImport::Importers::IssuesImporter) do |importer|
+ allow(importer).to receive(:execute).and_return(Gitlab::JobWaiter.new(2, '123'))
+ end
+ end
+
+ it 'schedules the next stage' do
+ expect(Gitlab::BitbucketImport::AdvanceStageWorker).to receive(:perform_async)
+ .with(project.id, { '123' => 2 }, :issues_notes)
+
+ worker.perform(project.id)
+ end
+
+ it 'logs stage start and finish' do
+ expect(Gitlab::BitbucketImport::Logger)
+ .to receive(:info).with(hash_including(message: 'starting stage', project_id: project.id))
+ expect(Gitlab::BitbucketImport::Logger)
+ .to receive(:info).with(hash_including(message: 'stage finished', project_id: project.id))
+
+ worker.perform(project.id)
+ end
+ end
+
+ context 'when project does not exist' do
+ it 'does not call the importer' do
+ expect(Gitlab::BitbucketImport::Importers::IssuesImporter).not_to receive(:new)
+
+ worker.perform(-1)
+ end
+ end
+
+ context 'when project import state is not `started`' do
+ it 'does not call the importer' do
+ project = create(:project, :import_canceled)
+
+ expect(Gitlab::BitbucketImport::Importers::IssuesImporter).not_to receive(:new)
+
+ worker.perform(project.id)
+ end
+ end
+
+ context 'when the importer fails' do
+ it 'does not schedule the next stage and raises error' do
+ exception = StandardError.new('Error')
+
+ allow_next_instance_of(Gitlab::BitbucketImport::Importers::IssuesImporter) do |importer|
+ allow(importer).to receive(:execute).and_raise(exception)
+ end
+
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track).with(
+ project_id: project.id,
+ exception: exception,
+ error_source: described_class.name,
+ fail_import: false
+ ).and_call_original
+
+ expect { worker.perform(project.id) }
+ .to change { Gitlab::BitbucketImport::AdvanceStageWorker.jobs.size }.by(0)
+ .and raise_error(exception)
+ end
+ end
+ end
+end
diff --git a/spec/workers/gitlab/bitbucket_import/stage/import_lfs_objects_worker_spec.rb b/spec/workers/gitlab/bitbucket_import/stage/import_lfs_objects_worker_spec.rb
new file mode 100644
index 00000000000..a645c9e698d
--- /dev/null
+++ b/spec/workers/gitlab/bitbucket_import/stage/import_lfs_objects_worker_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::Stage::ImportLfsObjectsWorker, feature_category: :importers do
+ let_it_be(:project) { create(:project, :import_started) }
+
+ subject(:worker) { described_class.new }
+
+ it_behaves_like Gitlab::BitbucketImport::StageMethods
+
+ describe '#perform' do
+ context 'when the import succeeds' do
+ before do
+ allow_next_instance_of(Gitlab::BitbucketImport::Importers::LfsObjectsImporter) do |importer|
+ allow(importer).to receive(:execute).and_return(Gitlab::JobWaiter.new(2, '123'))
+ end
+ end
+
+ it 'schedules the next stage' do
+ expect(Gitlab::BitbucketImport::AdvanceStageWorker).to receive(:perform_async)
+ .with(project.id, { '123' => 2 }, :finish)
+
+ worker.perform(project.id)
+ end
+ end
+ end
+end
diff --git a/spec/workers/gitlab/bitbucket_import/stage/import_pull_requests_notes_worker_spec.rb b/spec/workers/gitlab/bitbucket_import/stage/import_pull_requests_notes_worker_spec.rb
new file mode 100644
index 00000000000..12464f19d06
--- /dev/null
+++ b/spec/workers/gitlab/bitbucket_import/stage/import_pull_requests_notes_worker_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::Stage::ImportPullRequestsNotesWorker, feature_category: :importers do
+ let_it_be(:project) { create(:project, :import_started) }
+
+ subject(:worker) { described_class.new }
+
+ it_behaves_like Gitlab::BitbucketImport::StageMethods
+
+ describe '#perform' do
+ context 'when the import succeeds' do
+ before do
+ allow_next_instance_of(Gitlab::BitbucketImport::Importers::PullRequestsNotesImporter) do |importer|
+ allow(importer).to receive(:execute).and_return(Gitlab::JobWaiter.new(2, '123'))
+ end
+ end
+
+ it 'schedules the next stage' do
+ expect(Gitlab::BitbucketImport::AdvanceStageWorker).to receive(:perform_async)
+ .with(project.id, { '123' => 2 }, :issues)
+
+ worker.perform(project.id)
+ end
+
+ it 'logs stage start and finish' do
+ expect(Gitlab::BitbucketImport::Logger)
+ .to receive(:info).with(hash_including(message: 'starting stage', project_id: project.id))
+ expect(Gitlab::BitbucketImport::Logger)
+ .to receive(:info).with(hash_including(message: 'stage finished', project_id: project.id))
+
+ worker.perform(project.id)
+ end
+ end
+
+ context 'when project does not exist' do
+ it 'does not call the importer' do
+ expect(Gitlab::BitbucketImport::Importers::PullRequestsNotesImporter).not_to receive(:new)
+
+ worker.perform(-1)
+ end
+ end
+
+ context 'when project import state is not `started`' do
+ it 'does not call the importer' do
+ project = create(:project, :import_canceled)
+
+ expect(Gitlab::BitbucketImport::Importers::PullRequestsNotesImporter).not_to receive(:new)
+
+ worker.perform(project.id)
+ end
+ end
+
+ context 'when the importer fails' do
+ it 'does not schedule the next stage and raises error' do
+ exception = StandardError.new('Error')
+
+ allow_next_instance_of(Gitlab::BitbucketImport::Importers::PullRequestsNotesImporter) do |importer|
+ allow(importer).to receive(:execute).and_raise(exception)
+ end
+
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track).with(
+ project_id: project.id,
+ exception: exception,
+ error_source: described_class.name,
+ fail_import: false
+ ).and_call_original
+
+ expect { worker.perform(project.id) }
+ .to change { Gitlab::BitbucketImport::AdvanceStageWorker.jobs.size }.by(0)
+ .and raise_error(exception)
+ end
+ end
+ end
+end
diff --git a/spec/workers/gitlab/bitbucket_import/stage/import_pull_requests_worker_spec.rb b/spec/workers/gitlab/bitbucket_import/stage/import_pull_requests_worker_spec.rb
index 8f425066160..d107d3fa13e 100644
--- a/spec/workers/gitlab/bitbucket_import/stage/import_pull_requests_worker_spec.rb
+++ b/spec/workers/gitlab/bitbucket_import/stage/import_pull_requests_worker_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Gitlab::BitbucketImport::Stage::ImportPullRequestsWorker, feature
it 'schedules the next stage' do
expect(Gitlab::BitbucketImport::AdvanceStageWorker).to receive(:perform_async)
- .with(project.id, { '123' => 2 }, :finish)
+ .with(project.id, { '123' => 2 }, :pull_requests_notes)
worker.perform(project.id)
end
diff --git a/spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb b/spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb
index dc715c3026b..d11b044b093 100644
--- a/spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb
+++ b/spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe Gitlab::GithubGistsImport::ImportGistWorker, feature_category: :i
let(:log_attributes) do
{
'user_id' => user.id,
- 'github_identifiers' => { 'id': gist_object.id },
+ 'external_identifiers' => { 'id': gist_object.id },
'class' => 'Gitlab::GithubGistsImport::ImportGistWorker',
'correlation_id' => 'new-correlation-id',
'jid' => nil,
@@ -96,7 +96,7 @@ RSpec.describe Gitlab::GithubGistsImport::ImportGistWorker, feature_category: :i
it 'raises an error' do
expect(Gitlab::GithubImport::Logger)
.to receive(:error)
- .with(log_attributes.merge('message' => 'importer failed', 'error.message' => '_some_error_'))
+ .with(log_attributes.merge('message' => 'importer failed', 'exception.message' => '_some_error_'))
expect(Gitlab::ErrorTracking).to receive(:track_exception)
expect { subject.perform(user.id, gist_hash, 'some_key') }.to raise_error(StandardError)
@@ -113,7 +113,7 @@ RSpec.describe Gitlab::GithubGistsImport::ImportGistWorker, feature_category: :i
it 'tracks and logs error' do
expect(Gitlab::GithubImport::Logger)
.to receive(:error)
- .with(log_attributes.merge('message' => 'importer failed', 'error.message' => 'error_message'))
+ .with(log_attributes.merge('message' => 'importer failed', 'exception.message' => 'error_message'))
expect(Gitlab::JobWaiter)
.to receive(:notify)
.with('some_key', subject.jid, ttl: Gitlab::Import::JOB_WAITER_TTL)
diff --git a/spec/workers/gitlab/github_gists_import/start_import_worker_spec.rb b/spec/workers/gitlab/github_gists_import/start_import_worker_spec.rb
index 220f2bb0c75..0bd371b6c97 100644
--- a/spec/workers/gitlab/github_gists_import/start_import_worker_spec.rb
+++ b/spec/workers/gitlab/github_gists_import/start_import_worker_spec.rb
@@ -70,7 +70,7 @@ RSpec.describe Gitlab::GithubGistsImport::StartImportWorker, feature_category: :
expect(Gitlab::GithubImport::Logger)
.to receive(:error)
- .with(log_attributes.merge('message' => 'import failed', 'error.message' => exception.message))
+ .with(log_attributes.merge('message' => 'import failed', 'exception.message' => exception.message))
expect { worker.perform(user.id, token) }.to raise_error(StandardError)
end
diff --git a/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb b/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb
index e517f30ee2c..6d8fa29bd27 100644
--- a/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb
@@ -6,6 +6,8 @@ RSpec.describe Gitlab::GithubImport::Stage::FinishImportWorker, feature_category
let(:project) { create(:project) }
let(:worker) { described_class.new }
+ it_behaves_like Gitlab::GithubImport::StageMethods
+
describe '#perform' do
it 'marks the import as finished and reports import statistics' do
expect(project).to receive(:after_import)
diff --git a/spec/workers/gitlab/github_import/stage/import_attachments_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_attachments_worker_spec.rb
index e385a5aaf3f..9a4b9106dae 100644
--- a/spec/workers/gitlab/github_import/stage/import_attachments_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_attachments_worker_spec.rb
@@ -13,6 +13,8 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportAttachmentsWorker, feature_cat
settings.write({ optional_stages: { attachments_import: stage_enabled } })
end
+ it_behaves_like Gitlab::GithubImport::StageMethods
+
describe '#import' do
let(:client) { instance_double('Gitlab::GithubImport::Client') }
let(:importers) do
diff --git a/spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb
index 1ad027a007a..f3b706361e3 100644
--- a/spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb
@@ -10,6 +10,8 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportBaseDataWorker, feature_catego
let(:importer) { double(:importer) }
let(:client) { double(:client) }
+ it_behaves_like Gitlab::GithubImport::StageMethods
+
describe '#import' do
it 'imports the base data of a project' do
described_class::IMPORTERS.each do |klass|
@@ -29,23 +31,5 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportBaseDataWorker, feature_catego
worker.import(client, project)
end
-
- it 'raises an error' do
- exception = StandardError.new('_some_error_')
-
- expect_next_instance_of(Gitlab::GithubImport::Importer::LabelsImporter) do |importer|
- expect(importer).to receive(:execute).and_raise(exception)
- end
- expect(Gitlab::Import::ImportFailureService).to receive(:track)
- .with(
- project_id: project.id,
- exception: exception,
- error_source: described_class.name,
- fail_import: true,
- metrics: true
- ).and_call_original
-
- expect { worker.import(client, project) }.to raise_error(StandardError)
- end
end
end
diff --git a/spec/workers/gitlab/github_import/stage/import_collaborators_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_collaborators_worker_spec.rb
index 808f6e827ed..fc38adb5447 100644
--- a/spec/workers/gitlab/github_import/stage/import_collaborators_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_collaborators_worker_spec.rb
@@ -12,6 +12,8 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportCollaboratorsWorker, feature_c
let(:importer) { instance_double(Gitlab::GithubImport::Importer::CollaboratorsImporter) }
let(:client) { instance_double(Gitlab::GithubImport::Client) }
+ it_behaves_like Gitlab::GithubImport::StageMethods
+
describe '#import' do
let(:push_rights_granted) { true }
@@ -68,23 +70,5 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportCollaboratorsWorker, feature_c
worker.import(client, project)
end
end
-
- it 'raises an error' do
- exception = StandardError.new('_some_error_')
-
- expect_next_instance_of(Gitlab::GithubImport::Importer::CollaboratorsImporter) do |importer|
- expect(importer).to receive(:execute).and_raise(exception)
- end
- expect(Gitlab::Import::ImportFailureService).to receive(:track)
- .with(
- project_id: project.id,
- exception: exception,
- error_source: described_class.name,
- fail_import: true,
- metrics: true
- ).and_call_original
-
- expect { worker.import(client, project) }.to raise_error(StandardError)
- end
end
end
diff --git a/spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb
index 7b0cf77bbbe..4b4d6a5b625 100644
--- a/spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb
@@ -14,6 +14,8 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportIssueEventsWorker, feature_cat
settings.write({ optional_stages: { single_endpoint_issue_events_import: stage_enabled } })
end
+ it_behaves_like Gitlab::GithubImport::StageMethods
+
describe '#import' do
let(:importer) { instance_double('Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter') }
let(:client) { instance_double('Gitlab::GithubImport::Client') }
diff --git a/spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb
index 188cf3530f7..7a5813122f4 100644
--- a/spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb
@@ -13,6 +13,8 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportIssuesAndDiffNotesWorker, feat
settings.write({ optional_stages: { single_endpoint_notes_import: single_endpoint_optional_stage } })
end
+ it_behaves_like Gitlab::GithubImport::StageMethods
+
describe '#import' do
it 'imports the issues and diff notes' do
client = double(:client)
diff --git a/spec/workers/gitlab/github_import/stage/import_lfs_objects_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_lfs_objects_worker_spec.rb
index 2449c0505f5..5d476543743 100644
--- a/spec/workers/gitlab/github_import/stage/import_lfs_objects_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_lfs_objects_worker_spec.rb
@@ -6,6 +6,8 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportLfsObjectsWorker, feature_cate
let(:project) { create(:project) }
let(:worker) { described_class.new }
+ it_behaves_like Gitlab::GithubImport::StageMethods
+
describe '#import' do
it 'imports all the lfs objects' do
importer = double(:importer)
diff --git a/spec/workers/gitlab/github_import/stage/import_notes_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_notes_worker_spec.rb
index dcceeb1d6c2..9584708802a 100644
--- a/spec/workers/gitlab/github_import/stage/import_notes_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_notes_worker_spec.rb
@@ -13,6 +13,8 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportNotesWorker, feature_category:
settings.write({ optional_stages: { single_endpoint_notes_import: single_endpoint_optional_stage } })
end
+ it_behaves_like Gitlab::GithubImport::StageMethods
+
describe '#import' do
it 'imports all the notes' do
client = double(:client)
diff --git a/spec/workers/gitlab/github_import/stage/import_protected_branches_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_protected_branches_worker_spec.rb
index f848293a3b2..7ecce82dacb 100644
--- a/spec/workers/gitlab/github_import/stage/import_protected_branches_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_protected_branches_worker_spec.rb
@@ -10,6 +10,8 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportProtectedBranchesWorker, featu
let(:importer) { instance_double('Gitlab::GithubImport::Importer::ProtectedBranchImporter') }
let(:client) { instance_double('Gitlab::GithubImport::Client') }
+ it_behaves_like Gitlab::GithubImport::StageMethods
+
describe '#import' do
it 'imports all the pull requests' do
waiter = Gitlab::JobWaiter.new(2, '123')
@@ -32,27 +34,5 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportProtectedBranchesWorker, featu
worker.import(client, project)
end
-
- context 'when an error raised' do
- let(:exception) { StandardError.new('_some_error_') }
-
- before do
- allow_next_instance_of(Gitlab::GithubImport::Importer::ProtectedBranchesImporter) do |importer|
- allow(importer).to receive(:execute).and_raise(exception)
- end
- end
-
- it 'raises an error' do
- expect(Gitlab::Import::ImportFailureService).to receive(:track)
- .with(
- project_id: project.id,
- exception: exception,
- error_source: described_class.name,
- metrics: true
- ).and_call_original
-
- expect { worker.import(client, project) }.to raise_error(StandardError)
- end
- end
end
end
diff --git a/spec/workers/gitlab/github_import/stage/import_pull_requests_merged_by_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_pull_requests_merged_by_worker_spec.rb
index 0debabda0cc..5917b827d65 100644
--- a/spec/workers/gitlab/github_import/stage/import_pull_requests_merged_by_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_pull_requests_merged_by_worker_spec.rb
@@ -7,6 +7,8 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportPullRequestsMergedByWorker, fe
let(:import_state) { create(:import_state, project: project) }
let(:worker) { described_class.new }
+ it_behaves_like Gitlab::GithubImport::StageMethods
+
describe '#import' do
it 'imports all the pull requests' do
importer = double(:importer)
diff --git a/spec/workers/gitlab/github_import/stage/import_pull_requests_review_requests_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_pull_requests_review_requests_worker_spec.rb
index 41c0b29df7c..b473de73086 100644
--- a/spec/workers/gitlab/github_import/stage/import_pull_requests_review_requests_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_pull_requests_review_requests_worker_spec.rb
@@ -11,6 +11,8 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportPullRequestsReviewRequestsWork
let(:importer) { instance_double(Gitlab::GithubImport::Importer::PullRequests::ReviewRequestsImporter) }
let(:waiter) { Gitlab::JobWaiter.new(2, '123') }
+ it_behaves_like Gitlab::GithubImport::StageMethods
+
describe '#import' do
it 'imports all PR review requests' do
expect(Gitlab::GithubImport::Importer::PullRequests::ReviewRequestsImporter)
diff --git a/spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb
index b1141c7f324..34d3ce9fe95 100644
--- a/spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb
@@ -8,6 +8,8 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportPullRequestsReviewsWorker, fea
let(:worker) { described_class.new }
let(:client) { double(:client) }
+ it_behaves_like Gitlab::GithubImport::StageMethods
+
describe '#import' do
it 'imports all the pull request reviews' do
importer = double(:importer)
diff --git a/spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb
index 9b2cfead684..f9b4a8a99f0 100644
--- a/spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb
@@ -11,6 +11,8 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportPullRequestsWorker, feature_ca
let(:importer) { double(:importer) }
let(:client) { double(:client) }
+ it_behaves_like Gitlab::GithubImport::StageMethods
+
describe '#import' do
context 'with pull requests' do
it 'imports all the pull requests and allocates internal iids' do
@@ -101,26 +103,4 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportPullRequestsWorker, feature_ca
end
end
end
-
- it 'raises an error' do
- exception = StandardError.new('_some_error_')
-
- expect(client).to receive(:each_object).with(
- :pulls, project.import_source, options
- ).and_return([{ number: 4 }].each)
-
- expect_next_instance_of(Gitlab::GithubImport::Importer::PullRequestsImporter) do |importer|
- expect(importer).to receive(:execute).and_raise(exception)
- end
- expect(Gitlab::Import::ImportFailureService).to receive(:track)
- .with(
- project_id: project.id,
- exception: exception,
- error_source: described_class.name,
- fail_import: true,
- metrics: true
- ).and_call_original
-
- expect { worker.import(client, project) }.to raise_error(StandardError)
- end
end
diff --git a/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
index 94d8155d371..f4a306eeb0c 100644
--- a/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
@@ -7,6 +7,8 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportRepositoryWorker, feature_cate
let(:worker) { described_class.new }
+ it_behaves_like Gitlab::GithubImport::StageMethods
+
describe '#import' do
before do
expect(Gitlab::GithubImport::RefreshImportJidWorker)
@@ -84,37 +86,5 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportRepositoryWorker, feature_cate
end
end
end
-
- context 'when the import fails' do
- it 'does not schedule the importing of the base data' do
- client = double(:client)
- exception_class = Gitlab::Git::Repository::NoRepository
-
- expect_next_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) do |instance|
- expect(instance).to receive(:execute).and_raise(exception_class)
- end
-
- expect(InternalId).to receive(:exists?).and_return(false)
- expect(client).to receive(:each_object).and_return([nil].each)
- expect(Issue).not_to receive(:track_namespace_iid!)
-
- expect(Gitlab::Import::ImportFailureService).to receive(:track)
- .with(
- project_id: project.id,
- exception: exception_class,
- error_source: described_class.name,
- fail_import: true,
- metrics: true
- ).and_call_original
-
- expect(Gitlab::GithubImport::Stage::ImportBaseDataWorker)
- .not_to receive(:perform_async)
-
- expect(worker.abort_on_failure).to eq(true)
-
- expect { worker.import(client, project) }
- .to raise_error(exception_class)
- end
- end
end
end
diff --git a/spec/workers/gitlab_shell_worker_spec.rb b/spec/workers/gitlab_shell_worker_spec.rb
deleted file mode 100644
index 9fff4489667..00000000000
--- a/spec/workers/gitlab_shell_worker_spec.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe GitlabShellWorker, :sidekiq_inline, feature_category: :source_code_management do
- describe '#perform' do
- Gitlab::Shell::PERMITTED_ACTIONS.each do |action|
- describe "with the #{action} action" do
- it 'forwards the message to Gitlab::Shell' do
- expect_next_instance_of(Gitlab::Shell) do |instance|
- expect(instance).to respond_to(action)
- expect(instance).to receive(action).with('foo', 'bar')
- end
-
- described_class.perform_async(action, 'foo', 'bar')
- end
- end
- end
-
- describe 'all other commands' do
- it 'raises ArgumentError' do
- allow_next_instance_of(described_class) do |job_instance|
- expect(job_instance).not_to receive(:gitlab_shell)
- end
-
- expect { described_class.perform_async('foo', 'bar', 'baz') }
- .to raise_error(ArgumentError, 'foo not allowed for GitlabShellWorker')
- end
- end
- end
-end
diff --git a/spec/workers/hashed_storage/migrator_worker_spec.rb b/spec/workers/hashed_storage/migrator_worker_spec.rb
deleted file mode 100644
index f188928cf92..00000000000
--- a/spec/workers/hashed_storage/migrator_worker_spec.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe HashedStorage::MigratorWorker, feature_category: :source_code_management do
- subject(:worker) { described_class.new }
-
- let(:projects) { create_list(:project, 2, :legacy_storage, :empty_repo) }
- let(:ids) { projects.map(&:id) }
-
- describe '#perform' do
- it 'delegates to MigratorService' do
- expect_next_instance_of(Gitlab::HashedStorage::Migrator) do |instance|
- expect(instance).to receive(:bulk_migrate).with(start: 5, finish: 10)
- end
-
- worker.perform(5, 10)
- end
-
- it 'migrates projects in the specified range', :sidekiq_might_not_need_inline do
- perform_enqueued_jobs do
- worker.perform(ids.min, ids.max)
- end
-
- projects.each do |project|
- expect(project.reload.hashed_storage?(:attachments)).to be_truthy
- end
- end
- end
-end
diff --git a/spec/workers/hashed_storage/project_migrate_worker_spec.rb b/spec/workers/hashed_storage/project_migrate_worker_spec.rb
deleted file mode 100644
index 84592e85eaa..00000000000
--- a/spec/workers/hashed_storage/project_migrate_worker_spec.rb
+++ /dev/null
@@ -1,85 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe HashedStorage::ProjectMigrateWorker, :clean_gitlab_redis_shared_state, feature_category: :source_code_management do
- include ExclusiveLeaseHelpers
-
- let(:migration_service) { ::Projects::HashedStorage::MigrationService }
- let(:lease_timeout) { described_class::LEASE_TIMEOUT }
-
- describe '#perform' do
- it 'skips when project no longer exists' do
- stub_exclusive_lease(lease_key(-1), 'uuid', timeout: lease_timeout)
-
- expect(migration_service).not_to receive(:new)
-
- subject.perform(-1)
- end
-
- it 'skips when project is pending delete' do
- pending_delete_project = create(:project, :empty_repo, pending_delete: true)
- stub_exclusive_lease(lease_key(pending_delete_project.id), 'uuid', timeout: lease_timeout)
-
- expect(migration_service).not_to receive(:new)
-
- subject.perform(pending_delete_project.id)
- end
-
- it 'skips when project is already migrated' do
- migrated_project = create(:project, :empty_repo)
- stub_exclusive_lease(lease_key(migrated_project.id), 'uuid', timeout: lease_timeout)
-
- expect(migration_service).not_to receive(:new)
-
- subject.perform(migrated_project.id)
- end
-
- context 'with exclusive lease available' do
- it 'delegates migration to service class' do
- project = create(:project, :empty_repo, :legacy_storage)
- stub_exclusive_lease(lease_key(project.id), 'uuid', timeout: lease_timeout)
-
- service_spy = spy
-
- allow(migration_service)
- .to receive(:new).with(project, project.full_path, logger: subject.logger)
- .and_return(service_spy)
-
- subject.perform(project.id)
-
- expect(service_spy).to have_received(:execute)
- end
-
- it 'delegates migration to service class with correct path in a partially migrated project' do
- project = create(:project, :empty_repo, storage_version: 1)
- stub_exclusive_lease(lease_key(project.id), 'uuid', timeout: lease_timeout)
-
- service_spy = spy
-
- allow(migration_service)
- .to receive(:new).with(project, project.full_path, logger: subject.logger)
- .and_return(service_spy)
-
- subject.perform(project.id)
-
- expect(service_spy).to have_received(:execute)
- end
- end
-
- context 'with exclusive lease taken' do
- it 'skips when it cant acquire the exclusive lease' do
- project = create(:project, :empty_repo, :legacy_storage)
- stub_exclusive_lease_taken(lease_key(project.id), timeout: lease_timeout)
-
- expect(migration_service).not_to receive(:new)
-
- subject.perform(project.id)
- end
- end
- end
-
- def lease_key(key)
- "project_migrate_hashed_storage_worker:#{key}"
- end
-end
diff --git a/spec/workers/hashed_storage/project_rollback_worker_spec.rb b/spec/workers/hashed_storage/project_rollback_worker_spec.rb
deleted file mode 100644
index f27b5e4b9ce..00000000000
--- a/spec/workers/hashed_storage/project_rollback_worker_spec.rb
+++ /dev/null
@@ -1,50 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe HashedStorage::ProjectRollbackWorker, :clean_gitlab_redis_shared_state, feature_category: :source_code_management do
- include ExclusiveLeaseHelpers
-
- describe '#perform' do
- let(:project) { create(:project, :empty_repo) }
- let(:lease_key) { "project_migrate_hashed_storage_worker:#{project.id}" }
- let(:lease_timeout) { described_class::LEASE_TIMEOUT }
- let(:rollback_service) { ::Projects::HashedStorage::RollbackService }
-
- it 'skips when project no longer exists' do
- expect(rollback_service).not_to receive(:new)
-
- subject.perform(-1)
- end
-
- it 'skips when project is pending delete' do
- pending_delete_project = create(:project, :empty_repo, pending_delete: true)
-
- expect(rollback_service).not_to receive(:new)
-
- subject.perform(pending_delete_project.id)
- end
-
- it 'delegates rollback to service class when have exclusive lease' do
- stub_exclusive_lease(lease_key, 'uuid', timeout: lease_timeout)
-
- service_spy = spy
-
- allow(rollback_service)
- .to receive(:new).with(project, project.disk_path, logger: subject.logger)
- .and_return(service_spy)
-
- subject.perform(project.id)
-
- expect(service_spy).to have_received(:execute)
- end
-
- it 'skips when it cant acquire the exclusive lease' do
- stub_exclusive_lease_taken(lease_key, timeout: lease_timeout)
-
- expect(rollback_service).not_to receive(:new)
-
- subject.perform(project.id)
- end
- end
-end
diff --git a/spec/workers/hashed_storage/rollbacker_worker_spec.rb b/spec/workers/hashed_storage/rollbacker_worker_spec.rb
deleted file mode 100644
index af8957d9b96..00000000000
--- a/spec/workers/hashed_storage/rollbacker_worker_spec.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe HashedStorage::RollbackerWorker, feature_category: :source_code_management do
- subject(:worker) { described_class.new }
-
- let(:projects) { create_list(:project, 2, :empty_repo) }
- let(:ids) { projects.map(&:id) }
-
- describe '#perform' do
- it 'delegates to MigratorService' do
- expect_next_instance_of(Gitlab::HashedStorage::Migrator) do |instance|
- expect(instance).to receive(:bulk_rollback).with(start: 5, finish: 10)
- end
-
- worker.perform(5, 10)
- end
-
- it 'rollsback projects in the specified range', :sidekiq_might_not_need_inline do
- perform_enqueued_jobs do
- worker.perform(ids.min, ids.max)
- end
-
- projects.each do |project|
- expect(project.reload.legacy_storage?).to be_truthy
- end
- end
- end
-end
diff --git a/spec/workers/integrations/execute_worker_spec.rb b/spec/workers/integrations/execute_worker_spec.rb
index 369fc5fd091..10e290005cc 100644
--- a/spec/workers/integrations/execute_worker_spec.rb
+++ b/spec/workers/integrations/execute_worker_spec.rb
@@ -1,4 +1,5 @@
# frozen_string_literal: true
+
require 'spec_helper'
RSpec.describe Integrations::ExecuteWorker, '#perform', feature_category: :integrations do
diff --git a/spec/workers/integrations/slack_event_worker_spec.rb b/spec/workers/integrations/slack_event_worker_spec.rb
index 019d68b40e0..6e8c73f1506 100644
--- a/spec/workers/integrations/slack_event_worker_spec.rb
+++ b/spec/workers/integrations/slack_event_worker_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe Integrations::SlackEventWorker, :clean_gitlab_redis_shared_state,
- :clean_gitlab_redis_cluster_shared_state, feature_category: :integrations do
+RSpec.describe Integrations::SlackEventWorker, :clean_gitlab_redis_cluster_shared_state,
+ feature_category: :integrations do
describe '.event?' do
subject { described_class.event?(event) }
diff --git a/spec/workers/issuable/related_links_create_worker_spec.rb b/spec/workers/issuable/related_links_create_worker_spec.rb
new file mode 100644
index 00000000000..d2d0aeb7081
--- /dev/null
+++ b/spec/workers/issuable/related_links_create_worker_spec.rb
@@ -0,0 +1,110 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Issuable::RelatedLinksCreateWorker, feature_category: :portfolio_management do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:issuable) { create(:work_item, :task, project: project) }
+ let_it_be(:target1) { create(:work_item, :task, project: project) }
+ let_it_be(:target2) { create(:work_item, :task, project: project) }
+ let_it_be(:link1) { create(:work_item_link, source: issuable, target: target1) }
+ let_it_be(:link2) { create(:work_item_link, source: issuable, target: target2) }
+ let_it_be(:user) { create(:user) }
+
+ let(:params) do
+ {
+ issuable_class: issuable.class.name,
+ issuable_id: issuable.id,
+ link_ids: [link1.id, link2.id],
+ link_type: 'relates_to',
+ user_id: user.id
+ }.transform_keys(&:to_s)
+ end
+
+ before_all do
+ project.add_reporter(user)
+ end
+
+ subject { described_class.new.perform(params) }
+
+ describe '#perform' do
+ it 'calls #relate_issuable on SystemNoteService' do
+ # One note for the issuable that references all the linked issuables
+ expect(SystemNoteService).to receive(:relate_issuable).with(issuable, [target1, target2], user)
+
+ # One note for each linked issuable referencing the source issuable
+ expect(SystemNoteService).to receive(:relate_issuable).with(target1, issuable, user)
+ expect(SystemNoteService).to receive(:relate_issuable).with(target2, issuable, user)
+
+ subject
+ end
+
+ it 'creates correct notes' do
+ subject
+
+ expect(issuable.notes.last.note)
+ .to eq("marked this task as related to #{target1.to_reference} and #{target2.to_reference}")
+ expect(target1.notes.last.note).to eq("marked this task as related to #{issuable.to_reference}")
+ expect(target2.notes.last.note).to eq("marked this task as related to #{issuable.to_reference}")
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { params }
+ end
+
+ context 'when params contain errors' do
+ it 'does nothing when user is not found' do
+ params['user_id'] = non_existing_record_id
+
+ expect(Sidekiq.logger).not_to receive(:error)
+ expect { subject }.not_to change { Note.count }
+ end
+
+ it 'does nothing when issuable is not found' do
+ params['issuable_id'] = non_existing_record_id
+
+ expect(Sidekiq.logger).not_to receive(:error)
+ expect { subject }.not_to change { Note.count }
+ end
+
+ it 'does nothing when links are not found' do
+ params['link_ids'] = [non_existing_record_id]
+
+ expect(Sidekiq.logger).not_to receive(:error)
+ expect { subject }.not_to change { Note.count }
+ end
+
+ it 'logs error when issuable_class is invalid' do
+ params['issuable_class'] = 'FooBar'
+
+ expect(Sidekiq.logger).to receive(:error).with({
+ worker: described_class.to_s,
+ message: "Failed to complete job (user_id:#{user.id}, issuable_id:#{issuable.id}, " \
+ "issuable_class:FooBar): Unknown class 'FooBar'"
+ })
+
+ subject
+ end
+
+ context 'when notes are not created' do
+ before do
+ allow(SystemNoteService).to receive(:relate_issuable).with(target1, issuable, user).and_call_original
+ allow(SystemNoteService).to receive(:relate_issuable).with(target2, issuable, user).and_return(nil)
+ allow(SystemNoteService).to receive(:relate_issuable).with(issuable, [target1, target2], user).and_return(nil)
+ end
+
+ it 'logs error' do
+ expect(Sidekiq.logger).to receive(:error).with({
+ worker: described_class.to_s,
+ message: "Failed to complete job (user_id:#{user.id}, issuable_id:#{issuable.id}, " \
+ "issuable_class:#{issuable.class.name}): Could not create notes: " \
+ "{noteable_id: #{target2.id}, reference_ids: [#{issuable.id}]}, " \
+ "{noteable_id: #{issuable.id}, reference_ids: #{[target1.id, target2.id]}}"
+ })
+
+ subject
+ end
+ end
+ end
+ end
+end
diff --git a/spec/workers/merge_worker_spec.rb b/spec/workers/merge_worker_spec.rb
index 9c6a6564df6..48d8ea3ab16 100644
--- a/spec/workers/merge_worker_spec.rb
+++ b/spec/workers/merge_worker_spec.rb
@@ -48,4 +48,61 @@ RSpec.describe MergeWorker, feature_category: :source_code_management do
end
end
end
+
+ describe 'delegation to MergeRequests::MergeService' do
+ # Some ids that should be nonexistentn
+ let(:user_id) { -1 }
+ let(:merge_request_id) { -1 }
+ let(:params) { {} }
+
+ subject { described_class.new.perform(merge_request_id, user_id, params) }
+
+ context 'when user exists' do
+ let!(:user) { create(:user) }
+ let(:user_id) { user.id }
+
+ context 'and merge request exists' do
+ let!(:merge_request) { create(:merge_request, source_project: create(:project, :empty_repo)) }
+ let(:merge_request_id) { merge_request.id }
+ let(:user) { merge_request.author }
+ let(:merge_service_double) { instance_double(MergeRequests::MergeService) }
+
+ it 'delegates to MergeRequests::MergeService' do
+ expect(MergeRequests::MergeService).to receive(:new).with(
+ project: merge_request.target_project,
+ current_user: user,
+ params: { check_mergeability_retry_lease: true }
+ ).and_return(merge_service_double)
+
+ expect(merge_service_double).to receive(:execute)
+ subject
+ end
+
+ context 'and check_mergeability_retry_lease is specified' do
+ let(:params) { { check_mergeability_retry_lease: false } }
+
+ it 'does not change the check_mergeability_retry_lease parameter' do
+ expect(MergeRequests::MergeService).to receive(:new).with(
+ project: merge_request.target_project,
+ current_user: user,
+ params: params
+ ).and_return(merge_service_double)
+
+ expect(merge_service_double).to receive(:execute)
+ subject
+ end
+ end
+ end
+
+ it 'does not call MergeRequests::MergeService' do
+ expect(MergeRequests::MergeService).not_to receive(:new)
+ subject
+ end
+ end
+
+ it 'does not call MergeRequests::MergeService' do
+ expect(MergeRequests::MergeService).not_to receive(:new)
+ subject
+ end
+ end
end
diff --git a/spec/workers/pages/deactivated_deployments_delete_cron_worker_spec.rb b/spec/workers/pages/deactivated_deployments_delete_cron_worker_spec.rb
new file mode 100644
index 00000000000..b4ee1d6b439
--- /dev/null
+++ b/spec/workers/pages/deactivated_deployments_delete_cron_worker_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Pages::DeactivatedDeploymentsDeleteCronWorker, feature_category: :pages do
+ subject(:worker) { described_class.new }
+
+ it 'deletes all deactivated pages deployments' do
+ create(:pages_deployment) # active
+ create(:pages_deployment, deleted_at: 3.minutes.ago) # deactivated
+ create(:pages_deployment, path_prefix: 'other', deleted_at: 3.minutes.ago) # deactivated
+
+ expect { worker.perform }.to change { PagesDeployment.count }.by(-2)
+ end
+end
diff --git a/spec/workers/partition_creation_worker_spec.rb b/spec/workers/partition_creation_worker_spec.rb
index ab525fd5ce2..625e86ad852 100644
--- a/spec/workers/partition_creation_worker_spec.rb
+++ b/spec/workers/partition_creation_worker_spec.rb
@@ -1,5 +1,5 @@
# frozen_string_literal: true
-#
+
require 'spec_helper'
RSpec.describe PartitionCreationWorker, feature_category: :database do
diff --git a/spec/workers/projects/after_import_worker_spec.rb b/spec/workers/projects/after_import_worker_spec.rb
index 5af4f49d6e0..18105488549 100644
--- a/spec/workers/projects/after_import_worker_spec.rb
+++ b/spec/workers/projects/after_import_worker_spec.rb
@@ -83,7 +83,7 @@ RSpec.describe Projects::AfterImportWorker, feature_category: :importers do
message: 'Project housekeeping failed',
project_full_path: project.full_path,
project_id: project.id,
- 'error.message' => exception.to_s
+ 'exception.message' => exception.to_s
}).and_call_original
subject
diff --git a/spec/workers/projects/delete_branch_worker_spec.rb b/spec/workers/projects/delete_branch_worker_spec.rb
index 771ab3def84..ddd65e51383 100644
--- a/spec/workers/projects/delete_branch_worker_spec.rb
+++ b/spec/workers/projects/delete_branch_worker_spec.rb
@@ -1,4 +1,5 @@
# frozen_string_literal: true
+
# rubocop: disable Gitlab/ServiceResponse
require 'spec_helper'
diff --git a/spec/workers/projects/record_target_platforms_worker_spec.rb b/spec/workers/projects/record_target_platforms_worker_spec.rb
index 116da404112..d4515f7727a 100644
--- a/spec/workers/projects/record_target_platforms_worker_spec.rb
+++ b/spec/workers/projects/record_target_platforms_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::RecordTargetPlatformsWorker, feature_category: :experimentation_activation do
+RSpec.describe Projects::RecordTargetPlatformsWorker, feature_category: :activation do
include ExclusiveLeaseHelpers
let_it_be(:swift) { create(:programming_language, name: 'Swift') }
diff --git a/spec/workers/tasks_to_be_done/create_worker_spec.rb b/spec/workers/tasks_to_be_done/create_worker_spec.rb
index 643424ae068..3a4e10b6a6f 100644
--- a/spec/workers/tasks_to_be_done/create_worker_spec.rb
+++ b/spec/workers/tasks_to_be_done/create_worker_spec.rb
@@ -3,34 +3,20 @@
require 'spec_helper'
RSpec.describe TasksToBeDone::CreateWorker, feature_category: :onboarding do
- let_it_be(:member_task) { create(:member_task, tasks: MemberTask::TASKS.values) }
let_it_be(:current_user) { create(:user) }
let(:assignee_ids) { [1, 2] }
- let(:job_args) { [member_task.id, current_user.id, assignee_ids] }
-
- before do
- member_task.project.group.add_owner(current_user)
- end
+ let(:job_args) { [123, current_user.id, assignee_ids] }
describe '.perform' do
it 'executes the task services for all tasks to be done', :aggregate_failures do
- MemberTask::TASKS.each_key do |task|
- service_class = "TasksToBeDone::Create#{task.to_s.camelize}TaskService".constantize
-
- expect(service_class)
- .to receive(:new)
- .with(container: member_task.project, current_user: current_user, assignee_ids: assignee_ids)
- .and_call_original
- end
-
- expect { described_class.new.perform(*job_args) }.to change { Issue.count }.by(3)
+ expect { described_class.new.perform(*job_args) }.not_to change { Issue.count }
end
end
include_examples 'an idempotent worker' do
it 'creates 3 task issues' do
- expect { subject }.to change { Issue.count }.by(3)
+ expect { subject }.not_to change { Issue.count }
end
end
end
diff --git a/spec/workers/web_hook_worker_spec.rb b/spec/workers/web_hook_worker_spec.rb
index cd58dd93b80..1e82b0f2845 100644
--- a/spec/workers/web_hook_worker_spec.rb
+++ b/spec/workers/web_hook_worker_spec.rb
@@ -1,4 +1,5 @@
# frozen_string_literal: true
+
require 'spec_helper'
RSpec.describe WebHookWorker, feature_category: :integrations do