gitlab.com/gitlab-org/gitlab-foss.git
author     GitLab Bot <gitlab-bot@gitlab.com>   2023-09-20 14:18:08 +0300
committer  GitLab Bot <gitlab-bot@gitlab.com>   2023-09-20 14:18:08 +0300
commit     5afcbe03ead9ada87621888a31a62652b10a7e4f (patch)
tree       9918b67a0d0f0bafa6542e839a8be37adf73102d /spec
parent     c97c0201564848c1f53226fe19d71fdcc472f7d0 (diff)
Add latest changes from gitlab-org/gitlab@16-4-stable-ee (tag: v16.4.0-rc42)
Diffstat (limited to 'spec')
-rw-r--r--spec/components/pajamas/banner_component_spec.rb2
-rw-r--r--spec/contracts/consumer/resources/graphql/pipelines.js4
-rw-r--r--spec/contracts/consumer/specs/project/pipelines/show.spec.js4
-rw-r--r--spec/controllers/activity_pub/projects/releases_controller_spec.rb134
-rw-r--r--spec/controllers/admin/jobs_controller_spec.rb2
-rw-r--r--spec/controllers/admin/users_controller_spec.rb2
-rw-r--r--spec/controllers/application_controller_spec.rb27
-rw-r--r--spec/controllers/concerns/onboarding/status_spec.rb28
-rw-r--r--spec/controllers/concerns/preferred_language_switcher_spec.rb74
-rw-r--r--spec/controllers/confirmations_controller_spec.rb22
-rw-r--r--spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb4
-rw-r--r--spec/controllers/groups/labels_controller_spec.rb48
-rw-r--r--spec/controllers/groups/runners_controller_spec.rb230
-rw-r--r--spec/controllers/groups/uploads_controller_spec.rb2
-rw-r--r--spec/controllers/invites_controller_spec.rb20
-rw-r--r--spec/controllers/oauth/applications_controller_spec.rb15
-rw-r--r--spec/controllers/oauth/authorizations_controller_spec.rb33
-rw-r--r--spec/controllers/profiles/notifications_controller_spec.rb3
-rw-r--r--spec/controllers/profiles/personal_access_tokens_controller_spec.rb56
-rw-r--r--spec/controllers/profiles/preferences_controller_spec.rb1
-rw-r--r--spec/controllers/profiles_controller_spec.rb14
-rw-r--r--spec/controllers/projects/alerting/notifications_controller_spec.rb27
-rw-r--r--spec/controllers/projects/environments/sample_metrics_controller_spec.rb55
-rw-r--r--spec/controllers/projects/environments_controller_spec.rb2
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb10
-rw-r--r--spec/controllers/projects/jobs_controller_spec.rb16
-rw-r--r--spec/controllers/projects/labels_controller_spec.rb47
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb66
-rw-r--r--spec/controllers/projects/notes_controller_spec.rb2
-rw-r--r--spec/controllers/projects/pipelines_controller_spec.rb42
-rw-r--r--spec/controllers/projects/prometheus/alerts_controller_spec.rb110
-rw-r--r--spec/controllers/projects/uploads_controller_spec.rb2
-rw-r--r--spec/controllers/registrations/welcome_controller_spec.rb28
-rw-r--r--spec/controllers/repositories/git_http_controller_spec.rb25
-rw-r--r--spec/controllers/search_controller_spec.rb54
-rw-r--r--spec/controllers/sent_notifications_controller_spec.rb2
-rw-r--r--spec/controllers/uploads_controller_spec.rb2
-rw-r--r--spec/db/avoid_migration_name_collisions_spec.rb17
-rw-r--r--spec/db/schema_spec.rb51
-rw-r--r--spec/experiments/application_experiment_spec.rb31
-rw-r--r--spec/factories/ci/catalog/resources.rb2
-rw-r--r--spec/factories/ci/catalog/resources/components.rb4
-rw-r--r--spec/factories/ci/catalog/resources/versions.rb4
-rw-r--r--spec/factories/ci/reports/sbom/metadatum.rb44
-rw-r--r--spec/factories/ci/reports/sbom/reports.rb20
-rw-r--r--spec/factories/issues.rb12
-rw-r--r--spec/factories/merge_requests.rb8
-rw-r--r--spec/factories/metrics/dashboard/annotations.rb9
-rw-r--r--spec/factories/metrics/users_starred_dashboards.rb9
-rw-r--r--spec/factories/ml/candidate_params.rb2
-rw-r--r--spec/factories/ml/candidates.rb10
-rw-r--r--spec/factories/packages/dependency_links.rb20
-rw-r--r--spec/factories/packages/nuget/symbol.rb11
-rw-r--r--spec/factories/packages/package_protection_rules.rb10
-rw-r--r--spec/factories/packages/packages.rb4
-rw-r--r--spec/factories/pages_domains.rb466
-rw-r--r--spec/factories/project_alerting_settings.rb20
-rw-r--r--spec/factories/project_authorizations.rb8
-rw-r--r--spec/factories/project_metrics_settings.rb8
-rw-r--r--spec/factories/projects.rb2
-rw-r--r--spec/factories/self_managed_prometheus_alert_event.rb12
-rw-r--r--spec/factories/service_desk/custom_email_verification.rb5
-rw-r--r--spec/factories/usage_data.rb5
-rw-r--r--spec/factories/users.rb4
-rw-r--r--spec/factories/users/group_visit.rb12
-rw-r--r--spec/factories/users/project_visit.rb12
-rw-r--r--spec/factories/work_items.rb12
-rw-r--r--spec/features/abuse_report_spec.rb8
-rw-r--r--spec/features/admin/admin_abuse_reports_spec.rb354
-rw-r--r--spec/features/admin/admin_hooks_spec.rb2
-rw-r--r--spec/features/admin/admin_jobs_spec.rb114
-rw-r--r--spec/features/admin/admin_mode/logout_spec.rb2
-rw-r--r--spec/features/admin/admin_mode/workers_spec.rb6
-rw-r--r--spec/features/admin/admin_mode_spec.rb2
-rw-r--r--spec/features/admin/admin_runners_spec.rb7
-rw-r--r--spec/features/admin/admin_sees_background_migrations_spec.rb2
-rw-r--r--spec/features/admin/admin_settings_spec.rb10
-rw-r--r--spec/features/admin/users/user_spec.rb10
-rw-r--r--spec/features/admin/users/users_spec.rb6
-rw-r--r--spec/features/alert_management/alert_details_spec.rb2
-rw-r--r--spec/features/alert_management/alert_management_list_spec.rb2
-rw-r--r--spec/features/boards/board_filters_spec.rb250
-rw-r--r--spec/features/boards/multiple_boards_spec.rb2
-rw-r--r--spec/features/boards/sidebar_spec.rb2
-rw-r--r--spec/features/calendar_spec.rb2
-rw-r--r--spec/features/contextual_sidebar_spec.rb5
-rw-r--r--spec/features/cycle_analytics_spec.rb4
-rw-r--r--spec/features/dashboard/activity_spec.rb2
-rw-r--r--spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb2
-rw-r--r--spec/features/dashboard/groups_list_spec.rb13
-rw-r--r--spec/features/dashboard/issuables_counter_spec.rb2
-rw-r--r--spec/features/dashboard/issues_spec.rb2
-rw-r--r--spec/features/dashboard/merge_requests_spec.rb2
-rw-r--r--spec/features/dashboard/milestones_spec.rb4
-rw-r--r--spec/features/dashboard/navbar_spec.rb2
-rw-r--r--spec/features/dashboard/projects_spec.rb2
-rw-r--r--spec/features/dashboard/shortcuts_spec.rb2
-rw-r--r--spec/features/dashboard/snippets_spec.rb2
-rw-r--r--spec/features/dashboard/todos/todos_spec.rb6
-rw-r--r--spec/features/explore/groups_list_spec.rb132
-rw-r--r--spec/features/explore/navbar_spec.rb1
-rw-r--r--spec/features/explore/user_explores_projects_spec.rb4
-rw-r--r--spec/features/global_search_spec.rb2
-rw-r--r--spec/features/groups/container_registry_spec.rb2
-rw-r--r--spec/features/groups/dependency_proxy_for_containers_spec.rb12
-rw-r--r--spec/features/groups/dependency_proxy_spec.rb4
-rw-r--r--spec/features/groups/group_page_with_external_authorization_service_spec.rb2
-rw-r--r--spec/features/groups/group_runners_spec.rb301
-rw-r--r--spec/features/groups/labels/create_spec.rb6
-rw-r--r--spec/features/groups/labels/edit_spec.rb14
-rw-r--r--spec/features/groups/members/manage_members_spec.rb2
-rw-r--r--spec/features/groups/members/request_access_spec.rb4
-rw-r--r--spec/features/groups/navbar_spec.rb5
-rw-r--r--spec/features/groups/new_group_page_spec.rb4
-rw-r--r--spec/features/groups/packages_spec.rb2
-rw-r--r--spec/features/groups/settings/packages_and_registries_spec.rb2
-rw-r--r--spec/features/groups/user_sees_package_sidebar_spec.rb2
-rw-r--r--spec/features/groups_spec.rb2
-rw-r--r--spec/features/help_dropdown_spec.rb4
-rw-r--r--spec/features/ide/user_opens_merge_request_spec.rb6
-rw-r--r--spec/features/incidents/incident_details_spec.rb5
-rw-r--r--spec/features/invites_spec.rb52
-rw-r--r--spec/features/issues/filtered_search/dropdown_assignee_spec.rb28
-rw-r--r--spec/features/issues/filtered_search/visual_tokens_spec.rb4
-rw-r--r--spec/features/issues/form_spec.rb85
-rw-r--r--spec/features/issues/issue_state_spec.rb66
-rw-r--r--spec/features/issues/move_spec.rb2
-rw-r--r--spec/features/issues/note_polling_spec.rb16
-rw-r--r--spec/features/issues/service_desk_spec.rb6
-rw-r--r--spec/features/issues/todo_spec.rb2
-rw-r--r--spec/features/issues/user_creates_issue_spec.rb2
-rw-r--r--spec/features/issues/user_sees_live_update_spec.rb22
-rw-r--r--spec/features/issues/user_uses_quick_actions_spec.rb4
-rw-r--r--spec/features/jira_connect/branches_spec.rb4
-rw-r--r--spec/features/labels_hierarchy_spec.rb2
-rw-r--r--spec/features/merge_request/user_closes_reopens_merge_request_state_spec.rb81
-rw-r--r--spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb3
-rw-r--r--spec/features/merge_request/user_merges_merge_request_spec.rb4
-rw-r--r--spec/features/merge_request/user_opens_checkout_branch_modal_spec.rb7
-rw-r--r--spec/features/merge_request/user_sees_check_out_branch_modal_spec.rb6
-rw-r--r--spec/features/merge_request/user_sees_deployment_widget_spec.rb34
-rw-r--r--spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb506
-rw-r--r--spec/features/merge_request/user_sees_merge_widget_spec.rb29
-rw-r--r--spec/features/merge_request/user_sees_pipelines_from_forked_project_spec.rb28
-rw-r--r--spec/features/merge_request/user_sees_pipelines_spec.rb396
-rw-r--r--spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb5
-rw-r--r--spec/features/merge_request/user_sets_to_auto_merge_spec.rb (renamed from spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb)46
-rw-r--r--spec/features/merge_request/user_uses_quick_actions_spec.rb11
-rw-r--r--spec/features/monitor_sidebar_link_spec.rb2
-rw-r--r--spec/features/nav/pinned_nav_items_spec.rb18
-rw-r--r--spec/features/nav/top_nav_responsive_spec.rb2
-rw-r--r--spec/features/nav/top_nav_spec.rb2
-rw-r--r--spec/features/oauth_login_spec.rb2
-rw-r--r--spec/features/profiles/user_edit_profile_spec.rb4
-rw-r--r--spec/features/profiles/user_visits_notifications_tab_spec.rb8
-rw-r--r--spec/features/profiles/user_visits_profile_account_page_spec.rb2
-rw-r--r--spec/features/profiles/user_visits_profile_authentication_log_spec.rb2
-rw-r--r--spec/features/profiles/user_visits_profile_preferences_page_spec.rb2
-rw-r--r--spec/features/profiles/user_visits_profile_spec.rb2
-rw-r--r--spec/features/profiles/user_visits_profile_ssh_keys_page_spec.rb2
-rw-r--r--spec/features/projects/active_tabs_spec.rb5
-rw-r--r--spec/features/projects/branches/user_creates_branch_spec.rb2
-rw-r--r--spec/features/projects/ci/editor_spec.rb2
-rw-r--r--spec/features/projects/clusters/gcp_spec.rb2
-rw-r--r--spec/features/projects/clusters/user_spec.rb2
-rw-r--r--spec/features/projects/clusters_spec.rb2
-rw-r--r--spec/features/projects/commit/user_sees_pipelines_tab_spec.rb2
-rw-r--r--spec/features/projects/confluence/user_views_confluence_page_spec.rb2
-rw-r--r--spec/features/projects/environments/environment_spec.rb188
-rw-r--r--spec/features/projects/feature_flags/user_creates_feature_flag_spec.rb29
-rw-r--r--spec/features/projects/features_visibility_spec.rb4
-rw-r--r--spec/features/projects/files/project_owner_creates_license_file_spec.rb4
-rw-r--r--spec/features/projects/files/user_find_file_spec.rb2
-rw-r--r--spec/features/projects/files/user_searches_for_files_spec.rb11
-rw-r--r--spec/features/projects/forks/fork_list_spec.rb2
-rw-r--r--spec/features/projects/graph_spec.rb2
-rw-r--r--spec/features/projects/import_export/export_file_spec.rb4
-rw-r--r--spec/features/projects/jobs/user_browses_jobs_spec.rb8
-rw-r--r--spec/features/projects/jobs_spec.rb8
-rw-r--r--spec/features/projects/labels/user_creates_labels_spec.rb2
-rw-r--r--spec/features/projects/labels/user_edits_labels_spec.rb25
-rw-r--r--spec/features/projects/members/manage_members_spec.rb2
-rw-r--r--spec/features/projects/members/user_requests_access_spec.rb4
-rw-r--r--spec/features/projects/milestones/user_interacts_with_labels_spec.rb2
-rw-r--r--spec/features/projects/navbar_spec.rb8
-rw-r--r--spec/features/projects/new_project_spec.rb14
-rw-r--r--spec/features/projects/pages/user_edits_settings_spec.rb2
-rw-r--r--spec/features/projects/pipeline_schedules_spec.rb484
-rw-r--r--spec/features/projects/pipelines/pipeline_spec.rb12
-rw-r--r--spec/features/projects/pipelines/pipelines_spec.rb18
-rw-r--r--spec/features/projects/settings/monitor_settings_spec.rb5
-rw-r--r--spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb2
-rw-r--r--spec/features/projects/settings/registry_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/service_desk_setting_spec.rb2
-rw-r--r--spec/features/projects/show/user_sees_collaboration_links_spec.rb2
-rw-r--r--spec/features/projects/user_sees_sidebar_spec.rb4
-rw-r--r--spec/features/projects/user_uses_shortcuts_spec.rb57
-rw-r--r--spec/features/projects/wikis_spec.rb2
-rw-r--r--spec/features/projects/work_items/work_item_spec.rb48
-rw-r--r--spec/features/projects_spec.rb18
-rw-r--r--spec/features/runners_spec.rb489
-rw-r--r--spec/features/search/user_searches_for_code_spec.rb2
-rw-r--r--spec/features/search/user_searches_for_comments_spec.rb2
-rw-r--r--spec/features/search/user_searches_for_commits_spec.rb2
-rw-r--r--spec/features/search/user_searches_for_issues_spec.rb2
-rw-r--r--spec/features/search/user_searches_for_merge_requests_spec.rb2
-rw-r--r--spec/features/search/user_searches_for_milestones_spec.rb2
-rw-r--r--spec/features/search/user_searches_for_projects_spec.rb2
-rw-r--r--spec/features/search/user_searches_for_users_spec.rb6
-rw-r--r--spec/features/search/user_searches_for_wiki_pages_spec.rb2
-rw-r--r--spec/features/search/user_uses_header_search_field_spec.rb4
-rw-r--r--spec/features/sentry_js_spec.rb1
-rw-r--r--spec/features/signed_commits_spec.rb8
-rw-r--r--spec/features/snippets/search_snippets_spec.rb5
-rw-r--r--spec/features/snippets/show_spec.rb8
-rw-r--r--spec/features/snippets/user_creates_snippet_spec.rb10
-rw-r--r--spec/features/task_lists_spec.rb4
-rw-r--r--spec/features/unsubscribe_links_spec.rb8
-rw-r--r--spec/features/uploads/user_uploads_avatar_to_profile_spec.rb2
-rw-r--r--spec/features/usage_stats_consent_spec.rb2
-rw-r--r--spec/features/users/active_sessions_spec.rb8
-rw-r--r--spec/features/users/anonymous_sessions_spec.rb2
-rw-r--r--spec/features/users/email_verification_on_login_spec.rb11
-rw-r--r--spec/features/users/login_spec.rb50
-rw-r--r--spec/features/users/logout_spec.rb2
-rw-r--r--spec/features/users/overview_spec.rb2
-rw-r--r--spec/features/users/rss_spec.rb6
-rw-r--r--spec/features/users/show_spec.rb4
-rw-r--r--spec/features/users/signup_spec.rb16
-rw-r--r--spec/features/users/snippets_spec.rb6
-rw-r--r--spec/features/users/terms_spec.rb6
-rw-r--r--spec/features/users/user_browses_projects_on_user_page_spec.rb6
-rw-r--r--spec/features/webauthn_spec.rb17
-rw-r--r--spec/features/whats_new_spec.rb6
-rw-r--r--spec/finders/abuse_reports_finder_spec.rb48
-rw-r--r--spec/finders/ci/jobs_finder_spec.rb281
-rw-r--r--spec/finders/ci/runners_finder_spec.rb226
-rw-r--r--spec/finders/ci/triggers_finder_spec.rb29
-rw-r--r--spec/finders/deployments_finder_spec.rb16
-rw-r--r--spec/finders/group_members_finder_spec.rb35
-rw-r--r--spec/finders/groups/accepting_group_transfers_finder_spec.rb21
-rw-r--r--spec/finders/organizations/groups_finder_spec.rb84
-rw-r--r--spec/finders/organizations/organization_users_finder_spec.rb35
-rw-r--r--spec/finders/packages/npm/packages_for_user_finder_spec.rb41
-rw-r--r--spec/finders/packages/nuget/package_finder_spec.rb10
-rw-r--r--spec/fixtures/api/schemas/entities/codequality_degradation.json3
-rw-r--r--spec/fixtures/api/schemas/job/job.json1
-rw-r--r--spec/fixtures/api/schemas/ml/search_runs.json82
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/integration.json3
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/operations/strategy.json3
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/operations/user_list.json16
-rw-r--r--spec/fixtures/api/schemas/status/ci_detailed_status.json2
-rw-r--r--spec/fixtures/ci_secure_files/sample.p12bin3352 -> 3219 bytes
-rw-r--r--spec/fixtures/lib/generators/gitlab/snowplow_event_definition_generator/sample_event.yml14
-rw-r--r--spec/fixtures/lib/generators/gitlab/snowplow_event_definition_generator/sample_event_ee.yml10
-rw-r--r--spec/fixtures/packages/nuget/symbol/package.pdbbin0 -> 10588 bytes
-rw-r--r--spec/fixtures/security_reports/master/gl-common-scanning-report.json48
-rw-r--r--spec/frontend/__helpers__/clean_html_element_serializer.js142
-rw-r--r--spec/frontend/__helpers__/dom_shims/get_client_rects.js3
-rw-r--r--spec/frontend/__helpers__/html_string_serializer.js11
-rw-r--r--spec/frontend/__helpers__/vue_test_utils_helper.js121
-rw-r--r--spec/frontend/__helpers__/vue_test_utils_helper_spec.js46
-rw-r--r--spec/frontend/access_tokens/components/access_token_table_app_spec.js4
-rw-r--r--spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap4
-rw-r--r--spec/frontend/admin/abuse_report/components/abuse_report_app_spec.js80
-rw-r--r--spec/frontend/admin/abuse_report/components/activity_events_list_spec.js30
-rw-r--r--spec/frontend/admin/abuse_report/components/activity_history_item_spec.js (renamed from spec/frontend/admin/abuse_report/components/history_items_spec.js)20
-rw-r--r--spec/frontend/admin/abuse_report/components/labels_select_spec.js297
-rw-r--r--spec/frontend/admin/abuse_report/components/report_actions_spec.js27
-rw-r--r--spec/frontend/admin/abuse_report/components/report_details_spec.js74
-rw-r--r--spec/frontend/admin/abuse_report/components/report_header_spec.js55
-rw-r--r--spec/frontend/admin/abuse_report/components/reported_content_spec.js11
-rw-r--r--spec/frontend/admin/abuse_report/components/user_details_spec.js62
-rw-r--r--spec/frontend/admin/abuse_report/mock_data.js88
-rw-r--r--spec/frontend/admin/abuse_reports/components/abuse_report_row_spec.js14
-rw-r--r--spec/frontend/admin/abuse_reports/mock_data.js3
-rw-r--r--spec/frontend/admin/applications/components/__snapshots__/delete_application_spec.js.snap1
-rw-r--r--spec/frontend/admin/topics/components/__snapshots__/remove_avatar_spec.js.snap1
-rw-r--r--spec/frontend/admin/users/components/associations/__snapshots__/associations_list_item_spec.js.snap9
-rw-r--r--spec/frontend/admin/users/components/modals/__snapshots__/delete_user_modal_spec.js.snap2
-rw-r--r--spec/frontend/admin/users/components/user_actions_spec.js2
-rw-r--r--spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap32
-rw-r--r--spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js2
-rw-r--r--spec/frontend/analytics/cycle_analytics/components/__snapshots__/total_time_spec.js.snap46
-rw-r--r--spec/frontend/api/application_settings_api_spec.js45
-rw-r--r--spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap74
-rw-r--r--spec/frontend/avatar_helper_spec.js110
-rw-r--r--spec/frontend/behaviors/markdown/paste_markdown_table_spec.js6
-rw-r--r--spec/frontend/blob/components/__snapshots__/blob_edit_header_spec.js.snap6
-rw-r--r--spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap10
-rw-r--r--spec/frontend/blob/components/__snapshots__/blob_header_spec.js.snap34
-rw-r--r--spec/frontend/blob/components/blob_header_spec.js61
-rw-r--r--spec/frontend/blob/components/mock_data.js15
-rw-r--r--spec/frontend/blob/line_highlighter_spec.js9
-rw-r--r--spec/frontend/blob/openapi/index_spec.js31
-rw-r--r--spec/frontend/boards/board_card_inner_spec.js1
-rw-r--r--spec/frontend/boards/components/board_card_spec.js1
-rw-r--r--spec/frontend/boards/components/boards_selector_spec.js12
-rw-r--r--spec/frontend/boards/components/issue_board_filtered_search_spec.js7
-rw-r--r--spec/frontend/boards/components/sidebar/board_sidebar_title_spec.js8
-rw-r--r--spec/frontend/boards/mock_data.js5
-rw-r--r--spec/frontend/branches/components/__snapshots__/delete_merged_branches_spec.js.snap51
-rw-r--r--spec/frontend/branches/components/__snapshots__/divergence_graph_spec.js.snap8
-rw-r--r--spec/frontend/ci/admin/jobs_table/admin_job_table_app_spec.js445
-rw-r--r--spec/frontend/ci/admin/jobs_table/components/cancel_jobs_modal_spec.js (renamed from spec/frontend/pages/admin/jobs/components/cancel_jobs_modal_spec.js)2
-rw-r--r--spec/frontend/ci/admin/jobs_table/components/cancel_jobs_spec.js54
-rw-r--r--spec/frontend/ci/admin/jobs_table/components/cells/project_cell_spec.js (renamed from spec/frontend/pages/admin/jobs/components/table/cells/project_cell_spec.js)4
-rw-r--r--spec/frontend/ci/admin/jobs_table/components/cells/runner_cell_spec.js (renamed from spec/frontend/pages/admin/jobs/components/table/cells/runner_cell_spec.js)6
-rw-r--r--spec/frontend/ci/admin/jobs_table/components/jobs_skeleton_loader_spec.js (renamed from spec/frontend/pages/admin/jobs/components/jobs_skeleton_loader_spec.js)2
-rw-r--r--spec/frontend/ci/admin/jobs_table/graphql/cache_config_spec.js (renamed from spec/frontend/pages/admin/jobs/components/table/graphql/cache_config_spec.js)4
-rw-r--r--spec/frontend/ci/artifacts/components/feedback_banner_spec.js59
-rw-r--r--spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js12
-rw-r--r--spec/frontend/ci/ci_variable_list/ci_variable_list/ci_variable_list_spec.js161
-rw-r--r--spec/frontend/ci/ci_variable_list/ci_variable_list/native_form_variable_list_spec.js41
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js338
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js15
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_variable_table_spec.js5
-rw-r--r--spec/frontend/ci/common/pipelines_table_spec.js (renamed from spec/frontend/pipelines/pipelines_table_spec.js)18
-rw-r--r--spec/frontend/ci/common/private/job_links_layer_spec.js (renamed from spec/frontend/pipelines/graph_shared/links_layer_spec.js)6
-rw-r--r--spec/frontend/ci/common/private/jobs_filtered_search/jobs_filtered_search_spec.js123
-rw-r--r--spec/frontend/ci/common/private/jobs_filtered_search/tokens/job_status_token_spec.js (renamed from spec/frontend/jobs/components/filtered_search/tokens/job_status_token_spec.js)2
-rw-r--r--spec/frontend/ci/common/private/jobs_filtered_search/utils_spec.js22
-rw-r--r--spec/frontend/ci/job_details/components/empty_state_spec.js (renamed from spec/frontend/jobs/components/job/empty_state_spec.js)6
-rw-r--r--spec/frontend/ci/job_details/components/environments_block_spec.js (renamed from spec/frontend/jobs/components/job/environments_block_spec.js)2
-rw-r--r--spec/frontend/ci/job_details/components/erased_block_spec.js (renamed from spec/frontend/jobs/components/job/erased_block_spec.js)2
-rw-r--r--spec/frontend/ci/job_details/components/job_header_spec.js (renamed from spec/frontend/vue_shared/components/header_ci_component_spec.js)41
-rw-r--r--spec/frontend/ci/job_details/components/job_log_controllers_spec.js (renamed from spec/frontend/jobs/components/job/job_log_controllers_spec.js)8
-rw-r--r--spec/frontend/ci/job_details/components/log/collapsible_section_spec.js (renamed from spec/frontend/jobs/components/log/collapsible_section_spec.js)26
-rw-r--r--spec/frontend/ci/job_details/components/log/duration_badge_spec.js (renamed from spec/frontend/jobs/components/log/duration_badge_spec.js)2
-rw-r--r--spec/frontend/ci/job_details/components/log/line_header_spec.js (renamed from spec/frontend/jobs/components/log/line_header_spec.js)42
-rw-r--r--spec/frontend/ci/job_details/components/log/line_number_spec.js (renamed from spec/frontend/jobs/components/log/line_number_spec.js)2
-rw-r--r--spec/frontend/ci/job_details/components/log/line_spec.js (renamed from spec/frontend/jobs/components/log/line_spec.js)17
-rw-r--r--spec/frontend/ci/job_details/components/log/log_spec.js (renamed from spec/frontend/jobs/components/log/log_spec.js)35
-rw-r--r--spec/frontend/ci/job_details/components/log/mock_data.js (renamed from spec/frontend/jobs/components/log/mock_data.js)0
-rw-r--r--spec/frontend/ci/job_details/components/manual_variables_form_spec.js (renamed from spec/frontend/jobs/components/job/manual_variables_form_spec.js)12
-rw-r--r--spec/frontend/ci/job_details/components/sidebar/artifacts_block_spec.js (renamed from spec/frontend/jobs/components/job/artifacts_block_spec.js)6
-rw-r--r--spec/frontend/ci/job_details/components/sidebar/commit_block_spec.js (renamed from spec/frontend/jobs/components/job/commit_block_spec.js)2
-rw-r--r--spec/frontend/ci/job_details/components/sidebar/external_links_block_spec.js49
-rw-r--r--spec/frontend/ci/job_details/components/sidebar/job_container_item_spec.js (renamed from spec/frontend/jobs/components/job/job_container_item_spec.js)4
-rw-r--r--spec/frontend/ci/job_details/components/sidebar/job_retry_forward_deployment_modal_spec.js (renamed from spec/frontend/jobs/components/job/job_retry_forward_deployment_modal_spec.js)17
-rw-r--r--spec/frontend/ci/job_details/components/sidebar/job_sidebar_retry_button_spec.js (renamed from spec/frontend/jobs/components/job/job_sidebar_retry_button_spec.js)6
-rw-r--r--spec/frontend/ci/job_details/components/sidebar/jobs_container_spec.js (renamed from spec/frontend/jobs/components/job/jobs_container_spec.js)2
-rw-r--r--spec/frontend/ci/job_details/components/sidebar/sidebar_detail_row_spec.js (renamed from spec/frontend/jobs/components/job/sidebar_detail_row_spec.js)2
-rw-r--r--spec/frontend/ci/job_details/components/sidebar/sidebar_header_spec.js (renamed from spec/frontend/jobs/components/job/sidebar_header_spec.js)22
-rw-r--r--spec/frontend/ci/job_details/components/sidebar/sidebar_job_details_container_spec.js (renamed from spec/frontend/jobs/components/job/job_sidebar_details_container_spec.js)11
-rw-r--r--spec/frontend/ci/job_details/components/sidebar/sidebar_spec.js (renamed from spec/frontend/jobs/components/job/sidebar_spec.js)84
-rw-r--r--spec/frontend/ci/job_details/components/sidebar/stages_dropdown_spec.js (renamed from spec/frontend/jobs/components/job/stages_dropdown_spec.js)11
-rw-r--r--spec/frontend/ci/job_details/components/sidebar/trigger_block_spec.js (renamed from spec/frontend/jobs/components/job/trigger_block_spec.js)2
-rw-r--r--spec/frontend/ci/job_details/components/stuck_block_spec.js (renamed from spec/frontend/jobs/components/job/stuck_block_spec.js)2
-rw-r--r--spec/frontend/ci/job_details/components/unmet_prerequisites_block_spec.js (renamed from spec/frontend/jobs/components/job/unmet_prerequisites_block_spec.js)2
-rw-r--r--spec/frontend/ci/job_details/job_app_spec.js (renamed from spec/frontend/jobs/components/job/job_app_spec.js)24
-rw-r--r--spec/frontend/ci/job_details/mock_data.js (renamed from spec/frontend/jobs/components/job/mock_data.js)0
-rw-r--r--spec/frontend/ci/job_details/store/actions_spec.js (renamed from spec/frontend/jobs/store/actions_spec.js)6
-rw-r--r--spec/frontend/ci/job_details/store/getters_spec.js (renamed from spec/frontend/jobs/store/getters_spec.js)4
-rw-r--r--spec/frontend/ci/job_details/store/helpers.js (renamed from spec/frontend/jobs/store/helpers.js)2
-rw-r--r--spec/frontend/ci/job_details/store/mutations_spec.js (renamed from spec/frontend/jobs/store/mutations_spec.js)6
-rw-r--r--spec/frontend/ci/job_details/store/utils_spec.js (renamed from spec/frontend/jobs/store/utils_spec.js)2
-rw-r--r--spec/frontend/ci/job_details/utils_spec.js265
-rw-r--r--spec/frontend/ci/jobs_mock_data.js (renamed from spec/frontend/jobs/mock_data.js)1
-rw-r--r--spec/frontend/ci/jobs_page/components/job_cells/actions_cell_spec.js (renamed from spec/frontend/jobs/components/table/cells/actions_cell_spec.js)14
-rw-r--r--spec/frontend/ci/jobs_page/components/job_cells/duration_cell_spec.js (renamed from spec/frontend/jobs/components/table/cells/duration_cell_spec.js)2
-rw-r--r--spec/frontend/ci/jobs_page/components/job_cells/job_cell_spec.js (renamed from spec/frontend/jobs/components/table/cells/job_cell_spec.js)4
-rw-r--r--spec/frontend/ci/jobs_page/components/job_cells/pipeline_cell_spec.js (renamed from spec/frontend/jobs/components/table/cells/pipeline_cell_spec.js)2
-rw-r--r--spec/frontend/ci/jobs_page/components/jobs_table_empty_state_spec.js (renamed from spec/frontend/jobs/components/table/jobs_table_empty_state_spec.js)2
-rw-r--r--spec/frontend/ci/jobs_page/components/jobs_table_spec.js (renamed from spec/frontend/jobs/components/table/jobs_table_spec.js)19
-rw-r--r--spec/frontend/ci/jobs_page/components/jobs_table_tabs_spec.js (renamed from spec/frontend/jobs/components/table/jobs_table_tabs_spec.js)4
-rw-r--r--spec/frontend/ci/jobs_page/graphql/cache_config_spec.js (renamed from spec/frontend/jobs/components/table/graphql/cache_config_spec.js)4
-rw-r--r--spec/frontend/ci/jobs_page/job_page_app_spec.js (renamed from spec/frontend/jobs/components/table/job_table_app_spec.js)16
-rw-r--r--spec/frontend/ci/merge_requests/components/pipelines_table_wrapper_spec.js117
-rw-r--r--spec/frontend/ci/merge_requests/mock_data.js30
-rw-r--r--spec/frontend/ci/mixins/delayed_job_mixin_spec.js (renamed from spec/frontend/jobs/mixins/delayed_job_mixin_spec.js)2
-rw-r--r--spec/frontend/ci/pipeline_details/dag/components/__snapshots__/dag_graph_spec.js.snap743
-rw-r--r--spec/frontend/ci/pipeline_details/dag/components/dag_annotations_spec.js (renamed from spec/frontend/pipelines/components/dag/dag_annotations_spec.js)4
-rw-r--r--spec/frontend/ci/pipeline_details/dag/components/dag_graph_spec.js (renamed from spec/frontend/pipelines/components/dag/dag_graph_spec.js)12
-rw-r--r--spec/frontend/ci/pipeline_details/dag/dag_spec.js (renamed from spec/frontend/pipelines/components/dag/dag_spec.js)10
-rw-r--r--spec/frontend/ci/pipeline_details/dag/mock_data.js (renamed from spec/frontend/pipelines/components/dag/mock_data.js)0
-rw-r--r--spec/frontend/ci/pipeline_details/dag/utils/drawing_utils_spec.js (renamed from spec/frontend/pipelines/components/dag/drawing_utils_spec.js)6
-rw-r--r--spec/frontend/ci/pipeline_details/graph/components/__snapshots__/links_inner_spec.js.snap110
-rw-r--r--spec/frontend/ci/pipeline_details/graph/components/action_component_spec.js (renamed from spec/frontend/pipelines/graph/action_component_spec.js)2
-rw-r--r--spec/frontend/ci/pipeline_details/graph/components/graph_component_spec.js (renamed from spec/frontend/pipelines/graph/graph_component_spec.js)18
-rw-r--r--spec/frontend/ci/pipeline_details/graph/components/graph_view_selector_spec.js (renamed from spec/frontend/pipelines/graph/graph_view_selector_spec.js)4
-rw-r--r--spec/frontend/ci/pipeline_details/graph/components/job_group_dropdown_spec.js (renamed from spec/frontend/pipelines/graph/job_group_dropdown_spec.js)2
-rw-r--r--spec/frontend/ci/pipeline_details/graph/components/job_item_spec.js (renamed from spec/frontend/pipelines/graph/job_item_spec.js)6
-rw-r--r--spec/frontend/ci/pipeline_details/graph/components/job_name_component_spec.js (renamed from spec/frontend/pipelines/graph/job_name_component_spec.js)2
-rw-r--r--spec/frontend/ci/pipeline_details/graph/components/linked_pipeline_spec.js (renamed from spec/frontend/pipelines/graph/linked_pipeline_spec.js)8
-rw-r--r--spec/frontend/ci/pipeline_details/graph/components/linked_pipelines_column_spec.js (renamed from spec/frontend/pipelines/graph/linked_pipelines_column_spec.js)16
-rw-r--r--spec/frontend/ci/pipeline_details/graph/components/linked_pipelines_mock_data.js (renamed from spec/frontend/pipelines/graph/linked_pipelines_mock_data.js)0
-rw-r--r--spec/frontend/ci/pipeline_details/graph/components/links_inner_spec.js (renamed from spec/frontend/pipelines/graph_shared/links_inner_spec.js)8
-rw-r--r--spec/frontend/ci/pipeline_details/graph/components/stage_column_component_spec.js (renamed from spec/frontend/pipelines/graph/stage_column_component_spec.js)6
-rw-r--r--spec/frontend/ci/pipeline_details/graph/graph_component_wrapper_spec.js (renamed from spec/frontend/pipelines/graph/graph_component_wrapper_spec.js)18
-rw-r--r--spec/frontend/ci/pipeline_details/graph/mock_data.js (renamed from spec/frontend/pipelines/graph/mock_data.js)8
-rw-r--r--spec/frontend/ci/pipeline_details/header/pipeline_details_header_spec.js (renamed from spec/frontend/pipelines/pipeline_details_header_spec.js)14
-rw-r--r--spec/frontend/ci/pipeline_details/jobs/components/failed_jobs_table_spec.js (renamed from spec/frontend/pipelines/components/jobs/failed_jobs_table_spec.js)6
-rw-r--r--spec/frontend/ci/pipeline_details/jobs/failed_jobs_app_spec.js (renamed from spec/frontend/pipelines/components/jobs/failed_jobs_app_spec.js)8
-rw-r--r--spec/frontend/ci/pipeline_details/jobs/jobs_app_spec.js (renamed from spec/frontend/pipelines/components/jobs/jobs_app_spec.js)8
-rw-r--r--spec/frontend/ci/pipeline_details/linked_pipelines_mock.json (renamed from spec/frontend/pipelines/linked_pipelines_mock.json)0
-rw-r--r--spec/frontend/ci/pipeline_details/mock_data.js (renamed from spec/frontend/pipelines/mock_data.js)102
-rw-r--r--spec/frontend/ci/pipeline_details/pipeline_tabs_spec.js (renamed from spec/frontend/pipelines/pipeline_tabs_spec.js)6
-rw-r--r--spec/frontend/ci/pipeline_details/pipelines_store_spec.js (renamed from spec/frontend/pipelines/pipelines_store_spec.js)2
-rw-r--r--spec/frontend/ci/pipeline_details/tabs/pipeline_tabs_spec.js (renamed from spec/frontend/pipelines/components/pipeline_tabs_spec.js)4
-rw-r--r--spec/frontend/ci/pipeline_details/test_reports/empty_state_spec.js (renamed from spec/frontend/pipelines/test_reports/empty_state_spec.js)2
-rw-r--r--spec/frontend/ci/pipeline_details/test_reports/mock_data.js (renamed from spec/frontend/pipelines/test_reports/mock_data.js)2
-rw-r--r--spec/frontend/ci/pipeline_details/test_reports/stores/actions_spec.js (renamed from spec/frontend/pipelines/test_reports/stores/actions_spec.js)4
-rw-r--r--spec/frontend/ci/pipeline_details/test_reports/stores/getters_spec.js (renamed from spec/frontend/pipelines/test_reports/stores/getters_spec.js)4
-rw-r--r--spec/frontend/ci/pipeline_details/test_reports/stores/mutations_spec.js (renamed from spec/frontend/pipelines/test_reports/stores/mutations_spec.js)4
-rw-r--r--spec/frontend/ci/pipeline_details/test_reports/stores/utils_spec.js (renamed from spec/frontend/pipelines/test_reports/stores/utils_spec.js)2
-rw-r--r--spec/frontend/ci/pipeline_details/test_reports/test_case_details_spec.js (renamed from spec/frontend/pipelines/test_reports/test_case_details_spec.js)2
-rw-r--r--spec/frontend/ci/pipeline_details/test_reports/test_reports_spec.js (renamed from spec/frontend/pipelines/test_reports/test_reports_spec.js)10
-rw-r--r--spec/frontend/ci/pipeline_details/test_reports/test_suite_table_spec.js (renamed from spec/frontend/pipelines/test_reports/test_suite_table_spec.js)10
-rw-r--r--spec/frontend/ci/pipeline_details/test_reports/test_summary_spec.js (renamed from spec/frontend/pipelines/test_reports/test_summary_spec.js)4
-rw-r--r--spec/frontend/ci/pipeline_details/test_reports/test_summary_table_spec.js (renamed from spec/frontend/pipelines/test_reports/test_summary_table_spec.js)4
-rw-r--r--spec/frontend/ci/pipeline_details/utils/index_spec.js (renamed from spec/frontend/pipelines/pipeline_graph/utils_spec.js)8
-rw-r--r--spec/frontend/ci/pipeline_details/utils/parsing_utils_spec.js (renamed from spec/frontend/pipelines/utils_spec.js)14
-rw-r--r--spec/frontend/ci/pipeline_details/utils/unwrapping_utils_spec.js (renamed from spec/frontend/pipelines/unwrapping_utils_spec.js)2
-rw-r--r--spec/frontend/ci/pipeline_editor/components/graph/mock_data.js (renamed from spec/frontend/pipelines/pipeline_graph/mock_data.js)0
-rw-r--r--spec/frontend/ci/pipeline_editor/components/graph/pipeline_graph_spec.js (renamed from spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js)10
-rw-r--r--spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_header_spec.js4
-rw-r--r--spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_mini_graph_spec.js4
-rw-r--r--spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js13
-rw-r--r--spec/frontend/ci/pipeline_editor/components/pipeline_editor_tabs_spec.js2
-rw-r--r--spec/frontend/ci/pipeline_editor/mock_data.js2
-rw-r--r--spec/frontend/ci/pipeline_mini_graph/job_item_spec.js (renamed from spec/frontend/pipelines/components/pipeline_mini_graph/job_item_spec.js)2
-rw-r--r--spec/frontend/ci/pipeline_mini_graph/legacy_pipeline_mini_graph_spec.js (renamed from spec/frontend/pipelines/components/pipeline_mini_graph/legacy_pipeline_mini_graph_spec.js)4
-rw-r--r--spec/frontend/ci/pipeline_mini_graph/legacy_pipeline_stage_spec.js (renamed from spec/frontend/pipelines/components/pipeline_mini_graph/legacy_pipeline_stage_spec.js)6
-rw-r--r--spec/frontend/ci/pipeline_mini_graph/linked_pipelines_mini_list_spec.js (renamed from spec/frontend/pipelines/components/pipeline_mini_graph/linked_pipelines_mini_list_spec.js)2
-rw-r--r--spec/frontend/ci/pipeline_mini_graph/linked_pipelines_mock_data.js (renamed from spec/frontend/pipelines/components/pipeline_mini_graph/linked_pipelines_mock_data.js)0
-rw-r--r--spec/frontend/ci/pipeline_mini_graph/mock_data.js (renamed from spec/frontend/pipelines/components/pipeline_mini_graph/mock_data.js)102
-rw-r--r--spec/frontend/ci/pipeline_mini_graph/pipeline_mini_graph_spec.js (renamed from spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_mini_graph_spec.js)8
-rw-r--r--spec/frontend/ci/pipeline_mini_graph/pipeline_stage_spec.js (renamed from spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_stage_spec.js)4
-rw-r--r--spec/frontend/ci/pipeline_mini_graph/pipeline_stages_spec.js (renamed from spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_stages_spec.js)4
-rw-r--r--spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_form_spec.js11
-rw-r--r--spec/frontend/ci/pipeline_schedules/components/table/cells/pipeline_schedule_target_spec.js36
-rw-r--r--spec/frontend/ci/pipeline_schedules/components/take_ownership_modal_legacy_spec.js42
-rw-r--r--spec/frontend/ci/pipeline_schedules/mock_data.js2
-rw-r--r--spec/frontend/ci/pipelines_page/components/empty_state/ci_templates_spec.js (renamed from spec/frontend/pipelines/components/pipelines_list/empty_state/ci_templates_spec.js)2
-rw-r--r--spec/frontend/ci/pipelines_page/components/empty_state/ios_templates_spec.js (renamed from spec/frontend/pipelines/components/pipelines_list/empty_state/ios_templates_spec.js)4
-rw-r--r--spec/frontend/ci/pipelines_page/components/empty_state/no_ci_empty_state_spec.js (renamed from spec/frontend/pipelines/empty_state_spec.js)6
-rw-r--r--spec/frontend/ci/pipelines_page/components/empty_state/pipelines_ci_templates_spec.js (renamed from spec/frontend/pipelines/components/pipelines_list/empty_state/pipelines_ci_templates_spec.js)4
-rw-r--r--spec/frontend/ci/pipelines_page/components/failure_widget/failed_job_details_spec.js (renamed from spec/frontend/pipelines/components/pipelines_list/failure_widget/failed_job_details_spec.js)6
-rw-r--r--spec/frontend/ci/pipelines_page/components/failure_widget/failed_jobs_list_spec.js (renamed from spec/frontend/pipelines/components/pipelines_list/failure_widget/failed_jobs_list_spec.js)8
-rw-r--r--spec/frontend/ci/pipelines_page/components/failure_widget/mock.js (renamed from spec/frontend/pipelines/components/pipelines_list/failure_widget/mock.js)0
-rw-r--r--spec/frontend/ci/pipelines_page/components/failure_widget/pipeline_failed_jobs_widget_spec.js (renamed from spec/frontend/pipelines/components/pipelines_list/failure_widget/pipeline_failed_jobs_widget_spec.js)4
-rw-r--r--spec/frontend/ci/pipelines_page/components/failure_widget/utils_spec.js (renamed from spec/frontend/pipelines/components/pipelines_list/failure_widget/utils_spec.js)5
-rw-r--r--spec/frontend/ci/pipelines_page/components/nav_controls_spec.js (renamed from spec/frontend/pipelines/nav_controls_spec.js)2
-rw-r--r--spec/frontend/ci/pipelines_page/components/pipeline_labels_spec.js (renamed from spec/frontend/pipelines/pipeline_labels_spec.js)4
-rw-r--r--spec/frontend/ci/pipelines_page/components/pipeline_multi_actions_spec.js (renamed from spec/frontend/pipelines/pipeline_multi_actions_spec.js)86
-rw-r--r--spec/frontend/ci/pipelines_page/components/pipeline_operations_spec.js (renamed from spec/frontend/pipelines/pipeline_operations_spec.js)8
-rw-r--r--spec/frontend/ci/pipelines_page/components/pipeline_stop_modal_spec.js (renamed from spec/frontend/pipelines/components/pipelines_list/pipieline_stop_modal_spec.js)4
-rw-r--r--spec/frontend/ci/pipelines_page/components/pipeline_triggerer_spec.js (renamed from spec/frontend/pipelines/pipeline_triggerer_spec.js)2
-rw-r--r--spec/frontend/ci/pipelines_page/components/pipeline_url_spec.js (renamed from spec/frontend/pipelines/pipeline_url_spec.js)10
-rw-r--r--spec/frontend/ci/pipelines_page/components/pipelines_artifacts_spec.js (renamed from spec/frontend/pipelines/pipelines_artifacts_spec.js)2
-rw-r--r--spec/frontend/ci/pipelines_page/components/pipelines_filtered_search_spec.js (renamed from spec/frontend/pipelines/components/pipelines_filtered_search_spec.js)6
-rw-r--r--spec/frontend/ci/pipelines_page/components/pipelines_manual_actions_spec.js (renamed from spec/frontend/pipelines/pipelines_manual_actions_spec.js)6
-rw-r--r--spec/frontend/ci/pipelines_page/components/time_ago_spec.js (renamed from spec/frontend/pipelines/time_ago_spec.js)2
-rw-r--r--spec/frontend/ci/pipelines_page/pipelines_spec.js (renamed from spec/frontend/pipelines/pipelines_spec.js)15
-rw-r--r--spec/frontend/ci/pipelines_page/tokens/pipeline_branch_name_token_spec.js (renamed from spec/frontend/pipelines/tokens/pipeline_branch_name_token_spec.js)4
-rw-r--r--spec/frontend/ci/pipelines_page/tokens/pipeline_source_token_spec.js (renamed from spec/frontend/pipelines/tokens/pipeline_source_token_spec.js)4
-rw-r--r--spec/frontend/ci/pipelines_page/tokens/pipeline_status_token_spec.js (renamed from spec/frontend/pipelines/tokens/pipeline_status_token_spec.js)2
-rw-r--r--spec/frontend/ci/pipelines_page/tokens/pipeline_tag_name_token_spec.js (renamed from spec/frontend/pipelines/tokens/pipeline_tag_name_token_spec.js)4
-rw-r--r--spec/frontend/ci/pipelines_page/tokens/pipeline_trigger_author_token_spec.js (renamed from spec/frontend/pipelines/tokens/pipeline_trigger_author_token_spec.js)4
-rw-r--r--spec/frontend/ci/reports/components/__snapshots__/issue_status_icon_spec.js.snap7
-rw-r--r--spec/frontend/ci/runner/components/cells/runner_summary_cell_spec.js10
-rw-r--r--spec/frontend/ci/runner/components/runner_create_form_spec.js1
-rw-r--r--spec/frontend/ci/runner/components/runner_form_fields_spec.js3
-rw-r--r--spec/frontend/ci/runner/components/runner_managers_table_spec.js4
-rw-r--r--spec/frontend/ci/runner/components/runner_update_form_spec.js2
-rw-r--r--spec/frontend/ci_secure_files/components/metadata/__snapshots__/modal_spec.js.snap101
-rw-r--r--spec/frontend/clusters/components/__snapshots__/new_cluster_spec.js.snap19
-rw-r--r--spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap103
-rw-r--r--spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap24
-rw-r--r--spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap47
-rw-r--r--spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js8
-rw-r--r--spec/frontend/commit/pipelines/legacy_pipelines_table_wrapper_spec.js (renamed from spec/frontend/commit/pipelines/pipelines_table_spec.js)6
-rw-r--r--spec/frontend/confidential_merge_request/components/__snapshots__/project_form_group_spec.js.snap26
-rw-r--r--spec/frontend/content_editor/components/__snapshots__/toolbar_button_spec.js.snap20
-rw-r--r--spec/frontend/content_editor/components/wrappers/__snapshots__/table_of_contents_spec.js.snap54
-rw-r--r--spec/frontend/content_editor/components/wrappers/code_block_spec.js71
-rw-r--r--spec/frontend/content_editor/services/markdown_serializer_spec.js2
-rw-r--r--spec/frontend/contribution_events/components/contribution_event/contribution_event_created_spec.js18
-rw-r--r--spec/frontend/contribution_events/components/contribution_event/contribution_event_destroyed_spec.js32
-rw-r--r--spec/frontend/contribution_events/components/contribution_event/contribution_event_updated_spec.js31
-rw-r--r--spec/frontend/contribution_events/components/contribution_events_spec.js8
-rw-r--r--spec/frontend/contribution_events/components/target_link_spec.js2
-rw-r--r--spec/frontend/contribution_events/utils.js95
-rw-r--r--spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap20
-rw-r--r--spec/frontend/custom_emoji/components/__snapshots__/list_spec.js.snap58
-rw-r--r--spec/frontend/design_management/components/__snapshots__/design_presentation_spec.js.snap30
-rw-r--r--spec/frontend/design_management/components/__snapshots__/image_spec.js.snap18
-rw-r--r--spec/frontend/design_management/components/design_notes/__snapshots__/design_note_signed_out_spec.js.snap12
-rw-r--r--spec/frontend/design_management/components/design_notes/__snapshots__/design_reply_form_spec.js.snap36
-rw-r--r--spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap66
-rw-r--r--spec/frontend/design_management/components/list/item_spec.js4
-rw-r--r--spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap19
-rw-r--r--spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap10
-rw-r--r--spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap65
-rw-r--r--spec/frontend/diffs/components/app_spec.js23
-rw-r--r--spec/frontend/diffs/components/diff_inline_findings_item_spec.js51
-rw-r--r--spec/frontend/diffs/components/diff_inline_findings_spec.js6
-rw-r--r--spec/frontend/diffs/components/diff_row_spec.js3
-rw-r--r--spec/frontend/diffs/components/inline_findings_spec.js6
-rw-r--r--spec/frontend/diffs/components/shared/__snapshots__/findings_drawer_spec.js.snap31
-rw-r--r--spec/frontend/diffs/mock_data/inline_findings.js60
-rw-r--r--spec/frontend/drawio/drawio_editor_spec.js5
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/include.yml11
-rw-r--r--spec/frontend/emoji/index_spec.js17
-rw-r--r--spec/frontend/environments/edit_environment_spec.js20
-rw-r--r--spec/frontend/environments/environment_form_spec.js47
-rw-r--r--spec/frontend/environments/new_environment_item_spec.js45
-rw-r--r--spec/frontend/feature_flags/components/new_environments_dropdown_spec.js80
-rw-r--r--spec/frontend/feature_flags/components/strategy_spec.js13
-rw-r--r--spec/frontend/filtered_search/filtered_search_manager_spec.js2
-rw-r--r--spec/frontend/fixtures/abuse_reports.rb28
-rw-r--r--spec/frontend/fixtures/issues.rb2
-rw-r--r--spec/frontend/fixtures/jobs.rb10
-rw-r--r--spec/frontend/fixtures/pipeline_header.rb2
-rw-r--r--spec/frontend/fixtures/pipeline_schedules.rb29
-rw-r--r--spec/frontend/fixtures/pipelines.rb2
-rw-r--r--spec/frontend/fixtures/snippet.rb4
-rw-r--r--spec/frontend/groups/components/empty_states/groups_dashboard_empty_state_spec.js29
-rw-r--r--spec/frontend/groups/components/empty_states/groups_explore_empty_state_spec.js27
-rw-r--r--spec/frontend/ide/components/file_templates/dropdown_spec.js168
-rw-r--r--spec/frontend/ide/components/pipelines/__snapshots__/list_spec.js.snap4
-rw-r--r--spec/frontend/ide/init_gitlab_web_ide_spec.js3
-rw-r--r--spec/frontend/ide/lib/gitlab_web_ide/setup_root_element_spec.js4
-rw-r--r--spec/frontend/import_entities/import_groups/components/import_table_spec.js2
-rw-r--r--spec/frontend/incidents/components/incidents_list_spec.js2
-rw-r--r--spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap14
-rw-r--r--spec/frontend/integrations/index/mock_data.js6
-rw-r--r--spec/frontend/invite_members/components/invite_members_modal_spec.js145
-rw-r--r--spec/frontend/invite_members/mock_data/api_responses.js6
-rw-r--r--spec/frontend/invite_members/mock_data/member_modal.js14
-rw-r--r--spec/frontend/invite_members/utils/member_utils_spec.js22
-rw-r--r--spec/frontend/issuable/components/csv_export_modal_spec.js2
-rw-r--r--spec/frontend/issuable/components/issuable_header_warnings_spec.js105
-rw-r--r--spec/frontend/issuable/components/status_badge_spec.js43
-rw-r--r--spec/frontend/issuable/components/status_box_spec.js50
-rw-r--r--spec/frontend/issuable/popover/components/issue_popover_spec.js6
-rw-r--r--spec/frontend/issues/dashboard/components/issues_dashboard_app_spec.js1
-rw-r--r--spec/frontend/issues/dashboard/mock_data.js1
-rw-r--r--spec/frontend/issues/list/components/issue_card_time_info_spec.js125
-rw-r--r--spec/frontend/issues/list/components/issues_list_app_spec.js1
-rw-r--r--spec/frontend/issues/list/mock_data.js1
-rw-r--r--spec/frontend/issues/new/components/__snapshots__/type_popover_spec.js.snap10
-rw-r--r--spec/frontend/issues/service_desk/components/empty_state_with_any_issues_spec.js (renamed from spec/frontend/service_desk/components/empty_state_with_any_issues_spec.js)4
-rw-r--r--spec/frontend/issues/service_desk/components/empty_state_without_any_issues_spec.js (renamed from spec/frontend/service_desk/components/empty_state_without_any_issues_spec.js)8
-rw-r--r--spec/frontend/issues/service_desk/components/info_banner_spec.js (renamed from spec/frontend/service_desk/components/info_banner_spec.js)4
-rw-r--r--spec/frontend/issues/service_desk/components/service_desk_list_app_spec.js717
-rw-r--r--spec/frontend/issues/service_desk/mock_data.js (renamed from spec/frontend/service_desk/mock_data.js)17
-rw-r--r--spec/frontend/issues/show/components/app_spec.js154
-rw-r--r--spec/frontend/issues/show/components/sticky_header_spec.js135
-rw-r--r--spec/frontend/issues/show/components/task_list_item_actions_spec.js52
-rw-r--r--spec/frontend/issues/show/issue_spec.js43
-rw-r--r--spec/frontend/issues/show/mock_data/mock_data.js3
-rw-r--r--spec/frontend/issues/show/store_spec.js39
-rw-r--r--spec/frontend/jira_connect/branches/components/source_branch_dropdown_spec.js146
-rw-r--r--spec/frontend/jira_connect/branches/mock_data.js15
-rw-r--r--spec/frontend/jira_connect/subscriptions/components/__snapshots__/group_item_name_spec.js.snap13
-rw-r--r--spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap86
-rw-r--r--spec/frontend/jobs/components/filtered_search/jobs_filtered_search_spec.js71
-rw-r--r--spec/frontend/jobs/components/filtered_search/utils_spec.js19
-rw-r--r--spec/frontend/lib/utils/array_utility_spec.js36
-rw-r--r--spec/frontend/lib/utils/breadcrumbs_spec.js84
-rw-r--r--spec/frontend/lib/utils/common_utils_spec.js39
-rw-r--r--spec/frontend/lib/utils/datetime_range_spec.js382
-rw-r--r--spec/frontend/lib/utils/secret_detection_spec.js1
-rw-r--r--spec/frontend/lib/utils/text_utility_spec.js17
-rw-r--r--spec/frontend/lib/utils/url_utility_spec.js10
-rw-r--r--spec/frontend/members/components/table/__snapshots__/member_activity_spec.js.snap26
-rw-r--r--spec/frontend/merge_request_tabs_spec.js16
-rw-r--r--spec/frontend/merge_requests/components/compare_app_spec.js54
-rw-r--r--spec/frontend/merge_requests/components/header_metadata_spec.js93
-rw-r--r--spec/frontend/nav/components/top_nav_new_dropdown_spec.js3
-rw-r--r--spec/frontend/notes/components/__snapshots__/notes_app_spec.js.snap40
-rw-r--r--spec/frontend/notes/components/comment_form_spec.js5
-rw-r--r--spec/frontend/notes/components/notes_app_spec.js1
-rw-r--r--spec/frontend/notes/stores/actions_spec.js174
-rw-r--r--spec/frontend/observability/client_spec.js129
-rw-r--r--spec/frontend/organizations/groups_and_projects/components/app_spec.js19
-rw-r--r--spec/frontend/organizations/groups_and_projects/components/groups_page_spec.js88
-rw-r--r--spec/frontend/organizations/groups_and_projects/components/projects_page_spec.js88
-rw-r--r--spec/frontend/organizations/groups_and_projects/mock_data.js252
-rw-r--r--spec/frontend/organizations/shared/components/groups_view_spec.js146
-rw-r--r--spec/frontend/organizations/shared/components/projects_view_spec.js146
-rw-r--r--spec/frontend/organizations/shared/utils_spec.js (renamed from spec/frontend/organizations/groups_and_projects/utils_spec.js)14
-rw-r--r--spec/frontend/organizations/show/components/app_spec.js49
-rw-r--r--spec/frontend/organizations/show/components/association_count_card_spec.js48
-rw-r--r--spec/frontend/organizations/show/components/association_counts_spec.js61
-rw-r--r--spec/frontend/organizations/show/components/groups_and_projects_spec.js106
-rw-r--r--spec/frontend/organizations/show/components/organization_avatar_spec.js64
-rw-r--r--spec/frontend/organizations/show/utils_spec.js20
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/__snapshots__/tags_loader_spec.js.snap6
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/__snapshots__/group_empty_state_spec.js.snap2
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/__snapshots__/project_empty_state_spec.js.snap18
-rw-r--r--spec/frontend/packages_and_registries/dependency_proxy/app_spec.js71
-rw-r--r--spec/frontend/packages_and_registries/dependency_proxy/utils_spec.js25
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/__snapshots__/file_sha_spec.js.snap11
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/__snapshots__/terraform_installation_spec.js.snap4
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/__snapshots__/packages_list_app_spec.js.snap25
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/shared/__snapshots__/package_list_row_spec.js.snap42
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/conan_installation_spec.js.snap5
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/dependency_row_spec.js.snap8
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/file_sha_spec.js.snap11
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/maven_installation_spec.js.snap67
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/npm_installation_spec.js.snap8
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/nuget_installation_spec.js.snap5
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap112
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap47
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/publish_method_spec.js.snap6
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js20
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js105
-rw-r--r--spec/frontend/packages_and_registries/package_registry/pages/list_spec.js21
-rw-r--r--spec/frontend/packages_and_registries/package_registry/utils_spec.js52
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/container_expiration_policy_form_spec.js.snap2
-rw-r--r--spec/frontend/packages_and_registries/shared/components/__snapshots__/publish_method_spec.js.snap6
-rw-r--r--spec/frontend/packages_and_registries/shared/components/__snapshots__/registry_breadcrumb_spec.js.snap24
-rw-r--r--spec/frontend/packages_and_registries/shared/components/cli_commands_spec.js4
-rw-r--r--spec/frontend/packages_and_registries/shared/components/persisted_search_spec.js4
-rw-r--r--spec/frontend/pages/admin/abuse_reports/abuse_reports_spec.js48
-rw-r--r--spec/frontend/pages/admin/jobs/components/cancel_jobs_spec.js9
-rw-r--r--spec/frontend/pages/admin/jobs/components/table/admin_job_table_app_spec.js105
-rw-r--r--spec/frontend/pages/import/bitbucket_server/components/bitbucket_server_status_table_spec.js6
-rw-r--r--spec/frontend/pages/projects/pipeline_schedules/shared/components/pipeline_schedule_callout_spec.js92
-rw-r--r--spec/frontend/pipelines/components/dag/__snapshots__/dag_graph_spec.js.snap230
-rw-r--r--spec/frontend/pipelines/graph_shared/__snapshots__/links_inner_spec.js.snap30
-rw-r--r--spec/frontend/pipelines/notification/mock_data.js33
-rw-r--r--spec/frontend/profile/preferences/components/__snapshots__/diffs_colors_preview_spec.js.snap210
-rw-r--r--spec/frontend/projects/commit/components/form_modal_spec.js9
-rw-r--r--spec/frontend/projects/compare/components/revision_dropdown_legacy_spec.js136
-rw-r--r--spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap3
-rw-r--r--spec/frontend/projects/pipelines/charts/components/__snapshots__/ci_cd_analytics_area_chart_spec.js.snap1
-rw-r--r--spec/frontend/projects/pipelines/charts/components/__snapshots__/statistics_list_spec.js.snap8
-rw-r--r--spec/frontend/projects/settings/access_dropdown_spec.js204
-rw-r--r--spec/frontend/projects/settings/components/new_access_dropdown_spec.js55
-rw-r--r--spec/frontend/projects/settings_service_desk/components/custom_email_form_spec.js13
-rw-r--r--spec/frontend/projects/settings_service_desk/components/custom_email_wrapper_spec.js21
-rw-r--r--spec/frontend/protected_branches/protected_branch_create_spec.js51
-rw-r--r--spec/frontend/protected_branches/protected_branch_edit_spec.js2
-rw-r--r--spec/frontend/protected_tags/mock_data.js18
-rw-r--r--spec/frontend/protected_tags/protected_tag_edit_spec.js113
-rw-r--r--spec/frontend/releases/__snapshots__/util_spec.js.snap48
-rw-r--r--spec/frontend/releases/components/__snapshots__/issuable_stats_spec.js.snap67
-rw-r--r--spec/frontend/releases/components/release_block_milestone_info_spec.js6
-rw-r--r--spec/frontend/repository/components/__snapshots__/directory_download_links_spec.js.snap18
-rw-r--r--spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap44
-rw-r--r--spec/frontend/repository/components/blob_content_viewer_spec.js81
-rw-r--r--spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap114
-rw-r--r--spec/frontend/repository/mock_data.js11
-rw-r--r--spec/frontend/search/mock_data.js2
-rw-r--r--spec/frontend/search/sidebar/components/app_spec.js117
-rw-r--r--spec/frontend/search/sidebar/components/blobs_filters_spec.js85
-rw-r--r--spec/frontend/search/sidebar/components/commits_filters_spec.js28
-rw-r--r--spec/frontend/search/sidebar/components/issues_filters_spec.js98
-rw-r--r--spec/frontend/search/sidebar/components/merge_requests_filters_spec.js123
-rw-r--r--spec/frontend/search/sidebar/components/notes_filters_spec.js28
-rw-r--r--spec/frontend/search/sidebar/components/projects_filters_spec.js (renamed from spec/frontend/search/sidebar/components/projects_filters_specs.js)2
-rw-r--r--spec/frontend/search/sidebar/components/small_screen_drawer_navigation_spec.js68
-rw-r--r--spec/frontend/search/store/actions_spec.js16
-rw-r--r--spec/frontend/security_configuration/components/continuous_vulnerability_scan_spec.js124
-rw-r--r--spec/frontend/security_configuration/components/feature_card_spec.js18
-rw-r--r--spec/frontend/security_configuration/utils_spec.js64
-rw-r--r--spec/frontend/sentry/index_spec.js104
-rw-r--r--spec/frontend/sentry/init_sentry_spec.js177
-rw-r--r--spec/frontend/sentry/legacy_index_spec.js6
-rw-r--r--spec/frontend/sentry/sentry_config_spec.js103
-rw-r--r--spec/frontend/service_desk/components/service_desk_list_app_spec.js376
-rw-r--r--spec/frontend/sidebar/components/assignees/assignees_spec.js2
-rw-r--r--spec/frontend/sidebar/components/assignees/sidebar_invite_members_spec.js2
-rw-r--r--spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js2
-rw-r--r--spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js86
-rw-r--r--spec/frontend/sidebar/components/incidents/sidebar_escalation_status_spec.js7
-rw-r--r--spec/frontend/sidebar/components/labels/labels_select_widget/dropdown_contents_create_view_spec.js27
-rw-r--r--spec/frontend/sidebar/components/labels/labels_select_widget/dropdown_footer_spec.js37
-rw-r--r--spec/frontend/sidebar/components/lock/__snapshots__/edit_form_spec.js.snap4
-rw-r--r--spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js6
-rw-r--r--spec/frontend/sidebar/components/todo_toggle/__snapshots__/todo_spec.js.snap4
-rw-r--r--spec/frontend/sidebar/mock_data.js80
-rw-r--r--spec/frontend/silent_mode_settings/components/app_spec.js133
-rw-r--r--spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap3
-rw-r--r--spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap26
-rw-r--r--spec/frontend/snippets/components/__snapshots__/snippet_description_view_spec.js.snap2
-rw-r--r--spec/frontend/snippets/components/__snapshots__/snippet_visibility_edit_spec.js.snap25
-rw-r--r--spec/frontend/snippets/components/embed_dropdown_spec.js48
-rw-r--r--spec/frontend/super_sidebar/components/context_header_spec.js50
-rw-r--r--spec/frontend/super_sidebar/components/context_switcher_spec.js302
-rw-r--r--spec/frontend/super_sidebar/components/context_switcher_toggle_spec.js39
-rw-r--r--spec/frontend/super_sidebar/components/create_menu_spec.js21
-rw-r--r--spec/frontend/super_sidebar/components/flyout_menu_spec.js16
-rw-r--r--spec/frontend/super_sidebar/components/frequent_items_list_spec.js85
-rw-r--r--spec/frontend/super_sidebar/components/global_search/command_palette/__snapshots__/search_item_spec.js.snap27
-rw-r--r--spec/frontend/super_sidebar/components/global_search/command_palette/command_palette_items_spec.js42
-rw-r--r--spec/frontend/super_sidebar/components/global_search/command_palette/mock_data.js17
-rw-r--r--spec/frontend/super_sidebar/components/global_search/command_palette/utils_spec.js4
-rw-r--r--spec/frontend/super_sidebar/components/global_search/components/global_search_default_places_spec.js16
-rw-r--r--spec/frontend/super_sidebar/components/global_search/components/global_search_spec.js63
-rw-r--r--spec/frontend/super_sidebar/components/global_search/mock_data.js44
-rw-r--r--spec/frontend/super_sidebar/components/global_search/utils_spec.js88
-rw-r--r--spec/frontend/super_sidebar/components/groups_list_spec.js90
-rw-r--r--spec/frontend/super_sidebar/components/items_list_spec.js63
-rw-r--r--spec/frontend/super_sidebar/components/menu_section_spec.js36
-rw-r--r--spec/frontend/super_sidebar/components/nav_item_spec.js97
-rw-r--r--spec/frontend/super_sidebar/components/pinned_section_spec.js29
-rw-r--r--spec/frontend/super_sidebar/components/projects_list_spec.js85
-rw-r--r--spec/frontend/super_sidebar/components/search_results_spec.js69
-rw-r--r--spec/frontend/super_sidebar/components/sidebar_hover_peek_behavior_spec.js213
-rw-r--r--spec/frontend/super_sidebar/components/sidebar_menu_spec.js69
-rw-r--r--spec/frontend/super_sidebar/components/sidebar_peek_behavior_spec.js25
-rw-r--r--spec/frontend/super_sidebar/components/super_sidebar_spec.js111
-rw-r--r--spec/frontend/super_sidebar/components/super_sidebar_toggle_spec.js23
-rw-r--r--spec/frontend/super_sidebar/components/user_bar_spec.js20
-rw-r--r--spec/frontend/super_sidebar/components/user_menu_spec.js21
-rw-r--r--spec/frontend/super_sidebar/mock_data.js46
-rw-r--r--spec/frontend/super_sidebar/mocks.js24
-rw-r--r--spec/frontend/super_sidebar/utils_spec.js78
-rw-r--r--spec/frontend/time_tracking/components/timelogs_app_spec.js25
-rw-r--r--spec/frontend/tracing/components/tracing_details_spec.js103
-rw-r--r--spec/frontend/tracing/components/tracing_empty_state_spec.js39
-rw-r--r--spec/frontend/tracing/components/tracing_list_filtered_search_spec.js38
-rw-r--r--spec/frontend/tracing/components/tracing_list_spec.js216
-rw-r--r--spec/frontend/tracing/components/tracing_table_list_spec.js77
-rw-r--r--spec/frontend/tracing/details_index_spec.js42
-rw-r--r--spec/frontend/tracing/filters_spec.js141
-rw-r--r--spec/frontend/tracing/list_index_spec.js37
-rw-r--r--spec/frontend/tracking/dispatch_snowplow_event_spec.js76
-rw-r--r--spec/frontend/tracking/internal_events_spec.js147
-rw-r--r--spec/frontend/tracking/mock_data.js17
-rw-r--r--spec/frontend/tracking/tracking_initialization_spec.js29
-rw-r--r--spec/frontend/usage_quotas/storage/components/project_storage_app_spec.js29
-rw-r--r--spec/frontend/usage_quotas/storage/components/usage_graph_spec.js125
-rw-r--r--spec/frontend/user_lists/components/user_list_spec.js2
-rw-r--r--spec/frontend/users_select/test_helper.js3
-rw-r--r--spec/frontend/vue_merge_request_widget/components/action_buttons_spec.js (renamed from spec/frontend/vue_merge_request_widget/components/action_buttons.js)26
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_spec.js2
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/__snapshots__/new_ready_to_merge_spec.js.snap18
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_merged_spec.js6
-rw-r--r--spec/frontend/vue_merge_request_widget/components/widget/__snapshots__/dynamic_content_spec.js.snap154
-rw-r--r--spec/frontend/vue_merge_request_widget/components/widget/app_spec.js45
-rw-r--r--spec/frontend/vue_merge_request_widget/deployment/deployment_actions_spec.js72
-rw-r--r--spec/frontend/vue_merge_request_widget/deployment/deployment_mock_data.js7
-rw-r--r--spec/frontend/vue_merge_request_widget/deployment/deployment_spec.js31
-rw-r--r--spec/frontend/vue_merge_request_widget/extensions/test_report/index_spec.js16
-rw-r--r--spec/frontend/vue_merge_request_widget/mock_data.js168
-rw-r--r--spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js811
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap32
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/integration_help_text_spec.js.snap6
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/source_editor_spec.js.snap4
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap59
-rw-r--r--spec/frontend/vue_shared/components/badges/__snapshots__/beta_badge_spec.js.snap6
-rw-r--r--spec/frontend/vue_shared/components/blob_viewers/__snapshots__/simple_viewer_spec.js.snap29
-rw-r--r--spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js77
-rw-r--r--spec/frontend/vue_shared/components/ci_badge_link_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/code_block_highlighted_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/code_block_spec.js56
-rw-r--r--spec/frontend/vue_shared/components/confidentiality_badge_spec.js42
-rw-r--r--spec/frontend/vue_shared/components/confirm_danger/confirm_danger_modal_spec.js13
-rw-r--r--spec/frontend/vue_shared/components/date_time_picker/date_time_picker_input_spec.js62
-rw-r--r--spec/frontend/vue_shared/components/date_time_picker/date_time_picker_lib_spec.js190
-rw-r--r--spec/frontend/vue_shared/components/date_time_picker/date_time_picker_spec.js326
-rw-r--r--spec/frontend/vue_shared/components/design_management/__snapshots__/design_note_pin_spec.js.snap12
-rw-r--r--spec/frontend/vue_shared/components/entity_select/utils_spec.js12
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js16
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js45
-rw-r--r--spec/frontend/vue_shared/components/form/__snapshots__/form_footer_actions_spec.js.snap2
-rw-r--r--spec/frontend/vue_shared/components/gl_modal_vuex_spec.js10
-rw-r--r--spec/frontend/vue_shared/components/groups_list/groups_list_item_spec.js69
-rw-r--r--spec/frontend/vue_shared/components/groups_list/groups_list_spec.js14
-rw-r--r--spec/frontend/vue_shared/components/groups_list/mock_data.js6
-rw-r--r--spec/frontend/vue_shared/components/list_actions/list_actions_spec.js135
-rw-r--r--spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap3
-rw-r--r--spec/frontend/vue_shared/components/markdown/apply_suggestion_spec.js23
-rw-r--r--spec/frontend/vue_shared/components/markdown/comment_templates_dropdown_spec.js35
-rw-r--r--spec/frontend/vue_shared/components/markdown/field_view_spec.js22
-rw-r--r--spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js5
-rw-r--r--spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js7
-rw-r--r--spec/frontend/vue_shared/components/metric_images/__snapshots__/metric_images_table_spec.js.snap13
-rw-r--r--spec/frontend/vue_shared/components/notes/__snapshots__/noteable_warning_spec.js.snap18
-rw-r--r--spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap15
-rw-r--r--spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_system_note_spec.js.snap2
-rw-r--r--spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/pagination_bar/pagination_bar_spec.js12
-rw-r--r--spec/frontend/vue_shared/components/projects_list/projects_list_item_spec.js33
-rw-r--r--spec/frontend/vue_shared/components/registry/__snapshots__/code_instruction_spec.js.snap14
-rw-r--r--spec/frontend/vue_shared/components/registry/__snapshots__/history_item_spec.js.snap6
-rw-r--r--spec/frontend/vue_shared/components/resizable_chart/__snapshots__/skeleton_loader_spec.js.snap12
-rw-r--r--spec/frontend/vue_shared/components/settings/__snapshots__/settings_block_spec.js.snap15
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/components/__snapshots__/chunk_new_spec.js.snap13
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/components/chunk_new_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js3
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js35
-rw-r--r--spec/frontend/vue_shared/components/split_button_spec.js117
-rw-r--r--spec/frontend/vue_shared/components/upload_dropzone/__snapshots__/upload_dropzone_spec.js.snap328
-rw-r--r--spec/frontend/vue_shared/components/user_select_spec.js28
-rw-r--r--spec/frontend/vue_shared/issuable/__snapshots__/issuable_blocked_icon_spec.js.snap30
-rw-r--r--spec/frontend/vue_shared/issuable/create/components/issuable_form_spec.js2
-rw-r--r--spec/frontend/vue_shared/issuable/issuable_blocked_icon_spec.js4
-rw-r--r--spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js33
-rw-r--r--spec/frontend/vue_shared/issuable/list/mock_data.js2
-rw-r--r--spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js12
-rw-r--r--spec/frontend/vue_shared/issuable/show/components/issuable_title_spec.js4
-rw-r--r--spec/frontend/vue_shared/issuable/show/mock_data.js1
-rw-r--r--spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js20
-rw-r--r--spec/frontend/vue_shared/security_reports/components/security_report_download_dropdown_spec.js64
-rw-r--r--spec/frontend/webhooks/components/__snapshots__/push_events_spec.js.snap168
-rw-r--r--spec/frontend/work_items/components/notes/__snapshots__/work_item_note_body_spec.js.snap40
-rw-r--r--spec/frontend/work_items/components/notes/__snapshots__/work_item_note_replying_spec.js.snap12
-rw-r--r--spec/frontend/work_items/components/notes/work_item_activity_sort_filter_spec.js27
-rw-r--r--spec/frontend/work_items/components/notes/work_item_add_note_spec.js14
-rw-r--r--spec/frontend/work_items/components/notes/work_item_note_spec.js7
-rw-r--r--spec/frontend/work_items/components/shared/work_item_link_child_contents_spec.js14
-rw-r--r--spec/frontend/work_items/components/shared/work_item_links_menu_spec.js8
-rw-r--r--spec/frontend/work_items/components/shared/work_item_token_input_spec.js81
-rw-r--r--spec/frontend/work_items/components/work_item_actions_spec.js21
-rw-r--r--spec/frontend/work_items/components/work_item_detail_spec.js84
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js2
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js50
-rw-r--r--spec/frontend/work_items/components/work_item_notes_spec.js12
-rw-r--r--spec/frontend/work_items/components/work_item_relationships/__snapshots__/work_item_relationship_list_spec.js.snap29
-rw-r--r--spec/frontend/work_items/components/work_item_relationships/work_item_relationship_list_spec.js41
-rw-r--r--spec/frontend/work_items/components/work_item_relationships/work_item_relationships_spec.js93
-rw-r--r--spec/frontend/work_items/components/work_item_state_badge_spec.js5
-rw-r--r--spec/frontend/work_items/list/components/work_items_list_app_spec.js18
-rw-r--r--spec/frontend/work_items/mock_data.js192
-rw-r--r--spec/frontend/work_items/utils_spec.js13
-rw-r--r--spec/graphql/mutations/base_mutation_spec.rb2
-rw-r--r--spec/graphql/mutations/design_management/delete_spec.rb35
-rw-r--r--spec/graphql/mutations/work_items/linked_items/base_spec.rb3
-rw-r--r--spec/graphql/resolvers/base_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/blame_resolver_spec.rb81
-rw-r--r--spec/graphql/resolvers/branch_commit_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/ci/all_jobs_resolver_spec.rb117
-rw-r--r--spec/graphql/resolvers/ci/group_runners_resolver_spec.rb20
-rw-r--r--spec/graphql/resolvers/ci/project_runners_resolver_spec.rb16
-rw-r--r--spec/graphql/resolvers/ci/runners_resolver_spec.rb30
-rw-r--r--spec/graphql/resolvers/metrics/dashboards/annotation_resolver_spec.rb16
-rw-r--r--spec/graphql/resolvers/work_items_resolver_spec.rb5
-rw-r--r--spec/graphql/types/base_argument_spec.rb2
-rw-r--r--spec/graphql/types/base_edge_spec.rb2
-rw-r--r--spec/graphql/types/base_enum_spec.rb2
-rw-r--r--spec/graphql/types/base_field_spec.rb2
-rw-r--r--spec/graphql/types/base_object_spec.rb2
-rw-r--r--spec/graphql/types/blame/blame_type_spec.rb16
-rw-r--r--spec/graphql/types/blame/commit_data_type_spec.rb21
-rw-r--r--spec/graphql/types/blame/groups_type_spec.rb19
-rw-r--r--spec/graphql/types/ci/job_base_field_spec.rb143
-rw-r--r--spec/graphql/types/ci/job_kind_enum_spec.rb2
-rw-r--r--spec/graphql/types/ci/job_trace_type_spec.rb184
-rw-r--r--spec/graphql/types/ci/job_type_spec.rb1
-rw-r--r--spec/graphql/types/issue_type_spec.rb2
-rw-r--r--spec/graphql/types/label_type_spec.rb1
-rw-r--r--spec/graphql/types/merge_request_type_spec.rb2
-rw-r--r--spec/graphql/types/organizations/group_sort_enum_spec.rb26
-rw-r--r--spec/graphql/types/organizations/organization_type_spec.rb11
-rw-r--r--spec/graphql/types/organizations/organization_user_type_spec.rb11
-rw-r--r--spec/graphql/types/permission_types/work_item_spec.rb2
-rw-r--r--spec/graphql/types/project_type_spec.rb11
-rw-r--r--spec/graphql/types/query_type_spec.rb10
-rw-r--r--spec/graphql/types/repository/blob_type_spec.rb1
-rw-r--r--spec/graphql/types/security/codequality_reports_comparer/degradation_type_spec.rb13
-rw-r--r--spec/graphql/types/security/codequality_reports_comparer/report_type_spec.rb13
-rw-r--r--spec/graphql/types/security/codequality_reports_comparer/status_enum_spec.rb11
-rw-r--r--spec/graphql/types/security/codequality_reports_comparer/summary_type_spec.rb13
-rw-r--r--spec/graphql/types/security/codequality_reports_comparer_type_spec.rb11
-rw-r--r--spec/helpers/admin/abuse_reports_helper_spec.rb8
-rw-r--r--spec/helpers/application_helper_spec.rb100
-rw-r--r--spec/helpers/artifacts_helper_spec.rb3
-rw-r--r--spec/helpers/button_helper_spec.rb98
-rw-r--r--spec/helpers/ci/status_helper_spec.rb23
-rw-r--r--spec/helpers/environment_helper_spec.rb9
-rw-r--r--spec/helpers/environments_helper_spec.rb12
-rw-r--r--spec/helpers/icons_helper_spec.rb11
-rw-r--r--spec/helpers/integrations_helper_spec.rb20
-rw-r--r--spec/helpers/invite_members_helper_spec.rb56
-rw-r--r--spec/helpers/issuables_helper_spec.rb325
-rw-r--r--spec/helpers/issues_helper_spec.rb84
-rw-r--r--spec/helpers/members_helper_spec.rb6
-rw-r--r--spec/helpers/nav/new_dropdown_helper_spec.rb2
-rw-r--r--spec/helpers/nav_helper_spec.rb9
-rw-r--r--spec/helpers/organizations/organization_helper_spec.rb65
-rw-r--r--spec/helpers/projects/observability_helper_spec.rb37
-rw-r--r--spec/helpers/projects/pipeline_helper_spec.rb2
-rw-r--r--spec/helpers/projects_helper_spec.rb16
-rw-r--r--spec/helpers/registrations_helper_spec.rb2
-rw-r--r--spec/helpers/sidebars_helper_spec.rb229
-rw-r--r--spec/helpers/sidekiq_helper_spec.rb2
-rw-r--r--spec/helpers/vite_helper_spec.rb59
-rw-r--r--spec/helpers/webpack_helper_spec.rb18
-rw-r--r--spec/helpers/work_items_helper_spec.rb14
-rw-r--r--spec/initializers/action_cable_subscription_adapter_identifier_spec.rb3
-rw-r--r--spec/initializers/mail_starttls_patch_spec.rb31
-rw-r--r--spec/initializers/sidekiq_spec.rb2
-rw-r--r--spec/lib/api/ci/helpers/runner_spec.rb34
-rw-r--r--spec/lib/api/entities/merge_request_basic_spec.rb2
-rw-r--r--spec/lib/api/entities/merge_request_diff_spec.rb44
-rw-r--r--spec/lib/api/entities/ml/mlflow/get_run_spec.rb63
-rw-r--r--spec/lib/api/entities/ml/mlflow/run_info_spec.rb2
-rw-r--r--spec/lib/api/entities/ml/mlflow/run_spec.rb20
-rw-r--r--spec/lib/api/entities/ml/mlflow/search_runs_spec.rb37
-rw-r--r--spec/lib/api/entities/project_spec.rb2
-rw-r--r--spec/lib/api/helpers/packages_helpers_spec.rb4
-rw-r--r--spec/lib/api/helpers_spec.rb222
-rw-r--r--spec/lib/api/ml/mlflow/api_helpers_spec.rb24
-rw-r--r--spec/lib/backup/database_model_spec.rb82
-rw-r--r--spec/lib/backup/database_spec.rb92
-rw-r--r--spec/lib/backup/gitaly_backup_spec.rb14
-rw-r--r--spec/lib/backup/repositories_spec.rb46
-rw-r--r--spec/lib/banzai/filter/code_language_filter_spec.rb36
-rw-r--r--spec/lib/banzai/filter/inline_diff_filter_spec.rb2
-rw-r--r--spec/lib/bitbucket/representation/pull_request_spec.rb51
-rw-r--r--spec/lib/bulk_imports/common/graphql/get_members_query_spec.rb24
-rw-r--r--spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb6
-rw-r--r--spec/lib/bulk_imports/common/transformers/member_attributes_transformer_spec.rb53
-rw-r--r--spec/lib/bulk_imports/file_downloads/validations_spec.rb2
-rw-r--r--spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb27
-rw-r--r--spec/lib/bulk_imports/network_error_spec.rb30
-rw-r--r--spec/lib/bulk_imports/pipeline/runner_spec.rb2
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb34
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb125
-rw-r--r--spec/lib/bulk_imports/users_mapper_spec.rb26
-rw-r--r--spec/lib/click_house/bind_index_manager_spec.rb33
-rw-r--r--spec/lib/click_house/query_builder_spec.rb26
-rw-r--r--spec/lib/click_house/record_sync_context_spec.rb32
-rw-r--r--spec/lib/click_house/sync_cursor_spec.rb35
-rw-r--r--spec/lib/constraints/activity_pub_constrainer_spec.rb39
-rw-r--r--spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_spec_matcher.txt2
-rw-r--r--spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb6
-rw-r--r--spec/lib/generators/gitlab/partitioning/foreign_keys_generator_spec.rb6
-rw-r--r--spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb5
-rw-r--r--spec/lib/gitlab/auth/o_auth/provider_spec.rb14
-rw-r--r--spec/lib/gitlab/auth/user_access_denied_reason_spec.rb2
-rw-r--r--spec/lib/gitlab/auth_spec.rb24
-rw-r--r--spec/lib/gitlab/background_migration/backfill_has_merge_request_of_vulnerability_reads_spec.rb101
-rw-r--r--spec/lib/gitlab/background_migration/backfill_nuget_normalized_version_spec.rb74
-rw-r--r--spec/lib/gitlab/background_migration/backfill_project_statistics_storage_size_with_recent_size_spec.rb165
-rw-r--r--spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb4
-rw-r--r--spec/lib/gitlab/background_migration/backfill_user_preferences_with_defaults_spec.rb66
-rw-r--r--spec/lib/gitlab/background_migration/backfill_users_with_defaults_spec.rb68
-rw-r--r--spec/lib/gitlab/background_migration/convert_credit_card_validation_data_to_hashes_spec.rb81
-rw-r--r--spec/lib/gitlab/background_migration/rebalance_partition_id_spec.rb46
-rw-r--r--spec/lib/gitlab/background_migration/update_users_set_external_if_service_account_spec.rb42
-rw-r--r--spec/lib/gitlab/bitbucket_import/importer_spec.rb22
-rw-r--r--spec/lib/gitlab/bitbucket_import/importers/pull_request_importer_spec.rb166
-rw-r--r--spec/lib/gitlab/bitbucket_import/importers/pull_requests_importer_spec.rb71
-rw-r--r--spec/lib/gitlab/bitbucket_import/importers/repository_importer_spec.rb49
-rw-r--r--spec/lib/gitlab/bitbucket_import/parallel_importer_spec.rb43
-rw-r--r--spec/lib/gitlab/bitbucket_import/user_finder_spec.rb75
-rw-r--r--spec/lib/gitlab/bitbucket_server_import/importer_spec.rb653
-rw-r--r--spec/lib/gitlab/checks/matching_merge_request_spec.rb45
-rw-r--r--spec/lib/gitlab/ci/build/artifacts/metadata_spec.rb206
-rw-r--r--spec/lib/gitlab/ci/build/duration_parser_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/components/instance_path_spec.rb251
-rw-r--r--spec/lib/gitlab/ci/config/entry/bridge_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/entry/default_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb38
-rw-r--r--spec/lib/gitlab/ci/config/entry/include/rules_spec.rb35
-rw-r--r--spec/lib/gitlab/ci/config/entry/job_spec.rb20
-rw-r--r--spec/lib/gitlab/ci/config/entry/processable_spec.rb33
-rw-r--r--spec/lib/gitlab/ci/config/external/context_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/config/external/file/component_spec.rb35
-rw-r--r--spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb26
-rw-r--r--spec/lib/gitlab/ci/config/external/processor_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/config/external/rules_spec.rb218
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/interpolator_spec.rb3
-rw-r--r--spec/lib/gitlab/ci/config/yaml/tags/reference_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/config/yaml/tags/resolver_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb65
-rw-r--r--spec/lib/gitlab/ci/parsers/security/common_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build_spec.rb120
-rw-r--r--spec/lib/gitlab/ci/reports/sbom/component_spec.rb12
-rw-r--r--spec/lib/gitlab/ci/reports/sbom/metadata_spec.rb54
-rw-r--r--spec/lib/gitlab/ci/templates/MATLAB_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/trace/stream_spec.rb50
-rw-r--r--spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb7
-rw-r--r--spec/lib/gitlab/ci/variables/builder_spec.rb23
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb122
-rw-r--r--spec/lib/gitlab/composer/version_index_spec.rb115
-rw-r--r--spec/lib/gitlab/content_security_policy/config_loader_spec.rb56
-rw-r--r--spec/lib/gitlab/current_settings_spec.rb34
-rw-r--r--spec/lib/gitlab/data_builder/deployment_spec.rb9
-rw-r--r--spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb8
-rw-r--r--spec/lib/gitlab/database/click_house_client_spec.rb191
-rw-r--r--spec/lib/gitlab/database/gitlab_schema_spec.rb2
-rw-r--r--spec/lib/gitlab/database/load_balancing/host_spec.rb33
-rw-r--r--spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb51
-rw-r--r--spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb119
-rw-r--r--spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb127
-rw-r--r--spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb44
-rw-r--r--spec/lib/gitlab/database/load_balancing/sticking_spec.rb353
-rw-r--r--spec/lib/gitlab/database/migrations/instrumentation_spec.rb2
-rw-r--r--spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb7
-rw-r--r--spec/lib/gitlab/database/no_overrides_for_through_associations_spec.rb80
-rw-r--r--spec/lib/gitlab/database/partitioning/ci_sliding_list_strategy_spec.rb26
-rw-r--r--spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb30
-rw-r--r--spec/lib/gitlab/database/partitioning/partition_manager_spec.rb155
-rw-r--r--spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb26
-rw-r--r--spec/lib/gitlab/database/partitioning_spec.rb91
-rw-r--r--spec/lib/gitlab/database/reindexing_spec.rb20
-rw-r--r--spec/lib/gitlab/database/tables_truncate_spec.rb278
-rw-r--r--spec/lib/gitlab/database_spec.rb53
-rw-r--r--spec/lib/gitlab/database_warnings_spec.rb96
-rw-r--r--spec/lib/gitlab/email/handler/create_note_handler_spec.rb4
-rw-r--r--spec/lib/gitlab/email/handler/service_desk_handler_spec.rb8
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/admin_verify_spec.rb45
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb108
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb28
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/team_short_spec.rb47
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb82
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/trial_short_spec.rb45
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb48
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb54
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing_spec.rb35
-rw-r--r--spec/lib/gitlab/email/service_desk/custom_email_spec.rb37
-rw-r--r--spec/lib/gitlab/etag_caching/middleware_spec.rb16
-rw-r--r--spec/lib/gitlab/etag_caching/router/rails_spec.rb14
-rw-r--r--spec/lib/gitlab/etag_caching/store_spec.rb2
-rw-r--r--spec/lib/gitlab/event_store/store_spec.rb20
-rw-r--r--spec/lib/gitlab/experiment/rollout/feature_spec.rb2
-rw-r--r--spec/lib/gitlab/git/blame_spec.rb10
-rw-r--r--spec/lib/gitlab/git/diff_spec.rb25
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb31
-rw-r--r--spec/lib/gitlab/git_access_snippet_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client/operation_service_spec.rb105
-rw-r--r--spec/lib/gitlab/gitaly_client/ref_service_spec.rb110
-rw-r--r--spec/lib/gitlab/gitaly_client/repository_service_spec.rb13
-rw-r--r--spec/lib/gitlab/gitaly_client/with_feature_flag_actors_spec.rb23
-rw-r--r--spec/lib/gitlab/github_import/attachments_downloader_spec.rb51
-rw-r--r--spec/lib/gitlab/github_import/client_spec.rb22
-rw-r--r--spec/lib/gitlab/github_import/importer/note_attachments_importer_spec.rb41
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests/merged_by_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests/review_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/markdown/attachment_spec.rb24
-rw-r--r--spec/lib/gitlab/github_import/object_counter_spec.rb26
-rw-r--r--spec/lib/gitlab/github_import/user_finder_spec.rb269
-rw-r--r--spec/lib/gitlab/github_import_spec.rb4
-rw-r--r--spec/lib/gitlab/gl_repository/identifier_spec.rb6
-rw-r--r--spec/lib/gitlab/gl_repository/repo_type_spec.rb24
-rw-r--r--spec/lib/gitlab/gl_repository_spec.rb11
-rw-r--r--spec/lib/gitlab/gon_helper_spec.rb88
-rw-r--r--spec/lib/gitlab/graphql/deprecations/deprecation_spec.rb2
-rw-r--r--spec/lib/gitlab/group_search_results_spec.rb13
-rw-r--r--spec/lib/gitlab/http_spec.rb9
-rw-r--r--spec/lib/gitlab/import/errors_spec.rb1
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml8
-rw-r--r--spec/lib/gitlab/import_export/attributes_permitter_spec.rb1
-rw-r--r--spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb31
-rw-r--r--spec/lib/gitlab/import_export/command_line_util_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb14
-rw-r--r--spec/lib/gitlab/import_export/file_importer_spec.rb3
-rw-r--r--spec/lib/gitlab/import_export/import_test_coverage_spec.rb1
-rw-r--r--spec/lib/gitlab/import_export/json/ndjson_writer_spec.rb11
-rw-r--r--spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb3
-rw-r--r--spec/lib/gitlab/import_export/project/export_task_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/project/tree_restorer_spec.rb2
-rw-r--r--spec/lib/gitlab/import_sources_spec.rb54
-rw-r--r--spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb1
-rw-r--r--spec/lib/gitlab/job_waiter_spec.rb40
-rw-r--r--spec/lib/gitlab/manifest_import/metadata_spec.rb18
-rw-r--r--spec/lib/gitlab/metrics/dashboard/cache_spec.rb88
-rw-r--r--spec/lib/gitlab/metrics/dashboard/processor_spec.rb30
-rw-r--r--spec/lib/gitlab/metrics/dashboard/repo_dashboard_finder_spec.rb54
-rw-r--r--spec/lib/gitlab/metrics/dashboard/stages/url_validator_spec.rb101
-rw-r--r--spec/lib/gitlab/metrics/dashboard/url_spec.rb106
-rw-r--r--spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb78
-rw-r--r--spec/lib/gitlab/middleware/webhook_recursion_detection_spec.rb2
-rw-r--r--spec/lib/gitlab/observability_spec.rb29
-rw-r--r--spec/lib/gitlab/other_markup_spec.rb41
-rw-r--r--spec/lib/gitlab/pages/cache_control_spec.rb88
-rw-r--r--spec/lib/gitlab/pages/virtual_host_finder_spec.rb58
-rw-r--r--spec/lib/gitlab/pages_spec.rb87
-rw-r--r--spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb102
-rw-r--r--spec/lib/gitlab/patch/redis_cache_store_spec.rb66
-rw-r--r--spec/lib/gitlab/patch/sidekiq_scheduled_enq_spec.rb89
-rw-r--r--spec/lib/gitlab/prometheus/additional_metrics_parser_spec.rb248
-rw-r--r--spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb23
-rw-r--r--spec/lib/gitlab/prometheus/queries/additional_metrics_environment_query_spec.rb45
-rw-r--r--spec/lib/gitlab/rack_attack/request_spec.rb33
-rw-r--r--spec/lib/gitlab/redis/chat_spec.rb2
-rw-r--r--spec/lib/gitlab/redis/multi_store_spec.rb100
-rw-r--r--spec/lib/gitlab/redis/pubsub_spec.rb8
-rw-r--r--spec/lib/gitlab/redis/queues_metadata_spec.rb43
-rw-r--r--spec/lib/gitlab/redis/workhorse_spec.rb (renamed from spec/lib/gitlab/redis/etag_cache_spec.rb)34
-rw-r--r--spec/lib/gitlab/regex_spec.rb25
-rw-r--r--spec/lib/gitlab/repo_path_spec.rb14
-rw-r--r--spec/lib/gitlab/search_results_spec.rb54
-rw-r--r--spec/lib/gitlab/security/scan_configuration_spec.rb10
-rw-r--r--spec/lib/gitlab/setup_helper/workhorse_spec.rb10
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb65
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb6
-rw-r--r--spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb3
-rw-r--r--spec/lib/gitlab/sidekiq_queue_spec.rb2
-rw-r--r--spec/lib/gitlab/sql/cte_spec.rb3
-rw-r--r--spec/lib/gitlab/sql/pattern_spec.rb46
-rw-r--r--spec/lib/gitlab/time_tracking_formatter_spec.rb8
-rw-r--r--spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb4
-rw-r--r--spec/lib/gitlab/tracking/service_ping_context_spec.rb24
-rw-r--r--spec/lib/gitlab/tracking/standard_context_spec.rb3
-rw-r--r--spec/lib/gitlab/url_builder_spec.rb3
-rw-r--r--spec/lib/gitlab/url_sanitizer_spec.rb19
-rw-r--r--spec/lib/gitlab/usage/metric_definition_spec.rb60
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/batched_background_migration_failed_jobs_metric_spec.rb16
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_connected_agents_metric_spec.rb12
-rw-r--r--spec/lib/gitlab/usage/metrics/query_spec.rb2
-rw-r--r--spec/lib/gitlab/usage/time_series_storable_spec.rb40
-rw-r--r--spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb2
-rw-r--r--spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb186
-rw-r--r--spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb6
-rw-r--r--spec/lib/gitlab/usage_data_queries_spec.rb2
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb8
-rw-r--r--spec/lib/gitlab/user_access_snippet_spec.rb2
-rw-r--r--spec/lib/gitlab/utils/markdown_spec.rb35
-rw-r--r--spec/lib/gitlab/workhorse_spec.rb145
-rw-r--r--spec/lib/gitlab/x509/certificate_spec.rb2
-rw-r--r--spec/lib/gitlab/x509/commit_sigstore_spec.rb53
-rw-r--r--spec/lib/gitlab/x509/commit_spec.rb6
-rw-r--r--spec/lib/gitlab/x509/signature_sigstore_spec.rb453
-rw-r--r--spec/lib/gitlab/x509/signature_spec.rb2
-rw-r--r--spec/lib/gitlab/x509/tag_sigstore_spec.rb45
-rw-r--r--spec/lib/gitlab/x509/tag_spec.rb27
-rw-r--r--spec/lib/peek/views/click_house_spec.rb13
-rw-r--r--spec/lib/sidebars/admin/panel_spec.rb8
-rw-r--r--spec/lib/sidebars/concerns/has_avatar_spec.rb29
-rw-r--r--spec/lib/sidebars/explore/panel_spec.rb17
-rw-r--r--spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb39
-rw-r--r--spec/lib/sidebars/groups/menus/scope_menu_spec.rb5
-rw-r--r--spec/lib/sidebars/groups/super_sidebar_panel_spec.rb8
-rw-r--r--spec/lib/sidebars/menu_item_spec.rb9
-rw-r--r--spec/lib/sidebars/menu_spec.rb12
-rw-r--r--spec/lib/sidebars/organizations/menus/scope_menu_spec.rb4
-rw-r--r--spec/lib/sidebars/organizations/panel_spec.rb1
-rw-r--r--spec/lib/sidebars/organizations/super_sidebar_panel_spec.rb7
-rw-r--r--spec/lib/sidebars/panel_spec.rb18
-rw-r--r--spec/lib/sidebars/projects/menus/issues_menu_spec.rb1
-rw-r--r--spec/lib/sidebars/projects/menus/monitor_menu_spec.rb14
-rw-r--r--spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb25
-rw-r--r--spec/lib/sidebars/projects/menus/scope_menu_spec.rb5
-rw-r--r--spec/lib/sidebars/projects/super_sidebar_panel_spec.rb8
-rw-r--r--spec/lib/sidebars/search/panel_spec.rb7
-rw-r--r--spec/lib/sidebars/static_menu_spec.rb4
-rw-r--r--spec/lib/sidebars/user_profile/menus/overview_menu_spec.rb5
-rw-r--r--spec/lib/sidebars/user_profile/panel_spec.rb7
-rw-r--r--spec/lib/sidebars/user_settings/panel_spec.rb3
-rw-r--r--spec/lib/sidebars/your_work/menus/organizations_menu_spec.rb42
-rw-r--r--spec/lib/sidebars/your_work/panel_spec.rb3
-rw-r--r--spec/lib/system_check/app/table_truncate_check_spec.rb75
-rw-r--r--spec/lib/unnested_in_filters/rewriter_spec.rb251
-rw-r--r--spec/lib/users/internal_spec.rb97
-rw-r--r--spec/mailers/emails/in_product_marketing_spec.rb69
-rw-r--r--spec/mailers/emails/profile_spec.rb73
-rw-r--r--spec/mailers/emails/service_desk_spec.rb90
-rw-r--r--spec/mailers/notify_spec.rb72
-rw-r--r--spec/migrations/20230125093723_rebalance_partition_id_ci_pipeline_spec.rb58
-rw-r--r--spec/migrations/20230125093840_rebalance_partition_id_ci_build_spec.rb58
-rw-r--r--spec/migrations/20230208100917_fix_partition_ids_for_ci_pipeline_variable_spec.rb58
-rw-r--r--spec/migrations/20230208103009_fix_partition_ids_for_ci_job_artifact_spec.rb58
-rw-r--r--spec/migrations/20230208132608_fix_partition_ids_for_ci_stage_spec.rb58
-rw-r--r--spec/migrations/20230209090702_fix_partition_ids_for_ci_build_report_result_spec.rb60
-rw-r--r--spec/migrations/20230209092204_fix_partition_ids_for_ci_build_trace_metadata_spec.rb60
-rw-r--r--spec/migrations/20230209140102_fix_partition_ids_for_ci_build_metadata_spec.rb60
-rw-r--r--spec/migrations/20230214122717_fix_partition_ids_for_ci_job_variables_spec.rb51
-rw-r--r--spec/migrations/20230214154101_fix_partition_ids_on_ci_sources_pipelines_spec.rb45
-rw-r--r--spec/migrations/20230726142555_ensure_notes_bigint_backfill_is_finished_for_self_managed_spec.rb35
-rw-r--r--spec/migrations/20230726144458_swap_notes_id_to_bigint_for_self_managed_spec.rb120
-rw-r--r--spec/migrations/20230802212443_add_current_user_todos_widget_to_epic_work_item_type_spec.rb25
-rw-r--r--spec/migrations/20230809170822_ensure_system_note_metadata_bigint_backfill_is_finished_for_self_managed_spec.rb35
-rw-r--r--spec/migrations/20230809174702_swap_system_note_metadata_note_id_to_bigint_for_self_managed_spec.rb121
-rw-r--r--spec/migrations/20230809203254_ensure_issue_user_mentions_bigint_backfill_is_finished_for_self_managed_spec.rb35
-rw-r--r--spec/migrations/20230809210550_swap_issue_user_mentions_note_id_to_bigint_for_self_managed_spec.rb127
-rw-r--r--spec/migrations/20230810113227_swap_note_diff_files_note_id_to_bigint_for_self_hosts_spec.rb156
-rw-r--r--spec/migrations/20230810124545_schedule_fixing_namespace_ids_of_vulnerability_reads_spec.rb27
-rw-r--r--spec/migrations/20230811103457_queue_backfill_nuget_normalized_version_spec.rb26
-rw-r--r--spec/migrations/20230815140656_queue_populate_denormalized_columns_for_sbom_occurrences_spec.rb26
-rw-r--r--spec/migrations/20230815160428_rename_plans_titles_with_legacy_plan_names_spec.rb23
-rw-r--r--spec/migrations/20230816152540_ensure_dum_note_id_bigint_backfill_is_finished_for_self_managed_spec.rb36
-rw-r--r--spec/migrations/20230816152639_swap_design_user_mentions_note_id_to_big_int_for_self_managed_spec.rb122
-rw-r--r--spec/migrations/20230817111938_swap_events_target_id_to_bigint_for_self_hosts_spec.rb121
-rw-r--r--spec/migrations/20230817143637_swap_award_emoji_note_id_to_bigint_for_self_hosts_spec.rb121
-rw-r--r--spec/migrations/20230818083610_queue_backfill_users_with_defaults_spec.rb27
-rw-r--r--spec/migrations/20230818085219_queue_backfill_user_preferences_with_defaults_spec.rb27
-rw-r--r--spec/migrations/20230818142801_queue_create_compliance_standards_adherence_spec.rb50
-rw-r--r--spec/migrations/20230821081603_queue_convert_credit_card_validation_data_to_hashes_spec.rb26
-rw-r--r--spec/migrations/20230822104028_delete_project_callout_three_spec.rb21
-rw-r--r--spec/migrations/20230822151454_remove_free_user_cap_email_workers_spec.rb24
-rw-r--r--spec/migrations/20230823090001_queue_backfill_project_statistics_storage_size_with_recent_size_spec.rb26
-rw-r--r--spec/migrations/20230823140934_add_linked_items_widget_to_ticket_work_item_type_spec.rb29
-rw-r--r--spec/migrations/20230830121830_queue_update_users_set_external_if_service_account_spec.rb26
-rw-r--r--spec/migrations/20230831084632_queue_sync_scan_result_policies_spec.rb26
-rw-r--r--spec/migrations/20230906204934_restart_self_hosted_sent_notifications_bigint_conversion_spec.rb144
-rw-r--r--spec/migrations/20230906204935_restart_self_hosted_sent_notifications_backfill_spec.rb162
-rw-r--r--spec/migrations/20230907155247_queue_backfill_has_merge_request_of_vulnerability_reads_spec.rb26
-rw-r--r--spec/migrations/backfill_alert_management_prometheus_integrations_spec.rb126
-rw-r--r--spec/migrations/ensure_mr_user_mentions_note_id_bigint_backfill_is_finished_for_self_managed_spec.rb35
-rw-r--r--spec/migrations/swap_merge_request_user_mentions_note_id_to_bigint_for_self_managed_spec.rb135
-rw-r--r--spec/models/ability_spec.rb5
-rw-r--r--spec/models/abuse_report_spec.rb27
-rw-r--r--spec/models/active_session_spec.rb18
-rw-r--r--spec/models/alert_management/http_integration_spec.rb2
-rw-r--r--spec/models/alerting/project_alerting_setting_spec.rb29
-rw-r--r--spec/models/analytics/cycle_analytics/runtime_limiter_spec.rb55
-rw-r--r--spec/models/application_setting_spec.rb12
-rw-r--r--spec/models/award_emoji_spec.rb33
-rw-r--r--spec/models/bulk_imports/entity_spec.rb8
-rw-r--r--spec/models/ci/build_spec.rb179
-rw-r--r--spec/models/ci/catalog/listing_spec.rb12
-rw-r--r--spec/models/ci/catalog/resource_spec.rb6
-rw-r--r--spec/models/ci/catalog/resources/component_spec.rb2
-rw-r--r--spec/models/ci/runner_spec.rb7
-rw-r--r--spec/models/clusters/agent_token_spec.rb39
-rw-r--r--spec/models/commit_status_spec.rb40
-rw-r--r--spec/models/concerns/as_cte_spec.rb2
-rw-r--r--spec/models/concerns/each_batch_spec.rb20
-rw-r--r--spec/models/concerns/expirable_spec.rb7
-rw-r--r--spec/models/concerns/has_user_type_spec.rb80
-rw-r--r--spec/models/concerns/issuable_spec.rb31
-rw-r--r--spec/models/concerns/prometheus_adapter_spec.rb22
-rw-r--r--spec/models/concerns/require_email_verification_spec.rb2
-rw-r--r--spec/models/concerns/resolvable_discussion_spec.rb8
-rw-r--r--spec/models/concerns/routable_spec.rb71
-rw-r--r--spec/models/concerns/transitionable_spec.rb40
-rw-r--r--spec/models/deploy_key_spec.rb2
-rw-r--r--spec/models/design_management/design_spec.rb10
-rw-r--r--spec/models/doorkeeper/application_spec.rb11
-rw-r--r--spec/models/environment_status_spec.rb12
-rw-r--r--spec/models/group_spec.rb21
-rw-r--r--spec/models/hooks/web_hook_log_spec.rb38
-rw-r--r--spec/models/integration_spec.rb6
-rw-r--r--spec/models/integrations/base_chat_notification_spec.rb21
-rw-r--r--spec/models/integrations/chat_message/deployment_message_spec.rb36
-rw-r--r--spec/models/integrations/confluence_spec.rb6
-rw-r--r--spec/models/integrations/mattermost_spec.rb2
-rw-r--r--spec/models/integrations/prometheus_spec.rb28
-rw-r--r--spec/models/integrations/shimo_spec.rb8
-rw-r--r--spec/models/integrations/slack_spec.rb2
-rw-r--r--spec/models/integrations/zentao_spec.rb8
-rw-r--r--spec/models/issue_spec.rb7
-rw-r--r--spec/models/loose_foreign_keys/modification_tracker_spec.rb14
-rw-r--r--spec/models/loose_foreign_keys/turbo_modification_tracker_spec.rb23
-rw-r--r--spec/models/member_spec.rb7
-rw-r--r--spec/models/members/group_member_spec.rb16
-rw-r--r--spec/models/merge_request_spec.rb160
-rw-r--r--spec/models/metrics/dashboard/annotation_spec.rb73
-rw-r--r--spec/models/metrics/users_starred_dashboard_spec.rb39
-rw-r--r--spec/models/ml/model_version_spec.rb10
-rw-r--r--spec/models/namespace_spec.rb326
-rw-r--r--spec/models/note_spec.rb111
-rw-r--r--spec/models/notification_setting_spec.rb7
-rw-r--r--spec/models/oauth_access_token_spec.rb6
-rw-r--r--spec/models/organizations/organization_spec.rb1
-rw-r--r--spec/models/packages/dependency_link_spec.rb68
-rw-r--r--spec/models/packages/ml_model/package_spec.rb67
-rw-r--r--spec/models/packages/nuget/metadatum_spec.rb23
-rw-r--r--spec/models/packages/nuget/symbol_spec.rb70
-rw-r--r--spec/models/packages/package_spec.rb43
-rw-r--r--spec/models/packages/protection/rule_spec.rb40
-rw-r--r--spec/models/pages/virtual_domain_spec.rb53
-rw-r--r--spec/models/pages_deployment_spec.rb51
-rw-r--r--spec/models/pages_domain_spec.rb11
-rw-r--r--spec/models/performance_monitoring/prometheus_metric_spec.rb67
-rw-r--r--spec/models/performance_monitoring/prometheus_panel_group_spec.rb62
-rw-r--r--spec/models/performance_monitoring/prometheus_panel_spec.rb85
-rw-r--r--spec/models/plan_spec.rb12
-rw-r--r--spec/models/pool_repository_spec.rb46
-rw-r--r--spec/models/project_authorization_spec.rb37
-rw-r--r--spec/models/project_authorizations/changes_spec.rb10
-rw-r--r--spec/models/project_ci_cd_setting_spec.rb2
-rw-r--r--spec/models/project_feature_spec.rb20
-rw-r--r--spec/models/project_import_state_spec.rb8
-rw-r--r--spec/models/project_metrics_setting_spec.rb63
-rw-r--r--spec/models/project_spec.rb113
-rw-r--r--spec/models/repository_spec.rb31
-rw-r--r--spec/models/resource_label_event_spec.rb17
-rw-r--r--spec/models/resource_state_event_spec.rb16
-rw-r--r--spec/models/review_spec.rb19
-rw-r--r--spec/models/route_spec.rb10
-rw-r--r--spec/models/snippet_repository_spec.rb2
-rw-r--r--spec/models/user_custom_attribute_spec.rb29
-rw-r--r--spec/models/user_preference_spec.rb14
-rw-r--r--spec/models/user_spec.rb125
-rw-r--r--spec/models/users/credit_card_validation_spec.rb143
-rw-r--r--spec/models/users/group_visit_spec.rb25
-rw-r--r--spec/models/users/project_visit_spec.rb25
-rw-r--r--spec/models/work_item_spec.rb81
-rw-r--r--spec/models/work_items/related_work_item_link_spec.rb40
-rw-r--r--spec/models/work_items/widgets/description_spec.rb2
-rw-r--r--spec/models/work_items/widgets/linked_items_spec.rb4
-rw-r--r--spec/models/x509_certificate_spec.rb1
-rw-r--r--spec/models/x509_issuer_spec.rb2
-rw-r--r--spec/policies/ci/bridge_policy_spec.rb34
-rw-r--r--spec/policies/ci/pipeline_policy_spec.rb25
-rw-r--r--spec/policies/global_policy_spec.rb16
-rw-r--r--spec/policies/group_policy_spec.rb22
-rw-r--r--spec/policies/issue_policy_spec.rb4
-rw-r--r--spec/policies/organizations/organization_policy_spec.rb14
-rw-r--r--spec/policies/packages/policies/project_policy_spec.rb2
-rw-r--r--spec/policies/project_policy_spec.rb4
-rw-r--r--spec/presenters/blob_presenter_spec.rb12
-rw-r--r--spec/presenters/event_presenter_spec.rb100
-rw-r--r--spec/presenters/gitlab/blame_presenter_spec.rb29
-rw-r--r--spec/presenters/issue_presenter_spec.rb2
-rw-r--r--spec/presenters/packages/composer/packages_presenter_spec.rb2
-rw-r--r--spec/presenters/projects/security/configuration_presenter_spec.rb3
-rw-r--r--spec/presenters/snippet_blob_presenter_spec.rb2
-rw-r--r--spec/rake_helper.rb2
-rw-r--r--spec/requests/admin/abuse_reports_controller_spec.rb79
-rw-r--r--spec/requests/admin/users_controller_spec.rb23
-rw-r--r--spec/requests/api/bulk_imports_spec.rb24
-rw-r--r--spec/requests/api/ci/jobs_spec.rb8
-rw-r--r--spec/requests/api/ci/runner/jobs_request_post_spec.rb61
-rw-r--r--spec/requests/api/commit_statuses_spec.rb30
-rw-r--r--spec/requests/api/commits_spec.rb2
-rw-r--r--spec/requests/api/discussions_spec.rb11
-rw-r--r--spec/requests/api/feature_flags_spec.rb162
-rw-r--r--spec/requests/api/features_spec.rb2
-rw-r--r--spec/requests/api/graphql/ci/jobs_spec.rb103
-rw-r--r--spec/requests/api/graphql/ci/runner_spec.rb166
-rw-r--r--spec/requests/api/graphql/ci/runner_web_url_edge_spec.rb20
-rw-r--r--spec/requests/api/graphql/ci/runners_spec.rb10
-rw-r--r--spec/requests/api/graphql/group/dependency_proxy_blobs_spec.rb5
-rw-r--r--spec/requests/api/graphql/group/work_item_spec.rb71
-rw-r--r--spec/requests/api/graphql/group/work_items_spec.rb32
-rw-r--r--spec/requests/api/graphql/group_query_spec.rb2
-rw-r--r--spec/requests/api/graphql/groups_query_spec.rb2
-rw-r--r--spec/requests/api/graphql/jobs_query_spec.rb2
-rw-r--r--spec/requests/api/graphql/merge_requests/codequality_reports_comparer_spec.rb185
-rw-r--r--spec/requests/api/graphql/mutations/admin/abuse_report_labels/create_spec.rb55
-rw-r--r--spec/requests/api/graphql/mutations/ci/pipeline_schedule/create_spec.rb25
-rw-r--r--spec/requests/api/graphql/mutations/ci/pipeline_schedule/delete_spec.rb23
-rw-r--r--spec/requests/api/graphql/mutations/ci/pipeline_schedule/play_spec.rb23
-rw-r--r--spec/requests/api/graphql/mutations/ci/pipeline_schedule/update_spec.rb25
-rw-r--r--spec/requests/api/graphql/mutations/ci/pipeline_trigger/create_spec.rb23
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/update_spec.rb32
-rw-r--r--spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb41
-rw-r--r--spec/requests/api/graphql/mutations/metrics/dashboard/annotations/delete_spec.rb22
-rw-r--r--spec/requests/api/graphql/mutations/work_items/linked_items/add_spec.rb9
-rw-r--r--spec/requests/api/graphql/mutations/work_items/linked_items/remove_spec.rb120
-rw-r--r--spec/requests/api/graphql/mutations/work_items/update_spec.rb14
-rw-r--r--spec/requests/api/graphql/organizations/organization_query_spec.rb178
-rw-r--r--spec/requests/api/graphql/packages/package_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/merge_request_spec.rb7
-rw-r--r--spec/requests/api/graphql/project/runners_spec.rb14
-rw-r--r--spec/requests/api/graphql/project/work_items_spec.rb38
-rw-r--r--spec/requests/api/graphql/project_query_spec.rb18
-rw-r--r--spec/requests/api/graphql/work_item_spec.rb44
-rw-r--r--spec/requests/api/groups_spec.rb36
-rw-r--r--spec/requests/api/internal/base_spec.rb15
-rw-r--r--spec/requests/api/internal/kubernetes_spec.rb162
-rw-r--r--spec/requests/api/merge_requests_spec.rb28
-rw-r--r--spec/requests/api/metadata_spec.rb18
-rw-r--r--spec/requests/api/metrics/dashboard/annotations_spec.rb3
-rw-r--r--spec/requests/api/metrics/user_starred_dashboards_spec.rb5
-rw-r--r--spec/requests/api/ml/mlflow/experiments_spec.rb4
-rw-r--r--spec/requests/api/ml/mlflow/runs_spec.rb138
-rw-r--r--spec/requests/api/npm_group_packages_spec.rb43
-rw-r--r--spec/requests/api/npm_instance_packages_spec.rb29
-rw-r--r--spec/requests/api/nuget_project_packages_spec.rb68
-rw-r--r--spec/requests/api/project_attributes.yml3
-rw-r--r--spec/requests/api/project_import_spec.rb10
-rw-r--r--spec/requests/api/project_packages_spec.rb6
-rw-r--r--spec/requests/api/projects_spec.rb8
-rw-r--r--spec/requests/api/search_spec.rb45
-rw-r--r--spec/requests/api/settings_spec.rb6
-rw-r--r--spec/requests/api/usage_data_queries_spec.rb6
-rw-r--r--spec/requests/api/users_spec.rb47
-rw-r--r--spec/requests/clusters/agents/dashboard_controller_spec.rb76
-rw-r--r--spec/requests/content_security_policy_spec.rb29
-rw-r--r--spec/requests/groups/email_campaigns_controller_spec.rb127
-rw-r--r--spec/requests/groups/settings/access_tokens_controller_spec.rb22
-rw-r--r--spec/requests/groups/work_items_controller_spec.rb44
-rw-r--r--spec/requests/openid_connect_spec.rb2
-rw-r--r--spec/requests/organizations/organizations_controller_spec.rb70
-rw-r--r--spec/requests/projects/noteable_notes_spec.rb78
-rw-r--r--spec/requests/projects/settings/access_tokens_controller_spec.rb22
-rw-r--r--spec/requests/projects/tracing_controller_spec.rb104
-rw-r--r--spec/requests/rack_attack_global_spec.rb114
-rw-r--r--spec/requests/search_controller_spec.rb1
-rw-r--r--spec/requests/sessions_spec.rb42
-rw-r--r--spec/requests/users/namespace_visits_controller_spec.rb72
-rw-r--r--spec/requests/verifies_with_email_spec.rb6
-rw-r--r--spec/routing/organizations/organizations_controller_routing_spec.rb10
-rw-r--r--spec/rubocop/cop/capybara/testid_finders_spec.rb50
-rw-r--r--spec/rubocop/cop/lint/last_keyword_argument_spec.rb168
-rw-r--r--spec/rubocop/cop/migration/versioned_migration_class_spec.rb9
-rw-r--r--spec/scripts/generate_message_to_run_e2e_pipeline_spec.rb2
-rw-r--r--spec/scripts/trigger-build_spec.rb12
-rw-r--r--spec/serializers/activity_pub/activity_streams_serializer_spec.rb157
-rw-r--r--spec/serializers/activity_pub/project_entity_spec.rb32
-rw-r--r--spec/serializers/activity_pub/release_entity_spec.rb48
-rw-r--r--spec/serializers/activity_pub/releases_actor_entity_spec.rb39
-rw-r--r--spec/serializers/activity_pub/releases_actor_serializer_spec.rb16
-rw-r--r--spec/serializers/activity_pub/releases_outbox_serializer_spec.rb34
-rw-r--r--spec/serializers/activity_pub/user_entity_spec.rb28
-rw-r--r--spec/serializers/admin/abuse_report_details_entity_spec.rb113
-rw-r--r--spec/serializers/admin/abuse_report_details_serializer_spec.rb5
-rw-r--r--spec/serializers/admin/abuse_report_entity_spec.rb15
-rw-r--r--spec/serializers/admin/reported_content_entity_spec.rb50
-rw-r--r--spec/serializers/build_details_entity_spec.rb16
-rw-r--r--spec/serializers/ci/job_annotation_entity_spec.rb30
-rw-r--r--spec/serializers/codequality_degradation_entity_spec.rb17
-rw-r--r--spec/serializers/codequality_reports_comparer_serializer_spec.rb4
-rw-r--r--spec/serializers/deployment_entity_spec.rb89
-rw-r--r--spec/serializers/import/github_realtime_repo_entity_spec.rb4
-rw-r--r--spec/serializers/import/github_realtime_repo_serializer_spec.rb2
-rw-r--r--spec/serializers/profile/event_entity_spec.rb11
-rw-r--r--spec/services/admin/abuse_report_labels/create_service_spec.rb51
-rw-r--r--spec/services/admin/abuse_reports/moderate_user_service_spec.rb17
-rw-r--r--spec/services/admin/abuse_reports/update_service_spec.rb85
-rw-r--r--spec/services/application_settings/update_service_spec.rb8
-rw-r--r--spec/services/auto_merge/base_service_spec.rb5
-rw-r--r--spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb176
-rw-r--r--spec/services/bulk_imports/create_service_spec.rb22
-rw-r--r--spec/services/bulk_imports/file_download_service_spec.rb14
-rw-r--r--spec/services/ci/components/fetch_service_spec.rb37
-rw-r--r--spec/services/ci/create_commit_status_service_spec.rb461
-rw-r--r--spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb17
-rw-r--r--spec/services/ci/create_pipeline_service/environment_spec.rb22
-rw-r--r--spec/services/ci/create_pipeline_service/logger_spec.rb69
-rw-r--r--spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb32
-rw-r--r--spec/services/ci/create_pipeline_service/rules_spec.rb40
-rw-r--r--spec/services/ci/create_pipeline_service/variables_spec.rb33
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb174
-rw-r--r--spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb14
-rw-r--r--spec/services/ci/register_job_service_spec.rb15
-rw-r--r--spec/services/ci/runners/set_runner_associated_projects_service_spec.rb4
-rw-r--r--spec/services/concerns/rate_limited_service_spec.rb2
-rw-r--r--spec/services/concerns/services/return_service_responses_spec.rb32
-rw-r--r--spec/services/deployments/update_environment_service_spec.rb21
-rw-r--r--spec/services/design_management/delete_designs_service_spec.rb8
-rw-r--r--spec/services/design_management/save_designs_service_spec.rb27
-rw-r--r--spec/services/discussions/resolve_service_spec.rb8
-rw-r--r--spec/services/draft_notes/publish_service_spec.rb9
-rw-r--r--spec/services/environments/stop_service_spec.rb3
-rw-r--r--spec/services/environments/stop_stale_service_spec.rb6
-rw-r--r--spec/services/files/delete_service_spec.rb7
-rw-r--r--spec/services/files/update_service_spec.rb6
-rw-r--r--spec/services/git/branch_push_service_spec.rb24
-rw-r--r--spec/services/google_cloud/create_cloudsql_instance_service_spec.rb30
-rw-r--r--spec/services/google_cloud/fetch_google_ip_list_service_spec.rb2
-rw-r--r--spec/services/google_cloud/generate_pipeline_service_spec.rb48
-rw-r--r--spec/services/google_cloud/get_cloudsql_instances_service_spec.rb38
-rw-r--r--spec/services/gpg_keys/destroy_service_spec.rb22
-rw-r--r--spec/services/groups/destroy_service_spec.rb11
-rw-r--r--spec/services/groups/group_links/create_service_spec.rb3
-rw-r--r--spec/services/groups/update_service_spec.rb72
-rw-r--r--spec/services/import_export_clean_up_service_spec.rb16
-rw-r--r--spec/services/incident_management/incidents/create_service_spec.rb2
-rw-r--r--spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb4
-rw-r--r--spec/services/issuable/process_assignees_spec.rb80
-rw-r--r--spec/services/issue_links/destroy_service_spec.rb9
-rw-r--r--spec/services/issue_links/list_service_spec.rb55
-rw-r--r--spec/services/issues/close_service_spec.rb4
-rw-r--r--spec/services/issues/create_service_spec.rb8
-rw-r--r--spec/services/issues/export_csv_service_spec.rb32
-rw-r--r--spec/services/issues/move_service_spec.rb3
-rw-r--r--spec/services/issues/resolve_discussions_spec.rb26
-rw-r--r--spec/services/issues/update_service_spec.rb13
-rw-r--r--spec/services/labels/available_labels_service_spec.rb24
-rw-r--r--spec/services/labels/update_service_spec.rb8
-rw-r--r--spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb20
-rw-r--r--spec/services/loose_foreign_keys/process_deleted_records_service_spec.rb4
-rw-r--r--spec/services/members/invitation_reminder_email_service_spec.rb2
-rw-r--r--spec/services/merge_requests/approval_service_spec.rb45
-rw-r--r--spec/services/merge_requests/base_service_spec.rb28
-rw-r--r--spec/services/merge_requests/create_ref_service_spec.rb183
-rw-r--r--spec/services/merge_requests/ff_merge_service_spec.rb144
-rw-r--r--spec/services/merge_requests/merge_service_spec.rb895
-rw-r--r--spec/services/merge_requests/refresh_service_spec.rb7
-rw-r--r--spec/services/merge_requests/update_service_spec.rb52
-rw-r--r--spec/services/metrics/global_metrics_update_service_spec.rb14
-rw-r--r--spec/services/metrics/sample_metrics_service_spec.rb45
-rw-r--r--spec/services/namespaces/in_product_marketing_emails_service_spec.rb216
-rw-r--r--spec/services/note_summary_spec.rb9
-rw-r--r--spec/services/notes/create_service_spec.rb5
-rw-r--r--spec/services/notes/destroy_service_spec.rb7
-rw-r--r--spec/services/notes/quick_actions_service_spec.rb39
-rw-r--r--spec/services/notes/update_service_spec.rb12
-rw-r--r--spec/services/notification_service_spec.rb134
-rw-r--r--spec/services/packages/ml_model/create_package_file_service_spec.rb4
-rw-r--r--spec/services/packages/npm/generate_metadata_service_spec.rb11
-rw-r--r--spec/services/packages/nuget/check_duplicates_service_spec.rb155
-rw-r--r--spec/services/packages/nuget/extract_metadata_file_service_spec.rb14
-rw-r--r--spec/services/packages/nuget/extract_remote_metadata_file_service_spec.rb126
-rw-r--r--spec/services/packages/nuget/metadata_extraction_service_spec.rb4
-rw-r--r--spec/services/packages/nuget/odata_package_entry_service_spec.rb69
-rw-r--r--spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb24
-rw-r--r--spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb3
-rw-r--r--spec/services/preview_markdown_service_spec.rb21
-rw-r--r--spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb19
-rw-r--r--spec/services/projects/create_service_spec.rb26
-rw-r--r--spec/services/projects/import_service_spec.rb102
-rw-r--r--spec/services/projects/in_product_marketing_campaign_emails_service_spec.rb54
-rw-r--r--spec/services/projects/prometheus/alerts/notify_service_spec.rb4
-rw-r--r--spec/services/projects/update_pages_service_spec.rb128
-rw-r--r--spec/services/projects/update_repository_storage_service_spec.rb47
-rw-r--r--spec/services/projects/update_service_spec.rb74
-rw-r--r--spec/services/protected_branches/api_service_spec.rb20
-rw-r--r--spec/services/push_event_payload_service_spec.rb4
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb33
-rw-r--r--spec/services/releases/create_service_spec.rb2
-rw-r--r--spec/services/releases/destroy_service_spec.rb28
-rw-r--r--spec/services/resource_access_tokens/revoke_service_spec.rb3
-rw-r--r--spec/services/resource_events/change_labels_service_spec.rb12
-rw-r--r--spec/services/resource_events/merge_into_notes_service_spec.rb3
-rw-r--r--spec/services/security/ci_configuration/dependency_scanning_create_service_spec.rb2
-rw-r--r--spec/services/security/merge_reports_service_spec.rb105
-rw-r--r--spec/services/service_desk/custom_email_verifications/create_service_spec.rb23
-rw-r--r--spec/services/service_desk/custom_email_verifications/update_service_spec.rb34
-rw-r--r--spec/services/service_desk/custom_emails/create_service_spec.rb13
-rw-r--r--spec/services/service_desk/custom_emails/destroy_service_spec.rb7
-rw-r--r--spec/services/service_desk_settings/update_service_spec.rb25
-rw-r--r--spec/services/spam/spam_action_service_spec.rb11
-rw-r--r--spec/services/system_notes/alert_management_service_spec.rb4
-rw-r--r--spec/services/system_notes/issuables_service_spec.rb13
-rw-r--r--spec/services/system_notes/time_tracking_service_spec.rb21
-rw-r--r--spec/services/users/authorized_build_service_spec.rb8
-rw-r--r--spec/services/users/build_service_spec.rb51
-rw-r--r--spec/services/users/migrate_records_to_ghost_user_service_spec.rb10
-rw-r--r--spec/services/users/upsert_credit_card_validation_service_spec.rb11
-rw-r--r--spec/services/work_items/related_work_item_links/destroy_service_spec.rb82
-rw-r--r--spec/services/work_items/update_service_spec.rb5
-rw-r--r--spec/spec_helper.rb48
-rw-r--r--spec/support/before_all_adapter.rb11
-rw-r--r--spec/support/capybara.rb6
-rw-r--r--spec/support/capybara_wait_for_all_requests.rb20
-rw-r--r--spec/support/database/auto_explain.rb15
-rw-r--r--spec/support/database/click_house/hooks.rb8
-rw-r--r--spec/support/database/prevent_cross_database_modification.rb48
-rw-r--r--spec/support/database_cleaner.rb10
-rw-r--r--spec/support/db_cleaner.rb3
-rw-r--r--spec/support/factory_bot.rb14
-rw-r--r--spec/support/finder_collection_allowlist.yml6
-rw-r--r--spec/support/gitlab_stubs/gitlab_ci_dast_includes.yml7
-rw-r--r--spec/support/helpers/database/duplicate_indexes.rb77
-rw-r--r--spec/support/helpers/database/duplicate_indexes.yml265
-rw-r--r--spec/support/helpers/features/admin_users_helpers.rb2
-rw-r--r--spec/support/helpers/features/highlight_content_helper.rb19
-rw-r--r--spec/support/helpers/features/runners_helpers.rb10
-rw-r--r--spec/support/helpers/filtered_search_helpers.rb2
-rw-r--r--spec/support/helpers/loose_foreign_keys_helper.rb11
-rw-r--r--spec/support/helpers/sign_up_helpers.rb27
-rw-r--r--spec/support/helpers/stub_gitlab_calls.rb14
-rw-r--r--spec/support/helpers/x509_helpers.rb181
-rw-r--r--spec/support/matchers/pagination_matcher.rb10
-rw-r--r--spec/support/migration.rb14
-rw-r--r--spec/support/multiple_databases.rb2
-rw-r--r--spec/support/protected_branch_helpers.rb2
-rw-r--r--spec/support/rspec.rb10
-rw-r--r--spec/support/rspec_order_todo.yml17
-rw-r--r--spec/support/shared_contexts/dependency_proxy_shared_context.rb14
-rw-r--r--spec/support/shared_contexts/email_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/features/integrations/group_integrations_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/features/integrations/instance_integrations_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/features/integrations/project_integrations_shared_context.rb4
-rw-r--r--spec/support/shared_contexts/finders/users_finder_shared_contexts.rb2
-rw-r--r--spec/support/shared_contexts/lib/gitlab/database/load_balancing/wal_tracking_shared_context.rb8
-rw-r--r--spec/support/shared_contexts/navbar_structure_context.rb20
-rw-r--r--spec/support/shared_contexts/policies/group_policy_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/requests/api/npm_packages_shared_context.rb2
-rw-r--r--spec/support/shared_examples/channels/noteable/notes_channel_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/ci/create_pipeline_service_environment_shared_examples.rb166
-rw-r--r--spec/support/shared_examples/ci/deployable_shared_examples.rb56
-rw-r--r--spec/support/shared_examples/config/metrics/every_metric_definition_shared_examples.rb3
-rw-r--r--spec/support/shared_examples/controllers/concerns/web_hooks/integrations_hook_log_actions_shared_examples.rb9
-rw-r--r--spec/support/shared_examples/controllers/issuable_notes_filter_shared_examples.rb17
-rw-r--r--spec/support/shared_examples/controllers/labels_controller_shared_examples.rb38
-rw-r--r--spec/support/shared_examples/controllers/search_rate_limit_shared_examples.rb21
-rw-r--r--spec/support/shared_examples/controllers/snowplow_event_tracking_examples.rb2
-rw-r--r--spec/support/shared_examples/features/2fa_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/features/content_editor_shared_examples.rb3
-rw-r--r--spec/support/shared_examples/features/project_features_apply_to_issuables_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/features/protected_branches_access_control_ce_shared_examples.rb21
-rw-r--r--spec/support/shared_examples/features/protected_tags_with_deploy_keys_examples.rb4
-rw-r--r--spec/support/shared_examples/features/runners_shared_examples.rb18
-rw-r--r--spec/support/shared_examples/features/sidebar/sidebar_labels_shared_examples.rb9
-rw-r--r--spec/support/shared_examples/features/snippets_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/variable_list_pagination_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/features/variable_list_shared_examples.rb34
-rw-r--r--spec/support/shared_examples/features/work_items_shared_examples.rb39
-rw-r--r--spec/support/shared_examples/finders/issues_finder_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/graphql/mutations/update_time_estimate_shared_examples.rb20
-rw-r--r--spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/harbor/artifacts_controller_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/harbor/repositories_controller_shared_examples.rb24
-rw-r--r--spec/support/shared_examples/harbor/tags_controller_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/lib/api/ai_workhorse_shared_examples.rb45
-rw-r--r--spec/support/shared_examples/lib/gitlab/bitbucket_import/object_import_shared_examples.rb100
-rw-r--r--spec/support/shared_examples/lib/gitlab/bitbucket_import/stage_methods_shared_examples.rb29
-rw-r--r--spec/support/shared_examples/lib/gitlab/bitbucket_server_import/object_import_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/lib/gitlab/database/cte_materialized_shared_examples.rb37
-rw-r--r--spec/support/shared_examples/lib/gitlab/import/advance_stage_shared_examples.rb109
-rw-r--r--spec/support/shared_examples/lib/gitlab/repo_type_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/lib/gitlab/search_archived_filter_shared_examples.rb33
-rw-r--r--spec/support/shared_examples/lib/gitlab/usage_data_counters/issuable_activity_shared_examples.rb87
-rw-r--r--spec/support/shared_examples/lib/menus_shared_examples.rb7
-rw-r--r--spec/support/shared_examples/lib/sidebars/user_profile/user_profile_menus_shared_examples.rb14
-rw-r--r--spec/support/shared_examples/loose_foreign_keys/have_loose_foreign_key.rb6
-rw-r--r--spec/support/shared_examples/mailers/notify_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/migrations/add_work_item_widget_shared_examples.rb107
-rw-r--r--spec/support/shared_examples/models/concerns/linkable_items_shared_examples.rb11
-rw-r--r--spec/support/shared_examples/models/group_shared_examples.rb45
-rw-r--r--spec/support/shared_examples/models/members_notifications_shared_example.rb8
-rw-r--r--spec/support/shared_examples/models/users/pages_visits_shared_examples.rb27
-rw-r--r--spec/support/shared_examples/redis/redis_shared_examples.rb84
-rw-r--r--spec/support/shared_examples/requests/access_tokens_controller_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/requests/api/graphql/issue_list_shared_examples.rb33
-rw-r--r--spec/support/shared_examples/requests/api/graphql/work_item_list_shared_examples.rb98
-rw-r--r--spec/support/shared_examples/requests/api/ml/mlflow/mlflow_shared_examples.rb15
-rw-r--r--spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb69
-rw-r--r--spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb42
-rw-r--r--spec/support/shared_examples/requests/api_keyset_pagination_shared_examples.rb50
-rw-r--r--spec/support/shared_examples/requests/rack_attack_shared_examples.rb38
-rw-r--r--spec/support/shared_examples/services/incident_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/issuable/issuable_update_service_shared_examples.rb71
-rw-r--r--spec/support/shared_examples/services/issuable_links/destroyable_issuable_links_shared_examples.rb11
-rw-r--r--spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb193
-rw-r--r--spec/support/shared_examples/services/migrate_to_ghost_user_service_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/services/pages_size_limit_shared_examples.rb32
-rw-r--r--spec/support/shared_examples/services/protected_branches_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/services/users/build_service_shared_examples.rb51
-rw-r--r--spec/support/shared_examples/users/migrate_records_to_ghost_user_service_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/users/pages_visits_shared_examples.rb63
-rw-r--r--spec/support/shared_examples/workers/background_migration_worker_shared_examples.rb36
-rw-r--r--spec/support/shared_examples/workers/batched_background_migration_execution_worker_shared_example.rb21
-rw-r--r--spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb22
-rw-r--r--spec/support/sidekiq.rb2
-rw-r--r--spec/support_specs/capybara_wait_for_all_requests_spec.rb16
-rw-r--r--spec/support_specs/database/duplicate_indexes_spec.rb108
-rw-r--r--spec/support_specs/database/multiple_databases_helpers_spec.rb2
-rw-r--r--spec/support_specs/helpers/redis_commands/recorder_spec.rb6
-rw-r--r--spec/tasks/gitlab/audit_event_types/check_docs_task_spec.rb12
-rw-r--r--spec/tasks/gitlab/audit_event_types/compile_docs_task_spec.rb8
-rw-r--r--spec/tasks/gitlab/backup_rake_spec.rb24
-rw-r--r--spec/tasks/gitlab/ci_secure_files/check_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/ci_secure_files/migrate_rake_spec.rb4
-rw-r--r--spec/tasks/gitlab/container_registry_rake_spec.rb4
-rw-r--r--spec/tasks/gitlab/db/cells/bump_cell_sequences_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/db/decomposition/connection_status_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/db/lock_writes_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/db/migration_fix_15_11_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/db/validate_config_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/db_rake_spec.rb32
-rw-r--r--spec/tasks/gitlab/dependency_proxy/migrate_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/generate_sample_prometheus_data_rake_spec.rb34
-rw-r--r--spec/tasks/gitlab/gitaly_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/lfs/migrate_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/packages/migrate_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/password_rake_spec.rb4
-rw-r--r--spec/tasks/gitlab/refresh_project_statistics_build_artifacts_size_rake_spec.rb6
-rw-r--r--spec/tasks/gitlab/snippets_rake_spec.rb6
-rw-r--r--spec/tasks/gitlab/terraform/migrate_rake_spec.rb4
-rw-r--r--spec/tasks/gitlab/web_hook_rake_spec.rb8
-rw-r--r--spec/tasks/gitlab/workhorse_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/x509/update_rake_spec.rb2
-rw-r--r--spec/tasks/migrate/schema_check_rake_spec.rb2
-rw-r--r--spec/tooling/danger/clickhouse_spec.rb70
-rw-r--r--spec/tooling/danger/ignored_model_columns_spec.rb145
-rw-r--r--spec/tooling/fixtures/cleanup_conversion_migration.txt44
-rw-r--r--spec/tooling/fixtures/remove_column_migration.txt84
-rw-r--r--spec/tooling/fixtures/rename_column_migration.txt45
-rw-r--r--spec/uploaders/packages/nuget/symbol_uploader_spec.rb28
-rw-r--r--spec/views/admin/application_settings/general.html.haml_spec.rb1
-rw-r--r--spec/views/admin/identities/index.html.haml_spec.rb9
-rw-r--r--spec/views/devise/shared/_signin_box.html.haml_spec.rb2
-rw-r--r--spec/views/devise/shared/_signup_omniauth_provider_list_spec.rb50
-rw-r--r--spec/views/events/event/_push.html.haml_spec.rb13
-rw-r--r--spec/views/layouts/_page.html.haml_spec.rb38
-rw-r--r--spec/views/layouts/header/_super_sidebar_logged_out.html.haml_spec.rb4
-rw-r--r--spec/views/layouts/organization.html.haml_spec.rb59
-rw-r--r--spec/views/layouts/snippets.html.haml_spec.rb2
-rw-r--r--spec/views/projects/empty.html.haml_spec.rb2
-rw-r--r--spec/views/projects/issues/service_desk/_issue.html.haml_spec.rb2
-rw-r--r--spec/views/projects/pages/_pages_settings.html.haml_spec.rb30
-rw-r--r--spec/views/projects/pipeline_schedules/_pipeline_schedule.html.haml_spec.rb49
-rw-r--r--spec/views/registrations/welcome/show.html.haml_spec.rb1
-rw-r--r--spec/workers/bulk_import_worker_spec.rb169
-rw-r--r--spec/workers/bulk_imports/finish_project_import_worker_spec.rb28
-rw-r--r--spec/workers/bulk_imports/pipeline_batch_worker_spec.rb10
-rw-r--r--spec/workers/click_house/events_sync_worker_spec.rb145
-rw-r--r--spec/workers/concerns/gitlab/github_import/object_importer_spec.rb3
-rw-r--r--spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb2
-rw-r--r--spec/workers/concerns/gitlab/import/notify_upon_death_spec.rb (renamed from spec/workers/concerns/gitlab/notify_upon_death_spec.rb)10
-rw-r--r--spec/workers/concerns/limited_capacity/worker_spec.rb18
-rw-r--r--spec/workers/database/lock_tables_worker_spec.rb136
-rw-r--r--spec/workers/database/monitor_locked_tables_worker_spec.rb55
-rw-r--r--spec/workers/environments/stop_job_success_worker_spec.rb55
-rw-r--r--spec/workers/every_sidekiq_worker_spec.rb14
-rw-r--r--spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb115
-rw-r--r--spec/workers/gitlab/bitbucket_import/import_pull_request_worker_spec.rb9
-rw-r--r--spec/workers/gitlab/bitbucket_import/stage/finish_import_worker_spec.rb27
-rw-r--r--spec/workers/gitlab/bitbucket_import/stage/import_pull_requests_worker_spec.rb77
-rw-r--r--spec/workers/gitlab/bitbucket_import/stage/import_repository_worker_spec.rb21
-rw-r--r--spec/workers/gitlab/bitbucket_server_import/advance_stage_worker_spec.rb7
-rw-r--r--spec/workers/gitlab/bitbucket_server_import/import_pull_request_worker_spec.rb6
-rw-r--r--spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb8
-rw-r--r--spec/workers/gitlab/github_import/advance_stage_worker_spec.rb112
-rw-r--r--spec/workers/gitlab/jira_import/advance_stage_worker_spec.rb7
-rw-r--r--spec/workers/gitlab/jira_import/import_issue_worker_spec.rb2
-rw-r--r--spec/workers/incident_management/close_incident_worker_spec.rb2
-rw-r--r--spec/workers/incident_management/process_alert_worker_v2_spec.rb4
-rw-r--r--spec/workers/loose_foreign_keys/cleanup_worker_spec.rb62
-rw-r--r--spec/workers/merge_requests/ensure_prepared_worker_spec.rb59
-rw-r--r--spec/workers/metrics/global_metrics_update_worker_spec.rb30
-rw-r--r--spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb32
-rw-r--r--spec/workers/new_merge_request_worker_spec.rb28
-rw-r--r--spec/workers/new_note_worker_spec.rb2
-rw-r--r--spec/workers/pages/invalidate_domain_cache_worker_spec.rb267
-rw-r--r--spec/workers/personal_access_tokens/expiring_worker_spec.rb27
-rw-r--r--spec/workers/post_receive_spec.rb2
-rw-r--r--spec/workers/projects/record_target_platforms_worker_spec.rb2
-rw-r--r--spec/workers/users/migrate_records_to_ghost_user_in_batches_worker_spec.rb4
-rw-r--r--spec/workers/users/track_namespace_visits_worker_spec.rb27
1711 files changed, 40940 insertions, 25934 deletions
diff --git a/spec/components/pajamas/banner_component_spec.rb b/spec/components/pajamas/banner_component_spec.rb
index 6b99b4c1d76..c9d9a9176e8 100644
--- a/spec/components/pajamas/banner_component_spec.rb
+++ b/spec/components/pajamas/banner_component_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe Pajamas::BannerComponent, type: :component do
end
it 'renders a close button' do
- expect(page).to have_css "button.gl-banner-close"
+ expect(page).to have_css "button.gl-button.gl-banner-close"
end
describe 'button_text and button_link' do
diff --git a/spec/contracts/consumer/resources/graphql/pipelines.js b/spec/contracts/consumer/resources/graphql/pipelines.js
index 48724e15eb8..201045e011f 100644
--- a/spec/contracts/consumer/resources/graphql/pipelines.js
+++ b/spec/contracts/consumer/resources/graphql/pipelines.js
@@ -5,7 +5,7 @@ import { extractGraphQLQuery } from '../../helpers/graphql_query_extractor';
export async function getPipelineHeaderDataRequest(endpoint) {
const { url } = endpoint;
const query = await extractGraphQLQuery(
- 'app/assets/javascripts/pipelines/graphql/queries/get_pipeline_header_data.query.graphql',
+ 'app/assets/javascripts/ci/pipeline_details/header/graphql/queries/get_pipeline_header_data.query.graphql',
);
const graphqlQuery = {
query,
@@ -27,7 +27,7 @@ export async function getPipelineHeaderDataRequest(endpoint) {
export async function deletePipeline(endpoint) {
const { url } = endpoint;
const query = await extractGraphQLQuery(
- 'app/assets/javascripts/pipelines/graphql/mutations/delete_pipeline.mutation.graphql',
+ 'app/assets/javascripts/ci/pipeline_details/graphql/mutations/delete_pipeline.mutation.graphql',
);
const graphqlQuery = {
query,
diff --git a/spec/contracts/consumer/specs/project/pipelines/show.spec.js b/spec/contracts/consumer/specs/project/pipelines/show.spec.js
index 97ad9dbbc9d..d2743b1037f 100644
--- a/spec/contracts/consumer/specs/project/pipelines/show.spec.js
+++ b/spec/contracts/consumer/specs/project/pipelines/show.spec.js
@@ -27,7 +27,7 @@ pactWith(
describe(GET_PIPELINE_HEADER_DATA_PROVIDER_NAME, () => {
beforeEach(async () => {
const query = await extractGraphQLQuery(
- 'app/assets/javascripts/pipelines/graphql/queries/get_pipeline_header_data.query.graphql',
+ 'app/assets/javascripts/ci/pipeline_details/header/graphql/queries/get_pipeline_header_data.query.graphql',
);
const graphqlQuery = new GraphQLInteraction()
.given(PipelineHeaderData.scenario.state)
@@ -64,7 +64,7 @@ pactWith(
describe(DELETE_PIPELINE_PROVIDER_NAME, () => {
beforeEach(async () => {
const query = await extractGraphQLQuery(
- 'app/assets/javascripts/pipelines/graphql/mutations/delete_pipeline.mutation.graphql',
+ 'app/assets/javascripts/ci/pipeline_details/graphql/mutations/delete_pipeline.mutation.graphql',
);
const graphqlQuery = new GraphQLInteraction()
.given(DeletePipeline.scenario.state)
diff --git a/spec/controllers/activity_pub/projects/releases_controller_spec.rb b/spec/controllers/activity_pub/projects/releases_controller_spec.rb
new file mode 100644
index 00000000000..8719756b260
--- /dev/null
+++ b/spec/controllers/activity_pub/projects/releases_controller_spec.rb
@@ -0,0 +1,134 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ActivityPub::Projects::ReleasesController, feature_category: :groups_and_projects do
+ include AccessMatchersForController
+
+ let_it_be(:project) { create(:project, :repository, :public) }
+ let_it_be(:private_project) { create(:project, :repository, :private) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:release_1) { create(:release, project: project, released_at: Time.zone.parse('2018-10-18')) }
+ let_it_be(:release_2) { create(:release, project: project, released_at: Time.zone.parse('2019-10-19')) }
+
+ before_all do
+ project.add_developer(developer)
+ end
+
+ shared_examples 'common access controls' do
+ it 'renders a 200' do
+ get(action, params: params)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ context 'when the project is private' do
+ let(:project) { private_project }
+
+ context 'when user is not logged in' do
+ it 'renders a 404' do
+ get(action, params: params)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when user is a developer' do
+ before do
+ sign_in(developer)
+ end
+
+ it 'still renders a 404' do
+ get(action, params: params)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'when activity_pub feature flag is disabled' do
+ before do
+ stub_feature_flags(activity_pub: false)
+ end
+
+ it 'renders a 404' do
+ get(action, params: params)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when activity_pub_project feature flag is disabled' do
+ before do
+ stub_feature_flags(activity_pub_project: false)
+ end
+
+ it 'renders a 404' do
+ get(action, params: params)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ shared_examples_for 'ActivityPub response' do
+ it 'returns an application/activity+json content_type' do
+ expect(response.media_type).to eq 'application/activity+json'
+ end
+
+ it 'is formatted as an ActivityStream document' do
+ expect(json_response['@context']).to eq 'https://www.w3.org/ns/activitystreams'
+ end
+ end
+
+ describe 'GET #index' do
+ before do
+ get(action, params: params)
+ end
+
+ let(:action) { :index }
+ let(:params) { { namespace_id: project.namespace, project_id: project } }
+
+ it_behaves_like 'common access controls'
+ it_behaves_like 'ActivityPub response'
+
+ it "returns the project's releases actor profile data" do
+ expect(json_response['id']).to include project_releases_path(project)
+ end
+ end
+
+ describe 'GET #outbox' do
+ before do
+ get(action, params: params)
+ end
+
+ let(:action) { :outbox }
+ let(:params) { { namespace_id: project.namespace, project_id: project, page: page } }
+
+ context 'with no page parameter' do
+ let(:page) { nil }
+
+ it_behaves_like 'common access controls'
+ it_behaves_like 'ActivityPub response'
+
+ it "returns the project's releases collection index" do
+ expect(json_response['id']).to include outbox_project_releases_path(project)
+ expect(json_response['totalItems']).to eq 2
+ end
+ end
+
+ context 'with a page parameter' do
+ let(:page) { 1 }
+
+ it_behaves_like 'common access controls'
+ it_behaves_like 'ActivityPub response'
+
+ it "returns the project's releases list" do
+ expect(json_response['id']).to include outbox_project_releases_path(project, page: 1)
+
+ names = json_response['orderedItems'].map { |release| release['object']['name'] }
+ expect(names).to match_array([release_2.name, release_1.name])
+ end
+ end
+ end
+end
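The new spec above only asserts the content type, the '@context' value, the 'id', 'totalItems' for the collection index, and the 'orderedItems'/'object'/'name' nesting for a page. For orientation, a response shape consistent with those assertions might look like the following; the 'type' field and the URL are illustrative assumptions taken from the ActivityStreams vocabulary, not values checked by this spec.

    outbox_page = {
      '@context' => 'https://www.w3.org/ns/activitystreams',
      'type' => 'OrderedCollectionPage',  # assumed from the ActivityStreams vocabulary
      'id' => 'https://gitlab.example.com/group/project/-/releases/outbox?page=1',
      'orderedItems' => [
        { 'object' => { 'name' => 'v2.0' } },
        { 'object' => { 'name' => 'v1.0' } }
      ]
    }

    outbox_page['orderedItems'].map { |item| item['object']['name'] }
    # => ["v2.0", "v1.0"]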
diff --git a/spec/controllers/admin/jobs_controller_spec.rb b/spec/controllers/admin/jobs_controller_spec.rb
index 2d1482f40d4..c99bb6ff695 100644
--- a/spec/controllers/admin/jobs_controller_spec.rb
+++ b/spec/controllers/admin/jobs_controller_spec.rb
@@ -14,8 +14,6 @@ RSpec.describe Admin::JobsController do
get :index
expect(response).to have_gitlab_http_status(:ok)
- expect(assigns(:builds)).to be_a(Kaminari::PaginatableWithoutCount)
- expect(assigns(:builds).count).to be(1)
end
end
diff --git a/spec/controllers/admin/users_controller_spec.rb b/spec/controllers/admin/users_controller_spec.rb
index 399b7c02c52..f83b98d7a51 100644
--- a/spec/controllers/admin/users_controller_spec.rb
+++ b/spec/controllers/admin/users_controller_spec.rb
@@ -416,7 +416,7 @@ RSpec.describe Admin::UsersController do
context 'for an internal user' do
it 'does not deactivate the user' do
- internal_user = User.alert_bot
+ internal_user = Users::Internal.alert_bot
put :deactivate, params: { id: internal_user.username }
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index 58125f3a831..b7ee01ce6b3 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -865,33 +865,6 @@ RSpec.describe ApplicationController, feature_category: :shared do
end
end
- describe '#required_signup_info' do
- controller(described_class) do
- def index; end
- end
-
- let(:user) { create(:user) }
-
- context 'user with required role' do
- before do
- user.set_role_required!
- sign_in(user)
- get :index
- end
-
- it { is_expected.to redirect_to users_sign_up_welcome_path }
- end
-
- context 'user without a required role' do
- before do
- sign_in(user)
- get :index
- end
-
- it { is_expected.not_to redirect_to users_sign_up_welcome_path }
- end
- end
-
describe 'rescue_from Gitlab::Auth::IpBlocked' do
controller(described_class) do
skip_before_action :authenticate_user!
diff --git a/spec/controllers/concerns/onboarding/status_spec.rb b/spec/controllers/concerns/onboarding/status_spec.rb
index b14346dc052..fe7c5ac6346 100644
--- a/spec/controllers/concerns/onboarding/status_spec.rb
+++ b/spec/controllers/concerns/onboarding/status_spec.rb
@@ -75,32 +75,4 @@ RSpec.describe Onboarding::Status, feature_category: :onboarding do
it { is_expected.to eq(last_member_source) }
end
end
-
- describe '#invite_with_tasks_to_be_done?' do
- subject { described_class.new(nil, nil, user).invite_with_tasks_to_be_done? }
-
- context 'when there are tasks_to_be_done with one member' do
- let_it_be(:member) { create(:group_member, user: user, tasks_to_be_done: tasks_to_be_done) }
-
- it { is_expected.to eq(true) }
- end
-
- context 'when there are multiple members and the tasks_to_be_done is on only one of them' do
- before do
- create(:group_member, user: user, tasks_to_be_done: tasks_to_be_done)
- end
-
- it { is_expected.to eq(true) }
- end
-
- context 'when there are no tasks_to_be_done' do
- it { is_expected.to eq(false) }
- end
-
- context 'when there are no members' do
- let_it_be(:user) { build_stubbed(:user) }
-
- it { is_expected.to eq(false) }
- end
- end
end
diff --git a/spec/controllers/concerns/preferred_language_switcher_spec.rb b/spec/controllers/concerns/preferred_language_switcher_spec.rb
index 40d6ac10c37..4ceb6fa312e 100644
--- a/spec/controllers/concerns/preferred_language_switcher_spec.rb
+++ b/spec/controllers/concerns/preferred_language_switcher_spec.rb
@@ -13,13 +13,79 @@ RSpec.describe PreferredLanguageSwitcher, type: :controller do
end
end
+ subject { cookies[:preferred_language] }
+
context 'when first visit' do
+ let(:glm_source) { 'about.gitlab.com' }
+ let(:accept_language_header) { nil }
+
before do
- get :new
+ request.env['HTTP_ACCEPT_LANGUAGE'] = accept_language_header
+
+ get :new, params: { glm_source: glm_source }
end
it 'sets preferred_language to default' do
- expect(cookies[:preferred_language]).to eq Gitlab::CurrentSettings.default_preferred_language
+ expect(subject).to eq Gitlab::CurrentSettings.default_preferred_language
+ end
+
+ context 'when language param is valid' do
+ let(:glm_source) { 'about.gitlab.com/fr-fr/' }
+
+ it 'sets preferred_language accordingly' do
+ expect(subject).to eq 'fr'
+ end
+
+ context 'when language param is invalid' do
+ let(:glm_source) { 'about.gitlab.com/ko-ko/' }
+
+ it 'sets preferred_language to default' do
+ expect(subject).to eq Gitlab::CurrentSettings.default_preferred_language
+ end
+ end
+ end
+
+ context 'when browser preferred language is not english' do
+ context 'with selectable language' do
+ let(:accept_language_header) { 'zh-CN,zh;q=0.8,zh-TW;q=0.7' }
+
+ it 'sets preferred_language accordingly' do
+ expect(subject).to eq 'zh_CN'
+ end
+ end
+
+ context 'with unselectable language' do
+ let(:accept_language_header) { 'nl-NL;q=0.8' }
+
+ it 'sets preferred_language to default' do
+ expect(subject).to eq Gitlab::CurrentSettings.default_preferred_language
+ end
+ end
+
+ context 'with empty string in language header' do
+ let(:accept_language_header) { '' }
+
+ it 'sets preferred_language to default' do
+ expect(subject).to eq Gitlab::CurrentSettings.default_preferred_language
+ end
+ end
+
+ context 'with language header without dashes' do
+ let(:accept_language_header) { 'fr;q=8' }
+
+ it 'sets preferred_language accordingly' do
+ expect(subject).to eq 'fr'
+ end
+ end
+ end
+
+ context 'when language params and language header are both valid' do
+ let(:accept_language_header) { 'zh-CN,zh;q=0.8,zh-TW;q=0.7' }
+ let(:glm_source) { 'about.gitlab.com/fr-fr/' }
+
+ it 'sets preferred_language according to language params' do
+ expect(subject).to eq 'fr'
+ end
end
end
@@ -36,7 +102,7 @@ RSpec.describe PreferredLanguageSwitcher, type: :controller do
let(:user_preferred_language) { 'zh_CN' }
it 'keeps preferred language unchanged' do
- expect(cookies[:preferred_language]).to eq user_preferred_language
+ expect(subject).to eq user_preferred_language
end
end
@@ -44,7 +110,7 @@ RSpec.describe PreferredLanguageSwitcher, type: :controller do
let(:user_preferred_language) { 'xxx' }
it 'sets preferred_language to default' do
- expect(cookies[:preferred_language]).to eq Gitlab::CurrentSettings.default_preferred_language
+ expect(subject).to eq Gitlab::CurrentSettings.default_preferred_language
end
end
end
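The contexts added above cover Accept-Language negotiation: 'zh-CN,zh;q=0.8,zh-TW;q=0.7' selects 'zh_CN', an unselectable 'nl-NL' or an empty header falls back to the default, and a bare 'fr;q=8' still resolves to 'fr'. A minimal, self-contained sketch of that kind of negotiation is below; SELECTABLE is a stand-in list and the method is illustrative, not the concern's actual implementation.

    SELECTABLE = %w[en fr zh_CN de ja].freeze  # stand-in for the locales the app offers

    def preferred_locale_from(header, default: 'en')
      return default if header.to_s.strip.empty?

      candidates = header.split(',').map do |entry|
        tag, quality = entry.split(';q=')
        [tag.strip.tr('-', '_'), (quality || '1').to_f]
      end

      # Highest quality first; fall back to the bare language ('zh_TW' -> 'zh').
      candidates.sort_by { |_tag, quality| -quality }.each do |tag, _quality|
        return tag if SELECTABLE.include?(tag)

        base = tag.split('_').first
        match = SELECTABLE.find { |locale| locale.split('_').first == base }
        return match if match
      end

      default
    end

    preferred_locale_from('zh-CN,zh;q=0.8,zh-TW;q=0.7') # => "zh_CN"
    preferred_locale_from('nl-NL;q=0.8')                # => "en"
    preferred_locale_from('fr;q=8')                     # => "fr"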
diff --git a/spec/controllers/confirmations_controller_spec.rb b/spec/controllers/confirmations_controller_spec.rb
index fea43894f1c..cbe0ec5d126 100644
--- a/spec/controllers/confirmations_controller_spec.rb
+++ b/spec/controllers/confirmations_controller_spec.rb
@@ -19,17 +19,6 @@ RSpec.describe ConfirmationsController, feature_category: :system_access do
get :show, params: { confirmation_token: confirmation_token }
end
- context 'when signup info is required' do
- before do
- allow(controller).to receive(:current_user) { user }
- user.set_role_required!
- end
-
- it 'does not redirect' do
- expect(perform_request).not_to redirect_to(users_sign_up_welcome_path)
- end
- end
-
context 'user is already confirmed' do
before do
user.confirm
@@ -137,17 +126,6 @@ RSpec.describe ConfirmationsController, feature_category: :system_access do
stub_feature_flags(identity_verification: false)
end
- context 'when signup info is required' do
- before do
- allow(controller).to receive(:current_user) { user }
- user.set_role_required!
- end
-
- it 'does not redirect' do
- expect(perform_request).not_to redirect_to(users_sign_up_welcome_path)
- end
- end
-
context "when `email_confirmation_setting` is set to `soft`" do
before do
stub_application_setting_enum('email_confirmation_setting', 'soft')
diff --git a/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb b/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
index 3fb5e08f065..6bb791d2fd4 100644
--- a/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
+++ b/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
@@ -244,7 +244,7 @@ RSpec.describe Groups::DependencyProxyForContainersController, feature_category:
subject
send_data_type, send_data = workhorse_send_data
- header, url = send_data.values_at('Header', 'Url')
+ header, url = send_data.values_at('Headers', 'Url')
expect(send_data_type).to eq('send-dependency')
expect(header).to eq(
@@ -312,7 +312,7 @@ RSpec.describe Groups::DependencyProxyForContainersController, feature_category:
subject
send_data_type, send_data = workhorse_send_data
- header, url = send_data.values_at('Header', 'Url')
+ header, url = send_data.values_at('Headers', 'Url')
expect(send_data_type).to eq('send-dependency')
expect(header).to eq("Authorization" => ["Bearer abcd1234"])
diff --git a/spec/controllers/groups/labels_controller_spec.rb b/spec/controllers/groups/labels_controller_spec.rb
index f9f1fc21538..3dcf41941bb 100644
--- a/spec/controllers/groups/labels_controller_spec.rb
+++ b/spec/controllers/groups/labels_controller_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe Groups::LabelsController, feature_category: :team_planning do
- let_it_be(:group) { create(:group) }
+ let_it_be(:root_group) { create(:group) }
+ let_it_be(:group) { create(:group, parent: root_group) }
let_it_be(:user) { create(:user) }
let_it_be(:another_user) { create(:user) }
let_it_be(:project) { create(:project, namespace: group) }
@@ -162,5 +163,50 @@ RSpec.describe Groups::LabelsController, feature_category: :team_planning do
let(:group_request) { put :update, params: { group_id: group.to_param, id: label.to_param, label: { title: 'Test' } } }
let(:sub_group_request) { put :update, params: { group_id: sub_group.to_param, id: label.to_param, label: { title: 'Test' } } }
end
+
+ context 'when updating lock_on_merge' do
+ let_it_be(:params) { { lock_on_merge: true } }
+ let_it_be_with_reload(:label) { create(:group_label, group: group) }
+
+ subject(:update_request) { put :update, params: { group_id: group.to_param, id: label.to_param, label: params } }
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(enforce_locked_labels_on_merge: false)
+ end
+
+ it 'does not allow setting lock_on_merge' do
+ update_request
+
+ expect(response).to redirect_to(group_labels_path)
+ expect(label.reload.lock_on_merge).to be_falsey
+ end
+ end
+
+ shared_examples 'allows setting lock_on_merge' do
+ it do
+ update_request
+
+ expect(response).to redirect_to(group_labels_path)
+ expect(label.reload.lock_on_merge).to be_truthy
+ end
+ end
+
+ context 'when feature flag for group is enabled' do
+ before do
+ stub_feature_flags(enforce_locked_labels_on_merge: group)
+ end
+
+ it_behaves_like 'allows setting lock_on_merge'
+ end
+
+ context 'when feature flag for ancestor group is enabled' do
+ before do
+ stub_feature_flags(enforce_locked_labels_on_merge: root_group)
+ end
+
+ it_behaves_like 'allows setting lock_on_merge'
+ end
+ end
end
end
diff --git a/spec/controllers/groups/runners_controller_spec.rb b/spec/controllers/groups/runners_controller_spec.rb
index 37242bce6bf..a4e55a89f41 100644
--- a/spec/controllers/groups/runners_controller_spec.rb
+++ b/spec/controllers/groups/runners_controller_spec.rb
@@ -3,65 +3,88 @@
require 'spec_helper'
RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do
- let_it_be(:user) { create(:user) }
- let_it_be(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:namespace_settings) { create(:namespace_settings, runner_registration_enabled: true) }
+ let_it_be(:group) { create(:group, namespace_settings: namespace_settings) }
let_it_be(:project) { create(:project, group: group) }
let_it_be(:runner) { create(:ci_runner, :group, groups: [group]) }
let!(:project_runner) { create(:ci_runner, :project, projects: [project]) }
let!(:instance_runner) { create(:ci_runner, :instance) }
- let(:params_runner_project) { { group_id: group, id: project_runner } }
- let(:params_runner_instance) { { group_id: group, id: instance_runner } }
- let(:params) { { group_id: group, id: runner } }
-
before do
sign_in(user)
end
describe '#index', :snowplow do
- context 'when user is owner' do
- before do
- group.add_owner(user)
- end
+ subject(:execute_get_request) { get :index, params: { group_id: group } }
- it 'renders show with 200 status code' do
- get :index, params: { group_id: group }
+ shared_examples 'can access the page' do
+ it 'renders index with 200 status code' do
+ execute_get_request
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:index)
end
it 'tracks the event' do
- get :index, params: { group_id: group }
+ execute_get_request
expect_snowplow_event(category: described_class.name, action: 'index', user: user, namespace: group)
end
+ end
+
+ shared_examples 'cannot access the page' do
+ it 'renders 404' do
+ execute_get_request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
- it 'assigns variables' do
- get :index, params: { group_id: group }
+ it 'does not track the event' do
+ execute_get_request
- expect(assigns(:group_new_runner_path)).to eq(new_group_runner_path(group))
+ expect_no_snowplow_event
end
end
- context 'when user is not owner' do
+ context 'when the user is a maintainer' do
before do
group.add_maintainer(user)
end
- it 'renders a 404' do
- get :index, params: { group_id: group }
+ include_examples 'can access the page'
- expect(response).to have_gitlab_http_status(:not_found)
+ it 'does not expose runner creation and registration variables' do
+ execute_get_request
+
+ expect(assigns(:group_runner_registration_token)).to be_nil
+ expect(assigns(:group_new_runner_path)).to be_nil
end
+ end
- it 'does not track the event' do
- get :index, params: { group_id: group }
+ context 'when the user is an owner' do
+ before do
+ group.add_owner(user)
+ end
- expect_no_snowplow_event
+ include_examples 'can access the page'
+
+ it 'exposes runner creation and registration variables' do
+ execute_get_request
+
+ expect(assigns(:group_runner_registration_token)).not_to be_nil
+ expect(assigns(:group_new_runner_path)).to eq(new_group_runner_path(group))
end
end
+
+ context 'when user is not maintainer' do
+ before do
+ group.add_developer(user)
+ end
+
+ include_examples 'cannot access the page'
+ end
end
describe '#new' do
@@ -139,9 +162,9 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do
end
describe '#show' do
- context 'when user is owner' do
+ context 'when user is maintainer' do
before do
- group.add_owner(user)
+ group.add_maintainer(user)
end
it 'renders show with 200 status code' do
@@ -166,9 +189,9 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do
end
end
- context 'when user is not owner' do
+ context 'when user is not maintainer' do
before do
- group.add_maintainer(user)
+ group.add_developer(user)
end
it 'renders a 404' do
@@ -197,20 +220,26 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do
group.add_owner(user)
end
- it 'renders edit with 200 status code' do
+ it 'renders 200 for group runner' do
get :edit, params: { group_id: group, id: runner }
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:edit)
end
- it 'renders a 404 instance runner' do
+ it 'renders 404 for non-existing runner' do
+ get :edit, params: { group_id: group, id: non_existing_record_id }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'renders 404 for instance runner' do
get :edit, params: { group_id: group, id: instance_runner }
expect(response).to have_gitlab_http_status(:not_found)
end
- it 'renders edit with 200 status code project runner' do
+ it 'renders 200 for project runner' do
get :edit, params: { group_id: group, id: project_runner }
expect(response).to have_gitlab_http_status(:ok)
@@ -218,18 +247,49 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do
end
end
- context 'when user is not owner' do
+ context 'when user is maintainer' do
before do
group.add_maintainer(user)
end
- it 'renders a 404' do
+ it 'renders 404 for group runner' do
get :edit, params: { group_id: group, id: runner }
expect(response).to have_gitlab_http_status(:not_found)
end
- it 'renders a 404 project runner' do
+ it 'renders 404 for instance runner' do
+ get :edit, params: { group_id: group, id: instance_runner }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'renders 200 for project runner' do
+ get :edit, params: { group_id: group, id: project_runner }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:edit)
+ end
+ end
+
+ context 'when user is not maintainer' do
+ before do
+ group.add_developer(user)
+ end
+
+ it 'renders 404 for group runner' do
+ get :edit, params: { group_id: group, id: runner }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'renders 404 for instance runner' do
+ get :edit, params: { group_id: group, id: instance_runner }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'renders 404 for project runner' do
get :edit, params: { group_id: group, id: project_runner }
expect(response).to have_gitlab_http_status(:not_found)
@@ -238,83 +298,105 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do
end
describe '#update' do
- let!(:runner) { create(:ci_runner, :group, groups: [group]) }
-
- context 'when user is an owner' do
- before do
- group.add_owner(user)
- end
+ let!(:group_runner) { create(:ci_runner, :group, groups: [group]) }
+ shared_examples 'updates the runner' do
it 'updates the runner, ticks the queue, and redirects' do
new_desc = runner.description.swapcase
expect do
- post :update, params: params.merge(runner: { description: new_desc })
+ post :update, params: { group_id: group, id: runner, runner: { description: new_desc } }
+ runner.reload
end.to change { runner.ensure_runner_queue_value }
expect(response).to have_gitlab_http_status(:found)
expect(runner.reload.description).to eq(new_desc)
end
+ end
- it 'does not update the instance runner' do
- new_desc = instance_runner.description.swapcase
+ shared_examples 'rejects the update' do
+ it 'does not update the runner' do
+ new_desc = runner.description.swapcase
expect do
- post :update, params: params_runner_instance.merge(runner: { description: new_desc })
- end.to not_change { instance_runner.ensure_runner_queue_value }
- .and not_change { instance_runner.description }
+ post :update, params: { group_id: group, id: runner, runner: { description: new_desc } }
+ runner.reload
+ end.to not_change { runner.ensure_runner_queue_value }
+ .and not_change { runner.description }
expect(response).to have_gitlab_http_status(:not_found)
end
+ end
+
+ context 'when user is owner' do
+ before do
+ group.add_owner(user)
+ end
- it 'updates the project runner, ticks the queue, and redirects project runner' do
- new_desc = project_runner.description.swapcase
+ context 'with group runner' do
+ let(:runner) { group_runner }
- expect do
- post :update, params: params_runner_project.merge(runner: { description: new_desc })
- end.to change { project_runner.ensure_runner_queue_value }
+ it_behaves_like 'updates the runner'
+ end
- expect(response).to have_gitlab_http_status(:found)
- expect(project_runner.reload.description).to eq(new_desc)
+ context 'with instance runner' do
+ let(:runner) { instance_runner }
+
+ it_behaves_like 'rejects the update'
+ end
+
+ context 'with project runner' do
+ let(:runner) { project_runner }
+
+ it_behaves_like 'updates the runner'
end
end
- context 'when user is not an owner' do
+ context 'when user is maintainer' do
before do
group.add_maintainer(user)
end
- it 'rejects the update and responds 404' do
- old_desc = runner.description
+ context 'with group runner' do
+ let(:runner) { group_runner }
- expect do
- post :update, params: params.merge(runner: { description: old_desc.swapcase })
- end.not_to change { runner.ensure_runner_queue_value }
+ it_behaves_like 'rejects the update'
+ end
- expect(response).to have_gitlab_http_status(:not_found)
- expect(runner.reload.description).to eq(old_desc)
+ context 'with instance runner' do
+ let(:runner) { instance_runner }
+
+ it_behaves_like 'rejects the update'
end
- it 'rejects the update and responds 404 instance runner' do
- old_desc = instance_runner.description
+ context 'with project runner' do
+ let(:runner) { project_runner }
- expect do
- post :update, params: params_runner_instance.merge(runner: { description: old_desc.swapcase })
- end.not_to change { instance_runner.ensure_runner_queue_value }
+ it_behaves_like 'updates the runner'
+ end
+ end
- expect(response).to have_gitlab_http_status(:not_found)
- expect(instance_runner.reload.description).to eq(old_desc)
+ context 'when user is not maintainer' do
+ before do
+ group.add_developer(user)
end
- it 'rejects the update and responds 404 project runner' do
- old_desc = project_runner.description
+ context 'with group runner' do
+ let(:runner) { group_runner }
- expect do
- post :update, params: params_runner_project.merge(runner: { description: old_desc.swapcase })
- end.not_to change { project_runner.ensure_runner_queue_value }
+ it_behaves_like 'rejects the update'
+ end
- expect(response).to have_gitlab_http_status(:not_found)
- expect(project_runner.reload.description).to eq(old_desc)
+ context 'with instance runner' do
+ let(:runner) { instance_runner }
+
+ it_behaves_like 'rejects the update'
+ end
+
+ context 'with project runner' do
+ let(:runner) { project_runner }
+
+ it_behaves_like 'rejects the update'
end
end
end
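Taken together, the changes in this file move group runner pages from owner-only to a graded model: maintainers can now view the index and individual runners and edit project runners, owners additionally edit group runners and see the registration token and creation path, developers get 404s throughout, and instance runners cannot be edited or updated through the group at any role. A compact restatement as data (illustrative only, not an API the controller uses):

    GROUP_RUNNER_ACCESS = {
      owner:      { index: :ok,        show: :ok,        edit_group_runner: :ok,        edit_project_runner: :ok },
      maintainer: { index: :ok,        show: :ok,        edit_group_runner: :not_found, edit_project_runner: :ok },
      developer:  { index: :not_found, show: :not_found, edit_group_runner: :not_found, edit_project_runner: :not_found }
    }.freeze

    GROUP_RUNNER_ACCESS[:maintainer][:edit_group_runner] # => :not_found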
diff --git a/spec/controllers/groups/uploads_controller_spec.rb b/spec/controllers/groups/uploads_controller_spec.rb
index 7795fff5541..94bb9c9aa02 100644
--- a/spec/controllers/groups/uploads_controller_spec.rb
+++ b/spec/controllers/groups/uploads_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::UploadsController do
+RSpec.describe Groups::UploadsController, feature_category: :portfolio_management do
include WorkhorseHelpers
let(:model) { create(:group, :public) }
diff --git a/spec/controllers/invites_controller_spec.rb b/spec/controllers/invites_controller_spec.rb
index f3b21e191c4..b3b7753df61 100644
--- a/spec/controllers/invites_controller_spec.rb
+++ b/spec/controllers/invites_controller_spec.rb
@@ -192,26 +192,6 @@ RSpec.describe InvitesController do
expect(session[:invite_email]).to eq(member.invite_email)
end
- context 'with stored location for user' do
- it 'stores the correct path for user' do
- request
-
- expect(controller.stored_location_for(:user)).to eq(activity_project_path(member.source))
- end
-
- context 'with relative root' do
- before do
- stub_default_url_options(script_name: '/gitlab')
- end
-
- it 'stores the correct path for user' do
- request
-
- expect(controller.stored_location_for(:user)).to eq(activity_project_path(member.source))
- end
- end
- end
-
context 'when it is part of our invite email experiment' do
let(:extra_params) { { invite_type: 'initial_email' } }
diff --git a/spec/controllers/oauth/applications_controller_spec.rb b/spec/controllers/oauth/applications_controller_spec.rb
index 5b9fd192ad4..44deeb6c47e 100644
--- a/spec/controllers/oauth/applications_controller_spec.rb
+++ b/spec/controllers/oauth/applications_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Oauth::ApplicationsController do
+RSpec.describe Oauth::ApplicationsController, feature_category: :system_access do
let(:user) { create(:user) }
let(:application) { create(:oauth_application, owner: user) }
@@ -86,10 +86,10 @@ RSpec.describe Oauth::ApplicationsController do
it_behaves_like 'redirects to login page when the user is not signed in'
it_behaves_like 'redirects to 2fa setup page when the user requires it'
- it 'returns the secret in json format' do
+ it 'returns the prefixed secret in json format' do
subject
- expect(json_response['secret']).not_to be_nil
+ expect(json_response['secret']).to match(/gloas-\h{64}/)
end
context 'when renew fails' do
@@ -153,6 +153,15 @@ RSpec.describe Oauth::ApplicationsController do
expect(response).to render_template :show
end
+ context 'the secret' do
+ render_views
+
+ it 'is in the response' do
+ subject
+ expect(response.body).to match(/gloas-\h{64}/)
+ end
+ end
+
it 'redirects back to profile page if OAuth applications are disabled' do
disable_user_oauth
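The expectations added above pin the application secret to a 'gloas-' prefix followed by 64 hexadecimal characters. A value satisfying that format can be fabricated like this (sketch only; SecureRandom is used here just to build an example and says nothing about how the real secret is generated):

    require 'securerandom'

    secret = "gloas-#{SecureRandom.hex(32)}"  # 32 random bytes -> 64 hex characters
    secret.match?(/\Agloas-\h{64}\z/)         # => true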
diff --git a/spec/controllers/oauth/authorizations_controller_spec.rb b/spec/controllers/oauth/authorizations_controller_spec.rb
index 4772c3f3487..cfb512afc91 100644
--- a/spec/controllers/oauth/authorizations_controller_spec.rb
+++ b/spec/controllers/oauth/authorizations_controller_spec.rb
@@ -5,9 +5,15 @@ require 'spec_helper'
RSpec.describe Oauth::AuthorizationsController do
let(:user) { create(:user) }
let(:application_scopes) { 'api read_user' }
+ let(:confidential) { true }
let!(:application) do
- create(:oauth_application, scopes: application_scopes, redirect_uri: 'http://example.com')
+ create(
+ :oauth_application,
+ scopes: application_scopes,
+ redirect_uri: 'http://example.com',
+ confidential: confidential
+ )
end
let(:params) do
@@ -68,12 +74,27 @@ RSpec.describe Oauth::AuthorizationsController do
create(:oauth_access_token, application: application, resource_owner_id: user.id, scopes: scopes)
end
- it 'authorizes the request and shows the user a page that redirects' do
- subject
+ context 'when application is confidential' do
+ let(:confidential) { true }
- expect(request.session['user_return_to']).to be_nil
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to render_template('doorkeeper/authorizations/redirect')
+ it 'authorizes the request and shows the user a page that redirects' do
+ subject
+
+ expect(request.session['user_return_to']).to be_nil
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template('doorkeeper/authorizations/redirect')
+ end
+ end
+
+ context 'when application is not confidential' do
+ let(:confidential) { false }
+
+ it 'returns 200 code and renders view' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template('doorkeeper/authorizations/new')
+ end
end
end
diff --git a/spec/controllers/profiles/notifications_controller_spec.rb b/spec/controllers/profiles/notifications_controller_spec.rb
index 36ec36fb6f1..22c0a62a6a1 100644
--- a/spec/controllers/profiles/notifications_controller_spec.rb
+++ b/spec/controllers/profiles/notifications_controller_spec.rb
@@ -149,11 +149,10 @@ RSpec.describe Profiles::NotificationsController do
it 'updates only permitted attributes' do
sign_in(user)
- put :update, params: { user: { notification_email: 'new@example.com', email_opted_in: true, notified_of_own_activity: true, admin: true } }
+ put :update, params: { user: { notification_email: 'new@example.com', notified_of_own_activity: true, admin: true } }
user.reload
expect(user.notification_email).to eq('new@example.com')
- expect(user.email_opted_in).to eq(true)
expect(user.notified_of_own_activity).to eq(true)
expect(user.admin).to eq(false)
expect(controller).to set_flash[:notice].to('Notification settings saved')
diff --git a/spec/controllers/profiles/personal_access_tokens_controller_spec.rb b/spec/controllers/profiles/personal_access_tokens_controller_spec.rb
index 044ce8f397a..14f3f5c23cd 100644
--- a/spec/controllers/profiles/personal_access_tokens_controller_spec.rb
+++ b/spec/controllers/profiles/personal_access_tokens_controller_spec.rb
@@ -73,14 +73,14 @@ RSpec.describe Profiles::PersonalAccessTokensController do
get :index
end
- it "only includes details of the active personal access token" do
+ it "only includes details of active personal access tokens" do
active_personal_access_tokens_detail =
::PersonalAccessTokenSerializer.new.represent([active_personal_access_token])
expect(assigns(:active_access_tokens).to_json).to eq(active_personal_access_tokens_detail.to_json)
end
- it "sets PAT name and scopes" do
+ it "builds a PAT with name and scopes from params" do
name = 'My PAT'
scopes = 'api,read_user'
@@ -105,5 +105,57 @@ RSpec.describe Profiles::PersonalAccessTokensController do
expect(json_response.count).to eq(1)
end
+
+ it 'sets available scopes' do
+ expect(assigns(:scopes)).to eq(Gitlab::Auth.available_scopes_for(access_token_user))
+ end
+
+ context 'with feature flag k8s_proxy_pat disabled' do
+ before do
+ stub_feature_flags(k8s_proxy_pat: false)
+ # Impersonation and inactive personal tokens are ignored
+ create(:personal_access_token, :impersonation, user: access_token_user)
+ create(:personal_access_token, :revoked, user: access_token_user)
+ get :index
+ end
+
+ it "only includes details of active personal access tokens" do
+ active_personal_access_tokens_detail =
+ ::PersonalAccessTokenSerializer.new.represent([active_personal_access_token])
+
+ expect(assigns(:active_access_tokens).to_json).to eq(active_personal_access_tokens_detail.to_json)
+ end
+
+ it "builds a PAT with name and scopes from params" do
+ name = 'My PAT'
+ scopes = 'api,read_user'
+
+ get :index, params: { name: name, scopes: scopes }
+
+ expect(assigns(:personal_access_token)).to have_attributes(
+ name: eq(name),
+ scopes: contain_exactly(:api, :read_user)
+ )
+ end
+
+ it 'returns 404 when personal access tokens are disabled' do
+ allow(::Gitlab::CurrentSettings).to receive_messages(personal_access_tokens_disabled?: true)
+
+ get :index
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns tokens for json format' do
+ get :index, params: { format: :json }
+
+ expect(json_response.count).to eq(1)
+ end
+
+ it 'sets available scopes' do
+ expect(assigns(:scopes))
+ .to eq(Gitlab::Auth.available_scopes_for(access_token_user) - [Gitlab::Auth::K8S_PROXY_SCOPE])
+ end
+ end
end
end
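The two scope assertions above reduce to one rule: the Kubernetes proxy scope is offered only while the k8s_proxy_pat flag is enabled. As a stand-alone sketch with stand-in scope names (not Gitlab::Auth's real list):

    ALL_SCOPES = %i[api read_user k8s_proxy].freeze

    def available_scopes(k8s_proxy_enabled:)
      k8s_proxy_enabled ? ALL_SCOPES : ALL_SCOPES - [:k8s_proxy]
    end

    available_scopes(k8s_proxy_enabled: true)  # => [:api, :read_user, :k8s_proxy]
    available_scopes(k8s_proxy_enabled: false) # => [:api, :read_user]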
diff --git a/spec/controllers/profiles/preferences_controller_spec.rb b/spec/controllers/profiles/preferences_controller_spec.rb
index b4ffe0bc844..aaf169cd42b 100644
--- a/spec/controllers/profiles/preferences_controller_spec.rb
+++ b/spec/controllers/profiles/preferences_controller_spec.rb
@@ -54,6 +54,7 @@ RSpec.describe Profiles::PreferencesController do
preferred_language: 'jp',
tab_width: '5',
project_shortcut_buttons: 'true',
+ keyboard_shortcuts_enabled: 'true',
render_whitespace_in_code: 'true'
}.with_indifferent_access
diff --git a/spec/controllers/profiles_controller_spec.rb b/spec/controllers/profiles_controller_spec.rb
index b1c43a33386..2bcb47f97ab 100644
--- a/spec/controllers/profiles_controller_spec.rb
+++ b/spec/controllers/profiles_controller_spec.rb
@@ -193,20 +193,6 @@ RSpec.describe ProfilesController, :request_store do
.to raise_error(ActionController::ParameterMissing)
end
- context 'with legacy storage' do
- it 'moves dependent projects to new namespace' do
- project = create(:project_empty_repo, :legacy_storage, namespace: namespace)
-
- put :update_username,
- params: { user: { username: new_username } }
-
- user.reload
-
- expect(response).to have_gitlab_http_status(:found)
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_username}/#{project.path}.git")).to be_truthy
- end
- end
-
context 'with hashed storage' do
it 'keeps repository location unchanged on disk' do
project = create(:project_empty_repo, namespace: namespace)
diff --git a/spec/controllers/projects/alerting/notifications_controller_spec.rb b/spec/controllers/projects/alerting/notifications_controller_spec.rb
index 5ce2950f95f..6a8c57e4abd 100644
--- a/spec/controllers/projects/alerting/notifications_controller_spec.rb
+++ b/spec/controllers/projects/alerting/notifications_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::Alerting::NotificationsController do
+RSpec.describe Projects::Alerting::NotificationsController, feature_category: :incident_management do
include HttpBasicAuthHelpers
let_it_be(:project) { create(:project) }
@@ -68,6 +68,31 @@ RSpec.describe Projects::Alerting::NotificationsController do
make_request
end
+ context 'with a corresponding project_alerting_setting' do
+ let_it_be_with_reload(:setting) { create(:project_alerting_setting, :with_http_integration, project: project) }
+ let_it_be_with_reload(:integration) { project.alert_management_http_integrations.last! }
+
+ context 'and a migrated or synced HTTP integration' do
+ it 'extracts and finds the integration' do
+ expect(notify_service).to receive(:execute).with('some token', integration)
+
+ make_request
+ end
+ end
+
+ context 'and no migrated or synced HTTP integration' do
+ before do
+ integration.destroy!
+ end
+
+ it 'does not find an integration' do
+ expect(notify_service).to receive(:execute).with('some token', nil)
+
+ make_request
+ end
+ end
+ end
+
context 'with a corresponding integration' do
context 'with integration parameters specified' do
let_it_be_with_reload(:integration) { create(:alert_management_http_integration, project: project) }
diff --git a/spec/controllers/projects/environments/sample_metrics_controller_spec.rb b/spec/controllers/projects/environments/sample_metrics_controller_spec.rb
deleted file mode 100644
index b266c569edd..00000000000
--- a/spec/controllers/projects/environments/sample_metrics_controller_spec.rb
+++ /dev/null
@@ -1,55 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::Environments::SampleMetricsController do
- include StubENV
-
- let_it_be(:project) { create(:project) }
- let_it_be(:environment) { create(:environment, project: project) }
- let_it_be(:user) { create(:user) }
-
- before do
- project.add_reporter(user)
- sign_in(user)
- end
-
- describe 'GET #query' do
- context 'when the file is not found' do
- before do
- get :query, params: environment_params
- end
-
- it 'returns a 404' do
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when the sample data is found' do
- before do
- allow_next_instance_of(Metrics::SampleMetricsService) do |service|
- allow(service).to receive(:query).and_return([])
- end
- get :query, params: environment_params
- end
-
- it 'returns JSON with a message and a 200 status code' do
- expect(json_response.keys).to contain_exactly('status', 'data')
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
- end
-
- private
-
- def environment_params(params = {})
- {
- id: environment.id.to_s,
- namespace_id: project.namespace.full_path,
- project_id: project.path,
- identifier: 'sample_metric_query_result',
- start: '2019-12-02T23:31:45.000Z',
- end: '2019-12-03T00:01:45.000Z'
- }.merge(params)
- end
-end
diff --git a/spec/controllers/projects/environments_controller_spec.rb b/spec/controllers/projects/environments_controller_spec.rb
index 4b091e9221e..c421aee88f8 100644
--- a/spec/controllers/projects/environments_controller_spec.rb
+++ b/spec/controllers/projects/environments_controller_spec.rb
@@ -398,7 +398,7 @@ RSpec.describe Projects::EnvironmentsController, feature_category: :continuous_d
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq(
{ 'redirect_url' =>
- project_pipeline_url(project, action.pipeline_id) })
+ project_job_url(project, action) })
end
it 'returns environment url for multiple stop actions' do
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index 7b576533ae5..d4f04105605 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -120,11 +120,11 @@ RSpec.describe Projects::IssuesController, :request_store, feature_category: :te
allow(Kaminari.config).to receive(:default_per_page).and_return(1)
end
- it 'redirects to last page when out of bounds on non-html requests' do
+ it 'does not redirect when out of bounds on non-html requests' do
get :index, params: params.merge(page: last_page + 1), format: 'atom'
- expect(response).to have_gitlab_http_status(:redirect)
- expect(response).to redirect_to(action: 'index', format: 'atom', page: last_page, state: 'opened')
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(assigns(:issues).size).to eq(0)
end
end
@@ -1725,7 +1725,7 @@ RSpec.describe Projects::IssuesController, :request_store, feature_category: :te
describe 'GET service_desk' do
let_it_be(:project) { create(:project_empty_repo, :public) }
- let_it_be(:support_bot) { User.support_bot }
+ let_it_be(:support_bot) { Users::Internal.support_bot }
let_it_be(:other_user) { create(:user) }
let_it_be(:service_desk_issue_1) { create(:issue, project: project, author: support_bot) }
let_it_be(:service_desk_issue_2) { create(:issue, project: project, author: support_bot, assignees: [other_user]) }
@@ -1756,7 +1756,7 @@ RSpec.describe Projects::IssuesController, :request_store, feature_category: :te
it 'allows an assignee to be specified by id' do
get_service_desk(assignee_id: other_user.id)
- expect(assigns(:users)).to contain_exactly(other_user, support_bot)
+ expect(assigns(:issues)).to contain_exactly(service_desk_issue_2)
end
end
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index ede26ebd032..9851153bd39 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -47,7 +47,6 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state, featu
it 'has only pending builds' do
expect(response).to have_gitlab_http_status(:ok)
- expect(assigns(:builds).first.status).to eq('pending')
end
end
@@ -60,7 +59,6 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state, featu
it 'has only running jobs' do
expect(response).to have_gitlab_http_status(:ok)
- expect(assigns(:builds).first.status).to eq('running')
end
end
@@ -73,7 +71,6 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state, featu
it 'has only finished jobs' do
expect(response).to have_gitlab_http_status(:ok)
- expect(assigns(:builds).first.status).to eq('success')
end
end
@@ -89,7 +86,6 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state, featu
it 'redirects to the page' do
expect(response).to have_gitlab_http_status(:ok)
- expect(assigns(:builds).current_page).to eq(last_page)
end
end
end
@@ -156,6 +152,18 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state, featu
expect(response).to have_gitlab_http_status(:not_found)
end
end
+
+ context 'when the job is a bridge' do
+ let!(:downstream_pipeline) { create(:ci_pipeline, child_of: pipeline) }
+ let(:job) { downstream_pipeline.source_job }
+
+ it 'redirects to the downstream pipeline page' do
+ get_show(id: job.id)
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(namespace_project_pipeline_path(id: downstream_pipeline.id))
+ end
+ end
end
context 'when requesting JSON' do
diff --git a/spec/controllers/projects/labels_controller_spec.rb b/spec/controllers/projects/labels_controller_spec.rb
index 74c16621fc5..db8cac8bb4a 100644
--- a/spec/controllers/projects/labels_controller_spec.rb
+++ b/spec/controllers/projects/labels_controller_spec.rb
@@ -297,6 +297,53 @@ RSpec.describe Projects::LabelsController, feature_category: :team_planning do
end
end
+ describe 'PUT #update' do
+ context 'when updating lock_on_merge' do
+ let_it_be(:params) { { lock_on_merge: true } }
+ let_it_be_with_reload(:label) { create(:label, project: project) }
+
+ subject(:update_request) { put :update, params: { namespace_id: project.namespace, project_id: project, id: label.to_param, label: params } }
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(enforce_locked_labels_on_merge: false)
+ end
+
+ it 'does not allow setting lock_on_merge' do
+ update_request
+
+ expect(response).to redirect_to(namespace_project_labels_path)
+ expect(label.reload.lock_on_merge).to be_falsey
+ end
+ end
+
+ shared_examples 'allows setting lock_on_merge' do
+ it do
+ update_request
+
+ expect(response).to redirect_to(namespace_project_labels_path)
+ expect(label.reload.lock_on_merge).to be_truthy
+ end
+ end
+
+ context 'when feature flag is enabled' do
+ before do
+ stub_feature_flags(enforce_locked_labels_on_merge: project)
+ end
+
+ it_behaves_like 'allows setting lock_on_merge'
+ end
+
+ context 'when feature flag for ancestor group is enabled' do
+ before do
+ stub_feature_flags(enforce_locked_labels_on_merge: group)
+ end
+
+ it_behaves_like 'allows setting lock_on_merge'
+ end
+ end
+ end
+
describe 'DELETE #destroy' do
context 'when current user has ability to destroy the label' do
before do
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index 0e3e3f31783..a47bb98770c 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -68,72 +68,6 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :code_review
end
end
- context 'when add_prepared_state_to_mr feature flag on' do
- before do
- stub_feature_flags(add_prepared_state_to_mr: true)
- end
-
- context 'when the merge request is not prepared' do
- before do
- merge_request.update!(prepared_at: nil, created_at: 10.minutes.ago)
- end
-
- it 'prepares the merge request' do
- expect(NewMergeRequestWorker).to receive(:perform_async)
-
- go
- end
-
- context 'when the merge request was created less than 5 minutes ago' do
- it 'does not prepare the merge request again' do
- travel_to(4.minutes.from_now) do
- merge_request.update!(created_at: Time.current - 4.minutes)
-
- expect(NewMergeRequestWorker).not_to receive(:perform_async)
-
- go
- end
- end
- end
-
- context 'when the merge request was created 5 minutes ago' do
- it 'prepares the merge request' do
- travel_to(6.minutes.from_now) do
- merge_request.update!(created_at: Time.current - 6.minutes)
-
- expect(NewMergeRequestWorker).to receive(:perform_async)
-
- go
- end
- end
- end
- end
-
- context 'when the merge request is prepared' do
- before do
- merge_request.update!(prepared_at: Time.current, created_at: 10.minutes.ago)
- end
-
- it 'prepares the merge request' do
- expect(NewMergeRequestWorker).not_to receive(:perform_async)
-
- go
- end
- end
- end
-
- context 'when add_prepared_state_to_mr feature flag is off' do
- before do
- stub_feature_flags(add_prepared_state_to_mr: false)
- end
-
- it 'does not prepare the merge request again' do
- expect(NewMergeRequestWorker).not_to receive(:perform_async)
-
- go
- end
- end
-
describe 'as html' do
it 'sets the endpoint_metadata_url' do
go
diff --git a/spec/controllers/projects/notes_controller_spec.rb b/spec/controllers/projects/notes_controller_spec.rb
index 500fab471ef..35aa01cdfad 100644
--- a/spec/controllers/projects/notes_controller_spec.rb
+++ b/spec/controllers/projects/notes_controller_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe Projects::NotesController, type: :controller, feature_category: :
request.headers['X-Last-Fetched-At'] = microseconds(last_fetched_at)
end
- specify { expect(get(:index, params: request_params)).to have_request_urgency(:medium) }
+ specify { expect(get(:index, params: request_params)).to have_request_urgency(:low) }
it 'sets the correct feature category' do
get :index, params: request_params
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index a5542a2b825..43e7bafc206 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -769,6 +769,33 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte
[
{
+ chart_param: 'time-to-restore-service',
+ event: 'p_analytics_ci_cd_time_to_restore_service'
+ },
+ {
+ chart_param: 'change-failure-rate',
+ event: 'p_analytics_ci_cd_change_failure_rate'
+ }
+ ].each do |tab|
+ it_behaves_like 'tracking unique visits', :charts do
+ let(:request_params) { { namespace_id: project.namespace, project_id: project, id: pipeline.id, chart: tab[:chart_param] } }
+ let(:target_id) { ['p_analytics_pipelines', tab[:event]] }
+ end
+
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
+ subject { get :charts, params: request_params, format: :html }
+
+ let(:request_params) { { namespace_id: project.namespace, project_id: project, id: pipeline.id, chart: tab[:chart_param] } }
+ let(:category) { described_class.name }
+ let(:action) { 'perform_analytics_usage_action' }
+ let(:namespace) { project.namespace }
+ let(:label) { 'redis_hll_counters.analytics.analytics_total_unique_counts_monthly' }
+ let(:property) { 'p_analytics_pipelines' }
+ end
+ end
+
+ [
+ {
chart_param: '',
event: 'p_analytics_ci_cd_pipelines'
},
@@ -783,14 +810,6 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte
{
chart_param: 'lead-time',
event: 'p_analytics_ci_cd_lead_time'
- },
- {
- chart_param: 'time-to-restore-service',
- event: 'p_analytics_ci_cd_time_to_restore_service'
- },
- {
- chart_param: 'change-failure-rate',
- event: 'p_analytics_ci_cd_change_failure_rate'
}
].each do |tab|
it_behaves_like 'tracking unique visits', :charts do
@@ -798,15 +817,12 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte
let(:target_id) { ['p_analytics_pipelines', tab[:event]] }
end
- it_behaves_like 'Snowplow event tracking with RedisHLL context' do
+ it_behaves_like 'internal event tracking' do
subject { get :charts, params: request_params, format: :html }
let(:request_params) { { namespace_id: project.namespace, project_id: project, id: pipeline.id, chart: tab[:chart_param] } }
- let(:category) { described_class.name }
- let(:action) { 'perform_analytics_usage_action' }
+ let(:action) { tab[:event] }
let(:namespace) { project.namespace }
- let(:label) { 'redis_hll_counters.analytics.analytics_total_unique_counts_monthly' }
- let(:property) { 'p_analytics_pipelines' }
end
end
end
diff --git a/spec/controllers/projects/prometheus/alerts_controller_spec.rb b/spec/controllers/projects/prometheus/alerts_controller_spec.rb
deleted file mode 100644
index 3e64631fbf1..00000000000
--- a/spec/controllers/projects/prometheus/alerts_controller_spec.rb
+++ /dev/null
@@ -1,110 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::Prometheus::AlertsController, feature_category: :incident_management do
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
- let_it_be(:environment) { create(:environment, project: project) }
-
- before do
- project.add_maintainer(user)
- sign_in(user)
- end
-
- shared_examples 'unprivileged' do
- before do
- project.add_developer(user)
- end
-
- it 'returns not_found' do
- make_request
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- shared_examples 'project non-specific environment' do |status|
- let(:other) { create(:environment) }
-
- it "returns #{status}" do
- make_request(environment_id: other)
-
- expect(response).to have_gitlab_http_status(status)
- end
-
- if status == :ok
- it 'returns no prometheus alerts' do
- make_request(environment_id: other)
-
- expect(json_response).to be_empty
- end
- end
- end
-
- describe 'POST #notify' do
- let(:alert_1) { build(:alert_management_alert, :prometheus, project: project) }
- let(:alert_2) { build(:alert_management_alert, :prometheus, project: project) }
- let(:service_response) { ServiceResponse.success(http_status: :created) }
- let(:notify_service) { instance_double(Projects::Prometheus::Alerts::NotifyService, execute: service_response) }
-
- before do
- sign_out(user)
-
- expect(Projects::Prometheus::Alerts::NotifyService)
- .to receive(:new)
- .with(project, duck_type(:permitted?))
- .and_return(notify_service)
- end
-
- it 'returns created if notification succeeds' do
- expect(notify_service).to receive(:execute).and_return(service_response)
-
- post :notify, params: project_params, session: { as: :json }
-
- expect(response).to have_gitlab_http_status(:created)
- end
-
- it 'returns unprocessable entity if notification fails' do
- expect(notify_service).to receive(:execute).and_return(
- ServiceResponse.error(message: 'Unprocessable Entity', http_status: :unprocessable_entity)
- )
-
- post :notify, params: project_params, session: { as: :json }
-
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- end
-
- context 'bearer token' do
- context 'when set' do
- it 'extracts bearer token' do
- request.headers['HTTP_AUTHORIZATION'] = 'Bearer some token'
-
- expect(notify_service).to receive(:execute).with('some token')
-
- post :notify, params: project_params, as: :json
- end
-
- it 'pass nil if cannot extract a non-bearer token' do
- request.headers['HTTP_AUTHORIZATION'] = 'some token'
-
- expect(notify_service).to receive(:execute).with(nil)
-
- post :notify, params: project_params, as: :json
- end
- end
-
- context 'when missing' do
- it 'passes nil' do
- expect(notify_service).to receive(:execute).with(nil)
-
- post :notify, params: project_params, as: :json
- end
- end
- end
- end
-
- def project_params(opts = {})
- opts.reverse_merge(namespace_id: project.namespace, project_id: project)
- end
-end
diff --git a/spec/controllers/projects/uploads_controller_spec.rb b/spec/controllers/projects/uploads_controller_spec.rb
index 01635f2e158..9f20856fa68 100644
--- a/spec/controllers/projects/uploads_controller_spec.rb
+++ b/spec/controllers/projects/uploads_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::UploadsController do
+RSpec.describe Projects::UploadsController, feature_category: :team_planning do
include WorkhorseHelpers
let(:model) { create(:project, :public) }
diff --git a/spec/controllers/registrations/welcome_controller_spec.rb b/spec/controllers/registrations/welcome_controller_spec.rb
index 5a3feefc1ba..0bac52c8dca 100644
--- a/spec/controllers/registrations/welcome_controller_spec.rb
+++ b/spec/controllers/registrations/welcome_controller_spec.rb
@@ -12,21 +12,12 @@ RSpec.describe Registrations::WelcomeController, feature_category: :system_acces
it { is_expected.to redirect_to new_user_registration_path }
end
- context 'when role or setup_for_company is not set' do
+ context 'when setup_for_company is not set' do
before do
sign_in(user)
end
it { is_expected.to render_template(:show) }
- end
-
- context 'when role is required and setup_for_company is not set' do
- before do
- user.set_role_required!
- sign_in(user)
- end
-
- it { is_expected.to render_template(:show) }
render_views
@@ -37,7 +28,7 @@ RSpec.describe Registrations::WelcomeController, feature_category: :system_acces
end
end
- context 'when role and setup_for_company is set' do
+ context 'when setup_for_company is set' do
before do
user.update!(setup_for_company: false)
sign_in(user)
@@ -46,15 +37,6 @@ RSpec.describe Registrations::WelcomeController, feature_category: :system_acces
it { is_expected.to redirect_to(dashboard_projects_path) }
end
- context 'when role is set and setup_for_company is not set' do
- before do
- user.update!(role: :software_developer)
- sign_in(user)
- end
-
- it { is_expected.to render_template(:show) }
- end
-
context 'when 2FA is required from group' do
before do
user = create(:user, require_two_factor_authentication_from_group: true)
@@ -131,12 +113,6 @@ RSpec.describe Registrations::WelcomeController, feature_category: :system_acces
expect(subject).to redirect_to(dashboard_projects_path)
end
end
-
- context 'when tasks to be done are assigned' do
- let!(:member1) { create(:group_member, user: user, tasks_to_be_done: %w[ci code]) }
-
- it { is_expected.to redirect_to(issues_dashboard_path(assignee_username: user.username)) }
- end
end
end
end
diff --git a/spec/controllers/repositories/git_http_controller_spec.rb b/spec/controllers/repositories/git_http_controller_spec.rb
index 88af7d1fe45..602c9c0a2ce 100644
--- a/spec/controllers/repositories/git_http_controller_spec.rb
+++ b/spec/controllers/repositories/git_http_controller_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Repositories::GitHttpController, feature_category: :source_code_management do
- let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:project) { create(:project_with_design, :public, :repository) }
let_it_be(:personal_snippet) { create(:personal_snippet, :public, :repository) }
let_it_be(:project_snippet) { create(:project_snippet, :public, :repository, project: project) }
@@ -177,4 +177,27 @@ RSpec.describe Repositories::GitHttpController, feature_category: :source_code_m
it_behaves_like 'handles logging git receive pack operation'
end
end
+
+ context 'when repository container is a design_management_repository' do
+ let(:container) { project.design_management_repository }
+ let(:access_checker_class) { Gitlab::GitAccessDesign }
+ let(:repository_path) { "#{container.full_path}.git" }
+ let(:params) { { repository_path: repository_path, service: 'git-upload-pack' } }
+
+ describe 'GET #info_refs' do
+ it 'calls the right access checker class with the right object' do
+ allow(controller).to receive(:verify_workhorse_api!).and_return(true)
+
+ access_double = double
+
+ expect(access_checker_class).to receive(:new)
+ .with(nil, container, 'http', hash_including({ repository_path: repository_path }))
+ .and_return(access_double)
+
+ allow(access_double).to receive(:check).and_return(false)
+
+ get :info_refs, params: params
+ end
+ end
+ end
end
diff --git a/spec/controllers/search_controller_spec.rb b/spec/controllers/search_controller_spec.rb
index 57ae1d5a1db..9771141a955 100644
--- a/spec/controllers/search_controller_spec.rb
+++ b/spec/controllers/search_controller_spec.rb
@@ -41,21 +41,25 @@ RSpec.describe SearchController, feature_category: :global_search do
describe 'rate limit scope' do
it 'uses current_user and search scope' do
%w[projects blobs users issues merge_requests].each do |scope|
- expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit, scope: [user, scope])
+ expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit,
+ scope: [user, scope], users_allowlist: [])
get :show, params: { search: 'hello', scope: scope }
end
end
it 'uses just current_user when no search scope is used' do
- expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit, scope: [user])
+ expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit,
+ scope: [user], users_allowlist: [])
get :show, params: { search: 'hello' }
end
it 'uses just current_user when search scope is abusive' do
- expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit, scope: [user])
+ expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit,
+ scope: [user], users_allowlist: [])
get(:show, params: { search: 'hello', scope: 'hack-the-mainframe' })
- expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit, scope: [user])
+ expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit,
+ scope: [user], users_allowlist: [])
get :show, params: { search: 'hello', scope: 'blobs' * 1000 }
end
end
@@ -298,6 +302,14 @@ RSpec.describe SearchController, feature_category: :global_search do
end
end
+ it_behaves_like 'search request exceeding rate limit', :clean_gitlab_redis_cache do
+ let(:current_user) { user }
+
+ def request
+ get(:show, params: { search: 'foo@bar.com', scope: 'users' })
+ end
+ end
+
it 'increments the custom search sli apdex' do
expect(Gitlab::Metrics::GlobalSearchSlis).to receive(:record_apdex).with(
elapsed: a_kind_of(Numeric),
@@ -370,16 +382,19 @@ RSpec.describe SearchController, feature_category: :global_search do
describe 'rate limit scope' do
it 'uses current_user and search scope' do
%w[projects blobs users issues merge_requests].each do |scope|
- expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit, scope: [user, scope])
+ expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit,
+ scope: [user, scope], users_allowlist: [])
get :count, params: { search: 'hello', scope: scope }
end
end
it 'uses just current_user when search scope is abusive' do
- expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit, scope: [user])
+ expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit,
+ scope: [user], users_allowlist: [])
get :count, params: { search: 'hello', scope: 'hack-the-mainframe' }
- expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit, scope: [user])
+ expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit,
+ scope: [user], users_allowlist: [])
get :count, params: { search: 'hello', scope: 'blobs' * 1000 }
end
end
@@ -432,6 +447,14 @@ RSpec.describe SearchController, feature_category: :global_search do
get(:count, params: { search: 'foo@bar.com', scope: 'users' })
end
end
+
+ it_behaves_like 'search request exceeding rate limit', :clean_gitlab_redis_cache do
+ let(:current_user) { user }
+
+ def request
+ get(:count, params: { search: 'foo@bar.com', scope: 'users' })
+ end
+ end
end
describe 'GET #autocomplete' do
@@ -454,16 +477,19 @@ RSpec.describe SearchController, feature_category: :global_search do
describe 'rate limit scope' do
it 'uses current_user and search scope' do
%w[projects blobs users issues merge_requests].each do |scope|
- expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit, scope: [user, scope])
+ expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit,
+ scope: [user, scope], users_allowlist: [])
get :autocomplete, params: { term: 'hello', scope: scope }
end
end
it 'uses just current_user when search scope is abusive' do
- expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit, scope: [user])
+ expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit,
+ scope: [user], users_allowlist: [])
get :autocomplete, params: { term: 'hello', scope: 'hack-the-mainframe' }
- expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit, scope: [user])
+ expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit,
+ scope: [user], users_allowlist: [])
get :autocomplete, params: { term: 'hello', scope: 'blobs' * 1000 }
end
end
@@ -476,6 +502,14 @@ RSpec.describe SearchController, feature_category: :global_search do
end
end
+ it_behaves_like 'search request exceeding rate limit', :clean_gitlab_redis_cache do
+ let(:current_user) { user }
+
+ def request
+ get(:autocomplete, params: { term: 'foo@bar.com', scope: 'users' })
+ end
+ end
+
it 'can be filtered with params[:filter]' do
get :autocomplete, params: { term: 'setting', filter: 'generic' }
expect(response).to have_gitlab_http_status(:ok)
diff --git a/spec/controllers/sent_notifications_controller_spec.rb b/spec/controllers/sent_notifications_controller_spec.rb
index e60cf37aad6..190c00092b6 100644
--- a/spec/controllers/sent_notifications_controller_spec.rb
+++ b/spec/controllers/sent_notifications_controller_spec.rb
@@ -299,7 +299,7 @@ RSpec.describe SentNotificationsController do
end
context 'when support bot is the notification recipient' do
- let(:sent_notification) { create(:sent_notification, project: target_project, noteable: noteable, recipient: User.support_bot) }
+ let(:sent_notification) { create(:sent_notification, project: target_project, noteable: noteable, recipient: Users::Internal.support_bot) }
it 'deletes the external author on the issue' do
expect { unsubscribe }.to change { issue.issue_email_participants.count }.by(-1)
diff --git a/spec/controllers/uploads_controller_spec.rb b/spec/controllers/uploads_controller_spec.rb
index 8015136d1e0..8ae78c5ee35 100644
--- a/spec/controllers/uploads_controller_spec.rb
+++ b/spec/controllers/uploads_controller_spec.rb
@@ -19,7 +19,7 @@ RSpec.shared_examples 'content publicly cached' do
end
end
-RSpec.describe UploadsController do
+RSpec.describe UploadsController, feature_category: :groups_and_projects do
include WorkhorseHelpers
let!(:user) { create(:user, avatar: fixture_file_upload("spec/fixtures/dk.png", "image/png")) }
diff --git a/spec/db/avoid_migration_name_collisions_spec.rb b/spec/db/avoid_migration_name_collisions_spec.rb
new file mode 100644
index 00000000000..f5fa3da0c81
--- /dev/null
+++ b/spec/db/avoid_migration_name_collisions_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Avoid Migration Name Collisions', feature_category: :database do
+ subject(:duplicated_migration_class_names) do
+ class_names = migration_files.map { |path| class_name_regex.match(File.read(path))[1] }
+ class_names.select { |class_name| class_names.count(class_name) > 1 }
+ end
+
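+ # Descriptive note (added): matches each migration's class definition line and captures the
+ # class name, allowing an optional leading '::' namespace prefix.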
+ let(:class_name_regex) { /^\s*class\s+:*([A-Z][A-Za-z0-9_]+\S+)/ }
+ let(:migration_files) { Dir['db/migrate/*.rb', 'db/post_migrate/*.rb', 'ee/elastic/migrate/*.rb'] }
+
+ it 'loads all database and search migrations without name collisions' do
+ expect(duplicated_migration_class_names).to be_empty
+ end
+end
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index 3c99393b14b..cfd6bbf3094 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -42,7 +42,7 @@ RSpec.describe 'Database schema', feature_category: :database do
aws_roles: %w[role_external_id],
boards: %w[milestone_id iteration_id],
broadcast_messages: %w[namespace_id],
- chat_names: %w[chat_id team_id user_id integration_id],
+ chat_names: %w[chat_id team_id user_id],
chat_teams: %w[team_id],
ci_builds: %w[project_id runner_id user_id erased_by_id trigger_request_id partition_id],
ci_namespace_monthly_usages: %w[namespace_id],
@@ -187,6 +187,44 @@ RSpec.describe 'Database schema', feature_category: :database do
expect(ignored_columns).to match_array(ignored_columns - foreign_keys)
end
end
+
+ context 'btree indexes' do
+ it 'only has existing indexes in the ignored duplicate indexes file duplicate_indexes.yml' do
+ table_ignored_indexes = (ignored_indexes[table] || {}).to_a.flatten.uniq
+ indexes_by_name = indexes.map(&:name)
+ expect(indexes_by_name).to include(*table_ignored_indexes)
+ end
+
+ it 'does not have any duplicated indexes' do
+ duplicate_indexes = Database::DuplicateIndexes.new(table, indexes).duplicate_indexes
+ expect(duplicate_indexes).to be_an_instance_of Hash
+
+ table_ignored_indexes = ignored_indexes[table] || {}
+
+ # We ignore all the indexes that are explicitly ignored in duplicate_indexes.yml
+ duplicate_indexes.each do |index, matching_indexes|
+ duplicate_indexes[index] = matching_indexes.reject do |matching_index|
+ table_ignored_indexes.fetch(index.name, []).include?(matching_index.name) ||
+ table_ignored_indexes.fetch(matching_index.name, []).include?(index.name)
+ end
+
+ duplicate_indexes.delete(index) if duplicate_indexes[index].empty?
+ end
+
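+ # Descriptive note (added): any duplicates left after filtering out the entries listed in
+ # duplicate_indexes.yml are unexpected, so fail with a message naming the offending indexes.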
+ if duplicate_indexes.present?
+ btree_index = duplicate_indexes.each_key.first
+ matching_indexes = duplicate_indexes[btree_index]
+
+ error_message = <<~ERROR
+ Duplicate index: #{btree_index.name} with #{matching_indexes.map(&:name)}
+ #{btree_index.name} : #{btree_index.columns.inspect}
+ #{matching_indexes.first.name} : #{matching_indexes.first.columns.inspect}.
+ Consider dropping the indexes #{matching_indexes.map(&:name).join(', ')}
+ ERROR
+ raise error_message
+ end
+ end
+ end
end
end
end
@@ -196,23 +234,18 @@ RSpec.describe 'Database schema', feature_category: :database do
IGNORED_LIMIT_ENUMS = {
'Analytics::CycleAnalytics::Stage' => %w[start_event_identifier end_event_identifier],
'Ci::Bridge' => %w[failure_reason],
- 'Ci::Bridge::Partitioned' => %w[failure_reason],
'Ci::Build' => %w[failure_reason],
- 'Ci::Build::Partitioned' => %w[failure_reason],
'Ci::BuildMetadata' => %w[timeout_source],
'Ci::BuildTraceChunk' => %w[data_store],
'Ci::DailyReportResult' => %w[param_type],
'Ci::JobArtifact' => %w[file_type],
'Ci::Pipeline' => %w[source config_source failure_reason],
'Ci::Processable' => %w[failure_reason],
- 'Ci::Processable::Partitioned' => %w[failure_reason],
'Ci::Runner' => %w[access_level],
'Ci::Stage' => %w[status],
'Clusters::Cluster' => %w[platform_type provider_type],
'CommitStatus' => %w[failure_reason],
- 'CommitStatus::Partitioned' => %w[failure_reason],
'GenericCommitStatus' => %w[failure_reason],
- 'GenericCommitStatus::Partitioned' => %w[failure_reason],
'InternalId' => %w[usage],
'List' => %w[list_type],
'NotificationSetting' => %w[level],
@@ -244,7 +277,6 @@ RSpec.describe 'Database schema', feature_category: :database do
"ApplicationSetting" => %w[repository_storages_weighted],
"AlertManagement::Alert" => %w[payload],
"Ci::BuildMetadata" => %w[config_options config_variables],
- "Ci::BuildMetadata::Partitioned" => %w[config_options config_variables id_tokens runtime_runner_features secrets],
"ExperimentSubject" => %w[context],
"ExperimentUser" => %w[context],
"Geo::Event" => %w[payload],
@@ -409,4 +441,9 @@ RSpec.describe 'Database schema', feature_category: :database do
def ignored_jsonb_columns(model)
IGNORED_JSONB_COLUMNS.fetch(model, [])
end
+
+ def ignored_indexes
+ duplicate_indexes_file_path = "spec/support/helpers/database/duplicate_indexes.yml"
+ @ignored_indexes ||= YAML.load_file(Rails.root.join(duplicate_indexes_file_path)) || {}
+ end
end
diff --git a/spec/experiments/application_experiment_spec.rb b/spec/experiments/application_experiment_spec.rb
index 461a6390a33..8a65c219f5d 100644
--- a/spec/experiments/application_experiment_spec.rb
+++ b/spec/experiments/application_experiment_spec.rb
@@ -211,7 +211,7 @@ RSpec.describe ApplicationExperiment, :experiment, feature_category: :experiment
application_experiment.variant(:variant1) {}
application_experiment.variant(:variant2) {}
- expect(application_experiment.assigned.name).to eq('variant2')
+ expect(application_experiment.assigned.name).to eq(:variant2)
end
end
@@ -248,7 +248,7 @@ RSpec.describe ApplicationExperiment, :experiment, feature_category: :experiment
end
it "caches the variant determined by the variant resolver" do
- expect(application_experiment.assigned.name).to eq('candidate') # we should be in the experiment
+ expect(application_experiment.assigned.name).to eq(:candidate) # we should be in the experiment
application_experiment.run
@@ -263,7 +263,7 @@ RSpec.describe ApplicationExperiment, :experiment, feature_category: :experiment
# the control.
stub_feature_flags(namespaced_stub: false) # simulate being not rolled out
- expect(application_experiment.assigned.name).to eq('control') # if we ask, it should be control
+ expect(application_experiment.assigned.name).to eq(:control) # if we ask, it should be control
application_experiment.run
@@ -299,29 +299,4 @@ RSpec.describe ApplicationExperiment, :experiment, feature_category: :experiment
end
end
end
-
- context "with deprecation warnings" do
- before do
- Gitlab::Experiment::Configuration.instance_variable_set(:@__dep_versions, nil) # clear the internal memoization
-
- allow(ActiveSupport::Deprecation).to receive(:new).and_call_original
- end
-
- it "doesn't warn on non dev/test environments" do
- allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
-
- expect { experiment(:example) { |e| e.use {} } }.not_to raise_error
- expect(ActiveSupport::Deprecation).not_to have_received(:new).with(anything, 'Gitlab::Experiment')
- end
-
- it "warns on dev and test environments" do
- allow(Gitlab).to receive(:dev_or_test_env?).and_return(true)
-
- # This will eventually raise an ActiveSupport::Deprecation exception,
- # it's ok to change it when that happens.
- expect { experiment(:example) { |e| e.use {} } }.not_to raise_error
-
- expect(ActiveSupport::Deprecation).to have_received(:new).with(anything, 'Gitlab::Experiment')
- end
- end
end
diff --git a/spec/factories/ci/catalog/resources.rb b/spec/factories/ci/catalog/resources.rb
index 66c2e58cdd9..c663164d449 100644
--- a/spec/factories/ci/catalog/resources.rb
+++ b/spec/factories/ci/catalog/resources.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :catalog_resource, class: 'Ci::Catalog::Resource' do
+ factory :ci_catalog_resource, class: 'Ci::Catalog::Resource' do
project factory: :project
end
end
diff --git a/spec/factories/ci/catalog/resources/components.rb b/spec/factories/ci/catalog/resources/components.rb
index 3eeb2f4251a..8feecc695bc 100644
--- a/spec/factories/ci/catalog/resources/components.rb
+++ b/spec/factories/ci/catalog/resources/components.rb
@@ -1,8 +1,8 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :catalog_resource_component, class: 'Ci::Catalog::Resources::Component' do
- version factory: :catalog_resource_version
+ factory :ci_catalog_resource_component, class: 'Ci::Catalog::Resources::Component' do
+ version factory: :ci_catalog_resource_version
catalog_resource { version.catalog_resource }
project { version.project }
name { catalog_resource.name }
diff --git a/spec/factories/ci/catalog/resources/versions.rb b/spec/factories/ci/catalog/resources/versions.rb
index d5057969273..520708d9d58 100644
--- a/spec/factories/ci/catalog/resources/versions.rb
+++ b/spec/factories/ci/catalog/resources/versions.rb
@@ -1,8 +1,8 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :catalog_resource_version, class: 'Ci::Catalog::Resources::Version' do
- catalog_resource
+ factory :ci_catalog_resource_version, class: 'Ci::Catalog::Resources::Version' do
+ catalog_resource factory: :ci_catalog_resource
project { catalog_resource.project }
release { association :release, project: project }
end
diff --git a/spec/factories/ci/reports/sbom/metadatum.rb b/spec/factories/ci/reports/sbom/metadatum.rb
new file mode 100644
index 00000000000..f05ace8754f
--- /dev/null
+++ b/spec/factories/ci/reports/sbom/metadatum.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_reports_sbom_metadata, class: '::Gitlab::Ci::Reports::Sbom::Metadata' do
+ transient do
+ vendor { generate(:name) }
+ author_name { generate(:name) }
+ end
+
+ tools do
+ [
+ {
+ vendor: vendor,
+ name: "Gemnasium",
+ version: "2.34.0"
+ }
+ ]
+ end
+ authors do
+ [
+ {
+ name: author_name,
+ email: "support@gitlab.com"
+ }
+ ]
+ end
+ properties do
+ [
+ {
+ name: "gitlab:dependency_scanning:input_file:path",
+ value: "package-lock.json"
+ },
+ {
+ name: "gitlab:dependency_scanning:package_manager:name",
+ value: "npm"
+ }
+ ]
+ end
+
+ skip_create
+
+ initialize_with { new(tools: tools, authors: authors, properties: properties) }
+ end
+end
diff --git a/spec/factories/ci/reports/sbom/reports.rb b/spec/factories/ci/reports/sbom/reports.rb
index 7a076282915..3698b0f17eb 100644
--- a/spec/factories/ci/reports/sbom/reports.rb
+++ b/spec/factories/ci/reports/sbom/reports.rb
@@ -3,6 +3,14 @@
FactoryBot.define do
factory :ci_reports_sbom_report, class: '::Gitlab::Ci::Reports::Sbom::Report' do
transient do
+ sbom_attributes do
+ {
+ bom_format: 'CycloneDX',
+ spec_version: '1.4',
+ serial_number: "urn:uuid:aec33827-20ae-40d0-ae83-18ee846364d2",
+ version: 1
+ }
+ end
num_components { 5 }
components { build_list :ci_reports_sbom_component, num_components }
source { association :ci_reports_sbom_source }
@@ -14,8 +22,18 @@ FactoryBot.define do
end
end
+ trait(:with_metadata) do
+ transient do
+ metadata { association(:ci_reports_sbom_metadata) }
+ end
+
+ after(:build) do |report, options|
+ report.metadata = options.metadata
+ end
+ end
+
after(:build) do |report, options|
- options.components.each { |component| report.add_component(component) }
+ options.components.each { |component| report.add_component(component) } if options.components
report.set_source(options.source)
end
diff --git a/spec/factories/issues.rb b/spec/factories/issues.rb
index 3f17d4d5a97..7d044c4aa92 100644
--- a/spec/factories/issues.rb
+++ b/spec/factories/issues.rb
@@ -65,6 +65,18 @@ FactoryBot.define do
end
end
+ trait :group_level do
+ project { nil }
+ association :namespace, factory: :group
+ association :author, factory: :user
+ end
+
+ trait :user_namespace_level do
+ project { nil }
+ association :namespace, factory: :user_namespace
+ association :author, factory: :user
+ end
+
trait :issue do
association :work_item_type, :default, :issue
end
diff --git a/spec/factories/merge_requests.rb b/spec/factories/merge_requests.rb
index 390db24dde8..3b37d6cf8ad 100644
--- a/spec/factories/merge_requests.rb
+++ b/spec/factories/merge_requests.rb
@@ -56,6 +56,14 @@ FactoryBot.define do
state_id { MergeRequest.available_states[:merged] }
end
+ trait :unprepared do
+ prepared_at { nil }
+ end
+
+ trait :prepared do
+ prepared_at { Time.now }
+ end
+
trait :with_merged_metrics do
merged
diff --git a/spec/factories/metrics/dashboard/annotations.rb b/spec/factories/metrics/dashboard/annotations.rb
deleted file mode 100644
index 50c9ed01fd8..00000000000
--- a/spec/factories/metrics/dashboard/annotations.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-# frozen_string_literal: true
-
-FactoryBot.define do
- factory :metrics_dashboard_annotation, class: '::Metrics::Dashboard::Annotation' do
- description { "Dashbaord annoation description" }
- dashboard_path { "custom_dashbaord.yml" }
- starting_at { Time.current }
- end
-end
diff --git a/spec/factories/metrics/users_starred_dashboards.rb b/spec/factories/metrics/users_starred_dashboards.rb
deleted file mode 100644
index 06fe7735e9a..00000000000
--- a/spec/factories/metrics/users_starred_dashboards.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-# frozen_string_literal: true
-
-FactoryBot.define do
- factory :metrics_users_starred_dashboard, class: '::Metrics::UsersStarredDashboard' do
- dashboard_path { "custom_dashboard.yml" }
- user
- project
- end
-end
diff --git a/spec/factories/ml/candidate_params.rb b/spec/factories/ml/candidate_params.rb
index 73cb0c54089..e3af8ab834b 100644
--- a/spec/factories/ml/candidate_params.rb
+++ b/spec/factories/ml/candidate_params.rb
@@ -4,7 +4,7 @@ FactoryBot.define do
factory :ml_candidate_params, class: '::Ml::CandidateParam' do
association :candidate, factory: :ml_candidates
- sequence(:name) { |n| "metric#{n}" }
+ sequence(:name) { |n| "params#{n}" }
sequence(:value) { |n| "value#{n}" }
end
end
diff --git a/spec/factories/ml/candidates.rb b/spec/factories/ml/candidates.rb
index b9a2320138a..9bfb78066bd 100644
--- a/spec/factories/ml/candidates.rb
+++ b/spec/factories/ml/candidates.rb
@@ -7,16 +7,12 @@ FactoryBot.define do
experiment { association :ml_experiments, project_id: project.id }
trait :with_metrics_and_params do
- after(:create) do |candidate|
- candidate.metrics = FactoryBot.create_list(:ml_candidate_metrics, 2, candidate: candidate )
- candidate.params = FactoryBot.create_list(:ml_candidate_params, 2, candidate: candidate )
- end
+ metrics { Array.new(2) { association(:ml_candidate_metrics, candidate: instance) } }
+ params { Array.new(2) { association(:ml_candidate_params, candidate: instance) } }
end
trait :with_metadata do
- after(:create) do |candidate|
- candidate.metadata = FactoryBot.create_list(:ml_candidate_metadata, 2, candidate: candidate )
- end
+ metadata { Array.new(2) { association(:ml_candidate_metadata, candidate: instance) } }
end
trait :with_artifact do
diff --git a/spec/factories/packages/dependency_links.rb b/spec/factories/packages/dependency_links.rb
index 6470cbdc9a6..d28263efe05 100644
--- a/spec/factories/packages/dependency_links.rb
+++ b/spec/factories/packages/dependency_links.rb
@@ -6,15 +6,31 @@ FactoryBot.define do
dependency { association(:packages_dependency) }
dependency_type { :dependencies }
- trait(:with_nuget_metadatum) do
+ trait :with_nuget_metadatum do
after :build do |link|
link.nuget_metadatum = build(:nuget_dependency_link_metadatum)
end
end
- trait(:rubygems) do
+ trait :rubygems do
package { association(:rubygems_package) }
dependency { association(:packages_dependency, :rubygems) }
end
+
+ trait :dependencies do
+ dependency_type { :dependencies }
+ end
+
+ trait :dev_dependencies do
+ dependency_type { :devDependencies }
+ end
+
+ trait :bundle_dependencies do
+ dependency_type { :bundleDependencies }
+ end
+
+ trait :peer_dependencies do
+ dependency_type { :peerDependencies }
+ end
end
end
diff --git a/spec/factories/packages/nuget/symbol.rb b/spec/factories/packages/nuget/symbol.rb
new file mode 100644
index 00000000000..7ab1e026cda
--- /dev/null
+++ b/spec/factories/packages/nuget/symbol.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :nuget_symbol, class: 'Packages::Nuget::Symbol' do
+ package { association(:nuget_package) }
+ file { fixture_file_upload('spec/fixtures/packages/nuget/symbol/package.pdb') }
+ file_path { 'lib/net7.0/package.pdb' }
+ size { 100.bytes }
+ sequence(:signature) { |n| "b91a152048fc4b3883bf3cf73fbc03f#{n}FFFFFFFF" }
+ end
+end
diff --git a/spec/factories/packages/package_protection_rules.rb b/spec/factories/packages/package_protection_rules.rb
new file mode 100644
index 00000000000..3038fb847e7
--- /dev/null
+++ b/spec/factories/packages/package_protection_rules.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :package_protection_rule, class: 'Packages::Protection::Rule' do
+ project
+ package_name_pattern { '@my_scope/my_package' }
+ package_type { :npm }
+ push_protected_up_to_access_level { Gitlab::Access::DEVELOPER }
+ end
+end
diff --git a/spec/factories/packages/packages.rb b/spec/factories/packages/packages.rb
index 132152bf028..caec7580e46 100644
--- a/spec/factories/packages/packages.rb
+++ b/spec/factories/packages/packages.rb
@@ -301,9 +301,9 @@ FactoryBot.define do
end
end
- factory :ml_model_package do
+ factory :ml_model_package, class: 'Packages::MlModel::Package' do
sequence(:name) { |n| "mlmodel-package-#{n}" }
- sequence(:version) { |n| "v1.0.#{n}" }
+ sequence(:version) { |n| "1.0.#{n}" }
package_type { :ml_model }
end
end
diff --git a/spec/factories/pages_domains.rb b/spec/factories/pages_domains.rb
index 2ba5cbb48bf..d91037e803f 100644
--- a/spec/factories/pages_domains.rb
+++ b/spec/factories/pages_domains.rb
@@ -349,6 +349,472 @@ x6zG6WoibsbsJMj70nwseUnPTBQNDP+j61RJjC/r
end
end
+ trait :extra_long_key do
+ certificate do
+ <<~CERT
+ -----BEGIN CERTIFICATE-----
+ MIIRLzCCCRegAwIBAgIULB+G07cadoQD0Sh7NOq6jio5SaowDQYJKoZIhvcNAQEL
+ BQAwJzELMAkGA1UEBhMCZGUxGDAWBgNVBAMMD2xvY2FsaG9zdC5sb2NhbDAeFw0y
+ MzA4MTQxNjIxMzdaFw0yMzA5MTMxNjIxMzdaMCcxCzAJBgNVBAYTAmRlMRgwFgYD
+ VQQDDA9sb2NhbGhvc3QubG9jYWwwgggiMA0GCSqGSIb3DQEBAQUAA4IIDwAwgggK
+ AoIIAQDhfq6cKgjogJYFGRuokm7MAyUHwMBzkprL1wSemGquI2i1DkjzbDHSa2iR
+ qTTiNgr8NHlYXhmqn6Km7T4DNaWBqrWLsYVusGBtKKIl6EbE+dVjV/7iqn1lgUF2
+ RI77S7t6tXYKYwG1CiboUi+Dyz/eJB408KY8ruHkSkuqdMRV6XXkkytU3DRd6FKj
+ mdw8S7A0IcY8I/r8Sj81CifAuI4BkSqrh210o01RwYZVjcXiq5R+qIXbT51H6MRV
+ pSMTPRMQ2yvJ997OTR3UopZWv5WeGc0wyQSqMUBBL82wvpNeOWc5GYLLGx1uilh1
+ zWr+MnCYebaDOfP1a4GnHB2KwCY9RUVw6tAKcLxBMWbcd7JN5ijObkhk3TmPexol
+ XmkB72+5q6cytwgdj1Wc2udg746kkPwkKeOmJt1789Jaqlvn4Emez/g3N2hXO3s9
+ DJZuTY3NXesmraq9oGmlWSZNF5up2sZ4811ci1cMEl9p8GSNpTcMy98ZdXCUhrrS
+ g3fPbaK6abcRx5xhbXqzuI6QExBie+6x9aPPO7VR3ibwdk2rae24f2fnquS6sFLa
+ Oa7Spl0eFdS0nySvlMhII2kB4ZBaa1dzZYsVmJgOMKfBKsh7k1EZPOYcnKAyyiWS
+ RAhzgPIC9TULZtnEJ9RBW6th4gUvA2aa1YM8PPERW+kYXBfNsPOqKkW5mK9FI+9S
+ at/og1vQQHY2GFXy5pyQDlgX6UArdnF6grAOOwZJFhCXg0FMaMFy6FEdohNnCOZm
+ iUNk3JE+FyI50UeA6rR5J/x11+kfwmAxpo5+E7zIpIe5MTCdvCdqk7QklAVbIWK6
+ JLY/nqWj0pyhpGSRPJ0U44/ildZt3+tj5IdyqNnuwQwbLCpVYu4o6qhd2WtctfL1
+ L/fXuR3BuhzULmAzatmzJQd5+ewd1e5gH8aQsHMD0OMXJnKK1zrdj/FmbvvTfT69
+ aelyQvFORCqvTZo/b+zXKF6MRd7OblJoqeRVwjxoQWHv5n+FbfLzqUuy+XDleBTp
+ dXSdkQIK5rII23vKoo25gp6uZ99dqMI5RTUN1h0GLHwkCrIACOF3FMuAuqjugP/9
+ sIZK1fXpNnQ1qmZN17phpRDra/tYdoX3YlLYBs/1W5IIauBPJKpz/TYxa2vlisKz
+ yfvkV5CYqUz2ax2mb5bGHeyYYkbPfF6tV986GhEIZTQRBi10BM5eIU/l1WTJUUqn
+ Ld2RF2T2AiFgaavSqiBIUzj5mpVyVjeDs96yik0oCgx31OUUgqV5oSgwnUJYf/1l
+ 1Z5Yg/VhnENo1NN4HHdPWMPLK18dWvY/Ui3GAL6s/6LCLTWS9+mV1zW2IhDyvapq
+ vuWMmQdfbKKvwsD8gFQtxO09CkWa8JOjTYt7VQaISnl039Y/L3vAwy7q9sE9fbNt
+ BiNKxLeULx6TBumHLbSJPUesqKSkM3Iz10seyXD+dZX3Z5dULV8ME16/lAs5UUPt
+ g4SKGnhoyoxciWRB0YYGq8MW9RceppUkn8sG/zF4xsffvdft4KAMBWbzZKOFsO5S
+ pjKFyLBIg68cXmgyqTrWODS6HaBagiTjrKyI+EUl6riFhHjClWtGRTAmwWiif/jF
+ dav0C4GMrF3jpnfQYmz9mE4G/PgHTvb4gXaOQsHFuxUPmWjOz07Ba0mo8GR492jD
+ 3I9ffIjOA1UBA8tRMBAbBzKavQrX6qy9XKHopXC5vrB5l3zBquX8X0I9CZmjrvZt
+ vdj/7Lu+p6wU7RbFr5C3b+obFZN57qj/uf+7GfrjuZnfrxkxb0LxLAgrirUi6RkW
+ rHJ3aL+dQlGd7vzZKsLmgJv34PtproTfIeFgVq3q2nz3uKBCdBwLQRs0scKvbwSu
+ VresQWQfwoy7viMI2964pDl9KBmt8dsVYm0TGx6AYtB2XhzHnF5wgr82anEswbBp
+ n3fo7wgI9lbmwrwXpS5LgCIvOIcqGtH2izXcqt+45fqazsPDj3b3pEyT6FcqwlLC
+ T/1p9kjUJ//mg8DBTXcyWuVDdtbdGpGJHtKftU0tWr3X84k53HGaJFDdUJWuJp8v
+ 87hZMc2IsgjtJ5gBAvpW23BVZf27VFBTJBZqTt/pWMiEdfyMlZeqnUv7o6TlBEfO
+ BNl6BupT9SYQMahE9GDl/mq+QRN0x9qzncDKlQSKZsiocO3Q8eQheTkQY0TVWTUy
+ 9Wgqj845nGTJfM+w4xto8cbkB61fcKd4u1iiDWeYmnTkKOo5Ny0+47bHVrBiaumU
+ 5JsV/qs32+BZBQKIh/mRK/FE/pxXOX0ouZlM3bq3nuDCd7BwqMstI1zx2eKNAjN9
+ G22ZFs9RteI0JbHndJvGIv72Zo7ERGM7+D6rGDARuolPKcgdKWiBH1bTiGv9WSxo
+ fer6hCPNkGl4Z1Upa/pe85P1DL0Yz4mVJteFd+Tn+oJwEyP8XJp6jQj5kYgMDAuG
+ sGq3STyLwnDPe6R+dkCxmQ6kAuZFBNEuduWFZfQJJGxib1vCjnfAsMK0BqW2jFF7
+ cID0A0upiDjgcjloJ+FYF2VLCXUE6dmtgujlNLhyMWcyKDNzKlNsMVB7swkPLnOK
+ 6MFVYQ9dXI72IpI73LKXoREsOcEyItS2pvDhu9TfGcQLBkVTYWllsuhpmsg9xwYJ
+ ajf4ewKP0Yxa6XbkDlxNtyFbRIu8m6AhoRo3sPBPUIb02Dri4qBId7RVBRe+B8M9
+ NuNE5o88QHA21R2u7S7cr67Zw26HSNGEN+9HGY4Xpy66ijW84wIDAQABo1MwUTAd
+ BgNVHQ4EFgQULhLmAennkk5+BcYJY1cU6OYXFokwHwYDVR0jBBgwFoAULhLmAenn
+ kk5+BcYJY1cU6OYXFokwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC
+ CAEAphitB4iOhDwsKy21Ut2c3C0/gg/rlyyUhoD1H2BAYdJTlRFdCpoH0F2dXFOh
+ rFfh4U3G8sRYm/TwhP9lJ4/TCdYH7WQBIU0dvMectYd2KWyYkNb8eBh4fC1gLZ5I
+ 5zxAigc242Pjft9NsTgcbDI8+xjtSXc0cwaZTD5kxZyQm1BnONoZvF0/s7dsv+n0
+ kU1tB9n0PlcAphQTq311Vk99HW1SqrA9njQftJr/tbOy2nyHqFjhQxAqr9/CYE3t
+ 6v4itH04n3eHgYDlmi0MqrFtqGobLhRp+zVAVxy7o9+nh0Z9wUikknPapV1GH1Vs
+ TO8fhr9eunXsTHQVP2EK77tJK7pNdfBvwHOq595iCbSFp0TF5sG4zCGZReB0TvtS
+ nROSKwq+jsV4xSGlTnqbf4EIoD4HdrWw7BvLOlz55oPK+Og/w4X0DwR6zx3rCAT8
+ nrDm5ekNBTVlAhbD2g284mZ0G5F+c3lnhbju66RoHvrzyEQ+6avdhUB9BTKzZ7Pj
+ CkRLsTlXtOnO5E5/rX1+mKXRwCNmPdK4vYfPucl/vgdlcpvzJunrQMPlvfM2UKeg
+ z2yyT0rW1sK9IcvlqwApWwPuS1mC1o3WrwzZo0qgDwQSzjQh6UkQPSBTmkM9Fv4I
+ AzGQoWAcXw/QbRpvjEm8H3U60B0fCwPyG7Z7eGjf4am2RYs+viRT/4ewJ22i9imx
+ E5jxIpMLeb0CITI594sRQXbLEsISq9Hvrt4kr5lSfZ9IRRv+AiYjjHQVSMRo641j
+ JExXZRuQgagL6Pg4wElyRR1tAsy1aDdenV0hLJ7eSaQf3Z9Bs4honDv97UUVLbQW
+ hebIGnJEPY4w15hUTKzp6eIz+V2rpG2Jcf2G5UXRWnw+Gai3CyBMJ7NnqvIis8qP
+ OUbGYrbqRWOmrjphOgaX9hvLD7nbK1wXMFE3V7cP5py594qN8vg8EGSpsCATXAk3
+ A8aSW/Kl1jVCn1WVzez+E9bbcTf06eUc1M0X5HI8NH7uhw1ECn/jYA9Q5UJ32SNK
+ 4G14vsPtbtG99nAKy3fbA1Qn3MOu5anA2rl0NNICNEc89Q6fMeyFV8ctwyF7qqYq
+ M439B6R8jzz0ESLZdp7r9f8Ve3TlBvs+42knRBkjUqfNHSf06/wG+AUOmNiGF8jt
+ O2e4mXxLotqIAT5OpNpZIQyZY1Sr4uvp3zsvKOnHU3GBwsB4nhHqRzpRqkK6DIaw
+ TnOC/dOKzWpUy9iNzEGCNaJVWkQBCaFMJb968h2cZQzpj15XhAVlKfhh2KHCoDGt
+ WnEPgchVzBQwvhZra3gP7ynkGxSRYYPzLWt7b6oZSMB/JWyU+2fSqRPAXvUue7Ns
+ peHKPuGETVMR8jTuUghQvQDyTpoH9GZzyNQ2CUOfgAoA5cc9XuI+KGcsQuWqQvLv
+ zpxeHM8d1+vAu8WjnTs0E0MZk7Vi+N5DuhsTT7kP9fyz+rQzgQ7+bcanOgBABIcc
+ dsbTdfJApeFwN874s020M11Q+RwsXm40xDZHTYWe+r2Yq/+kd2N0rM40TS/Zv6KA
+ /1Ag5XC7dq3Uqp0Vk06LzZ7qP8gNiN813/qvN2PW/phq4A/OFtCnEGfom4I2MimB
+ SEpYuPTgiRo//y1tqq5D8994J546LdQ2Y1VmlK/7CYHZN7Sq7yoA0gVZh31QC7Wh
+ huX/bjhpmmehMbE8f0//6jEqvJxA6qV8f2XJNGa8ctE0Tf7kwku0JSPSqW4hMdml
+ udZivHIpgANbYgTVeVR4LFnkZO2tVeNXDj+jaQJ8piyZ4V2HVASTWM/0JieUuQU/
+ btvNFQ0iMRrjVKjgK9OZxo3f5O/CNTrKYQDPMyyJEvs2+oZHwj7T7srlgEm0RmX0
+ PkXsR2kmCnjSdgIrwbW2FB+QS2U/N0jUQfFxfv8s0zor9RyL3PuHjtfYPbr8v0wS
+ 5b81CafxumhOw93DWhwZoyK+IcYrZ904tqmJFraaX9odJ1AzZS+vf1AzlwiVTT1+
+ xN0Thf/WnXzQb9JGQA0Ix005ekZbxbjOa1jQ8kIuZ4qQe+/figpTf8AusvxnwG7N
+ T2iP8qVnd/ovxIDrPJ+nssiolQwDK7ocSk6ZCQLYj3fSCSgGmobY8XFlFrMfW+oM
+ TQG4vWvcLuIrrTcnp5aIl4XyvVuvkJoYZD7AXBng5CoGA1AaJEK1He5i6+OeNHIJ
+ HvNMsUPmHaOdYa2iVU1aJ4DUbO8zydXPPNtI6hMvnvqp5oq7beNX8hXkIPIhO1y3
+ Nc09nzD4nLCHH96GVpCwvWuvlbLGK2fHdj/bP0PVRn1ql4O3AbXD0tEY4nacq7Ex
+ AS5oPtHdosrQrTv0ZG2D+H5x7u02f0hraMubSyjruZ+TA9phgQjXm+D+JrcCrDr/
+ oE1L1uBKnQOT/8AsYh2t2JhuV7Ry0cg6Jt+AQAmLCzaBpxIGYWWNIbjDifn9lZi+
+ lZW9Ny+sWVNa4VzB8V9V9rXGWqDnNag1j87JTmS3NTqsECiaP4QJML/A8zjoI7e8
+ QFwKPChCfZHhKA/yAcY7GX7Gwj/ljMrS6ZvYirH0dI+v00rQ7LFA9REplCVLxpBT
+ iboycHkVNdni8H4xqiMpBYw83bX5B5syLS744+QX2kUkhIO8ILSiOJ+gutbDRDi8
+ Vmi1NgacnawjwRBzfKZd3r2JCZ47n3o9j8kbxQlgdOtY8PmttzQH8jUk22rjyWJs
+ O+Y4I/T3OE9g24Ei+b4kwgBFXaoajzWj+/xKOI+Oy+EUPg0=
+ -----END CERTIFICATE-----
+ CERT
+ end
+
+ key do
+ <<~KEY
+ -----BEGIN PRIVATE KEY-----
+ MIIkRAIBADANBgkqhkiG9w0BAQEFAASCJC4wgiQqAgEAAoIIAQDhfq6cKgjogJYF
+ GRuokm7MAyUHwMBzkprL1wSemGquI2i1DkjzbDHSa2iRqTTiNgr8NHlYXhmqn6Km
+ 7T4DNaWBqrWLsYVusGBtKKIl6EbE+dVjV/7iqn1lgUF2RI77S7t6tXYKYwG1Cibo
+ Ui+Dyz/eJB408KY8ruHkSkuqdMRV6XXkkytU3DRd6FKjmdw8S7A0IcY8I/r8Sj81
+ CifAuI4BkSqrh210o01RwYZVjcXiq5R+qIXbT51H6MRVpSMTPRMQ2yvJ997OTR3U
+ opZWv5WeGc0wyQSqMUBBL82wvpNeOWc5GYLLGx1uilh1zWr+MnCYebaDOfP1a4Gn
+ HB2KwCY9RUVw6tAKcLxBMWbcd7JN5ijObkhk3TmPexolXmkB72+5q6cytwgdj1Wc
+ 2udg746kkPwkKeOmJt1789Jaqlvn4Emez/g3N2hXO3s9DJZuTY3NXesmraq9oGml
+ WSZNF5up2sZ4811ci1cMEl9p8GSNpTcMy98ZdXCUhrrSg3fPbaK6abcRx5xhbXqz
+ uI6QExBie+6x9aPPO7VR3ibwdk2rae24f2fnquS6sFLaOa7Spl0eFdS0nySvlMhI
+ I2kB4ZBaa1dzZYsVmJgOMKfBKsh7k1EZPOYcnKAyyiWSRAhzgPIC9TULZtnEJ9RB
+ W6th4gUvA2aa1YM8PPERW+kYXBfNsPOqKkW5mK9FI+9Sat/og1vQQHY2GFXy5pyQ
+ DlgX6UArdnF6grAOOwZJFhCXg0FMaMFy6FEdohNnCOZmiUNk3JE+FyI50UeA6rR5
+ J/x11+kfwmAxpo5+E7zIpIe5MTCdvCdqk7QklAVbIWK6JLY/nqWj0pyhpGSRPJ0U
+ 44/ildZt3+tj5IdyqNnuwQwbLCpVYu4o6qhd2WtctfL1L/fXuR3BuhzULmAzatmz
+ JQd5+ewd1e5gH8aQsHMD0OMXJnKK1zrdj/FmbvvTfT69aelyQvFORCqvTZo/b+zX
+ KF6MRd7OblJoqeRVwjxoQWHv5n+FbfLzqUuy+XDleBTpdXSdkQIK5rII23vKoo25
+ gp6uZ99dqMI5RTUN1h0GLHwkCrIACOF3FMuAuqjugP/9sIZK1fXpNnQ1qmZN17ph
+ pRDra/tYdoX3YlLYBs/1W5IIauBPJKpz/TYxa2vlisKzyfvkV5CYqUz2ax2mb5bG
+ HeyYYkbPfF6tV986GhEIZTQRBi10BM5eIU/l1WTJUUqnLd2RF2T2AiFgaavSqiBI
+ Uzj5mpVyVjeDs96yik0oCgx31OUUgqV5oSgwnUJYf/1l1Z5Yg/VhnENo1NN4HHdP
+ WMPLK18dWvY/Ui3GAL6s/6LCLTWS9+mV1zW2IhDyvapqvuWMmQdfbKKvwsD8gFQt
+ xO09CkWa8JOjTYt7VQaISnl039Y/L3vAwy7q9sE9fbNtBiNKxLeULx6TBumHLbSJ
+ PUesqKSkM3Iz10seyXD+dZX3Z5dULV8ME16/lAs5UUPtg4SKGnhoyoxciWRB0YYG
+ q8MW9RceppUkn8sG/zF4xsffvdft4KAMBWbzZKOFsO5SpjKFyLBIg68cXmgyqTrW
+ ODS6HaBagiTjrKyI+EUl6riFhHjClWtGRTAmwWiif/jFdav0C4GMrF3jpnfQYmz9
+ mE4G/PgHTvb4gXaOQsHFuxUPmWjOz07Ba0mo8GR492jD3I9ffIjOA1UBA8tRMBAb
+ BzKavQrX6qy9XKHopXC5vrB5l3zBquX8X0I9CZmjrvZtvdj/7Lu+p6wU7RbFr5C3
+ b+obFZN57qj/uf+7GfrjuZnfrxkxb0LxLAgrirUi6RkWrHJ3aL+dQlGd7vzZKsLm
+ gJv34PtproTfIeFgVq3q2nz3uKBCdBwLQRs0scKvbwSuVresQWQfwoy7viMI2964
+ pDl9KBmt8dsVYm0TGx6AYtB2XhzHnF5wgr82anEswbBpn3fo7wgI9lbmwrwXpS5L
+ gCIvOIcqGtH2izXcqt+45fqazsPDj3b3pEyT6FcqwlLCT/1p9kjUJ//mg8DBTXcy
+ WuVDdtbdGpGJHtKftU0tWr3X84k53HGaJFDdUJWuJp8v87hZMc2IsgjtJ5gBAvpW
+ 23BVZf27VFBTJBZqTt/pWMiEdfyMlZeqnUv7o6TlBEfOBNl6BupT9SYQMahE9GDl
+ /mq+QRN0x9qzncDKlQSKZsiocO3Q8eQheTkQY0TVWTUy9Wgqj845nGTJfM+w4xto
+ 8cbkB61fcKd4u1iiDWeYmnTkKOo5Ny0+47bHVrBiaumU5JsV/qs32+BZBQKIh/mR
+ K/FE/pxXOX0ouZlM3bq3nuDCd7BwqMstI1zx2eKNAjN9G22ZFs9RteI0JbHndJvG
+ Iv72Zo7ERGM7+D6rGDARuolPKcgdKWiBH1bTiGv9WSxofer6hCPNkGl4Z1Upa/pe
+ 85P1DL0Yz4mVJteFd+Tn+oJwEyP8XJp6jQj5kYgMDAuGsGq3STyLwnDPe6R+dkCx
+ mQ6kAuZFBNEuduWFZfQJJGxib1vCjnfAsMK0BqW2jFF7cID0A0upiDjgcjloJ+FY
+ F2VLCXUE6dmtgujlNLhyMWcyKDNzKlNsMVB7swkPLnOK6MFVYQ9dXI72IpI73LKX
+ oREsOcEyItS2pvDhu9TfGcQLBkVTYWllsuhpmsg9xwYJajf4ewKP0Yxa6XbkDlxN
+ tyFbRIu8m6AhoRo3sPBPUIb02Dri4qBId7RVBRe+B8M9NuNE5o88QHA21R2u7S7c
+ r67Zw26HSNGEN+9HGY4Xpy66ijW84wIDAQABAoIIAQDcVXF+TCB6NrLf9mGtPLAg
+ jm4PfktOYpD43ne4FAwhbZ3xVCz6Fd000xjRQ3nWE6J2PzvWmdQQgX1oCGbQsgmv
+ gsNz5RkRSCxgXRTbX3RPIiNct+3pQ1fV6A+z5VekuqJNS6Q0j/tqD6pm1W9yIxac
+ E8SkTATTRLqa2/HFc+UoYT9+AkOT3rsYi1q8Wyn0jKx2tA3EVA/5lv7d77daO7se
+ Ut9TzbepAawaV7PQQwB59NfbTwXEfq2bRxkY6ow0Tzgi/1VxOs8t2/JrBBdMWlVy
+ r5lssu7o8cjsKS6eJglPR13SUFgZ57vBeFLpgLer/FNC2aL55JW5V7vPMsy29/wl
+ YFty8y4nFXMNbJ0qjZbfQSbcVqxMSlHlHg81NmP6rSAJV22/Q1MdtyGba9YsRMen
+ i7ekCn5TqqQ+asc/Kjk1gFXPZT0PjwdYPVm1FGilDQijA8My/vzX3zd7hnnDWG8U
+ 8B2Ar6OpOsnqlMVAedF3ClmZGlg7wyInLuK7shROzbz00zk7mUT3egcsNwiuRMJ8
+ yMY6g1/1rU0F2sFHswE/nfjXjz5TAwwOUx4R980YLdDNBd3aQ6qQGhv9SQRg/yuS
+ /lHsAut9RaZGL0qrmAdfoFndBEGA8ZYjKpy9p9ZuLi/LrhePtYbRgW2IE2+J7FTO
+ VE9cuYZLROz03k8MK2hi5yWgPz/0Evon3+4IJT/2LOx4t5QKVYseFjIjHLD9ZD/8
+ d/Z4E9y9evUwUuwRcAJNDAsCIXipMOYuhmbDCBqfIlqVRft+bTyl/jAsNmMcLsWu
+ 77oYqbuP++86SnIIBcWQSvpkzEB4gV4eZqfWZOrjjTwisDe2RjCyLXz7nUPJzklB
+ AUw7RmEHK3APN/iBUI1o84rs1iV/1mNuqqbk52MQGeS2mAl1Vn9Pnndr8aG1kPwj
+ RxduO35FgPRRZTmQNFQ10ArH1c+2HHnadAXrBONDb5/jrv3aX0R5+f59WgfQnrEQ
+ GoJRnLftCCcIY+KzjBFMqlt7tQ+vqMakocolOEyjbb6GMlcCCpySKnW7L6OnnP2H
+ wc9OMI6fn3iqwKroeL7nA8ZzGhGjDkDlE42PMH53/0sS/s9cZM0kAMgwgx7eOpvV
+ G7LZP+zdAwMOptQxf2UAUD5xqZjbfzBlkUmgbZvAycMTOFJocc/+AglcOn8lgtnY
+ AZltXXBUkIXWIzVV8ShWth+DoJ82X2XkxJbidhGKpUZUj05Xq8llxjBXG2KPmnmu
+ yAnkmcvfvv2XQwJd0NuqR+Iyz8K6hd7/JMjQSYQ4z2/kWdEQTOz47y+xi3V9Pzro
+ LypwQvdRAwdNeVhqzcwMeEt4y1nDRtQyrBspxK/9ysWGe1sXzH/P+gDG3CZHv5Kz
+ 9b82pNI8mGuSSzZqKtZtHnysb6bt/h16EnLNZQ3+SxGa6nA8Vk90Un+qPDHye8lh
+ FC0Pdkp1bqWY1Gok+xSWm0jeT+1PKcGNaePxV4/3+NL5AR3hhPRaVg0pbiuFXz71
+ JgWKGkj7eHFxQtineq/F1OxsdYzB66xBNI9f7gB3KCFyVkE5PVebNV82fV080IQ0
+ Cy5yQ6XMU+lxuEFas9ZVb2Z9/mkXpQBOFY+9p9nP0wHdwmS/SeH5TuJnquBh31kt
+ FkDnAQyVAHcLPdKihWnSsNPlFDeY/Vb2bTA8ppPGXZuj8oh+CSDA2P8u3BeUeEcj
+ U3gjD6ottWIpOcqXNHwql9asLuVERGUDd7K9ALsK3DzjRsDFwQMSFaFCtG4v1ZZa
+ WtURi8RmZBUP47///9DkwV46/m0TTP5ax9vxjJLkiZOGlQes98KQ1Oyl9b37cds5
+ gb3SbXz65VWYW3/4+h3GecZ6xynfsfavD0d0fBAJN48YaKb3k/9p6qfw3cxr/A4I
+ Y75m9Au9OXZdgncCT19kpC2uXlXu9XlGXcv/VkNt1p0jMta6A1n6kJvyG/hrjUpH
+ WSYibXEW/qod7za4oYBv+3bjNSSaBqjx5n/1U8lj6xp36ilHhHQ7QkIH3O4RqkPj
+ 5oXKezm2CUqxYJ7Lo84f7cLbHXcG9Qp+wmCy97E/NPlK+8yQZyz0e6i8KtxEocsQ
+ u/xqHlCSi+JCR8vNA0ndsWJzI2/dDF/7Q0pcOvIxNARSmyHADabRBen+FPUEE0AN
+ KWaspqrf78hAqLbCXskUZf0T9S9DDMDHQj9bZs0H48OvCVFFb5TzBndG2kJIOZSg
+ OnzOOELcZBPY/rv9g26Q8CrhGnh4GoEEbNP8imPJ8Kmpd1Vk5auVYD9qfygm21Bw
+ uxj1/O0cdnZfT16X9h0JnwjP9MZ4tlRH4t77uswHuaStRTe/dn/kA9Tn0cox4z3q
+ 6txM4uakB+st4LgMhnDTGvMMD+I3TSaReexr82hOZJUwc95VzW4lWrIEB4Yv69uA
+ Lt/5kbJUY44kAgdORPHLVsFhngHilGyD/3m/XyHUxRiNdj6/CDANtWwYN9089Jky
+ gauvpCOT6D2SJVCx/AZIWmDwNAorXVNC6Rl3aySzIK81aqYejm3D6S774ciOG1HC
+ lbmVPtLr1kh/ZbM+VrnpzIu+svm4hFZA6dhAYnx+50ZJUISX4a9i/vkTNBK0puwz
+ yj64bwKxdoRnT7tzJAmz5pMRn6K69ur2mEvW/4KolJeZp3V8YYmPZPjYMLPdeJDc
+ Lf/t6Ff1WoERTpe6IepnPlKxuSgpn9JDzZ+V3hVnQdu4kHjSheZMXei3Bv0fb/eA
+ /DR1vGi6zCGGyGh7lSaWCQKCBAEA+aiesAPcwyZyjGn7u1c1qBOUtUex+EroyO+D
+ JXgKIUbNvKxNcD7+vWD7mq+uDHEsYeKwcK2ulV5t6pD4HYaHROg+qtaGm9jK1Tbu
+ O9zPyQpOOEG/tqHxasO+fuDiQOO22xwy2+oc1BkpPn+q4yDLrGtV60k5mIEFusdB
+ fDYLQIKBRa5mIrXOJ4+DMumAqXAHeB9rjlll4AyQLHt0n3oRok1GXuL5tQJvt1E2
+ HsU1HPMcwM/cX3xuFDMRNJ0PP9GGFmXGHIrWGqYbsnIIgZHUjbSShAzsk0xK2OKQ
+ S10CXP4VFkB2C94WYu6f4aVKk1ia9DYdDLvzVG5pvx8gmolccCk59cquE0SVZXoq
+ 607tHXspYgCBhJZShrvLvxX8hxbstsoVa4E7AkjWQUMbD/5Kpt1V22F2EHWLIty1
+ UXSTdlNGTntfoS4PEkxQg32kF17x02fpWpvR1gINrZbpVUgWaiISs7XbAYsTukwV
+ BlUka1/yZMrZzB5q8GN2RXyT0bV1U5P1SqiF3M2Y5ffzVP/OTW/+bahnICvIeDlm
+ aXary2SCLFtpQ0UBbmGV35JtgjxPV3LjARYiNv1kpozKD4goX/q45JMmefHIf0NK
+ m90Jbk8ezxyMIYanrQ6rNLK6nUm68+mJdR+okNXuosCWaWG7uAl8yZ+Aw6yY8aRa
+ fBhX5HHY328gCbCPwf6W1gkfmdgTNIq339sIngyNlQAxQwW3DRf38P9U3PniZX6z
+ wqNNqGaE8+9OjHyJzSXCFiYHFuerfVYYmkm3zvuPaya+CLd+xqj2jV4P+GUW/3vJ
+ UNtNM4nUITgws2hNQS+oWEbcFiI7Z3M7JIAI1P2BsWvB6YCP63+p4Ij5Mij7IQso
+ 2B6j5/dohXKzQvSFt+r94bSjYfMGE5F3TGWERwmubGifap0hsEuxLdECdzC8yo6W
+ fie4V7AW+ssBRhyHf/FKUwix358xECRBYa/hm7gLMtGYU/Pt5wuK1APJGVbTizaC
+ y9LSEDkOTqGu/S8waJJkdgkethbMj7Z2uhpBRxlRKWr9I2GvUn+9kYnfuIINB9Iz
+ mMnUeK6L9mhu/+e2+8VSfr3VHASd9+9m4cVWqSEjPxgg91dHl5LkPvnQyj6gu7UC
+ IdIUziFEeEBp+7t7bqhyq+9ofxachn502tWeuureyzscoFrbfKlgb2yXg0K9TQLu
+ UFXGboRE9xS8xI6svAwKgWrjdbUlg+MLTVVLK5oA5srGLuzYc2bqu5KIL2ViWaeI
+ pnvYDWBIBtyqvbYUbb88DFTl35rcufpwXE9SBvWk8FO5pEvtncYDfU8bhdGg3esJ
+ s4uTg2BBb97kViYttyR0YWnWh2B63M+rqb4Kt0v2dGalUmHoTwKCBAEA5zjwfagb
+ Yg7tT4eN5MAvCRzeMKVKSKhPKmXVJWtul3SzcypUj8uB4wIU2HQwRUB6IIW7fbKt
+ 2vfJRCYKYMUtaavltQR1Vp6at5xkswIEeNfxe5vpOk7EiFuE+Im/MVnjvkTyKmYi
+ emO0Sv5reTHaQ0KbE+cgoNO85SHS+XXxm9VCdMbhGiYbhVNPzeIKX8Kwg/wiAP1N
+ hO1WgT/fXtD5/xtGjh6/IKruJ/CtQXvecT7+/QbT7rQ2Xx0YBu2/5W9XRobzIo6K
+ gLyUkRIIvnVC+sfnuFBMtmMT6e2P9FqMe3f9sIyqyJlQdjaBZeGM2S4sDNMZQEAX
+ uOjUK4sG+7KwQxVdNlOolrn1QTKGXIsbRH9cWbn3KM6qC4LkCEthoHB8oY9aC/Lm
+ gux4kJ4KM7cxXuHIDfcp2Zg8Cd2pUuH3U21HSpPkzEXEK0U7G5O8E24KpKfBUco9
+ CE+bReo1OKRgqpVyYiFbtC5xAbwIqd7+WFkEK8rwwMIwB/Yqhl+10T2KsA5vcvz/
+ fhjH4voEc4VWZGjjfyqkHhFTlJFpqaWchBI1EHhNezpIQj86eIGvzbgzZfGHg6I1
+ B+HQo3bqLvO0sO5XKJ/bid1Alm/RC4RfWIqSk6lNzM9yaMiHJ0LUrTf9bjpk30+y
+ nz5wbLpkxNZYP6LE7Zhm+um/TBCMRJ4eW0Oc5rcIEl+Q+h+dMeI6o26Nb3H7gQSf
+ L3L+VqxboT2tXxUKoGwadMqNrsoxHb87xX5E8l10DGdWgSer6bH44bVba7kz8Zny
+ XqNad8ZpBAUzXMbNmzBxe+q1jCcUI7simu7CO0UZgkeJhFY4tUFq07B63YRmP90y
+ fvoGp0MC5m+UlawIrRgq3oNOXhl4b6S6ow4JiDAndGz51KGRQLxZcqP3yBmjXwOb
+ GL2zlnGhoIe56qTuPBibmk50NLjsQIDo4heYG/h1MKa183yo1uwBxJfNFmcDQh/d
+ C27wiAF5fiZvU7yJ7d2EFgvyNOrcMaCq+1Yb4Au3gYTe5qpRNcl1kwFI+1DO87e6
+ rryA67YBDatghqhVuXwRgNqgpq3UcCDkczQPm9Hb+u9/kOz8vF9EwSBthivKpTxL
+ TD0u26vdJ+syiyzTPvHShgyyT5u5Zl1sAjzaCQte+WgsAtojgTrrYUDKDb/IhhQK
+ SzRit+0m/yXo4bAgHFklRdOs1OcIyTA9NzHke7JsCGgErqDTbCI5xqVp7dxAQcwJ
+ ybaX1tkRar66tS7DDsKw9PeHdW9LzUNCm4TGyT0SMbOz8PdVCb1TnbkHIJclLjz2
+ 4llQyPGEsqNWUF7FzNuybWSFl25/EhlT3lMMmH2T4yRA+K4Hc298gxofKgsnHEzh
+ CosyHu+csHDpLQKCBAEAqfQ2AtC+SkM0G45Shdf6eO7LfxTNfJ9SFOenuawcCUcv
+ 607IcK8Rr04EOet6apHoisJNJoe1n41m+hWyMjdQgoIvlxDvFczhV4BLcYkCEnPn
+ h7iKkANyWyHh3nGs1EuwQTzTCo43DdQLFbbHWFMNE9UF6mQwxzad9eaLF8mao1G0
+ OwFcGij1rEywHcqDgdT34LhS+da12W3z/7QTUjVBJ+G/E/0jzCtabcrlMtFBNPHz
+ EvbtqDsGnM2e2thId0NlKn4h/XAuDHojxLiIPdxOfCD+1NIPgr6e/UJOxF8Oqst1
+ A27ibXXEe5jCUlO5jtD0u2bTI8YXAdUgO7Eu+sSjnt8Ry9cr5YX8xdYCvak/FaCw
+ LTz27pF+oKXbL7wB6tyaTF0Jc+PHjeiTol3SYHLV0v494lhYjR/XleX1sPvRHu3V
+ oLuwAANg0y4MaVbwi9Bgg2/rlXkZwbwoH5HqSdoHGD0Vyiz0Z/qLdXkxntv7LPVm
+ B2NoHOJgHkE3VFpYLpx+wGSqySYr6oIzoenHRofVozWoWHIZsfbcQ6ufog/dJ1rG
+ mvenktm4/bGE22vNDKmNwZQ+IJE2vYSGLjMNosEn6x69Gy1pNf54ZNokQjKYpvVJ
+ nehrJK+MGe0wc3FwRH7avAyxPIBOujpId5bvTdHwfnpG7uKcP5iRjX468tuHicZO
+ wtvdTXtagc+UUyRm1M4ZVN1SCxwKo70b7ODyqBON52X3raHD3aJmkn1MViXhSdBw
+ lbbFHDHzhSo9E+LTVK5lOa+QlAe3DzqFCYaYO0rfDNIc9WOhL5FxtH5KL7b6uSkM
+ tYiQ7rEEVmnhCidCz/aBxgzVqCVY3dWtomAe45xXXRPNS0MzkQgA3R/BsE47ekAc
+ cSwCCIR5OxjHuAzGZHmSG2QdeG5rPAjFKpuWWneZZXBBr1Tnfsg43RNwM3VKsrb3
+ DceAmH/3ZguWcywqGnc+aSSlNaELznvdc7znG8+klnJvEaF6FrvaypxTMfnUcqLE
+ sJa0jzq+k5GEvi27MG4Y14R5EnupEIOVksJ4jMuFFH5NSHQ5TluKD1bzNQHAmF8K
+ fLXfSmotUPulCw6jsq0Z9JyOxwcV1ZDvc5Yzau2JmQ+wPYbGsccsmFvClc9zxlcz
+ S0FeZLXecxhM5+rUkh+McqpHVmmx4sDc5jDZbfgsDpMnSPL9uaeHQpPKM/oQWU/F
+ uwXs80nFIUZ5KFzhd1HXtg6rtPtpbscp8fL8MxmcyAK5rPM1rj4wU6QPDHamP4TZ
+ w4IY4YjAI23ZrPNmgW/k7t4j+1MsHfy/SbNVXxkpKwyPd5CQxepMvoWwVv+fbgHq
+ ygNMIbFf0ZsJdv8bwZDWUtc0nxr2JI2buuXdiVWJVQKCBAEA1tsADaOCHnJEbdxG
+ K8OxcURT6twM1MshFQKfNzBHCZG1llRFU4EFZs3uVNxSZmdtlH7wI/M+vfP2H89B
+ YX6XnlPPFY/ZAO5MUkWPBQ/g2/G9QOE1raq30QVJ4DEPampex9UFOgTCEPxI8k7L
+ y0hZypo/xBTHKurV4gy2IHxKUEWwhRaw4T174T3zMBrVDPq6T0qgxk6aE+T+tweF
+ JnQFedn8i99iNpbeylpIhEr3/j9Nbg1ELdFjnKpKQ1X1NNtrO+v2TawqY0nYu50I
+ ZwJLhQDw/0IOpoQWYw8O7z6cv7ZWFBICOHjOXap0PxmBaeYPpLMcCaoE4Rvo27VK
+ feQjCZL2lJ7UT4rorPaoB6Jzagj25aF6W37+X8f24QY653zfMrkkMWo6bHoT5j4U
+ uM2HoOUoomGDj+B4GarRxmSXD/zBfDlFJ9PEX3jrXcq/v0ZHuYzwhHHqmKhwXl0t
+ qz6DXL+WFD1vG1T0SWpSmpbNvYap64+ee192hk9mYIrbRl1rXAFt6mnRd3jLdMxi
+ Cn5iMteMXgRfkFkFU05z4uIzOD469Nz1Eoar0nMyf/vyQrThfd8bz2OQ54wb9Wlw
+ XsSyqJ4we11gARGJDMFGfO86MepCHdf6pVA2vctoW0EsovEeG6lDRoamMncwvLfP
+ H2EVi7xSRX2SY6GE0selr7VF/AQt7e0yIPCQpPtvdIUFfAwkfORrkg2bZdnzINL0
+ KjZHvcytnTgWtWPql/rl/QBQKEoXAyd3yHbV2RnmEzf/TqzZEJZ+AAjPQMWGMTo7
+ JzM18QYC1CwFp+IHZP6DJlij5VfrQGwLMhYLYN9FvpfVDnQ1F1YKNVnzrC3ktNP+
+ A+a3KQU84qtMWouk7Ke6U/O8QfuvO8+TOgpxc/XWJVNfwrk+a7/3ITkWi7zq/ecF
+ C0hTqAguH8W2AYLZVIxpa97diAnonEUZkGW5OVIjCeMwGV/9gM2kJ3O4UQF7nMXS
+ ATjxxduyR0fJjzr2i9mZVrw3ZWk0adI5aK7w+WJWKCbVjA5rpKwIQkv9upULLvxm
+ qi8PeNE/JyZ0lUmSco+gkbjez3YW8vHk+Z5G6YJtrxTPrK3XWA+lNDl8tpE704A1
+ 9vwEcXLrsNfAijOOFY9cjhRNYx7sc+8PB66XBudwiosXYb10g6YsTPqePheli8dg
+ r0Kozd59WBo2GlaBiSxN67VZjMpdx9uZq44Mm8Bx9U8wZLgcYJyDUSCqD7gOC+SU
+ 3J3ynJ2hPzwGdvrz8lnDFC9l22Fb3m9TUr/rewQ5Dt3QrwTZ7JzGPdsEhnv8J1zV
+ s7E3aWNHZf7YI/J+eKKCjWzfk/2T/LbkDvMHNI1x+wAjsSc6wjSu2QtPKh8CKeD5
+ trKU2QKCBAA3IgjiRuX5dCkgoq6T+nqCMy5/vrbMRMCMLfGsOwk9wNQk/NtsuCoP
+ fyFLWPD3RwWv17904oFWiSA+kfCQ8QAlLHJuweCUybbklh+RN8PYHs/llxt0G7Uf
+ sk1VV9XlWINE9x5uxoOQq8+aMFDvWjbq7H1kbJkVWSYJjQgjqk1Afp02FIQ6kWKJ
+ CBKcMT/AvONsFjjyBTUtCGop3ftytEFedw3zlqhn9y6HITTvmjSyZBoPZox+vacI
+ DDArlo5EC/BZf0zpaIgDJccGLJI/eOPqX4VqhX+EWJErVdX0dlLDQcoAlM6lIRer
+ OHHdvyKN5Dvv0eSlgp68jjrLTwEr46DcHrB/W3L5/S23rJ3G6IObvS6SljPYFQHo
+ l/hi3ZGM4wSazjQIfNPk4mlE/fB5jmFWolijGLc89hGhpmyVJ6bqISUowM2OqydV
+ NhK6oZhyd32ebCHp2XQ1CKPdfrEhdsEreLT9E5pMNVptsZ7ptxIhZ9ILuK9fCRC0
+ Mz1bQ8ZX/VgCvb6qcU/7aGZasqDMQopbwz6bVg8FXAy2Vx1vyy+GXyZqBRdi7Lep
+ 6XzspzM5AJ2Bpk2PkMbiCRBnjrxDHpXcokq+J5sTYQug599v6TXqNf0u6QHd3aJK
+ t0NRtiiOtErp8HZ2yWcocwcNnTTdLQ43bJX7jevzQ6KVZwdavMOebHz/XY/HTUzZ
+ taq3/vRx2EWmsBxqpkmu3FOKopkAB38j6ulUjMebamopw0vicLJj5cxjqAcOLrTB
+ 4Wgr0sF/sdlzSCyEyAAoQMPfbBTIXwErJylHEPLYncJG9QrH+GKDWbw5mDW63wYY
+ bHKjLJ62/rkJldyWGTsSMeHmKbt7SZUv3ntcOjeugQubZL2olZwzyAGqUtQ/tnJV
+ rBPuzRddPWA2Jq3ZOfpmLmmAlKX0n/udTNBbK+eBht31DZWZv3thqkp2hlcn70tL
+ jrJcQv9hpqCR3u96Xfqf+Fm2qx3IjhIzhtE6hlGBaDgl1+4Np2kNwwboWtT11Duv
+ kcwZ5dskO3i3KGEESyi3j/qzv6HVWEjq8ex/H23l9QigAN9ikPRdDuSmKPxMgr76
+ CmXc4AtLZA4iiosHgHZ2GxDmlPB+yohBy3AGuemv6sL4EU2C1Z6wDdOUapKJtgSu
+ M++J8FOtBW+yuySCIBCAYGVbjm/dVi3ay2VhMiW7uq1aBMYh8Dg9DppIPJ0KS8dB
+ dsyQz5OTBDRoQ1kT9vMYIRZrjbeCNqQnfkWAIxejT2TXQxPVJf+efVqrotrzOivb
+ AdoggNoChG89dOwEJqDRJ4OX4kvuNuJa1M9kvfdoEc9ToKXXY4m7GSaf4JyGEqe8
+ NtjUPwBEHYJ9bcJ/fgqxWtoc23l/1OfP
+ -----END PRIVATE KEY-----
+ KEY
+ end
+ end
+
+ trait :key_length_8192 do
+ certificate do
+ <<~CERT
+ -----BEGIN CERTIFICATE-----
+ MIIJLzCCBRegAwIBAgIUB6932CbRBXCmSbrDR1L1AeVQsq0wDQYJKoZIhvcNAQEL
+ BQAwJzELMAkGA1UEBhMCZGUxGDAWBgNVBAMMD2xvY2FsaG9zdC5sb2NhbDAeFw0y
+ MzA4MTcxOTA0MDlaFw0yMzA5MTYxOTA0MDlaMCcxCzAJBgNVBAYTAmRlMRgwFgYD
+ VQQDDA9sb2NhbGhvc3QubG9jYWwwggQiMA0GCSqGSIb3DQEBAQUAA4IEDwAwggQK
+ AoIEAQC5qEZ5QBj+nPYQjpbOBwRWIsNJVPgB54ADkjYSfL6kbPyeQsgAP2vU8zWA
+ Z1lIyiV3M5qa7KZ4oK4uin7O3rwho2XoWaaf1Z6ifpNeVl8Favyn/IeVJp+jchtl
+ VyDAEaX0GozOfo5X+xOAQE7twUNSlB8+6YIqxTQf28zB7ks/2w2qrJvXO3cSgQyB
+ 74tTUn/25hwATNlEzSBRdLzoNeK7ZInayLAvxwR5krIm+yjH0EGsI7XZQMLjZOwL
+ 95FrhfBLYODwPam5lAzPB6paeY+oGGkNwGhzECqmHLnXMSlY6lpeUs1vOCjVcE13
+ v5jb+nIwLaBWvpF4fnskgwI5+hiBcfsRuByKH+x5pW7ofCnqAOgkq8m1ErqRIvN8
+ SG54ySlA/+RoPI+G/1/x/AdQtdQxL5X80ZeUjpLGTGy3kPVEQ8feP8Ny3rSklsPt
+ nIMk6fRWgiw2ba2jt500F+FuntCj3PWDZfQ+LBBswuLpQCoB124RobXwFCCfqSd1
+ 8+S8PfGJQ4gDwc4K17lm/WJq/X3mUDmxJLhEIvpZUxi2App7b3KqdCMthOUxUFoa
+ IZiWtbK3h0A10c/Qr29BI3soKxD30dw/DylR42dTGZUX9fdEkPOulVaTXmApnGkv
+ HRomQ2VSJQq8wccceXvte/ZV2duehDk7032LhlNCeosfaJs4L5BXdB+xGfxDzQ65
+ p2xo/nA3k4eJhWhIejzOmJ1zUcsAl9rDUK312hajRh1UFDIr53yYnpAjcxyDYZqv
+ EcuFrjNZKi7zPsZC/17f48hOEHXC5PcFJrm7dvJa2z3+Vswy7IVljVhxu5CQQwk/
+ aIWDAqQXjXYL9L9y8kGspczucVhthOuDKad1Hopy3E5HpaNhXWOLqglWQihjRviG
+ Llt1DU5RnMRJJUp2JW8Ic+SAgtBpS39xFyJRj+gBrcWopdDWr/leTWY6NEKijTRg
+ SzxmvFr9uRlrAExO+FNafeTslYoStTQPysBwNC0HWa3SV04GU/LBRVko6f/ee5WQ
+ aPjGeRiSqBDZBL5HkJ90UMAXrj+2lv+yW16T9upD0H8gv/GpZZIPoZ9fcanSWMz4
+ 1BloOglzUC+AWe/8j6egcpwDbVMbOq51XxP5BkXp/wc2vzxMF8OEMLm6pu6arAup
+ 8JFtFwrS7Cn85iWIppWxNiuSptpBnYdCd+e51uKjigvA+KGyarnPyJOOHynLVwpg
+ hofK8hw0EzDgkQ2ysh9rwqvKtMJdXr9Vi4LGlacnTvO3modvk2B6zv4AsSsR88gP
+ HMTM+Y6J8pJVs8OkY8S/utlOXKrTv3Y1+PRueVWtyVggfvOaXy2UKkAp8e6zBZ6V
+ 5D0BheSOW99ImH63c0AXQ8JV6j1TAgMBAAGjUzBRMB0GA1UdDgQWBBQlK1yjMe+/
+ 2592Ei6EiNHBMnzMMjAfBgNVHSMEGDAWgBQlK1yjMe+/2592Ei6EiNHBMnzMMjAP
+ BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IEAQBYie/7NeBW6jzXhL73
+ qTfubvtpYkNSCSYj5OvunG5sCzHEpWv31yEWbhRxEoLlh4rZxGtsn7hqsa6D4u6U
+ /OXydn8NNPIJH+bOLkn2QLz9Rn8N3YLTAnbjtYIIPDSy2VXMnJAP99eBH/+p+V0V
+ ZIxgSVBNljs34DJsLzOaiDz5js8z/ZPuJbiNjmUOpPxFj2TS7zuVWvpmrei8yGLC
+ PPxe4+LzIRTSgOJAdfyVzgBBD4vjFyPyNi6z2e5t0PrZ2jMJIXtjJQ98CMDEmn+m
+ EYrB3RoSTIGAv1Y3MZfkcOY+SzkqEvb8ojqODX+6axso3Vw1ylVQnGhmOesSVV7Z
+ Fnf2AtaRdjOJD3rhsZw80fjv/PW8NKoS1CdDiylJ0t1GQycSTOs7aCZx0aVc9iDY
+ OAKZU6rBWbWBQx9Nddsll/oeTkRhd3tW3lZH6r8tXweDERQffrHpAv6GiAnqmQKQ
+ s5t8cp8iB7mkCH8YmONdaoR+e5jGwEAVgax1BDedt1Ryz85gG6udAN2xHy9NAxPH
+ z0tRSpcvBztkw5jTSUxT1cVyYt+fDQpB8LX085Vg07bniCZ3oZmq0QO0M3mGrrva
+ FX9VnsakfTJ7lMlaEE7qMfE58S5sLkbXfWG649MFs6lK0e3vkU81iy+DiAA8wA7D
+ t/pswj0DLAQHdrNNvpRTMrl4edHAZawQFMDuTO4EFMYygcYu7s/Q4RMZaoknYbWk
+ 57JMXg5iC4/GcwpMZ33zA/zIuf6TMXAS2p9yjCQOaPhiwWVCa4awtCZl4iVI5Q78
+ yxWN6mLmqH/zHiIBf4rtcwYmSzLyg8rz1FRjBR50BpC3Vt/F80rIe46QgBkyxwT3
+ FhtOJkGAyw6yko0JinA/14KmeJnpf3g0YZig+jhG1omhigms6U18KYdeVy6N/5wf
+ 35ut0cGvXE+Ii4XKRZuVx0UxszaS3aBxLvsQBOOKEBRJljCHimWGPqGYECgvb7BK
+ 20UYkYmxX/l/hYjlmE9/wI7Wozd3ho2PcPfI5kSBHvo8qVg/VouLAq1cxvHPWjz8
+ FOOgukZKnLRnVHz1wqH0HxoQKrW4iGZINHDBVh6R1zgEBWAm1X3xNQ6Sww+xIoUz
+ lYoDi58waKHs6ph82qoPfRtielxdZGLz7JhijQqWG+QYB45d55GG8FC15byH0PTx
+ JOlCjbDbHKShYdcB/UK7nr7fNTWgjsaF7gaLy0GOZS4zDE6/TtDNLC6zrwipcYOm
+ dqd6UrgdkMHccm7qCJpBdD09cTH8sJIgGSEC3YuHvqJ1/bUMaXAF5em2cQBcH+Ng
+ q98r/+7CUPADtb/H+OjIWhW2vLGISMbr7xgWqkETsrh+XZI3QOuz2/n38PZBcwhm
+ O5R7
+ -----END CERTIFICATE-----
+ CERT
+ end
+
+ key do
+ <<~KEY
+ -----BEGIN PRIVATE KEY-----
+ MIISQwIBADANBgkqhkiG9w0BAQEFAASCEi0wghIpAgEAAoIEAQC5qEZ5QBj+nPYQ
+ jpbOBwRWIsNJVPgB54ADkjYSfL6kbPyeQsgAP2vU8zWAZ1lIyiV3M5qa7KZ4oK4u
+ in7O3rwho2XoWaaf1Z6ifpNeVl8Favyn/IeVJp+jchtlVyDAEaX0GozOfo5X+xOA
+ QE7twUNSlB8+6YIqxTQf28zB7ks/2w2qrJvXO3cSgQyB74tTUn/25hwATNlEzSBR
+ dLzoNeK7ZInayLAvxwR5krIm+yjH0EGsI7XZQMLjZOwL95FrhfBLYODwPam5lAzP
+ B6paeY+oGGkNwGhzECqmHLnXMSlY6lpeUs1vOCjVcE13v5jb+nIwLaBWvpF4fnsk
+ gwI5+hiBcfsRuByKH+x5pW7ofCnqAOgkq8m1ErqRIvN8SG54ySlA/+RoPI+G/1/x
+ /AdQtdQxL5X80ZeUjpLGTGy3kPVEQ8feP8Ny3rSklsPtnIMk6fRWgiw2ba2jt500
+ F+FuntCj3PWDZfQ+LBBswuLpQCoB124RobXwFCCfqSd18+S8PfGJQ4gDwc4K17lm
+ /WJq/X3mUDmxJLhEIvpZUxi2App7b3KqdCMthOUxUFoaIZiWtbK3h0A10c/Qr29B
+ I3soKxD30dw/DylR42dTGZUX9fdEkPOulVaTXmApnGkvHRomQ2VSJQq8wccceXvt
+ e/ZV2duehDk7032LhlNCeosfaJs4L5BXdB+xGfxDzQ65p2xo/nA3k4eJhWhIejzO
+ mJ1zUcsAl9rDUK312hajRh1UFDIr53yYnpAjcxyDYZqvEcuFrjNZKi7zPsZC/17f
+ 48hOEHXC5PcFJrm7dvJa2z3+Vswy7IVljVhxu5CQQwk/aIWDAqQXjXYL9L9y8kGs
+ pczucVhthOuDKad1Hopy3E5HpaNhXWOLqglWQihjRviGLlt1DU5RnMRJJUp2JW8I
+ c+SAgtBpS39xFyJRj+gBrcWopdDWr/leTWY6NEKijTRgSzxmvFr9uRlrAExO+FNa
+ feTslYoStTQPysBwNC0HWa3SV04GU/LBRVko6f/ee5WQaPjGeRiSqBDZBL5HkJ90
+ UMAXrj+2lv+yW16T9upD0H8gv/GpZZIPoZ9fcanSWMz41BloOglzUC+AWe/8j6eg
+ cpwDbVMbOq51XxP5BkXp/wc2vzxMF8OEMLm6pu6arAup8JFtFwrS7Cn85iWIppWx
+ NiuSptpBnYdCd+e51uKjigvA+KGyarnPyJOOHynLVwpghofK8hw0EzDgkQ2ysh9r
+ wqvKtMJdXr9Vi4LGlacnTvO3modvk2B6zv4AsSsR88gPHMTM+Y6J8pJVs8OkY8S/
+ utlOXKrTv3Y1+PRueVWtyVggfvOaXy2UKkAp8e6zBZ6V5D0BheSOW99ImH63c0AX
+ Q8JV6j1TAgMBAAECggQAOlfCRco50JGc1hkpFPepijQEcKAOC/MnDHg/G9Itytgh
+ Ds7nsQQ9K79+OarAqRo1ad9Cn5rsuY2tDx0gunvOXTfPB5Rcw2/LGT9zqjq0Q6ya
+ V2QJa3qmwiNSrqcRuKoTH8HUK/QjYUyalTwgUaDhOisoIooZCL3OIpDdKLhs11VM
+ Vy1FD/807RC20IJpozaS1hD8DbAYuwFHPbHUx5hfdwoiNCnLDEibhGTwLUXSS/CL
+ IsBaHjq2w+TsNNqIzWRa3iVEqtqF4ra+y7SZ+TKoTWfWY6bqa/ZRoL/4OsLNPo7u
+ 9SNKQcBBPMm83nvMWpy6k59S+s+KQXZl1lSBN5z7ZHpgLvJPraxYkOXHE7IpLcs5
+ KIT/rzKChKeaIp1UcgqtNyrzKTqW1BKeoRnVZqytUQOmO7vVya6AO2a653jbSqeO
+ QK6DCi8oT2y9h4cew1PuH91qbXRME93Yvg0fH7cy07vVP4Sjm4IXa0ZXLnumd8uu
+ YEYUOazpj6MFrpCFeg5xP/SD4sJdsJSYQ+AutHaSwPTHHH7wlSD00WtGobPxvgaI
+ 3z397AkOSU/58KpMHFhfIEOVjxQvHWJ0MOEoi7f07hv5/asTDhPLXZb1foEiQl7W
+ 5S8y9L68s3beqxqXJB0b0xOm6yhuHOmkYz4IbHQ5Cvh8T+unUVhWA9ckrysdVCs9
+ Kcgs2knAuNpSXGCoDW0zGqXRQg11WaHJ2S0woLyrDfhk14tBwhojZblwN2d8nqsR
+ 3JxM5Dcc5tS722ousXqxiF9DuSA0ekwvp4mljwkw/9mJYpbk1568aE4aavjMZnfb
+ r0RGGwXhbIHG+41j1izxKIS0au8H8BsqEJbJubWxHAVFODdVlAckuRXV4ZzL9Nro
+ MPrJYdwTh8dVcFVtAZQk0cSlWmg7UqbsCZqZ5Kxk4HJhK3xbzF/cNPnY7zL2l3mo
+ 7qYONaRKRdelWZqcB9z+ZuGVMUfMxRhaN9InubRlpqoOTXK22GbmhcIApsXi38GU
+ Z3cgrBrGhG42Tqpp3Ub+goGPw8cnQM9+OItxm0hcL5BJOeC63uqavVemLhs9JM2g
+ 7pkxRPeTagdulHRWEFVTtpt1AN1Am9fivLSEXa6Oz5Fab4b36qOJx9wdCFYC0nh9
+ v/BEBJmlsxuVA+gL0e4tYcxMt67AMdgMyhKzT5A76Jxp3AcJ9QoHZODHkfgIcvAo
+ sCPkBcBNiRZ4EJyrB27fSCxSqqfIGo+ZttZ/K3+g412+2ALfA+TM4LpCmLN441Rb
+ XPa+Gz0ZAroXa+RVO3M4Jr9aJeDRKMSty5wIXavA43L372A+6WuwGKOVqiP1svzV
+ Bs6rhoKhoPsxjIPwFJluH9XMKgBhvWFdf4lLtrGDaQKCAgEA41NZ0VeOW+2D13w2
+ SUF2oq1pbZcloExPN1GKqBf61l/VzmJxrNiEHXm7oQlnX7f6KI9FlpC+Po3U59Wo
+ OhTe6t7eF+e3vvUObBssrbe6dkiwN7deJxq6W4bdddjH1MLBEpNu4qswgC8v17GA
+ hAG001DfYnphicZiRBayP3mgNM1xgnwW4YoyCL7+rBKYnE4HEfhNcgJxDiKBsx2y
+ sR4ARe8RORqaH3AkJh/M7IcsRdrtTEqovaaWGiiF5XRx0xUEj2MtnYaq4j0Cp2jl
+ vPVhZu63kxFmFZbz4G0qmuCp3KzH0JMSWMqM4zFV6ZLi9g5F/6mWCV6RJ2aK9hnD
+ JybaqN1tnriwBx2k1nclfMjTAGunfg8gFT+V9Zwqb9kSOmIaQonbKj9/Zu4OkiWh
+ QGJQKdQ7L0PBb82XyPEaltxrKJRf3rxc2xVjstcaSw75EukRP95jFV0IzYRbgzYX
+ NC/0eojriqR4K3JxlKTAYJTw+0WciCWFqnxD54w77SetUkbnV5kMmc8kDLyBli4a
+ Gtwu0tO7PR9M37hW2/ND1U1PWpV1II5aQNtRf8P/ZMA1A6lFa9R8AyxbCz7om5FH
+ vYjYsW8D+pGfPpFsXlye7kwG7dNx8HrENhUu+j34PqKVpdyNOxzuYOo8PQQDhOmf
+ +DDDqqQA8D4gcifPj1I4wsM9tocCggIBANETZm7i5EDnInSsfj7vVJV4DfxD245e
+ s1ZlqRCd/Jpgc3RbmXdPFTF8N12Q8ls4bXPDKiTiy1Oy4OeZ5+6lgkpRCz2Ptgtj
+ Q1+is9VhPLoXaqRHbZ2Y7F7auN9KcthFt6WYurDNiqXybmZ6X7EDM8uDLFsL+2nk
+ QswBNqJrfTZyvPe4ZUy+WkS3ma2Zo9xujTwV0SHXbzwW/o8pIMWDLR9hxMVUYuXG
+ uEYORT+n0TVDFmUyHxjyR5j1tQ8jyLigcMaR1ysjzMyM2LWY/VZXuVH1OdUr7xYn
+ Kq4q0BtGAWNzOzO8jJjhtPmacVJA1wgI53nik42nE7an4vcinFDJzwqFFdK0abWv
+ XMZ3E36XT/+QY8GZ9Y5fEBOegfS1DyQHoSgqbJONS/cfRe8NSTc1+e7JfeRNUtYD
+ SEk6sham0RXnTHLCOhP59DxX5RWY9oNgIBSXRa5ZS9AOx0PIMx0ZaSBBh2vsAe9j
+ rnOkI8k/X1aqD1wo+t2wR7xFG7jPnnzqNOqbT/FSop6NCZ8eEPGWJi9Sc5Yks85J
+ N6XJwvFdLfY+VvQjqgqbh13mdcosA0JVAzqnEZI6wj02KSqAvm3Bfmfp5qnmE8LY
+ TEEBhYzXcwEkGoQ5Em6/+zhXnoFACdkSgB5yw0UmQDCNE3I94WQ+qQI/7PnoSqHg
+ KTRufnjHrGnVAoICAQDdfcYCyeukOD0AhT8ji0w7XvldVSrND+0TOjj+ZTb7Dy90
+ Qsj9n4zCZ2zgkBgP1GNCh65G8MrcijcKmEusI8+7SuFcq2KGBaFCxgt3S4+7VkGU
+ V+697TXsnfBDta+m5wdVwR8GbcP48YENCR7t//ee+apd+l307r2qF+8fF7N4H0Bc
+ 4ektYgg0K1xablgR25jZ8nQLBMQBALAcxG/qUQ/1E+VVHU1UGmCuYMe7Ik2J1rDl
+ Z80X1CtmW1ty4U1SXKUvzHOSi7cObmGamgNWZEO+FhP5kLdFi+odHmCnvQTkRdj+
+ qX3z048IgnZx+bN4CRo865CLmn+VwzzcYueZyyq749u+Dbc9h62nZTm6ZrXoL/xn
+ P/eDnIvRXpKengM7rYBmmolXlbzdnk/GKDIAWIpA50+vUrYz6D7fA8Rjf2pNhJwQ
+ mrlioWmdxCYTQgh/W2V6NIWYOCiujirYIqjjKWJszeGqGWwY8Q4nxYrHz/co7H+C
+ zAR7w04qWqG9Ba7DfuBDopT7fC9k1XrxyAOZbjWVJ8XE3S16wdKnxlOujgAmg382
+ 9FyN2uOCuIasNPaylYhVcxhNwzcGMwpTIW+kBaUU5NUcnCxruye6nUYhayRJL39R
+ z1xEUcmO+zhYVvO2Qrm9Aghll3SQAswnAbbjDShoqBld+zqD37RFsdgqNC96GwKC
+ AgEAr615eNs1qEOO9DKssf0wOZfzSHFMX0i7sHEjqk7WHnHFEZSWU2YkDLyvWPOe
+ cX/smET5eJ0I9H9t862i8SgpXoDSzRugf9kcl5ODQFzARi2+8eMC/FWu59UpWpaY
+ AZozQfYfiMhtJBudIIbbOUXTk8HY13gt/UBL0FeErN1dDQ9EMXLDy8R23R7ZBsH+
+ qg5Kpp4+aA057mfz5h9M5infFGt2h8jsgN6FoHgFQAOnCvYgL0/6SV/rQV/Uj7Al
+ zN0jZfbNsfYW9Bm1ToIK/S4hDfjca37LGvY2KrrWutQL/qCoskRQb3XYN5PKfK73
+ AE1bE1OLYI9vRR+02qw+ZLPuQIyrVa061etQLYOI4eoK0ldlOxw+9S5zt8iMsi4h
+ VskCZVmgeitUFYY1oTSsvLOiGz87hUZjwGhpqP6k/duV/K2p0xPY8UgqLTo9x/QL
+ z0BKNIMXjfSCe4SvcwkZye28I9psDAb3aUt9HrZhS4zwc0XaOjpE8VpaLJx1Osla
+ BuRVKnzuo3woIMmpuAXvftAHreO+M/8LBt8G30u1flIpeKvRLLt6+gbNq90mRIbP
+ BkGgwPv5C8JLzFtiI9CiMl9P88jahRBKsoJFMKoyqbGvdNn9XfUGxACU+zbEfR5u
+ J/Qfq3YLFmOZtDIWkPvmE/GC2d0VJrhFXdeZR/FAXASLnzECggIAGcuDDxovnC5W
+ fxNR3xAHx7iLlxmAvYMJ8+zul6kPhymyJ5ib36RyhbdV+53sKssC9r2xesOivI1D
+ y4gUafqzEhZfNf3cvedCh5CVqWbAIjasYSSa/ZCqbZW7/wA1Zs4pG8xFXZKdLybo
+ jwDzidZ+NokW7mXZL9TTFqtOm3ShhfGZOMdx8TtAnn2iiemYTHjo3xuRROU3M0xa
+ ADbzpJl6/+pYNOt4VxghHNqcUFdQwqnnnueHCVRwCq3rtxsXPFULUZKF6KhZlu99
+ nmO3zVn85Os7lGa383RZHcu43LwxuXsAMHgay1Def83kwnKda7lKel/eWw7Cdj2v
+ uVr3V27aM1ZKOWYivm3aodlYLrhwcnqczkzAU1uP5PHZhJrUY3okZb/cPFShi4ok
+ ExpntdKzXVXg2zDdB8GyeqZ5ba6zpkoFxBBwMFWgd+PajLFvN7lHEC9BrFg817hT
+ vjPpz7M6hZmLLCkrIPA8lgM2r3AJF0Uu2IgshwTMP4TmLwMPDkDfctp3qWR4EA92
+ DH76UVaDfcfE0WSCh1Znk/2DPxVv8yYVK0elqpM+JiS0xsLvzksTk39rbv7qdy6f
+ 1EoJJFfrfqvrowyGG0f946bb5k2nsNFjKwHexQMpYx35pGbSp1CCOHqzLIS+LjOb
+ c5RerXbZTDEafdHsyGlkhu+nOjYvyGM=
+ -----END PRIVATE KEY-----
+ KEY
+ end
+ end
+
trait :instance_serverless do
wildcard { true }
scope { :instance }
diff --git a/spec/factories/project_alerting_settings.rb b/spec/factories/project_alerting_settings.rb
index 2c8ca7c70a8..ef0beb6b98a 100644
--- a/spec/factories/project_alerting_settings.rb
+++ b/spec/factories/project_alerting_settings.rb
@@ -3,6 +3,24 @@
FactoryBot.define do
factory :project_alerting_setting, class: 'Alerting::ProjectAlertingSetting' do
project
- token { 'access_token_123' }
+ token { SecureRandom.hex }
+
+ # Remove in next required stop after %16.4
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/338838
+ transient do
+ sync_http_integrations { false }
+ end
+
+ trait :with_http_integration do
+ sync_http_integrations { true }
+ end
+
+ # for simplicity, let factory exclude the AlertManagement::HttpIntegration
+ # created in after_commit callback on model
+ after(:create) do |setting, evaluator|
+ next if evaluator.sync_http_integrations
+
+ setting.project.alert_management_http_integrations.last!.destroy!
+ end
end
end
diff --git a/spec/factories/project_authorizations.rb b/spec/factories/project_authorizations.rb
index 1726da55c99..4e4330b37a6 100644
--- a/spec/factories/project_authorizations.rb
+++ b/spec/factories/project_authorizations.rb
@@ -7,7 +7,9 @@ FactoryBot.define do
access_level { Gitlab::Access::REPORTER }
end
- trait :owner do
- access_level { Gitlab::Access::OWNER }
- end
+ trait(:guest) { access_level { Gitlab::Access::GUEST } }
+ trait(:reporter) { access_level { Gitlab::Access::REPORTER } }
+ trait(:developer) { access_level { Gitlab::Access::DEVELOPER } }
+ trait(:maintainer) { access_level { Gitlab::Access::MAINTAINER } }
+ trait(:owner) { access_level { Gitlab::Access::OWNER } }
end
diff --git a/spec/factories/project_metrics_settings.rb b/spec/factories/project_metrics_settings.rb
deleted file mode 100644
index b5c0fd88a6c..00000000000
--- a/spec/factories/project_metrics_settings.rb
+++ /dev/null
@@ -1,8 +0,0 @@
-# frozen_string_literal: true
-
-FactoryBot.define do
- factory :project_metrics_setting, class: 'ProjectMetricsSetting' do
- project
- external_dashboard_url { 'https://grafana.com' }
- end
-end
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 0111083298c..443bca6030c 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -535,7 +535,7 @@ FactoryBot.define do
factory :project_with_design, parent: :project do
after(:create) do |project|
issue = create(:issue, project: project)
- create(:design, project: project, issue: issue)
+ create(:design, :with_file, project: project, issue: issue)
end
end
diff --git a/spec/factories/self_managed_prometheus_alert_event.rb b/spec/factories/self_managed_prometheus_alert_event.rb
deleted file mode 100644
index 3a48aba5f54..00000000000
--- a/spec/factories/self_managed_prometheus_alert_event.rb
+++ /dev/null
@@ -1,12 +0,0 @@
-# frozen_string_literal: true
-
-FactoryBot.define do
- factory :self_managed_prometheus_alert_event do
- project
- sequence(:payload_key) { |n| "hash payload key #{n}" }
- status { SelfManagedPrometheusAlertEvent.status_value_for(:firing) }
- title { 'alert' }
- query_expression { 'vector(2)' }
- started_at { Time.now }
- end
-end
diff --git a/spec/factories/service_desk/custom_email_verification.rb b/spec/factories/service_desk/custom_email_verification.rb
index a3b72da2e9e..4c3322169fa 100644
--- a/spec/factories/service_desk/custom_email_verification.rb
+++ b/spec/factories/service_desk/custom_email_verification.rb
@@ -11,5 +11,10 @@ FactoryBot.define do
trait :overdue do
triggered_at { (ServiceDesk::CustomEmailVerification::TIMEFRAME + 1).minutes.ago }
end
+
+ trait :finished do
+ state { 'finished' }
+ token { nil }
+ end
end
end
diff --git a/spec/factories/usage_data.rb b/spec/factories/usage_data.rb
index 0e944b90d0c..1084891b07f 100644
--- a/spec/factories/usage_data.rb
+++ b/spec/factories/usage_data.rb
@@ -35,8 +35,8 @@ FactoryBot.define do
create(:custom_issue_tracker_integration, project: projects[2], active: true)
create(:project_error_tracking_setting, project: projects[0])
create(:project_error_tracking_setting, project: projects[1], enabled: false)
- alert_bot_issues = create_list(:incident, 2, project: projects[0], author: User.alert_bot)
- create_list(:incident, 2, project: projects[1], author: User.alert_bot)
+ alert_bot_issues = create_list(:incident, 2, project: projects[0], author: Users::Internal.alert_bot)
+ create_list(:incident, 2, project: projects[1], author: Users::Internal.alert_bot)
issues = create_list(:issue, 4, project: projects[0])
create_list(:prometheus_alert, 2, project: projects[0])
create(:prometheus_alert, project: projects[1])
@@ -62,7 +62,6 @@ FactoryBot.define do
# Alert Issues
create(:alert_management_alert, issue: issues[0], project: projects[0])
create(:alert_management_alert, issue: alert_bot_issues[0], project: projects[0])
- create(:self_managed_prometheus_alert_event, related_issues: [issues[1]], project: projects[0])
# Kubernetes agents
create(:cluster_agent, project: projects[0])
diff --git a/spec/factories/users.rb b/spec/factories/users.rb
index 67c857165fc..d61d5cc2d78 100644
--- a/spec/factories/users.rb
+++ b/spec/factories/users.rb
@@ -126,6 +126,10 @@ FactoryBot.define do
end
end
+ trait :no_super_sidebar do
+ use_new_navigation { false }
+ end
+
trait :two_factor_via_webauthn do
transient { registrations_count { 5 } }
diff --git a/spec/factories/users/group_visit.rb b/spec/factories/users/group_visit.rb
new file mode 100644
index 00000000000..a98ee61332a
--- /dev/null
+++ b/spec/factories/users/group_visit.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :group_visit, class: 'Users::GroupVisit' do
+ transient { target_user { association(:user) } }
+ transient { target_group { association(:group) } }
+
+ user_id { target_user.id }
+ entity_id { target_group.id }
+ visited_at { Time.now }
+ end
+end
diff --git a/spec/factories/users/project_visit.rb b/spec/factories/users/project_visit.rb
new file mode 100644
index 00000000000..40ead720061
--- /dev/null
+++ b/spec/factories/users/project_visit.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :project_visit, class: 'Users::ProjectVisit' do
+ transient { target_user { association(:user) } }
+ transient { target_project { association(:project) } }
+
+ user_id { target_user.id }
+ entity_id { target_project.id }
+ visited_at { Time.now }
+ end
+end
diff --git a/spec/factories/work_items.rb b/spec/factories/work_items.rb
index 4a2186f2fcf..1827f2a590b 100644
--- a/spec/factories/work_items.rb
+++ b/spec/factories/work_items.rb
@@ -26,6 +26,18 @@ FactoryBot.define do
closed_at { Time.now }
end
+ trait :group_level do
+ project { nil }
+ association :namespace, factory: :group
+ association :author, factory: :user
+ end
+
+ trait :user_namespace_level do
+ project { nil }
+ association :namespace, factory: :user_namespace
+ association :author, factory: :user
+ end
+
trait :issue do
association :work_item_type, :default, :issue
end
diff --git a/spec/features/abuse_report_spec.rb b/spec/features/abuse_report_spec.rb
index f934736ced9..f1df5c2d6f0 100644
--- a/spec/features/abuse_report_spec.rb
+++ b/spec/features/abuse_report_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe 'Abuse reports', :js, feature_category: :insider_threat do
- let_it_be(:abusive_user) { create(:user) }
+ let_it_be(:abusive_user) { create(:user, :no_super_sidebar) }
- let_it_be(:reporter1) { create(:user) }
+ let_it_be(:reporter1) { create(:user, :no_super_sidebar) }
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:issue) { create(:issue, project: project, author: abusive_user) }
@@ -57,7 +57,7 @@ RSpec.describe 'Abuse reports', :js, feature_category: :insider_threat do
describe 'when user_profile_overflow_menu FF turned on' do
context 'when reporting a user profile for abuse' do
- let_it_be(:reporter2) { create(:user) }
+ let_it_be(:reporter2) { create(:user, :no_super_sidebar) }
before do
visit user_path(abusive_user)
@@ -108,7 +108,7 @@ RSpec.describe 'Abuse reports', :js, feature_category: :insider_threat do
describe 'when user_profile_overflow_menu FF turned off' do
context 'when reporting a user profile for abuse' do
- let_it_be(:reporter2) { create(:user) }
+ let_it_be(:reporter2) { create(:user, :no_super_sidebar) }
before do
stub_feature_flags(user_profile_overflow_menu_vue: false)
diff --git a/spec/features/admin/admin_abuse_reports_spec.rb b/spec/features/admin/admin_abuse_reports_spec.rb
index 18bc851558d..973988560b3 100644
--- a/spec/features/admin/admin_abuse_reports_spec.rb
+++ b/spec/features/admin/admin_abuse_reports_spec.rb
@@ -11,291 +11,195 @@ RSpec.describe "Admin::AbuseReports", :js, feature_category: :insider_threat do
let_it_be(:closed_report) { create(:abuse_report, :closed, user: user, category: 'spam') }
describe 'as an admin' do
+ include FilteredSearchHelpers
+
before do
sign_in(admin)
gitlab_enable_admin_mode_sign_in(admin)
- end
-
- context 'when abuse_reports_list feature flag is enabled' do
- include FilteredSearchHelpers
-
- before do
- visit admin_abuse_reports_path
- end
-
- let(:abuse_report_row_selector) { '[data-testid="abuse-report-row"]' }
-
- it 'only includes open reports by default' do
- expect_displayed_reports_count(2)
-
- expect_report_shown(open_report, open_report2)
-
- within '[data-testid="abuse-reports-filtered-search-bar"]' do
- expect(page).to have_content 'Status = Open'
- end
- end
-
- it 'can be filtered by status, user, reporter, and category', :aggregate_failures do
- # filter by status
- filter %w[Status Closed]
- expect_displayed_reports_count(1)
- expect_report_shown(closed_report)
- expect_report_not_shown(open_report, open_report2)
-
- filter %w[Status Open]
- expect_displayed_reports_count(2)
- expect_report_shown(open_report, open_report2)
- expect_report_not_shown(closed_report)
-
- # filter by user
- filter(['User', open_report2.user.username])
- expect_displayed_reports_count(1)
- expect_report_shown(open_report2)
- expect_report_not_shown(open_report, closed_report)
+ visit admin_abuse_reports_path
+ end
- # filter by reporter
- filter(['Reporter', open_report.reporter.username])
+ let(:abuse_report_row_selector) { '[data-testid="abuse-report-row"]' }
- expect_displayed_reports_count(1)
- expect_report_shown(open_report)
- expect_report_not_shown(open_report2, closed_report)
+ it 'only includes open reports by default' do
+ expect_displayed_reports_count(2)
- # filter by category
- filter(['Category', open_report2.category])
+ expect_report_shown(open_report, open_report2)
- expect_displayed_reports_count(1)
- expect_report_shown(open_report2)
- expect_report_not_shown(open_report, closed_report)
+ within_testid('abuse-reports-filtered-search-bar') do
+ expect(page).to have_content 'Status = Open'
end
+ end
- it 'can be sorted by created_at and updated_at in desc and asc order', :aggregate_failures do
- sort_by 'Created date'
- # created_at desc
- expect(report_rows[0].text).to include(report_text(open_report2))
- expect(report_rows[1].text).to include(report_text(open_report))
-
- # created_at asc
- toggle_sort_direction
-
- expect(report_rows[0].text).to include(report_text(open_report))
- expect(report_rows[1].text).to include(report_text(open_report2))
+ it 'can be filtered by status, user, reporter, and category', :aggregate_failures do
+ # filter by status
+ filter %w[Status Closed]
+ expect_displayed_reports_count(1)
+ expect_report_shown(closed_report)
+ expect_report_not_shown(open_report, open_report2)
- # updated_at asc
- sort_by 'Updated date'
+ filter %w[Status Open]
+ expect_displayed_reports_count(2)
+ expect_report_shown(open_report, open_report2)
+ expect_report_not_shown(closed_report)
- expect(report_rows[0].text).to include(report_text(open_report2))
- expect(report_rows[1].text).to include(report_text(open_report))
+ # filter by user
+ filter(['User', open_report2.user.username])
- # updated_at desc
- toggle_sort_direction
+ expect_displayed_reports_count(1)
+ expect_report_shown(open_report2)
+ expect_report_not_shown(open_report, closed_report)
- expect(report_rows[0].text).to include(report_text(open_report))
- expect(report_rows[1].text).to include(report_text(open_report2))
- end
+ # filter by reporter
+ filter(['Reporter', open_report.reporter.username])
- context 'when multiple reports for the same user are created' do
- let_it_be(:open_report3) { create(:abuse_report, category: 'spam', user: user) }
- let_it_be(:closed_report2) { create(:abuse_report, :closed, user: user, category: 'spam') }
+ expect_displayed_reports_count(1)
+ expect_report_shown(open_report)
+ expect_report_not_shown(open_report2, closed_report)
- it 'aggregates open reports by user & category', :aggregate_failures do
- expect_displayed_reports_count(2)
+ # filter by category
+ filter(['Category', open_report2.category])
- expect_aggregated_report_shown(open_report, 2)
- expect_report_shown(open_report2)
- end
+ expect_displayed_reports_count(1)
+ expect_report_shown(open_report2)
+ expect_report_not_shown(open_report, closed_report)
+ end
- it 'can sort aggregated reports by number_of_reports in desc order only', :aggregate_failures do
- sort_by 'Number of Reports'
+ it 'can be sorted by created_at and updated_at in desc and asc order', :aggregate_failures do
+ sort_by 'Created date'
+ # created_at desc
+ expect(report_rows[0].text).to include(report_text(open_report2))
+ expect(report_rows[1].text).to include(report_text(open_report))
- expect(report_rows[0].text).to include(aggregated_report_text(open_report, 2))
- expect(report_rows[1].text).to include(report_text(open_report2))
+ # created_at asc
+ toggle_sort_direction
- toggle_sort_direction
+ expect(report_rows[0].text).to include(report_text(open_report))
+ expect(report_rows[1].text).to include(report_text(open_report2))
- expect(report_rows[0].text).to include(aggregated_report_text(open_report, 2))
- expect(report_rows[1].text).to include(report_text(open_report2))
- end
+ # updated_at asc
+ sort_by 'Updated date'
- it 'can sort aggregated reports by created_at and updated_at in desc and asc order', :aggregate_failures do
- # number_of_reports desc (default)
- expect(report_rows[0].text).to include(aggregated_report_text(open_report, 2))
- expect(report_rows[1].text).to include(report_text(open_report2))
+ expect(report_rows[0].text).to include(report_text(open_report2))
+ expect(report_rows[1].text).to include(report_text(open_report))
- # created_at desc
- sort_by 'Created date'
+ # updated_at desc
+ toggle_sort_direction
- expect(report_rows[0].text).to include(report_text(open_report2))
- expect(report_rows[1].text).to include(aggregated_report_text(open_report, 2))
-
- # created_at asc
- toggle_sort_direction
+ expect(report_rows[0].text).to include(report_text(open_report))
+ expect(report_rows[1].text).to include(report_text(open_report2))
+ end
- expect(report_rows[0].text).to include(aggregated_report_text(open_report, 2))
- expect(report_rows[1].text).to include(report_text(open_report2))
+ context 'when multiple reports for the same user are created' do
+ let_it_be(:open_report3) { create(:abuse_report, category: 'spam', user: user) }
+ let_it_be(:closed_report2) { create(:abuse_report, :closed, user: user, category: 'spam') }
- sort_by 'Updated date'
+ it 'aggregates open reports by user & category', :aggregate_failures do
+ expect_displayed_reports_count(2)
- # updated_at asc
- expect(report_rows[0].text).to include(report_text(open_report2))
- expect(report_rows[1].text).to include(aggregated_report_text(open_report, 2))
+ expect_aggregated_report_shown(open_report, 2)
+ expect_report_shown(open_report2)
+ end
- # updated_at desc
- toggle_sort_direction
+ it 'can sort aggregated reports by number_of_reports in desc order only', :aggregate_failures do
+ sort_by 'Number of Reports'
- expect(report_rows[0].text).to include(aggregated_report_text(open_report, 2))
- expect(report_rows[1].text).to include(report_text(open_report2))
- end
+ expect(report_rows[0].text).to include(aggregated_report_text(open_report, 2))
+ expect(report_rows[1].text).to include(report_text(open_report2))
- it 'does not aggregate closed reports', :aggregate_failures do
- filter %w[Status Closed]
+ toggle_sort_direction
- expect_displayed_reports_count(2)
- expect_report_shown(closed_report, closed_report2)
- end
+ expect(report_rows[0].text).to include(aggregated_report_text(open_report, 2))
+ expect(report_rows[1].text).to include(report_text(open_report2))
end
- def report_rows
- page.all(abuse_report_row_selector)
- end
+ it 'can sort aggregated reports by created_at and updated_at in desc and asc order', :aggregate_failures do
+ # number_of_reports desc (default)
+ expect(report_rows[0].text).to include(aggregated_report_text(open_report, 2))
+ expect(report_rows[1].text).to include(report_text(open_report2))
- def report_text(report)
- "#{report.user.name} reported for #{report.category} by #{report.reporter.name}"
- end
+ # created_at desc
+ sort_by 'Created date'
- def aggregated_report_text(report, count)
- "#{report.user.name} reported for #{report.category} by #{count} users"
- end
+ expect(report_rows[0].text).to include(report_text(open_report2))
+ expect(report_rows[1].text).to include(aggregated_report_text(open_report, 2))
- def expect_report_shown(*reports)
- reports.each do |r|
- expect(page).to have_content(report_text(r))
- end
- end
+ # created_at asc
+ toggle_sort_direction
- def expect_report_not_shown(*reports)
- reports.each do |r|
- expect(page).not_to have_content(report_text(r))
- end
- end
+ expect(report_rows[0].text).to include(aggregated_report_text(open_report, 2))
+ expect(report_rows[1].text).to include(report_text(open_report2))
- def expect_aggregated_report_shown(*reports, count)
- reports.each do |r|
- expect(page).to have_content(aggregated_report_text(r, count))
- end
- end
+ sort_by 'Updated date'
- def expect_displayed_reports_count(count)
- expect(page).to have_css(abuse_report_row_selector, count: count)
- end
+ # updated_at asc
+ expect(report_rows[0].text).to include(report_text(open_report2))
+ expect(report_rows[1].text).to include(aggregated_report_text(open_report, 2))
- def filter(tokens)
- # remove all existing filters first
- page.find_all('.gl-token-close').each(&:click)
+ # updated_at desc
+ toggle_sort_direction
- select_tokens(*tokens, submit: true, input_text: 'Filter reports')
+ expect(report_rows[0].text).to include(aggregated_report_text(open_report, 2))
+ expect(report_rows[1].text).to include(report_text(open_report2))
end
- def sort_by(sort)
- page.within('.vue-filtered-search-bar-container .sort-dropdown-container') do
- page.find('.gl-dropdown-toggle').click
+ it 'does not aggregate closed reports', :aggregate_failures do
+ filter %w[Status Closed]
- page.within('.dropdown-menu') do
- click_button sort
- wait_for_requests
- end
- end
+ expect_displayed_reports_count(2)
+ expect_report_shown(closed_report, closed_report2)
end
end
- context 'when abuse_reports_list feature flag is disabled' do
- before do
- stub_feature_flags(abuse_reports_list: false)
-
- visit admin_abuse_reports_path
- end
+ def report_rows
+ page.all(abuse_report_row_selector)
+ end
- it 'displays all abuse reports', :aggregate_failures do
- expect_report_shown(open_report)
- expect_report_actions_shown(open_report)
+ def report_text(report)
+ "#{report.user.name} reported for #{report.category} by #{report.reporter.name}"
+ end
- expect_report_shown(open_report2)
- expect_report_actions_shown(open_report2)
+ def aggregated_report_text(report, count)
+ "#{report.user.name} reported for #{report.category} by #{count} users"
+ end
- expect_report_shown(closed_report)
- expect_report_actions_shown(closed_report)
+ def expect_report_shown(*reports)
+ reports.each do |r|
+ expect(page).to have_content(report_text(r))
end
+ end
- context 'when an admin has been reported for abuse' do
- let_it_be(:admin_abuse_report) { create(:abuse_report, user: admin) }
-
- it 'displays the abuse report without actions' do
- expect_report_shown(admin_abuse_report)
- expect_report_actions_not_shown(admin_abuse_report)
- end
+ def expect_report_not_shown(*reports)
+ reports.each do |r|
+ expect(page).not_to have_content(report_text(r))
end
+ end
- context 'when multiple users have been reported for abuse' do
- let(:report_count) { AbuseReport.default_per_page + 3 }
-
- before do
- report_count.times do
- create(:abuse_report, user: create(:user))
- end
- end
-
- context 'in the abuse report view', :aggregate_failures do
- it 'adds pagination' do
- visit admin_abuse_reports_path
-
- expect(page).to have_selector('.pagination')
- expect(page).to have_selector('.pagination .js-pagination-page', count: (report_count.to_f / AbuseReport.default_per_page).ceil)
- end
- end
+ def expect_aggregated_report_shown(*reports, count)
+ reports.each do |r|
+ expect(page).to have_content(aggregated_report_text(r, count))
end
+ end
- context 'when filtering reports' do
- it 'can be filtered by reported-user', :aggregate_failures do
- visit admin_abuse_reports_path
-
- page.within '.filter-form' do
- click_button 'User'
- wait_for_requests
-
- page.within '.dropdown-menu-user' do
- click_link user.name
- end
-
- wait_for_requests
- end
+ def expect_displayed_reports_count(count)
+ expect(page).to have_css(abuse_report_row_selector, count: count)
+ end
- expect_report_shown(open_report)
- expect_report_shown(closed_report)
- end
- end
+ def filter(tokens)
+ # remove all existing filters first
+ page.find_all('.gl-token-close').each(&:click)
- def expect_report_shown(report)
- page.within(:table_row, { "User" => report.user.name, "Reported by" => report.reporter.name }) do
- expect(page).to have_content(report.user.name)
- expect(page).to have_content(report.reporter.name)
- expect(page).to have_content(report.message)
- expect(page).to have_link(report.user.name, href: user_path(report.user))
- end
- end
+ select_tokens(*tokens, submit: true, input_text: 'Filter reports')
+ end
- def expect_report_actions_shown(report)
- page.within(:table_row, { "User" => report.user.name, "Reported by" => report.reporter.name }) do
- expect(page).to have_link('Remove user & report')
- expect(page).to have_link('Block user')
- expect(page).to have_link('Remove user')
- end
- end
+ def sort_by(sort)
+ page.within('.vue-filtered-search-bar-container .sort-dropdown-container') do
+ page.find('.gl-dropdown-toggle').click
- def expect_report_actions_not_shown(report)
- page.within(:table_row, { "User" => report.user.name, "Reported by" => report.reporter.name }) do
- expect(page).not_to have_link('Remove user & report')
- expect(page).not_to have_link('Block user')
- expect(page).not_to have_link('Remove user')
+ page.within('.dropdown-menu') do
+ click_button sort
+ wait_for_requests
end
end
end
diff --git a/spec/features/admin/admin_hooks_spec.rb b/spec/features/admin/admin_hooks_spec.rb
index b4f64cbfa7b..a5acba1fe4a 100644
--- a/spec/features/admin/admin_hooks_spec.rb
+++ b/spec/features/admin/admin_hooks_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'Admin::Hooks', feature_category: :webhooks do
include Spec::Support::Helpers::ModalHelpers
- let_it_be(:user) { create(:admin) }
+ let_it_be(:user) { create(:admin, :no_super_sidebar) }
before do
sign_in(user)
diff --git a/spec/features/admin/admin_jobs_spec.rb b/spec/features/admin/admin_jobs_spec.rb
index d46b314c144..b305bec6493 100644
--- a/spec/features/admin/admin_jobs_spec.rb
+++ b/spec/features/admin/admin_jobs_spec.rb
@@ -2,9 +2,8 @@
require 'spec_helper'
-RSpec.describe 'Admin Jobs', feature_category: :continuous_integration do
+RSpec.describe 'Admin Jobs', :js, feature_category: :continuous_integration do
before do
- stub_feature_flags(admin_jobs_vue: false)
admin = create(:admin)
sign_in(admin)
gitlab_enable_admin_mode_sign_in(admin)
@@ -23,12 +22,13 @@ RSpec.describe 'Admin Jobs', feature_category: :continuous_integration do
visit admin_jobs_path
- expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'All')
- expect(page).to have_selector('.row-content-block', text: 'All jobs')
- expect(page.all('.build-link').size).to eq(4)
- expect(page).to have_button 'Stop all jobs'
+ wait_for_requests
- click_button 'Stop all jobs'
+ expect(page).to have_selector('[data-testid="jobs-all-tab"]')
+ expect(page.all('[data-testid="jobs-table-row"]').size).to eq(4)
+ expect(page).to have_button 'Cancel all jobs'
+
+ click_button 'Cancel all jobs'
expect(page).to have_button 'Yes, proceed'
expect(page).to have_content 'Are you sure?'
end
@@ -38,73 +38,11 @@ RSpec.describe 'Admin Jobs', feature_category: :continuous_integration do
it 'shows a message' do
visit admin_jobs_path
- expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'All')
- expect(page).to have_content 'No jobs to show'
- expect(page).not_to have_button 'Stop all jobs'
- end
- end
- end
-
- context 'Pending tab' do
- context 'when have pending jobs' do
- it 'shows pending jobs' do
- build1 = create(:ci_build, pipeline: pipeline, status: :pending)
- build2 = create(:ci_build, pipeline: pipeline, status: :running)
- build3 = create(:ci_build, pipeline: pipeline, status: :success)
- build4 = create(:ci_build, pipeline: pipeline, status: :failed)
-
- visit admin_jobs_path(scope: :pending)
-
- expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'Pending')
- expect(page.find('.build-link')).to have_content(build1.id)
- expect(page.find('.build-link')).not_to have_content(build2.id)
- expect(page.find('.build-link')).not_to have_content(build3.id)
- expect(page.find('.build-link')).not_to have_content(build4.id)
- expect(page).to have_button 'Stop all jobs'
- end
- end
-
- context 'when have no jobs pending' do
- it 'shows a message' do
- create(:ci_build, pipeline: pipeline, status: :success)
-
- visit admin_jobs_path(scope: :pending)
-
- expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'Pending')
- expect(page).to have_content 'No jobs to show'
- expect(page).not_to have_button 'Stop all jobs'
- end
- end
- end
-
- context 'Running tab' do
- context 'when have running jobs' do
- it 'shows running jobs' do
- build1 = create(:ci_build, pipeline: pipeline, status: :running)
- build2 = create(:ci_build, pipeline: pipeline, status: :success)
- build3 = create(:ci_build, pipeline: pipeline, status: :failed)
- build4 = create(:ci_build, pipeline: pipeline, status: :pending)
-
- visit admin_jobs_path(scope: :running)
-
- expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'Running')
- expect(page.find('.build-link')).to have_content(build1.id)
- expect(page.find('.build-link')).not_to have_content(build2.id)
- expect(page.find('.build-link')).not_to have_content(build3.id)
- expect(page.find('.build-link')).not_to have_content(build4.id)
- expect(page).to have_button 'Stop all jobs'
- end
- end
-
- context 'when have no jobs running' do
- it 'shows a message' do
- create(:ci_build, pipeline: pipeline, status: :success)
-
- visit admin_jobs_path(scope: :running)
+ wait_for_requests
- expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'Running')
- expect(page).to have_content 'No jobs to show'
- expect(page).not_to have_button 'Stop all jobs'
+ expect(page).to have_selector('[data-testid="jobs-all-tab"]')
+ expect(page).to have_selector('[data-testid="jobs-empty-state"]')
+ expect(page).not_to have_button 'Cancel all jobs'
end
end
end
@@ -116,13 +54,19 @@ RSpec.describe 'Admin Jobs', feature_category: :continuous_integration do
build2 = create(:ci_build, pipeline: pipeline, status: :running)
build3 = create(:ci_build, pipeline: pipeline, status: :success)
- visit admin_jobs_path(scope: :finished)
+ visit admin_jobs_path
+
+ wait_for_requests
+
+ find_by_testid('jobs-finished-tab').click
+
+ wait_for_requests
- expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'Finished')
- expect(page.find('.build-link')).not_to have_content(build1.id)
- expect(page.find('.build-link')).not_to have_content(build2.id)
- expect(page.find('.build-link')).to have_content(build3.id)
- expect(page).to have_button 'Stop all jobs'
+ expect(page).to have_selector('[data-testid="jobs-finished-tab"]')
+ expect(find_by_testid('job-id-link')).not_to have_content(build1.id)
+ expect(find_by_testid('job-id-link')).not_to have_content(build2.id)
+ expect(find_by_testid('job-id-link')).to have_content(build3.id)
+ expect(page).to have_button 'Cancel all jobs'
end
end
@@ -130,11 +74,17 @@ RSpec.describe 'Admin Jobs', feature_category: :continuous_integration do
it 'shows a message' do
create(:ci_build, pipeline: pipeline, status: :running)
- visit admin_jobs_path(scope: :finished)
+ visit admin_jobs_path
+
+ wait_for_requests
+
+ find_by_testid('jobs-finished-tab').click
+
+ wait_for_requests
- expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'Finished')
+ expect(page).to have_selector('[data-testid="jobs-finished-tab"]')
expect(page).to have_content 'No jobs to show'
- expect(page).to have_button 'Stop all jobs'
+ expect(page).to have_button 'Cancel all jobs'
end
end
end
diff --git a/spec/features/admin/admin_mode/logout_spec.rb b/spec/features/admin/admin_mode/logout_spec.rb
index a64d3f241f6..5d9106fea02 100644
--- a/spec/features/admin/admin_mode/logout_spec.rb
+++ b/spec/features/admin/admin_mode/logout_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'Admin Mode Logout', :js, feature_category: :system_access do
include UserLoginHelper
include Features::TopNavSpecHelpers
- let(:user) { create(:admin) }
+ let(:user) { create(:admin, :no_super_sidebar) }
before do
# TODO: This used to use gitlab_sign_in, instead of sign_in, but that is buggy. See
diff --git a/spec/features/admin/admin_mode/workers_spec.rb b/spec/features/admin/admin_mode/workers_spec.rb
index 124c43eef9d..2a862c750d7 100644
--- a/spec/features/admin/admin_mode/workers_spec.rb
+++ b/spec/features/admin/admin_mode/workers_spec.rb
@@ -6,8 +6,8 @@ require 'spec_helper'
RSpec.describe 'Admin mode for workers', :request_store, feature_category: :system_access do
include Features::AdminUsersHelpers
- let(:user) { create(:user) }
- let(:user_to_delete) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
+ let(:user_to_delete) { create(:user, :no_super_sidebar) }
before do
sign_in(user)
@@ -22,7 +22,7 @@ RSpec.describe 'Admin mode for workers', :request_store, feature_category: :syst
end
context 'as an admin user' do
- let(:user) { create(:admin) }
+ let(:user) { create(:admin, :no_super_sidebar) }
context 'when admin mode disabled' do
it 'cannot delete user', :js do
diff --git a/spec/features/admin/admin_mode_spec.rb b/spec/features/admin/admin_mode_spec.rb
index 65249fa0235..edfa58567ad 100644
--- a/spec/features/admin/admin_mode_spec.rb
+++ b/spec/features/admin/admin_mode_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'Admin mode', :js, feature_category: :shared do
include Features::TopNavSpecHelpers
include StubENV
- let(:admin) { create(:admin) }
+ let(:admin) { create(:admin, :no_super_sidebar) }
before do
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
diff --git a/spec/features/admin/admin_runners_spec.rb b/spec/features/admin/admin_runners_spec.rb
index af6ba318ac6..e0f4473c80c 100644
--- a/spec/features/admin/admin_runners_spec.rb
+++ b/spec/features/admin/admin_runners_spec.rb
@@ -54,6 +54,11 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do
let(:runner) { instance_runner }
end
+ it_behaves_like 'shows runner details from list' do
+ let(:runner) { instance_runner }
+ let(:runner_page_path) { admin_runner_path(instance_runner) }
+ end
+
it_behaves_like 'pauses, resumes and deletes a runner' do
let(:runner) { instance_runner }
end
@@ -575,6 +580,8 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do
let(:runner_page_path) { admin_runner_path(project_runner) }
end
+ it_behaves_like 'shows locked field'
+
describe 'breadcrumbs' do
it 'contains the current runner id and token' do
page.within '[data-testid="breadcrumb-links"]' do
diff --git a/spec/features/admin/admin_sees_background_migrations_spec.rb b/spec/features/admin/admin_sees_background_migrations_spec.rb
index 7d4d3deb6d8..7423e74bf3a 100644
--- a/spec/features/admin/admin_sees_background_migrations_spec.rb
+++ b/spec/features/admin/admin_sees_background_migrations_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe "Admin > Admin sees background migrations", feature_category: :database do
include ListboxHelpers
- let_it_be(:admin) { create(:admin) }
+ let_it_be(:admin) { create(:admin, :no_super_sidebar) }
let(:job_class) { Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJob }
let_it_be(:active_migration) { create(:batched_background_migration, :active, table_name: 'active') }
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index b78d6777a1a..e87f47e5234 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'Admin updates settings', feature_category: :shared do
include TermsHelper
include UsageDataHelpers
- let_it_be(:admin) { create(:admin) }
+ let_it_be(:admin) { create(:admin, :no_super_sidebar) }
context 'application setting :admin_mode is enabled', :request_store do
before do
@@ -53,7 +53,7 @@ RSpec.describe 'Admin updates settings', feature_category: :shared do
it 'modify import sources' do
expect(current_settings.import_sources).to be_empty
- page.within('[data-testid="admin-visibility-access-settings"]') do
+ page.within('[data-testid="admin-import-export-settings"]') do
check "Repository by URL"
click_button 'Save changes'
end
@@ -63,7 +63,7 @@ RSpec.describe 'Admin updates settings', feature_category: :shared do
end
it 'change Visibility and Access Controls' do
- page.within('[data-testid="admin-visibility-access-settings"]') do
+ page.within('[data-testid="admin-import-export-settings"]') do
page.within('[data-testid="project-export"]') do
uncheck 'Enabled'
end
@@ -113,7 +113,7 @@ RSpec.describe 'Admin updates settings', feature_category: :shared do
end
it 'change Maximum export size' do
- page.within(find('[data-testid="account-limit"]')) do
+ page.within(find('[data-testid="admin-import-export-settings"]')) do
fill_in 'Maximum export size (MiB)', with: 25
click_button 'Save changes'
end
@@ -123,7 +123,7 @@ RSpec.describe 'Admin updates settings', feature_category: :shared do
end
it 'change Maximum import size' do
- page.within(find('[data-testid="account-limit"]')) do
+ page.within(find('[data-testid="admin-import-export-settings"]')) do
fill_in 'Maximum import size (MiB)', with: 15
click_button 'Save changes'
end
diff --git a/spec/features/admin/users/user_spec.rb b/spec/features/admin/users/user_spec.rb
index a95fd133133..7dc329e6909 100644
--- a/spec/features/admin/users/user_spec.rb
+++ b/spec/features/admin/users/user_spec.rb
@@ -6,8 +6,8 @@ RSpec.describe 'Admin::Users::User', feature_category: :user_management do
include Features::AdminUsersHelpers
include Spec::Support::Helpers::ModalHelpers
- let_it_be(:user) { create(:omniauth_user, provider: 'twitter', extern_uid: '123456') }
- let_it_be(:current_user) { create(:admin) }
+ let_it_be(:user) { create(:omniauth_user, :no_super_sidebar, provider: 'twitter', extern_uid: '123456') }
+ let_it_be(:current_user) { create(:admin, :no_super_sidebar) }
before do
sign_in(current_user)
@@ -145,7 +145,7 @@ RSpec.describe 'Admin::Users::User', feature_category: :user_management do
end
describe 'Impersonation' do
- let_it_be(:another_user) { create(:user) }
+ let_it_be(:another_user) { create(:user, :no_super_sidebar) }
context 'before impersonating' do
subject { visit admin_user_path(user_to_visit) }
@@ -156,7 +156,7 @@ RSpec.describe 'Admin::Users::User', feature_category: :user_management do
it 'disables impersonate button' do
subject
- impersonate_btn = find('[data-testid="impersonate_user_link"]')
+ impersonate_btn = find('[data-testid="impersonate-user-link"]')
expect(impersonate_btn).not_to be_nil
expect(impersonate_btn['disabled']).not_to be_nil
@@ -174,7 +174,7 @@ RSpec.describe 'Admin::Users::User', feature_category: :user_management do
subject
expect(page).to have_content('Impersonate')
- impersonate_btn = find('[data-testid="impersonate_user_link"]')
+ impersonate_btn = find('[data-testid="impersonate-user-link"]')
expect(impersonate_btn['disabled']).to be_nil
end
end
diff --git a/spec/features/admin/users/users_spec.rb b/spec/features/admin/users/users_spec.rb
index 8e80ce5edd9..8ee30c50a7d 100644
--- a/spec/features/admin/users/users_spec.rb
+++ b/spec/features/admin/users/users_spec.rb
@@ -350,7 +350,8 @@ RSpec.describe 'Admin::Users', feature_category: :user_management do
let_it_be(:ghost_user) { create(:user, :ghost) }
it 'does not render actions dropdown' do
- expect(page).not_to have_css("[data-testid='user-actions-#{ghost_user.id}'] [data-testid='dropdown-toggle']")
+ expect(page).not_to have_css(
+ "[data-testid='user-actions-#{ghost_user.id}'] [data-testid='user-actions-dropdown-toggle']")
end
end
@@ -358,7 +359,8 @@ RSpec.describe 'Admin::Users', feature_category: :user_management do
let_it_be(:bot_user) { create(:user, user_type: :alert_bot) }
it 'does not render actions dropdown' do
- expect(page).not_to have_css("[data-testid='user-actions-#{bot_user.id}'] [data-testid='dropdown-toggle']")
+ expect(page).not_to have_css(
+ "[data-testid='user-actions-#{bot_user.id}'] [data-testid='user-actions-dropdown-toggle']")
end
end
end
diff --git a/spec/features/alert_management/alert_details_spec.rb b/spec/features/alert_management/alert_details_spec.rb
index 45fa4d810aa..b377d3a092b 100644
--- a/spec/features/alert_management/alert_details_spec.rb
+++ b/spec/features/alert_management/alert_details_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'Alert details', :js, feature_category: :incident_management do
let_it_be(:project) { create(:project) }
- let_it_be(:developer) { create(:user) }
+ let_it_be(:developer) { create(:user, :no_super_sidebar) }
let_it_be(:alert) { create(:alert_management_alert, project: project, status: 'triggered', title: 'Alert') }
before_all do
diff --git a/spec/features/alert_management/alert_management_list_spec.rb b/spec/features/alert_management/alert_management_list_spec.rb
index 6ed3bdec5f5..cc54af249e1 100644
--- a/spec/features/alert_management/alert_management_list_spec.rb
+++ b/spec/features/alert_management/alert_management_list_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'Alert Management index', :js, feature_category: :incident_management do
let_it_be(:project) { create(:project) }
- let_it_be(:developer) { create(:user) }
+ let_it_be(:developer) { create(:user, :no_super_sidebar) }
before_all do
project.add_developer(developer)
diff --git a/spec/features/boards/board_filters_spec.rb b/spec/features/boards/board_filters_spec.rb
index 006b7ce45d4..1ee02de9a66 100644
--- a/spec/features/boards/board_filters_spec.rb
+++ b/spec/features/boards/board_filters_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe 'Issue board filters', :js, feature_category: :team_planning do
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :repository, group: group) }
let_it_be(:user) { create(:user) }
- let_it_be(:board) { create(:board, project: project) }
let_it_be(:project_label) { create(:label, project: project, title: 'Label') }
let_it_be(:milestone_1) { create(:milestone, project: project, due_date: 3.days.from_now) }
let_it_be(:milestone_2) { create(:milestone, project: project, due_date: Date.tomorrow) }
@@ -21,166 +21,195 @@ RSpec.describe 'Issue board filters', :js, feature_category: :team_planning do
let(:filter_first_suggestion) { find('.gl-filtered-search-suggestion-list').first('.gl-filtered-search-suggestion') }
let(:filter_submit) { find('.gl-search-box-by-click-search-button') }
- before do
- stub_feature_flags(apollo_boards: false)
- project.add_maintainer(user)
- sign_in(user)
+ context 'for a project board' do
+ let_it_be(:board) { create(:board, project: project) }
- visit_project_board
- end
-
- shared_examples 'loads all the users when opened' do
- it 'and submit one as filter', :aggregate_failures do
- expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 2)
+ before do
+ stub_feature_flags(apollo_boards: false)
+ project.add_maintainer(user)
+ sign_in(user)
+ visit project_board_path(project, board)
wait_for_requests
+ end
- expect_filtered_search_dropdown_results(filter_dropdown, 4)
+ shared_examples 'loads all the users when opened' do
+ it 'and submit one as filter', :aggregate_failures do
+ expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 2)
- click_on user.username
- filter_submit.click
+ wait_for_requests
- expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1)
- expect(find('.board-card')).to have_content(issue.title)
- end
- end
+ expect_filtered_search_dropdown_results(filter_dropdown, 3)
- describe 'filters by assignee' do
- before do
- set_filter('assignee')
- end
+ click_on user.username
+ filter_submit.click
- it_behaves_like 'loads all the users when opened', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/351426' do
- let(:issue) { issue_2 }
+ expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1)
+ expect(find('.board-card')).to have_content(issue.title)
+ end
end
- end
- describe 'filters by author' do
- before do
- set_filter('author')
- end
+ describe 'filters by assignee' do
+ before do
+ set_filter('assignee')
+ end
- it_behaves_like 'loads all the users when opened' do
- let(:issue) { issue_1 }
+ it_behaves_like 'loads all the users when opened', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/351426' do
+ let(:issue) { issue_2 }
+ end
end
- end
- describe 'filters by label' do
- before do
- set_filter('label')
+ describe 'filters by author' do
+ before do
+ set_filter('author')
+ end
+
+ it_behaves_like 'loads all the users when opened' do
+ let(:issue) { issue_1 }
+ end
end
- it 'loads all the labels when opened and submit one as filter', :aggregate_failures do
- expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 2)
+ describe 'filters by label' do
+ before do
+ set_filter('label')
+ end
- expect_filtered_search_dropdown_results(filter_dropdown, 3)
+ it 'loads all the labels when opened and submit one as filter', :aggregate_failures do
+ expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 2)
- filter_dropdown.click_on project_label.title
- filter_submit.click
+ expect_filtered_search_dropdown_results(filter_dropdown, 3)
- expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1)
- expect(find('.board-card')).to have_content(issue_2.title)
- end
- end
+ filter_dropdown.click_on project_label.title
+ filter_submit.click
- describe 'filters by releases' do
- before do
- set_filter('release')
+ expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1)
+ expect(find('.board-card')).to have_content(issue_2.title)
+ end
end
- it 'loads all the releases when opened and submit one as filter', :aggregate_failures do
- expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 2)
+ describe 'filters by releases' do
+ before do
+ set_filter('release')
+ end
- expect_filtered_search_dropdown_results(filter_dropdown, 2)
+ it 'loads all the releases when opened and submit one as filter', :aggregate_failures do
+ expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 2)
- click_on release.tag
- filter_submit.click
+ expect_filtered_search_dropdown_results(filter_dropdown, 2)
- expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1)
- expect(find('.board-card')).to have_content(issue_1.title)
- end
- end
+ click_on release.tag
+ filter_submit.click
- describe 'filters by confidentiality' do
- before do
- filter_input.click
- filter_input.set("confidential:")
+ expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1)
+ expect(find('.board-card')).to have_content(issue_1.title)
+ end
end
- it 'loads all the confidentiality options when opened and submit one as filter', :aggregate_failures do
- expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 2)
+ describe 'filters by confidentiality' do
+ before do
+ filter_input.click
+ filter_input.set("confidential:")
+ end
- expect_filtered_search_dropdown_results(filter_dropdown, 2)
+ it 'loads all the confidentiality options when opened and submit one as filter', :aggregate_failures do
+ expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 2)
- filter_dropdown.click_on 'Yes'
- filter_submit.click
+ expect_filtered_search_dropdown_results(filter_dropdown, 2)
- expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1)
- expect(find('.board-card')).to have_content(issue_2.title)
- end
- end
+ filter_dropdown.click_on 'Yes'
+ filter_submit.click
- describe 'filters by milestone' do
- before do
- set_filter('milestone')
+ expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1)
+ expect(find('.board-card')).to have_content(issue_2.title)
+ end
end
- it 'loads all the milestones when opened and submit one as filter', :aggregate_failures do
- expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 2)
+ describe 'filters by milestone' do
+ before do
+ set_filter('milestone')
+ end
+
+ it 'loads all the milestones when opened and submit one as filter', :aggregate_failures do
+ expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 2)
- expect_filtered_search_dropdown_results(filter_dropdown, 6)
- expect(filter_dropdown).to have_content('None')
- expect(filter_dropdown).to have_content('Any')
- expect(filter_dropdown).to have_content('Started')
- expect(filter_dropdown).to have_content('Upcoming')
+ expect_filtered_search_dropdown_results(filter_dropdown, 6)
+ expect(filter_dropdown).to have_content('None')
+ expect(filter_dropdown).to have_content('Any')
+ expect(filter_dropdown).to have_content('Started')
+ expect(filter_dropdown).to have_content('Upcoming')
- dropdown_nodes = page.find_all('.gl-filtered-search-suggestion-list > .gl-filtered-search-suggestion')
+ dropdown_nodes = page.find_all('.gl-filtered-search-suggestion-list > .gl-filtered-search-suggestion')
- expect(dropdown_nodes[4]).to have_content(milestone_2.title)
- expect(dropdown_nodes.last).to have_content(milestone_1.title)
+ expect(dropdown_nodes[4]).to have_content(milestone_2.title)
+ expect(dropdown_nodes.last).to have_content(milestone_1.title)
- click_on milestone_1.title
- filter_submit.click
+ click_on milestone_1.title
+ filter_submit.click
- expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1)
+ expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1)
+ end
end
- end
- describe 'filters by reaction emoji' do
- before do
- set_filter('my-reaction')
+ describe 'filters by reaction emoji' do
+ before do
+ set_filter('my-reaction')
+ end
+
+ it 'loads all the emojis when opened and submit one as filter', :aggregate_failures do
+ expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 2)
+
+ expect_filtered_search_dropdown_results(filter_dropdown, 3)
+
+ click_on 'thumbsup'
+ filter_submit.click
+
+ expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1)
+ expect(find('.board-card')).to have_content(issue_1.title)
+ end
end
- it 'loads all the emojis when opened and submit one as filter', :aggregate_failures do
- expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 2)
+ describe 'filters by type' do
+ let_it_be(:incident) { create(:incident, project: project) }
+
+ before do
+ set_filter('type')
+ end
- expect_filtered_search_dropdown_results(filter_dropdown, 3)
+ it 'loads all the types when opened and submit one as filter', :aggregate_failures do
+ expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 3)
- click_on 'thumbsup'
- filter_submit.click
+ expect_filtered_search_dropdown_results(filter_dropdown, 2)
- expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1)
- expect(find('.board-card')).to have_content(issue_1.title)
+ click_on 'Incident'
+ filter_submit.click
+
+ expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1)
+ expect(find('.board-card')).to have_content(incident.title)
+ end
end
end
- describe 'filters by type' do
- let_it_be(:incident) { create(:incident, project: project) }
+ context 'for a group board' do
+ let_it_be(:board) { create(:board, group: group) }
+
+ let_it_be(:child_project_member) { create(:user).tap { |u| project.add_maintainer(u) } }
before do
- set_filter('type')
- end
+ stub_feature_flags(apollo_boards: false)
- it 'loads all the types when opened and submit one as filter', :aggregate_failures do
- expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 3)
+ group.add_maintainer(user)
+ sign_in(user)
+ end
- expect_filtered_search_dropdown_results(filter_dropdown, 2)
+ context 'when filtering by assignee' do
+ it 'includes descendant project members in autocomplete' do
+ visit group_board_path(group, board)
+ wait_for_requests
- click_on 'Incident'
- filter_submit.click
+ set_filter('assignee')
- expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1)
- expect(find('.board-card')).to have_content(incident.title)
+ expect(page).to have_css('.gl-filtered-search-suggestion', text: child_project_member.name)
+ end
end
end
@@ -193,9 +222,4 @@ RSpec.describe 'Issue board filters', :js, feature_category: :team_planning do
def expect_filtered_search_dropdown_results(filter_dropdown, count)
expect(filter_dropdown).to have_selector('.gl-dropdown-item', count: count)
end
-
- def visit_project_board
- visit project_board_path(project, board)
- wait_for_requests
- end
end
diff --git a/spec/features/boards/multiple_boards_spec.rb b/spec/features/boards/multiple_boards_spec.rb
index e9d34c6f87f..9d59d3dd02a 100644
--- a/spec/features/boards/multiple_boards_spec.rb
+++ b/spec/features/boards/multiple_boards_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Multiple Issue Boards', :js, feature_category: :team_planning do
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
let_it_be(:project) { create(:project, :public) }
let_it_be(:planning) { create(:label, project: project, name: 'Planning') }
let_it_be(:board) { create(:board, name: 'board1', project: project) }
diff --git a/spec/features/boards/sidebar_spec.rb b/spec/features/boards/sidebar_spec.rb
index 4807b691e4f..358da1e1279 100644
--- a/spec/features/boards/sidebar_spec.rb
+++ b/spec/features/boards/sidebar_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project issue boards sidebar', :js, feature_category: :team_planning, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/416414' do
+RSpec.describe 'Project issue boards sidebar', :js, feature_category: :team_planning do
include BoardHelpers
let_it_be(:user) { create(:user) }
diff --git a/spec/features/calendar_spec.rb b/spec/features/calendar_spec.rb
index 8ad27b65f11..e22ae4f51fb 100644
--- a/spec/features/calendar_spec.rb
+++ b/spec/features/calendar_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'Contributions Calendar', :js, feature_category: :user_profile do
include MobileHelpers
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
let(:contributed_project) { create(:project, :public, :repository) }
let(:issue_note) { create(:note, project: contributed_project) }
diff --git a/spec/features/contextual_sidebar_spec.rb b/spec/features/contextual_sidebar_spec.rb
index 132c8eb7192..ab322f18240 100644
--- a/spec/features/contextual_sidebar_spec.rb
+++ b/spec/features/contextual_sidebar_spec.rb
@@ -4,9 +4,8 @@ require 'spec_helper'
RSpec.describe 'Contextual sidebar', :js, feature_category: :remote_development do
context 'when context is a project' do
- let_it_be(:project) { create(:project) }
-
- let(:user) { project.first_owner }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
+ let_it_be(:project) { create(:project, :repository, namespace: user.namespace) }
before do
sign_in(user)
diff --git a/spec/features/cycle_analytics_spec.rb b/spec/features/cycle_analytics_spec.rb
index 56272f58e0d..4fe05abd73b 100644
--- a/spec/features/cycle_analytics_spec.rb
+++ b/spec/features/cycle_analytics_spec.rb
@@ -60,8 +60,8 @@ RSpec.describe 'Value Stream Analytics', :js, feature_category: :value_stream_ma
# NOTE: in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/68595 travel back
# 5 days in time before we create data for these specs, to mitigate some flakiness
# So setting the date range to be the last 2 days should skip past the existing data
- from = 2.days.ago.strftime("%Y-%m-%d")
- to = 1.day.ago.strftime("%Y-%m-%d")
+ from = 2.days.ago.to_date.iso8601
+ to = 1.day.ago.to_date.iso8601
max_items_per_page = 20
around do |example|
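
The date-range change above swaps an explicit strftime format for Date#iso8601, which produces the same YYYY-MM-DD string. A minimal standalone check (ActiveSupport is assumed only for `2.days.ago`):

require 'active_support/all'

from_strftime = 2.days.ago.strftime("%Y-%m-%d")
from_iso8601  = 2.days.ago.to_date.iso8601

# Both render the date as e.g. "2023-09-18"
puts from_strftime == from_iso8601  # => true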
diff --git a/spec/features/dashboard/activity_spec.rb b/spec/features/dashboard/activity_spec.rb
index 60621f57bde..61631d28aa9 100644
--- a/spec/features/dashboard/activity_spec.rb
+++ b/spec/features/dashboard/activity_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Dashboard > Activity', feature_category: :user_profile do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
before do
sign_in(user)
diff --git a/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb b/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb
index c1849cbee83..a00666c2376 100644
--- a/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb
+++ b/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe 'The group dashboard', :js, feature_category: :groups_and_project
include ExternalAuthorizationServiceHelpers
include Features::TopNavSpecHelpers
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
before do
sign_in user
diff --git a/spec/features/dashboard/groups_list_spec.rb b/spec/features/dashboard/groups_list_spec.rb
index b077b554773..e1da163cdf5 100644
--- a/spec/features/dashboard/groups_list_spec.rb
+++ b/spec/features/dashboard/groups_list_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Dashboard Groups page', :js, feature_category: :groups_and_projects do
- let(:user) { create :user }
+ let(:user) { create(:user, :no_super_sidebar) }
let(:group) { create(:group) }
let(:nested_group) { create(:group, :nested) }
let(:another_group) { create(:group) }
@@ -237,4 +237,15 @@ RSpec.describe 'Dashboard Groups page', :js, feature_category: :groups_and_proje
expect(page).to have_link("Explore groups", href: explore_groups_path)
end
+
+ context 'when there are no groups to display' do
+ before do
+ sign_in(user)
+ visit dashboard_groups_path
+ end
+
+ it 'shows empty state' do
+ expect(page).to have_content(s_('GroupsEmptyState|A group is a collection of several projects'))
+ end
+ end
end
diff --git a/spec/features/dashboard/issuables_counter_spec.rb b/spec/features/dashboard/issuables_counter_spec.rb
index 5e6ec007569..501405c5662 100644
--- a/spec/features/dashboard/issuables_counter_spec.rb
+++ b/spec/features/dashboard/issuables_counter_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Navigation bar counter', :use_clean_rails_memory_store_caching, feature_category: :team_planning do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
let(:project) { create(:project, namespace: user.namespace) }
let(:issue) { create(:issue, project: project) }
let(:merge_request) { create(:merge_request, source_project: project) }
diff --git a/spec/features/dashboard/issues_spec.rb b/spec/features/dashboard/issues_spec.rb
index 70d9f7e5137..69b32113bba 100644
--- a/spec/features/dashboard/issues_spec.rb
+++ b/spec/features/dashboard/issues_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'Dashboard Issues', feature_category: :team_planning do
include FilteredSearchHelpers
- let_it_be(:current_user) { create :user }
+ let_it_be(:current_user) { create(:user, :no_super_sidebar) }
let_it_be(:user) { current_user } # Shared examples depend on this being available
let_it_be(:public_project) { create(:project, :public) }
let_it_be(:project) { create(:project) }
diff --git a/spec/features/dashboard/merge_requests_spec.rb b/spec/features/dashboard/merge_requests_spec.rb
index 624f3530f81..4bb04f4ff80 100644
--- a/spec/features/dashboard/merge_requests_spec.rb
+++ b/spec/features/dashboard/merge_requests_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'Dashboard Merge Requests', feature_category: :code_review_workfl
include FilteredSearchHelpers
include ProjectForksHelper
- let(:current_user) { create :user }
+ let(:current_user) { create(:user, :no_super_sidebar) }
let(:user) { current_user }
let(:project) { create(:project) }
diff --git a/spec/features/dashboard/milestones_spec.rb b/spec/features/dashboard/milestones_spec.rb
index 0dd25ffaa94..38637115246 100644
--- a/spec/features/dashboard/milestones_spec.rb
+++ b/spec/features/dashboard/milestones_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe 'Dashboard > Milestones', feature_category: :team_planning do
end
describe 'as logged-in user' do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
let(:group) { create(:group) }
let(:project) { create(:project, namespace: user.namespace) }
let!(:milestone) { create(:milestone, project: project) }
@@ -50,7 +50,7 @@ RSpec.describe 'Dashboard > Milestones', feature_category: :team_planning do
end
describe 'with merge requests disabled' do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
let(:group) { create(:group) }
let(:project) { create(:project, :merge_requests_disabled, namespace: user.namespace) }
let!(:milestone) { create(:milestone, project: project) }
diff --git a/spec/features/dashboard/navbar_spec.rb b/spec/features/dashboard/navbar_spec.rb
index ff0ff899fc2..30e7f2d2e4e 100644
--- a/spec/features/dashboard/navbar_spec.rb
+++ b/spec/features/dashboard/navbar_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe '"Your work" navbar', feature_category: :navigation do
include_context 'dashboard navbar structure'
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
it_behaves_like 'verified navigation bar' do
before do
diff --git a/spec/features/dashboard/projects_spec.rb b/spec/features/dashboard/projects_spec.rb
index 747d09f5d08..e5ad9808f83 100644
--- a/spec/features/dashboard/projects_spec.rb
+++ b/spec/features/dashboard/projects_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Dashboard Projects', feature_category: :groups_and_projects do
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
let_it_be(:project, reload: true) { create(:project, :repository, creator: build(:user)) } # ensure creator != owner to avoid N+1 false-positive
let_it_be(:project2) { create(:project, :public) }
diff --git a/spec/features/dashboard/shortcuts_spec.rb b/spec/features/dashboard/shortcuts_spec.rb
index 2e01c1304de..c8013d364e3 100644
--- a/spec/features/dashboard/shortcuts_spec.rb
+++ b/spec/features/dashboard/shortcuts_spec.rb
@@ -50,13 +50,13 @@ RSpec.describe 'Dashboard shortcuts', :js, feature_category: :shared do
context 'logged out' do
before do
+ stub_feature_flags(super_sidebar_logged_out: false)
visit explore_root_path
end
it 'navigate to tabs' do
find('body').send_keys([:shift, 'G'])
- find('.nothing-here-block')
expect(page).to have_content('No public groups')
find('body').send_keys([:shift, 'S'])
diff --git a/spec/features/dashboard/snippets_spec.rb b/spec/features/dashboard/snippets_spec.rb
index da985c6dc07..f9284f9479e 100644
--- a/spec/features/dashboard/snippets_spec.rb
+++ b/spec/features/dashboard/snippets_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Dashboard snippets', feature_category: :source_code_management do
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
it_behaves_like 'a "Your work" page with sidebar and breadcrumbs', :dashboard_snippets_path, :snippets
diff --git a/spec/features/dashboard/todos/todos_spec.rb b/spec/features/dashboard/todos/todos_spec.rb
index 9d59126df8d..5642d083673 100644
--- a/spec/features/dashboard/todos/todos_spec.rb
+++ b/spec/features/dashboard/todos/todos_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
RSpec.describe 'Dashboard Todos', feature_category: :team_planning do
include DesignManagementTestHelpers
- let_it_be(:user) { create(:user, username: 'john') }
- let_it_be(:user2) { create(:user, username: 'diane') }
- let_it_be(:author) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar, username: 'john') }
+ let_it_be(:user2) { create(:user, :no_super_sidebar, username: 'diane') }
+ let_it_be(:author) { create(:user, :no_super_sidebar) }
let_it_be(:project) { create(:project, :public) }
let_it_be(:issue) { create(:issue, project: project, due_date: Date.today, title: "Fix bug") }
diff --git a/spec/features/explore/groups_list_spec.rb b/spec/features/explore/groups_list_spec.rb
index 39cd3c80307..91ee6d48a48 100644
--- a/spec/features/explore/groups_list_spec.rb
+++ b/spec/features/explore/groups_list_spec.rb
@@ -4,88 +4,104 @@ require 'spec_helper'
RSpec.describe 'Explore Groups page', :js, feature_category: :groups_and_projects do
let!(:user) { create :user }
- let!(:group) { create(:group) }
- let!(:public_group) { create(:group, :public) }
- let!(:private_group) { create(:group, :private) }
- let!(:empty_project) { create(:project, group: public_group) }
- before do
- group.add_owner(user)
+ context 'when there are groups to show' do
+ let!(:group) { create(:group) }
+ let!(:public_group) { create(:group, :public) }
+ let!(:private_group) { create(:group, :private) }
+ let!(:empty_project) { create(:project, group: public_group) }
- sign_in(user)
+ before do
+ group.add_owner(user)
- visit explore_groups_path
- wait_for_requests
- end
+ sign_in(user)
- it 'shows groups user is member of' do
- expect(page).to have_content(group.full_name)
- expect(page).to have_content(public_group.full_name)
- expect(page).not_to have_content(private_group.full_name)
- end
+ visit explore_groups_path
+ wait_for_requests
+ end
- it 'filters groups' do
- fill_in 'filter', with: group.name
- wait_for_requests
+ it 'shows groups user is member of' do
+ expect(page).to have_content(group.full_name)
+ expect(page).to have_content(public_group.full_name)
+ expect(page).not_to have_content(private_group.full_name)
+ end
- expect(page).to have_content(group.full_name)
- expect(page).not_to have_content(public_group.full_name)
- expect(page).not_to have_content(private_group.full_name)
- end
+ it 'filters groups' do
+ fill_in 'filter', with: group.name
+ wait_for_requests
- it 'resets search when user cleans the input' do
- fill_in 'filter', with: group.name
- wait_for_requests
+ expect(page).to have_content(group.full_name)
+ expect(page).not_to have_content(public_group.full_name)
+ expect(page).not_to have_content(private_group.full_name)
+ end
- expect(page).to have_content(group.full_name)
- expect(page).not_to have_content(public_group.full_name)
+ it 'resets search when user cleans the input' do
+ fill_in 'filter', with: group.name
+ wait_for_requests
- fill_in 'filter', with: ""
- page.find('[name="filter"]').send_keys(:enter)
- wait_for_requests
+ expect(page).to have_content(group.full_name)
+ expect(page).not_to have_content(public_group.full_name)
- expect(page).to have_content(group.full_name)
- expect(page).to have_content(public_group.full_name)
- expect(page).not_to have_content(private_group.full_name)
- expect(page.all('.js-groups-list-holder .groups-list li').length).to eq 2
- end
+ fill_in 'filter', with: ""
+ page.find('[name="filter"]').send_keys(:enter)
+ wait_for_requests
- it 'shows non-archived projects count' do
- # Initially project is not archived
- expect(find('.js-groups-list-holder .groups-list li:first-child .stats .number-projects')).to have_text("1")
+ expect(page).to have_content(group.full_name)
+ expect(page).to have_content(public_group.full_name)
+ expect(page).not_to have_content(private_group.full_name)
+ expect(page.all('.js-groups-list-holder .groups-list li').length).to eq 2
+ end
- # Archive project
- ::Projects::UpdateService.new(empty_project, user, archived: true).execute
- visit explore_groups_path
+ it 'shows non-archived projects count' do
+ # Initially project is not archived
+ expect(find('.js-groups-list-holder .groups-list li:first-child .stats .number-projects')).to have_text("1")
- # Check project count
- expect(find('.js-groups-list-holder .groups-list li:first-child .stats .number-projects')).to have_text("0")
+ # Archive project
+ ::Projects::UpdateService.new(empty_project, user, archived: true).execute
+ visit explore_groups_path
- # Unarchive project
- ::Projects::UpdateService.new(empty_project, user, archived: false).execute
- visit explore_groups_path
+ # Check project count
+ expect(find('.js-groups-list-holder .groups-list li:first-child .stats .number-projects')).to have_text("0")
- # Check project count
- expect(find('.js-groups-list-holder .groups-list li:first-child .stats .number-projects')).to have_text("1")
- end
+ # Unarchive project
+ ::Projects::UpdateService.new(empty_project, user, archived: false).execute
+ visit explore_groups_path
- describe 'landing component' do
- it 'shows a landing component' do
- expect(page).to have_content('Below you will find all the groups that are public.')
+ # Check project count
+ expect(find('.js-groups-list-holder .groups-list li:first-child .stats .number-projects')).to have_text("1")
end
- it 'is dismissable' do
- find('.dismiss-button').click
+ describe 'landing component' do
+ it 'shows a landing component' do
+ expect(page).to have_content('Below you will find all the groups that are public.')
+ end
+
+ it 'is dismissable' do
+ find('.dismiss-button').click
+
+ expect(page).not_to have_content('Below you will find all the groups that are public.')
+ end
- expect(page).not_to have_content('Below you will find all the groups that are public.')
+ it 'does not show persistently once dismissed' do
+ find('.dismiss-button').click
+
+ visit explore_groups_path
+
+ expect(page).not_to have_content('Below you will find all the groups that are public.')
+ end
end
+ end
- it 'does not show persistently once dismissed' do
- find('.dismiss-button').click
+ context 'when there are no groups to show' do
+ before do
+ sign_in(user)
visit explore_groups_path
+ wait_for_requests
+ end
- expect(page).not_to have_content('Below you will find all the groups that are public.')
+ it 'shows empty state' do
+ expect(page).to have_content(_('No public groups'))
end
end
end
diff --git a/spec/features/explore/navbar_spec.rb b/spec/features/explore/navbar_spec.rb
index 8f281abe6a7..853d66ed4d1 100644
--- a/spec/features/explore/navbar_spec.rb
+++ b/spec/features/explore/navbar_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe '"Explore" navbar', feature_category: :navigation do
it_behaves_like 'verified navigation bar' do
before do
+ stub_feature_flags(super_sidebar_logged_out: false)
visit explore_projects_path
end
end
diff --git a/spec/features/explore/user_explores_projects_spec.rb b/spec/features/explore/user_explores_projects_spec.rb
index f259ba6a167..43d464e0c9f 100644
--- a/spec/features/explore/user_explores_projects_spec.rb
+++ b/spec/features/explore/user_explores_projects_spec.rb
@@ -3,6 +3,10 @@
require 'spec_helper'
RSpec.describe 'User explores projects', feature_category: :user_profile do
+ before do
+ stub_feature_flags(super_sidebar_logged_out: false)
+ end
+
describe '"All" tab' do
it_behaves_like 'an "Explore" page with sidebar and breadcrumbs', :explore_projects_path, :projects
end
diff --git a/spec/features/global_search_spec.rb b/spec/features/global_search_spec.rb
index f94f0288f99..dfafacf48e2 100644
--- a/spec/features/global_search_spec.rb
+++ b/spec/features/global_search_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'Global search', :js, feature_category: :global_search do
include AfterNextHelpers
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
let_it_be(:project) { create(:project, namespace: user.namespace) }
before do
diff --git a/spec/features/groups/container_registry_spec.rb b/spec/features/groups/container_registry_spec.rb
index d68b4ccf8f8..953a8e27547 100644
--- a/spec/features/groups/container_registry_spec.rb
+++ b/spec/features/groups/container_registry_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Container Registry', :js, feature_category: :container_registry do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
let(:group) { create(:group) }
let(:project) { create(:project, namespace: group) }
diff --git a/spec/features/groups/dependency_proxy_for_containers_spec.rb b/spec/features/groups/dependency_proxy_for_containers_spec.rb
index c0456140291..1e15b97c5aa 100644
--- a/spec/features/groups/dependency_proxy_for_containers_spec.rb
+++ b/spec/features/groups/dependency_proxy_for_containers_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'Group Dependency Proxy for containers', :js, feature_category: :
include DependencyProxyHelpers
include_context 'file upload requests helpers'
+ include_context 'with a server running the dependency proxy'
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
@@ -21,17 +22,6 @@ RSpec.describe 'Group Dependency Proxy for containers', :js, feature_category: :
HTTParty.get(url, headers: headers)
end
- def run_server(handler)
- default_server = Capybara.server
-
- Capybara.server = Capybara.servers[:puma]
- server = Capybara::Server.new(handler)
- server.boot
- server
- ensure
- Capybara.server = default_server
- end
-
let_it_be(:external_server) do
handler = lambda do |env|
if env['REQUEST_PATH'] == '/token'
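
The local `run_server` helper removed above is superseded by `include_context 'with a server running the dependency proxy'`. That shared context lives under spec/support and is not shown in this patch; presumably it carries the same helper, roughly along these lines (a sketch reconstructed from the removed lines, not the actual file):

require 'capybara'

RSpec.shared_context 'with a server running the dependency proxy' do
  # Boot the Rack handler on Capybara's Puma server and restore the default
  # server afterwards, exactly as the removed in-file helper did.
  def run_server(handler)
    default_server = Capybara.server

    Capybara.server = Capybara.servers[:puma]
    server = Capybara::Server.new(handler)
    server.boot
    server
  ensure
    Capybara.server = default_server
  end
end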
diff --git a/spec/features/groups/dependency_proxy_spec.rb b/spec/features/groups/dependency_proxy_spec.rb
index 60922f813df..2d4f6d4fbf2 100644
--- a/spec/features/groups/dependency_proxy_spec.rb
+++ b/spec/features/groups/dependency_proxy_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe 'Group Dependency Proxy', feature_category: :dependency_proxy do
- let(:owner) { create(:user) }
- let(:reporter) { create(:user) }
+ let(:owner) { create(:user, :no_super_sidebar) }
+ let(:reporter) { create(:user, :no_super_sidebar) }
let(:group) { create(:group) }
let(:path) { group_dependency_proxy_path(group) }
let(:settings_path) { group_settings_packages_and_registries_path(group) }
diff --git a/spec/features/groups/group_page_with_external_authorization_service_spec.rb b/spec/features/groups/group_page_with_external_authorization_service_spec.rb
index 5b373aecce8..4cc0fe4171d 100644
--- a/spec/features/groups/group_page_with_external_authorization_service_spec.rb
+++ b/spec/features/groups/group_page_with_external_authorization_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'The group page', feature_category: :groups_and_projects do
include ExternalAuthorizationServiceHelpers
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
let(:group) { create(:group) }
before do
diff --git a/spec/features/groups/group_runners_spec.rb b/spec/features/groups/group_runners_spec.rb
index e9d2d185e8a..4e5d7c6f8e8 100644
--- a/spec/features/groups/group_runners_spec.rb
+++ b/spec/features/groups/group_runners_spec.rb
@@ -7,182 +7,235 @@ RSpec.describe "Group Runners", feature_category: :runner_fleet do
include Spec::Support::Helpers::ModalHelpers
let_it_be(:group_owner) { create(:user) }
+ let_it_be(:group_maintainer) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
before do
group.add_owner(group_owner)
- sign_in(group_owner)
+ group.add_maintainer(group_maintainer)
end
describe "Group runners page", :js do
- context "with no runners" do
+ context 'when logged in as group maintainer' do
before do
- visit group_runners_path(group)
+ sign_in(group_maintainer)
end
- it_behaves_like 'shows no runners registered'
-
- it 'shows tabs with total counts equal to 0' do
- expect(page).to have_link('All 0')
- expect(page).to have_link('Group 0')
- expect(page).to have_link('Project 0')
- end
- end
+ context "with no runners" do
+ before do
+ visit group_runners_path(group)
+ end
- context "with an online group runner" do
- let!(:group_runner) do
- create(:ci_runner, :group, groups: [group], description: 'runner-foo', contacted_at: Time.zone.now)
- end
+ it_behaves_like 'shows no runners registered'
- before do
- visit group_runners_path(group)
+ it 'shows tabs with total counts equal to 0' do
+ expect(page).to have_link('All 0')
+ expect(page).to have_link('Group 0')
+ expect(page).to have_link('Project 0')
+ end
end
- it_behaves_like 'shows runner in list' do
- let(:runner) { group_runner }
- end
+ context "with an online group runner" do
+ let_it_be(:group_runner) do
+ create(:ci_runner, :group, groups: [group], description: 'runner-foo', contacted_at: Time.zone.now)
+ end
- it_behaves_like 'pauses, resumes and deletes a runner' do
- let(:runner) { group_runner }
- end
+ before do
+ visit group_runners_path(group)
+ end
- it 'shows an editable group badge' do
- within_runner_row(group_runner.id) do
- expect(find_link('Edit')[:href]).to end_with(edit_group_runner_path(group, group_runner))
+ it_behaves_like 'shows runner in list' do
+ let(:runner) { group_runner }
+ end
- expect(page).to have_selector '.badge', text: s_('Runners|Group')
+ it_behaves_like 'shows runner details from list' do
+ let(:runner) { group_runner }
+ let(:runner_page_path) { group_runner_path(group, group_runner) }
end
- end
- context 'when description does not match' do
- before do
- input_filtered_search_keys('runner-baz')
+ it 'shows a group runner badge' do
+ within_runner_row(group_runner.id) do
+ expect(page).to have_selector '.badge', text: s_('Runners|Group')
+ end
end
- it_behaves_like 'shows no runners found'
+ context 'when description does not match' do
+ before do
+ input_filtered_search_keys('runner-baz')
+ end
+
+ it_behaves_like 'shows no runners found'
- it 'shows no runner' do
- expect(page).not_to have_content 'runner-foo'
+ it 'shows no runner' do
+ expect(page).not_to have_content 'runner-foo'
+ end
end
end
- end
- context "with an online project runner" do
- let!(:project_runner) do
- create(:ci_runner, :project, projects: [project], description: 'runner-bar', contacted_at: Time.zone.now)
- end
+ context "with an online project runner" do
+ let_it_be(:project_runner) do
+ create(:ci_runner, :project, projects: [project], description: 'runner-bar', contacted_at: Time.zone.now)
+ end
- before do
- visit group_runners_path(group)
- end
+ before do
+ visit group_runners_path(group)
+ end
- it_behaves_like 'shows runner in list' do
- let(:runner) { project_runner }
- end
+ it_behaves_like 'shows runner in list' do
+ let(:runner) { project_runner }
+ end
- it_behaves_like 'pauses, resumes and deletes a runner' do
- let(:runner) { project_runner }
+ it_behaves_like 'shows runner details from list' do
+ let(:runner) { project_runner }
+ let(:runner_page_path) { group_runner_path(group, project_runner) }
+ end
+
+ it 'shows a project runner badge' do
+ within_runner_row(project_runner.id) do
+ expect(page).to have_selector '.badge', text: s_('Runners|Project')
+ end
+ end
end
- it 'shows an editable project runner' do
- within_runner_row(project_runner.id) do
- expect(find_link('Edit')[:href]).to end_with(edit_group_runner_path(group, project_runner))
+ context "with an online instance runner" do
+ let_it_be(:instance_runner) do
+ create(:ci_runner, :instance, description: 'runner-baz', contacted_at: Time.zone.now)
+ end
- expect(page).to have_selector '.badge', text: s_('Runners|Project')
+ before do
+ visit group_runners_path(group)
end
- end
- end
- context "with an online instance runner" do
- let!(:instance_runner) do
- create(:ci_runner, :instance, description: 'runner-baz', contacted_at: Time.zone.now)
- end
+ context "when selecting 'Show only inherited'" do
+ before do
+ find("[data-testid='runner-membership-toggle'] button").click
- before do
- visit group_runners_path(group)
- end
+ wait_for_requests
+ end
- context "when selecting 'Show only inherited'" do
- before do
- find("[data-testid='runner-membership-toggle'] button").click
+ it_behaves_like 'shows runner in list' do
+ let(:runner) { instance_runner }
+ end
- wait_for_requests
+ it_behaves_like 'shows runner details from list' do
+ let(:runner) { instance_runner }
+ let(:runner_page_path) { group_runner_path(group, instance_runner) }
+ end
end
+ end
- it_behaves_like 'shows runner in list' do
- let(:runner) { instance_runner }
+ describe 'filtered search' do
+ before do
+ visit group_runners_path(group)
end
- it 'shows runner details page' do
- click_link("##{instance_runner.id} (#{instance_runner.short_sha})")
+ it 'allows user to search by paused and status', :js do
+ focus_filtered_search
- expect(current_url).to include(group_runner_path(group, instance_runner))
- expect(page).to have_content "#{s_('Runners|Description')} runner-baz"
+ page.within(search_bar_selector) do
+ expect(page).to have_link(s_('Runners|Paused'))
+ expect(page).to have_content('Status')
+ end
end
end
- end
- context 'with a multi-project runner' do
- let(:project) { create(:project, group: group) }
- let(:project_2) { create(:project, group: group) }
- let!(:runner) { create(:ci_runner, :project, projects: [project, project_2], description: 'group-runner') }
+ describe 'filter by tag' do
+ let!(:rnr_1) { create(:ci_runner, :group, groups: [group], description: 'runner-blue', tag_list: ['blue']) }
+ let!(:rnr_2) { create(:ci_runner, :group, groups: [group], description: 'runner-red', tag_list: ['red']) }
- it 'user cannot remove the project runner' do
- visit group_runners_path(group)
+ before do
+ visit group_runners_path(group)
+ end
- within_runner_row(runner.id) do
- expect(page).not_to have_button 'Delete runner'
+ it_behaves_like 'filters by tag' do
+ let(:tag) { 'blue' }
+ let(:found_runner) { rnr_1.description }
+ let(:missing_runner) { rnr_2.description }
end
end
end
- context "with multiple runners" do
+ context 'when logged in as group owner' do
before do
- create(:ci_runner, :group, groups: [group], description: 'runner-foo')
- create(:ci_runner, :group, groups: [group], description: 'runner-bar')
-
- visit group_runners_path(group)
+ sign_in(group_owner)
end
- it_behaves_like 'deletes runners in bulk' do
- let(:runner_count) { '2' }
- end
- end
+ context "with an online group runner" do
+ let_it_be(:group_runner) do
+ create(:ci_runner, :group, groups: [group], description: 'runner-foo', contacted_at: Time.zone.now)
+ end
- describe 'filtered search' do
- before do
- visit group_runners_path(group)
+ before do
+ visit group_runners_path(group)
+ end
+
+ it_behaves_like 'pauses, resumes and deletes a runner' do
+ let(:runner) { group_runner }
+ end
+
+ it 'shows an edit link' do
+ within_runner_row(group_runner.id) do
+ expect(find_link('Edit')[:href]).to end_with(edit_group_runner_path(group, group_runner))
+ end
+ end
end
- it 'allows user to search by paused and status', :js do
- focus_filtered_search
+ context "with an online project runner" do
+ let_it_be(:project_runner) do
+ create(:ci_runner, :project, projects: [project], description: 'runner-bar', contacted_at: Time.zone.now)
+ end
+
+ before do
+ visit group_runners_path(group)
+ end
- page.within(search_bar_selector) do
- expect(page).to have_link(s_('Runners|Paused'))
- expect(page).to have_content('Status')
+ it_behaves_like 'pauses, resumes and deletes a runner' do
+ let(:runner) { project_runner }
+ end
+
+ it 'shows an editable project runner' do
+ within_runner_row(project_runner.id) do
+ expect(find_link('Edit')[:href]).to end_with(edit_group_runner_path(group, project_runner))
+ end
end
end
- end
- describe 'filter by tag' do
- let!(:runner_1) { create(:ci_runner, :group, groups: [group], description: 'runner-blue', tag_list: ['blue']) }
- let!(:runner_2) { create(:ci_runner, :group, groups: [group], description: 'runner-red', tag_list: ['red']) }
+ context 'with a multi-project runner' do
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:project_2) { create(:project, group: group) }
+ let_it_be(:runner) do
+ create(:ci_runner, :project, projects: [project, project_2], description: 'group-runner')
+ end
- before do
- visit group_runners_path(group)
+ it 'owner cannot remove the project runner' do
+ visit group_runners_path(group)
+
+ within_runner_row(runner.id) do
+ expect(page).not_to have_button 'Delete runner'
+ end
+ end
end
- it_behaves_like 'filters by tag' do
- let(:tag) { 'blue' }
- let(:found_runner) { runner_1.description }
- let(:missing_runner) { runner_2.description }
+ context "with multiple runners" do
+ before do
+ create(:ci_runner, :group, groups: [group], description: 'runner-foo')
+ create(:ci_runner, :group, groups: [group], description: 'runner-bar')
+
+ visit group_runners_path(group)
+ end
+
+ it_behaves_like 'deletes runners in bulk' do
+ let(:runner_count) { '2' }
+ end
end
end
end
describe "Group runner create page", :js do
before do
+ sign_in(group_owner)
+
visit new_group_runner_path(group)
end
@@ -196,23 +249,39 @@ RSpec.describe "Group Runners", feature_category: :runner_fleet do
create(:ci_runner, :group, groups: [group], description: 'runner-foo')
end
- let_it_be(:group_runner_job) { create(:ci_build, runner: group_runner) }
+ let_it_be(:group_runner_job) { create(:ci_build, runner: group_runner, project: project) }
- before do
- visit group_runner_path(group, group_runner)
- end
+ context 'when logged in as group maintainer' do
+ before do
+ sign_in(group_maintainer)
- it 'user views runner details' do
- expect(page).to have_content "#{s_('Runners|Description')} runner-foo"
+ visit group_runner_path(group, group_runner)
+ end
+
+ it 'user views runner details' do
+ expect(page).to have_content "#{s_('Runners|Description')} runner-foo"
+ end
end
- it_behaves_like 'shows runner jobs tab' do
- let(:job_count) { '1' }
- let(:job) { group_runner_job }
+ context 'when logged in as group owner' do
+ before do
+ sign_in(group_owner)
+
+ visit group_runner_path(group, group_runner)
+ end
+
+ it_behaves_like 'shows runner jobs tab' do
+ let(:job_count) { '1' }
+ let(:job) { group_runner_job }
+ end
end
end
describe "Group runner edit page", :js do
+ before do
+ sign_in(group_owner)
+ end
+
context 'when updating a group runner' do
let_it_be(:group_runner) { create(:ci_runner, :group, groups: [group]) }
@@ -239,6 +308,8 @@ RSpec.describe "Group Runners", feature_category: :runner_fleet do
let(:runner) { project_runner }
let(:runner_page_path) { group_runner_path(group, project_runner) }
end
+
+ it_behaves_like 'shows locked field'
end
end
end
diff --git a/spec/features/groups/labels/create_spec.rb b/spec/features/groups/labels/create_spec.rb
index 5b57e670c1d..8242f422e6e 100644
--- a/spec/features/groups/labels/create_spec.rb
+++ b/spec/features/groups/labels/create_spec.rb
@@ -9,15 +9,17 @@ RSpec.describe 'Create a group label', feature_category: :team_planning do
before do
group.add_owner(user)
sign_in(user)
- visit group_labels_path(group)
+
+ visit new_group_label_path(group)
end
it 'creates a new label' do
- click_link 'New label'
fill_in 'Title', with: 'test-label'
click_button 'Create label'
expect(page).to have_content 'test-label'
expect(page).to have_current_path(group_labels_path(group), ignore_query: true)
end
+
+ it_behaves_like 'lock_on_merge when creating labels'
end
diff --git a/spec/features/groups/labels/edit_spec.rb b/spec/features/groups/labels/edit_spec.rb
index 6e056d35435..70568d4baa2 100644
--- a/spec/features/groups/labels/edit_spec.rb
+++ b/spec/features/groups/labels/edit_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe 'Edit group label', feature_category: :team_planning do
before do
group.add_owner(user)
sign_in(user)
+
visit edit_group_label_path(group, label)
end
@@ -34,4 +35,17 @@ RSpec.describe 'Edit group label', feature_category: :team_planning do
expect(page).to have_content("#{label.title} was removed").and have_no_content("#{label.title}</span>")
end
+
+ describe 'lock_on_merge' do
+ let(:label_unlocked) { create(:group_label, group: group, lock_on_merge: false) }
+ let(:label_locked) { create(:group_label, group: group, lock_on_merge: true) }
+ let(:edit_label_path_unlocked) { edit_group_label_path(group, label_unlocked) }
+ let(:edit_label_path_locked) { edit_group_label_path(group, label_locked) }
+
+ before do
+ visit edit_label_path_unlocked
+ end
+
+ it_behaves_like 'lock_on_merge when editing labels'
+ end
end
diff --git a/spec/features/groups/members/manage_members_spec.rb b/spec/features/groups/members/manage_members_spec.rb
index 138031ffaac..dd64ddcede5 100644
--- a/spec/features/groups/members/manage_members_spec.rb
+++ b/spec/features/groups/members/manage_members_spec.rb
@@ -85,7 +85,7 @@ RSpec.describe 'Groups > Members > Manage members', feature_category: :groups_an
end
end
- it_behaves_like 'inviting members', 'group-members-page' do
+ it_behaves_like 'inviting members', 'group_members_page' do
let_it_be(:entity) { group }
let_it_be(:members_page_path) { group_group_members_path(entity) }
let_it_be(:subentity) { create(:group, parent: group) }
diff --git a/spec/features/groups/members/request_access_spec.rb b/spec/features/groups/members/request_access_spec.rb
index cd0c9bfe3eb..c04b84be90e 100644
--- a/spec/features/groups/members/request_access_spec.rb
+++ b/spec/features/groups/members/request_access_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe 'Groups > Members > Request access', feature_category: :groups_and_projects do
- let(:user) { create(:user) }
- let(:owner) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
+ let(:owner) { create(:user, :no_super_sidebar) }
let(:group) { create(:group, :public) }
let!(:project) { create(:project, :private, namespace: group) }
diff --git a/spec/features/groups/navbar_spec.rb b/spec/features/groups/navbar_spec.rb
index a52e2d95fed..6a38f0c59a8 100644
--- a/spec/features/groups/navbar_spec.rb
+++ b/spec/features/groups/navbar_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'Group navbar', :with_license, feature_category: :navigation do
include_context 'group navbar structure'
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
let(:group) { create(:group) }
@@ -18,7 +18,6 @@ RSpec.describe 'Group navbar', :with_license, feature_category: :navigation do
stub_config(dependency_proxy: { enabled: false })
stub_config(registry: { enabled: false })
- stub_feature_flags(harbor_registry_integration: false)
stub_feature_flags(observability_group_tab: false)
stub_group_wikis(false)
group.add_maintainer(user)
@@ -87,8 +86,6 @@ RSpec.describe 'Group navbar', :with_license, feature_category: :navigation do
before do
group.update!(harbor_integration: harbor_integration)
- stub_feature_flags(harbor_registry_integration: true)
-
insert_harbor_registry_nav(_('Package Registry'))
visit group_path(group)
diff --git a/spec/features/groups/new_group_page_spec.rb b/spec/features/groups/new_group_page_spec.rb
index c3731565ddf..e1034f2bb9d 100644
--- a/spec/features/groups/new_group_page_spec.rb
+++ b/spec/features/groups/new_group_page_spec.rb
@@ -39,14 +39,14 @@ RSpec.describe 'New group page', :js, feature_category: :groups_and_projects do
context 'for a new top-level group' do
it 'shows the "Your work" navigation' do
visit new_group_path
- expect(page).to have_selector(".super-sidebar .context-switcher-toggle", text: "Your work")
+ expect(page).to have_selector(".super-sidebar", text: "Your work")
end
end
context 'for a new subgroup' do
it 'shows the group navigation of the parent group' do
visit new_group_path(parent_id: parent_group.id, anchor: 'create-group-pane')
- expect(page).to have_selector(".super-sidebar .context-switcher-toggle", text: parent_group.name)
+ expect(page).to have_selector(".super-sidebar", text: parent_group.name)
end
end
end
diff --git a/spec/features/groups/packages_spec.rb b/spec/features/groups/packages_spec.rb
index ec8215928e4..1d9269501be 100644
--- a/spec/features/groups/packages_spec.rb
+++ b/spec/features/groups/packages_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Group Packages', feature_category: :package_registry do
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
diff --git a/spec/features/groups/settings/packages_and_registries_spec.rb b/spec/features/groups/settings/packages_and_registries_spec.rb
index 8ea8dc9219a..fa310722860 100644
--- a/spec/features/groups/settings/packages_and_registries_spec.rb
+++ b/spec/features/groups/settings/packages_and_registries_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'Group Package and registry settings', feature_category: :package_registry do
include WaitForRequests
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
let(:group) { create(:group) }
let(:sub_group) { create(:group, parent: group) }
diff --git a/spec/features/groups/user_sees_package_sidebar_spec.rb b/spec/features/groups/user_sees_package_sidebar_spec.rb
index 6a91dfb92bf..4efb9ff7608 100644
--- a/spec/features/groups/user_sees_package_sidebar_spec.rb
+++ b/spec/features/groups/user_sees_package_sidebar_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Groups > sidebar', feature_category: :groups_and_projects do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
let(:group) { create(:group) }
before do
diff --git a/spec/features/groups_spec.rb b/spec/features/groups_spec.rb
index 67133b1856f..7af58bf460c 100644
--- a/spec/features/groups_spec.rb
+++ b/spec/features/groups_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Group', feature_category: :groups_and_projects do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
before do
sign_in(user)
diff --git a/spec/features/help_dropdown_spec.rb b/spec/features/help_dropdown_spec.rb
index 5f1d3a5e2b7..08d7dba4d79 100644
--- a/spec/features/help_dropdown_spec.rb
+++ b/spec/features/help_dropdown_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe "Help Dropdown", :js, feature_category: :shared do
- let_it_be(:user) { create(:user) }
- let_it_be(:admin) { create(:admin) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
+ let_it_be(:admin) { create(:admin, :no_super_sidebar) }
before do
stub_application_setting(version_check_enabled: true)
diff --git a/spec/features/ide/user_opens_merge_request_spec.rb b/spec/features/ide/user_opens_merge_request_spec.rb
index dc280133a20..2aa89cadb7d 100644
--- a/spec/features/ide/user_opens_merge_request_spec.rb
+++ b/spec/features/ide/user_opens_merge_request_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
RSpec.describe 'IDE merge request', :js, feature_category: :web_ide do
include CookieHelper
- let(:merge_request) { create(:merge_request, :simple, source_project: project) }
- let(:project) { create(:project, :public, :repository) }
- let(:user) { project.first_owner }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
+ let_it_be(:project) { create(:project, :public, :repository, namespace: user.namespace) }
+ let_it_be(:merge_request) { create(:merge_request, :simple, source_project: project) }
before do
stub_feature_flags(vscode_web_ide: false)
diff --git a/spec/features/incidents/incident_details_spec.rb b/spec/features/incidents/incident_details_spec.rb
index d6feb008d47..7e447ae32c0 100644
--- a/spec/features/incidents/incident_details_spec.rb
+++ b/spec/features/incidents/incident_details_spec.rb
@@ -150,6 +150,9 @@ RSpec.describe 'Incident details', :js, feature_category: :incident_management d
wait_for_requests
sticky_header = find_by_scrolling('[data-testid=issue-sticky-header]')
- expect(sticky_header.find('[data-testid=confidential]')).to be_present
+
+ page.within(sticky_header) do
+ expect(page).to have_text 'Confidential'
+ end
end
end
diff --git a/spec/features/invites_spec.rb b/spec/features/invites_spec.rb
index 03ec72980e5..a56df7bdecc 100644
--- a/spec/features/invites_spec.rb
+++ b/spec/features/invites_spec.rb
@@ -4,7 +4,8 @@ require 'spec_helper'
RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_category: :experimentation_expansion do
let_it_be(:owner) { create(:user, name: 'John Doe') }
- let_it_be(:group) { create(:group, name: 'Owned') }
+ # private will ensure we really have access to the group when we land on the activity page
+ let_it_be(:group) { create(:group, :private, name: 'Owned') }
let_it_be(:project) { create(:project, :repository, namespace: group) }
let(:group_invite) { group.group_members.invite.last }
@@ -22,18 +23,6 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
visit user_confirmation_path(confirmation_token: new_user_token)
end
- def fill_in_sign_up_form(new_user, submit_button_text = 'Register')
- fill_in 'new_user_first_name', with: new_user.first_name
- fill_in 'new_user_last_name', with: new_user.last_name
- fill_in 'new_user_username', with: new_user.username
- fill_in 'new_user_email', with: new_user.email
- fill_in 'new_user_password', with: new_user.password
-
- wait_for_all_requests
-
- click_button submit_button_text
- end
-
def fill_in_welcome_form
select 'Software Developer', from: 'user_role'
click_button 'Get started!'
@@ -58,10 +47,10 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
expect(page).to have_content('To accept this invitation, create an account or sign in')
end
- it 'pre-fills the "Username or email" field on the sign in box with the invite_email from the invite' do
+ it 'pre-fills the "Username or primary email" field on the sign in box with the invite_email from the invite' do
click_link 'Sign in'
- expect(find_field('Username or email').value).to eq(group_invite.invite_email)
+ expect(find_field('Username or primary email').value).to eq(group_invite.invite_email)
end
it 'pre-fills the Email field on the sign up box with the invite_email from the invite' do
@@ -70,11 +59,11 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
end
context 'when invite is sent before account is created; LDAP or service sign-in for manual acceptance edge case' do
- let(:user) { create(:user, email: 'user@example.com') }
+ let(:user) { create(:user, :no_super_sidebar, email: 'user@example.com') }
context 'when invite clicked and not signed in' do
before do
- visit invite_path(group_invite.raw_invite_token)
+ visit invite_path(group_invite.raw_invite_token, invite_type: Emails::Members::INITIAL_INVITE)
end
it 'sign in, grants access and redirects to group activity page' do
@@ -82,7 +71,7 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
gitlab_sign_in(user, remember: true, visit: false)
- expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
+ expect_to_be_on_group_activity_page(group)
end
end
@@ -143,6 +132,10 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
end
end
end
+
+ def expect_to_be_on_group_activity_page(group)
+ expect(page).to have_current_path(activity_group_path(group))
+ end
end
end
end
@@ -195,12 +188,11 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
context 'when the user sign-up using a different email address' do
let(:invite_email) { build_stubbed(:user).email }
- it 'signs up and redirects to the activity page',
- quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/414971' do
+ it 'signs up and redirects to the projects dashboard' do
fill_in_sign_up_form(new_user)
fill_in_welcome_form
- expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
+ expect_to_be_on_projects_dashboard_with_zero_authorized_projects
end
end
end
@@ -232,8 +224,7 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
end
context 'when the user signs up for an account with the invitation email address' do
- it 'redirects to the most recent membership activity page with all invitations automatically accepted',
- quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/417092' do
+ it 'redirects to the most recent membership activity page with all invitations automatically accepted' do
fill_in_sign_up_form(new_user)
fill_in_welcome_form
@@ -250,13 +241,13 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
stub_feature_flags(identity_verification: false)
end
- it 'signs up and redirects to the group activity page' do
+ it 'signs up and redirects to the projects dashboard' do
fill_in_sign_up_form(new_user)
confirm_email(new_user)
gitlab_sign_in(new_user, remember: true, visit: false)
fill_in_welcome_form
- expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
+ expect_to_be_on_projects_dashboard_with_zero_authorized_projects
end
end
@@ -266,15 +257,22 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
allow(User).to receive(:allow_unconfirmed_access_for).and_return 2.days
end
- it 'signs up and redirects to the group activity page' do
+ it 'signs up and redirects to the projects dashboard' do
fill_in_sign_up_form(new_user)
fill_in_welcome_form
- expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
+ expect_to_be_on_projects_dashboard_with_zero_authorized_projects
end
end
end
end
+
+ def expect_to_be_on_projects_dashboard_with_zero_authorized_projects
+ expect(page).to have_current_path(dashboard_projects_path)
+
+ expect(page).to have_content _('Welcome to GitLab')
+ expect(page).to have_content _('Faster releases. Better code. Less pain.')
+ end
end
context 'when accepting an invite without an account' do
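The invites_spec hunks fold repeated path assertions into helpers defined at the bottom of the example group; a rough sketch of why that works (the paths and strings are quoted from the diff, the rest is illustrative):

require 'spec_helper'

RSpec.describe 'Invitation sign-up flow (sketch)' do
  it 'lands on the projects dashboard' do
    # sign-up and welcome steps elided here
    expect_to_be_on_projects_dashboard_with_zero_authorized_projects
  end

  # A plain method defined inside a describe block becomes an instance method
  # of the example group, so every example in the block can call it directly.
  def expect_to_be_on_projects_dashboard_with_zero_authorized_projects
    expect(page).to have_current_path(dashboard_projects_path)

    expect(page).to have_content _('Welcome to GitLab')
    expect(page).to have_content _('Faster releases. Better code. Less pain.')
  end
end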
diff --git a/spec/features/issues/filtered_search/dropdown_assignee_spec.rb b/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
index 0a06a052bc2..0c5b33c2530 100644
--- a/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
@@ -113,33 +113,5 @@ RSpec.describe 'Dropdown assignee', :js, feature_category: :team_planning do
expect(page).to have_text invited_to_group_group_user.name
expect(page).not_to have_text subsubgroup_user.name
end
-
- context 'when new_graphql_users_autocomplete is disabled' do
- before do
- stub_feature_flags(new_graphql_users_autocomplete: false)
- end
-
- it 'shows inherited, direct, and invited group members but not descendent members', :aggregate_failures do
- visit issues_group_path(subgroup)
-
- select_tokens 'Assignee', '='
-
- expect(page).to have_text group_user.name
- expect(page).to have_text subgroup_user.name
- expect(page).to have_text invited_to_group_group_user.name
- expect(page).not_to have_text subsubgroup_user.name
- expect(page).not_to have_text invited_to_project_group_user.name
-
- visit project_issues_path(subgroup_project)
-
- select_tokens 'Assignee', '='
-
- expect(page).to have_text group_user.name
- expect(page).to have_text subgroup_user.name
- expect(page).to have_text invited_to_project_group_user.name
- expect(page).to have_text invited_to_group_group_user.name
- expect(page).not_to have_text subsubgroup_user.name
- end
- end
end
end
diff --git a/spec/features/issues/filtered_search/visual_tokens_spec.rb b/spec/features/issues/filtered_search/visual_tokens_spec.rb
index 3031b20eb7c..e51c82081ff 100644
--- a/spec/features/issues/filtered_search/visual_tokens_spec.rb
+++ b/spec/features/issues/filtered_search/visual_tokens_spec.rb
@@ -6,8 +6,8 @@ RSpec.describe 'Visual tokens', :js, feature_category: :team_planning do
include FilteredSearchHelpers
let_it_be(:project) { create(:project) }
- let_it_be(:user) { create(:user, name: 'administrator', username: 'root') }
- let_it_be(:user_rock) { create(:user, name: 'The Rock', username: 'rock') }
+ let_it_be(:user) { create(:user, :no_super_sidebar, name: 'administrator', username: 'root') }
+ let_it_be(:user_rock) { create(:user, :no_super_sidebar, name: 'The Rock', username: 'rock') }
let_it_be(:milestone_nine) { create(:milestone, title: '9.0', project: project) }
let_it_be(:milestone_ten) { create(:milestone, title: '10.0', project: project) }
let_it_be(:label) { create(:label, project: project, title: 'abc') }
diff --git a/spec/features/issues/form_spec.rb b/spec/features/issues/form_spec.rb
index 5f7a4f26a98..ed2c712feb1 100644
--- a/spec/features/issues/form_spec.rb
+++ b/spec/features/issues/form_spec.rb
@@ -8,14 +8,13 @@ RSpec.describe 'New/edit issue', :js, feature_category: :team_planning do
include ContentEditorHelpers
let_it_be(:project) { create(:project, :repository) }
- let_it_be(:user) { create(:user) }
- let_it_be(:user2) { create(:user) }
- let_it_be(:guest) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
+ let_it_be(:user2) { create(:user, :no_super_sidebar) }
+ let_it_be(:guest) { create(:user, :no_super_sidebar) }
let_it_be(:milestone) { create(:milestone, project: project) }
let_it_be(:label) { create(:label, project: project) }
let_it_be(:label2) { create(:label, project: project) }
let_it_be(:issue) { create(:issue, project: project, assignees: [user], milestone: milestone) }
- let_it_be(:issue2) { create(:issue, project: project, assignees: [user], milestone: milestone) }
let_it_be(:confidential_issue) { create(:issue, project: project, assignees: [user], milestone: milestone, confidential: true) }
let(:current_user) { user }
@@ -666,69 +665,59 @@ RSpec.describe 'New/edit issue', :js, feature_category: :team_planning do
end
end
- describe 'inline edit' do
- context 'within issue 1' do
- before do
- visit project_issue_path(project, issue)
- wait_for_requests
- end
+ describe 'editing an issue by hotkey' do
+ let_it_be(:issue2) { create(:issue, project: project) }
- it 'opens inline edit form with shortcut' do
- find('body').send_keys('e')
+ before do
+ visit project_issue_path(project, issue2)
+ end
- expect(page).to have_selector('.detail-page-description form')
- end
+ it 'opens inline edit form with shortcut' do
+ find('body').send_keys('e')
- describe 'when user has made no changes' do
- it 'let user leave the page without warnings' do
- expected_content = 'Issue created'
- expect(page).to have_content(expected_content)
+ expect(page).to have_selector('.detail-page-description form')
+ end
- find('body').send_keys('e')
+ context 'when user has made no changes' do
+ it 'lets the user leave the page without warnings' do
+ expected_content = 'Issue created'
+ expect(page).to have_content(expected_content)
- click_link 'Boards'
+ find('body').send_keys('e')
- expect(page).not_to have_content(expected_content)
- end
- end
+ click_link 'Boards'
- describe 'when user has made changes' do
- it 'shows a warning and can stay on page', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/397683' do
- content = 'new issue content'
+ expect(page).not_to have_content(expected_content)
+ end
+ end
- find('body').send_keys('e')
- fill_in 'issue-description', with: content
+ context 'when user has made changes' do
+ it 'shows a warning and can stay on page' do
+ content = 'new issue content'
- click_link 'Boards'
+ find('body').send_keys('e')
+ fill_in 'issue-description', with: content
+ click_link 'Boards' do
page.driver.browser.switch_to.alert.dismiss
-
- click_button 'Save changes'
- wait_for_requests
-
- expect(page).to have_content(content)
end
- end
- end
- context 'within issue 2' do
- before do
- visit project_issue_path(project, issue2)
+ click_button 'Save changes'
wait_for_requests
- end
- describe 'when user has made changes' do
- it 'shows a warning and can leave page', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/410497' do
- content = 'new issue content'
- find('body').send_keys('e')
- fill_in 'issue-description', with: content
+ expect(page).to have_content(content)
+ end
- click_link 'Boards'
+ it 'shows a warning and can leave page' do
+ content = 'new issue content'
+ find('body').send_keys('e')
+ fill_in 'issue-description', with: content
+ click_link 'Boards' do
page.driver.browser.switch_to.alert.accept
-
- expect(page).not_to have_content(content)
end
+
+ expect(page).not_to have_content(content)
end
end
end
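Several files in this diff convert let to let_it_be. let_it_be comes from the test-prof gem (loaded by GitLab's spec support) and creates the record once before the whole example group, while let builds it lazily per example. An illustrative sketch, not taken from the suite:

require 'spec_helper'

RSpec.describe 'let vs let_it_be (sketch)' do
  # Built once before all examples in the group; examples must not mutate it
  # (or should opt into test-prof's reload:/refind: modifiers).
  let_it_be(:project) { create(:project, :repository) }

  # Built lazily, once per example that references it.
  let(:issue) { create(:issue, project: project) }

  it 'reuses the same project record across examples' do
    expect(issue.project).to eq(project)
  end
end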
diff --git a/spec/features/issues/issue_state_spec.rb b/spec/features/issues/issue_state_spec.rb
index 758dafccb86..2a8b33183bb 100644
--- a/spec/features/issues/issue_state_spec.rb
+++ b/spec/features/issues/issue_state_spec.rb
@@ -3,53 +3,71 @@
require 'spec_helper'
RSpec.describe 'issue state', :js, feature_category: :team_planning do
- let_it_be(:project) { create(:project) }
+ include CookieHelper
+
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
let_it_be(:user) { create(:user) }
before do
project.add_developer(user)
sign_in(user)
+ set_cookie('new-actions-popover-viewed', 'true')
end
shared_examples 'issue closed' do |selector|
it 'can close an issue' do
- wait_for_requests
+ expect(page).to have_selector('[data-testid="issue-state-badge"]')
- expect(find('.status-box')).to have_content 'Open'
+ expect(find('[data-testid="issue-state-badge"]')).to have_content 'Open'
within selector do
click_button 'Close issue'
wait_for_requests
end
- expect(find('.status-box')).to have_content 'Closed'
+ expect(find('[data-testid="issue-state-badge"]')).to have_content 'Closed'
end
end
shared_examples 'issue reopened' do |selector|
it 'can reopen an issue' do
- wait_for_requests
+ expect(page).to have_selector('[data-testid="issue-state-badge"]')
- expect(find('.status-box')).to have_content 'Closed'
+ expect(find('[data-testid="issue-state-badge"]')).to have_content 'Closed'
within selector do
click_button 'Reopen issue'
wait_for_requests
end
- expect(find('.status-box')).to have_content 'Open'
+ expect(find('[data-testid="issue-state-badge"]')).to have_content 'Open'
end
end
- describe 'when open', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/297348' do
+ describe 'when open' do
context 'when clicking the top `Close issue` button', :aggregate_failures do
- let(:open_issue) { create(:issue, project: project) }
+ context 'when move_close_into_dropdown FF is disabled' do
+ let(:open_issue) { create(:issue, project: project) }
- before do
- visit project_issue_path(project, open_issue)
+ before do
+ stub_feature_flags(move_close_into_dropdown: false)
+ visit project_issue_path(project, open_issue)
+ end
+
+ it_behaves_like 'issue closed', '.detail-page-header-actions'
end
- it_behaves_like 'issue closed', '.detail-page-header'
+ context 'when move_close_into_dropdown FF is enabled' do
+ let(:open_issue) { create(:issue, project: project) }
+
+ before do
+ visit project_issue_path(project, open_issue)
+ find('#new-actions-header-dropdown > button').click
+ end
+
+ it_behaves_like 'issue closed', '.dropdown-menu-right'
+ end
end
context 'when clicking the bottom `Close issue` button', :aggregate_failures do
@@ -63,15 +81,29 @@ RSpec.describe 'issue state', :js, feature_category: :team_planning do
end
end
- describe 'when closed', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/297201' do
+ describe 'when closed' do
context 'when clicking the top `Reopen issue` button', :aggregate_failures do
- let(:closed_issue) { create(:issue, project: project, state: 'closed') }
+ context 'when move_close_into_dropdown FF is disabled' do
+ let(:closed_issue) { create(:issue, project: project, state: 'closed', author: user) }
- before do
- visit project_issue_path(project, closed_issue)
+ before do
+ stub_feature_flags(move_close_into_dropdown: false)
+ visit project_issue_path(project, closed_issue)
+ end
+
+ it_behaves_like 'issue reopened', '.detail-page-header-actions'
end
- it_behaves_like 'issue reopened', '.detail-page-header'
+ context 'when move_close_into_dropdown FF is enabled' do
+ let(:closed_issue) { create(:issue, project: project, state: 'closed', author: user) }
+
+ before do
+ visit project_issue_path(project, closed_issue)
+ find('#new-actions-header-dropdown > button').click
+ end
+
+ it_behaves_like 'issue reopened', '.dropdown-menu-right'
+ end
end
context 'when clicking the bottom `Reopen issue` button', :aggregate_failures do
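The issue_state_spec hunks drive both feature-flag branches through one parameterised shared example; stripped down, the mechanism looks like this (selectors and badge text mirror the diff, setup such as creating and visiting the issue is elided):

require 'spec_helper'

RSpec.shared_examples 'issue closed' do |selector|
  it 'can close an issue' do
    expect(find('[data-testid="issue-state-badge"]')).to have_content 'Open'

    # `selector` is supplied by each it_behaves_like call, so the same steps
    # run against either the header actions bar or the dropdown menu.
    within selector do
      click_button 'Close issue'
    end

    expect(find('[data-testid="issue-state-badge"]')).to have_content 'Closed'
  end
end

RSpec.describe 'issue state (sketch)', :js do
  # setup that creates and visits the issue is elided here

  it_behaves_like 'issue closed', '.detail-page-header-actions'
  it_behaves_like 'issue closed', '.dropdown-menu-right'
end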
diff --git a/spec/features/issues/move_spec.rb b/spec/features/issues/move_spec.rb
index 4512e88ae72..a6ed0b52e7d 100644
--- a/spec/features/issues/move_spec.rb
+++ b/spec/features/issues/move_spec.rb
@@ -103,7 +103,7 @@ RSpec.describe 'issue move to another project', feature_category: :team_planning
let(:namespace) { create(:namespace) }
let(:regular_project) { create(:project, title: project_title, service_desk_enabled: false) }
let(:service_desk_project) { build(:project, :private, namespace: namespace, service_desk_enabled: true) }
- let(:service_desk_issue) { create(:issue, project: service_desk_project, author: ::User.support_bot) }
+ let(:service_desk_issue) { create(:issue, project: service_desk_project, author: ::Users::Internal.support_bot) }
before do
allow(Gitlab::Email::IncomingEmail).to receive(:enabled?).and_return(true)
diff --git a/spec/features/issues/note_polling_spec.rb b/spec/features/issues/note_polling_spec.rb
index a390dca6822..293b6c53eb5 100644
--- a/spec/features/issues/note_polling_spec.rb
+++ b/spec/features/issues/note_polling_spec.rb
@@ -19,22 +19,6 @@ RSpec.describe 'Issue notes polling', :js, feature_category: :team_planning do
expect(page).to have_selector("#note_#{note.id}", text: 'Looks good!')
end
-
- context 'when action_cable_notes is disabled' do
- before do
- stub_feature_flags(action_cable_notes: false)
- end
-
- it 'displays the new comment' do
- visit project_issue_path(project, issue)
- close_rich_text_promo_popover_if_present
-
- note = create(:note, noteable: issue, project: project, note: 'Looks good!')
- wait_for_requests
-
- expect(page).to have_selector("#note_#{note.id}", text: 'Looks good!')
- end
- end
end
describe 'updates' do
diff --git a/spec/features/issues/service_desk_spec.rb b/spec/features/issues/service_desk_spec.rb
index 1b99c8b39d3..120b4ddb6e1 100644
--- a/spec/features/issues/service_desk_spec.rb
+++ b/spec/features/issues/service_desk_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
RSpec.describe 'Service Desk Issue Tracker', :js, feature_category: :service_desk do
let(:project) { create(:project, :private, service_desk_enabled: true) }
- let_it_be(:user) { create(:user) }
- let_it_be(:support_bot) { User.support_bot }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
+ let_it_be(:support_bot) { Users::Internal.support_bot }
before do
# The following two conditions equate to Gitlab::ServiceDesk.supported == true
@@ -252,7 +252,7 @@ RSpec.describe 'Service Desk Issue Tracker', :js, feature_category: :service_des
end
it 'shows service_desk_reply_to in issues list' do
- expect(page).to have_text('by GitLab Support Bot')
+ expect(page).to have_text('by service.desk@example.com via GitLab Support Bot')
end
end
end
diff --git a/spec/features/issues/todo_spec.rb b/spec/features/issues/todo_spec.rb
index 2c537cefa5e..c503c18be8d 100644
--- a/spec/features/issues/todo_spec.rb
+++ b/spec/features/issues/todo_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'Manually create a todo item from issue', :js, feature_category: :team_planning do
let!(:project) { create(:project) }
let!(:issue) { create(:issue, project: project) }
- let!(:user) { create(:user) }
+ let!(:user) { create(:user, :no_super_sidebar) }
before do
project.add_maintainer(user)
diff --git a/spec/features/issues/user_creates_issue_spec.rb b/spec/features/issues/user_creates_issue_spec.rb
index 76b07d903bc..857cb1f39a2 100644
--- a/spec/features/issues/user_creates_issue_spec.rb
+++ b/spec/features/issues/user_creates_issue_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe "User creates issue", feature_category: :team_planning do
sign_out(:user)
end
- it "redirects to signin then back to new issue after signin", :js, quarantine: 'https://gitlab.com/gitlab-org/quality/engineering-productivity/master-broken-incidents/-/issues/1486' do
+ it "redirects to signin then back to new issue after signin", :js do
create(:issue, project: project)
visit project_issues_path(project)
diff --git a/spec/features/issues/user_sees_live_update_spec.rb b/spec/features/issues/user_sees_live_update_spec.rb
index 860603ad546..0822542ca02 100644
--- a/spec/features/issues/user_sees_live_update_spec.rb
+++ b/spec/features/issues/user_sees_live_update_spec.rb
@@ -19,34 +19,32 @@ RSpec.describe 'Issues > User sees live update', :js, feature_category: :team_pl
expect(page).to have_text("new title")
issue.update!(title: "updated title")
-
wait_for_requests
+
expect(page).to have_text("updated title")
end
end
describe 'confidential issue#show' do
- it 'shows confidential sibebar information as confidential and can be turned off', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/254644' do
+ it 'shows confidential sidebar information as confidential and can be turned off' do
issue = create(:issue, :confidential, project: project)
visit project_issue_path(project, issue)
- expect(page).to have_css('.issuable-note-warning')
- expect(find('.issuable-sidebar-item.confidentiality')).to have_css('.is-active')
- expect(find('.issuable-sidebar-item.confidentiality')).not_to have_css('.not-active')
-
- find('.confidential-edit').click
- expect(page).to have_css('.sidebar-item-warning-message')
+ expect(page).to have_text('This is a confidential issue. People without permission will never get a notification.')
- within('.sidebar-item-warning-message') do
- find('[data-testid="confidential-toggle"]').click
+ within '.block.confidentiality' do
+ click_button 'Edit'
end
- wait_for_requests
+ expect(page).to have_text('You are going to turn off the confidentiality. This means everyone will be able to see and leave a comment on this issue.')
+
+ click_button 'Turn off'
visit project_issue_path(project, issue)
- expect(page).not_to have_css('.is-active')
+ expect(page).not_to have_css('.gl-badge', text: 'Confidential')
+ expect(page).not_to have_text('This is a confidential issue. People without permission will never get a notification.')
end
end
end
diff --git a/spec/features/issues/user_uses_quick_actions_spec.rb b/spec/features/issues/user_uses_quick_actions_spec.rb
index dc149ccc698..c15716243ae 100644
--- a/spec/features/issues/user_uses_quick_actions_spec.rb
+++ b/spec/features/issues/user_uses_quick_actions_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe 'Issues > User uses quick actions', :js, feature_category: :team_
context "issuable common quick actions" do
let(:new_url_opts) { {} }
- let(:maintainer) { create(:user) }
+ let(:maintainer) { create(:user, :no_super_sidebar) }
let(:project) { create(:project, :public) }
let!(:label_bug) { create(:label, project: project, title: 'bug') }
let!(:label_feature) { create(:label, project: project, title: 'feature') }
@@ -26,7 +26,7 @@ RSpec.describe 'Issues > User uses quick actions', :js, feature_category: :team_
end
describe 'issue-only commands' do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
let(:project) { create(:project, :public, :repository) }
let(:issue) { create(:issue, project: project, due_date: Date.new(2016, 8, 28)) }
diff --git a/spec/features/jira_connect/branches_spec.rb b/spec/features/jira_connect/branches_spec.rb
index 25dc14a1dc9..ae1dd551c47 100644
--- a/spec/features/jira_connect/branches_spec.rb
+++ b/spec/features/jira_connect/branches_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
RSpec.describe 'Create GitLab branches from Jira', :js, feature_category: :integrations do
include ListboxHelpers
- let_it_be(:alice) { create(:user, name: 'Alice') }
- let_it_be(:bob) { create(:user, name: 'Bob') }
+ let_it_be(:alice) { create(:user, :no_super_sidebar, name: 'Alice') }
+ let_it_be(:bob) { create(:user, :no_super_sidebar, name: 'Bob') }
let_it_be(:project1) { create(:project, :repository, namespace: alice.namespace, title: 'foo') }
let_it_be(:project2) { create(:project, :repository, namespace: alice.namespace, title: 'bar') }
diff --git a/spec/features/labels_hierarchy_spec.rb b/spec/features/labels_hierarchy_spec.rb
index eb79d6e64f3..0cb712622f2 100644
--- a/spec/features/labels_hierarchy_spec.rb
+++ b/spec/features/labels_hierarchy_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe 'Labels Hierarchy', :js, feature_category: :team_planning do
include FilteredSearchHelpers
include ContentEditorHelpers
- let!(:user) { create(:user) }
+ let!(:user) { create(:user, :no_super_sidebar) }
let!(:grandparent) { create(:group) }
let!(:parent) { create(:group, parent: grandparent) }
let!(:child) { create(:group, parent: parent) }
diff --git a/spec/features/merge_request/user_closes_reopens_merge_request_state_spec.rb b/spec/features/merge_request/user_closes_reopens_merge_request_state_spec.rb
index 446f6a470de..fea4841c5ea 100644
--- a/spec/features/merge_request/user_closes_reopens_merge_request_state_spec.rb
+++ b/spec/features/merge_request/user_closes_reopens_merge_request_state_spec.rb
@@ -2,8 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User closes/reopens a merge request', :js, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/297500',
- feature_category: :code_review_workflow do
+RSpec.describe 'User closes/reopens a merge request', :js, feature_category: :code_review_workflow do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
@@ -13,89 +12,67 @@ RSpec.describe 'User closes/reopens a merge request', :js, quarantine: 'https://
end
describe 'when open' do
- context 'when clicking the top `Close merge request` link', :aggregate_failures do
- let(:open_merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let(:open_merge_request) { create(:merge_request, source_project: project, target_project: project) }
- before do
- visit merge_request_path(open_merge_request)
- end
+ before do
+ visit merge_request_path(open_merge_request)
+ end
- it 'can close a merge request' do
- expect(find('.status-box')).to have_content 'Open'
+ context 'when clicking the top `Close merge request` button', :aggregate_failures do
+ it 'closes the merge request' do
+ expect(page).to have_css('.gl-badge', text: 'Open')
within '.detail-page-header' do
- click_button 'Toggle dropdown'
- click_link 'Close merge request'
+ click_button 'Merge request actions'
+ click_button 'Close merge request'
end
- wait_for_requests
-
- expect(find('.status-box')).to have_content 'Closed'
+ expect(page).to have_css('.gl-badge', text: 'Closed')
end
end
context 'when clicking the bottom `Close merge request` button', :aggregate_failures do
- let(:open_merge_request) { create(:merge_request, source_project: project, target_project: project) }
-
- before do
- visit merge_request_path(open_merge_request)
- end
-
- it 'can close a merge request' do
- expect(find('.status-box')).to have_content 'Open'
+ it 'closes the merge request' do
+ expect(page).to have_css('.gl-badge', text: 'Open')
within '.timeline-content-form' do
click_button 'Close merge request'
-
- # Clicking the bottom `Close merge request` button does not yet update
- # the header status so for now we'll check that the button text changes
- expect(page).not_to have_button 'Close merge request'
- expect(page).to have_button 'Reopen merge request'
end
+
+ expect(page).to have_css('.gl-badge', text: 'Closed')
end
end
end
describe 'when closed' do
- context 'when clicking the top `Reopen merge request` link', :aggregate_failures do
- let(:closed_merge_request) { create(:merge_request, source_project: project, target_project: project, state: 'closed') }
+ let(:closed_merge_request) { create(:merge_request, source_project: project, target_project: project, state: 'closed') }
- before do
- visit merge_request_path(closed_merge_request)
- end
+ before do
+ visit merge_request_path(closed_merge_request)
+ end
- it 'can reopen a merge request' do
- expect(find('.status-box')).to have_content 'Closed'
+ context 'when clicking the top `Reopen merge request` button', :aggregate_failures do
+ it 'reopens the merge request' do
+ expect(page).to have_css('.gl-badge', text: 'Closed')
within '.detail-page-header' do
- click_button 'Toggle dropdown'
- click_link 'Reopen merge request'
+ click_button 'Merge request actions'
+ click_button 'Reopen merge request'
end
- wait_for_requests
-
- expect(find('.status-box')).to have_content 'Open'
+ expect(page).to have_css('.gl-badge', text: 'Open')
end
end
context 'when clicking the bottom `Reopen merge request` button', :aggregate_failures do
- let(:closed_merge_request) { create(:merge_request, source_project: project, target_project: project, state: 'closed') }
-
- before do
- visit merge_request_path(closed_merge_request)
- end
-
- it 'can reopen a merge request' do
- expect(find('.status-box')).to have_content 'Closed'
+ it 'reopens the merge request' do
+ expect(page).to have_css('.gl-badge', text: 'Closed')
within '.timeline-content-form' do
click_button 'Reopen merge request'
-
- # Clicking the bottom `Reopen merge request` button does not yet update
- # the header status so for now we'll check that the button text changes
- expect(page).not_to have_button 'Reopen merge request'
- expect(page).to have_button 'Close merge request'
end
+
+ expect(page).to have_css('.gl-badge', text: 'Open')
end
end
end
diff --git a/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb b/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb
index a96ec1f68aa..df39fe492c1 100644
--- a/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb
+++ b/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb
@@ -48,7 +48,8 @@ RSpec.describe 'Batch diffs', :js, feature_category: :code_review_workflow do
context 'when user visits a URL with a link directly to to a discussion' do
context 'which is in the first batched page of diffs' do
- it 'scrolls to the correct discussion' do
+ it 'scrolls to the correct discussion',
+ quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/410029' } do
page.within get_first_diff do
click_link('just now')
end
diff --git a/spec/features/merge_request/user_merges_merge_request_spec.rb b/spec/features/merge_request/user_merges_merge_request_spec.rb
index 402405e1fb6..aee42784d05 100644
--- a/spec/features/merge_request/user_merges_merge_request_spec.rb
+++ b/spec/features/merge_request/user_merges_merge_request_spec.rb
@@ -5,7 +5,7 @@ require "spec_helper"
RSpec.describe "User merges a merge request", :js, feature_category: :code_review_workflow do
include ContentEditorHelpers
- let(:user) { project.first_owner }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
before do
sign_in(user)
@@ -24,7 +24,7 @@ RSpec.describe "User merges a merge request", :js, feature_category: :code_revie
end
context 'sidebar merge requests counter' do
- let(:project) { create(:project, :public, :repository) }
+ let_it_be(:project) { create(:project, :public, :repository, namespace: user.namespace) }
let!(:merge_request) { create(:merge_request, source_project: project) }
it 'decrements the open MR count', :sidekiq_inline do
diff --git a/spec/features/merge_request/user_opens_checkout_branch_modal_spec.rb b/spec/features/merge_request/user_opens_checkout_branch_modal_spec.rb
index 63f03ae64e0..c12816b6521 100644
--- a/spec/features/merge_request/user_opens_checkout_branch_modal_spec.rb
+++ b/spec/features/merge_request/user_opens_checkout_branch_modal_spec.rb
@@ -6,17 +6,16 @@ RSpec.describe 'Merge request > User opens checkout branch modal', :js, feature_
include ProjectForksHelper
include CookieHelper
- let(:project) { create(:project, :public, :repository) }
- let(:user) { project.creator }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
+ let_it_be(:project) { create(:project, :public, :repository, namespace: user.namespace) }
before do
- project.add_maintainer(user)
sign_in(user)
set_cookie('new-actions-popover-viewed', 'true')
end
describe 'for fork' do
- let(:author) { create(:user) }
+ let(:author) { create(:user, :no_super_sidebar) }
let(:source_project) { fork_project(project, author, repository: true) }
let(:merge_request) do
diff --git a/spec/features/merge_request/user_sees_check_out_branch_modal_spec.rb b/spec/features/merge_request/user_sees_check_out_branch_modal_spec.rb
index 21c62b0d0d8..e55ecd2a531 100644
--- a/spec/features/merge_request/user_sees_check_out_branch_modal_spec.rb
+++ b/spec/features/merge_request/user_sees_check_out_branch_modal_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
RSpec.describe 'Merge request > User sees check out branch modal', :js, feature_category: :code_review_workflow do
include CookieHelper
- let(:project) { create(:project, :public, :repository) }
- let(:user) { project.creator }
- let(:merge_request) { create(:merge_request, source_project: project) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
+ let_it_be(:project) { create(:project, :public, :repository, creator: user) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
let(:modal_window_title) { 'Check out, review, and resolve locally' }
before do
diff --git a/spec/features/merge_request/user_sees_deployment_widget_spec.rb b/spec/features/merge_request/user_sees_deployment_widget_spec.rb
index d237faba663..dd1119c5648 100644
--- a/spec/features/merge_request/user_sees_deployment_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_deployment_widget_spec.rb
@@ -119,37 +119,6 @@ RSpec.describe 'Merge request > User sees deployment widget', :js, feature_categ
end
before do
- stub_feature_flags(review_apps_redeploy_mr_widget: false)
- build.success!
- deployment.update!(on_stop: manual.name)
- visit project_merge_request_path(project, merge_request)
- wait_for_requests
- end
-
- it 'does start build when stop button clicked' do
- accept_gl_confirm(button_text: 'Stop environment') do
- find('.js-stop-env').click
- end
-
- expect(page).to have_content('close_app')
- end
-
- context 'for reporter' do
- let(:role) { :reporter }
-
- it 'does not show stop button' do
- expect(page).not_to have_selector('.js-stop-env')
- end
- end
- end
-
- context 'with stop action with the review_apps_redeploy_mr_widget feature flag turned on' do
- let(:manual) do
- create(:ci_build, :manual, pipeline: pipeline, name: 'close_app', environment: environment.name)
- end
-
- before do
- stub_feature_flags(review_apps_redeploy_mr_widget: true)
build.success!
deployment.update!(on_stop: manual.name)
visit project_merge_request_path(project, merge_request)
@@ -173,9 +142,8 @@ RSpec.describe 'Merge request > User sees deployment widget', :js, feature_categ
end
end
- context 'with redeploy action and with the review_apps_redeploy_mr_widget feature flag turned on' do
+ context 'with redeploy action' do
before do
- stub_feature_flags(review_apps_redeploy_mr_widget: true)
build.success!
environment.update!(state: 'stopped')
visit project_merge_request_path(project, merge_request)
diff --git a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
index add8e9f30de..e052d06c158 100644
--- a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
@@ -48,60 +48,29 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
end
# rubocop:enable RSpec/AvoidConditionalStatements
- context 'when a user created a merge request in the parent project' do
- let!(:merge_request) do
- create(
- :merge_request,
- source_project: project,
- target_project: project,
- source_branch: 'feature',
- target_branch: 'master'
- )
- end
-
- let!(:push_pipeline) do
- Ci::CreatePipelineService.new(project, user, ref: 'feature')
- .execute(:push)
- .payload
- end
-
- let!(:detached_merge_request_pipeline) do
- Ci::CreatePipelineService.new(project, user, ref: 'feature')
- .execute(:merge_request_event, merge_request: merge_request)
- .payload
- end
-
+ context 'with feature flag `mr_pipelines_graphql` turned off' do
before do
- visit project_merge_request_path(project, merge_request)
-
- page.within('.merge-request-tabs') do
- click_link('Pipelines')
- end
- end
-
- it 'sees branch pipelines and detached merge request pipelines in correct order' do
- page.within('.ci-table') do
- expect(page).to have_selector('[data-testid="ci-badge-created"]', count: 2)
- expect(first('[data-testid="pipeline-url-link"]')).to have_content("##{detached_merge_request_pipeline.id}")
- end
+ stub_feature_flags(mr_pipelines_graphql: false)
end
- it 'sees the latest detached merge request pipeline as the head pipeline', :sidekiq_might_not_need_inline do
- click_link "Overview"
-
- page.within('.ci-widget-content') do
- expect(page).to have_content("##{detached_merge_request_pipeline.id}")
+ context 'when a user created a merge request in the parent project' do
+ let!(:merge_request) do
+ create(
+ :merge_request,
+ source_project: project,
+ target_project: project,
+ source_branch: 'feature',
+ target_branch: 'master'
+ )
end
- end
- context 'when a user updated a merge request in the parent project', :sidekiq_might_not_need_inline do
- let!(:push_pipeline_2) do
+ let!(:push_pipeline) do
Ci::CreatePipelineService.new(project, user, ref: 'feature')
.execute(:push)
.payload
end
- let!(:detached_merge_request_pipeline_2) do
+ let!(:detached_merge_request_pipeline) do
Ci::CreatePipelineService.new(project, user, ref: 'feature')
.execute(:merge_request_event, merge_request: merge_request)
.payload
@@ -117,192 +86,184 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'sees branch pipelines and detached merge request pipelines in correct order' do
page.within('.ci-table') do
- expect(page).to have_selector('[data-testid="ci-badge-pending"]', count: 4)
-
- expect(all('[data-testid="pipeline-url-link"]')[0])
- .to have_content("##{detached_merge_request_pipeline_2.id}")
-
- expect(all('[data-testid="pipeline-url-link"]')[1])
- .to have_content("##{detached_merge_request_pipeline.id}")
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'created', count: 2)
+ expect(first('[data-testid="pipeline-url-link"]')).to have_content("##{detached_merge_request_pipeline.id}")
+ end
+ end
- expect(all('[data-testid="pipeline-url-link"]')[2])
- .to have_content("##{push_pipeline_2.id}")
+ it 'sees the latest detached merge request pipeline as the head pipeline', :sidekiq_might_not_need_inline do
+ click_link "Overview"
- expect(all('[data-testid="pipeline-url-link"]')[3])
- .to have_content("##{push_pipeline.id}")
+ page.within('.ci-widget-content') do
+ expect(page).to have_content("##{detached_merge_request_pipeline.id}")
end
end
- it 'sees detached tag for detached merge request pipelines' do
- page.within('.ci-table') do
- expect(all('.pipeline-tags')[0])
- .to have_content(expected_detached_mr_tag)
+ context 'when a user updated a merge request in the parent project', :sidekiq_might_not_need_inline do
+ let!(:push_pipeline_2) do
+ Ci::CreatePipelineService.new(project, user, ref: 'feature')
+ .execute(:push)
+ .payload
+ end
- expect(all('.pipeline-tags')[1])
- .to have_content(expected_detached_mr_tag)
+ let!(:detached_merge_request_pipeline_2) do
+ Ci::CreatePipelineService.new(project, user, ref: 'feature')
+ .execute(:merge_request_event, merge_request: merge_request)
+ .payload
+ end
- expect(all('.pipeline-tags')[2])
- .not_to have_content(expected_detached_mr_tag)
+ before do
+ visit project_merge_request_path(project, merge_request)
- expect(all('.pipeline-tags')[3])
- .not_to have_content(expected_detached_mr_tag)
+ page.within('.merge-request-tabs') do
+ click_link('Pipelines')
+ end
end
- end
- it 'sees the latest detached merge request pipeline as the head pipeline' do
- click_link 'Overview'
+ it 'sees branch pipelines and detached merge request pipelines in correct order' do
+ page.within('.ci-table') do
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'pending', count: 4)
- page.within('.ci-widget-content') do
- expect(page).to have_content("##{detached_merge_request_pipeline_2.id}")
+ expect(all('[data-testid="pipeline-url-link"]')[0])
+ .to have_content("##{detached_merge_request_pipeline_2.id}")
+
+ expect(all('[data-testid="pipeline-url-link"]')[1])
+ .to have_content("##{detached_merge_request_pipeline.id}")
+
+ expect(all('[data-testid="pipeline-url-link"]')[2])
+ .to have_content("##{push_pipeline_2.id}")
+
+ expect(all('[data-testid="pipeline-url-link"]')[3])
+ .to have_content("##{push_pipeline.id}")
+ end
end
- end
- end
- context 'when a user created a merge request in the parent project' do
- before do
- visit project_merge_request_path(project, merge_request)
+ it 'sees detached tag for detached merge request pipelines' do
+ page.within('.ci-table') do
+ expect(all('.pipeline-tags')[0])
+ .to have_content(expected_detached_mr_tag)
- page.within('.merge-request-tabs') do
- click_link('Pipelines')
+ expect(all('.pipeline-tags')[1])
+ .to have_content(expected_detached_mr_tag)
+
+ expect(all('.pipeline-tags')[2])
+ .not_to have_content(expected_detached_mr_tag)
+
+ expect(all('.pipeline-tags')[3])
+ .not_to have_content(expected_detached_mr_tag)
+ end
end
- end
- context 'when a user merges a merge request in the parent project', :sidekiq_might_not_need_inline do
- before do
+ it 'sees the latest detached merge request pipeline as the head pipeline' do
click_link 'Overview'
- click_button 'Set to auto-merge'
- wait_for_requests
+ page.within('.ci-widget-content') do
+ expect(page).to have_content("##{detached_merge_request_pipeline_2.id}")
+ end
end
+ end
- context 'when detached merge request pipeline is pending' do
- it 'waits the head pipeline' do
- expect(page).to have_content mr_widget_title
- expect(page).to have_button('Cancel auto-merge')
+ context 'when a user created a merge request in the parent project' do
+ before do
+ visit project_merge_request_path(project, merge_request)
+
+ page.within('.merge-request-tabs') do
+ click_link('Pipelines')
end
end
- context 'when branch pipeline succeeds' do
+ context 'when a user merges a merge request in the parent project', :sidekiq_might_not_need_inline do
before do
click_link 'Overview'
- push_pipeline.reload.succeed!
+ click_button 'Set to auto-merge'
wait_for_requests
end
- it 'waits the head pipeline' do
- expect(page).to have_content mr_widget_title
- expect(page).to have_button('Cancel auto-merge')
+ context 'when detached merge request pipeline is pending' do
+ it 'waits the head pipeline' do
+ expect(page).to have_content mr_widget_title
+ expect(page).to have_button('Cancel auto-merge')
+ end
end
- end
- end
- end
- context 'when there are no `merge_requests` keyword in .gitlab-ci.yml' do
- let(:config) do
- {
- build: {
- script: 'build'
- },
- test: {
- script: 'test'
- },
- deploy: {
- script: 'deploy'
- }
- }
- end
-
- it 'sees a branch pipeline in pipeline tab' do
- page.within('.ci-table') do
- expect(page).to have_selector('[data-testid="ci-badge-created"]', count: 1)
- expect(first('[data-testid="pipeline-url-link"]')).to have_content("##{push_pipeline.id}")
- end
- end
+ context 'when branch pipeline succeeds' do
+ before do
+ click_link 'Overview'
+ push_pipeline.reload.succeed!
- it 'sees the latest branch pipeline as the head pipeline', :sidekiq_might_not_need_inline do
- click_link 'Overview'
+ wait_for_requests
+ end
- page.within('.ci-widget-content') do
- expect(page).to have_content("##{push_pipeline.id}")
+ it 'waits the head pipeline' do
+ expect(page).to have_content mr_widget_title
+ expect(page).to have_button('Cancel auto-merge')
+ end
+ end
end
end
- end
- end
-
- context 'when a user created a merge request from a forked project to the parent project', :sidekiq_might_not_need_inline do
- let(:merge_request) do
- create(
- :merge_request,
- source_project: forked_project,
- target_project: project,
- source_branch: 'feature',
- target_branch: 'master'
- )
- end
-
- let!(:push_pipeline) do
- Ci::CreatePipelineService.new(forked_project, user2, ref: 'feature')
- .execute(:push)
- .payload
- end
-
- let!(:detached_merge_request_pipeline) do
- Ci::CreatePipelineService.new(forked_project, user2, ref: 'feature')
- .execute(:merge_request_event, merge_request: merge_request)
- .payload
- end
- let(:forked_project) { fork_project(project, user2, repository: true) }
- let(:user2) { create(:user) }
-
- before do
- forked_project.add_maintainer(user2)
-
- stub_feature_flags(auto_merge_labels_mr_widget: false)
+ context 'when there are no `merge_requests` keyword in .gitlab-ci.yml' do
+ let(:config) do
+ {
+ build: {
+ script: 'build'
+ },
+ test: {
+ script: 'test'
+ },
+ deploy: {
+ script: 'deploy'
+ }
+ }
+ end
- visit project_merge_request_path(project, merge_request)
+ it 'sees a branch pipeline in pipeline tab' do
+ page.within('.ci-table') do
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'created', count: 1)
+ expect(first('[data-testid="pipeline-url-link"]')).to have_content("##{push_pipeline.id}")
+ end
+ end
- page.within('.merge-request-tabs') do
- click_link('Pipelines')
- end
- end
+ it 'sees the latest branch pipeline as the head pipeline', :sidekiq_might_not_need_inline do
+ click_link 'Overview'
- it 'sees branch pipelines and detached merge request pipelines in correct order' do
- page.within('.ci-table') do
- expect(page).to have_selector('[data-testid="ci-badge-pending"]', count: 2)
- expect(first('[data-testid="pipeline-url-link"]')).to have_content("##{detached_merge_request_pipeline.id}")
+ page.within('.ci-widget-content') do
+ expect(page).to have_content("##{push_pipeline.id}")
+ end
+ end
end
end
- it 'sees the latest detached merge request pipeline as the head pipeline' do
- click_link "Overview"
-
- page.within('.ci-widget-content') do
- expect(page).to have_content("##{detached_merge_request_pipeline.id}")
+ context 'when a user created a merge request from a forked project to the parent project', :sidekiq_might_not_need_inline do
+ let(:merge_request) do
+ create(
+ :merge_request,
+ source_project: forked_project,
+ target_project: project,
+ source_branch: 'feature',
+ target_branch: 'master'
+ )
end
- end
- it 'sees pipeline list in forked project' do
- visit project_pipelines_path(forked_project)
-
- expect(page).to have_selector('[data-testid="ci-badge-pending"]', count: 2)
- end
-
- context 'when a user updated a merge request from a forked project to the parent project' do
- let!(:push_pipeline_2) do
+ let!(:push_pipeline) do
Ci::CreatePipelineService.new(forked_project, user2, ref: 'feature')
.execute(:push)
.payload
end
- let!(:detached_merge_request_pipeline_2) do
+ let!(:detached_merge_request_pipeline) do
Ci::CreatePipelineService.new(forked_project, user2, ref: 'feature')
.execute(:merge_request_event, merge_request: merge_request)
.payload
end
+ let(:forked_project) { fork_project(project, user2, repository: true) }
+ let(:user2) { create(:user) }
+
before do
+ forked_project.add_maintainer(user2)
+
visit project_merge_request_path(project, merge_request)
page.within('.merge-request-tabs') do
@@ -312,35 +273,8 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'sees branch pipelines and detached merge request pipelines in correct order' do
page.within('.ci-table') do
- expect(page).to have_selector('[data-testid="ci-badge-pending"]', count: 4)
-
- expect(all('[data-testid="pipeline-url-link"]')[0])
- .to have_content("##{detached_merge_request_pipeline_2.id}")
-
- expect(all('[data-testid="pipeline-url-link"]')[1])
- .to have_content("##{detached_merge_request_pipeline.id}")
-
- expect(all('[data-testid="pipeline-url-link"]')[2])
- .to have_content("##{push_pipeline_2.id}")
-
- expect(all('[data-testid="pipeline-url-link"]')[3])
- .to have_content("##{push_pipeline.id}")
- end
- end
-
- it 'sees detached tag for detached merge request pipelines' do
- page.within('.ci-table') do
- expect(all('.pipeline-tags')[0])
- .to have_content(expected_detached_mr_tag)
-
- expect(all('.pipeline-tags')[1])
- .to have_content(expected_detached_mr_tag)
-
- expect(all('.pipeline-tags')[2])
- .not_to have_content(expected_detached_mr_tag)
-
- expect(all('.pipeline-tags')[3])
- .not_to have_content(expected_detached_mr_tag)
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'pending', count: 2)
+ expect(first('[data-testid="pipeline-url-link"]')).to have_content("##{detached_merge_request_pipeline.id}")
end
end
@@ -348,88 +282,158 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
click_link "Overview"
page.within('.ci-widget-content') do
- expect(page).to have_content("##{detached_merge_request_pipeline_2.id}")
+ expect(page).to have_content("##{detached_merge_request_pipeline.id}")
end
end
it 'sees pipeline list in forked project' do
visit project_pipelines_path(forked_project)
- expect(page).to have_selector('[data-testid="ci-badge-pending"]', count: 4)
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'pending', count: 2)
end
- end
- context 'when the latest pipeline is running in the parent project' do
- before do
- create(:ci_pipeline,
- source: :merge_request_event,
- project: project,
- ref: 'feature',
- sha: merge_request.diff_head_sha,
- user: user,
- merge_request: merge_request,
- status: :running)
- merge_request.update_head_pipeline
- end
+ context 'when a user updated a merge request from a forked project to the parent project' do
+ let!(:push_pipeline_2) do
+ Ci::CreatePipelineService.new(forked_project, user2, ref: 'feature')
+ .execute(:push)
+ .payload
+ end
+
+ let!(:detached_merge_request_pipeline_2) do
+ Ci::CreatePipelineService.new(forked_project, user2, ref: 'feature')
+ .execute(:merge_request_event, merge_request: merge_request)
+ .payload
+ end
- context 'when the previous pipeline failed in the fork project' do
before do
- detached_merge_request_pipeline.reload.drop!
+ visit project_merge_request_path(project, merge_request)
+
+ page.within('.merge-request-tabs') do
+ click_link('Pipelines')
+ end
end
- context 'when the parent project enables pipeline must succeed' do
- before do
- project.update!(only_allow_merge_if_pipeline_succeeds: true)
+ it 'sees branch pipelines and detached merge request pipelines in correct order' do
+ page.within('.ci-table') do
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'pending', count: 4)
+
+ expect(all('[data-testid="pipeline-url-link"]')[0])
+ .to have_content("##{detached_merge_request_pipeline_2.id}")
+
+ expect(all('[data-testid="pipeline-url-link"]')[1])
+ .to have_content("##{detached_merge_request_pipeline.id}")
+
+ expect(all('[data-testid="pipeline-url-link"]')[2])
+ .to have_content("##{push_pipeline_2.id}")
+
+ expect(all('[data-testid="pipeline-url-link"]')[3])
+ .to have_content("##{push_pipeline.id}")
end
+ end
+
+ it 'sees detached tag for detached merge request pipelines' do
+ page.within('.ci-table') do
+ expect(all('.pipeline-tags')[0])
+ .to have_content(expected_detached_mr_tag)
+
+ expect(all('.pipeline-tags')[1])
+ .to have_content(expected_detached_mr_tag)
- it 'shows Set to auto-merge button' do
- visit project_merge_request_path(project, merge_request)
+ expect(all('.pipeline-tags')[2])
+ .not_to have_content(expected_detached_mr_tag)
- expect(page).to have_button('Set to auto-merge')
+ expect(all('.pipeline-tags')[3])
+ .not_to have_content(expected_detached_mr_tag)
end
end
- end
- end
- context 'when a user merges a merge request from a forked project to the parent project' do
- before do
- click_link("Overview")
+ it 'sees the latest detached merge request pipeline as the head pipeline' do
+ click_link "Overview"
- click_button 'Set to auto-merge'
+ page.within('.ci-widget-content') do
+ expect(page).to have_content("##{detached_merge_request_pipeline_2.id}")
+ end
+ end
- wait_for_requests
- end
+ it 'sees pipeline list in forked project' do
+ visit project_pipelines_path(forked_project)
- context 'when detached merge request pipeline is pending' do
- it 'waits the head pipeline' do
- expect(page).to have_content mr_widget_title
- expect(page).to have_button('Cancel auto-merge')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'pending', count: 4)
end
end
- context 'when detached merge request pipeline succeeds' do
+ context 'when the latest pipeline is running in the parent project' do
before do
- detached_merge_request_pipeline.reload.succeed!
-
- refresh
+ create(:ci_pipeline,
+ source: :merge_request_event,
+ project: project,
+ ref: 'feature',
+ sha: merge_request.diff_head_sha,
+ user: user,
+ merge_request: merge_request,
+ status: :running)
+ merge_request.update_head_pipeline
end
- it 'merges the merge request' do
- expect(page).to have_content('Merged by')
- expect(page).to have_button('Revert')
+ context 'when the previous pipeline failed in the fork project' do
+ before do
+ detached_merge_request_pipeline.reload.drop!
+ end
+
+ context 'when the parent project enables pipeline must succeed' do
+ before do
+ project.update!(only_allow_merge_if_pipeline_succeeds: true)
+ end
+
+ it 'shows Set to auto-merge button' do
+ visit project_merge_request_path(project, merge_request)
+
+ expect(page).to have_button('Set to auto-merge')
+ end
+ end
end
end
- context 'when branch pipeline succeeds' do
+ context 'when a user merges a merge request from a forked project to the parent project' do
before do
- push_pipeline.reload.succeed!
+ click_link("Overview")
+
+ click_button 'Set to auto-merge'
wait_for_requests
end
- it 'waits the head pipeline' do
- expect(page).to have_content mr_widget_title
- expect(page).to have_button('Cancel auto-merge')
+ context 'when detached merge request pipeline is pending' do
+ it 'waits the head pipeline' do
+ expect(page).to have_content mr_widget_title
+ expect(page).to have_button('Cancel auto-merge')
+ end
+ end
+
+ context 'when detached merge request pipeline succeeds' do
+ before do
+ detached_merge_request_pipeline.reload.succeed!
+
+ wait_for_requests
+ end
+
+ it 'merges the merge request' do
+ expect(page).to have_content('Merged by')
+ expect(page).to have_button('Revert')
+ end
+ end
+
+ context 'when branch pipeline succeeds' do
+ before do
+ push_pipeline.reload.succeed!
+
+ wait_for_requests
+ end
+
+ it 'waits the head pipeline' do
+ expect(page).to have_content mr_widget_title
+ expect(page).to have_button('Cancel auto-merge')
+ end
end
end
end
diff --git a/spec/features/merge_request/user_sees_merge_widget_spec.rb b/spec/features/merge_request/user_sees_merge_widget_spec.rb
index 75df93d1a6c..1db09790e1c 100644
--- a/spec/features/merge_request/user_sees_merge_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_widget_spec.rb
@@ -646,7 +646,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js, feature_category:
click_expand_button
within('[data-testid="widget-extension-collapsed-section"]') do
- click_link 'addTest'
+ click_button 'View details'
end
end
@@ -693,7 +693,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js, feature_category:
click_expand_button
within('[data-testid="widget-extension-collapsed-section"]') do
- click_link 'Test#sum when a is 1 and b is 3 returns summary'
+ click_button 'View details'
end
end
@@ -741,7 +741,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js, feature_category:
click_expand_button
within('[data-testid="widget-extension-collapsed-section"]') do
- click_link 'addTest'
+ click_button 'View details'
end
end
@@ -788,7 +788,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js, feature_category:
click_expand_button
within('[data-testid="widget-extension-collapsed-section"]') do
- click_link 'addTest'
+ click_button 'View details'
end
end
@@ -834,7 +834,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js, feature_category:
click_expand_button
within('[data-testid="widget-extension-collapsed-section"]') do
- click_link 'Test#sum when a is 4 and b is 4 returns summary'
+ click_button 'View details'
end
end
@@ -881,7 +881,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js, feature_category:
click_expand_button
within('[data-testid="widget-extension-collapsed-section"]') do
- click_link 'addTest'
+ click_button 'View details'
end
end
@@ -958,4 +958,21 @@ RSpec.describe 'Merge request > User sees merge widget', :js, feature_category:
end
end
end
+
+ context 'views MR when pipeline has code coverage enabled' do
+ let!(:pipeline) { create(:ci_pipeline, status: 'success', project: project, ref: merge_request.source_branch) }
+ let!(:build) { create(:ci_build, :success, :coverage, pipeline: pipeline) }
+
+ before do
+ merge_request.update!(head_pipeline: pipeline)
+
+ visit project_merge_request_path(project, merge_request)
+ end
+
+ it 'shows the coverage' do
+ within '.ci-widget' do
+ expect(find_by_testid('pipeline-coverage')).to have_content('Test coverage 99.90% ')
+ end
+ end
+ end
end
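The merge-widget assertions above lean on data-testid hooks rather than link text. In plain Capybara that looks roughly like the following; find_by_testid is assumed here to be GitLab's wrapper around a [data-testid=...] lookup:

require 'spec_helper'

RSpec.describe 'Merge widget testid hooks (sketch)', :js do
  it 'reads widget values through data-testid attributes' do
    # visiting the merge request page is elided here

    # Plain Capybara: match on the attribute plus text and count.
    expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'pending', count: 2)

    # Assumed GitLab helper, roughly find('[data-testid="pipeline-coverage"]').
    within '.ci-widget' do
      expect(find_by_testid('pipeline-coverage')).to have_content('Test coverage 99.90%')
    end
  end
end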
diff --git a/spec/features/merge_request/user_sees_pipelines_from_forked_project_spec.rb b/spec/features/merge_request/user_sees_pipelines_from_forked_project_spec.rb
index 5801e8a1a11..77cd116ecc9 100644
--- a/spec/features/merge_request/user_sees_pipelines_from_forked_project_spec.rb
+++ b/spec/features/merge_request/user_sees_pipelines_from_forked_project_spec.rb
@@ -30,15 +30,33 @@ RSpec.describe 'Merge request > User sees pipelines from forked project', :js,
before do
create(:ci_build, pipeline: pipeline, name: 'rspec')
create(:ci_build, pipeline: pipeline, name: 'spinach')
-
sign_in(user)
- visit project_merge_request_path(target_project, merge_request)
end
- it 'user visits a pipelines page', :sidekiq_might_not_need_inline do
- page.within('.merge-request-tabs') { click_link 'Pipelines' }
+ context 'with feature flag `mr_pipelines_graphql` turned off' do
+ before do
+ stub_feature_flags(mr_pipelines_graphql: false)
+ visit project_merge_request_path(target_project, merge_request)
+ end
+
+ it 'user visits a pipelines page', :sidekiq_might_not_need_inline do
+ page.within('.merge-request-tabs') { click_link 'Pipelines' }
+
+ page.within('.ci-table') do
+ expect(page).to have_content(pipeline.id)
+ end
+ end
+ end
+
+ context 'with feature flag `mr_pipelines_graphql` turned on' do
+ before do
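+      # Enable the GraphQL-backed pipelines tab for this context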
+ stub_feature_flags(mr_pipelines_graphql: true)
+ visit project_merge_request_path(target_project, merge_request)
+ end
+
+ it 'user visits a pipelines page', :sidekiq_might_not_need_inline do
+ page.within('.merge-request-tabs') { click_link 'Pipelines' }
- page.within('.ci-table') do
expect(page).to have_content(pipeline.id)
end
end
diff --git a/spec/features/merge_request/user_sees_pipelines_spec.rb b/spec/features/merge_request/user_sees_pipelines_spec.rb
index 5ce919fe2e6..bb3890f5242 100644
--- a/spec/features/merge_request/user_sees_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_pipelines_spec.rb
@@ -3,285 +3,291 @@
require 'spec_helper'
RSpec.describe 'Merge request > User sees pipelines', :js, feature_category: :code_review_workflow do
- describe 'pipeline tab' do
- let(:merge_request) { create(:merge_request) }
- let(:project) { merge_request.target_project }
- let(:user) { project.creator }
-
+  context 'with feature flag `mr_pipelines_graphql` turned off' do
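+    # Every example in this context runs with the GraphQL pipelines tab disabled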
before do
- project.add_maintainer(user)
- sign_in(user)
+ stub_feature_flags(mr_pipelines_graphql: false)
end
- context 'with pipelines' do
- let!(:pipeline) do
- create(
- :ci_pipeline,
- :success,
- project: merge_request.source_project,
- ref: merge_request.source_branch,
- sha: merge_request.diff_head_sha
- )
- end
-
- let!(:manual_job) { create(:ci_build, :manual, name: 'job1', stage: 'deploy', pipeline: pipeline) }
-
- let!(:job) { create(:ci_build, :success, name: 'job2', stage: 'test', pipeline: pipeline) }
+ describe 'pipeline tab' do
+ let(:merge_request) { create(:merge_request) }
+ let(:project) { merge_request.target_project }
+ let(:user) { project.creator }
before do
- merge_request.update_attribute(:head_pipeline_id, pipeline.id)
+ project.add_maintainer(user)
+ sign_in(user)
end
- it 'pipelines table displays correctly' do
- visit project_merge_request_path(project, merge_request)
-
- expect(page.find('.ci-widget')).to have_content('passed')
-
- page.within('.merge-request-tabs') do
- click_link('Pipelines')
+ context 'with pipelines' do
+ let!(:pipeline) do
+ create(
+ :ci_pipeline,
+ :success,
+ project: merge_request.source_project,
+ ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha
+ )
end
- wait_for_requests
+ let!(:manual_job) { create(:ci_build, :manual, name: 'job1', stage: 'deploy', pipeline: pipeline) }
- page.within(find('[data-testid="pipeline-table-row"]', match: :first)) do
- expect(page).to have_selector('[data-testid="ci-badge-passed"]')
- expect(page).to have_content(pipeline.id)
- expect(page).to have_content('API')
- expect(page).to have_css('[data-testid="pipeline-mini-graph"]')
- expect(page).to have_css('[data-testid="pipelines-manual-actions-dropdown"]')
- expect(page).to have_css('[data-testid="pipeline-multi-actions-dropdown"]')
- end
- end
+ let!(:job) { create(:ci_build, :success, name: 'job2', stage: 'test', pipeline: pipeline) }
- context 'with a detached merge request pipeline' do
- let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
+ before do
+ merge_request.update_attribute(:head_pipeline_id, pipeline.id)
+ end
- it 'displays the "Run pipeline" button' do
+ it 'pipelines table displays correctly' do
visit project_merge_request_path(project, merge_request)
+ expect(page.find('.ci-widget')).to have_content('passed')
+
page.within('.merge-request-tabs') do
click_link('Pipelines')
end
wait_for_requests
- expect(page.find('[data-testid="run_pipeline_button"]')).to have_text('Run pipeline')
+ page.within(find('[data-testid="pipeline-table-row"]', match: :first)) do
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'passed')
+ expect(page).to have_content(pipeline.id)
+ expect(page).to have_content('API')
+ expect(page).to have_css('[data-testid="pipeline-mini-graph"]')
+ expect(page).to have_css('[data-testid="pipelines-manual-actions-dropdown"]')
+ expect(page).to have_css('[data-testid="pipeline-multi-actions-dropdown"]')
+ end
end
- end
- context 'with a merged results pipeline' do
- let(:merge_request) { create(:merge_request, :with_merge_request_pipeline) }
+ context 'with a detached merge request pipeline' do
+ let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
- it 'displays the "Run pipeline" button' do
- visit project_merge_request_path(project, merge_request)
+ it 'displays the "Run pipeline" button' do
+ visit project_merge_request_path(project, merge_request)
- page.within('.merge-request-tabs') do
- click_link('Pipelines')
- end
+ page.within('.merge-request-tabs') do
+ click_link('Pipelines')
+ end
- wait_for_requests
+ wait_for_requests
- expect(page.find('[data-testid="run_pipeline_button"]')).to have_text('Run pipeline')
+ expect(page.find('[data-testid="run_pipeline_button"]')).to have_text('Run pipeline')
+ end
end
- end
- end
- context 'without pipelines' do
- before do
- visit project_merge_request_path(project, merge_request)
- end
+ context 'with a merged results pipeline' do
+ let(:merge_request) { create(:merge_request, :with_merge_request_pipeline) }
+
+ it 'displays the "Run pipeline" button' do
+ visit project_merge_request_path(project, merge_request)
+
+ page.within('.merge-request-tabs') do
+ click_link('Pipelines')
+ end
+
+ wait_for_requests
- it 'user visits merge request page' do
- page.within('.merge-request-tabs') do
- expect(page).to have_link('Pipelines')
+ expect(page.find('[data-testid="run_pipeline_button"]')).to have_text('Run pipeline')
+ end
end
end
- it 'shows empty state with run pipeline button' do
- page.within('.merge-request-tabs') do
- click_link('Pipelines')
+ context 'without pipelines' do
+ before do
+ visit project_merge_request_path(project, merge_request)
end
- expect(page).to have_content('There are currently no pipelines.')
- expect(page.find('[data-testid="run_pipeline_button"]')).to have_text('Run pipeline')
- end
- end
- end
+ it 'user visits merge request page' do
+ page.within('.merge-request-tabs') do
+ expect(page).to have_link('Pipelines')
+ end
+ end
- describe 'fork MRs in parent project', :sidekiq_inline do
- include ProjectForksHelper
-
- let_it_be(:parent_project) { create(:project, :public, :repository) }
- let_it_be(:forked_project) { fork_project(parent_project, developer_in_fork, repository: true, target_project: create(:project, :public, :repository)) }
- let_it_be(:developer_in_parent) { create(:user) }
- let_it_be(:developer_in_fork) { create(:user) }
- let_it_be(:reporter_in_parent_and_developer_in_fork) { create(:user) }
-
- let(:merge_request) do
- create(
- :merge_request,
- :with_detached_merge_request_pipeline,
- source_project: forked_project,
- source_branch: 'feature',
- target_project: parent_project,
- target_branch: 'master'
- )
- end
+ it 'shows empty state with run pipeline button' do
+ page.within('.merge-request-tabs') do
+ click_link('Pipelines')
+ end
- let(:config) do
- { test: { script: 'test', rules: [{ if: '$CI_MERGE_REQUEST_ID' }] } }
+ expect(page).to have_content('There are currently no pipelines.')
+ expect(page.find('[data-testid="run_pipeline_button"]')).to have_text('Run pipeline')
+ end
+ end
end
- before_all do
- parent_project.add_developer(developer_in_parent)
- parent_project.add_reporter(reporter_in_parent_and_developer_in_fork)
- forked_project.add_developer(developer_in_fork)
- forked_project.add_developer(reporter_in_parent_and_developer_in_fork)
- end
+ describe 'fork MRs in parent project', :sidekiq_inline do
+ include ProjectForksHelper
- before do
- stub_ci_pipeline_yaml_file(YAML.dump(config))
- sign_in(actor)
- end
+ let_it_be(:parent_project) { create(:project, :public, :repository) }
+ let_it_be(:forked_project) { fork_project(parent_project, developer_in_fork, repository: true, target_project: create(:project, :public, :repository)) }
+ let_it_be(:developer_in_parent) { create(:user) }
+ let_it_be(:developer_in_fork) { create(:user) }
+ let_it_be(:reporter_in_parent_and_developer_in_fork) { create(:user) }
- after do
- parent_project.all_pipelines.delete_all
- forked_project.all_pipelines.delete_all
- end
+ let(:merge_request) do
+ create(
+ :merge_request,
+ :with_detached_merge_request_pipeline,
+ source_project: forked_project,
+ source_branch: 'feature',
+ target_project: parent_project,
+ target_branch: 'master'
+ )
+ end
- context 'when actor is a developer in parent project' do
- let(:actor) { developer_in_parent }
+ let(:config) do
+ { test: { script: 'test', rules: [{ if: '$CI_MERGE_REQUEST_ID' }] } }
+ end
- it 'creates a pipeline in the parent project when user proceeds with the warning' do
- visit project_merge_request_path(parent_project, merge_request)
+ before_all do
+ parent_project.add_developer(developer_in_parent)
+ parent_project.add_reporter(reporter_in_parent_and_developer_in_fork)
+ forked_project.add_developer(developer_in_fork)
+ forked_project.add_developer(reporter_in_parent_and_developer_in_fork)
+ end
- create_merge_request_pipeline
- act_on_security_warning(action: 'Run pipeline')
+ before do
+ stub_ci_pipeline_yaml_file(YAML.dump(config))
+ sign_in(actor)
+ end
- check_pipeline(expected_project: parent_project)
- check_head_pipeline(expected_project: parent_project)
+ after do
+ parent_project.all_pipelines.delete_all
+ forked_project.all_pipelines.delete_all
end
- it 'does not create a pipeline in the parent project when user cancels the action', :clean_gitlab_redis_cache, :clean_gitlab_redis_shared_state do
- visit project_merge_request_path(parent_project, merge_request)
+ context 'when actor is a developer in parent project' do
+ let(:actor) { developer_in_parent }
- create_merge_request_pipeline
- act_on_security_warning(action: 'Cancel')
+ it 'creates a pipeline in the parent project when user proceeds with the warning' do
+ visit project_merge_request_path(parent_project, merge_request)
- check_no_new_pipeline_created
- end
- end
+ create_merge_request_pipeline
+ act_on_security_warning(action: 'Run pipeline')
- context 'when actor is a developer in fork project' do
- let(:actor) { developer_in_fork }
+ check_pipeline(expected_project: parent_project)
+ check_head_pipeline(expected_project: parent_project)
+ end
- it 'creates a pipeline in the fork project' do
- visit project_merge_request_path(parent_project, merge_request)
+ it 'does not create a pipeline in the parent project when user cancels the action', :clean_gitlab_redis_cache, :clean_gitlab_redis_shared_state do
+ visit project_merge_request_path(parent_project, merge_request)
- create_merge_request_pipeline
+ create_merge_request_pipeline
+ act_on_security_warning(action: 'Cancel')
- check_pipeline(expected_project: forked_project)
- check_head_pipeline(expected_project: forked_project)
+ check_no_new_pipeline_created
+ end
end
- end
- context 'when actor is a reporter in parent project and a developer in fork project' do
- let(:actor) { reporter_in_parent_and_developer_in_fork }
+ context 'when actor is a developer in fork project' do
+ let(:actor) { developer_in_fork }
- it 'creates a pipeline in the fork project' do
- visit project_merge_request_path(parent_project, merge_request)
+ it 'creates a pipeline in the fork project' do
+ visit project_merge_request_path(parent_project, merge_request)
- create_merge_request_pipeline
+ create_merge_request_pipeline
- check_pipeline(expected_project: forked_project)
- check_head_pipeline(expected_project: forked_project)
+ check_pipeline(expected_project: forked_project)
+ check_head_pipeline(expected_project: forked_project)
+ end
end
- end
- def create_merge_request_pipeline
- page.within('.merge-request-tabs') { click_link('Pipelines') }
- click_on('Run pipeline')
- end
+ context 'when actor is a reporter in parent project and a developer in fork project' do
+ let(:actor) { reporter_in_parent_and_developer_in_fork }
- def check_pipeline(expected_project:)
- page.within('.ci-table') do
- expect(page).to have_selector('[data-testid="pipeline-table-row"]', count: 4)
+ it 'creates a pipeline in the fork project' do
+ visit project_merge_request_path(parent_project, merge_request)
- page.within(first('[data-testid="pipeline-table-row"]')) do
- page.within('.pipeline-tags') do
- expect(page.find('[data-testid="pipeline-url-link"]')[:href]).to include(expected_project.full_path)
- expect(page).to have_content('merge request')
- end
- page.within('.pipeline-triggerer') do
- expect(page).to have_link(href: user_path(actor))
+ create_merge_request_pipeline
+
+ check_pipeline(expected_project: forked_project)
+ check_head_pipeline(expected_project: forked_project)
+ end
+ end
+
+ def create_merge_request_pipeline
+ page.within('.merge-request-tabs') { click_link('Pipelines') }
+ click_on('Run pipeline')
+ end
+
+ def check_pipeline(expected_project:)
+ page.within('.ci-table') do
+ expect(page).to have_selector('[data-testid="pipeline-table-row"]', count: 4)
+
+ page.within(first('[data-testid="pipeline-table-row"]')) do
+ page.within('.pipeline-tags') do
+ expect(page.find('[data-testid="pipeline-url-link"]')[:href]).to include(expected_project.full_path)
+ expect(page).to have_content('merge request')
+ end
+ page.within('.pipeline-triggerer') do
+ expect(page).to have_link(href: user_path(actor))
+ end
end
end
end
- end
- def check_head_pipeline(expected_project:)
- page.within('.merge-request-tabs') { click_link('Overview') }
+ def check_head_pipeline(expected_project:)
+ page.within('.merge-request-tabs') { click_link('Overview') }
- page.within('.ci-widget-content') do
- expect(page.find('.pipeline-id')[:href]).to include(expected_project.full_path)
+ page.within('.ci-widget-content') do
+ expect(page.find('.pipeline-id')[:href]).to include(expected_project.full_path)
+ end
end
- end
- def act_on_security_warning(action:)
- page.within('#create-pipeline-for-fork-merge-request-modal') do
- expect(page).to have_content('Are you sure you want to run this pipeline?')
- click_button(action)
+ def act_on_security_warning(action:)
+ page.within('#create-pipeline-for-fork-merge-request-modal') do
+ expect(page).to have_content('Are you sure you want to run this pipeline?')
+ click_button(action)
+ end
end
- end
- def check_no_new_pipeline_created
- page.within('.ci-table') do
- expect(page).to have_selector('[data-testid="pipeline-table-row"]', count: 2)
+ def check_no_new_pipeline_created
+ page.within('.ci-table') do
+ expect(page).to have_selector('[data-testid="pipeline-table-row"]', count: 2)
+ end
end
end
- end
-
- describe 'race condition' do
- let(:project) { create(:project, :repository) }
- let(:user) { create(:user) }
- let(:build_push_data) { { ref: 'feature', checkout_sha: TestEnv::BRANCH_SHA['feature'] } }
- let(:merge_request_params) do
- { "source_branch" => "feature", "source_project_id" => project.id,
- "target_branch" => "master", "target_project_id" => project.id, "title" => "A" }
- end
+ describe 'race condition' do
+ let(:project) { create(:project, :repository) }
+ let(:user) { create(:user) }
+ let(:build_push_data) { { ref: 'feature', checkout_sha: TestEnv::BRANCH_SHA['feature'] } }
- before do
- project.add_maintainer(user)
- sign_in user
- end
+ let(:merge_request_params) do
+ { "source_branch" => "feature", "source_project_id" => project.id,
+ "target_branch" => "master", "target_project_id" => project.id, "title" => "A" }
+ end
- context 'when pipeline and merge request were created simultaneously', :delete do
before do
- stub_ci_pipeline_to_return_yaml_file
+ project.add_maintainer(user)
+ sign_in user
+ end
+
+ context 'when pipeline and merge request were created simultaneously', :delete do
+ before do
+ stub_ci_pipeline_to_return_yaml_file
- threads = []
+ threads = []
- threads << Thread.new do
- Sidekiq::Worker.skipping_transaction_check do
- @merge_request = MergeRequests::CreateService.new(project: project, current_user: user, params: merge_request_params).execute
+ threads << Thread.new do
+ Sidekiq::Worker.skipping_transaction_check do
+ @merge_request = MergeRequests::CreateService.new(project: project, current_user: user, params: merge_request_params).execute
+ end
end
- end
- threads << Thread.new do
- Sidekiq::Worker.skipping_transaction_check do
- @pipeline = Ci::CreatePipelineService.new(project, user, build_push_data).execute(:push).payload
+ threads << Thread.new do
+ Sidekiq::Worker.skipping_transaction_check do
+ @pipeline = Ci::CreatePipelineService.new(project, user, build_push_data).execute(:push).payload
+ end
end
- end
- threads.each { |thr| thr.join }
- end
+ threads.each { |thr| thr.join }
+ end
- it 'user sees pipeline in merge request widget', :sidekiq_might_not_need_inline do
- visit project_merge_request_path(project, @merge_request)
+ it 'user sees pipeline in merge request widget', :sidekiq_might_not_need_inline do
+ visit project_merge_request_path(project, @merge_request)
- expect(page.find(".ci-widget")).to have_content(TestEnv::BRANCH_SHA['feature'])
- expect(page.find(".ci-widget")).to have_content("##{@pipeline.id}")
+ expect(page.find(".ci-widget")).to have_content(TestEnv::BRANCH_SHA['feature'])
+ expect(page.find(".ci-widget")).to have_content("##{@pipeline.id}")
+ end
end
end
end
diff --git a/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb b/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb
index e3be99254dc..16578af238d 100644
--- a/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb
+++ b/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb
@@ -6,8 +6,8 @@ RSpec.describe 'Merge request > User selects branches for new MR', :js, feature_
include ListboxHelpers
include CookieHelper
- let(:project) { create(:project, :public, :repository) }
- let(:user) { project.creator }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
+ let_it_be(:project) { create(:project, :public, :repository, namespace: user.namespace) }
def select_source_branch(branch_name)
find('.js-source-branch', match: :first).click
@@ -16,7 +16,6 @@ RSpec.describe 'Merge request > User selects branches for new MR', :js, feature_
end
before do
- project.add_maintainer(user)
sign_in(user)
set_cookie('new-actions-popover-viewed', 'true')
end
diff --git a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb b/spec/features/merge_request/user_sets_to_auto_merge_spec.rb
index ebec8a6d2ea..4dc0c03aedc 100644
--- a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
+++ b/spec/features/merge_request/user_sets_to_auto_merge_spec.rb
@@ -2,7 +2,9 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User merges when pipeline succeeds', :js, feature_category: :code_review_workflow do
+RSpec.describe 'Merge request > User sets to auto-merge', :js, feature_category: :code_review_workflow do
+ include ContentEditorHelpers
+
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
let(:merge_request) do
@@ -32,22 +34,23 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js, featur
context 'when there is active pipeline for merge request' do
before do
create(:ci_build, pipeline: pipeline)
- stub_feature_flags(auto_merge_labels_mr_widget: true)
sign_in(user)
visit project_merge_request_path(project, merge_request)
end
- describe 'enabling Merge when pipeline succeeds' do
+ describe 'setting to auto-merge when pipeline succeeds' do
shared_examples 'Set to auto-merge activator' do
- it 'activates the Merge when pipeline succeeds feature', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/410055' do
+ it 'activates auto-merge feature', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/410055' do
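+        # Dismiss the rich text promo popover (if shown) before interacting with the merge widget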
+ close_rich_text_promo_popover_if_present
+ expect(page).to have_content 'Set to auto-merge'
click_button "Set to auto-merge"
+ wait_for_requests
expect(page).to have_content "Set by #{user.name} to be merged automatically when the pipeline succeeds"
expect(page).to have_content "Source branch will not be deleted"
expect(page).to have_selector ".js-cancel-auto-merge"
- visit project_merge_request_path(project, merge_request) # Needed to refresh the page
- expect(page).to have_content /enabled an automatic merge when the pipeline for \h{8} succeeds/i
+ expect(page).to have_content(/enabled an automatic merge when the pipeline for \h{8} succeeds/i)
end
end
@@ -57,6 +60,7 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js, featur
context 'when enabled after it was previously canceled' do
before do
+ close_rich_text_promo_popover_if_present
click_button "Set to auto-merge"
wait_for_requests
@@ -64,14 +68,12 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js, featur
click_button "Cancel auto-merge"
wait_for_requests
-
- expect(page).to have_content 'Set to auto-merge'
end
it_behaves_like 'Set to auto-merge activator'
end
- context 'when it was enabled and then canceled' do
+ context 'when it is enabled and then canceled' do
let(:merge_request) do
create(
:merge_request_with_diffs,
@@ -94,7 +96,7 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js, featur
end
end
- context 'when merge when pipeline succeeds is enabled' do
+ context 'when there is an active pipeline' do
let(:merge_request) do
create(
:merge_request_with_diffs,
@@ -112,12 +114,13 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js, featur
end
before do
- stub_feature_flags(auto_merge_labels_mr_widget: true)
sign_in user
visit project_merge_request_path(project, merge_request)
end
- it 'allows to cancel the automatic merge', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/410494' do
+ it 'allows to cancel the auto-merge', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/410055' do
+ close_rich_text_promo_popover_if_present
+
click_button "Cancel auto-merge"
expect(page).to have_button "Set to auto-merge"
@@ -128,22 +131,13 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js, featur
end
end
- context 'when pipeline is not active' do
- it 'does not allow to enable merge when pipeline succeeds' do
- stub_feature_flags(auto_merge_labels_mr_widget: false)
-
- visit project_merge_request_path(project, merge_request)
-
- expect(page).not_to have_link 'Merge when pipeline succeeds'
+ context 'when there is no active pipeline' do
+ before do
+ sign_in user
+ visit project_merge_request_path(project, merge_request.reload)
end
- end
-
- context 'when pipeline is not active and auto_merge_labels_mr_widget on' do
- it 'does not allow to enable merge when pipeline succeeds' do
- stub_feature_flags(auto_merge_labels_mr_widget: true)
-
- visit project_merge_request_path(project, merge_request)
+ it 'does not allow to set to auto-merge' do
expect(page).not_to have_link 'Set to auto-merge'
end
end
diff --git a/spec/features/merge_request/user_uses_quick_actions_spec.rb b/spec/features/merge_request/user_uses_quick_actions_spec.rb
index 1c63f5b56b0..b2cc25f1c34 100644
--- a/spec/features/merge_request/user_uses_quick_actions_spec.rb
+++ b/spec/features/merge_request/user_uses_quick_actions_spec.rb
@@ -11,15 +11,9 @@ RSpec.describe 'Merge request > User uses quick actions', :js, :use_clean_rails_
feature_category: :code_review_workflow do
include Features::NotesHelpers
- let(:project) { create(:project, :public, :repository) }
- let(:user) { project.creator }
- let(:guest) { create(:user) }
- let(:merge_request) { create(:merge_request, source_project: project) }
- let!(:milestone) { create(:milestone, project: project, title: 'ASAP') }
-
context "issuable common quick actions" do
let!(:new_url_opts) { { merge_request: { source_branch: 'feature', target_branch: 'master' } } }
- let(:maintainer) { create(:user) }
+ let(:maintainer) { create(:user, :no_super_sidebar) }
let(:project) { create(:project, :public, :repository) }
let!(:label_bug) { create(:label, project: project, title: 'bug') }
let!(:label_feature) { create(:label, project: project, title: 'feature') }
@@ -32,7 +26,8 @@ RSpec.describe 'Merge request > User uses quick actions', :js, :use_clean_rails_
end
describe 'merge-request-only commands' do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
+ let(:guest) { create(:user, :no_super_sidebar) }
let(:project) { create(:project, :public, :repository) }
let(:merge_request) { create(:merge_request, source_project: project) }
let!(:milestone) { create(:milestone, project: project, title: 'ASAP') }
diff --git a/spec/features/monitor_sidebar_link_spec.rb b/spec/features/monitor_sidebar_link_spec.rb
index 6e464cb8752..1d39f749ca7 100644
--- a/spec/features/monitor_sidebar_link_spec.rb
+++ b/spec/features/monitor_sidebar_link_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'Monitor dropdown sidebar', :aggregate_failures, feature_category: :shared do
let_it_be_with_reload(:project) { create(:project, :internal, :repository) }
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
let(:role) { nil }
diff --git a/spec/features/nav/pinned_nav_items_spec.rb b/spec/features/nav/pinned_nav_items_spec.rb
index cf53e0a322a..1a3ac973ed4 100644
--- a/spec/features/nav/pinned_nav_items_spec.rb
+++ b/spec/features/nav/pinned_nav_items_spec.rb
@@ -168,17 +168,19 @@ RSpec.describe 'Navigation menu item pinning', :js, feature_category: :navigatio
private
- def add_pin(menu_item_title)
- menu_item = find("[data-testid=\"nav-item-link\"]", text: menu_item_title)
- menu_item.hover
- menu_item.find("[data-testid=\"thumbtack-icon\"]").click
+ def add_pin(nav_item_title)
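+    # Pin controls are only revealed on hover, so hover the nav item before clicking its pin button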
+ nav_item = find("[data-testid=\"nav-item\"]", text: nav_item_title)
+ nav_item.hover
+ pin_button = nav_item.find("[data-testid=\"nav-item-pin\"]")
+ pin_button.click
wait_for_requests
end
- def remove_pin(menu_item_title)
- menu_item = find("[data-testid=\"nav-item-link\"]", text: menu_item_title)
- menu_item.hover
- menu_item.find("[data-testid=\"thumbtack-solid-icon\"]").click
+ def remove_pin(nav_item_title)
+ nav_item = find("[data-testid=\"nav-item\"]", text: nav_item_title)
+ nav_item.hover
+ unpin_button = nav_item.find("[data-testid=\"nav-item-unpin\"]")
+ unpin_button.click
wait_for_requests
end
diff --git a/spec/features/nav/top_nav_responsive_spec.rb b/spec/features/nav/top_nav_responsive_spec.rb
index ff8132dc087..2a07742c91e 100644
--- a/spec/features/nav/top_nav_responsive_spec.rb
+++ b/spec/features/nav/top_nav_responsive_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe 'top nav responsive', :js, feature_category: :navigation do
include MobileHelpers
include Features::InviteMembersModalHelpers
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
before do
sign_in(user)
diff --git a/spec/features/nav/top_nav_spec.rb b/spec/features/nav/top_nav_spec.rb
index ccbf4646273..bf91897eb26 100644
--- a/spec/features/nav/top_nav_spec.rb
+++ b/spec/features/nav/top_nav_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'top nav responsive', :js, feature_category: :navigation do
include Features::InviteMembersModalHelpers
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
before do
sign_in(user)
diff --git a/spec/features/oauth_login_spec.rb b/spec/features/oauth_login_spec.rb
index ca20a1cd81b..b65416ee618 100644
--- a/spec/features/oauth_login_spec.rb
+++ b/spec/features/oauth_login_spec.rb
@@ -136,7 +136,7 @@ RSpec.describe 'OAuth Login', :allow_forgery_protection, feature_category: :syst
# record as the host / port depends on whether or not the spec uses
# JS.
let(:application) do
- create(:oauth_application, scopes: 'api', redirect_uri: redirect_uri, confidential: false)
+ create(:oauth_application, scopes: 'api', redirect_uri: redirect_uri, confidential: true)
end
let(:params) do
diff --git a/spec/features/profiles/user_edit_profile_spec.rb b/spec/features/profiles/user_edit_profile_spec.rb
index a756c524cbb..697ad4c87f7 100644
--- a/spec/features/profiles/user_edit_profile_spec.rb
+++ b/spec/features/profiles/user_edit_profile_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'User edit profile', feature_category: :user_profile do
include Features::NotesHelpers
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
before do
stub_feature_flags(edit_user_profile_vue: false)
@@ -478,7 +478,7 @@ RSpec.describe 'User edit profile', feature_category: :user_profile do
end
context 'Remove status button' do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
before do
user.status = UserStatus.new(message: 'Eating bread', emoji: 'stuffed_flatbread')
diff --git a/spec/features/profiles/user_visits_notifications_tab_spec.rb b/spec/features/profiles/user_visits_notifications_tab_spec.rb
index 7d858e3c92c..3af8dadcde0 100644
--- a/spec/features/profiles/user_visits_notifications_tab_spec.rb
+++ b/spec/features/profiles/user_visits_notifications_tab_spec.rb
@@ -12,14 +12,6 @@ RSpec.describe 'User visits the notifications tab', :js, feature_category: :user
visit(profile_notifications_path)
end
- it 'turns on the receive product marketing emails setting' do
- expect(page).to have_content('Notifications')
-
- expect do
- check 'Receive product marketing emails'
- end.to change { user.reload.email_opted_in }.to(true)
- end
-
it 'changes the project notifications setting' do
expect(page).to have_content('Notifications')
diff --git a/spec/features/profiles/user_visits_profile_account_page_spec.rb b/spec/features/profiles/user_visits_profile_account_page_spec.rb
index 8ff9cbc242e..8569cefd1f4 100644
--- a/spec/features/profiles/user_visits_profile_account_page_spec.rb
+++ b/spec/features/profiles/user_visits_profile_account_page_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'User visits the profile account page', feature_category: :user_profile do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
before do
sign_in(user)
diff --git a/spec/features/profiles/user_visits_profile_authentication_log_spec.rb b/spec/features/profiles/user_visits_profile_authentication_log_spec.rb
index ac0ed91468c..f92b8e2e751 100644
--- a/spec/features/profiles/user_visits_profile_authentication_log_spec.rb
+++ b/spec/features/profiles/user_visits_profile_authentication_log_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'User visits the authentication log', feature_category: :user_profile do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
context 'when user signed in' do
before do
diff --git a/spec/features/profiles/user_visits_profile_preferences_page_spec.rb b/spec/features/profiles/user_visits_profile_preferences_page_spec.rb
index d690589b893..033711f699e 100644
--- a/spec/features/profiles/user_visits_profile_preferences_page_spec.rb
+++ b/spec/features/profiles/user_visits_profile_preferences_page_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'User visits the profile preferences page', :js, feature_category: :user_profile do
include ListboxHelpers
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
before do
sign_in(user)
diff --git a/spec/features/profiles/user_visits_profile_spec.rb b/spec/features/profiles/user_visits_profile_spec.rb
index 14fc6ed33b3..821c3d5ef2b 100644
--- a/spec/features/profiles/user_visits_profile_spec.rb
+++ b/spec/features/profiles/user_visits_profile_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'User visits their profile', feature_category: :user_profile do
- let_it_be_with_refind(:user) { create(:user) }
+ let_it_be_with_refind(:user) { create(:user, :no_super_sidebar) }
before do
stub_feature_flags(profile_tabs_vue: false)
diff --git a/spec/features/profiles/user_visits_profile_ssh_keys_page_spec.rb b/spec/features/profiles/user_visits_profile_ssh_keys_page_spec.rb
index 547e47ead77..728fe1a3172 100644
--- a/spec/features/profiles/user_visits_profile_ssh_keys_page_spec.rb
+++ b/spec/features/profiles/user_visits_profile_ssh_keys_page_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'User visits the profile SSH keys page', feature_category: :user_profile do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
before do
sign_in(user)
diff --git a/spec/features/projects/active_tabs_spec.rb b/spec/features/projects/active_tabs_spec.rb
index 594c2b442aa..973a1e76679 100644
--- a/spec/features/projects/active_tabs_spec.rb
+++ b/spec/features/projects/active_tabs_spec.rb
@@ -3,9 +3,8 @@
require 'spec_helper'
RSpec.describe 'Project active tab', feature_category: :groups_and_projects do
- let_it_be(:project) { create(:project, :repository, :with_namespace_settings) }
-
- let(:user) { project.first_owner }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
+ let_it_be(:project) { create(:project, :repository, :with_namespace_settings, namespace: user.namespace) }
before do
sign_in(user)
diff --git a/spec/features/projects/branches/user_creates_branch_spec.rb b/spec/features/projects/branches/user_creates_branch_spec.rb
index 8d636dacb75..eafb75d75ac 100644
--- a/spec/features/projects/branches/user_creates_branch_spec.rb
+++ b/spec/features/projects/branches/user_creates_branch_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe 'User creates branch', :js, feature_category: :groups_and_project
include Features::BranchesHelpers
let_it_be(:group) { create(:group, :public) }
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
shared_examples 'creates new branch' do
specify do
diff --git a/spec/features/projects/ci/editor_spec.rb b/spec/features/projects/ci/editor_spec.rb
index b09aa91f4ab..adaa5e48967 100644
--- a/spec/features/projects/ci/editor_spec.rb
+++ b/spec/features/projects/ci/editor_spec.rb
@@ -71,7 +71,7 @@ RSpec.describe 'Pipeline Editor', :js, feature_category: :pipeline_composition d
it 'renders the empty page', :aggregate_failures do
expect(page).to have_content 'Optimize your workflow with CI/CD Pipelines'
- expect(page).to have_selector '[data-testid="create_new_ci_button"]'
+ expect(page).to have_selector '[data-testid="create-new-ci-button"]'
end
context 'when clicking on the create new CI button' do
diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb
index eadcc0e62c4..b16f43a16b6 100644
--- a/spec/features/projects/clusters/gcp_spec.rb
+++ b/spec/features/projects/clusters/gcp_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe 'Gcp Cluster', :js, feature_category: :deployment_management do
include GoogleApi::CloudPlatformHelpers
let(:project) { create(:project) }
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
before do
project.add_maintainer(user)
diff --git a/spec/features/projects/clusters/user_spec.rb b/spec/features/projects/clusters/user_spec.rb
index 6da8eea687e..1393cc6db15 100644
--- a/spec/features/projects/clusters/user_spec.rb
+++ b/spec/features/projects/clusters/user_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe 'User Cluster', :js, feature_category: :deployment_management do
include GoogleApi::CloudPlatformHelpers
let(:project) { create(:project) }
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
before do
project.add_maintainer(user)
diff --git a/spec/features/projects/clusters_spec.rb b/spec/features/projects/clusters_spec.rb
index d40f929d0b2..e075cc86319 100644
--- a/spec/features/projects/clusters_spec.rb
+++ b/spec/features/projects/clusters_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe 'Clusters', :js, feature_category: :groups_and_projects do
include GoogleApi::CloudPlatformHelpers
let(:project) { create(:project) }
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
before do
project.add_maintainer(user)
diff --git a/spec/features/projects/commit/user_sees_pipelines_tab_spec.rb b/spec/features/projects/commit/user_sees_pipelines_tab_spec.rb
index e44364c7f2d..bc5d468c97a 100644
--- a/spec/features/projects/commit/user_sees_pipelines_tab_spec.rb
+++ b/spec/features/projects/commit/user_sees_pipelines_tab_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe 'Commit > Pipelines tab', :js, feature_category: :source_code_man
wait_for_requests
page.within('[data-testid="pipeline-table-row"]') do
- expect(page).to have_selector('[data-testid="ci-badge-passed"]')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'passed')
expect(page).to have_content(pipeline.id)
expect(page).to have_content('API')
expect(page).to have_css('[data-testid="pipeline-mini-graph"]')
diff --git a/spec/features/projects/confluence/user_views_confluence_page_spec.rb b/spec/features/projects/confluence/user_views_confluence_page_spec.rb
index c1ce6ea4536..216bea74c09 100644
--- a/spec/features/projects/confluence/user_views_confluence_page_spec.rb
+++ b/spec/features/projects/confluence/user_views_confluence_page_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'User views the Confluence page', feature_category: :integrations do
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
let(:project) { create(:project, :public) }
diff --git a/spec/features/projects/environments/environment_spec.rb b/spec/features/projects/environments/environment_spec.rb
index 11ea72b87a2..3abe3ce1396 100644
--- a/spec/features/projects/environments/environment_spec.rb
+++ b/spec/features/projects/environments/environment_spec.rb
@@ -10,24 +10,19 @@ RSpec.describe 'Environment', feature_category: :groups_and_projects do
before do
sign_in(user)
project.add_role(user, role)
- stub_feature_flags(environment_details_vue: false)
end
def auto_stop_button_selector
%q{button[title="Prevent environment from auto-stopping"]}
end
- describe 'environment details page vue' do
+ describe 'environment details page', :js do
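+    # The environment_details_vue flag is no longer stubbed here; this single :js describe covers the details page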
let_it_be(:environment) { create(:environment, project: project) }
let!(:permissions) {}
let!(:deployment) {}
let!(:action) {}
let!(:cluster) {}
- before do
- stub_feature_flags(environment_details_vue: true)
- end
-
context 'with auto-stop' do
let_it_be(:environment) { create(:environment, :will_auto_stop, name: 'staging', project: project) }
@@ -35,122 +30,16 @@ RSpec.describe 'Environment', feature_category: :groups_and_projects do
visit_environment(environment)
end
- it 'shows auto stop info', :js do
- expect(page).to have_content('Auto stops')
- end
-
- it 'shows auto stop button', :js do
- expect(page).to have_selector(auto_stop_button_selector)
- expect(page.find(auto_stop_button_selector).find(:xpath, '..')['action']).to have_content(cancel_auto_stop_project_environment_path(environment.project, environment))
- end
-
- it 'allows user to cancel auto stop', :js do
- page.find(auto_stop_button_selector).click
- wait_for_all_requests
- expect(page).to have_content('Auto stop successfully canceled.')
- expect(page).not_to have_selector(auto_stop_button_selector)
- end
- end
-
- context 'without deployments' do
- before do
- visit_environment(environment)
- end
-
- it 'does not show deployments', :js do
- expect(page).to have_content('You don\'t have any deployments right now.')
- end
- end
-
- context 'with deployments' do
- before do
- visit_environment(environment)
- end
-
- context 'when there is a successful deployment' do
- let(:pipeline) { create(:ci_pipeline, project: project) }
- let(:build) { create(:ci_build, :success, pipeline: pipeline) }
-
- let(:deployment) do
- create(:deployment, :success, environment: environment, deployable: build)
- end
-
- it 'does show deployments', :js do
- wait_for_requests
- expect(page).to have_link("#{build.name} (##{build.id})")
- end
- end
-
- context 'when there is a failed deployment' do
- let(:pipeline) { create(:ci_pipeline, project: project) }
- let(:build) { create(:ci_build, pipeline: pipeline) }
-
- let(:deployment) do
- create(:deployment, :failed, environment: environment, deployable: build)
- end
-
- it 'does show deployments', :js do
- wait_for_requests
- expect(page).to have_link("#{build.name} (##{build.id})")
- end
- end
-
- context 'with related deployable present' do
- let_it_be(:previous_pipeline) { create(:ci_pipeline, project: project) }
-
- let_it_be(:previous_build) do
- create(:ci_build, :success, pipeline: previous_pipeline, environment: environment.name)
- end
-
- let_it_be(:previous_deployment) do
- create(:deployment, :success, environment: environment, deployable: previous_build)
- end
-
- let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
- let_it_be(:build) { create(:ci_build, pipeline: pipeline, environment: environment.name) }
-
- let_it_be(:deployment) do
- create(:deployment, :success, environment: environment, deployable: build)
- end
-
- before do
- visit_environment(environment)
- end
-
- it 'shows deployment information and buttons', :js do
- wait_for_requests
- expect(page).to have_button('Re-deploy to environment')
- expect(page).to have_button('Rollback environment')
- expect(page).to have_link("#{build.name} (##{build.id})")
- end
- end
- end
- end
-
- describe 'environment details page' do
- let_it_be(:environment) { create(:environment, project: project) }
- let!(:permissions) {}
- let!(:deployment) {}
- let!(:action) {}
- let!(:cluster) {}
-
- context 'with auto-stop' do
- let!(:environment) { create(:environment, :will_auto_stop, name: 'staging', project: project) }
-
- before do
- visit_environment(environment)
- end
-
- it 'shows auto stop info', :js do
+ it 'shows auto stop info' do
expect(page).to have_content('Auto stops')
end
- it 'shows auto stop button', :js do
+ it 'shows auto stop button' do
expect(page).to have_selector(auto_stop_button_selector)
expect(page.find(auto_stop_button_selector).find(:xpath, '..')['action']).to have_content(cancel_auto_stop_project_environment_path(environment.project, environment))
end
- it 'allows user to cancel auto stop', :js do
+ it 'allows user to cancel auto stop' do
page.find(auto_stop_button_selector).click
wait_for_all_requests
expect(page).to have_content('Auto stop successfully canceled.')
@@ -208,10 +97,6 @@ RSpec.describe 'Environment', feature_category: :groups_and_projects do
it 'does show deployments' do
expect(page).to have_link("#{build.name} (##{build.id})")
end
-
- it 'shows a tooltip on the job name' do
- expect(page).to have_css("[title=\"#{build.name} (##{build.id})\"].has-tooltip")
- end
end
context 'when there is a failed deployment' do
@@ -227,26 +112,6 @@ RSpec.describe 'Environment', feature_category: :groups_and_projects do
end
end
- context 'with many deployments' do
- let(:pipeline) { create(:ci_pipeline, project: project) }
- let(:build) { create(:ci_build, pipeline: pipeline) }
-
- let!(:second) { create(:deployment, environment: environment, deployable: build, status: :success, finished_at: Time.current) }
- let!(:first) { create(:deployment, environment: environment, deployable: build, status: :running) }
- let!(:last) { create(:deployment, environment: environment, deployable: build, status: :success, finished_at: 2.days.ago) }
- let!(:third) { create(:deployment, environment: environment, deployable: build, status: :canceled, finished_at: 1.day.ago) }
-
- before do
- visit_environment(environment)
- end
-
- it 'shows all of them in ordered way' do
- ids = find_all('[data-testid="deployment-id"]').map { |e| e.text }
- expected_ordered_ids = [first, second, third, last].map { |d| "##{d.iid}" }
- expect(ids).to eq(expected_ordered_ids)
- end
- end
-
context 'with upcoming deployments' do
let(:pipeline) { create(:ci_pipeline, project: project) }
let(:build) { create(:ci_build, pipeline: pipeline) }
@@ -265,7 +130,7 @@ RSpec.describe 'Environment', feature_category: :groups_and_projects do
# See https://gitlab.com/gitlab-org/gitlab/-/issues/350618 for more information.
it 'shows upcoming deployments in unordered way' do
displayed_ids = find_all('[data-testid="deployment-id"]').map { |e| e.text }
- internal_ids = [runnind_deployment_1, runnind_deployment_2, success_without_finished_at].map { |d| "##{d.iid}" }
+ internal_ids = [runnind_deployment_1, runnind_deployment_2, success_without_finished_at].map { |d| d.iid.to_s }
expect(displayed_ids).to match_array(internal_ids)
end
end
@@ -309,20 +174,19 @@ RSpec.describe 'Environment', feature_category: :groups_and_projects do
end
it 'does show a play button' do
- expect(page).to have_link(action.name)
+ expect(page).to have_button(action.name, visible: :all)
end
- it 'does allow to play manual action', :js do
+ it 'does allow to play manual action' do
expect(action).to be_manual
- find('button.dropdown').click
+ click_button('Deploy to...')
- expect { click_link(action.name) }
+ expect { click_button(action.name) }
.not_to change { Ci::Pipeline.count }
wait_for_all_requests
- expect(page).to have_content(action.name)
expect(action.reload).to be_pending
end
end
@@ -347,38 +211,6 @@ RSpec.describe 'Environment', feature_category: :groups_and_projects do
end
end
- context 'with terminal' do
- context 'when user configured kubernetes from CI/CD > Clusters' do
- let!(:cluster) do
- create(:cluster, :project, :provided_by_gcp, projects: [project])
- end
-
- context 'for project maintainer' do
- let(:role) { :maintainer }
-
- context 'web terminal', :js do
- before do
- # Stub #terminals as it causes js-enabled feature specs to
- # render the page incorrectly
- #
- # In EE we have to stub EE::Environment since it overwrites
- # the "terminals" method.
- allow_next_instance_of(Gitlab.ee? ? EE::Environment : Environment) do |instance|
- allow(instance).to receive(:terminals) { nil }
- end
-
- visit terminal_project_environment_path(project, environment)
- end
-
- it 'displays a web terminal' do
- expect(page).to have_selector('#terminal')
- expect(page).to have_link(nil, href: environment.external_url)
- end
- end
- end
- end
- end
-
context 'when environment is available' do
context 'with stop action' do
let(:build) { create(:ci_build, :success, pipeline: pipeline, environment: environment.name) }
@@ -446,6 +278,8 @@ RSpec.describe 'Environment', feature_category: :groups_and_projects do
visit folder_project_environments_path(project, id: 'staging-1.0')
end
+ wait_for_requests
+
expect(reqs.first.status_code).to eq(200)
expect(page).to have_content('Environments / staging-1.0')
end
diff --git a/spec/features/projects/feature_flags/user_creates_feature_flag_spec.rb b/spec/features/projects/feature_flags/user_creates_feature_flag_spec.rb
index 4af5c91479a..127610cf4db 100644
--- a/spec/features/projects/feature_flags/user_creates_feature_flag_spec.rb
+++ b/spec/features/projects/feature_flags/user_creates_feature_flag_spec.rb
@@ -7,13 +7,14 @@ RSpec.describe 'User creates feature flag', :js do
let(:user) { create(:user) }
let(:project) { create(:project, namespace: user.namespace) }
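+  # A production environment that already exists, used by the 'existing environment' example below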
+ let!(:environment) { create(:environment, :production, project: project) }
before do
project.add_developer(user)
sign_in(user)
end
- it 'user creates a flag enabled for user ids' do
+ it 'user creates a flag enabled for user ids with existing environment' do
visit(new_project_feature_flag_path(project))
set_feature_flag_info('test_feature', 'Test feature')
within_strategy_row(1) do
@@ -29,6 +30,22 @@ RSpec.describe 'User creates feature flag', :js do
expect(page).to have_text('test_feature')
end
+ it 'user creates a flag enabled for user ids with non-existing environment' do
+ visit(new_project_feature_flag_path(project))
+ set_feature_flag_info('test_feature', 'Test feature')
+ within_strategy_row(1) do
+ select 'User IDs', from: 'Type'
+ fill_in 'User IDs', with: 'user1, user2'
+ environment_plus_button.click
+ environment_search_input.set('foo-bar')
+ environment_search_create_button.first.click
+ end
+ click_button 'Create feature flag'
+
+ expect_user_to_see_feature_flags_index_page
+ expect(page).to have_text('test_feature')
+ end
+
it 'user creates a flag with default environment scopes' do
visit(new_project_feature_flag_path(project))
set_feature_flag_info('test_flag', 'Test flag')
@@ -74,14 +91,18 @@ RSpec.describe 'User creates feature flag', :js do
end
def environment_plus_button
- find('.js-new-environments-dropdown')
+ find('[data-testid=new-environments-dropdown]')
end
def environment_search_input
- find('.js-new-environments-dropdown input')
+ find('[data-testid=new-environments-dropdown] input')
end
def environment_search_results
- all('.js-new-environments-dropdown button.dropdown-item')
+ all('[data-testid=new-environments-dropdown] li')
+ end
+
+ def environment_search_create_button
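+    # All buttons inside the new-environments dropdown; the first is clicked above to add a not-yet-existing scope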
+ all('[data-testid=new-environments-dropdown] button')
end
end
diff --git a/spec/features/projects/features_visibility_spec.rb b/spec/features/projects/features_visibility_spec.rb
index b798524b9c4..8f66b722ead 100644
--- a/spec/features/projects/features_visibility_spec.rb
+++ b/spec/features/projects/features_visibility_spec.rb
@@ -3,10 +3,10 @@
require 'spec_helper'
RSpec.describe 'Edit Project Settings', feature_category: :groups_and_projects do
- let(:member) { create(:user) }
+ let(:member) { create(:user, :no_super_sidebar) }
let!(:project) { create(:project, :public, :repository) }
let!(:issue) { create(:issue, project: project) }
- let(:non_member) { create(:user) }
+ let(:non_member) { create(:user, :no_super_sidebar) }
describe 'project features visibility selectors', :js do
before do
diff --git a/spec/features/projects/files/project_owner_creates_license_file_spec.rb b/spec/features/projects/files/project_owner_creates_license_file_spec.rb
index 95e96159744..595aad0144b 100644
--- a/spec/features/projects/files/project_owner_creates_license_file_spec.rb
+++ b/spec/features/projects/files/project_owner_creates_license_file_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe 'Projects > Files > Project owner creates a license file', :js, feature_category: :groups_and_projects do
- let(:project) { create(:project, :repository) }
- let(:project_maintainer) { project.first_owner }
+ let_it_be(:project_maintainer) { create(:user, :no_super_sidebar) }
+ let_it_be(:project) { create(:project, :repository, namespace: project_maintainer.namespace) }
before do
project.repository.delete_file(project_maintainer, 'LICENSE',
diff --git a/spec/features/projects/files/user_find_file_spec.rb b/spec/features/projects/files/user_find_file_spec.rb
index 5406726eb6e..005a870bea0 100644
--- a/spec/features/projects/files/user_find_file_spec.rb
+++ b/spec/features/projects/files/user_find_file_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'User find project file', feature_category: :groups_and_projects do
include ListboxHelpers
- let(:user) { create :user }
+ let(:user) { create :user, :no_super_sidebar }
let(:project) { create :project, :repository }
before do
diff --git a/spec/features/projects/files/user_searches_for_files_spec.rb b/spec/features/projects/files/user_searches_for_files_spec.rb
index 25456593fc4..627912df408 100644
--- a/spec/features/projects/files/user_searches_for_files_spec.rb
+++ b/spec/features/projects/files/user_searches_for_files_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe 'Projects > Files > User searches for files', feature_category: :groups_and_projects do
- let(:user) { project.first_owner }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
+ let_it_be(:project) { create(:project, :repository, namespace: user.namespace) }
before do
sign_in(user)
@@ -11,7 +12,7 @@ RSpec.describe 'Projects > Files > User searches for files', feature_category: :
describe 'project main screen' do
context 'when project is empty' do
- let(:project) { create(:project) }
+ let_it_be(:project) { create(:project, namespace: user.namespace) }
before do
visit project_path(project)
@@ -25,10 +26,7 @@ RSpec.describe 'Projects > Files > User searches for files', feature_category: :
end
context 'when project is not empty' do
- let(:project) { create(:project, :repository) }
-
before do
- project.add_developer(user)
visit project_path(project)
end
@@ -39,10 +37,7 @@ RSpec.describe 'Projects > Files > User searches for files', feature_category: :
end
describe 'project tree screen' do
- let(:project) { create(:project, :repository) }
-
before do
- project.add_developer(user)
visit project_tree_path(project, project.default_branch)
end
diff --git a/spec/features/projects/forks/fork_list_spec.rb b/spec/features/projects/forks/fork_list_spec.rb
index 966147637f5..86e4e03259e 100644
--- a/spec/features/projects/forks/fork_list_spec.rb
+++ b/spec/features/projects/forks/fork_list_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'listing forks of a project', feature_category: :groups_and_proje
let(:source) { create(:project, :public, :repository) }
let!(:fork) { fork_project(source, nil, repository: true) }
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
before do
source.add_maintainer(user)
diff --git a/spec/features/projects/graph_spec.rb b/spec/features/projects/graph_spec.rb
index 16a3686215f..9b0803e4b0c 100644
--- a/spec/features/projects/graph_spec.rb
+++ b/spec/features/projects/graph_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Project Graph', :js, feature_category: :groups_and_projects do
- let(:user) { create :user }
+ let(:user) { create(:user, :no_super_sidebar) }
let(:project) { create(:project, :repository, namespace: user.namespace) }
let(:branch_name) { 'master' }
diff --git a/spec/features/projects/import_export/export_file_spec.rb b/spec/features/projects/import_export/export_file_spec.rb
index ad2fccc14bf..bda45da0fa5 100644
--- a/spec/features/projects/import_export/export_file_spec.rb
+++ b/spec/features/projects/import_export/export_file_spec.rb
@@ -38,6 +38,10 @@ RSpec.describe 'Import/Export - project export integration test', :js, feature_c
context 'admin user' do
before do
sign_in(user)
+
+      # Now that we export projects in batches, we produce more queries than before,
+      # so the default threshold needs to be increased.
+ allow(Gitlab::QueryLimiting::Transaction).to receive(:threshold).and_return(200)
end
it 'exports a project successfully', :sidekiq_inline do
diff --git a/spec/features/projects/jobs/user_browses_jobs_spec.rb b/spec/features/projects/jobs/user_browses_jobs_spec.rb
index 77f95827d88..afcf0e660f7 100644
--- a/spec/features/projects/jobs/user_browses_jobs_spec.rb
+++ b/spec/features/projects/jobs/user_browses_jobs_spec.rb
@@ -72,7 +72,7 @@ RSpec.describe 'User browses jobs', feature_category: :groups_and_projects do
wait_for_requests
- expect(page).to have_selector('[data-testid="ci-badge-canceled"]')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'canceled')
expect(page).not_to have_selector('[data-testid="jobs-table-error-alert"]')
end
end
@@ -93,7 +93,7 @@ RSpec.describe 'User browses jobs', feature_category: :groups_and_projects do
wait_for_requests
- expect(page).to have_selector('[data-testid="ci-badge-pending"]')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'pending')
end
end
@@ -133,7 +133,7 @@ RSpec.describe 'User browses jobs', feature_category: :groups_and_projects do
wait_for_requests
- expect(page).to have_selector('[data-testid="ci-badge-pending"]')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'pending')
end
it 'unschedules a job successfully' do
@@ -141,7 +141,7 @@ RSpec.describe 'User browses jobs', feature_category: :groups_and_projects do
wait_for_requests
- expect(page).to have_selector('[data-testid="ci-badge-manual"]')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'manual')
end
end
diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb
index c203e644280..1bee4cc5081 100644
--- a/spec/features/projects/jobs_spec.rb
+++ b/spec/features/projects/jobs_spec.rb
@@ -66,7 +66,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state, feature_category: :grou
wait_for_requests
- expect(page).to have_css('[data-testid="ci-badge-passed"]', text: 'passed')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'passed')
end
it 'shows commit`s data', :js do
@@ -93,7 +93,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state, feature_category: :grou
visit project_job_path(project, job)
within '.js-pipeline-info' do
- expect(page).to have_content("Pipeline ##{pipeline.id} for #{pipeline.ref}")
+ expect(page).to have_content("Pipeline ##{pipeline.id} #{pipeline.status} for #{pipeline.ref}")
end
end
@@ -239,7 +239,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state, feature_category: :grou
href = new_project_issue_path(project, options)
- page.within('.build-sidebar') do
+ page.within('aside.right-sidebar') do
expect(find('[data-testid="job-new-issue"]')['href']).to include(href)
end
end
@@ -1051,7 +1051,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state, feature_category: :grou
it 'retries the job' do
find('[data-testid="retry-button-modal"]').click
- within '[data-testid="ci-header-content"]' do
+ within '[data-testid="job-header-content"]' do
expect(page).to have_content('pending')
end
end
diff --git a/spec/features/projects/labels/user_creates_labels_spec.rb b/spec/features/projects/labels/user_creates_labels_spec.rb
index 46729048fe7..6e52963bee2 100644
--- a/spec/features/projects/labels/user_creates_labels_spec.rb
+++ b/spec/features/projects/labels/user_creates_labels_spec.rb
@@ -63,6 +63,8 @@ RSpec.describe "User creates labels", feature_category: :team_planning do
end
end
end
+
+ it_behaves_like "lock_on_merge when creating labels"
end
context "in another project" do
diff --git a/spec/features/projects/labels/user_edits_labels_spec.rb b/spec/features/projects/labels/user_edits_labels_spec.rb
index bf1182cfddd..059edea2109 100644
--- a/spec/features/projects/labels/user_edits_labels_spec.rb
+++ b/spec/features/projects/labels/user_edits_labels_spec.rb
@@ -13,16 +13,16 @@ RSpec.describe "User edits labels", feature_category: :team_planning do
project.add_maintainer(user)
sign_in(user)
- visit(edit_project_label_path(project, label))
+ visit edit_project_label_path(project, label)
end
- it "updates label's title" do
- new_title = "fix"
+ it 'updates label with new title' do
+ new_title = 'fix'
- fill_in("Title", with: new_title)
- click_button("Save changes")
+ fill_in('Title', with: new_title)
+ click_button('Save changes')
- page.within(".other-labels .manage-labels-list") do
+ page.within('.other-labels .manage-labels-list') do
expect(page).to have_content(new_title).and have_no_content(label.title)
end
end
@@ -38,4 +38,17 @@ RSpec.describe "User edits labels", feature_category: :team_planning do
expect(page).to have_content("#{label.title} was removed").and have_no_content("#{label.title}</span>")
end
+
+ describe 'lock_on_merge' do
+ let_it_be_with_reload(:label_unlocked) { create(:label, project: project, lock_on_merge: false) }
+ let_it_be(:label_locked) { create(:label, project: project, lock_on_merge: true) }
+ let_it_be(:edit_label_path_unlocked) { edit_project_label_path(project, label_unlocked) }
+ let_it_be(:edit_label_path_locked) { edit_project_label_path(project, label_locked) }
+
+ before do
+ visit edit_label_path_unlocked
+ end
+
+ it_behaves_like 'lock_on_merge when editing labels'
+ end
end
diff --git a/spec/features/projects/members/manage_members_spec.rb b/spec/features/projects/members/manage_members_spec.rb
index 0e3ac5ff3ac..76b2a73e170 100644
--- a/spec/features/projects/members/manage_members_spec.rb
+++ b/spec/features/projects/members/manage_members_spec.rb
@@ -173,7 +173,7 @@ RSpec.describe 'Projects > Members > Manage members', :js, feature_category: :on
end
end
- it_behaves_like 'inviting members', 'project-members-page' do
+ it_behaves_like 'inviting members', 'project_members_page' do
let_it_be(:entity) { project }
let_it_be(:members_page_path) { project_project_members_path(entity) }
let_it_be(:subentity) { project }
diff --git a/spec/features/projects/members/user_requests_access_spec.rb b/spec/features/projects/members/user_requests_access_spec.rb
index 9af36b4b2a9..d1e58ba91f0 100644
--- a/spec/features/projects/members/user_requests_access_spec.rb
+++ b/spec/features/projects/members/user_requests_access_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
RSpec.describe 'Projects > Members > User requests access', :js, feature_category: :groups_and_projects do
include Spec::Support::Helpers::ModalHelpers
- let_it_be(:user) { create(:user) }
- let_it_be(:maintainer) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
+ let_it_be(:maintainer) { create(:user, :no_super_sidebar) }
let_it_be(:project) { create(:project, :public, :repository) }
let(:owner) { project.first_owner }
diff --git a/spec/features/projects/milestones/user_interacts_with_labels_spec.rb b/spec/features/projects/milestones/user_interacts_with_labels_spec.rb
index 36dfee7811d..3742c9f19d8 100644
--- a/spec/features/projects/milestones/user_interacts_with_labels_spec.rb
+++ b/spec/features/projects/milestones/user_interacts_with_labels_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'User interacts with labels', feature_category: :team_planning do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
let(:project) { create(:project, namespace: user.namespace) }
let(:milestone) { create(:milestone, project: project, title: 'v2.2', description: '# Description header') }
let(:issue1) { create(:issue, project: project, title: 'Bugfix1', milestone: milestone) }
diff --git a/spec/features/projects/navbar_spec.rb b/spec/features/projects/navbar_spec.rb
index b6645e9b710..e967c1be3bc 100644
--- a/spec/features/projects/navbar_spec.rb
+++ b/spec/features/projects/navbar_spec.rb
@@ -8,15 +8,13 @@ RSpec.describe 'Project navbar', :with_license, feature_category: :groups_and_pr
include_context 'project navbar structure'
- let_it_be(:project) { create(:project, :repository) }
-
- let(:user) { project.first_owner }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
+ let_it_be(:project) { create(:project, :repository, namespace: user.namespace) }
before do
sign_in(user)
stub_config(registry: { enabled: false })
- stub_feature_flags(harbor_registry_integration: false)
stub_feature_flags(ml_experiment_tracking: false)
insert_package_nav(_('Deployments'))
insert_infrastructure_registry_nav
@@ -88,8 +86,6 @@ RSpec.describe 'Project navbar', :with_license, feature_category: :groups_and_pr
let_it_be(:harbor_integration) { create(:harbor_integration, project: project) }
before do
- stub_feature_flags(harbor_registry_integration: true)
-
insert_harbor_registry_nav(_('Terraform modules'))
visit project_path(project)
diff --git a/spec/features/projects/new_project_spec.rb b/spec/features/projects/new_project_spec.rb
index 6e6d9ff4af9..926fea24e14 100644
--- a/spec/features/projects/new_project_spec.rb
+++ b/spec/features/projects/new_project_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe 'New project', :js, feature_category: :groups_and_projects do
end
context 'as a user' do
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
before do
sign_in(user)
@@ -76,7 +76,7 @@ RSpec.describe 'New project', :js, feature_category: :groups_and_projects do
end
context 'as an admin' do
- let(:user) { create(:admin) }
+ let(:user) { create(:admin, :no_super_sidebar) }
shared_examples '"New project" page' do
before do
@@ -566,14 +566,14 @@ RSpec.describe 'New project', :js, feature_category: :groups_and_projects do
let(:provider) { :bitbucket }
context 'as a user' do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
let(:oauth_config_instructions) { 'To enable importing projects from Bitbucket, ask your GitLab administrator to configure OAuth integration' }
it_behaves_like 'has instructions to enable OAuth'
end
context 'as an admin', :do_not_mock_admin_mode_setting do
- let(:user) { create(:admin) }
+ let(:user) { create(:admin, :no_super_sidebar) }
let(:oauth_config_instructions) { 'To enable importing projects from Bitbucket, as administrator you need to configure OAuth integration' }
it_behaves_like 'has instructions to enable OAuth'
@@ -581,7 +581,7 @@ RSpec.describe 'New project', :js, feature_category: :groups_and_projects do
end
describe 'sidebar' do
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
let_it_be(:parent_group) { create(:group) }
before do
@@ -616,14 +616,14 @@ RSpec.describe 'New project', :js, feature_category: :groups_and_projects do
context 'for a new top-level project' do
it 'shows the "Your work" navigation' do
visit new_project_path
- expect(page).to have_selector(".super-sidebar .context-switcher-toggle", text: "Your work")
+ expect(page).to have_selector(".super-sidebar", text: "Your work")
end
end
context 'for a new group project' do
it 'shows the group sidebar of the parent group' do
visit new_project_path(namespace_id: parent_group.id)
- expect(page).to have_selector(".super-sidebar .context-switcher-toggle", text: parent_group.name)
+ expect(page).to have_selector(".super-sidebar", text: parent_group.name)
end
end
end
diff --git a/spec/features/projects/pages/user_edits_settings_spec.rb b/spec/features/projects/pages/user_edits_settings_spec.rb
index eec9f2befb6..8350214bf99 100644
--- a/spec/features/projects/pages/user_edits_settings_spec.rb
+++ b/spec/features/projects/pages/user_edits_settings_spec.rb
@@ -5,7 +5,7 @@ RSpec.describe 'Pages edits pages settings', :js, feature_category: :pages do
include Spec::Support::Helpers::ModalHelpers
let_it_be_with_reload(:project) { create(:project, :pages_published, pages_https_only: false) }
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
before do
allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
diff --git a/spec/features/projects/pipeline_schedules_spec.rb b/spec/features/projects/pipeline_schedules_spec.rb
index 358c55376d4..322d25ed052 100644
--- a/spec/features/projects/pipeline_schedules_spec.rb
+++ b/spec/features/projects/pipeline_schedules_spec.rb
@@ -12,389 +12,301 @@ RSpec.describe 'Pipeline Schedules', :js, feature_category: :groups_and_projects
let!(:user) { create(:user) }
let!(:maintainer) { create(:user) }
- context 'with pipeline_schedules_vue feature flag turned off' do
+ context 'logged in as the pipeline schedule owner' do
before do
- stub_feature_flags(pipeline_schedules_vue: false)
+ project.add_developer(user)
+ pipeline_schedule.update!(owner: user)
+ gitlab_sign_in(user)
end
- context 'logged in as the pipeline schedule owner' do
+ describe 'GET /projects/pipeline_schedules' do
before do
- project.add_developer(user)
- pipeline_schedule.update!(owner: user)
- gitlab_sign_in(user)
+ visit_pipelines_schedules
end
- describe 'GET /projects/pipeline_schedules' do
- before do
- visit_pipelines_schedules
- end
-
- it 'edits the pipeline' do
- page.within('.pipeline-schedule-table-row') do
- click_link 'Edit'
- end
+ it 'edits the pipeline' do
+ page.find('[data-testid="edit-pipeline-schedule-btn"]').click
- expect(page).to have_content('Edit Pipeline Schedule')
- end
+ expect(page).to have_content(s_('PipelineSchedules|Edit pipeline schedule'))
end
+ end
- describe 'PATCH /projects/pipelines_schedules/:id/edit' do
- before do
- edit_pipeline_schedule
- end
-
- it 'displays existing properties' do
- description = find_field('schedule_description').value
- expect(description).to eq('pipeline schedule')
- expect(page).to have_button('master')
- expect(page).to have_button('Select timezone')
- end
+ describe 'PATCH /projects/pipelines_schedules/:id/edit' do
+ before do
+ edit_pipeline_schedule
+ end
- it 'edits the scheduled pipeline' do
- fill_in 'schedule_description', with: 'my brand new description'
+ it 'displays existing properties' do
+ description = find_field('schedule-description').value
+ expect(description).to eq('pipeline schedule')
+ expect(page).to have_button('master')
+ expect(page).to have_button(_('Select timezone'))
+ end
- save_pipeline_schedule
+ it 'edits the scheduled pipeline' do
+ fill_in 'schedule-description', with: 'my brand new description'
- expect(page).to have_content('my brand new description')
- end
+ save_pipeline_schedule
- context 'when ref is nil' do
- before do
- pipeline_schedule.update_attribute(:ref, nil)
- edit_pipeline_schedule
- end
+ expect(page).to have_content('my brand new description')
+ end
- it 'shows the pipeline schedule with default ref' do
- page.within('[data-testid="schedule-target-ref"]') do
- expect(first('.gl-button-text').text).to eq('master')
- end
- end
+ context 'when ref is nil' do
+ before do
+ pipeline_schedule.update_attribute(:ref, nil)
+ edit_pipeline_schedule
end
- context 'when ref is empty' do
- before do
- pipeline_schedule.update_attribute(:ref, '')
- edit_pipeline_schedule
- end
-
- it 'shows the pipeline schedule with default ref' do
- page.within('[data-testid="schedule-target-ref"]') do
- expect(first('.gl-button-text').text).to eq('master')
- end
+ it 'shows the pipeline schedule with default ref' do
+ page.within('#schedule-target-branch-tag') do
+ expect(first('.gl-button-text').text).to eq('master')
end
end
end
- end
-
- context 'logged in as a project maintainer' do
- before do
- project.add_maintainer(user)
- gitlab_sign_in(user)
- end
- describe 'GET /projects/pipeline_schedules' do
+ context 'when ref is empty' do
before do
- visit_pipelines_schedules
+ pipeline_schedule.update_attribute(:ref, '')
+ edit_pipeline_schedule
end
- describe 'The view' do
- it 'displays the required information description' do
- page.within('.pipeline-schedule-table-row') do
- expect(page).to have_content('pipeline schedule')
- expect(find("[data-testid='next-run-cell'] time")['title'])
- .to include(pipeline_schedule.real_next_run.strftime('%b %-d, %Y'))
- expect(page).to have_link('master')
- expect(page).to have_link("##{pipeline.id}")
- end
- end
-
- it 'creates a new scheduled pipeline' do
- click_link 'New schedule'
-
- expect(page).to have_content('Schedule a new pipeline')
- end
-
- it 'changes ownership of the pipeline' do
- click_button 'Take ownership'
-
- page.within('#pipeline-take-ownership-modal') do
- click_link 'Take ownership'
- end
-
- page.within('.pipeline-schedule-table-row') do
- expect(page).not_to have_content('No owner')
- expect(page).to have_link('Sidney Jones')
- end
- end
-
- it 'deletes the pipeline' do
- click_link 'Delete'
-
- accept_gl_confirm(button_text: 'Delete pipeline schedule')
-
- expect(page).not_to have_css(".pipeline-schedule-table-row")
+ it 'shows the pipeline schedule with default ref' do
+ page.within('#schedule-target-branch-tag') do
+ expect(first('.gl-button-text').text).to eq('master')
end
end
+ end
+ end
+ end
- context 'when ref is nil' do
- before do
- pipeline_schedule.update_attribute(:ref, nil)
- visit_pipelines_schedules
- end
-
- it 'shows a list of the pipeline schedules with empty ref column' do
- expect(first('.branch-name-cell').text).to eq('')
- end
- end
+ context 'logged in as a project maintainer' do
+ before do
+ project.add_maintainer(user)
+ pipeline_schedule.update!(owner: maintainer)
+ gitlab_sign_in(user)
+ end
- context 'when ref is empty' do
- before do
- pipeline_schedule.update_attribute(:ref, '')
- visit_pipelines_schedules
- end
+ describe 'GET /projects/pipeline_schedules' do
+ before do
+ visit_pipelines_schedules
- it 'shows a list of the pipeline schedules with empty ref column' do
- expect(first('.branch-name-cell').text).to eq('')
- end
- end
+ wait_for_requests
end
- describe 'POST /projects/pipeline_schedules/new' do
- before do
- visit_new_pipeline_schedule
- end
-
- it 'sets defaults for timezone and target branch' do
- expect(page).to have_button('master')
- expect(page).to have_button('Select timezone')
+ describe 'The view' do
+ it 'displays the required information description' do
+ page.within('[data-testid="pipeline-schedule-table-row"]') do
+ expect(page).to have_content('pipeline schedule')
+ expect(find('[data-testid="next-run-cell"] time')['title'])
+ .to include(pipeline_schedule.real_next_run.strftime('%b %-d, %Y'))
+ expect(page).to have_link('master')
+ expect(find("[data-testid='last-pipeline-status'] a")['href']).to include(pipeline.id.to_s)
+ end
end
it 'creates a new scheduled pipeline' do
- fill_in_schedule_form
- save_pipeline_schedule
+ click_link 'New schedule'
- expect(page).to have_content('my fancy description')
+ expect(page).to have_content('Schedule a new pipeline')
end
- it 'prevents an invalid form from being submitted' do
- save_pipeline_schedule
+ it 'changes ownership of the pipeline' do
+ find("[data-testid='take-ownership-pipeline-schedule-btn']").click
- expect(page).to have_content('This field is required')
- end
- end
+ page.within('#pipeline-take-ownership-modal') do
+ click_button s_('PipelineSchedules|Take ownership')
- context 'when user creates a new pipeline schedule with variables' do
- before do
- visit_pipelines_schedules
- click_link 'New schedule'
- fill_in_schedule_form
- all('[name="schedule[variables_attributes][][key]"]')[0].set('AAA')
- all('[name="schedule[variables_attributes][][secret_value]"]')[0].set('AAA123')
- all('[name="schedule[variables_attributes][][key]"]')[1].set('BBB')
- all('[name="schedule[variables_attributes][][secret_value]"]')[1].set('BBB123')
- save_pipeline_schedule
- end
+ wait_for_requests
+ end
- it 'user sees the new variable in edit window', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/397040' do
- find(".content-list .pipeline-schedule-table-row:nth-child(1) .btn-group a[title='Edit']").click
- page.within('.ci-variable-list') do
- expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-key").value).to eq('AAA')
- expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-value", visible: false).value).to eq('AAA123')
- expect(find(".ci-variable-row:nth-child(2) .js-ci-variable-input-key").value).to eq('BBB')
- expect(find(".ci-variable-row:nth-child(2) .js-ci-variable-input-value", visible: false).value).to eq('BBB123')
+ page.within('[data-testid="pipeline-schedule-table-row"]') do
+ expect(page).not_to have_content('No owner')
+ expect(page).to have_link('Sidney Jones')
end
end
- end
- context 'when user edits a variable of a pipeline schedule' do
- before do
- create(:ci_pipeline_schedule, project: project, owner: user).tap do |pipeline_schedule|
- create(:ci_pipeline_schedule_variable, key: 'AAA', value: 'AAA123', pipeline_schedule: pipeline_schedule)
+ it 'deletes the pipeline' do
+ page.within('[data-testid="pipeline-schedule-table-row"]') do
+ click_button s_('PipelineSchedules|Delete pipeline schedule')
end
- visit_pipelines_schedules
- find(".content-list .pipeline-schedule-table-row:nth-child(1) .btn-group a[title='Edit']").click
- find('.js-ci-variable-list-section .js-secret-value-reveal-button').click
- first('.js-ci-variable-input-key').set('foo')
- first('.js-ci-variable-input-value').set('bar')
- click_button 'Save pipeline schedule'
- end
+ accept_gl_confirm(button_text: s_('PipelineSchedules|Delete pipeline schedule'))
- it 'user sees the updated variable in edit window' do
- find(".content-list .pipeline-schedule-table-row:nth-child(1) .btn-group a[title='Edit']").click
- page.within('.ci-variable-list') do
- expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-key").value).to eq('foo')
- expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-value", visible: false).value).to eq('bar')
- end
+ expect(page).not_to have_css('[data-testid="pipeline-schedule-table-row"]')
end
end
- context 'when user removes a variable of a pipeline schedule' do
+ context 'when ref is nil' do
before do
- create(:ci_pipeline_schedule, project: project, owner: user).tap do |pipeline_schedule|
- create(:ci_pipeline_schedule_variable, key: 'AAA', value: 'AAA123', pipeline_schedule: pipeline_schedule)
- end
-
+ pipeline_schedule.update_attribute(:ref, nil)
visit_pipelines_schedules
- find(".content-list .pipeline-schedule-table-row:nth-child(1) .btn-group a[title='Edit']").click
- find('.ci-variable-list .ci-variable-row-remove-button').click
- click_button 'Save pipeline schedule'
+ wait_for_requests
end
- it 'user does not see the removed variable in edit window' do
- find(".content-list .pipeline-schedule-table-row:nth-child(1) .btn-group a[title='Edit']").click
- page.within('.ci-variable-list') do
- expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-key").value).to eq('')
- expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-value", visible: false).value).to eq('')
+ it 'shows a list of the pipeline schedules with empty ref column' do
+ target = find('[data-testid="pipeline-schedule-target"]')
+
+ page.within('[data-testid="pipeline-schedule-table-row"]') do
+ expect(target.text).to eq(s_('PipelineSchedules|None'))
end
end
end
- context 'when active is true and next_run_at is NULL' do
+ context 'when ref is empty' do
before do
- create(:ci_pipeline_schedule, project: project, owner: user).tap do |pipeline_schedule|
- pipeline_schedule.update_attribute(:next_run_at, nil) # Consequently next_run_at will be nil
- end
+ pipeline_schedule.update_attribute(:ref, '')
+ visit_pipelines_schedules
+ wait_for_requests
end
- it 'user edit and recover the problematic pipeline schedule' do
- visit_pipelines_schedules
- find(".content-list .pipeline-schedule-table-row:nth-child(1) .btn-group a[title='Edit']").click
- fill_in 'schedule_cron', with: '* 1 2 3 4'
- click_button 'Save pipeline schedule'
+ it 'shows a list of the pipeline schedules with empty ref column' do
+ target = find('[data-testid="pipeline-schedule-target"]')
- page.within('.pipeline-schedule-table-row:nth-child(1)') do
- expect(page).to have_css("[data-testid='next-run-cell'] time")
- end
+ expect(target.text).to eq(s_('PipelineSchedules|None'))
end
end
end
- context 'logged in as non-member' do
+ describe 'POST /projects/pipeline_schedules/new' do
before do
- gitlab_sign_in(user)
+ visit_new_pipeline_schedule
end
- describe 'GET /projects/pipeline_schedules' do
- before do
- visit_pipelines_schedules
- end
+ it 'sets defaults for timezone and target branch' do
+ expect(page).to have_button('master')
+ expect(page).to have_button('Select timezone')
+ end
- describe 'The view' do
- it 'does not show create schedule button' do
- expect(page).not_to have_link('New schedule')
- end
- end
+ it 'creates a new scheduled pipeline' do
+ fill_in_schedule_form
+ create_pipeline_schedule
+
+ expect(page).to have_content('my fancy description')
end
- end
- context 'not logged in' do
- describe 'GET /projects/pipeline_schedules' do
- before do
- visit_pipelines_schedules
- end
+ it 'prevents an invalid form from being submitted' do
+ create_pipeline_schedule
- describe 'The view' do
- it 'does not show create schedule button' do
- expect(page).not_to have_link('New schedule')
- end
- end
+ expect(page).to have_content("Cron timezone can't be blank")
end
end
- end
- context 'with pipeline_schedules_vue feature flag turned on' do
- context 'logged in as a project maintainer' do
+ context 'when user creates a new pipeline schedule with variables' do
before do
- project.add_maintainer(maintainer)
- pipeline_schedule.update!(owner: user)
- gitlab_sign_in(maintainer)
+ visit_pipelines_schedules
+ click_link 'New schedule'
+ fill_in_schedule_form
+ all('[name="schedule[variables_attributes][][key]"]')[0].set('AAA')
+ all('[name="schedule[variables_attributes][][secret_value]"]')[0].set('AAA123')
+ all('[name="schedule[variables_attributes][][key]"]')[1].set('BBB')
+ all('[name="schedule[variables_attributes][][secret_value]"]')[1].set('BBB123')
+ create_pipeline_schedule
end
- describe 'GET /projects/pipeline_schedules' do
- before do
- visit_pipelines_schedules
-
- wait_for_requests
+ it 'user sees the new variable in edit window', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/397040' do
+ find(".content-list .pipeline-schedule-table-row:nth-child(1) .btn-group a[title='Edit']").click
+ page.within('.ci-variable-list') do
+ expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-key").value).to eq('AAA')
+ expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-value", visible: false).value).to eq('AAA123')
+ expect(find(".ci-variable-row:nth-child(2) .js-ci-variable-input-key").value).to eq('BBB')
+ expect(find(".ci-variable-row:nth-child(2) .js-ci-variable-input-value", visible: false).value).to eq('BBB123')
end
+ end
+ end
- describe 'The view' do
- it 'displays the required information description' do
- page.within('[data-testid="pipeline-schedule-table-row"]') do
- expect(page).to have_content('pipeline schedule')
- expect(find("[data-testid='next-run-cell'] time")['title'])
- .to include(pipeline_schedule.real_next_run.strftime('%b %-d, %Y'))
- expect(page).to have_link('master')
- expect(find("[data-testid='last-pipeline-status'] a")['href']).to include(pipeline.id.to_s)
- end
- end
-
- it 'changes ownership of the pipeline' do
- click_button 'Take ownership'
+ context 'when user edits a variable of a pipeline schedule' do
+ before do
+ create(:ci_pipeline_schedule, project: project, owner: user).tap do |pipeline_schedule|
+ create(:ci_pipeline_schedule_variable, key: 'AAA', value: 'AAA123', pipeline_schedule: pipeline_schedule)
+ end
- page.within('#pipeline-take-ownership-modal') do
- click_button 'Take ownership'
+ visit_pipelines_schedules
+ first('[data-testid="edit-pipeline-schedule-btn"]').click
+ click_button _('Reveal values')
+ first('[data-testid="pipeline-form-ci-variable-key"]').set('foo')
+ first('[data-testid="pipeline-form-ci-variable-value"]').set('bar')
+ save_pipeline_schedule
+ end
- wait_for_requests
- end
+ it 'user sees the updated variable' do
+ first('[data-testid="edit-pipeline-schedule-btn"]').click
- page.within('[data-testid="pipeline-schedule-table-row"]') do
- expect(page).not_to have_content('No owner')
- expect(page).to have_link('Sidney Jones')
- end
- end
+ expect(first('[data-testid="pipeline-form-ci-variable-key"]').value).to eq('foo')
+ expect(first('[data-testid="pipeline-form-ci-variable-value"]').value).to eq('')
- it 'runs the pipeline' do
- click_button 'Run pipeline schedule'
+ click_button _('Reveal values')
- wait_for_requests
+ expect(first('[data-testid="pipeline-form-ci-variable-value"]').value).to eq('bar')
+ end
+ end
- expect(page).to have_content("Successfully scheduled a pipeline to run. Go to the Pipelines page for details.")
- end
+ context 'when user removes a variable of a pipeline schedule' do
+ before do
+ create(:ci_pipeline_schedule, project: project, owner: user).tap do |pipeline_schedule|
+ create(:ci_pipeline_schedule_variable, key: 'AAA', value: 'AAA123', pipeline_schedule: pipeline_schedule)
+ end
- it 'deletes the pipeline' do
- click_button 'Delete pipeline schedule'
+ visit_pipelines_schedules
+ first('[data-testid="edit-pipeline-schedule-btn"]').click
+ find('[data-testid="remove-ci-variable-row"]').click
+ save_pipeline_schedule
+ end
- accept_gl_confirm(button_text: 'Delete pipeline schedule')
+ it 'user does not see the removed variable in edit window' do
+ first('[data-testid="edit-pipeline-schedule-btn"]').click
- expect(page).not_to have_css('[data-testid="pipeline-schedule-table-row"]')
- end
- end
+ expect(first('[data-testid="pipeline-form-ci-variable-key"]').value).to eq('')
+ expect(first('[data-testid="pipeline-form-ci-variable-value"]').value).to eq('')
end
end
- context 'logged in as non-member' do
+ context 'when active is true and next_run_at is NULL' do
before do
- gitlab_sign_in(user)
+ create(:ci_pipeline_schedule, project: project, owner: user).tap do |pipeline_schedule|
+ pipeline_schedule.update_attribute(:next_run_at, nil) # Consequently next_run_at will be nil
+ end
end
- describe 'GET /projects/pipeline_schedules' do
- before do
- visit_pipelines_schedules
+ it 'user edits and recovers the problematic pipeline schedule' do
+ visit_pipelines_schedules
+ first('[data-testid="edit-pipeline-schedule-btn"]').click
+ fill_in 'schedule_cron', with: '* 1 2 3 4'
+ save_pipeline_schedule
- wait_for_requests
- end
-
- describe 'The view' do
- it 'does not show create schedule button' do
- expect(page).not_to have_link('New schedule')
- end
+ page.within(first('[data-testid="pipeline-schedule-table-row"]')) do
+ expect(page).to have_css("[data-testid='next-run-cell'] time")
end
end
end
+ end
- context 'not logged in' do
- describe 'GET /projects/pipeline_schedules' do
- before do
- visit_pipelines_schedules
+ context 'logged in as non-member' do
+ before do
+ gitlab_sign_in(user)
+ end
- wait_for_requests
+ describe 'GET /projects/pipeline_schedules' do
+ before do
+ visit_pipelines_schedules
+ end
+
+ describe 'The view' do
+ it 'does not show create schedule button' do
+ expect(page).not_to have_link('New schedule')
end
+ end
+ end
+ end
- describe 'The view' do
- it 'does not show create schedule button' do
- expect(page).not_to have_link('New schedule')
- end
+ context 'not logged in' do
+ describe 'GET /projects/pipeline_schedules' do
+ before do
+ visit_pipelines_schedules
+ end
+
+ describe 'The view' do
+ it 'does not show create schedule button' do
+ expect(page).not_to have_link('New schedule')
end
end
end
@@ -413,7 +325,7 @@ RSpec.describe 'Pipeline Schedules', :js, feature_category: :groups_and_projects
end
def select_timezone
- find('[data-testid="schedule-timezone"] .gl-new-dropdown-toggle').click
+ find('#schedule-timezone .gl-new-dropdown-toggle').click
find("li", text: "Arizona").click
end
@@ -421,12 +333,16 @@ RSpec.describe 'Pipeline Schedules', :js, feature_category: :groups_and_projects
click_button 'master'
end
+ def create_pipeline_schedule
+ click_button s_('PipelineSchedules|Create pipeline schedule')
+ end
+
def save_pipeline_schedule
- click_button 'Save pipeline schedule'
+ click_button s_('PipelineSchedules|Edit pipeline schedule')
end
def fill_in_schedule_form
- fill_in 'schedule_description', with: 'my fancy description'
+ fill_in 'schedule-description', with: 'my fancy description'
fill_in 'schedule_cron', with: '* 1 2 3 4'
select_timezone
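With the helpers renamed above, a new example would compose them roughly as follows; this is a sketch assuming the 'schedule-description' and 'schedule_cron' field names from this hunk, not an example taken from the spec:

it 'creates a schedule with a custom cron' do
  visit_new_pipeline_schedule
  fill_in 'schedule-description', with: 'nightly build'
  fill_in 'schedule_cron', with: '0 1 * * *'
  select_timezone
  create_pipeline_schedule

  expect(page).to have_content('nightly build')
end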
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index bb49fb734d7..2fc8345fb47 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -224,7 +224,7 @@ RSpec.describe 'Pipeline', :js, feature_category: :groups_and_projects do
expect(page).not_to have_content('Retry job')
within('[data-testid="pipeline-details-header"]') do
- expect(page).to have_selector('[data-testid="ci-badge-running"]')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'running')
end
end
end
@@ -278,7 +278,7 @@ RSpec.describe 'Pipeline', :js, feature_category: :groups_and_projects do
expect(page).not_to have_content('Retry job')
within('[data-testid="pipeline-details-header"]') do
- expect(page).to have_selector('[data-testid="ci-badge-running"]')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'running')
end
end
@@ -312,7 +312,7 @@ RSpec.describe 'Pipeline', :js, feature_category: :groups_and_projects do
expect(page).not_to have_content('Play job')
within('[data-testid="pipeline-details-header"]') do
- expect(page).to have_selector('[data-testid="ci-badge-running"]')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'running')
end
end
end
@@ -537,7 +537,7 @@ RSpec.describe 'Pipeline', :js, feature_category: :groups_and_projects do
it 'shows running status in pipeline header', :sidekiq_might_not_need_inline do
within('[data-testid="pipeline-details-header"]') do
- expect(page).to have_selector('[data-testid="ci-badge-running"]')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'running')
end
end
end
@@ -843,12 +843,10 @@ RSpec.describe 'Pipeline', :js, feature_category: :groups_and_projects do
end
it 'displays the PipelineSchedule in an inactive state' do
- stub_feature_flags(pipeline_schedules_vue: false)
-
visit project_pipeline_schedules_path(project)
page.click_link('Inactive')
- expect(page).to have_selector('table.ci-table > tbody > tr > td', text: 'blocked user schedule')
+ expect(page).to have_selector('[data-testid="pipeline-schedule-description"]', text: 'blocked user schedule')
end
it 'does not create a new Pipeline', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/408215' do
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index 26fcd8ca3ca..c1aa2c35337 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
let(:expected_detached_mr_tag) { 'merge request' }
context 'when user is logged in' do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
before do
sign_in(user)
@@ -115,7 +115,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
it 'indicates that pipeline can be canceled' do
expect(page).to have_selector('.js-pipelines-cancel-button')
- expect(page).to have_selector('[data-testid="ci-badge-running"]')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'running')
end
context 'when canceling' do
@@ -127,7 +127,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
it 'indicated that pipelines was canceled', :sidekiq_might_not_need_inline do
expect(page).not_to have_selector('.js-pipelines-cancel-button')
- expect(page).to have_selector('[data-testid="ci-badge-canceled"]')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'canceled')
end
end
end
@@ -144,7 +144,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
it 'indicates that pipeline can be retried' do
expect(page).to have_selector('.js-pipelines-retry-button')
- expect(page).to have_selector('[data-testid="ci-badge-failed"]')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'failed')
end
context 'when retrying' do
@@ -155,7 +155,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
it 'shows running pipeline that is not retryable' do
expect(page).not_to have_selector('.js-pipelines-retry-button')
- expect(page).to have_selector('[data-testid="ci-badge-running"]')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'running')
end
end
end
@@ -396,7 +396,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
end
it 'shows the pipeline as preparing' do
- expect(page).to have_selector('[data-testid="ci-badge-preparing"]')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'preparing')
end
end
@@ -417,7 +417,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
end
it 'has pipeline running' do
- expect(page).to have_selector('[data-testid="ci-badge-running"]')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'running')
end
context 'when canceling' do
@@ -428,7 +428,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
it 'indicates that pipeline was canceled', :sidekiq_might_not_need_inline do
expect(page).not_to have_selector('.js-pipelines-cancel-button')
- expect(page).to have_selector('[data-testid="ci-badge-canceled"]')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'canceled')
end
end
end
@@ -450,7 +450,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
end
it 'has failed pipeline', :sidekiq_might_not_need_inline do
- expect(page).to have_selector('[data-testid="ci-badge-failed"]')
+ expect(page).to have_selector('[data-testid="ci-badge-link"]', text: 'failed')
end
end
end
diff --git a/spec/features/projects/settings/monitor_settings_spec.rb b/spec/features/projects/settings/monitor_settings_spec.rb
index b46451f4255..c2914c020e3 100644
--- a/spec/features/projects/settings/monitor_settings_spec.rb
+++ b/spec/features/projects/settings/monitor_settings_spec.rb
@@ -5,9 +5,8 @@ require 'spec_helper'
RSpec.describe 'Projects > Settings > For a forked project', :js, feature_category: :groups_and_projects do
include ListboxHelpers
- let_it_be(:project) { create(:project, :repository, create_templates: :issue) }
-
- let(:user) { project.first_owner }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
+ let_it_be(:project) { create(:project, :repository, create_templates: :issue, namespace: user.namespace) }
before do
sign_in(user)
diff --git a/spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb b/spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb
index 1ab88ec0fff..ee54065fdf8 100644
--- a/spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb
+++ b/spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'Project > Settings > Packages and registries > Container registry tag expiration policy',
feature_category: :groups_and_projects do
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
let_it_be(:project, reload: true) { create(:project, namespace: user.namespace) }
let(:container_registry_enabled) { true }
diff --git a/spec/features/projects/settings/registry_settings_spec.rb b/spec/features/projects/settings/registry_settings_spec.rb
index 9df82e447aa..7f0367f47f7 100644
--- a/spec/features/projects/settings/registry_settings_spec.rb
+++ b/spec/features/projects/settings/registry_settings_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'Project > Settings > Packages and registries > Container registry tag expiration policy',
feature_category: :groups_and_projects do
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
let_it_be(:project, reload: true) { create(:project, namespace: user.namespace) }
let(:container_registry_enabled) { true }
diff --git a/spec/features/projects/settings/service_desk_setting_spec.rb b/spec/features/projects/settings/service_desk_setting_spec.rb
index d068cb219f1..5cc2e2d3c05 100644
--- a/spec/features/projects/settings/service_desk_setting_spec.rb
+++ b/spec/features/projects/settings/service_desk_setting_spec.rb
@@ -56,7 +56,7 @@ RSpec.describe 'Service Desk Setting', :js, :clean_gitlab_redis_cache, feature_c
wait_for_requests
project.reload
- expect(find('[data-testid="incoming-email"]').value).to eq(project.service_desk_custom_address)
+ expect(find('[data-testid="incoming-email"]').value).to eq(project.service_desk_alias_address)
page.within '#js-service-desk' do
fill_in('service-desk-project-suffix', with: 'foo')
diff --git a/spec/features/projects/show/user_sees_collaboration_links_spec.rb b/spec/features/projects/show/user_sees_collaboration_links_spec.rb
index ee017336acc..626d4de7baf 100644
--- a/spec/features/projects/show/user_sees_collaboration_links_spec.rb
+++ b/spec/features/projects/show/user_sees_collaboration_links_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe 'Projects > Show > Collaboration links', :js, feature_category: :
using RSpec::Parameterized::TableSyntax
let_it_be(:project) { create(:project, :repository, :public) }
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
before do
sign_in(user)
diff --git a/spec/features/projects/user_sees_sidebar_spec.rb b/spec/features/projects/user_sees_sidebar_spec.rb
index 5a744be5d81..22d00e9a351 100644
--- a/spec/features/projects/user_sees_sidebar_spec.rb
+++ b/spec/features/projects/user_sees_sidebar_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Projects > User sees sidebar', feature_category: :groups_and_projects do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
let(:project) { create(:project, :private, public_builds: false, namespace: user.namespace) }
# NOTE: See documented behaviour https://design.gitlab.com/regions/navigation#contextual-navigation
@@ -182,7 +182,7 @@ RSpec.describe 'Projects > User sees sidebar', feature_category: :groups_and_pro
end
context 'as guest' do
- let(:guest) { create(:user) }
+ let(:guest) { create(:user, :no_super_sidebar) }
let!(:issue) { create(:issue, :opened, project: project, author: guest) }
before do
diff --git a/spec/features/projects/user_uses_shortcuts_spec.rb b/spec/features/projects/user_uses_shortcuts_spec.rb
index 77f753b92eb..b7b2093d78a 100644
--- a/spec/features/projects/user_uses_shortcuts_spec.rb
+++ b/spec/features/projects/user_uses_shortcuts_spec.rb
@@ -3,9 +3,8 @@
require 'spec_helper'
RSpec.describe 'User uses shortcuts', :js, feature_category: :groups_and_projects do
- let_it_be(:project) { create(:project, :repository) }
-
- let(:user) { project.first_owner }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
+ let_it_be(:project) { create(:project, :repository, namespace: user.namespace) }
before do
sign_in(user)
@@ -15,58 +14,6 @@ RSpec.describe 'User uses shortcuts', :js, feature_category: :groups_and_project
wait_for_requests
end
- context 'disabling shortcuts' do
- before do
- page.evaluate_script("localStorage.removeItem('shortcutsDisabled')")
- end
-
- it 'can disable shortcuts from help menu' do
- open_modal_shortcut_keys
- click_toggle_button
- close_modal
-
- open_modal_shortcut_keys
-
- expect(page).not_to have_selector('[data-testid="modal-shortcuts"]')
-
- page.refresh
- open_modal_shortcut_keys
-
- # after reload, shortcuts modal doesn't exist at all until we add it
- expect(page).not_to have_selector('[data-testid="modal-shortcuts"]')
- end
-
- it 're-enables shortcuts' do
- open_modal_shortcut_keys
- click_toggle_button
- close_modal
-
- open_modal_from_help_menu
- click_toggle_button
- close_modal
-
- open_modal_shortcut_keys
- expect(find('[data-testid="modal-shortcuts"]')).to be_visible
- end
-
- def open_modal_shortcut_keys
- find('body').native.send_key('?')
- end
-
- def open_modal_from_help_menu
- find('.header-help-dropdown-toggle').click
- find('button', text: 'Keyboard shortcuts').click
- end
-
- def click_toggle_button
- find('.js-toggle-shortcuts .gl-toggle').click
- end
-
- def close_modal
- find('.modal button[aria-label="Close"]').click
- end
- end
-
context 'when navigating to the Project pages' do
it 'redirects to the project overview page' do
visit project_issues_path(project)
diff --git a/spec/features/projects/wikis_spec.rb b/spec/features/projects/wikis_spec.rb
index 5d950da6674..63714954c0c 100644
--- a/spec/features/projects/wikis_spec.rb
+++ b/spec/features/projects/wikis_spec.rb
@@ -3,7 +3,7 @@
require "spec_helper"
RSpec.describe 'Project wikis', :js, feature_category: :wiki do
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
let(:wiki) { create(:project_wiki, user: user, project: project) }
let(:project) { create(:project, namespace: user.namespace, creator: user) }
diff --git a/spec/features/projects/work_items/work_item_spec.rb b/spec/features/projects/work_items/work_item_spec.rb
index 618d3e2efd0..a1f5466f5bf 100644
--- a/spec/features/projects/work_items/work_item_spec.rb
+++ b/spec/features/projects/work_items/work_item_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe 'Work item', :js, feature_category: :team_planning do
- let_it_be_with_reload(:user) { create(:user) }
- let_it_be_with_reload(:user2) { create(:user, name: 'John') }
+ let_it_be_with_reload(:user) { create(:user, :no_super_sidebar) }
+ let_it_be_with_reload(:user2) { create(:user, :no_super_sidebar, name: 'John') }
let_it_be(:project) { create(:project, :public) }
let_it_be(:work_item) { create(:work_item, project: project) }
@@ -39,6 +39,44 @@ RSpec.describe 'Work item', :js, feature_category: :team_planning do
expect(page).to have_selector('[data-testid="work-item-actions-dropdown"]')
end
+ it 'reassigns to another user',
+ quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/413074' do
+ find('[data-testid="work-item-assignees-input"]').fill_in(with: user.username)
+ wait_for_requests
+
+ send_keys(:enter)
+ find("body").click
+ wait_for_requests
+
+ find('[data-testid="work-item-assignees-input"]').fill_in(with: user2.username)
+ wait_for_requests
+
+ send_keys(:enter)
+ find("body").click
+ wait_for_requests
+
+ expect(work_item.reload.assignees).to include(user2)
+ end
+
+ it 'updates the assignee in real-time' do
+ Capybara::Session.new(:other_session)
+
+ using_session :other_session do
+ visit work_items_path
+ expect(work_item.reload.assignees).not_to include(user)
+ end
+
+ find('[data-testid="work-item-assignees-input"]').hover
+ find('[data-testid="assign-self"]').click
+ wait_for_requests
+
+ expect(work_item.reload.assignees).to include(user)
+
+ using_session :other_session do
+ expect(work_item.reload.assignees).to include(user)
+ end
+ end
+
it_behaves_like 'work items title'
it_behaves_like 'work items toggle status button'
it_behaves_like 'work items assignees'
@@ -90,5 +128,11 @@ RSpec.describe 'Work item', :js, feature_category: :team_planning do
expect(page).to have_selector('[data-testid="award-button"].disabled')
end
end
+
+ it 'has a disabled assignees input field' do
+ within('[data-testid="work-item-assignees-input"]') do
+ expect(page).to have_field(type: 'text', disabled: true)
+ end
+ end
end
end
diff --git a/spec/features/projects_spec.rb b/spec/features/projects_spec.rb
index d28fafaac45..7ca9395f669 100644
--- a/spec/features/projects_spec.rb
+++ b/spec/features/projects_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'Project', feature_category: :groups_and_projects do
include MobileHelpers
describe 'template' do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
before do
sign_in user
@@ -78,7 +78,7 @@ RSpec.describe 'Project', feature_category: :groups_and_projects do
end
describe 'shows tip about push to create git command' do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
before do
sign_in user
@@ -214,7 +214,7 @@ RSpec.describe 'Project', feature_category: :groups_and_projects do
end
describe 'showing information about source of a project fork', :js do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
let(:base_project) { create(:project, :public, :repository) }
let(:forked_project) { fork_project(base_project, user, repository: true) }
@@ -265,7 +265,7 @@ RSpec.describe 'Project', feature_category: :groups_and_projects do
end
describe 'when the project repository is disabled', :js do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
let(:project) { create(:project, :repository_disabled, :repository, namespace: user.namespace) }
before do
@@ -282,7 +282,7 @@ RSpec.describe 'Project', feature_category: :groups_and_projects do
end
describe 'removal', :js do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
let(:project) { create(:project, namespace: user.namespace) }
before do
@@ -307,7 +307,7 @@ RSpec.describe 'Project', feature_category: :groups_and_projects do
end
describe 'tree view (default view is set to Files)', :js do
- let(:user) { create(:user, project_view: 'files') }
+ let(:user) { create(:user, :no_super_sidebar, project_view: 'files') }
let(:project) { create(:forked_project_with_submodules) }
before do
@@ -379,7 +379,7 @@ RSpec.describe 'Project', feature_category: :groups_and_projects do
end
describe 'activity view' do
- let(:user) { create(:user, project_view: 'activity') }
+ let(:user) { create(:user, :no_super_sidebar, project_view: 'activity') }
let(:project) { create(:project, :repository) }
before do
@@ -410,7 +410,7 @@ RSpec.describe 'Project', feature_category: :groups_and_projects do
end
describe 'edit' do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
let(:project) { create(:project, :public) }
let(:path) { edit_project_path(project) }
@@ -425,7 +425,7 @@ RSpec.describe 'Project', feature_category: :groups_and_projects do
describe 'view for a user without an access to a repo' do
let(:project) { create(:project, :repository) }
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
it 'does not contain default branch information in its content' do
default_branch = 'merge-commit-analyze-side-branch'
diff --git a/spec/features/runners_spec.rb b/spec/features/runners_spec.rb
index 3c63ec82778..091c318459b 100644
--- a/spec/features/runners_spec.rb
+++ b/spec/features/runners_spec.rb
@@ -9,396 +9,387 @@ RSpec.describe 'Runners', feature_category: :runner_fleet do
sign_in(user)
end
- context 'when project_runners_vue_ui is disabled' do
- before do
- stub_feature_flags(project_runners_vue_ui: false)
- end
+ context 'with user as project maintainer' do
+ let_it_be(:project) { create(:project).tap { |project| project.add_maintainer(user) } }
- context 'with user as project maintainer' do
- let_it_be(:project) { create(:project).tap { |project| project.add_maintainer(user) } }
+ context 'when user views runners page', :js do
+ before do
+ visit project_runners_path(project)
+ end
- context 'when user views runners page', :js do
- before do
- visit project_runners_path(project)
- end
+ it 'user can see a link with instructions on how to install GitLab Runner' do
+ expect(page).to have_link(s_('Runners|New project runner'), href: new_project_runner_path(project))
+ end
- it 'user can see a link with instructions on how to install GitLab Runner' do
- expect(page).to have_link(s_('Runners|New project runner'), href: new_project_runner_path(project))
- end
+ it_behaves_like "shows and resets runner registration token" do
+ let(:dropdown_text) { s_('Runners|Register a project runner') }
+ let(:registration_token) { project.runners_token }
+ end
+ end
- it_behaves_like "shows and resets runner registration token" do
- let(:dropdown_text) { s_('Runners|Register a project runner') }
- let(:registration_token) { project.runners_token }
- end
+ context 'when user views new runner page', :js do
+ before do
+ visit new_project_runner_path(project)
end
- context 'when user views new runner page', :js do
- before do
- visit new_project_runner_path(project)
- end
+ it_behaves_like 'creates runner and shows register page' do
+ let(:register_path_pattern) { register_project_runner_path(project, '.*') }
+ end
- it_behaves_like 'creates runner and shows register page' do
- let(:register_path_pattern) { register_project_runner_path(project, '.*') }
- end
+ it_behaves_like 'shows locked field'
+ end
+ end
- it 'shows the locked field' do
- expect(page).to have_selector('input[type="checkbox"][name="locked"]')
- expect(page).to have_content(_('Lock to current projects'))
- end
- end
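The inline locked-field assertions deleted above are replaced by the 'shows locked field' shared example. Its definition lives outside this hunk; reconstructed from the removed expectations, it presumably looks roughly like this (a sketch, not the actual shared example):

RSpec.shared_examples 'shows locked field' do
  it 'shows the locked field' do
    expect(page).to have_selector('input[type="checkbox"][name="locked"]')
    expect(page).to have_content(_('Lock to current projects'))
  end
end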
+ context 'when a project has enabled shared_runners' do
+ let_it_be(:project) { create(:project) }
+
+ before do
+ project.add_maintainer(user)
end
- context 'when a project has enabled shared_runners' do
- let_it_be(:project) { create(:project) }
+ context 'when a project_type runner is activated on the project' do
+ let_it_be(:project_runner) { create(:ci_runner, :project, projects: [project]) }
- before do
- project.add_maintainer(user)
- end
+ it 'user sees the project runner' do
+ visit project_runners_path(project)
- context 'when a project_type runner is activated on the project' do
- let_it_be(:project_runner) { create(:ci_runner, :project, projects: [project]) }
+ within '[data-testid="assigned_project_runners"]' do
+ expect(page).to have_content(project_runner.display_name)
+ end
- it 'user sees the project runner' do
- visit project_runners_path(project)
+ click_on project_runner.short_sha
- within '[data-testid="assigned_project_runners"]' do
- expect(page).to have_content(project_runner.display_name)
- end
+ expect(page).to have_content(project_runner.platform)
+ end
- click_on project_runner.short_sha
+ it 'user can pause and resume the project runner' do
+ visit project_runners_path(project)
- expect(page).to have_content(project_runner.platform)
+ within '[data-testid="assigned_project_runners"]' do
+ expect(page).to have_link('Pause')
end
- it 'user can pause and resume the project runner' do
- visit project_runners_path(project)
+ click_on 'Pause'
- within '[data-testid="assigned_project_runners"]' do
- expect(page).to have_link('Pause')
- end
+ within '[data-testid="assigned_project_runners"]' do
+ expect(page).to have_link('Resume')
+ end
- click_on 'Pause'
+ click_on 'Resume'
- within '[data-testid="assigned_project_runners"]' do
- expect(page).to have_link('Resume')
- end
+ within '[data-testid="assigned_project_runners"]' do
+ expect(page).to have_link('Pause')
+ end
+ end
- click_on 'Resume'
+ it 'user removes an activated project runner if this is the last project for that runner' do
+ visit project_runners_path(project)
- within '[data-testid="assigned_project_runners"]' do
- expect(page).to have_link('Pause')
- end
+ within '[data-testid="assigned_project_runners"]' do
+ click_on 'Remove runner'
end
- it 'user removes an activated project runner if this is last project for that runners' do
- visit project_runners_path(project)
+ expect(page).not_to have_content(project_runner.display_name)
+ end
- within '[data-testid="assigned_project_runners"]' do
- click_on 'Remove runner'
- end
+ it 'user edits the runner to be protected' do
+ visit project_runners_path(project)
+
+ within '[data-testid="assigned_project_runners"]' do
+ first('[data-testid="edit-runner-link"]').click
+ end
+
+ expect(page.find_field('runner[access_level]')).not_to be_checked
+
+ check 'runner_access_level'
+ click_button 'Save changes'
+
+ expect(page).to have_content 'Protected Yes'
+ end
- expect(page).not_to have_content(project_runner.display_name)
+ context 'when a runner has a tag' do
+ before do
+ project_runner.update!(tag_list: ['tag'])
end
- it 'user edits the runner to be protected' do
+ it 'user edits runner not to run untagged jobs' do
visit project_runners_path(project)
within '[data-testid="assigned_project_runners"]' do
first('[data-testid="edit-runner-link"]').click
end
- expect(page.find_field('runner[access_level]')).not_to be_checked
+ expect(page.find_field('runner[run_untagged]')).to be_checked
- check 'runner_access_level'
+ uncheck 'runner_run_untagged'
click_button 'Save changes'
- expect(page).to have_content 'Protected Yes'
+ expect(page).to have_content 'Can run untagged jobs No'
end
+ end
- context 'when a runner has a tag' do
- before do
- project_runner.update!(tag_list: ['tag'])
- end
-
- it 'user edits runner not to run untagged jobs' do
- visit project_runners_path(project)
-
- within '[data-testid="assigned_project_runners"]' do
- first('[data-testid="edit-runner-link"]').click
- end
-
- expect(page.find_field('runner[run_untagged]')).to be_checked
+ context 'when a shared runner is activated on the project' do
+ let!(:shared_runner) { create(:ci_runner, :instance) }
- uncheck 'runner_run_untagged'
- click_button 'Save changes'
+ it 'user sees CI/CD setting page' do
+ visit project_runners_path(project)
- expect(page).to have_content 'Can run untagged jobs No'
+ within '[data-testid="available-shared-runners"]' do
+ expect(page).to have_content(shared_runner.display_name)
end
end
- context 'when a shared runner is activated on the project' do
- let!(:shared_runner) { create(:ci_runner, :instance) }
+ context 'when multiple shared runners are configured' do
+ let_it_be(:shared_runner_2) { create(:ci_runner, :instance) }
- it 'user sees CI/CD setting page' do
+ it 'shows the runner count' do
visit project_runners_path(project)
within '[data-testid="available-shared-runners"]' do
- expect(page).to have_content(shared_runner.display_name)
+ expect(page).to have_content format(_('Available shared runners: %{count}'), { count: 2 })
end
end
- context 'when multiple shared runners are configured' do
- let_it_be(:shared_runner_2) { create(:ci_runner, :instance) }
+ it 'adds pagination to the shared runner list' do
+ stub_const('Projects::Settings::CiCdController::NUMBER_OF_RUNNERS_PER_PAGE', 1)
- it 'shows the runner count' do
- visit project_runners_path(project)
+ visit project_runners_path(project)
- within '[data-testid="available-shared-runners"]' do
- expect(page).to have_content format(_('Available shared runners: %{count}'), { count: 2 })
- end
+ within '[data-testid="available-shared-runners"]' do
+ expect(find('.pagination')).not_to be_nil
end
+ end
+ end
+ end
- it 'adds pagination to the shared runner list' do
- stub_const('Projects::Settings::CiCdController::NUMBER_OF_RUNNERS_PER_PAGE', 1)
+ context 'when multiple project runners are configured' do
+ let!(:project_runner_2) { create(:ci_runner, :project, projects: [project]) }
- visit project_runners_path(project)
+ it 'adds pagination to the runner list' do
+ stub_const('Projects::Settings::CiCdController::NUMBER_OF_RUNNERS_PER_PAGE', 1)
- within '[data-testid="available-shared-runners"]' do
- expect(find('.pagination')).not_to be_nil
- end
- end
- end
+ visit project_runners_path(project)
+
+ expect(find('.pagination')).not_to be_nil
end
+ end
+ end
- context 'when multiple project runners are configured' do
- let!(:project_runner_2) { create(:ci_runner, :project, projects: [project]) }
+ context 'when a project runner exists in another project' do
+ let(:another_project) { create(:project) }
+ let!(:project_runner) { create(:ci_runner, :project, projects: [another_project]) }
- it 'adds pagination to the runner list' do
- stub_const('Projects::Settings::CiCdController::NUMBER_OF_RUNNERS_PER_PAGE', 1)
+ before do
+ another_project.add_maintainer(user)
+ end
- visit project_runners_path(project)
+ it 'user enables and disables a project runner' do
+ visit project_runners_path(project)
- expect(find('.pagination')).not_to be_nil
- end
+ within '[data-testid="available_project_runners"]' do
+ click_on 'Enable for this project'
+ end
+
+ expect(page.find('[data-testid="assigned_project_runners"]')).to have_content(project_runner.display_name)
+
+ within '[data-testid="assigned_project_runners"]' do
+ click_on 'Disable for this project'
end
+
+ expect(page.find('[data-testid="available_project_runners"]')).to have_content(project_runner.display_name)
end
+ end
- context 'when a project runner exists in another project' do
- let(:another_project) { create(:project) }
- let!(:project_runner) { create(:ci_runner, :project, projects: [another_project]) }
+ context 'shared runner text' do
+ context 'when application settings have shared_runners_text' do
+ let(:shared_runners_text) { 'custom **shared** runners description' }
+ let(:shared_runners_html) { 'custom shared runners description' }
before do
- another_project.add_maintainer(user)
+ stub_application_setting(shared_runners_text: shared_runners_text)
end
- it 'user enables and disables a project runner' do
+ it 'user sees shared runners description' do
visit project_runners_path(project)
- within '[data-testid="available_project_runners"]' do
- click_on 'Enable for this project'
+ page.within("[data-testid='shared-runners-description']") do
+ expect(page).not_to have_content('The same shared runner executes code from multiple projects')
+ expect(page).to have_content(shared_runners_html)
end
-
- expect(page.find('[data-testid="assigned_project_runners"]')).to have_content(project_runner.display_name)
-
- within '[data-testid="assigned_project_runners"]' do
- click_on 'Disable for this project'
- end
-
- expect(page.find('[data-testid="available_project_runners"]')).to have_content(project_runner.display_name)
end
end
- context 'shared runner text' do
- context 'when application settings have shared_runners_text' do
- let(:shared_runners_text) { 'custom **shared** runners description' }
- let(:shared_runners_html) { 'custom shared runners description' }
+ context 'when application settings have an unsafe link in shared_runners_text' do
+ let(:shared_runners_text) { '<a href="javascript:alert(\'xss\')">link</a>' }
- before do
- stub_application_setting(shared_runners_text: shared_runners_text)
- end
+ before do
+ stub_application_setting(shared_runners_text: shared_runners_text)
+ end
- it 'user sees shared runners description' do
- visit project_runners_path(project)
+ it 'user sees no link' do
+ visit project_runners_path(project)
- page.within("[data-testid='shared-runners-description']") do
- expect(page).not_to have_content('The same shared runner executes code from multiple projects')
- expect(page).to have_content(shared_runners_html)
- end
+ page.within("[data-testid='shared-runners-description']") do
+ expect(page).to have_content('link')
+ expect(page).not_to have_link('link')
end
end
+ end
- context 'when application settings have an unsafe link in shared_runners_text' do
- let(:shared_runners_text) { '<a href="javascript:alert(\'xss\')">link</a>' }
+ context 'when application settings have an unsafe image in shared_runners_text' do
+ let(:shared_runners_text) { '<img src="404.png" onerror="alert(\'xss\')"/>' }
- before do
- stub_application_setting(shared_runners_text: shared_runners_text)
- end
+ before do
+ stub_application_setting(shared_runners_text: shared_runners_text)
+ end
- it 'user sees no link' do
- visit project_runners_path(project)
+ it 'user sees image safely' do
+ visit project_runners_path(project)
- page.within("[data-testid='shared-runners-description']") do
- expect(page).to have_content('link')
- expect(page).not_to have_link('link')
- end
+ page.within("[data-testid='shared-runners-description']") do
+ expect(page).to have_css('img')
+ expect(page).not_to have_css('img[onerror]')
end
end
+ end
+ end
+ end
- context 'when application settings have an unsafe image in shared_runners_text' do
- let(:shared_runners_text) { '<img src="404.png" onerror="alert(\'xss\')"/>' }
+ context 'enable shared runners in project settings', :js do
+ before do
+ project.add_maintainer(user)
- before do
- stub_application_setting(shared_runners_text: shared_runners_text)
- end
+ visit project_runners_path(project)
+ end
- it 'user sees image safely' do
- visit project_runners_path(project)
+ context 'when a project has enabled shared_runners' do
+ let(:project) { create(:project, shared_runners_enabled: true) }
- page.within("[data-testid='shared-runners-description']") do
- expect(page).to have_css('img')
- expect(page).not_to have_css('img[onerror]')
- end
- end
- end
+ it 'shared runners toggle is on' do
+ expect(page).to have_selector('[data-testid="toggle-shared-runners"]')
+ expect(page).to have_selector('[data-testid="toggle-shared-runners"] .is-checked')
end
end
- context 'enable shared runners in project settings', :js do
- before do
- project.add_maintainer(user)
+ context 'when a project has disabled shared_runners' do
+ let(:project) { create(:project, shared_runners_enabled: false) }
- visit project_runners_path(project)
+ it 'shared runners toggle is off' do
+ expect(page).not_to have_selector('[data-testid="toggle-shared-runners"] .is-checked')
end
+ end
+ end
- context 'when a project has enabled shared_runners' do
- let(:project) { create(:project, shared_runners_enabled: true) }
+ context 'group runners in project settings' do
+ before do
+ project.add_maintainer(user)
+ end
- it 'shared runners toggle is on' do
- expect(page).to have_selector('[data-testid="toggle-shared-runners"]')
- expect(page).to have_selector('[data-testid="toggle-shared-runners"] .is-checked')
- end
+ let_it_be(:group) { create :group }
+ let_it_be(:project) { create :project, group: group }
+
+ context 'as project and group maintainer' do
+ before do
+ group.add_maintainer(user)
end
- context 'when a project has disabled shared_runners' do
- let(:project) { create(:project, shared_runners_enabled: false) }
+ context 'project with a group but no group runner' do
+ it 'group runners are not available' do
+ visit project_runners_path(project)
- it 'shared runners toggle is off' do
- expect(page).not_to have_selector('[data-testid="toggle-shared-runners"] .is-checked')
+ expect(page).not_to have_content 'To register them, go to the group\'s Runners page.'
+ expect(page).to have_content 'Ask your group owner to set up a group runner'
end
end
end
- context 'group runners in project settings' do
+ context 'as project maintainer and group owner' do
before do
- project.add_maintainer(user)
+ group.add_owner(user)
end
- let_it_be(:group) { create :group }
- let_it_be(:project) { create :project, group: group }
+ context 'project with a group but no group runner' do
+ it 'group runners are available' do
+ visit project_runners_path(project)
- context 'as project and group maintainer' do
- before do
- group.add_maintainer(user)
+ expect(page).to have_content 'This group does not have any group runners yet.'
+
+ expect(page).to have_content 'To register them, go to the group\'s Runners page.'
+ expect(page).not_to have_content 'Ask your group owner to set up a group runner'
end
+ end
+ end
- context 'project with a group but no group runner' do
- it 'group runners are not available' do
- visit project_runners_path(project)
+ context 'as project maintainer' do
+ context 'project without a group' do
+ let(:project) { create :project }
- expect(page).not_to have_content 'To register them, go to the group\'s Runners page.'
- expect(page).to have_content 'Ask your group owner to set up a group runner'
- end
+ it 'group runners are not available' do
+ visit project_runners_path(project)
+
+ expect(page).to have_content 'This project does not belong to a group and cannot make use of group runners.'
end
end
- context 'as project maintainer and group owner' do
- before do
- group.add_owner(user)
- end
+ context 'with group project' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
context 'project with a group but no group runner' do
- it 'group runners are available' do
+ it 'group runners are not available' do
visit project_runners_path(project)
expect(page).to have_content 'This group does not have any group runners yet.'
- expect(page).to have_content 'To register them, go to the group\'s Runners page.'
- expect(page).not_to have_content 'Ask your group owner to set up a group runner'
+ expect(page).not_to have_content 'To register them, go to the group\'s Runners page.'
+ expect(page).to have_content 'Ask your group owner to set up a group runner.'
end
end
- end
- context 'as project maintainer' do
- context 'project without a group' do
- let(:project) { create :project }
+ context 'project with a group and a group runner' do
+ let_it_be(:group_runner) do
+ create(:ci_runner, :group, groups: [group], description: 'group-runner')
+ end
- it 'group runners are not available' do
+ it 'group runners are available' do
visit project_runners_path(project)
- expect(page).to have_content 'This project does not belong to a group and cannot make use of group runners.'
+ expect(page).to have_content 'Available group runners: 1'
+ expect(page).to have_content 'group-runner'
end
- end
- context 'with group project' do
- let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, group: group) }
+ it 'group runners may be disabled for a project' do
+ visit project_runners_path(project)
- context 'project with a group but no group runner' do
- it 'group runners are not available' do
- visit project_runners_path(project)
+ click_on 'Disable group runners'
- expect(page).to have_content 'This group does not have any group runners yet.'
+ expect(page).to have_content 'Enable group runners'
+ expect(project.reload.group_runners_enabled).to be false
- expect(page).not_to have_content 'To register them, go to the group\'s Runners page.'
- expect(page).to have_content 'Ask your group owner to set up a group runner.'
- end
- end
+ click_on 'Enable group runners'
- context 'project with a group and a group runner' do
- let_it_be(:group_runner) do
- create(:ci_runner, :group, groups: [group], description: 'group-runner')
- end
+ expect(page).to have_content 'Disable group runners'
+ expect(project.reload.group_runners_enabled).to be true
+ end
- it 'group runners are available' do
- visit project_runners_path(project)
+ context 'when multiple group runners are configured' do
+ let_it_be(:group_runner_2) { create(:ci_runner, :group, groups: [group]) }
- expect(page).to have_content 'Available group runners: 1'
- expect(page).to have_content 'group-runner'
- end
-
- it 'group runners may be disabled for a project' do
+ it 'shows the runner count' do
visit project_runners_path(project)
- click_on 'Disable group runners'
-
- expect(page).to have_content 'Enable group runners'
- expect(project.reload.group_runners_enabled).to be false
-
- click_on 'Enable group runners'
-
- expect(page).to have_content 'Disable group runners'
- expect(project.reload.group_runners_enabled).to be true
- end
-
- context 'when multiple group runners are configured' do
- let_it_be(:group_runner_2) { create(:ci_runner, :group, groups: [group]) }
-
- it 'shows the runner count' do
- visit project_runners_path(project)
-
- within '[data-testid="group-runners"]' do
- expect(page).to have_content format(_('Available group runners: %{runners}'), { runners: 2 })
- end
+ within '[data-testid="group-runners"]' do
+ expect(page).to have_content format(_('Available group runners: %{runners}'), { runners: 2 })
end
+ end
- it 'adds pagination to the group runner list' do
- stub_const('Projects::Settings::CiCdController::NUMBER_OF_RUNNERS_PER_PAGE', 1)
+ it 'adds pagination to the group runner list' do
+ stub_const('Projects::Settings::CiCdController::NUMBER_OF_RUNNERS_PER_PAGE', 1)
- visit project_runners_path(project)
+ visit project_runners_path(project)
- within '[data-testid="group-runners"]' do
- expect(find('.pagination')).not_to be_nil
- end
+ within '[data-testid="group-runners"]' do
+ expect(find('.pagination')).not_to be_nil
end
end
end
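Note: the pagination assertions in the runner settings spec above all follow the same pattern: shrink the controller's page-size constant with stub_const so a second record is enough to force a second page, then look for the .pagination element. A minimal sketch of that pattern, assuming the project, user and sign-in setup that spec/features/runners_spec.rb already provides:

  # Sketch only: relies on `project`, `user` and the sign-in from the surrounding spec file.
  context 'when more runners exist than fit on one page' do
    let!(:extra_runner) { create(:ci_runner, :project, projects: [project]) }

    it 'paginates the runner list' do
      # Force a page size of 1 so two runners trigger pagination.
      stub_const('Projects::Settings::CiCdController::NUMBER_OF_RUNNERS_PER_PAGE', 1)

      visit project_runners_path(project)

      expect(find('.pagination')).not_to be_nil
    end
  end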
diff --git a/spec/features/search/user_searches_for_code_spec.rb b/spec/features/search/user_searches_for_code_spec.rb
index 976324a5032..d2847203669 100644
--- a/spec/features/search/user_searches_for_code_spec.rb
+++ b/spec/features/search/user_searches_for_code_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe 'User searches for code', :js, :disable_rate_limiter, feature_cat
using RSpec::Parameterized::TableSyntax
include ListboxHelpers
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
let_it_be_with_reload(:project) { create(:project, :repository, namespace: user.namespace) }
context 'when signed in' do
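Note: most of the search and user feature specs below switch from create(:user) to create(:user, :no_super_sidebar) so they keep exercising the pre-super-sidebar navigation. The trait itself lives in GitLab's user factory; the following is only a rough, illustrative FactoryBot sketch of how such an opt-out trait could be wired, and the attribute name is an assumption rather than the real definition:

  # Hypothetical sketch; the actual trait is defined in spec/factories/users.rb
  # and may set a different attribute.
  FactoryBot.define do
    factory :user do
      # ... base attributes ...

      trait :no_super_sidebar do
        after(:build) do |user|
          # Assumed preference flag controlling the new navigation.
          user.use_new_navigation = false
        end
      end
    end
  end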
diff --git a/spec/features/search/user_searches_for_comments_spec.rb b/spec/features/search/user_searches_for_comments_spec.rb
index f7af1797c71..f47e692c652 100644
--- a/spec/features/search/user_searches_for_comments_spec.rb
+++ b/spec/features/search/user_searches_for_comments_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'User searches for comments', :js, :disable_rate_limiter, feature_category: :global_search do
let_it_be(:project) { create(:project, :repository) }
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
before do
project.add_reporter(user)
diff --git a/spec/features/search/user_searches_for_commits_spec.rb b/spec/features/search/user_searches_for_commits_spec.rb
index 724daf9277d..140d8763813 100644
--- a/spec/features/search/user_searches_for_commits_spec.rb
+++ b/spec/features/search/user_searches_for_commits_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'User searches for commits', :js, :clean_gitlab_redis_rate_limiting, feature_category: :global_search do
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
let_it_be(:project) { create(:project, :repository) }
let(:sha) { '6d394385cf567f80a8fd85055db1ab4c5295806f' }
diff --git a/spec/features/search/user_searches_for_issues_spec.rb b/spec/features/search/user_searches_for_issues_spec.rb
index 9451e337db1..d816b393cce 100644
--- a/spec/features/search/user_searches_for_issues_spec.rb
+++ b/spec/features/search/user_searches_for_issues_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'User searches for issues', :js, :clean_gitlab_redis_rate_limiting, feature_category: :global_search do
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
let_it_be(:project) { create(:project, namespace: user.namespace) }
let!(:issue1) { create(:issue, title: 'issue Foo', project: project, created_at: 1.hour.ago) }
diff --git a/spec/features/search/user_searches_for_merge_requests_spec.rb b/spec/features/search/user_searches_for_merge_requests_spec.rb
index d7b52d9e07a..61af5e86eea 100644
--- a/spec/features/search/user_searches_for_merge_requests_spec.rb
+++ b/spec/features/search/user_searches_for_merge_requests_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'User searches for merge requests', :js, :clean_gitlab_redis_rate_limiting, feature_category: :global_search do
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
let_it_be(:project) { create(:project, namespace: user.namespace) }
let_it_be(:merge_request1) { create(:merge_request, title: 'Merge Request Foo', source_project: project, target_project: project, created_at: 1.hour.ago) }
let_it_be(:merge_request2) { create(:merge_request, :simple, title: 'Merge Request Bar', source_project: project, target_project: project) }
diff --git a/spec/features/search/user_searches_for_milestones_spec.rb b/spec/features/search/user_searches_for_milestones_spec.rb
index 7ca7958f61b..ad62c8eb3da 100644
--- a/spec/features/search/user_searches_for_milestones_spec.rb
+++ b/spec/features/search/user_searches_for_milestones_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'User searches for milestones', :js, :clean_gitlab_redis_rate_limiting,
feature_category: :global_search do
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
let_it_be(:project) { create(:project, namespace: user.namespace) }
let_it_be(:milestone1) { create(:milestone, title: 'Foo', project: project) }
let_it_be(:milestone2) { create(:milestone, title: 'Bar', project: project) }
diff --git a/spec/features/search/user_searches_for_projects_spec.rb b/spec/features/search/user_searches_for_projects_spec.rb
index 48a94161927..51e5ad85e2b 100644
--- a/spec/features/search/user_searches_for_projects_spec.rb
+++ b/spec/features/search/user_searches_for_projects_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'User searches for projects', :js, :disable_rate_limiter, feature
context 'when signed out' do
context 'when block_anonymous_global_searches is disabled' do
before do
- stub_feature_flags(block_anonymous_global_searches: false)
+ stub_feature_flags(block_anonymous_global_searches: false, super_sidebar_logged_out: false)
end
include_examples 'top right search form'
diff --git a/spec/features/search/user_searches_for_users_spec.rb b/spec/features/search/user_searches_for_users_spec.rb
index e0a07c5103d..b52f6aeba68 100644
--- a/spec/features/search/user_searches_for_users_spec.rb
+++ b/spec/features/search/user_searches_for_users_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe 'User searches for users', :js, :clean_gitlab_redis_rate_limiting, feature_category: :global_search do
- let_it_be(:user1) { create(:user, username: 'gob_bluth', name: 'Gob Bluth') }
- let_it_be(:user2) { create(:user, username: 'michael_bluth', name: 'Michael Bluth') }
- let_it_be(:user3) { create(:user, username: 'gob_2018', name: 'George Oscar Bluth') }
+ let_it_be(:user1) { create(:user, :no_super_sidebar, username: 'gob_bluth', name: 'Gob Bluth') }
+ let_it_be(:user2) { create(:user, :no_super_sidebar, username: 'michael_bluth', name: 'Michael Bluth') }
+ let_it_be(:user3) { create(:user, :no_super_sidebar, username: 'gob_2018', name: 'George Oscar Bluth') }
before do
sign_in(user1)
diff --git a/spec/features/search/user_searches_for_wiki_pages_spec.rb b/spec/features/search/user_searches_for_wiki_pages_spec.rb
index 65f262075f9..a5b63243d0b 100644
--- a/spec/features/search/user_searches_for_wiki_pages_spec.rb
+++ b/spec/features/search/user_searches_for_wiki_pages_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'User searches for wiki pages', :js, :clean_gitlab_redis_rate_limiting,
feature_category: :global_search do
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
let_it_be(:project) { create(:project, :repository, :wiki_repo, namespace: user.namespace) }
let_it_be(:wiki_page) do
create(:wiki_page, wiki: project.wiki, title: 'directory/title', content: 'Some Wiki content')
diff --git a/spec/features/search/user_uses_header_search_field_spec.rb b/spec/features/search/user_uses_header_search_field_spec.rb
index 71d0f8d6d7f..3f2a71b63dc 100644
--- a/spec/features/search/user_uses_header_search_field_spec.rb
+++ b/spec/features/search/user_uses_header_search_field_spec.rb
@@ -6,8 +6,8 @@ RSpec.describe 'User uses header search field', :js, :disable_rate_limiter, feat
include FilteredSearchHelpers
let_it_be(:project) { create(:project, :repository) }
- let_it_be(:reporter) { create(:user) }
- let_it_be(:developer) { create(:user) }
+ let_it_be(:reporter) { create(:user, :no_super_sidebar) }
+ let_it_be(:developer) { create(:user, :no_super_sidebar) }
let(:user) { reporter }
diff --git a/spec/features/sentry_js_spec.rb b/spec/features/sentry_js_spec.rb
index d3880011914..0cf32864b1e 100644
--- a/spec/features/sentry_js_spec.rb
+++ b/spec/features/sentry_js_spec.rb
@@ -41,6 +41,7 @@ RSpec.describe 'Sentry', feature_category: :error_tracking do
it 'loads sentry if sentry settings are enabled', :js do
allow(Gitlab::CurrentSettings).to receive(:sentry_enabled).and_return(true)
+ allow(Gitlab::CurrentSettings).to receive(:sentry_clientside_dsn).and_return('https://mockdsn@example.com/1')
visit new_user_session_path
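Note: the added stub above supplies a client-side DSN alongside the existing sentry_enabled stub on Gitlab::CurrentSettings. When several settings have to be stubbed together, rspec-mocks' receive_messages keeps it compact; a small equivalent sketch:

  # Equivalent to the two separate allow(...).to receive(...) stubs above.
  allow(Gitlab::CurrentSettings).to receive_messages(
    sentry_enabled: true,
    sentry_clientside_dsn: 'https://mockdsn@example.com/1' # dummy DSN from the spec
  )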
diff --git a/spec/features/signed_commits_spec.rb b/spec/features/signed_commits_spec.rb
index 0268c8ad0d4..08d2d0575eb 100644
--- a/spec/features/signed_commits_spec.rb
+++ b/spec/features/signed_commits_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe 'GPG signed commits', feature_category: :source_code_management do
- let(:project) { create(:project, :public, :repository) }
+RSpec.describe 'GPG signed commits', :js, feature_category: :source_code_management do
+ let_it_be(:project) { create(:project, :public, :repository) }
it 'changes from unverified to verified when the user changes their email to match the gpg key', :sidekiq_might_not_need_inline do
ref = GpgHelpers::SIGNED_AND_AUTHORED_SHA
@@ -47,7 +47,7 @@ RSpec.describe 'GPG signed commits', feature_category: :source_code_management d
expect(page).to have_selector('.gl-badge', text: 'Verified')
end
- context 'shows popover badges', :js do
+ context 'shows popover badges' do
let(:user_1) do
create :user, email: GpgHelpers::User1.emails.first, username: 'nannie.bernhard', name: 'Nannie Bernhard'
end
@@ -163,7 +163,7 @@ RSpec.describe 'GPG signed commits', feature_category: :source_code_management d
end
end
- context 'view signed commit on the tree view', :js do
+ context 'view signed commit on the tree view' do
shared_examples 'a commit with a signature' do
before do
visit project_tree_path(project, 'signed-commits')
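Note: the change above hoists the :js tag from individual contexts up to the top-level RSpec.describe, so every example in the file runs under the JavaScript-capable driver and the per-context tags become redundant. In plain RSpec, metadata set on a describe block is inherited by everything nested inside it, for example:

  # Minimal illustration of metadata inheritance (not GitLab-specific).
  RSpec.describe 'GPG signed commits', :js do
    context 'shows popover badges' do     # no :js needed here any more
      it 'renders the verified badge' do
        # runs with the :js metadata inherited from the describe block
      end
    end
  end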
diff --git a/spec/features/snippets/search_snippets_spec.rb b/spec/features/snippets/search_snippets_spec.rb
index afb53c563de..7a07299a14f 100644
--- a/spec/features/snippets/search_snippets_spec.rb
+++ b/spec/features/snippets/search_snippets_spec.rb
@@ -4,10 +4,11 @@ require 'spec_helper'
RSpec.describe 'Search Snippets', :js, feature_category: :global_search do
it 'user searches for snippets by title' do
+ user = create(:user, :no_super_sidebar)
public_snippet = create(:personal_snippet, :public, title: 'Beginning and Middle')
- private_snippet = create(:personal_snippet, :private, title: 'Middle and End')
+ private_snippet = create(:personal_snippet, :private, title: 'Middle and End', author: user)
- sign_in private_snippet.author
+ sign_in user
visit dashboard_snippets_path
submit_search('Middle')
diff --git a/spec/features/snippets/show_spec.rb b/spec/features/snippets/show_spec.rb
index 2673ad5e1d7..bbb120edb80 100644
--- a/spec/features/snippets/show_spec.rb
+++ b/spec/features/snippets/show_spec.rb
@@ -3,9 +3,13 @@
require 'spec_helper'
RSpec.describe 'Snippet', :js, feature_category: :source_code_management do
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
let_it_be(:snippet) { create(:personal_snippet, :public, :repository, author: user) }
+ before do
+ stub_feature_flags(super_sidebar_logged_out: false)
+ end
+
it_behaves_like 'show and render proper snippet blob' do
let(:anchor) { nil }
@@ -36,7 +40,7 @@ RSpec.describe 'Snippet', :js, feature_category: :source_code_management do
end
context 'when authenticated as a different user' do
- let_it_be(:different_user) { create(:user) }
+ let_it_be(:different_user) { create(:user, :no_super_sidebar) }
before do
sign_in(different_user)
diff --git a/spec/features/snippets/user_creates_snippet_spec.rb b/spec/features/snippets/user_creates_snippet_spec.rb
index 090d854081a..341cc150a64 100644
--- a/spec/features/snippets/user_creates_snippet_spec.rb
+++ b/spec/features/snippets/user_creates_snippet_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe 'User creates snippet', :js, feature_category: :source_code_manag
include DropzoneHelper
include Features::SnippetSpecHelpers
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
let(:title) { 'My Snippet Title' }
let(:file_content) { 'Hello World!' }
@@ -130,7 +130,7 @@ RSpec.describe 'User creates snippet', :js, feature_category: :source_code_manag
expect(page).not_to have_content(files_validation_message)
end
- it 'previews a snippet with file', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/408203' do
+ it 'previews a snippet with file' do
# Click placeholder first to expand full description field
snippet_fill_in_description('My Snippet')
dropzone_file Rails.root.join('spec', 'fixtures', 'banana_sample.gif')
@@ -145,7 +145,11 @@ RSpec.describe 'User creates snippet', :js, feature_category: :source_code_manag
# Adds a cache buster for checking if the image exists as Selenium is now handling the cached requests
# not anymore as requests when they come straight from memory cache.
# accept_confirm is needed because of https://gitlab.com/gitlab-org/gitlab/-/issues/262102
- reqs = inspect_requests { accept_confirm { visit("#{link}?ran=#{SecureRandom.base64(20)}") } }
+ reqs = inspect_requests do
+ visit("#{link}?ran=#{SecureRandom.base64(20)}") do
+ page.driver.browser.switch_to.alert.accept
+ end
+ end
expect(reqs.first.status_code).to eq(200)
end
end
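Note: the snippet spec above moves away from Capybara's accept_confirm wrapper and instead accepts the browser alert through the Selenium driver after the visit. Both idioms exist; a short sketch of the two forms, assuming a Selenium-backed Capybara session and using cache_busted_link as a placeholder name:

  # Capybara helper: wraps the action and accepts the confirm dialog it triggers.
  accept_confirm { visit(cache_busted_link) }

  # Raw Selenium form: perform the action, then accept the alert directly.
  visit(cache_busted_link)
  page.driver.browser.switch_to.alert.accept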
diff --git a/spec/features/task_lists_spec.rb b/spec/features/task_lists_spec.rb
index beadeab1736..24d63cadf00 100644
--- a/spec/features/task_lists_spec.rb
+++ b/spec/features/task_lists_spec.rb
@@ -6,8 +6,8 @@ RSpec.describe 'Task Lists', :js, feature_category: :team_planning do
include Warden::Test::Helpers
let_it_be(:project) { create(:project, :public, :repository) }
- let_it_be(:user) { create(:user) }
- let_it_be(:user2) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
+ let_it_be(:user2) { create(:user, :no_super_sidebar) }
let(:markdown) do
<<-MARKDOWN.strip_heredoc
diff --git a/spec/features/unsubscribe_links_spec.rb b/spec/features/unsubscribe_links_spec.rb
index 77ef3df97f6..b78efa65888 100644
--- a/spec/features/unsubscribe_links_spec.rb
+++ b/spec/features/unsubscribe_links_spec.rb
@@ -6,8 +6,8 @@ RSpec.describe 'Unsubscribe links', :sidekiq_inline, feature_category: :shared d
include Warden::Test::Helpers
let_it_be(:project) { create(:project, :public) }
- let_it_be(:author) { create(:user).tap { |u| project.add_reporter(u) } }
- let_it_be(:recipient) { create(:user) }
+ let_it_be(:author) { create(:user, :no_super_sidebar).tap { |u| project.add_reporter(u) } }
+ let_it_be(:recipient) { create(:user, :no_super_sidebar) }
let(:params) { { title: 'A bug!', description: 'Fix it!', assignee_ids: [recipient.id] } }
let(:issue) { Issues::CreateService.new(container: project, current_user: author, params: params).execute[:issue] }
@@ -22,6 +22,10 @@ RSpec.describe 'Unsubscribe links', :sidekiq_inline, feature_category: :shared d
end
context 'when logged out' do
+ before do
+ stub_feature_flags(super_sidebar_logged_out: false)
+ end
+
context 'when visiting the link from the body' do
it 'shows the unsubscribe confirmation page and redirects to root path when confirming' do
visit body_link
diff --git a/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb b/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb
index cd181f73473..5de544e866e 100644
--- a/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb
+++ b/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'User uploads avatar to profile', feature_category: :user_profile do
- let!(:user) { create(:user) }
+ let!(:user) { create(:user, :no_super_sidebar) }
let(:avatar_file_path) { Rails.root.join('spec', 'fixtures', 'dk.png') }
shared_examples 'upload avatar' do
diff --git a/spec/features/usage_stats_consent_spec.rb b/spec/features/usage_stats_consent_spec.rb
index c446fe1531b..92f7a944007 100644
--- a/spec/features/usage_stats_consent_spec.rb
+++ b/spec/features/usage_stats_consent_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'Usage stats consent', feature_category: :service_ping do
context 'when signed in' do
- let(:user) { create(:admin, created_at: 8.days.ago) }
+ let(:user) { create(:admin, :no_super_sidebar, created_at: 8.days.ago) }
let(:message) { 'To help improve GitLab, we would like to periodically collect usage information.' }
before do
diff --git a/spec/features/users/active_sessions_spec.rb b/spec/features/users/active_sessions_spec.rb
index 53a4c8a91e9..663d2283dbd 100644
--- a/spec/features/users/active_sessions_spec.rb
+++ b/spec/features/users/active_sessions_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'Active user sessions', :clean_gitlab_redis_sessions, feature_category: :system_access do
it 'successful login adds a new active user login' do
- user = create(:user)
+ user = create(:user, :no_super_sidebar)
now = Time.zone.parse('2018-03-12 09:06')
travel_to(now) do
@@ -31,7 +31,7 @@ RSpec.describe 'Active user sessions', :clean_gitlab_redis_sessions, feature_cat
end
it 'successful login cleans up obsolete entries' do
- user = create(:user)
+ user = create(:user, :no_super_sidebar)
Gitlab::Redis::Sessions.with do |redis|
redis.sadd?("session:lookup:user:gitlab:#{user.id}", '59822c7d9fcdfa03725eff41782ad97d')
@@ -45,7 +45,7 @@ RSpec.describe 'Active user sessions', :clean_gitlab_redis_sessions, feature_cat
end
it 'sessionless login does not clean up obsolete entries' do
- user = create(:user)
+ user = create(:user, :no_super_sidebar)
personal_access_token = create(:personal_access_token, user: user)
Gitlab::Redis::Sessions.with do |redis|
@@ -61,7 +61,7 @@ RSpec.describe 'Active user sessions', :clean_gitlab_redis_sessions, feature_cat
end
it 'logout deletes the active user login' do
- user = create(:user)
+ user = create(:user, :no_super_sidebar)
gitlab_sign_in(user)
expect(page).to have_current_path root_path, ignore_query: true
diff --git a/spec/features/users/anonymous_sessions_spec.rb b/spec/features/users/anonymous_sessions_spec.rb
index 83473964d6b..368f272ba23 100644
--- a/spec/features/users/anonymous_sessions_spec.rb
+++ b/spec/features/users/anonymous_sessions_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe 'Session TTLs', :clean_gitlab_redis_shared_state, feature_categor
end
it 'increases the TTL when the login succeeds' do
- user = create(:user)
+ user = create(:user, :no_super_sidebar)
gitlab_sign_in(user)
expect(page).to have_content(user.name)
diff --git a/spec/features/users/email_verification_on_login_spec.rb b/spec/features/users/email_verification_on_login_spec.rb
index 7675de28f86..d83040efd72 100644
--- a/spec/features/users/email_verification_on_login_spec.rb
+++ b/spec/features/users/email_verification_on_login_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
RSpec.describe 'Email Verification On Login', :clean_gitlab_redis_rate_limiting, :js, feature_category: :system_access do
include EmailHelpers
- let_it_be_with_reload(:user) { create(:user) }
- let_it_be(:another_user) { create(:user) }
+ let_it_be_with_reload(:user) { create(:user, :no_super_sidebar) }
+ let_it_be(:another_user) { create(:user, :no_super_sidebar) }
let_it_be(:new_email) { build_stubbed(:user).email }
let(:require_email_verification_enabled) { user }
@@ -220,7 +220,7 @@ RSpec.describe 'Email Verification On Login', :clean_gitlab_redis_rate_limiting,
shared_examples 'no email verification required when 2fa enabled or ff disabled' do
context 'when 2FA is enabled' do
- let_it_be(:user) { create(:user, :two_factor) }
+ let_it_be(:user) { create(:user, :no_super_sidebar, :two_factor) }
it_behaves_like 'no email verification required', two_factor_auth: true
end
@@ -234,8 +234,7 @@ RSpec.describe 'Email Verification On Login', :clean_gitlab_redis_rate_limiting,
describe 'when failing to login the maximum allowed number of times' do
before do
- # See comment in RequireEmailVerification::MAXIMUM_ATTEMPTS on why this is divided by 2
- (RequireEmailVerification::MAXIMUM_ATTEMPTS / 2).times do
+ RequireEmailVerification::MAXIMUM_ATTEMPTS.times do
gitlab_sign_in(user, password: 'wrong_password')
end
end
@@ -345,7 +344,7 @@ RSpec.describe 'Email Verification On Login', :clean_gitlab_redis_rate_limiting,
before do
perform_enqueued_jobs do
- (User.maximum_attempts / 2).times do
+ User.maximum_attempts.times do
gitlab_sign_in(user, password: 'wrong_password')
end
end
diff --git a/spec/features/users/login_spec.rb b/spec/features/users/login_spec.rb
index 047590fb3aa..c07e419be1f 100644
--- a/spec/features/users/login_spec.rb
+++ b/spec/features/users/login_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
- user = create(:user)
+ user = create(:user, :no_super_sidebar)
expect(user.reset_password_token).to be_nil
@@ -43,7 +43,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
# This behavior is dependent on there only being one user
User.delete_all
- user = create(:admin, password_automatically_set: true)
+ user = create(:admin, :no_super_sidebar, password_automatically_set: true)
visit root_path
expect(page).to have_current_path edit_user_password_path, ignore_query: true
@@ -77,7 +77,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
.and increment(:user_unauthenticated_counter)
.and increment(:user_session_destroyed_counter).twice
- user = create(:user, :blocked)
+ user = create(:user, :no_super_sidebar, :blocked)
gitlab_sign_in(user)
@@ -90,14 +90,14 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
.and increment(:user_unauthenticated_counter)
.and increment(:user_session_destroyed_counter).twice
- user = create(:user, :blocked)
+ user = create(:user, :no_super_sidebar, :blocked)
expect { gitlab_sign_in(user) }.not_to change { user.reload.sign_in_count }
end
end
describe 'with an unconfirmed email address' do
- let!(:user) { create(:user, confirmed_at: nil) }
+ let!(:user) { create(:user, :no_super_sidebar, confirmed_at: nil) }
let(:grace_period) { 2.days }
let(:alert_title) { 'Please confirm your email address' }
let(:alert_message) { "To continue, you need to select the link in the confirmation email we sent to verify your email address. If you didn't get our email, select Resend confirmation email" }
@@ -141,7 +141,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
end
context 'when resending the confirmation email' do
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
it 'redirects to the "almost there" page' do
visit new_user_confirmation_path
@@ -154,7 +154,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
end
describe 'with a disallowed password' do
- let(:user) { create(:user, :disallowed_password) }
+ let(:user) { create(:user, :no_super_sidebar, :disallowed_password) }
before do
expect(authentication_metrics)
@@ -180,7 +180,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
.to increment(:user_unauthenticated_counter)
.and increment(:user_password_invalid_counter)
- gitlab_sign_in(User.ghost)
+ gitlab_sign_in(Users::Internal.ghost)
expect(page).to have_content('Invalid login or password.')
end
@@ -190,8 +190,8 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
.to increment(:user_unauthenticated_counter)
.and increment(:user_password_invalid_counter)
- expect { gitlab_sign_in(User.ghost) }
- .not_to change { User.ghost.reload.sign_in_count }
+ expect { gitlab_sign_in(Users::Internal.ghost) }
+ .not_to change { Users::Internal.ghost.reload.sign_in_count }
end
end
@@ -286,6 +286,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
enter_code(code, only_two_factor_webauthn_enabled: only_two_factor_webauthn_enabled)
expect(page).to have_content('Invalid two-factor code.')
+ expect(user.reload.failed_attempts).to eq(1)
end
end
end
@@ -294,7 +295,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
# Freeze time to prevent failures when time between code being entered and
# validated greater than otp_allowed_drift
context 'with valid username/password', :freeze_time do
- let(:user) { create(:user, :two_factor) }
+ let(:user) { create(:user, :no_super_sidebar, :two_factor) }
before do
gitlab_sign_in(user, remember: true)
@@ -371,13 +372,13 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
end
context 'when user with TOTP enabled' do
- let(:user) { create(:user, :two_factor) }
+ let(:user) { create(:user, :no_super_sidebar, :two_factor) }
include_examples 'can login with recovery codes'
end
context 'when user with only Webauthn enabled' do
- let(:user) { create(:user, :two_factor_via_webauthn, registrations_count: 1) }
+ let(:user) { create(:user, :no_super_sidebar, :two_factor_via_webauthn, registrations_count: 1) }
include_examples 'can login with recovery codes', only_two_factor_webauthn_enabled: true
end
@@ -468,6 +469,12 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
end
describe 'without two-factor authentication' do
+ it 'renders sign in text for providers' do
+ visit new_user_session_path
+
+ expect(page).to have_content(_('or sign in with'))
+ end
+
it 'displays the remember me checkbox' do
visit new_user_session_path
@@ -487,7 +494,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
end
context 'with correct username and password' do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
it 'allows basic login' do
expect(authentication_metrics)
@@ -576,8 +583,8 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
end
end
- context 'with invalid username and password' do
- let(:user) { create(:user) }
+ context 'with correct username and invalid password' do
+ let(:user) { create(:user, :no_super_sidebar) }
it 'blocks invalid login' do
expect(authentication_metrics)
@@ -588,12 +595,13 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
expect_single_session_with_short_ttl
expect(page).to have_content('Invalid login or password.')
+ expect(user.reload.failed_attempts).to eq(1)
end
end
end
describe 'with required two-factor authentication enabled' do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
# TODO: otp_grace_period_started_at
@@ -631,7 +639,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
end
context 'after the grace period' do
- let(:user) { create(:user, otp_grace_period_started_at: 9999.hours.ago) }
+ let(:user) { create(:user, :no_super_sidebar, otp_grace_period_started_at: 9999.hours.ago) }
it 'redirects to two-factor configuration page' do
expect(authentication_metrics)
@@ -720,7 +728,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
end
context 'after the grace period' do
- let(:user) { create(:user, otp_grace_period_started_at: 9999.hours.ago) }
+ let(:user) { create(:user, :no_super_sidebar, otp_grace_period_started_at: 9999.hours.ago) }
it 'redirects to two-factor configuration page' do
expect(authentication_metrics)
@@ -911,7 +919,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
end
context 'when terms are enforced', :js do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
before do
enforce_terms
@@ -1082,7 +1090,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
end
context 'when sending confirmation email and not yet confirmed' do
- let!(:user) { create(:user, confirmed_at: nil) }
+ let!(:user) { create(:user, :no_super_sidebar, confirmed_at: nil) }
let(:grace_period) { 2.days }
let(:alert_title) { 'Please confirm your email address' }
let(:alert_message) { "To continue, you need to select the link in the confirmation email we sent to verify your email address. If you didn't get our email, select Resend confirmation email" }
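Note: several of the login examples above now also assert that a rejected login bumps the failed_attempts counter on the user record (maintained by Devise's lockable module). The pattern is simply to reload the user after the attempt; a sketch assuming the gitlab_sign_in helper:

  user = create(:user, :no_super_sidebar)

  gitlab_sign_in(user, password: 'wrong_password')

  expect(page).to have_content('Invalid login or password.')
  expect(user.reload.failed_attempts).to eq(1)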
diff --git a/spec/features/users/logout_spec.rb b/spec/features/users/logout_spec.rb
index c9839247e7d..d0e5be8dca3 100644
--- a/spec/features/users/logout_spec.rb
+++ b/spec/features/users/logout_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Logout/Sign out', :js, feature_category: :system_access do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
before do
sign_in(user)
diff --git a/spec/features/users/overview_spec.rb b/spec/features/users/overview_spec.rb
index fdd0c38a718..d1ff60b6069 100644
--- a/spec/features/users/overview_spec.rb
+++ b/spec/features/users/overview_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Overview tab on a user profile', :js, feature_category: :user_profile do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
let(:contributed_project) { create(:project, :public, :repository) }
def push_code_contribution
diff --git a/spec/features/users/rss_spec.rb b/spec/features/users/rss_spec.rb
index 2db58ce04a1..99451ac472d 100644
--- a/spec/features/users/rss_spec.rb
+++ b/spec/features/users/rss_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe 'User RSS', feature_category: :user_profile do
- let(:user) { create(:user) }
- let(:path) { user_path(create(:user)) }
+ let(:user) { create(:user, :no_super_sidebar) }
+ let(:path) { user_path(create(:user, :no_super_sidebar)) }
describe 'with "user_profile_overflow_menu_vue" feature flag off' do
before do
@@ -22,6 +22,7 @@ RSpec.describe 'User RSS', feature_category: :user_profile do
context 'when signed out' do
before do
+ stub_feature_flags(super_sidebar_logged_out: false)
visit path
end
@@ -45,6 +46,7 @@ RSpec.describe 'User RSS', feature_category: :user_profile do
context 'when signed out' do
before do
+ stub_feature_flags(super_sidebar_logged_out: false)
visit path
end
diff --git a/spec/features/users/show_spec.rb b/spec/features/users/show_spec.rb
index f8653b22377..522eb12f507 100644
--- a/spec/features/users/show_spec.rb
+++ b/spec/features/users/show_spec.rb
@@ -7,6 +7,10 @@ RSpec.describe 'User page', feature_category: :user_profile do
let_it_be(:user) { create(:user, bio: '<b>Lorem</b> <i>ipsum</i> dolor sit <a href="https://example.com">amet</a>') }
+ before do
+ stub_feature_flags(super_sidebar_logged_out: false)
+ end
+
subject(:visit_profile) { visit(user_path(user)) }
context 'with "user_profile_overflow_menu_vue" feature flag enabled', :js do
diff --git a/spec/features/users/signup_spec.rb b/spec/features/users/signup_spec.rb
index 450b9fa46b1..111c0cce1b1 100644
--- a/spec/features/users/signup_spec.rb
+++ b/spec/features/users/signup_spec.rb
@@ -234,7 +234,7 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
confirm_email
- expect(find_field('Username or email').value).to eq(new_user.email)
+ expect(find_field('Username or primary email').value).to eq(new_user.email)
end
end
@@ -332,7 +332,6 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
click_button 'Register'
expect(page).to have_current_path(users_sign_up_welcome_path), ignore_query: true
- visit new_project_path
select 'Software Developer', from: 'user_role'
click_button 'Get started!'
@@ -341,7 +340,7 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
expect(created_user.software_developer_role?).to be_truthy
expect(created_user.setup_for_company).to be_nil
- expect(page).to have_current_path(new_project_path)
+ expect(page).to have_current_path(dashboard_projects_path)
end
it_behaves_like 'Signup name validation', 'new_user_first_name', 127, 'First name'
@@ -388,7 +387,7 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
end
end
- it 'redirects to step 2 of the signup process, sets the role and redirects back' do
+ it 'allows visiting of a page after initial registration' do
visit new_user_registration_path
fill_in_signup_form
@@ -397,15 +396,6 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
visit new_project_path
- expect(page).to have_current_path(users_sign_up_welcome_path)
-
- select 'Software Developer', from: 'user_role'
- click_button 'Get started!'
-
- created_user = User.find_by_username(new_user.username)
-
- expect(created_user.software_developer_role?).to be_truthy
- expect(created_user.setup_for_company).to be_nil
expect(page).to have_current_path(new_project_path)
end
diff --git a/spec/features/users/snippets_spec.rb b/spec/features/users/snippets_spec.rb
index 2876351be37..98ac9fa5f92 100644
--- a/spec/features/users/snippets_spec.rb
+++ b/spec/features/users/snippets_spec.rb
@@ -4,10 +4,10 @@ require 'spec_helper'
RSpec.describe 'Snippets tab on a user profile', :js, feature_category: :source_code_management do
context 'when the user has snippets' do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
before do
- stub_feature_flags(profile_tabs_vue: false)
+ stub_feature_flags(profile_tabs_vue: false, super_sidebar_logged_out: false)
end
context 'pagination' do
@@ -30,7 +30,7 @@ RSpec.describe 'Snippets tab on a user profile', :js, feature_category: :source_
let!(:other_snippet) { create(:snippet, :public) }
it 'contains only internal and public snippets of a user when a user is logged in' do
- sign_in(create(:user))
+ sign_in(create(:user, :no_super_sidebar))
visit user_path(user)
page.within('.user-profile-nav') { click_link 'Snippets' }
wait_for_requests
diff --git a/spec/features/users/terms_spec.rb b/spec/features/users/terms_spec.rb
index cf62ccaf999..3495af3ae85 100644
--- a/spec/features/users/terms_spec.rb
+++ b/spec/features/users/terms_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe 'Users > Terms', :js, feature_category: :user_profile do
end
context 'when user is a project bot' do
- let(:project_bot) { create(:user, :project_bot) }
+ let(:project_bot) { create(:user, :no_super_sidebar, :project_bot) }
before do
enforce_terms
@@ -42,7 +42,7 @@ RSpec.describe 'Users > Terms', :js, feature_category: :user_profile do
end
context 'when user is a service account' do
- let(:service_account) { create(:user, :service_account) }
+ let(:service_account) { create(:user, :no_super_sidebar, :service_account) }
before do
enforce_terms
@@ -57,7 +57,7 @@ RSpec.describe 'Users > Terms', :js, feature_category: :user_profile do
end
context 'when signed in' do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
before do
sign_in(user)
diff --git a/spec/features/users/user_browses_projects_on_user_page_spec.rb b/spec/features/users/user_browses_projects_on_user_page_spec.rb
index 8bdc09f3f87..5e047192e7b 100644
--- a/spec/features/users/user_browses_projects_on_user_page_spec.rb
+++ b/spec/features/users/user_browses_projects_on_user_page_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Users > User browses projects on user page', :js, feature_category: :groups_and_projects do
- let!(:user) { create :user }
+ let!(:user) { create(:user, :no_super_sidebar) }
let!(:private_project) do
create :project, :private, name: 'private', namespace: user.namespace do |project|
project.add_maintainer(user)
@@ -29,7 +29,7 @@ RSpec.describe 'Users > User browses projects on user page', :js, feature_catego
end
before do
- stub_feature_flags(profile_tabs_vue: false)
+ stub_feature_flags(profile_tabs_vue: false, super_sidebar_logged_out: false)
end
it 'hides loading spinner after load', :js do
@@ -87,7 +87,7 @@ RSpec.describe 'Users > User browses projects on user page', :js, feature_catego
end
context 'when signed in as another user' do
- let(:another_user) { create :user }
+ let(:another_user) { create(:user, :no_super_sidebar) }
before do
sign_in(another_user)
diff --git a/spec/features/webauthn_spec.rb b/spec/features/webauthn_spec.rb
index 5c42facfa8b..52e2b375187 100644
--- a/spec/features/webauthn_spec.rb
+++ b/spec/features/webauthn_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'Using WebAuthn Devices for Authentication', :js, feature_categor
# TODO: it_behaves_like 'hardware device for 2fa', 'WebAuthn'
describe 'registration' do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
before do
gitlab_sign_in(user)
@@ -58,7 +58,8 @@ RSpec.describe 'Using WebAuthn Devices for Authentication', :js, feature_categor
gitlab_sign_out
# Second user
- user = gitlab_sign_in(:user)
+ user = create(:user, :no_super_sidebar)
+ gitlab_sign_in(user)
visit profile_account_path
enable_two_factor_authentication
webauthn_device_registration(webauthn_device: webauthn_device, name: 'My other device', password: user.password)
@@ -125,7 +126,7 @@ RSpec.describe 'Using WebAuthn Devices for Authentication', :js, feature_categor
it_behaves_like 'hardware device for 2fa', 'WebAuthn'
describe 'registration' do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
before do
gitlab_sign_in(user)
@@ -160,7 +161,8 @@ RSpec.describe 'Using WebAuthn Devices for Authentication', :js, feature_categor
gitlab_sign_out
# Second user
- user = gitlab_sign_in(:user)
+ user = create(:user, :no_super_sidebar)
+ gitlab_sign_in(user)
user.update_attribute(:otp_required_for_login, true)
visit profile_account_path
manage_two_factor_authentication
@@ -225,7 +227,7 @@ RSpec.describe 'Using WebAuthn Devices for Authentication', :js, feature_categor
describe 'authentication' do
let(:otp_required_for_login) { true }
- let(:user) { create(:user, webauthn_xid: WebAuthn.generate_user_id, otp_required_for_login: otp_required_for_login) }
+ let(:user) { create(:user, :no_super_sidebar, webauthn_xid: WebAuthn.generate_user_id, otp_required_for_login: otp_required_for_login) }
let!(:webauthn_device) do
add_webauthn_device(app_id, user)
end
@@ -254,7 +256,7 @@ RSpec.describe 'Using WebAuthn Devices for Authentication', :js, feature_categor
describe 'when a given WebAuthn device has already been registered by another user' do
describe 'but not the current user' do
- let(:other_user) { create(:user, webauthn_xid: WebAuthn.generate_user_id, otp_required_for_login: otp_required_for_login) }
+ let(:other_user) { create(:user, :no_super_sidebar, webauthn_xid: WebAuthn.generate_user_id, otp_required_for_login: otp_required_for_login) }
it 'does not allow logging in with that particular device' do
# Register other user with a different WebAuthn device
@@ -275,7 +277,8 @@ RSpec.describe 'Using WebAuthn Devices for Authentication', :js, feature_categor
it "allows logging in with that particular device" do
pending("support for passing credential options in FakeClient")
# Register current user with the same WebAuthn device
- current_user = gitlab_sign_in(:user)
+ current_user = create(:user, :no_super_sidebar)
+ gitlab_sign_in(current_user)
visit profile_account_path
manage_two_factor_authentication
register_webauthn_device(webauthn_device)
diff --git a/spec/features/whats_new_spec.rb b/spec/features/whats_new_spec.rb
index 3668d90f2e9..c8bcf5f6ef0 100644
--- a/spec/features/whats_new_spec.rb
+++ b/spec/features/whats_new_spec.rb
@@ -3,9 +3,13 @@
require "spec_helper"
RSpec.describe "renders a `whats new` dropdown item", feature_category: :onboarding do
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
context 'when not logged in' do
+ before do
+ stub_feature_flags(super_sidebar_logged_out: false)
+ end
+
it 'and on SaaS it renders', :saas do
visit user_path(user)
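Note: a recurring change in the feature specs above is disabling the super_sidebar_logged_out flag for logged-out pages (alongside the :no_super_sidebar factory trait for signed-in users). GitLab's stub_feature_flags helper accepts several flags in one call, so related flags can be turned off in a single before block:

  before do
    # Disable both flags for the duration of each example in this context.
    stub_feature_flags(super_sidebar_logged_out: false, profile_tabs_vue: false)
  end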
diff --git a/spec/finders/abuse_reports_finder_spec.rb b/spec/finders/abuse_reports_finder_spec.rb
index 0b641d0cb08..c3cf84d082f 100644
--- a/spec/finders/abuse_reports_finder_spec.rb
+++ b/spec/finders/abuse_reports_finder_spec.rb
@@ -17,17 +17,21 @@ RSpec.describe AbuseReportsFinder, feature_category: :insider_threat do
create(:abuse_report, :closed, category: 'phishing', user: user_2, reporter: reporter_2, id: 2)
end
- let(:params) { {} }
-
subject(:finder) { described_class.new(params).execute }
describe '#execute' do
- context 'when params is empty' do
+ shared_examples 'returns all abuse reports' do
it 'returns all abuse reports' do
expect(finder).to match_array([abuse_report_1, abuse_report_2])
end
end
+ context 'when params is empty' do
+ let(:params) { {} }
+
+ it_behaves_like 'returns all abuse reports'
+ end
+
shared_examples 'returns filtered reports' do |filter_field|
it "returns abuse reports filtered by #{filter_field}_id" do
expect(finder).to match_array(filtered_reports)
@@ -41,9 +45,7 @@ RSpec.describe AbuseReportsFinder, feature_category: :insider_threat do
.and_return(nil)
end
- it 'returns all abuse reports' do
- expect(finder).to match_array([abuse_report_1, abuse_report_2])
- end
+ it_behaves_like 'returns all abuse reports'
end
end
@@ -169,39 +171,5 @@ RSpec.describe AbuseReportsFinder, feature_category: :insider_threat do
end
end
end
-
- context 'when legacy view is enabled' do
- before do
- stub_feature_flags(abuse_reports_list: false)
- end
-
- context 'when params is empty' do
- it 'returns all abuse reports' do
- expect(subject).to match_array([abuse_report_1, abuse_report_2])
- end
- end
-
- context 'when params[:user_id] is present' do
- let(:params) { { user_id: user_1 } }
-
- it 'returns abuse reports for the specified user' do
- expect(subject).to match_array([abuse_report_1])
- end
- end
-
- context 'when sorting' do
- it 'returns reports sorted by id in descending order' do
- expect(subject).to match_array([abuse_report_2, abuse_report_1])
- end
- end
-
- context 'when any of the new filters are present such as params[:status]' do
- let(:params) { { status: 'open' } }
-
- it 'returns all abuse reports' do
- expect(subject).to match_array([abuse_report_1, abuse_report_2])
- end
- end
- end
end
end
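Note: the finder spec refactor above pulls the repeated "returns all abuse reports" expectation into a shared_examples block and references it with it_behaves_like from each context. The general RSpec shape, for reference:

  # Plain RSpec pattern used by the refactor above.
  shared_examples 'returns all abuse reports' do
    it 'returns all abuse reports' do
      expect(finder).to match_array([abuse_report_1, abuse_report_2])
    end
  end

  context 'when params is empty' do
    let(:params) { {} }

    it_behaves_like 'returns all abuse reports'
  end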
diff --git a/spec/finders/ci/jobs_finder_spec.rb b/spec/finders/ci/jobs_finder_spec.rb
index 0b3777a2fe8..57046baafab 100644
--- a/spec/finders/ci/jobs_finder_spec.rb
+++ b/spec/finders/ci/jobs_finder_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::JobsFinder, '#execute' do
+RSpec.describe Ci::JobsFinder, '#execute', feature_category: :continuous_integration do
let_it_be(:user) { create(:user) }
let_it_be(:admin) { create(:user, :admin) }
let_it_be(:project) { create(:project, :private, public_builds: false) }
@@ -13,8 +13,8 @@ RSpec.describe Ci::JobsFinder, '#execute' do
let(:params) { {} }
- context 'no project' do
- subject { described_class.new(current_user: current_user, params: params).execute }
+ context 'when project, pipeline, and runner are blank' do
+ subject(:finder_execute) { described_class.new(current_user: current_user, params: params).execute }
context 'with admin' do
let(:current_user) { admin }
@@ -34,43 +34,139 @@ RSpec.describe Ci::JobsFinder, '#execute' do
end
end
- context 'with normal user' do
- let(:current_user) { user }
+ context 'with admin and admin mode enabled', :enable_admin_mode do
+ let(:current_user) { admin }
- it { is_expected.to be_empty }
- end
+ context 'with param `scope`' do
+ using RSpec::Parameterized::TableSyntax
- context 'without user' do
- let(:current_user) { nil }
+ where(:scope, :expected_jobs) do
+ 'pending' | lazy { [pending_job] }
+ 'running' | lazy { [running_job] }
+ 'finished' | lazy { [successful_job] }
+ %w[running success] | lazy { [running_job, successful_job] }
+ end
- it { is_expected.to be_empty }
- end
+ with_them do
+ let(:params) { { scope: scope } }
- context 'with scope', :enable_admin_mode do
- let(:current_user) { admin }
- let(:jobs) { [pending_job, running_job, successful_job] }
+ it { is_expected.to match_array(expected_jobs) }
+ end
+ end
- using RSpec::Parameterized::TableSyntax
+ context 'with param `runner_type`' do
+ let_it_be(:job_with_group_runner) { create(:ci_build, :success, runner: create(:ci_runner, :group)) }
+ let_it_be(:job_with_instance_runner) { create(:ci_build, :success, runner: create(:ci_runner, :instance)) }
+ let_it_be(:job_with_project_runner) { create(:ci_build, :success, runner: create(:ci_runner, :project)) }
+
+ context 'with feature flag :admin_jobs_filter_runner_type enabled' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:runner_type, :expected_jobs) do
+ 'group_type' | lazy { [job_with_group_runner] }
+ 'instance_type' | lazy { [job_with_instance_runner] }
+ 'project_type' | lazy { [job_with_project_runner] }
+ %w[instance_type project_type] | lazy { [job_with_instance_runner, job_with_project_runner] }
+ end
+
+ with_them do
+ let(:params) { { runner_type: runner_type } }
+ it { is_expected.to match_array(expected_jobs) }
+ end
+ end
- where(:scope, :expected_jobs) do
- 'pending' | lazy { [pending_job] }
- 'running' | lazy { [running_job] }
- 'finished' | lazy { [successful_job] }
- %w[running success] | lazy { [running_job, successful_job] }
+ context 'with feature flag :admin_jobs_filter_runner_type disabled' do
+ let(:params) { { runner_type: 'instance_type' } }
+ let(:expected_jobs) do
+ [
+ job_with_group_runner,
+ job_with_instance_runner,
+ job_with_project_runner,
+ pending_job,
+ running_job,
+ successful_job
+ ]
+ end
+
+ before do
+ stub_feature_flags(admin_jobs_filter_runner_type: false)
+ end
+
+ it { is_expected.to match_array(expected_jobs) }
+ end
end
- with_them do
- let(:params) { { scope: scope } }
+ context "with params" do
+ let_it_be(:job_with_running_status_and_group_runner) do
+ create(:ci_build, :running, runner: create(:ci_runner, :group))
+ end
+
+ let_it_be(:job_with_instance_runner) { create(:ci_build, :success, runner: create(:ci_runner, :instance)) }
+ let_it_be(:job_with_project_runner) { create(:ci_build, :success, runner: create(:ci_runner, :project)) }
+
+ context 'with feature flag :admin_jobs_filter_runner_type enabled' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:param_runner_type, :param_scope, :expected_jobs) do
+ 'group_type' | 'running' | lazy { [job_with_running_status_and_group_runner] }
+ %w[instance_type project_type] | 'finished' | lazy { [job_with_instance_runner, job_with_project_runner] }
+ %w[instance_type project_type] | 'pending' | lazy { [] }
+ end
+
+ with_them do
+ let(:params) { { runner_type: param_runner_type, scope: param_scope } }
+
+ it { is_expected.to match_array(expected_jobs) }
+ end
+ end
- it { is_expected.to match_array(expected_jobs) }
+ context 'with feature flag :admin_jobs_filter_runner_type disabled' do
+ before do
+ stub_feature_flags(admin_jobs_filter_runner_type: false)
+ end
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:param_runner_type, :param_scope, :expected_jobs) do
+ 'group_type' | 'running' | lazy do
+ [job_with_running_status_and_group_runner, running_job]
+ end
+ %w[instance_type project_type] | 'finished' | lazy do
+ [
+ job_with_instance_runner,
+ job_with_project_runner,
+ successful_job
+ ]
+ end
+ %w[instance_type project_type] | 'pending' | lazy { [pending_job] }
+ end
+
+ with_them do
+ let(:params) { { runner_type: param_runner_type, scope: param_scope } }
+
+ it { is_expected.to match_array(expected_jobs) }
+ end
+ end
end
end
+
+ context 'with user not being project member' do
+ let(:current_user) { user }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'without user' do
+ let(:current_user) { nil }
+
+ it { is_expected.to be_empty }
+ end
end
- context 'a project is present' do
+ context 'when project is present' do
subject { described_class.new(current_user: user, project: project, params: params).execute }
- context 'user has access to the project' do
+ context 'with user being project maintainer' do
before do
project.add_maintainer(user)
end
@@ -78,9 +174,47 @@ RSpec.describe Ci::JobsFinder, '#execute' do
it 'returns jobs for the specified project' do
expect(subject).to match_array([successful_job])
end
+
+ context 'when artifacts are present for some jobs' do
+ let_it_be(:job_with_artifacts) { create(:ci_build, :success, pipeline: pipeline, name: 'test') }
+ let_it_be(:artifact) { create(:ci_job_artifact, job: job_with_artifacts) }
+
+ context 'when with_artifacts is true' do
+ let(:params) { { with_artifacts: true } }
+
+ it 'returns only jobs with artifacts' do
+ expect(subject).to match_array([job_with_artifacts])
+ end
+ end
+
+ context 'when with_artifacts is false' do
+ let(:params) { { with_artifacts: false } }
+
+ it 'returns all jobs' do
+ expect(subject).to match_array([successful_job, job_with_artifacts])
+ end
+ end
+
+ context "with param `scope" do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:param_scope, :expected_jobs) do
+ 'success' | lazy { [successful_job, job_with_artifacts] }
+ '[success pending]' | lazy { [successful_job, job_with_artifacts] }
+ 'pending' | lazy { [] }
+ nil | lazy { [successful_job, job_with_artifacts] }
+ end
+
+ with_them do
+ let(:params) { { with_artifacts: false, scope: param_scope } }
+
+ it { is_expected.to match_array(expected_jobs) }
+ end
+ end
+ end
end
- context 'user has no access to project builds' do
+ context 'with user being project guest' do
before do
project.add_guest(user)
end
@@ -99,79 +233,80 @@ RSpec.describe Ci::JobsFinder, '#execute' do
end
end
- context 'when artifacts are present for some jobs' do
- let_it_be(:job_with_artifacts) { create(:ci_build, :success, pipeline: pipeline, name: 'test') }
- let_it_be(:artifact) { create(:ci_job_artifact, job: job_with_artifacts) }
-
- subject { described_class.new(current_user: user, project: project, params: params).execute }
-
- before do
- project.add_maintainer(user)
- end
-
- context 'when with_artifacts is true' do
- let(:params) { { with_artifacts: true } }
+ context 'when pipeline is present' do
+ subject { described_class.new(current_user: user, pipeline: pipeline, params: params).execute }
- it 'returns only jobs with artifacts' do
- expect(subject).to match_array([job_with_artifacts])
+ context 'with user being project maintainer' do
+ before_all do
+ project.add_maintainer(user)
+ successful_job.update!(retried: true)
end
- end
- context 'when with_artifacts is false' do
- let(:params) { { with_artifacts: false } }
+ let_it_be(:job_4) { create(:ci_build, :success, pipeline: pipeline, name: 'build') }
- it 'returns all jobs' do
- expect(subject).to match_array([successful_job, job_with_artifacts])
+ it 'does not return retried jobs by default' do
+ expect(subject).to match_array([job_4])
end
- end
- end
-
- context 'when pipeline is present' do
- before_all do
- project.add_maintainer(user)
- successful_job.update!(retried: true)
- end
-
- let_it_be(:job_4) { create(:ci_build, :success, pipeline: pipeline, name: 'build') }
- subject { described_class.new(current_user: user, pipeline: pipeline, params: params).execute }
+ context 'when include_retried is false' do
+ let(:params) { { include_retried: false } }
- it 'does not return retried jobs by default' do
- expect(subject).to match_array([job_4])
- end
+ it 'does not return retried jobs' do
+ expect(subject).to match_array([job_4])
+ end
+ end
- context 'when include_retried is false' do
- let(:params) { { include_retried: false } }
+ context 'when include_retried is true' do
+ let(:params) { { include_retried: true } }
- it 'does not return retried jobs' do
- expect(subject).to match_array([job_4])
+ it 'returns retried jobs' do
+ expect(subject).to match_array([successful_job, job_4])
+ end
end
end
- context 'when include_retried is true' do
- let(:params) { { include_retried: true } }
+ context 'without user' do
+ let(:user) { nil }
- it 'returns retried jobs' do
- expect(subject).to match_array([successful_job, job_4])
+ it 'returns no jobs' do
+ expect(subject).to be_empty
end
end
end
- context 'a runner is present' do
+ context 'when runner is present' do
let_it_be(:runner) { create(:ci_runner, :project, projects: [project]) }
let_it_be(:job_4) { create(:ci_build, :success, runner: runner) }
- subject { described_class.new(current_user: user, runner: runner, params: params).execute }
+ subject(:execute) { described_class.new(current_user: user, runner: runner, params: params).execute }
- context 'user has access to the runner', :enable_admin_mode do
+ context 'when current user is an admin' do
let(:user) { admin }
- it 'returns jobs for the specified project' do
- expect(subject).to match_array([job_4])
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'returns jobs for the specified project' do
+ expect(subject).to contain_exactly job_4
+ end
+
+ context 'with params' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:param_runner_type, :param_scope, :expected_jobs) do
+ 'project_type' | 'success' | lazy { [job_4] }
+ 'instance_type' | nil | lazy { [] }
+ nil | 'pending' | lazy { [] }
+ end
+
+ with_them do
+ let(:params) { { runner_type: param_runner_type, scope: param_scope } }
+
+ it { is_expected.to match_array(expected_jobs) }
+ end
+ end
end
end
- context 'user has no access to project builds' do
+ context 'with user being project guest' do
let_it_be(:guest) { create(:user) }
let(:user) { guest }
diff --git a/spec/finders/ci/runners_finder_spec.rb b/spec/finders/ci/runners_finder_spec.rb
index e57ad5bc76d..5d249ddb391 100644
--- a/spec/finders/ci/runners_finder_spec.rb
+++ b/spec/finders/ci/runners_finder_spec.rb
@@ -222,12 +222,14 @@ RSpec.describe Ci::RunnersFinder, feature_category: :runner_fleet do
end
shared_examples 'executes as normal user' do
- it 'returns no runners' do
+ it 'raises Gitlab::Access::AccessDeniedError' do
user = create :user
create :ci_runner, active: true
create :ci_runner, active: false
- expect(described_class.new(current_user: user, params: {}).execute).to be_empty
+ expect do
+ described_class.new(current_user: user, params: {}).execute
+ end.to raise_error(Gitlab::Access::AccessDeniedError)
end
end
@@ -250,12 +252,14 @@ RSpec.describe Ci::RunnersFinder, feature_category: :runner_fleet do
end
context 'when user is nil' do
- it 'returns no runners' do
+ it 'raises Gitlab::Access::AccessDeniedError' do
user = nil
create :ci_runner, active: true
create :ci_runner, active: false
- expect(described_class.new(current_user: user, params: {}).execute).to be_empty
+ expect do
+ described_class.new(current_user: user, params: {}).execute
+ end.to raise_error(Gitlab::Access::AccessDeniedError)
end
end
end
@@ -306,154 +310,162 @@ RSpec.describe Ci::RunnersFinder, feature_category: :runner_fleet do
shared_examples 'membership equal to :descendants' do
it 'returns all descendant runners' do
- expect(subject).to eq([runner_project_7, runner_project_6, runner_project_5,
- runner_project_4, runner_project_3, runner_project_2,
- runner_project_1, runner_sub_group_4, runner_sub_group_3,
- runner_sub_group_2, runner_sub_group_1, runner_group])
+ is_expected.to contain_exactly(
+ runner_project_7, runner_project_6, runner_project_5,
+ runner_project_4, runner_project_3, runner_project_2,
+ runner_project_1, runner_sub_group_4, runner_sub_group_3,
+ runner_sub_group_2, runner_sub_group_1, runner_group)
end
end
- context 'with user as group owner' do
- before do
- group.add_owner(user)
+ context 'with user as group maintainer or owner' do
+ where(:user_role) do
+ [GroupMember::OWNER, GroupMember::MAINTAINER]
end
- context 'with :group as target group' do
- let(:target_group) { group }
-
- context 'passing no membership params' do
- it_behaves_like 'membership equal to :descendants'
+ with_them do
+ before do
+ group.add_member(user, user_role)
end
- context 'with :descendants membership' do
- let(:membership) { :descendants }
+ context 'with :group as target group' do
+ let(:target_group) { group }
- it_behaves_like 'membership equal to :descendants'
- end
+ context 'passing no membership params' do
+ it_behaves_like 'membership equal to :descendants'
+ end
- context 'with :direct membership' do
- let(:membership) { :direct }
+ context 'with :descendants membership' do
+ let(:membership) { :descendants }
- it 'returns runners belonging to group' do
- expect(subject).to eq([runner_group])
+ it_behaves_like 'membership equal to :descendants'
end
- end
- context 'with :all_available membership' do
- let(:membership) { :all_available }
+ context 'with :direct membership' do
+ let(:membership) { :direct }
- it 'returns runners available to group' do
- expect(subject).to match_array([runner_project_7, runner_project_6, runner_project_5,
- runner_project_4, runner_project_3, runner_project_2,
- runner_project_1, runner_sub_group_4, runner_sub_group_3,
- runner_sub_group_2, runner_sub_group_1, runner_group, runner_instance])
+ it 'returns runners belonging to group' do
+ is_expected.to contain_exactly(runner_group)
+ end
end
- end
- context 'with unknown membership' do
- let(:membership) { :unsupported }
+ context 'with :all_available membership' do
+ let(:membership) { :all_available }
- it 'raises an error' do
- expect { subject }.to raise_error(ArgumentError, 'Invalid membership filter')
+ it 'returns runners available to group' do
+ is_expected.to contain_exactly(
+ runner_project_7, runner_project_6, runner_project_5,
+ runner_project_4, runner_project_3, runner_project_2,
+ runner_project_1, runner_sub_group_4, runner_sub_group_3,
+ runner_sub_group_2, runner_sub_group_1, runner_group, runner_instance)
+ end
end
- end
- context 'with nil group' do
- let(:target_group) { nil }
+ context 'with unknown membership' do
+ let(:membership) { :unsupported }
- it 'returns no runners' do
- # Query should run against all runners, however since user is not admin, query returns no results
- expect(subject).to eq([])
+ it 'raises an error' do
+ expect { subject }.to raise_error(ArgumentError, 'Invalid membership filter')
+ end
end
- end
- context 'with sort param' do
- let(:extra_params) { { sort: 'contacted_asc' } }
+ context 'with nil group' do
+ let(:target_group) { nil }
- it 'sorts by specified attribute' do
- expect(subject).to eq([runner_group, runner_sub_group_1, runner_sub_group_2,
- runner_sub_group_3, runner_sub_group_4, runner_project_1,
- runner_project_2, runner_project_3, runner_project_4,
- runner_project_5, runner_project_6, runner_project_7])
+ it 'raises Gitlab::Access::AccessDeniedError' do
+ # Query should run against all runners; however, since the user is not an admin, an AccessDeniedError is raised
+ expect { execute }.to raise_error(Gitlab::Access::AccessDeniedError)
+ end
end
- end
- context 'filtering' do
- context 'by search term' do
- let(:extra_params) { { search: 'runner_project_search' } }
+ context 'with sort param' do
+ let(:extra_params) { { sort: 'contacted_asc' } }
- it 'returns correct runner' do
- expect(subject).to match_array([runner_project_3])
+ it 'sorts by specified attribute' do
+ expect(subject).to eq([runner_group, runner_sub_group_1, runner_sub_group_2,
+ runner_sub_group_3, runner_sub_group_4, runner_project_1,
+ runner_project_2, runner_project_3, runner_project_4,
+ runner_project_5, runner_project_6, runner_project_7])
end
end
- context 'by active status' do
- let(:extra_params) { { active: false } }
+ context 'filtering' do
+ context 'by search term' do
+ let(:extra_params) { { search: 'runner_project_search' } }
- it 'returns correct runner' do
- expect(subject).to match_array([runner_sub_group_1])
+ it 'returns correct runner' do
+ expect(subject).to match_array([runner_project_3])
+ end
end
- end
- context 'by status' do
- let(:extra_params) { { status_status: 'paused' } }
+ context 'by active status' do
+ let(:extra_params) { { active: false } }
- it 'returns correct runner' do
- expect(subject).to match_array([runner_sub_group_1])
+ it 'returns correct runner' do
+ expect(subject).to match_array([runner_sub_group_1])
+ end
end
- end
- context 'by tag_name' do
- let(:extra_params) { { tag_name: %w[runner_tag] } }
+ context 'by status' do
+ let(:extra_params) { { status_status: 'paused' } }
- it 'returns correct runner' do
- expect(subject).to match_array([runner_project_5])
+ it 'returns correct runner' do
+ expect(subject).to match_array([runner_sub_group_1])
+ end
end
- end
- context 'by runner type' do
- let(:extra_params) { { type_type: 'project_type' } }
+ context 'by tag_name' do
+ let(:extra_params) { { tag_name: %w[runner_tag] } }
- it 'returns correct runners' do
- expect(subject).to eq([runner_project_7, runner_project_6,
- runner_project_5, runner_project_4,
- runner_project_3, runner_project_2, runner_project_1])
+ it 'returns correct runner' do
+ expect(subject).to match_array([runner_project_5])
+ end
+ end
+
+ context 'by runner type' do
+ let(:extra_params) { { type_type: 'project_type' } }
+
+ it 'returns correct runners' do
+ expect(subject).to eq([runner_project_7, runner_project_6,
+ runner_project_5, runner_project_4,
+ runner_project_3, runner_project_2, runner_project_1])
+ end
end
end
end
end
end
- context 'when user is not group owner' do
- where(:user_permission) do
- [:maintainer, :developer, :reporter, :guest]
+ context 'when user is group developer or below' do
+ where(:user_role) do
+ [GroupMember::DEVELOPER, GroupMember::REPORTER, GroupMember::GUEST]
end
with_them do
before do
- create(:group_member, user_permission, group: sub_group_1, user: user)
+ group.add_member(user, user_role)
end
context 'with :sub_group_1 as target group' do
let(:target_group) { sub_group_1 }
- it 'returns no runners' do
- is_expected.to be_empty
+ it 'raises Gitlab::Access::AccessDeniedError' do
+ expect { execute }.to raise_error(Gitlab::Access::AccessDeniedError)
end
end
context 'with :group as target group' do
let(:target_group) { group }
- it 'returns no runners' do
- is_expected.to be_empty
+ it 'raises Gitlab::Access::AccessDeniedError' do
+ expect { execute }.to raise_error(Gitlab::Access::AccessDeniedError)
end
context 'with :all_available membership' do
let(:membership) { :all_available }
- it 'returns no runners' do
- expect(subject).to be_empty
+ it 'raises Gitlab::Access::AccessDeniedError' do
+ expect { execute }.to raise_error(Gitlab::Access::AccessDeniedError)
end
end
end
@@ -461,35 +473,31 @@ RSpec.describe Ci::RunnersFinder, feature_category: :runner_fleet do
end
context 'when user has no access' do
- it 'returns no runners' do
- expect(subject).to be_empty
+ it 'raises Gitlab::Access::AccessDeniedError' do
+ expect { execute }.to raise_error(Gitlab::Access::AccessDeniedError)
end
end
context 'when user is nil' do
- let_it_be(:user) { nil }
+ let(:user) { nil }
- it 'returns no runners' do
- expect(subject).to be_empty
+ it 'raises Gitlab::Access::AccessDeniedError' do
+ expect { execute }.to raise_error(Gitlab::Access::AccessDeniedError)
end
end
end
describe '#sort_key' do
- subject { described_class.new(current_user: user, params: params.merge(group: group)).sort_key }
+ subject(:sort_key) { described_class.new(current_user: user, params: params.merge(group: group)).sort_key }
context 'without params' do
- it 'returns created_at_desc' do
- expect(subject).to eq('created_at_desc')
- end
+ it { is_expected.to eq('created_at_desc') }
end
context 'with params' do
let(:extra_params) { { sort: 'contacted_asc' } }
- it 'returns contacted_asc' do
- expect(subject).to eq('contacted_asc')
- end
+ it { is_expected.to eq('contacted_asc') }
end
end
end
@@ -504,7 +512,7 @@ RSpec.describe Ci::RunnersFinder, feature_category: :runner_fleet do
let(:params) { { project: project }.merge(extra_params).reject { |_, v| v.nil? } }
describe '#execute' do
- subject { described_class.new(current_user: user, params: params).execute }
+ subject(:execute) { described_class.new(current_user: user, params: params).execute }
context 'with user as project admin' do
before do
@@ -515,7 +523,7 @@ RSpec.describe Ci::RunnersFinder, feature_category: :runner_fleet do
let_it_be(:runner_project) { create(:ci_runner, :project, contacted_at: 7.minutes.ago, projects: [project]) }
it 'returns runners available to project' do
- expect(subject).to match_array([runner_project])
+ is_expected.to match_array([runner_project])
end
end
@@ -524,7 +532,7 @@ RSpec.describe Ci::RunnersFinder, feature_category: :runner_fleet do
let_it_be(:runner_group) { create(:ci_runner, :group, contacted_at: 12.minutes.ago, groups: [group]) }
it 'returns runners available to project' do
- expect(subject).to match_array([runner_instance, runner_group])
+ is_expected.to match_array([runner_instance, runner_group])
end
end
@@ -610,24 +618,24 @@ RSpec.describe Ci::RunnersFinder, feature_category: :runner_fleet do
project.add_developer(user)
end
- it 'returns no runners' do
- expect(subject).to be_empty
+ it 'raises Gitlab::Access::AccessDeniedError' do
+ expect { execute }.to raise_error(Gitlab::Access::AccessDeniedError)
end
end
context 'when user is nil' do
let_it_be(:user) { nil }
- it 'returns no runners' do
- expect(subject).to be_empty
+ it 'raises Gitlab::Access::AccessDeniedError' do
+ expect { execute }.to raise_error(Gitlab::Access::AccessDeniedError)
end
end
context 'with nil project_full_path' do
let(:project_full_path) { nil }
- it 'returns no runners' do
- expect(subject).to be_empty
+ it 'raises Gitlab::Access::AccessDeniedError' do
+ expect { execute }.to raise_error(Gitlab::Access::AccessDeniedError)
end
end
end
diff --git a/spec/finders/ci/triggers_finder_spec.rb b/spec/finders/ci/triggers_finder_spec.rb
new file mode 100644
index 00000000000..2df79e8f023
--- /dev/null
+++ b/spec/finders/ci/triggers_finder_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::TriggersFinder, feature_category: :continuous_integration do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:trigger) { create(:ci_trigger, project: project) }
+
+ subject { described_class.new(current_user, project).execute }
+
+ describe "#execute" do
+ context 'when the current user is authorized' do
+ before_all do
+ project.add_owner(current_user)
+ end
+
+ it 'returns list of trigger tokens' do
+ expect(subject).to contain_exactly(trigger)
+ end
+ end
+
+ context 'when the current user is not authorized' do
+ it 'does not return trigger tokens' do
+ expect(subject).to be_blank
+ end
+ end
+ end
+end
diff --git a/spec/finders/deployments_finder_spec.rb b/spec/finders/deployments_finder_spec.rb
index 5a803ee2a0d..807a7ca8e26 100644
--- a/spec/finders/deployments_finder_spec.rb
+++ b/spec/finders/deployments_finder_spec.rb
@@ -280,6 +280,22 @@ RSpec.describe DeploymentsFinder, feature_category: :deployment_management do
it { is_expected.to match_array([deployment_2]) }
end
end
+
+ context 'with mixed deployable types' do
+ let!(:deployment_1) do
+ create(:deployment, :success, project: project, deployable: create(:ci_build))
+ end
+
+ let!(:deployment_2) do
+ create(:deployment, :success, project: project, deployable: create(:ci_bridge))
+ end
+
+ let(:params) { { **base_params, status: 'success' } }
+
+ it 'successfully fetches deployments' do
+ is_expected.to contain_exactly(deployment_1, deployment_2)
+ end
+ end
end
context 'at group scope' do
diff --git a/spec/finders/group_members_finder_spec.rb b/spec/finders/group_members_finder_spec.rb
index 18473a5e70b..b8a5be44241 100644
--- a/spec/finders/group_members_finder_spec.rb
+++ b/spec/finders/group_members_finder_spec.rb
@@ -288,4 +288,39 @@ RSpec.describe GroupMembersFinder, '#execute', feature_category: :groups_and_pro
end
end
end
+
+ context 'filter by non-invite' do
+ let_it_be(:member) { group.add_maintainer(user1) }
+ let_it_be(:invited_member) do
+ create(:group_member, :invited, { user: user2, group: group })
+ end
+
+ context 'params is not passed in' do
+ subject { described_class.new(group, user1).execute }
+
+ it 'does not filter members by invite' do
+ expect(subject).to match_array([member, invited_member])
+ end
+ end
+
+ context 'params is passed in' do
+ subject { described_class.new(group, user1, params: { non_invite: non_invite_param }).execute }
+
+ context 'filtering is set to false' do
+ let(:non_invite_param) { false }
+
+ it 'does not filter members by invite' do
+ expect(subject).to match_array([member, invited_member])
+ end
+ end
+
+ context 'filtering is set to true' do
+ let(:non_invite_param) { true }
+
+ it 'filters members by invite' do
+ expect(subject).to match_array([member])
+ end
+ end
+ end
+ end
end
diff --git a/spec/finders/groups/accepting_group_transfers_finder_spec.rb b/spec/finders/groups/accepting_group_transfers_finder_spec.rb
index 18407dd0196..5c78ec3124b 100644
--- a/spec/finders/groups/accepting_group_transfers_finder_spec.rb
+++ b/spec/finders/groups/accepting_group_transfers_finder_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::AcceptingGroupTransfersFinder do
+RSpec.describe Groups::AcceptingGroupTransfersFinder, feature_category: :groups_and_projects do
let_it_be(:current_user) { create(:user) }
let_it_be(:great_grandparent_group) do
@@ -119,6 +119,25 @@ RSpec.describe Groups::AcceptingGroupTransfersFinder do
expect(result).to contain_exactly(great_grandparent_group)
end
end
+
+ context 'on searching with multiple matches' do
+ let(:params) { { search: 'great-grandparent-group' } }
+ let(:other_groups) { [] }
+
+ before do
+ 2.times do
+ # app/finders/group/base.rb adds an ORDER BY path, so create a group with '1' at the front of the path.
+ group = create(:group, parent: great_grandparent_group, path: "1-#{SecureRandom.hex}")
+ group.add_owner(current_user)
+ other_groups << group
+ end
+ end
+
+ it 'prioritizes exact matches first' do
+ expect(result.first).to eq(great_grandparent_group)
+ expect(result[1..]).to match_array(other_groups)
+ end
+ end
end
end
end
diff --git a/spec/finders/organizations/groups_finder_spec.rb b/spec/finders/organizations/groups_finder_spec.rb
new file mode 100644
index 00000000000..08c5604149b
--- /dev/null
+++ b/spec/finders/organizations/groups_finder_spec.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Organizations::GroupsFinder, feature_category: :cell do
+ let_it_be(:organization_user) { create(:organization_user) }
+ let_it_be(:organization) { organization_user.organization }
+ let_it_be(:user) { organization_user.user }
+ let_it_be(:public_group) { create(:group, name: 'public-group', organization: organization) }
+ let_it_be(:other_group) { create(:group, name: 'other-group', organization: organization) }
+ let_it_be(:outside_organization_group) { create(:group) }
+ let_it_be(:private_group) do
+ create(:group, :private, name: 'private-group', organization: organization)
+ end
+
+ let_it_be(:no_access_group_in_org) do
+ create(:group, :private, name: 'no-access', organization: organization)
+ end
+
+ let(:current_user) { user }
+ let(:params) { {} }
+ let(:finder) { described_class.new(organization: organization, current_user: current_user, params: params) }
+
+ before_all do
+ private_group.add_developer(user)
+ public_group.add_developer(user)
+ other_group.add_developer(user)
+ outside_organization_group.add_developer(user)
+ end
+
+ subject(:result) { finder.execute.to_a }
+
+ describe '#execute' do
+ context 'when user is not authorized to read the organization' do
+ let(:current_user) { create(:user) }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'when organization is nil' do
+ let(:organization) { nil }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'when user is authorized to read the organization' do
+ it 'returns all accessible groups' do
+ expect(result).to contain_exactly(public_group, private_group, other_group)
+ end
+
+ context 'when search param is passed' do
+ let(:params) { { search: 'the' } }
+
+ it 'filters the groups by search' do
+ expect(result).to contain_exactly(other_group)
+ end
+ end
+
+ context 'when sort param is not passed' do
+ it 'returns groups sorted by name in ascending order by default' do
+ expect(result).to eq([other_group, private_group, public_group])
+ end
+ end
+
+ context 'when sort param is passed' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:field, :direction, :sorted_groups) do
+ 'name' | 'asc' | lazy { [other_group, private_group, public_group] }
+ 'name' | 'desc' | lazy { [public_group, private_group, other_group] }
+ 'path' | 'asc' | lazy { [other_group, private_group, public_group] }
+ 'path' | 'desc' | lazy { [public_group, private_group, other_group] }
+ end
+
+ with_them do
+ let(:params) { { sort: { field: field, direction: direction } } }
+ it 'sorts the groups' do
+ expect(result).to eq(sorted_groups)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/finders/organizations/organization_users_finder_spec.rb b/spec/finders/organizations/organization_users_finder_spec.rb
new file mode 100644
index 00000000000..d7fba372e40
--- /dev/null
+++ b/spec/finders/organizations/organization_users_finder_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Organizations::OrganizationUsersFinder, feature_category: :cell do
+ let_it_be(:organization) { create(:organization) }
+ let_it_be(:organization_user_1) { create(:organization_user, organization: organization) }
+ let_it_be(:organization_user_2) { create(:organization_user, organization: organization) }
+ let_it_be(:other_organization_user) { create(:organization_user) }
+
+ let(:current_user) { organization_user_1.user }
+ let(:finder) { described_class.new(organization: organization, current_user: current_user) }
+
+ subject(:result) { finder.execute.to_a }
+
+ describe '#execute' do
+ context 'when user is not authorized to read the organization' do
+ let(:current_user) { create(:user) }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'when organization is nil' do
+ let(:organization) { nil }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'when user is authorized to read the organization' do
+ it 'returns all organization users' do
+ expect(result).to contain_exactly(organization_user_1, organization_user_2)
+ end
+ end
+ end
+end
diff --git a/spec/finders/packages/npm/packages_for_user_finder_spec.rb b/spec/finders/packages/npm/packages_for_user_finder_spec.rb
new file mode 100644
index 00000000000..e2dc21e1008
--- /dev/null
+++ b/spec/finders/packages/npm/packages_for_user_finder_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Packages::Npm::PackagesForUserFinder, feature_category: :package_registry do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:project2) { create(:project, group: group) }
+ let_it_be(:package) { create(:npm_package, project: project) }
+ let_it_be(:package_name) { package.name }
+ let_it_be(:package_with_diff_name) { create(:npm_package, project: project) }
+ let_it_be(:package_with_diff_project) { create(:npm_package, name: package_name, project: project2) }
+ let_it_be(:maven_package) { create(:maven_package, name: package_name, project: project) }
+
+ let(:finder) { described_class.new(user, project_or_group, package_name: package_name) }
+
+ describe '#execute' do
+ subject { finder.execute }
+
+ shared_examples 'searches for packages' do
+ it { is_expected.to contain_exactly(package) }
+ end
+
+ context 'with a project' do
+ let(:project_or_group) { project }
+
+ it_behaves_like 'searches for packages'
+ end
+
+ context 'with a group' do
+ let(:project_or_group) { group }
+
+ before_all do
+ project.add_reporter(user)
+ end
+
+ it_behaves_like 'searches for packages'
+ end
+ end
+end
diff --git a/spec/finders/packages/nuget/package_finder_spec.rb b/spec/finders/packages/nuget/package_finder_spec.rb
index 792e543e424..8230d132d75 100644
--- a/spec/finders/packages/nuget/package_finder_spec.rb
+++ b/spec/finders/packages/nuget/package_finder_spec.rb
@@ -114,16 +114,6 @@ RSpec.describe Packages::Nuget::PackageFinder, feature_category: :package_regist
it_behaves_like 'calling with_nuget_version_or_normalized_version scope', with_normalized: true
end
-
- context 'when nuget_normalized_version feature flag is disabled' do
- let(:package_version) { '2.0.0+abc' }
-
- before do
- stub_feature_flags(nuget_normalized_version: false)
- end
-
- it_behaves_like 'calling with_nuget_version_or_normalized_version scope', with_normalized: false
- end
end
context 'with a project' do
diff --git a/spec/fixtures/api/schemas/entities/codequality_degradation.json b/spec/fixtures/api/schemas/entities/codequality_degradation.json
index ac772873daf..ac1a556e33c 100644
--- a/spec/fixtures/api/schemas/entities/codequality_degradation.json
+++ b/spec/fixtures/api/schemas/entities/codequality_degradation.json
@@ -10,6 +10,9 @@
"description": {
"type": "string"
},
+ "fingerprint": {
+ "type": "string"
+ },
"severity": {
"type": "string"
},
diff --git a/spec/fixtures/api/schemas/job/job.json b/spec/fixtures/api/schemas/job/job.json
index f3d5e9b038a..34668f309a6 100644
--- a/spec/fixtures/api/schemas/job/job.json
+++ b/spec/fixtures/api/schemas/job/job.json
@@ -5,7 +5,6 @@
"id",
"name",
"started",
- "build_path",
"playable",
"created_at",
"updated_at",
diff --git a/spec/fixtures/api/schemas/ml/search_runs.json b/spec/fixtures/api/schemas/ml/search_runs.json
new file mode 100644
index 00000000000..c1db2c9f15c
--- /dev/null
+++ b/spec/fixtures/api/schemas/ml/search_runs.json
@@ -0,0 +1,82 @@
+{
+ "type": "object",
+ "required": [
+ "runs",
+ "next_page_token"
+ ],
+ "properties": {
+ "runs": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "required": [
+ "info",
+ "data"
+ ],
+ "properties": {
+ "info": {
+ "type": "object",
+ "required": [
+ "run_id",
+ "run_uuid",
+ "user_id",
+ "experiment_id",
+ "status",
+ "start_time",
+ "artifact_uri",
+ "lifecycle_stage"
+ ],
+ "optional": [
+ "end_time"
+ ],
+ "properties": {
+ "run_id": {
+ "type": "string"
+ },
+ "run_uuid": {
+ "type": "string"
+ },
+ "experiment_id": {
+ "type": "string"
+ },
+ "artifact_uri": {
+ "type": "string"
+ },
+ "start_time": {
+ "type": "integer"
+ },
+ "end_time": {
+ "type": "integer"
+ },
+ "user_id": {
+ "type": "string"
+ },
+ "status": {
+ "type": "string",
+ "enum": [
+ "RUNNING",
+ "SCHEDULED",
+ "FINISHED",
+ "FAILED",
+ "KILLED"
+ ]
+ },
+ "lifecycle_stage": {
+ "type": "string",
+ "enum": [
+ "active"
+ ]
+ }
+ }
+ },
+ "data": {
+ "type": "object"
+ }
+ }
+ }
+ },
+ "next_page_token": {
+ "type": "string"
+ }
+ }
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/integration.json b/spec/fixtures/api/schemas/public_api/v4/integration.json
index 8902196a2c4..2b16e44eb85 100644
--- a/spec/fixtures/api/schemas/public_api/v4/integration.json
+++ b/spec/fixtures/api/schemas/public_api/v4/integration.json
@@ -65,6 +65,9 @@
},
"comment_on_event_enabled": {
"type": "boolean"
+ },
+ "vulnerability_events": {
+ "type": "boolean"
}
},
"additionalProperties": false
diff --git a/spec/fixtures/api/schemas/public_api/v4/operations/strategy.json b/spec/fixtures/api/schemas/public_api/v4/operations/strategy.json
index f572b1a4f9b..a72260af145 100644
--- a/spec/fixtures/api/schemas/public_api/v4/operations/strategy.json
+++ b/spec/fixtures/api/schemas/public_api/v4/operations/strategy.json
@@ -8,7 +8,8 @@
"id": { "type": "integer" },
"name": { "type": "string" },
"parameters": { "type": "object" },
- "scopes": { "type": "array", "items": { "$ref": "scope.json" } }
+ "scopes": { "type": "array", "items": { "$ref": "scope.json" } },
+ "user_list": { "type": ["object", "null"], "$ref": "user_list.json" }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/operations/user_list.json b/spec/fixtures/api/schemas/public_api/v4/operations/user_list.json
new file mode 100644
index 00000000000..6a9f977e37d
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/operations/user_list.json
@@ -0,0 +1,16 @@
+{
+ "type": ["object", "null"],
+ "required": [
+ "id",
+ "iid",
+ "name",
+ "user_xids"
+ ],
+ "properties": {
+ "id": { "type": "integer" },
+ "iid": { "type": "integer" },
+ "name": { "type": "string" },
+ "user_xids": { "type": "string" }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/status/ci_detailed_status.json b/spec/fixtures/api/schemas/status/ci_detailed_status.json
index 8d0f1e4a6af..0d9e4975858 100644
--- a/spec/fixtures/api/schemas/status/ci_detailed_status.json
+++ b/spec/fixtures/api/schemas/status/ci_detailed_status.json
@@ -17,7 +17,7 @@
"group": { "type": "string" },
"tooltip": { "type": "string" },
"has_details": { "type": "boolean" },
- "details_path": { "type": "string" },
+ "details_path": { "oneOf": [{ "type": "null" }, {"type": "string" }] },
"favicon": { "type": "string" },
"illustration": { "$ref": "illustration.json" },
"action": { "$ref": "action.json" }
diff --git a/spec/fixtures/ci_secure_files/sample.p12 b/spec/fixtures/ci_secure_files/sample.p12
index c74df26a8d4..84c7bf6a2f5 100644
--- a/spec/fixtures/ci_secure_files/sample.p12
+++ b/spec/fixtures/ci_secure_files/sample.p12
Binary files differ
diff --git a/spec/fixtures/lib/generators/gitlab/snowplow_event_definition_generator/sample_event.yml b/spec/fixtures/lib/generators/gitlab/snowplow_event_definition_generator/sample_event.yml
index 1c1ad65796c..6c3a0ef5ed6 100644
--- a/spec/fixtures/lib/generators/gitlab/snowplow_event_definition_generator/sample_event.yml
+++ b/spec/fixtures/lib/generators/gitlab/snowplow_event_definition_generator/sample_event.yml
@@ -1,6 +1,6 @@
---
description:
-category: Groups::EmailCampaignsController
+category: Projects::Pipelines::EmailCampaignsController
action: click
label_description:
property_description:
@@ -13,12 +13,12 @@ identifiers:
product_section:
product_stage:
product_group:
-milestone: "13.11"
+milestone: '13.11'
introduced_by_url:
distributions:
-- ce
-- ee
+ - ce
+ - ee
tiers:
-- free
-- premium
-- ultimate
+ - free
+ - premium
+ - ultimate
diff --git a/spec/fixtures/lib/generators/gitlab/snowplow_event_definition_generator/sample_event_ee.yml b/spec/fixtures/lib/generators/gitlab/snowplow_event_definition_generator/sample_event_ee.yml
index 174468028b8..3381c73f23e 100644
--- a/spec/fixtures/lib/generators/gitlab/snowplow_event_definition_generator/sample_event_ee.yml
+++ b/spec/fixtures/lib/generators/gitlab/snowplow_event_definition_generator/sample_event_ee.yml
@@ -1,6 +1,6 @@
---
description:
-category: Groups::EmailCampaignsController
+category: Projects::Pipelines::EmailCampaignsController
action: click
label_description:
property_description:
@@ -13,10 +13,10 @@ identifiers:
product_section:
product_stage:
product_group:
-milestone: "13.11"
+milestone: '13.11'
introduced_by_url:
distributions:
-- ee
+ - ee
tiers:
-#- premium
-- ultimate
+ #- premium
+ - ultimate
diff --git a/spec/fixtures/packages/nuget/symbol/package.pdb b/spec/fixtures/packages/nuget/symbol/package.pdb
new file mode 100644
index 00000000000..dc82bf418a2
--- /dev/null
+++ b/spec/fixtures/packages/nuget/symbol/package.pdb
Binary files differ
diff --git a/spec/fixtures/security_reports/master/gl-common-scanning-report.json b/spec/fixtures/security_reports/master/gl-common-scanning-report.json
index 4c494963a79..31a86d3a8ae 100644
--- a/spec/fixtures/security_reports/master/gl-common-scanning-report.json
+++ b/spec/fixtures/security_reports/master/gl-common-scanning-report.json
@@ -1,11 +1,11 @@
{
"vulnerabilities": [
{
+ "id": "vulnerability-1",
"category": "dependency_scanning",
"name": "Vulnerability for remediation testing 1",
"message": "This vulnerability should have ONE remediation",
"description": "",
- "cve": "CVE-2137",
"severity": "High",
"solution": "Upgrade to latest version.",
"scanner": {
@@ -43,11 +43,11 @@
}
},
{
+ "id": "vulnerability-2",
"category": "dependency_scanning",
"name": "Vulnerability for remediation testing 2",
"message": "This vulnerability should have ONE remediation",
"description": "",
- "cve": "CVE-2138",
"severity": "High",
"solution": "Upgrade to latest version.",
"scanner": {
@@ -85,11 +85,11 @@
}
},
{
+ "id": "vulnerability-3",
"category": "dependency_scanning",
"name": "Vulnerability for remediation testing 3",
"message": "Remediation for this vulnerability should remediate CVE-2140 as well",
"description": "",
- "cve": "CVE-2139",
"severity": "High",
"solution": "Upgrade to latest version.",
"scanner": {
@@ -127,11 +127,11 @@
}
},
{
+ "id": "vulnerability-4",
"category": "dependency_scanning",
"name": "Vulnerability for remediation testing 4",
"message": "Remediation for this vulnerability should remediate CVE-2139 as well",
"description": "",
- "cve": "CVE-2140",
"severity": "High",
"solution": "Upgrade to latest version.",
"scanner": {
@@ -169,11 +169,11 @@
}
},
{
+ "id": "vulnerability-5",
"category": "dependency_scanning",
"name": "Vulnerabilities in libxml2",
"message": "Vulnerabilities in libxml2 in nokogiri",
"description": "",
- "cve": "CVE-1020",
"severity": "High",
"solution": "Upgrade to latest version.",
"scanner": {
@@ -281,12 +281,11 @@
}
},
{
- "id": "bb2fbeb1b71ea360ce3f86f001d4e84823c3ffe1a1f7d41ba7466b14cfa953d3",
+ "id": "vulnerability-6",
"category": "dependency_scanning",
"name": "Regular Expression Denial of Service",
"message": "Regular Expression Denial of Service in debug",
"description": "",
- "cve": "CVE-1030",
"severity": "Unknown",
"solution": "Upgrade to latest versions.",
"scanner": {
@@ -387,6 +386,7 @@
]
},
{
+ "id": "vulnerability-7",
"category": "dependency_scanning",
"name": "Authentication bypass via incorrect DOM traversal and canonicalization",
"message": "Authentication bypass via incorrect DOM traversal and canonicalization in saml2-js",
@@ -421,47 +421,46 @@
{
"fixes": [
{
- "cve": "CVE-2137"
+ "id": "vulnerability-1"
}
],
- "summary": "this remediates CVE-2137",
+ "summary": "this remediates the first vulnerability",
"diff": "dG90YWxseSBsZWdpdCBkaWZm"
},
{
"fixes": [
{
- "cve": "CVE-2138"
+ "id": "vulnerability-2"
}
],
- "summary": "this remediates CVE-2138",
+ "summary": "this remediates the second vulnerability",
"diff": "dG90YWxseSBsZWdpdCBkaWZm"
},
{
"fixes": [
{
- "cve": "CVE-2139"
+ "id": "vulnerability-3"
},
{
- "cve": "CVE-2140"
+ "id": "vulnerability-4"
}
],
- "summary": "this remediates CVE-2139 and CVE-2140",
+ "summary": "this remediates the third and fourth vulnerability",
"diff": "dG90YWxseSBsZWdpdGltYXRlIGRpZmYsIDEwLzEwIHdvdWxkIGFwcGx5"
},
{
"fixes": [
{
- "cve": "CVE-1020"
+ "id": "vulnerability-5"
}
],
- "summary": "this fixes CVE-1020",
+ "summary": "this fixes the fifth vulnerability",
"diff": "dG90YWxseSBsZWdpdGltYXRlIGRpZmYsIDEwLzEwIHdvdWxkIGFwcGx5"
},
{
"fixes": [
{
- "cve": "CVE",
- "id": "bb2fbeb1b71ea360ce3f86f001d4e84823c3ffe1a1f7d41ba7466b14cfa953d3"
+ "id": "vulnerability-6"
}
],
"summary": "this fixes CVE",
@@ -470,22 +469,11 @@
{
"fixes": [
{
- "cve": "CVE",
- "id": "bb2fbeb1b71ea360ce3f86f001d4e84823c3ffe1a1f7d41ba7466b14cfa953d3"
+ "id": "vulnerability-6"
}
],
"summary": "this fixed CVE",
"diff": "dG90YWxseSBsZWdpdGltYXRlIGRpZmYsIDEwLzEwIHdvdWxkIGFwcGx5"
- },
- {
- "fixes": [
- {
- "id": "2134",
- "cve": "CVE-1"
- }
- ],
- "summary": "this fixes CVE-1",
- "diff": "dG90YWxseSBsZWdpdGltYXRlIGRpZmYsIDEwLzEwIHdvdWxkIGFwcGx5"
}
],
"dependency_files": [],
diff --git a/spec/frontend/__helpers__/clean_html_element_serializer.js b/spec/frontend/__helpers__/clean_html_element_serializer.js
new file mode 100644
index 00000000000..d787f5126ec
--- /dev/null
+++ b/spec/frontend/__helpers__/clean_html_element_serializer.js
@@ -0,0 +1,142 @@
+// slot-scope attribute is a result of Vue.js 3 stubs being serialized in slot context, drop it
+// modelModifiers are a result of Vue.js 3 model modifier handling and should not be in snapshots
+const ATTRIBUTES_TO_REMOVE = ['slot-scope', 'modelmodifiers'];
+// Taken from https://github.com/vuejs/vue/blob/72aed6a149b94b5b929fb47370a7a6d4cb7491c5/src/platforms/web/util/attrs.ts#L37-L44
+const BOOLEAN_ATTRIBUTES = new Set(
+ (
+ 'allowfullscreen,async,autofocus,autoplay,checked,compact,controls,declare,' +
+ 'default,defaultchecked,defaultmuted,defaultselected,defer,disabled,' +
+ 'enabled,formnovalidate,hidden,indeterminate,inert,ismap,itemscope,loop,multiple,' +
+ 'muted,nohref,noresize,noshade,novalidate,nowrap,open,pauseonexit,readonly,' +
+ 'required,reversed,scoped,seamless,selected,sortable,' +
+ 'truespeed,typemustmatch,visible'
+ ).split(','),
+);
+
+function sortClassesAlphabetically(node) {
+ // Make classes render in alphabetical order for both Vue2 and Vue3
+ if (node.hasAttribute('class')) {
+ const classes = node.getAttribute('class');
+ if (classes === '') {
+ node.removeAttribute('class');
+ } else {
+ node.setAttribute('class', Array.from(node.classList).sort().join(' '));
+ }
+ }
+}
+
+const TRANSITION_VALUES_TO_REMOVE = [
+ { attributeName: 'css', defaultValue: 'true' },
+ { attributeName: 'persisted', defaultValue: 'true' },
+];
+function removeInternalPropsLeakingToTransitionStub(node) {
+ TRANSITION_VALUES_TO_REMOVE.forEach((hash) => {
+ if (node.getAttribute(hash.attributeName) === hash.defaultValue) {
+ node.removeAttribute(hash.attributeName);
+ }
+ });
+}
+
+function normalizeText(node) {
+ const newText = node.textContent.trim();
+ const textWithoutNewLines = newText.replace(/\n/g, '');
+ const textWithoutDeepSpace = textWithoutNewLines.replace(/(?<=\S)\s+/g, ' ');
+ // eslint-disable-next-line no-param-reassign
+ node.textContent = textWithoutDeepSpace;
+}
+
+const visited = new WeakSet();
+
+// Lovingly borrowed from https://developer.mozilla.org/en-US/docs/Web/API/Document_Object_Model/Whitespace#whitespace_helper_functions
+function isAllWhitespace(node) {
+ return !/[^\t\n\r ]/.test(node.textContent);
+}
+
+function isIgnorable(node) {
+ return (
+ node.nodeType === Node.COMMENT_NODE || // A comment node
+ (node.nodeType === Node.TEXT_NODE && isAllWhitespace(node))
+ ); // a text node, all ws
+}
+
+const REFERENCE_ATTRIBUTES = ['aria-controls', 'aria-labelledby', 'for'];
+function updateIdTags(root) {
+ const elementsWithIds = [...(root.id ? [root] : []), ...root.querySelectorAll('[id]')];
+
+ const referenceSelector = REFERENCE_ATTRIBUTES.map((attr) => `[${attr}]`).join(',');
+ const elementsWithReference = [
+ ...(root.matches(referenceSelector) ? [root] : []),
+ ...root.querySelectorAll(REFERENCE_ATTRIBUTES.map((attr) => `[${attr}]`).join(',')),
+ ];
+
+ elementsWithReference.forEach((el) => {
+ REFERENCE_ATTRIBUTES.filter((attr) => el.getAttribute(attr)).forEach((target) => {
+ const index = elementsWithIds.findIndex((t) => t.id === el.getAttribute(target));
+ if (index !== -1) {
+ el.setAttribute(target, `reference-${index}`);
+ }
+ });
+ });
+
+ elementsWithIds.forEach((el, index) => {
+ el.setAttribute('id', `reference-${index}`);
+ });
+}
+
+export function test(received) {
+ return received instanceof Element && !visited.has(received);
+}
+
+export function serialize(received, config, indentation, depth, refs, printer) {
+ // Explicitly set empty string values of img.src to `null` as Vue3 does
+ // We need to do this before `clone`, otherwise src prop diff will be lost
+ received.querySelectorAll('img').forEach((img) => img.setAttribute('src', img.src || null));
+
+ const clone = received.cloneNode(true);
+
+ updateIdTags(clone);
+ visited.add(clone);
+
+ const iterator = document.createNodeIterator(
+ clone,
+ // eslint-disable-next-line no-bitwise
+ NodeFilter.SHOW_COMMENT | NodeFilter.SHOW_TEXT | NodeFilter.SHOW_ELEMENT,
+ );
+ const ignorableNodes = [];
+
+ for (let currentNode = iterator.nextNode(); currentNode; currentNode = iterator.nextNode()) {
+ if (isIgnorable(currentNode)) {
+ ignorableNodes.push(currentNode);
+ } else {
+ if (currentNode instanceof Element) {
+ ATTRIBUTES_TO_REMOVE.forEach((attr) => currentNode.removeAttribute(attr));
+
+ if (!currentNode.tagName.includes('-')) {
+ // We want to normalize boolean attributes rendering only on native tags
+ BOOLEAN_ATTRIBUTES.forEach((attr) => {
+ if (currentNode.hasAttribute(attr) && currentNode.getAttribute(attr) === attr) {
+ currentNode.setAttribute(attr, '');
+ }
+ });
+ }
+
+ sortClassesAlphabetically(currentNode);
+
+ if (currentNode.tagName === 'TRANSITION-STUB') {
+ removeInternalPropsLeakingToTransitionStub(currentNode);
+ }
+ }
+
+ if (currentNode.nodeType === Node.TEXT_NODE) {
+ normalizeText(currentNode);
+ }
+
+ currentNode.normalize();
+ visited.add(currentNode);
+ }
+ }
+
+ ignorableNodes.forEach((x) => x.remove());
+
+ return printer(clone, config, indentation, depth, refs);
+}
diff --git a/spec/frontend/__helpers__/dom_shims/get_client_rects.js b/spec/frontend/__helpers__/dom_shims/get_client_rects.js
index 7ba60dd7936..0ec3525f0ef 100644
--- a/spec/frontend/__helpers__/dom_shims/get_client_rects.js
+++ b/spec/frontend/__helpers__/dom_shims/get_client_rects.js
@@ -1,7 +1,8 @@
function hasHiddenStyle(node) {
if (!node.style) {
return false;
- } else if (node.style.display === 'none' || node.style.visibility === 'hidden') {
+ }
+ if (node.style.display === 'none' || node.style.visibility === 'hidden') {
return true;
}
diff --git a/spec/frontend/__helpers__/html_string_serializer.js b/spec/frontend/__helpers__/html_string_serializer.js
new file mode 100644
index 00000000000..99f4acd0e97
--- /dev/null
+++ b/spec/frontend/__helpers__/html_string_serializer.js
@@ -0,0 +1,11 @@
+export function test(received) {
+ return received && typeof received === 'string' && received.startsWith('<');
+}
+
+export function serialize(received, config, indentation, depth, refs, printer) {
+ const parser = new DOMParser();
+ const doc = parser.parseFromString(received, 'text/html');
+ const el = doc.body.firstElementChild;
+
+ return printer(el, config, indentation, depth, refs);
+}
diff --git a/spec/frontend/__helpers__/vue_test_utils_helper.js b/spec/frontend/__helpers__/vue_test_utils_helper.js
index c144a256dce..20a79fc4d2f 100644
--- a/spec/frontend/__helpers__/vue_test_utils_helper.js
+++ b/spec/frontend/__helpers__/vue_test_utils_helper.js
@@ -1,6 +1,13 @@
import * as testingLibrary from '@testing-library/dom';
-import { createWrapper, WrapperArray, ErrorWrapper, mount, shallowMount } from '@vue/test-utils';
-import { isArray, upperFirst } from 'lodash';
+import {
+ createWrapper,
+ Wrapper, // eslint-disable-line no-unused-vars
+ ErrorWrapper,
+ mount,
+ shallowMount,
+ WrapperArray,
+} from '@vue/test-utils';
+import { compose } from 'lodash/fp';
const vNodeContainsText = (vnode, text) =>
(vnode.text && vnode.text.includes(text)) ||
@@ -14,7 +21,7 @@ const vNodeContainsText = (vnode, text) =>
*
* @param {HTMLElement} element
* @param {Object} options
- * @returns VTU wrapper
+ * @returns {Wrapper} VTU wrapper
*/
const createWrapperFromElement = (element, options) =>
// eslint-disable-next-line no-underscore-dangle
@@ -52,19 +59,84 @@ export const waitForMutation = (store, expectedMutationType) =>
});
});
+/**
+ * Query function type
+ * @callback FindFunction
+ * @param text
+ * @returns {Wrapper}
+ */
+
+/**
+ * Query all function type
+ * @callback FindAllFunction
+ * @param text
+ * @returns {WrapperArray}
+ */
+
+/**
+ * Query find with options functions type
+ * @callback FindWithOptionsFunction
+ * @param text
+ * @param options
+ * @returns {Wrapper}
+ */
+
+/**
+ * Query find all with options functions type
+ * @callback FindAllWithOptionsFunction
+ * @param text
+ * @param options
+ * @returns {WrapperArray}
+ */
+
+/**
+ * Extended Wrapper queries
+ * @typedef { {
+ * findByTestId: FindFunction,
+ * findAllByTestId: FindAllFunction,
+ * findComponentByTestId: FindFunction,
+ * findAllComponentsByTestId: FindAllFunction,
+ * findByRole: FindWithOptionsFunction,
+ * findAllByRole: FindAllWithOptionsFunction,
+ * findByLabelText: FindWithOptionsFunction,
+ * findAllByLabelText: FindAllWithOptionsFunction,
+ * findByPlaceholderText: FindWithOptionsFunction,
+ * findAllByPlaceholderText: FindAllWithOptionsFunction,
+ * findByText: FindWithOptionsFunction,
+ * findAllByText: FindAllWithOptionsFunction,
+ * findByDisplayValue: FindWithOptionsFunction,
+ * findAllByDisplayValue: FindAllWithOptionsFunction,
+ * findByAltText: FindWithOptionsFunction,
+ * findAllByAltText: FindAllWithOptionsFunction,
+ * findByTitle: FindWithOptionsFunction,
+ * findAllByTitle: FindAllWithOptionsFunction
+ * } } ExtendedQueries
+ */
+
+/**
+ * Extended Wrapper
+ * @typedef {(Wrapper & ExtendedQueries)} ExtendedWrapper
+ */
+
+/**
+ * Creates a Wrapper {@link https://v1.test-utils.vuejs.org/api/wrapper/} with
+ * Additional Queries {@link https://testing-library.com/docs/queries/about}.
+ * @param { Wrapper } wrapper
+ * @returns { ExtendedWrapper }
+ */
export const extendedWrapper = (wrapper) => {
// https://testing-library.com/docs/queries/about
const AVAILABLE_QUERIES = [
- 'byRole',
- 'byLabelText',
- 'byPlaceholderText',
- 'byText',
- 'byDisplayValue',
- 'byAltText',
- 'byTitle',
+ 'ByRole',
+ 'ByLabelText',
+ 'ByPlaceholderText',
+ 'ByText',
+ 'ByDisplayValue',
+ 'ByAltText',
+ 'ByTitle',
];
- if (isArray(wrapper) || !wrapper?.find) {
+ if (Array.isArray(wrapper) || !wrapper?.find) {
// eslint-disable-next-line no-console
console.warn(
'[vue-test-utils-helper]: you are trying to extend an object that is not a VueWrapper.',
@@ -74,11 +146,13 @@ export const extendedWrapper = (wrapper) => {
return Object.defineProperties(wrapper, {
findByTestId: {
+ /** @this { Wrapper } */
value(id) {
return this.find(`[data-testid="${id}"]`);
},
},
findAllByTestId: {
+ /** @this { Wrapper } */
value(id) {
return this.findAll(`[data-testid="${id}"]`);
},
@@ -88,6 +162,7 @@ export const extendedWrapper = (wrapper) => {
* with CSS selectors: https://v1.test-utils.vuejs.org/api/wrapper/#findcomponent
*/
findComponentByTestId: {
+ /** @this { Wrapper } */
value(id) {
return this.findComponent(`[data-testid="${id}"]`);
},
@@ -97,6 +172,7 @@ export const extendedWrapper = (wrapper) => {
* with CSS selectors: https://v1.test-utils.vuejs.org/api/wrapper/#findallcomponents
*/
findAllComponentsByTestId: {
+ /** @this { Wrapper } */
value(id) {
return this.findAllComponents(`[data-testid="${id}"]`);
},
@@ -105,13 +181,10 @@ export const extendedWrapper = (wrapper) => {
...AVAILABLE_QUERIES.reduce((accumulator, query) => {
return {
...accumulator,
- [`find${upperFirst(query)}`]: {
+ [`find${query}`]: {
+ /** @this { Wrapper } */
value(text, options = {}) {
- const elements = testingLibrary[`queryAll${upperFirst(query)}`](
- wrapper.element,
- text,
- options,
- );
+ const elements = testingLibrary[`queryAll${query}`](this.element, text, options);
// Element not found, return an `ErrorWrapper`
if (!elements.length) {
@@ -126,13 +199,10 @@ export const extendedWrapper = (wrapper) => {
...AVAILABLE_QUERIES.reduce((accumulator, query) => {
return {
...accumulator,
- [`findAll${upperFirst(query)}`]: {
+ [`findAll${query}`]: {
+ /** @this { Wrapper } */
value(text, options = {}) {
- const elements = testingLibrary[`queryAll${upperFirst(query)}`](
- wrapper.element,
- text,
- options,
- );
+ const elements = testingLibrary[`queryAll${query}`](this.element, text, options);
const wrappers = elements.map((element) => {
const elementWrapper = createWrapperFromElement(element, this.options);
@@ -152,6 +222,5 @@ export const extendedWrapper = (wrapper) => {
});
};
-export const shallowMountExtended = (...args) => extendedWrapper(shallowMount(...args));
-
-export const mountExtended = (...args) => extendedWrapper(mount(...args));
+export const shallowMountExtended = compose(extendedWrapper, shallowMount);
+export const mountExtended = compose(extendedWrapper, mount);
diff --git a/spec/frontend/__helpers__/vue_test_utils_helper_spec.js b/spec/frontend/__helpers__/vue_test_utils_helper_spec.js
index 2f69a2348d9..c137561154d 100644
--- a/spec/frontend/__helpers__/vue_test_utils_helper_spec.js
+++ b/spec/frontend/__helpers__/vue_test_utils_helper_spec.js
@@ -374,34 +374,34 @@ describe('Vue test utils helpers', () => {
});
});
- describe.each`
- mountExtendedFunction | expectedMountFunction
- ${shallowMountExtended} | ${'shallowMount'}
- ${mountExtended} | ${'mount'}
- `('$mountExtendedFunction', ({ mountExtendedFunction, expectedMountFunction }) => {
- const FakeComponent = jest.fn();
- const options = {
- propsData: {
- foo: 'bar',
- },
- };
-
- beforeEach(() => {
- const mockWrapper = { find: jest.fn() };
- jest.spyOn(vtu, expectedMountFunction).mockImplementation(() => mockWrapper);
+ describe('mount extended functions', () => {
+ // eslint-disable-next-line vue/one-component-per-file
+ const FakeChildComponent = Vue.component('FakeChildComponent', {
+ template: '<div>Bar <div data-testid="fake-id"/></div>',
});
- it(`calls \`${expectedMountFunction}\` with passed arguments`, () => {
- mountExtendedFunction(FakeComponent, options);
-
- expect(vtu[expectedMountFunction]).toHaveBeenCalledWith(FakeComponent, options);
+ // eslint-disable-next-line vue/one-component-per-file
+ const FakeComponent = Vue.component('FakeComponent', {
+ components: {
+ FakeChildComponent,
+ },
+ template: '<div>Foo <fake-child-component data-testid="fake-id" /></div>',
});
- it('returns extended wrapper', () => {
- const result = mountExtendedFunction(FakeComponent, options);
+ describe('mountExtended', () => {
+ it('mounts component and provides extended queries', () => {
+ const wrapper = mountExtended(FakeComponent);
+ expect(wrapper.text()).toBe('Foo Bar');
+ expect(wrapper.findAllByTestId('fake-id').length).toBe(2);
+ });
+ });
- expect(result).toHaveProperty('find');
- expect(result).toHaveProperty('findByTestId');
+ describe('shallowMountExtended', () => {
+ it('shallow mounts component and provides extended queries', () => {
+ const wrapper = shallowMountExtended(FakeComponent);
+ expect(wrapper.text()).toBe('Foo');
+ expect(wrapper.findAllByTestId('fake-id').length).toBe(1);
+ });
});
});
});
diff --git a/spec/frontend/access_tokens/components/access_token_table_app_spec.js b/spec/frontend/access_tokens/components/access_token_table_app_spec.js
index 5236f38dc35..ae767f8b3f5 100644
--- a/spec/frontend/access_tokens/components/access_token_table_app_spec.js
+++ b/spec/frontend/access_tokens/components/access_token_table_app_spec.js
@@ -157,9 +157,9 @@ describe('~/access_tokens/components/access_token_table_app', () => {
href: '/-/profile/personal_access_tokens/1/revoke',
'data-confirm': sprintf(
__(
- 'Are you sure you want to revoke this %{accessTokenType}? This action cannot be undone.',
+ 'Are you sure you want to revoke the %{accessTokenType} "%{tokenName}"? This action cannot be undone.',
),
- { accessTokenType },
+ { accessTokenType, tokenName: 'a' },
),
});
expect(button.props('category')).toBe('tertiary');
diff --git a/spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap b/spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap
index ddeab3e3b62..fca17f948f8 100644
--- a/spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap
+++ b/spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap
@@ -24,7 +24,6 @@ exports[`AddContextCommitsModal renders modal with 2 tabs 1`] = `
<gl-tab-stub
titlelinkclass=""
>
-
<div
class="gl-mt-3"
>
@@ -38,7 +37,6 @@ exports[`AddContextCommitsModal renders modal with 2 tabs 1`] = `
searchtextoptionlabel="Search for this text"
value=""
/>
-
<review-tab-container-stub
commits=""
emptylisttext="Your search didn't match any commits. Try a different query."
@@ -46,11 +44,9 @@ exports[`AddContextCommitsModal renders modal with 2 tabs 1`] = `
/>
</div>
</gl-tab-stub>
-
<gl-tab-stub
titlelinkclass=""
>
-
<review-tab-container-stub
commits=""
emptylisttext="Commits you select appear here. Go to the first tab and select commits to add to this merge request."
diff --git a/spec/frontend/admin/abuse_report/components/abuse_report_app_spec.js b/spec/frontend/admin/abuse_report/components/abuse_report_app_spec.js
index e519684bbc5..4340699a7ed 100644
--- a/spec/frontend/admin/abuse_report/components/abuse_report_app_spec.js
+++ b/spec/frontend/admin/abuse_report/components/abuse_report_app_spec.js
@@ -1,28 +1,46 @@
-import { shallowMount } from '@vue/test-utils';
import { GlAlert } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import AbuseReportApp from '~/admin/abuse_report/components/abuse_report_app.vue';
import ReportHeader from '~/admin/abuse_report/components/report_header.vue';
import UserDetails from '~/admin/abuse_report/components/user_details.vue';
+import ReportDetails from '~/admin/abuse_report/components/report_details.vue';
import ReportedContent from '~/admin/abuse_report/components/reported_content.vue';
-import HistoryItems from '~/admin/abuse_report/components/history_items.vue';
+import ActivityEventsList from '~/admin/abuse_report/components/activity_events_list.vue';
+import ActivityHistoryItem from '~/admin/abuse_report/components/activity_history_item.vue';
import { SUCCESS_ALERT } from '~/admin/abuse_report/constants';
import { mockAbuseReport } from '../mock_data';
describe('AbuseReportApp', () => {
let wrapper;
+ const { similarOpenReports } = mockAbuseReport.user;
+
const findAlert = () => wrapper.findComponent(GlAlert);
const findReportHeader = () => wrapper.findComponent(ReportHeader);
const findUserDetails = () => wrapper.findComponent(UserDetails);
- const findReportedContent = () => wrapper.findComponent(ReportedContent);
- const findHistoryItems = () => wrapper.findComponent(HistoryItems);
- const createComponent = (props = {}) => {
- wrapper = shallowMount(AbuseReportApp, {
+ const findReportedContent = () => wrapper.findByTestId('reported-content');
+ const findReportedContentForSimilarReports = () =>
+ wrapper.findAllByTestId('reported-content-similar-open-reports');
+ const firstReportedContentForSimilarReports = () =>
+ findReportedContentForSimilarReports().at(0).findComponent(ReportedContent);
+
+ const findActivityList = () => wrapper.findComponent(ActivityEventsList);
+ const findActivityItem = () => wrapper.findByTestId('activity');
+ const findActivityForSimilarReports = () =>
+ wrapper.findAllByTestId('activity-similar-open-reports');
+ const firstActivityForSimilarReports = () =>
+ findActivityForSimilarReports().at(0).findComponent(ActivityHistoryItem);
+
+ const findReportDetails = () => wrapper.findComponent(ReportDetails);
+
+ const createComponent = (props = {}, provide = {}) => {
+ wrapper = shallowMountExtended(AbuseReportApp, {
propsData: {
abuseReport: mockAbuseReport,
...props,
},
+ provide,
});
};
@@ -64,7 +82,7 @@ describe('AbuseReportApp', () => {
});
});
- describe('ReportHeader', () => {
+ describe('Report header', () => {
it('renders ReportHeader', () => {
expect(findReportHeader().props('user')).toBe(mockAbuseReport.user);
expect(findReportHeader().props('report')).toBe(mockAbuseReport.report);
@@ -83,7 +101,7 @@ describe('AbuseReportApp', () => {
});
});
- describe('UserDetails', () => {
+ describe('User Details', () => {
it('renders UserDetails', () => {
expect(findUserDetails().props('user')).toBe(mockAbuseReport.user);
});
@@ -101,13 +119,47 @@ describe('AbuseReportApp', () => {
});
});
- it('renders ReportedContent', () => {
- expect(findReportedContent().props('report')).toBe(mockAbuseReport.report);
- expect(findReportedContent().props('reporter')).toBe(mockAbuseReport.reporter);
+ describe('Reported Content', () => {
+ it('renders ReportedContent', () => {
+ expect(findReportedContent().props('report')).toBe(mockAbuseReport.report);
+ });
+
+ it('renders similar abuse reports', () => {
+ expect(findReportedContentForSimilarReports()).toHaveLength(similarOpenReports.length);
+ expect(firstReportedContentForSimilarReports().props('report')).toBe(similarOpenReports[0]);
+ });
});
- it('renders HistoryItems', () => {
- expect(findHistoryItems().props('report')).toBe(mockAbuseReport.report);
- expect(findHistoryItems().props('reporter')).toBe(mockAbuseReport.reporter);
+ describe('ReportDetails', () => {
+ describe('when abuseReportLabels feature flag is enabled', () => {
+ it('renders ReportDetails', () => {
+ createComponent({}, { glFeatures: { abuseReportLabels: true } });
+
+ expect(findReportDetails().props('reportId')).toBe(mockAbuseReport.report.globalId);
+ });
+ });
+
+ describe('when abuseReportLabels feature flag is disabled', () => {
+ it('does not render ReportDetails', () => {
+ createComponent({}, { glFeatures: { abuseReportLabels: false } });
+
+ expect(findReportDetails().exists()).toBe(false);
+ });
+ });
+ });
+
+ describe('Activity', () => {
+ it('renders the activity events list', () => {
+ expect(findActivityList().exists()).toBe(true);
+ });
+
+ it('renders activity item for abuse report', () => {
+ expect(findActivityItem().props('report')).toBe(mockAbuseReport.report);
+ });
+
+ it('renders activity items for similar abuse reports', () => {
+ expect(findActivityForSimilarReports()).toHaveLength(similarOpenReports.length);
+ expect(firstActivityForSimilarReports().props('report')).toBe(similarOpenReports[0]);
+ });
});
});
diff --git a/spec/frontend/admin/abuse_report/components/activity_events_list_spec.js b/spec/frontend/admin/abuse_report/components/activity_events_list_spec.js
new file mode 100644
index 00000000000..cd1120d2db4
--- /dev/null
+++ b/spec/frontend/admin/abuse_report/components/activity_events_list_spec.js
@@ -0,0 +1,30 @@
+import { shallowMount } from '@vue/test-utils';
+import ActivityEventsList from '~/admin/abuse_report/components/activity_events_list.vue';
+
+describe('ActivityEventsList', () => {
+ let wrapper;
+
+ const mockSlotContent = 'Test slot content';
+
+ const findActivityEventsList = () => wrapper.findComponent(ActivityEventsList);
+
+ const createComponent = () => {
+ wrapper = shallowMount(ActivityEventsList, {
+ slots: {
+ 'history-items': mockSlotContent,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders activity title', () => {
+ expect(findActivityEventsList().text()).toContain('Activity');
+ });
+
+ it('renders history-items slot', () => {
+ expect(findActivityEventsList().text()).toContain(mockSlotContent);
+ });
+});
diff --git a/spec/frontend/admin/abuse_report/components/history_items_spec.js b/spec/frontend/admin/abuse_report/components/activity_history_item_spec.js
index 86e994fdc57..3f430b0143e 100644
--- a/spec/frontend/admin/abuse_report/components/history_items_spec.js
+++ b/spec/frontend/admin/abuse_report/components/activity_history_item_spec.js
@@ -1,25 +1,23 @@
import { GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { sprintf } from '~/locale';
-import HistoryItems from '~/admin/abuse_report/components/history_items.vue';
+import ActivityHistoryItem from '~/admin/abuse_report/components/activity_history_item.vue';
import HistoryItem from '~/vue_shared/components/registry/history_item.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
-import { HISTORY_ITEMS_I18N } from '~/admin/abuse_report/constants';
import { mockAbuseReport } from '../mock_data';
-describe('HistoryItems', () => {
+describe('ActivityHistoryItem', () => {
let wrapper;
- const { report, reporter } = mockAbuseReport;
+ const { report } = mockAbuseReport;
const findHistoryItem = () => wrapper.findComponent(HistoryItem);
const findTimeAgo = () => wrapper.findComponent(TimeAgoTooltip);
const createComponent = (props = {}) => {
- wrapper = shallowMount(HistoryItems, {
+    wrapper = shallowMount(ActivityHistoryItem, {
propsData: {
report,
- reporter,
...props,
},
stubs: {
@@ -38,8 +36,8 @@ describe('HistoryItems', () => {
describe('rendering the title', () => {
it('renders the reporters name and the category', () => {
- const title = sprintf(HISTORY_ITEMS_I18N.reportedByForCategory, {
- name: reporter.name,
+ const title = sprintf('Reported by %{name} for %{category}.', {
+ name: report.reporter.name,
category: report.category,
});
expect(findHistoryItem().text()).toContain(title);
@@ -47,12 +45,12 @@ describe('HistoryItems', () => {
describe('when the reporter is not defined', () => {
beforeEach(() => {
- createComponent({ reporter: undefined });
+ createComponent({ report: { ...report, reporter: undefined } });
});
it('renders the `No user found` as the reporters name and the category', () => {
- const title = sprintf(HISTORY_ITEMS_I18N.reportedByForCategory, {
- name: HISTORY_ITEMS_I18N.deletedReporter,
+ const title = sprintf('Reported by %{name} for %{category}.', {
+ name: 'No user found',
category: report.category,
});
expect(findHistoryItem().text()).toContain(title);
diff --git a/spec/frontend/admin/abuse_report/components/labels_select_spec.js b/spec/frontend/admin/abuse_report/components/labels_select_spec.js
new file mode 100644
index 00000000000..a22dcc18e10
--- /dev/null
+++ b/spec/frontend/admin/abuse_report/components/labels_select_spec.js
@@ -0,0 +1,297 @@
+import MockAdapter from 'axios-mock-adapter';
+import { GlButton, GlDropdown, GlDropdownItem, GlLoadingIcon } from '@gitlab/ui';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK, HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
+import LabelsSelect from '~/admin/abuse_report/components/labels_select.vue';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { stubComponent, RENDER_ALL_SLOTS_TEMPLATE } from 'helpers/stub_component';
+import labelsQuery from '~/admin/abuse_report/components/graphql/abuse_report_labels.query.graphql';
+import DropdownWidget from '~/vue_shared/components/dropdown/dropdown_widget/dropdown_widget.vue';
+import DropdownValue from '~/sidebar/components/labels/labels_select_widget/dropdown_value.vue';
+import DropdownHeader from '~/sidebar/components/labels/labels_select_widget/dropdown_header.vue';
+import DropdownContentsCreateView from '~/sidebar/components/labels/labels_select_widget/dropdown_contents_create_view.vue';
+import DropdownFooter from '~/sidebar/components/labels/labels_select_widget/dropdown_footer.vue';
+import { createAlert } from '~/alert';
+import { mockLabelsQueryResponse, mockLabel1, mockLabel2 } from '../mock_data';
+
+jest.mock('~/alert');
+
+Vue.use(VueApollo);
+
+describe('Labels select component', () => {
+ let mock;
+ let wrapper;
+ let fakeApollo;
+
+ const selectedText = () => wrapper.findByTestId('selected-labels').text();
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findEditButton = () => wrapper.findComponent(GlButton);
+ const findDropdown = () => wrapper.findComponent(DropdownWidget);
+ const findDropdownHeader = () => wrapper.findComponent(DropdownHeader);
+ const findDropdownValue = () => wrapper.findComponent(DropdownValue);
+ const findCreateView = () => wrapper.findComponent(DropdownContentsCreateView);
+ const findDropdownFooter = () => wrapper.findComponent(DropdownFooter);
+
+ const labelsQueryHandlerSuccess = jest.fn().mockResolvedValue(mockLabelsQueryResponse);
+ const labelsQueryHandlerFailure = jest.fn().mockRejectedValue(new Error());
+
+ const updatePath = '/admin/abuse_reports/1';
+ const listPath = '/admin/abuse_reports';
+
+ async function openLabelsDropdown() {
+ findEditButton().vm.$emit('click');
+ await waitForPromises();
+ }
+
+ const selectLabel = (label) => {
+ findDropdown().vm.$emit('set-option', label);
+    return nextTick();
+ };
+
+ const createComponent = ({ props = {}, labelsQueryHandler = labelsQueryHandlerSuccess } = {}) => {
+ fakeApollo = createMockApollo([[labelsQuery, labelsQueryHandler]]);
+ wrapper = shallowMountExtended(LabelsSelect, {
+ apolloProvider: fakeApollo,
+ propsData: {
+ report: { labels: [] },
+ canEdit: true,
+ ...props,
+ },
+ provide: {
+ updatePath,
+ listPath,
+ },
+ stubs: {
+ GlDropdown,
+ GlDropdownItem,
+ DropdownWidget: stubComponent(DropdownWidget, {
+ template: RENDER_ALL_SLOTS_TEMPLATE,
+ methods: { showDropdown: jest.fn() },
+ }),
+ },
+ });
+ };
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ fakeApollo = null;
+ mock.restore();
+ });
+
+ describe('initial load', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('displays loading icon', () => {
+ expect(findLoadingIcon().exists()).toEqual(true);
+ });
+
+ it('disables edit button', () => {
+ expect(findEditButton().props('disabled')).toEqual(true);
+ });
+
+ describe('after initial load', () => {
+ beforeEach(() => {
+ wrapper.setProps({ report: { labels: [mockLabel1] } });
+ });
+
+ it('does not display loading icon', () => {
+ expect(findLoadingIcon().exists()).toEqual(false);
+ });
+
+ it('enables edit button', () => {
+ expect(findEditButton().props('disabled')).toEqual(false);
+ });
+
+ it('renders fetched DropdownValue with the correct props', () => {
+ const component = findDropdownValue();
+ expect(component.isVisible()).toBe(true);
+ expect(component.props('selectedLabels')).toEqual([mockLabel1]);
+ expect(component.props('labelsFilterBasePath')).toBe(listPath);
+ });
+ });
+ });
+
+ describe('when there are no selected labels', () => {
+ it('displays "None"', () => {
+ createComponent();
+
+ expect(selectedText()).toContain('None');
+ });
+ });
+
+ describe('when there are selected labels', () => {
+ beforeEach(() => {
+ createComponent({ props: { report: { labels: [mockLabel1, mockLabel2] } } });
+
+ mock.onPut(updatePath).reply(HTTP_STATUS_OK, {});
+ jest.spyOn(axios, 'put');
+ });
+
+ it('renders selected labels in DropdownValue', () => {
+ expect(findDropdownValue().isVisible()).toBe(true);
+ expect(findDropdownValue().props('selectedLabels')).toEqual([mockLabel1, mockLabel2]);
+ });
+
+ it('selected labels can be removed', async () => {
+ findDropdownValue().vm.$emit('onLabelRemove', mockLabel1.id);
+ await nextTick();
+
+ expect(findDropdownValue().props('selectedLabels')).toEqual([mockLabel2]);
+ expect(axios.put).toHaveBeenCalledWith(updatePath, {
+ label_ids: [mockLabel2.id],
+ });
+ });
+ });
+
+ describe('when not editing', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('does not trigger abuse report labels query', () => {
+ expect(labelsQueryHandlerSuccess).not.toHaveBeenCalled();
+ });
+
+ it('does not render the dropdown', () => {
+ expect(findDropdown().isVisible()).toBe(false);
+ });
+ });
+
+ describe('when editing', () => {
+ beforeEach(async () => {
+ createComponent();
+ await openLabelsDropdown();
+ });
+
+ it('triggers abuse report labels query', () => {
+ expect(labelsQueryHandlerSuccess).toHaveBeenCalledTimes(1);
+ });
+
+ it('renders dropdown with fetched labels', () => {
+ expect(findDropdown().isVisible()).toBe(true);
+ expect(findDropdown().props('options')).toEqual([mockLabel1, mockLabel2]);
+ });
+
+ it('selects/deselects a label', async () => {
+ await selectLabel(mockLabel1);
+
+ expect(findDropdownValue().props('selectedLabels')).toEqual([mockLabel1]);
+
+ await selectLabel(mockLabel1);
+
+ expect(selectedText()).toContain('None');
+ });
+
+ it('triggers abuse report labels query when search term is set', async () => {
+ findDropdown().vm.$emit('set-search', 'Dos');
+ await waitForPromises();
+
+ expect(labelsQueryHandlerSuccess).toHaveBeenCalledTimes(2);
+ expect(labelsQueryHandlerSuccess).toHaveBeenCalledWith({ searchTerm: 'Dos' });
+ });
+
+ it('does not render DropdownContentsCreateView', () => {
+ expect(findCreateView().exists()).toBe(false);
+ });
+
+ it('renders DropdownFooter', () => {
+ expect(findDropdownFooter().props('footerCreateLabelTitle')).toEqual('Create label');
+ expect(findDropdownFooter().props('footerManageLabelTitle')).toEqual('');
+ });
+
+ describe('when DropdownHeader emits `toggleDropdownContentsCreateView` event', () => {
+ beforeEach(() => {
+ findDropdownHeader().vm.$emit('toggleDropdownContentsCreateView');
+ });
+
+ it('renders DropdownContentsCreateView and removes DropdownFooter', () => {
+ expect(findCreateView().props('workspaceType')).toEqual('abuseReport');
+ expect(findDropdownFooter().exists()).toBe(false);
+ });
+
+ describe('when DropdownContentsCreateView emits `hideCreateView` event', () => {
+ it('removes itself', async () => {
+ findCreateView().vm.$emit('hideCreateView');
+ await nextTick();
+
+ expect(findCreateView().exists()).toBe(false);
+ });
+ });
+
+ describe('when DropdownContentsCreateView emits `labelCreated` event', () => {
+ it('selects created label', async () => {
+ findCreateView().vm.$emit('labelCreated', mockLabel1);
+ await nextTick();
+
+ expect(findDropdownValue().props('selectedLabels')).toEqual([mockLabel1]);
+ });
+ });
+ });
+
+ describe('when DropdownFooter emits `toggleDropdownContentsCreateView` event', () => {
+ it('renders DropdownContentsCreateView', async () => {
+ findDropdownFooter().vm.$emit('toggleDropdownContentsCreateView');
+ await nextTick();
+
+ expect(findCreateView().props('workspaceType')).toEqual('abuseReport');
+ });
+ });
+ });
+
+ describe('after edit', () => {
+ const setup = async (response) => {
+ mock.onPut(updatePath).reply(response, {});
+ jest.spyOn(axios, 'put');
+
+ createComponent();
+ await openLabelsDropdown();
+ await selectLabel(mockLabel1);
+
+ findDropdown().vm.$emit('hide');
+ };
+
+ describe('successful save', () => {
+ it('saves', async () => {
+ await setup(HTTP_STATUS_OK);
+
+ expect(axios.put).toHaveBeenCalledWith(updatePath, {
+ label_ids: [mockLabel1.id],
+ });
+ });
+ });
+
+ describe('unsuccessful save', () => {
+ it('creates an alert', async () => {
+ await setup(HTTP_STATUS_INTERNAL_SERVER_ERROR);
+
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith({
+ message: 'An error occurred while updating labels.',
+ captureError: true,
+ error: expect.any(Error),
+ });
+ });
+ });
+ });
+
+ describe('failed abuse report labels query', () => {
+ it('creates an alert', async () => {
+ createComponent({ labelsQueryHandler: labelsQueryHandlerFailure });
+ await openLabelsDropdown();
+
+ expect(createAlert).toHaveBeenCalledWith({
+ message: 'An error occurred while searching for labels, please try again.',
+ });
+ });
+ });
+});
diff --git a/spec/frontend/admin/abuse_report/components/report_actions_spec.js b/spec/frontend/admin/abuse_report/components/report_actions_spec.js
index 6dd6d0e55c5..0e20630db14 100644
--- a/spec/frontend/admin/abuse_report/components/report_actions_spec.js
+++ b/spec/frontend/admin/abuse_report/components/report_actions_spec.js
@@ -191,31 +191,4 @@ describe('ReportActions', () => {
);
});
});
-
- describe('when moderateUserPath is not present', () => {
- it('sends the request to updatePath', async () => {
- jest.spyOn(axios, 'put');
- axiosMock.onPut(report.updatePath).replyOnce(HTTP_STATUS_OK, {});
-
- const reportWithoutModerateUserPath = { ...report };
- delete reportWithoutModerateUserPath.moderateUserPath;
-
- createComponent({ report: reportWithoutModerateUserPath });
-
- clickActionsButton();
-
- await nextTick();
-
- selectAction(params.user_action);
- selectReason(params.reason);
-
- await nextTick();
-
- submitForm();
-
- await waitForPromises();
-
- expect(axios.put).toHaveBeenCalledWith(report.updatePath, expect.any(Object));
- });
- });
});
diff --git a/spec/frontend/admin/abuse_report/components/report_details_spec.js b/spec/frontend/admin/abuse_report/components/report_details_spec.js
new file mode 100644
index 00000000000..a5c43dcb82b
--- /dev/null
+++ b/spec/frontend/admin/abuse_report/components/report_details_spec.js
@@ -0,0 +1,74 @@
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import LabelsSelect from '~/admin/abuse_report/components/labels_select.vue';
+import ReportDetails from '~/admin/abuse_report/components/report_details.vue';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import abuseReportQuery from '~/admin/abuse_report/components/graphql/abuse_report.query.graphql';
+import { createAlert } from '~/alert';
+import { mockAbuseReport, mockLabel1, mockReportQueryResponse } from '../mock_data';
+
+jest.mock('~/alert');
+
+Vue.use(VueApollo);
+
+describe('Report Details', () => {
+ let wrapper;
+ let fakeApollo;
+
+ const findLabelsSelect = () => wrapper.findComponent(LabelsSelect);
+
+ const abuseReportQueryHandlerSuccess = jest.fn().mockResolvedValue(mockReportQueryResponse);
+ const abuseReportQueryHandlerFailure = jest.fn().mockRejectedValue(new Error());
+
+ const createComponent = ({ abuseReportQueryHandler = abuseReportQueryHandlerSuccess } = {}) => {
+ fakeApollo = createMockApollo([[abuseReportQuery, abuseReportQueryHandler]]);
+ wrapper = shallowMount(ReportDetails, {
+ apolloProvider: fakeApollo,
+ propsData: {
+ reportId: mockAbuseReport.report.globalId,
+ },
+ });
+ };
+
+ afterEach(() => {
+ fakeApollo = null;
+ });
+
+ describe('successful abuse report query', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('triggers abuse report query', async () => {
+ await waitForPromises();
+
+ expect(abuseReportQueryHandlerSuccess).toHaveBeenCalledWith({
+ id: mockAbuseReport.report.globalId,
+ });
+ });
+
+ it('renders LabelsSelect with the fetched report', async () => {
+ expect(findLabelsSelect().props('report').labels).toEqual([]);
+
+ await waitForPromises();
+
+ expect(findLabelsSelect().props('report').labels).toEqual([mockLabel1]);
+ });
+ });
+
+ describe('failed abuse report query', () => {
+ beforeEach(async () => {
+ createComponent({ abuseReportQueryHandler: abuseReportQueryHandlerFailure });
+
+ await waitForPromises();
+ });
+
+ it('creates an alert', () => {
+ expect(createAlert).toHaveBeenCalledWith({
+ message: 'An error occurred while fetching labels, please try again.',
+ });
+ });
+ });
+});
diff --git a/spec/frontend/admin/abuse_report/components/report_header_spec.js b/spec/frontend/admin/abuse_report/components/report_header_spec.js
index f22f3af091f..6ec380f0387 100644
--- a/spec/frontend/admin/abuse_report/components/report_header_spec.js
+++ b/spec/frontend/admin/abuse_report/components/report_header_spec.js
@@ -54,37 +54,30 @@ describe('ReportHeader', () => {
});
describe.each`
- status | text | variant | className | badgeIcon
- ${STATUS_OPEN} | ${REPORT_HEADER_I18N[STATUS_OPEN]} | ${'success'} | ${'issuable-status-badge-open'} | ${'issues'}
- ${STATUS_CLOSED} | ${REPORT_HEADER_I18N[STATUS_CLOSED]} | ${'info'} | ${'issuable-status-badge-closed'} | ${'issue-closed'}
- `(
- 'rendering the report $status status badge',
- ({ status, text, variant, className, badgeIcon }) => {
- beforeEach(() => {
- createComponent({ report: { ...report, status } });
- });
-
- it(`indicates the ${status} status`, () => {
- expect(findBadge().text()).toBe(text);
- });
-
- it(`with the ${variant} variant`, () => {
- expect(findBadge().props('variant')).toBe(variant);
- });
-
- it(`with the text '${text}' as 'aria-label'`, () => {
- expect(findBadge().attributes('aria-label')).toBe(text);
- });
-
- it(`contains the ${className} class`, () => {
- expect(findBadge().element.classList).toContain(className);
- });
-
- it(`has an icon with the ${badgeIcon} name`, () => {
- expect(findIcon().props('name')).toBe(badgeIcon);
- });
- },
- );
+ status | text | variant | badgeIcon
+ ${STATUS_OPEN} | ${REPORT_HEADER_I18N[STATUS_OPEN]} | ${'success'} | ${'issues'}
+ ${STATUS_CLOSED} | ${REPORT_HEADER_I18N[STATUS_CLOSED]} | ${'info'} | ${'issue-closed'}
+ `('rendering the report $status status badge', ({ status, text, variant, badgeIcon }) => {
+ beforeEach(() => {
+ createComponent({ report: { ...report, status } });
+ });
+
+ it(`indicates the ${status} status`, () => {
+ expect(findBadge().text()).toBe(text);
+ });
+
+ it(`with the ${variant} variant`, () => {
+ expect(findBadge().props('variant')).toBe(variant);
+ });
+
+ it(`with the text '${text}' as 'aria-label'`, () => {
+ expect(findBadge().attributes('aria-label')).toBe(text);
+ });
+
+ it(`has an icon with the ${badgeIcon} name`, () => {
+ expect(findIcon().props('name')).toBe(badgeIcon);
+ });
+ });
it('renders the actions', () => {
const actionsComponent = findActions();
diff --git a/spec/frontend/admin/abuse_report/components/reported_content_spec.js b/spec/frontend/admin/abuse_report/components/reported_content_spec.js
index 9fc49f08f8c..2f16f5a7af2 100644
--- a/spec/frontend/admin/abuse_report/components/reported_content_spec.js
+++ b/spec/frontend/admin/abuse_report/components/reported_content_spec.js
@@ -14,7 +14,7 @@ const modalId = 'abuse-report-screenshot-modal';
describe('ReportedContent', () => {
let wrapper;
- const { report, reporter } = { ...mockAbuseReport };
+ const { report } = { ...mockAbuseReport };
const findScreenshotButton = () => wrapper.findByTestId('screenshot-button');
const findReportUrlButton = () => wrapper.findByTestId('report-url-button');
@@ -32,7 +32,6 @@ describe('ReportedContent', () => {
wrapper = shallowMountExtended(ReportedContent, {
propsData: {
report,
- reporter,
...props,
},
stubs: {
@@ -167,18 +166,18 @@ describe('ReportedContent', () => {
describe('rendering the card footer', () => {
it('renders the reporters avatar', () => {
- expect(findAvatar().props('src')).toBe(reporter.avatarUrl);
+ expect(findAvatar().props('src')).toBe(report.reporter.avatarUrl);
});
it('renders the users name', () => {
- expect(findCardFooter().text()).toContain(reporter.name);
+ expect(findCardFooter().text()).toContain(report.reporter.name);
});
it('renders a link to the users profile page', () => {
const link = findProfileLink();
- expect(link.attributes('href')).toBe(reporter.path);
- expect(link.text()).toBe(`@${reporter.username}`);
+ expect(link.attributes('href')).toBe(report.reporter.path);
+ expect(link.text()).toBe(`@${report.reporter.username}`);
});
it('renders the time-ago tooltip', () => {
diff --git a/spec/frontend/admin/abuse_report/components/user_details_spec.js b/spec/frontend/admin/abuse_report/components/user_details_spec.js
index ca499fbaa6e..f3d8d5bb610 100644
--- a/spec/frontend/admin/abuse_report/components/user_details_spec.js
+++ b/spec/frontend/admin/abuse_report/components/user_details_spec.js
@@ -18,7 +18,7 @@ describe('UserDetails', () => {
const findLinkFor = (attribute) => findLinkIn(findUserDetail(attribute));
const findTimeIn = (component) => component.findComponent(TimeAgoTooltip).props('time');
const findTimeFor = (attribute) => findTimeIn(findUserDetail(attribute));
- const findOtherReport = (index) => wrapper.findByTestId(`other-report-${index}`);
+ const findPastReport = (index) => wrapper.findByTestId(`past-report-${index}`);
const createComponent = (props = {}) => {
wrapper = shallowMountExtended(UserDetails, {
@@ -38,8 +38,8 @@ describe('UserDetails', () => {
describe('createdAt', () => {
it('renders the users createdAt with the correct label', () => {
- expect(findUserDetailLabel('createdAt')).toBe(USER_DETAILS_I18N.createdAt);
- expect(findTimeFor('createdAt')).toBe(user.createdAt);
+ expect(findUserDetailLabel('created-at')).toBe(USER_DETAILS_I18N.createdAt);
+ expect(findTimeFor('created-at')).toBe(user.createdAt);
});
});
@@ -67,32 +67,34 @@ describe('UserDetails', () => {
describe('creditCard', () => {
it('renders the correct label', () => {
- expect(findUserDetailLabel('creditCard')).toBe(USER_DETAILS_I18N.creditCard);
+ expect(findUserDetailLabel('credit-card-verification')).toBe(USER_DETAILS_I18N.creditCard);
});
it('renders the users name', () => {
- expect(findUserDetail('creditCard').text()).toContain(
+ expect(findUserDetail('credit-card-verification').text()).toContain(
sprintf(USER_DETAILS_I18N.registeredWith, { ...user.creditCard }),
);
- expect(findUserDetail('creditCard').text()).toContain(user.creditCard.name);
+ expect(findUserDetail('credit-card-verification').text()).toContain(user.creditCard.name);
});
describe('similar credit cards', () => {
it('renders the number of similar records', () => {
- expect(findUserDetail('creditCard').text()).toContain(
+ expect(findUserDetail('credit-card-verification').text()).toContain(
sprintf('Card matches %{similarRecordsCount} accounts', { ...user.creditCard }),
);
});
it('renders a link to the matching cards', () => {
- expect(findLinkFor('creditCard').attributes('href')).toBe(user.creditCard.cardMatchesLink);
+ expect(findLinkFor('credit-card-verification').attributes('href')).toBe(
+ user.creditCard.cardMatchesLink,
+ );
- expect(findLinkFor('creditCard').text()).toBe(
+ expect(findLinkFor('credit-card-verification').text()).toBe(
sprintf('%{similarRecordsCount} accounts', { ...user.creditCard }),
);
- expect(findLinkFor('creditCard').text()).toContain(
+ expect(findLinkFor('credit-card-verification').text()).toContain(
user.creditCard.similarRecordsCount.toString(),
);
});
@@ -105,13 +107,13 @@ describe('UserDetails', () => {
});
it('does not render the number of similar records', () => {
- expect(findUserDetail('creditCard').text()).not.toContain(
+ expect(findUserDetail('credit-card-verification').text()).not.toContain(
sprintf('Card matches %{similarRecordsCount} accounts', { ...user.creditCard }),
);
});
it('does not render a link to the matching cards', () => {
- expect(findLinkFor('creditCard').exists()).toBe(false);
+ expect(findLinkFor('credit-card-verification').exists()).toBe(false);
});
});
});
@@ -124,55 +126,55 @@ describe('UserDetails', () => {
});
it('does not render the users creditCard', () => {
- expect(findUserDetail('creditCard').exists()).toBe(false);
+ expect(findUserDetail('credit-card-verification').exists()).toBe(false);
});
});
});
describe('otherReports', () => {
it('renders the correct label', () => {
- expect(findUserDetailLabel('otherReports')).toBe(USER_DETAILS_I18N.otherReports);
+ expect(findUserDetailLabel('past-closed-reports')).toBe(USER_DETAILS_I18N.pastReports);
});
- describe.each(user.otherReports)('renders a line for report %#', (otherReport) => {
- const index = user.otherReports.indexOf(otherReport);
+ describe.each(user.pastClosedReports)('renders a line for report %#', (pastReport) => {
+ const index = user.pastClosedReports.indexOf(pastReport);
it('renders the category', () => {
- expect(findOtherReport(index).text()).toContain(
- sprintf('Reported for %{category}', { ...otherReport }),
+ expect(findPastReport(index).text()).toContain(
+ sprintf('Reported for %{category}', { ...pastReport }),
);
});
it('renders a link to the report', () => {
- expect(findLinkIn(findOtherReport(index)).attributes('href')).toBe(otherReport.reportPath);
+ expect(findLinkIn(findPastReport(index)).attributes('href')).toBe(pastReport.reportPath);
});
it('renders the time it was created', () => {
- expect(findTimeIn(findOtherReport(index))).toBe(otherReport.createdAt);
+ expect(findTimeIn(findPastReport(index))).toBe(pastReport.createdAt);
});
});
describe('when the users otherReports is empty', () => {
beforeEach(() => {
createComponent({
- user: { ...user, otherReports: [] },
+ user: { ...user, pastClosedReports: [] },
});
});
it('does not render the users otherReports', () => {
- expect(findUserDetail('otherReports').exists()).toBe(false);
+ expect(findUserDetail('past-closed-reports').exists()).toBe(false);
});
});
});
describe('normalLocation', () => {
it('renders the correct label', () => {
- expect(findUserDetailLabel('normalLocation')).toBe(USER_DETAILS_I18N.normalLocation);
+ expect(findUserDetailLabel('normal-location')).toBe(USER_DETAILS_I18N.normalLocation);
});
describe('when the users mostUsedIp is blank', () => {
it('renders the users lastSignInIp', () => {
- expect(findUserDetailValue('normalLocation')).toBe(user.lastSignInIp);
+ expect(findUserDetailValue('normal-location')).toBe(user.lastSignInIp);
});
});
@@ -186,23 +188,25 @@ describe('UserDetails', () => {
});
it('renders the users mostUsedIp', () => {
- expect(findUserDetailValue('normalLocation')).toBe(mostUsedIp);
+ expect(findUserDetailValue('normal-location')).toBe(mostUsedIp);
});
});
});
describe('lastSignInIp', () => {
it('renders the users lastSignInIp with the correct label', () => {
- expect(findUserDetailLabel('lastSignInIp')).toBe(USER_DETAILS_I18N.lastSignInIp);
- expect(findUserDetailValue('lastSignInIp')).toBe(user.lastSignInIp);
+ expect(findUserDetailLabel('last-sign-in-ip')).toBe(USER_DETAILS_I18N.lastSignInIp);
+ expect(findUserDetailValue('last-sign-in-ip')).toBe(user.lastSignInIp);
});
});
it.each(['snippets', 'groups', 'notes'])(
'renders the users %s with the correct label',
(attribute) => {
- expect(findUserDetailLabel(attribute)).toBe(USER_DETAILS_I18N[attribute]);
- expect(findUserDetailValue(attribute)).toBe(
+ const testId = `user-${attribute}-count`;
+
+ expect(findUserDetailLabel(testId)).toBe(USER_DETAILS_I18N[attribute]);
+ expect(findUserDetailValue(testId)).toBe(
USER_DETAILS_I18N[`${attribute}Count`](user[`${attribute}Count`]),
);
},
diff --git a/spec/frontend/admin/abuse_report/mock_data.js b/spec/frontend/admin/abuse_report/mock_data.js
index 8ff0c7d507a..ee61eabfa66 100644
--- a/spec/frontend/admin/abuse_report/mock_data.js
+++ b/spec/frontend/admin/abuse_report/mock_data.js
@@ -15,7 +15,7 @@ export const mockAbuseReport = {
similarRecordsCount: 2,
cardMatchesLink: '/admin/users/spamuser417/card_match',
},
- otherReports: [
+ pastClosedReports: [
{
category: 'offensive',
createdAt: '2023-02-28T10:09:54.982Z',
@@ -32,14 +32,27 @@ export const mockAbuseReport = {
snippetsCount: 0,
groupsCount: 0,
notesCount: 6,
- },
- reporter: {
- username: 'reporter',
- name: 'R Porter',
- avatarUrl: 'https://www.gravatar.com/avatar/a2579caffc69ea5d7606f9dd9d8504ba?s=80&d=identicon',
- path: '/reporter',
+ similarOpenReports: [
+ {
+ status: 'open',
+ message: 'This is obvious spam',
+ reportedAt: '2023-03-29T09:39:50.502Z',
+ category: 'spam',
+ type: 'issue',
+ content: '',
+ screenshot: null,
+ reporter: {
+ username: 'reporter 2',
+ name: 'Another Reporter',
+ avatarUrl: 'https://www.gravatar.com/avatar/anotherreporter',
+ path: '/reporter-2',
+ },
+ updatePath: '/admin/abuse_reports/28',
+ },
+ ],
},
report: {
+ globalId: 'gid://gitlab/AbuseReport/1',
status: 'open',
message: 'This is obvious spam',
reportedAt: '2023-03-29T09:39:50.502Z',
@@ -52,5 +65,66 @@ export const mockAbuseReport = {
'/uploads/-/system/abuse_report/screenshot/27/Screenshot_2023-03-30_at_16.56.37.png',
updatePath: '/admin/abuse_reports/27',
moderateUserPath: '/admin/abuse_reports/27/moderate_user',
+ reporter: {
+ username: 'reporter',
+ name: 'R Porter',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/a2579caffc69ea5d7606f9dd9d8504ba?s=80&d=identicon',
+ path: '/reporter',
+ },
+ },
+};
+
+export const mockLabel1 = {
+ id: 'gid://gitlab/Admin::AbuseReportLabel/1',
+ title: 'Uno',
+ color: '#F0AD4E',
+ textColor: '#FFFFFF',
+ description: null,
+};
+
+export const mockLabel2 = {
+ id: 'gid://gitlab/Admin::AbuseReportLabel/2',
+ title: 'Dos',
+ color: '#F0AD4E',
+ textColor: '#FFFFFF',
+ description: null,
+};
+
+export const mockLabelsQueryResponse = {
+ data: {
+ labels: {
+ nodes: [mockLabel1, mockLabel2],
+ __typename: 'LabelConnection',
+ },
+ },
+};
+
+export const mockReportQueryResponse = {
+ data: {
+ abuseReport: {
+ labels: {
+ nodes: [mockLabel1],
+ __typename: 'LabelConnection',
+ },
+ __typename: 'AbuseReport',
+ },
+ },
+};
+
+export const mockCreateLabelResponse = {
+ data: {
+ labelCreate: {
+ label: {
+ id: 'gid://gitlab/Admin::AbuseReportLabel/1',
+ color: '#ed9121',
+ description: null,
+ title: 'abuse report label',
+ textColor: '#FFFFFF',
+ __typename: 'Label',
+ },
+ errors: [],
+ __typename: 'AbuseReportLabelCreatePayload',
+ },
},
};
diff --git a/spec/frontend/admin/abuse_reports/components/abuse_report_row_spec.js b/spec/frontend/admin/abuse_reports/components/abuse_report_row_spec.js
index 8482faccca0..7f915dbacb1 100644
--- a/spec/frontend/admin/abuse_reports/components/abuse_report_row_spec.js
+++ b/spec/frontend/admin/abuse_reports/components/abuse_report_row_spec.js
@@ -1,3 +1,4 @@
+import { GlLabel } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import setWindowLocation from 'helpers/set_window_location_helper';
import AbuseReportRow from '~/admin/abuse_reports/components/abuse_report_row.vue';
@@ -13,6 +14,7 @@ describe('AbuseReportRow', () => {
const findListItem = () => wrapper.findComponent(ListItem);
const findAbuseCategory = () => wrapper.findComponent(AbuseCategory);
+ const findLabels = () => wrapper.findAllComponents(GlLabel);
const findAbuseReportTitle = () => wrapper.findByTestId('abuse-report-title');
const findDisplayedDate = () => wrapper.findByTestId('abuse-report-date');
@@ -95,6 +97,18 @@ describe('AbuseReportRow', () => {
expect(findAbuseCategory().exists()).toBe(true);
});
+ it('renders labels', () => {
+ const labels = findLabels();
+ expect(labels).toHaveLength(2);
+
+ const { color, title } = mockAbuseReports[0].labels[0];
+ expect(labels.at(0).props()).toMatchObject({
+ backgroundColor: color,
+ title,
+ target: `${window.location.href}?${encodeURIComponent('label_name[]')}=${title}`,
+ });
+ });
+
describe('aggregated report', () => {
const mockAggregatedAbuseReport = mockAbuseReports[1];
const { reportedUser, category, count } = mockAggregatedAbuseReport;
diff --git a/spec/frontend/admin/abuse_reports/mock_data.js b/spec/frontend/admin/abuse_reports/mock_data.js
index 33a28a21cca..3101321d02d 100644
--- a/spec/frontend/admin/abuse_reports/mock_data.js
+++ b/spec/frontend/admin/abuse_reports/mock_data.js
@@ -1,3 +1,5 @@
+import { mockLabel1, mockLabel2 } from '../abuse_report/mock_data';
+
export const mockAbuseReports = [
{
category: 'spam',
@@ -7,6 +9,7 @@ export const mockAbuseReports = [
reportedUser: { name: 'Mr. Abuser' },
reportPath: '/admin/abuse_reports/1',
count: 1,
+ labels: [mockLabel1, mockLabel2],
},
{
category: 'phishing',
diff --git a/spec/frontend/admin/applications/components/__snapshots__/delete_application_spec.js.snap b/spec/frontend/admin/applications/components/__snapshots__/delete_application_spec.js.snap
index 459a113b6d1..7f068cf9ee9 100644
--- a/spec/frontend/admin/applications/components/__snapshots__/delete_application_spec.js.snap
+++ b/spec/frontend/admin/applications/components/__snapshots__/delete_application_spec.js.snap
@@ -10,7 +10,6 @@ exports[`DeleteApplication the modal component form matches the snapshot 1`] = `
type="hidden"
value="delete"
/>
-
<input
name="authenticity_token"
type="hidden"
diff --git a/spec/frontend/admin/topics/components/__snapshots__/remove_avatar_spec.js.snap b/spec/frontend/admin/topics/components/__snapshots__/remove_avatar_spec.js.snap
index 00f742c3614..e0fca2590e7 100644
--- a/spec/frontend/admin/topics/components/__snapshots__/remove_avatar_spec.js.snap
+++ b/spec/frontend/admin/topics/components/__snapshots__/remove_avatar_spec.js.snap
@@ -10,7 +10,6 @@ exports[`RemoveAvatar the modal component form matches the snapshot 1`] = `
type="hidden"
value="delete"
/>
-
<input
name="authenticity_token"
type="hidden"
diff --git a/spec/frontend/admin/users/components/associations/__snapshots__/associations_list_item_spec.js.snap b/spec/frontend/admin/users/components/associations/__snapshots__/associations_list_item_spec.js.snap
index 4237685e45c..d8157c6ff20 100644
--- a/spec/frontend/admin/users/components/associations/__snapshots__/associations_list_item_spec.js.snap
+++ b/spec/frontend/admin/users/components/associations/__snapshots__/associations_list_item_spec.js.snap
@@ -1,3 +1,10 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`AssociationsListItem renders interpolated message in a \`li\` element 1`] = `"<li><strong>5</strong> groups</li>"`;
+exports[`AssociationsListItem renders interpolated message in a \`li\` element 1`] = `
+<li>
+ <strong>
+ 5
+ </strong>
+ groups
+</li>
+`;
diff --git a/spec/frontend/admin/users/components/modals/__snapshots__/delete_user_modal_spec.js.snap b/spec/frontend/admin/users/components/modals/__snapshots__/delete_user_modal_spec.js.snap
index 265569ac0e3..7f853f13363 100644
--- a/spec/frontend/admin/users/components/modals/__snapshots__/delete_user_modal_spec.js.snap
+++ b/spec/frontend/admin/users/components/modals/__snapshots__/delete_user_modal_spec.js.snap
@@ -10,13 +10,11 @@ exports[`Delete user modal renders modal with form included 1`] = `
type="hidden"
value="delete"
/>
-
<input
name="authenticity_token"
type="hidden"
value="csrf"
/>
-
<gl-form-input-stub
autocomplete="off"
autofocus=""
diff --git a/spec/frontend/admin/users/components/user_actions_spec.js b/spec/frontend/admin/users/components/user_actions_spec.js
index 69755c6142a..b44986ea7de 100644
--- a/spec/frontend/admin/users/components/user_actions_spec.js
+++ b/spec/frontend/admin/users/components/user_actions_spec.js
@@ -18,7 +18,7 @@ describe('AdminUserActions component', () => {
const findUserActions = (id) => wrapper.findByTestId(`user-actions-${id}`);
const findEditButton = (id = user.id) => findUserActions(id).find('[data-testid="edit"]');
const findActionsDropdown = (id = user.id) =>
- findUserActions(id).find('[data-testid="dropdown-toggle"]');
+ findUserActions(id).find('[data-testid="user-actions-dropdown-toggle"]');
const findDisclosureGroup = () => wrapper.findComponent(GlDisclosureDropdownGroup);
const initComponent = ({ actions = [], showButtonLabels } = {}) => {
diff --git a/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap b/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap
index 80d3676ffee..84156d6daf3 100644
--- a/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap
+++ b/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap
@@ -7,7 +7,6 @@ exports[`Alert integration settings form default state should match the default
message="Action to take when receiving an alert. %{docsLink}"
/>
</p>
-
<form>
<gl-form-group-stub
class="gl-pl-0"
@@ -16,15 +15,14 @@ exports[`Alert integration settings form default state should match the default
>
<gl-form-checkbox-stub
checked="true"
- data-qa-selector="create_incident_checkbox"
- id="2"
+ data-testid="create-incident-checkbox"
+ id="reference-0"
>
<span>
Create an incident. Incidents are created for each alert triggered.
</span>
</gl-form-checkbox-stub>
</gl-form-group-stub>
-
<gl-form-group-stub
class="col-8 col-md-9 gl-px-6"
label-for="alert-integration-settings-issue-template"
@@ -34,11 +32,9 @@ exports[`Alert integration settings form default state should match the default
>
<label
class="gl-display-inline-flex"
- for="alert-integration-settings-issue-template"
+ for="reference-1"
>
-
Incident template (optional).
-
<gl-link-stub
href="/help/user/project/description_templates#create-an-issue-template"
target="_blank"
@@ -50,14 +46,13 @@ exports[`Alert integration settings form default state should match the default
</span>
</gl-link-stub>
</label>
-
<gl-collapsible-listbox-stub
block="true"
category="primary"
- data-qa-selector="incident_templates_dropdown"
+ data-testid="incident-templates-dropdown"
headertext=""
icon=""
- id="alert-integration-settings-issue-template"
+ id="reference-1"
items="[object Object]"
noresultstext="No results found"
placement="left"
@@ -71,50 +66,45 @@ exports[`Alert integration settings form default state should match the default
variant="default"
/>
</gl-form-group-stub>
-
<gl-form-group-stub
- class="gl-pl-0 gl-mb-5"
+ class="gl-mb-5 gl-pl-0"
labeldescription=""
optionaltext="(optional)"
>
<gl-form-checkbox-stub
- data-qa-selector="enable_email_notification_checkbox"
- id="3"
+ data-testid="enable-email-notification-checkbox"
+ id="reference-2"
>
<span>
Send a single email notification to Owners and Maintainers for new alerts.
</span>
</gl-form-checkbox-stub>
</gl-form-group-stub>
-
<gl-form-group-stub
- class="gl-pl-0 gl-mb-5"
+ class="gl-mb-5 gl-pl-0"
labeldescription=""
optionaltext="(optional)"
>
<gl-form-checkbox-stub
checked="true"
- id="4"
+ id="reference-3"
>
<span>
Automatically close associated incident when a recovery alert notification resolves an alert
</span>
</gl-form-checkbox-stub>
</gl-form-group-stub>
-
<gl-button-stub
buttontextclasses=""
category="primary"
class="js-no-auto-disable"
- data-qa-selector="save_changes_button"
+ data-testid="save-changes-button"
icon=""
size="medium"
type="submit"
variant="confirm"
>
-
Save changes
-
</gl-button-stub>
</form>
</div>
diff --git a/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js b/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js
index a16a03a2fc5..e01dde8f62c 100644
--- a/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js
+++ b/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js
@@ -57,7 +57,7 @@ describe('AlertsSettingsWrapper', () => {
const findIntegrationsList = () => wrapper.findComponent(IntegrationsList);
const findLoader = () => findIntegrationsList().findComponent(GlLoadingIcon);
const findIntegrations = () => findIntegrationsList().findAll('table tbody tr');
- const findAddIntegrationBtn = () => wrapper.findByTestId('add-integration-btn');
+ const findAddIntegrationBtn = () => wrapper.findByTestId('add-integration-button');
const findAlertsSettingsForm = () => wrapper.findComponent(AlertsSettingsForm);
const findAlert = () => wrapper.findComponent(GlAlert);
diff --git a/spec/frontend/analytics/cycle_analytics/components/__snapshots__/total_time_spec.js.snap b/spec/frontend/analytics/cycle_analytics/components/__snapshots__/total_time_spec.js.snap
index 92927ef16ec..5f712ba41f4 100644
--- a/spec/frontend/analytics/cycle_analytics/components/__snapshots__/total_time_spec.js.snap
+++ b/spec/frontend/analytics/cycle_analytics/components/__snapshots__/total_time_spec.js.snap
@@ -1,28 +1,52 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`TotalTime with a blank object should render -- 1`] = `"<span> -- </span>"`;
+exports[`TotalTime with a blank object should render -- 1`] = `
+<span>
+ --
+</span>
+`;
exports[`TotalTime with a valid time object with {"days": 3, "mins": 47, "seconds": 3} 1`] = `
-"<span>
- 3 <span>days</span></span>"
+<span>
+ 3
+ <span>
+ days
+ </span>
+</span>
`;
exports[`TotalTime with a valid time object with {"hours": 7, "mins": 20, "seconds": 10} 1`] = `
-"<span>
- 7 <span>hrs</span></span>"
+<span>
+ 7
+ <span>
+ hrs
+ </span>
+</span>
`;
exports[`TotalTime with a valid time object with {"hours": 23, "mins": 10} 1`] = `
-"<span>
- 23 <span>hrs</span></span>"
+<span>
+ 23
+ <span>
+ hrs
+ </span>
+</span>
`;
exports[`TotalTime with a valid time object with {"mins": 47, "seconds": 3} 1`] = `
-"<span>
- 47 <span>mins</span></span>"
+<span>
+ 47
+ <span>
+ mins
+ </span>
+</span>
`;
exports[`TotalTime with a valid time object with {"seconds": 35} 1`] = `
-"<span>
- 35 <span>s</span></span>"
+<span>
+ 35
+ <span>
+ s
+ </span>
+</span>
`;
diff --git a/spec/frontend/api/application_settings_api_spec.js b/spec/frontend/api/application_settings_api_spec.js
new file mode 100644
index 00000000000..92a6a159913
--- /dev/null
+++ b/spec/frontend/api/application_settings_api_spec.js
@@ -0,0 +1,45 @@
+import MockAdapter from 'axios-mock-adapter';
+import * as applicationSettingsApi from '~/api/application_settings_api';
+import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
+
+describe('~/api/application_settings_api.js', () => {
+ const MOCK_SETTINGS_RES = { test_setting: 'foo' };
+ let mock;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ window.gon = { api_version: 'v7' };
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('getApplicationSettings', () => {
+ it('fetches application settings', () => {
+ const expectedUrl = '/api/v7/application/settings';
+ jest.spyOn(axios, 'get');
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, MOCK_SETTINGS_RES);
+
+ return applicationSettingsApi.getApplicationSettings().then(({ data }) => {
+ expect(data).toEqual(MOCK_SETTINGS_RES);
+ expect(axios.get).toHaveBeenCalledWith(expectedUrl);
+ });
+ });
+ });
+
+ describe('updateApplicationSettings', () => {
+ it('updates application settings', () => {
+ const expectedUrl = '/api/v7/application/settings';
+ const MOCK_REQ = { another_setting: 'bar' };
+ jest.spyOn(axios, 'put');
+ mock.onPut(expectedUrl).reply(HTTP_STATUS_OK, MOCK_SETTINGS_RES);
+
+ return applicationSettingsApi.updateApplicationSettings(MOCK_REQ).then(({ data }) => {
+ expect(data).toEqual(MOCK_SETTINGS_RES);
+ expect(axios.put).toHaveBeenCalledWith(expectedUrl, MOCK_REQ);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap b/spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap
index 58aee76e381..1456830b0eb 100644
--- a/spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap
+++ b/spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap
@@ -2,63 +2,45 @@
exports[`Keep latest artifact toggle when application keep latest artifact setting is enabled sets correct setting value in toggle with query result 1`] = `
<div>
- <!---->
-
<div
- class="gl-toggle-wrapper gl-display-flex gl-mb-0 flex-grow-1 gl-flex-direction-column"
+ class="gl-display-flex gl-flex-direction-column gl-mb-0 gl-toggle-wrapper"
data-testid="toggle-wrapper"
>
<span
- class="gl-toggle-label-container gl-mb-3"
+ class="gl-flex-shrink-0 gl-mb-3 gl-toggle-label"
+ data-testid="toggle-label"
+ id="reference-0"
+ >
+ Keep artifacts from most recent successful jobs
+ </span>
+ <button
+ aria-checked="true"
+ aria-describedby="toggle-help-2"
+ aria-labelledby="reference-0"
+ class="gl-flex-shrink-0 gl-toggle is-checked"
+ role="switch"
+ type="button"
>
<span
- class="gl-toggle-label"
- data-testid="toggle-label"
- id="toggle-label-4"
+ class="toggle-icon"
>
- Keep artifacts from most recent successful jobs
+ <gl-icon-stub
+ name="mobile-issue-close"
+ size="16"
+ />
</span>
-
- <!---->
- </span>
-
+ </button>
<span
- class="gl-toggle-switch-container"
+ class="gl-help-label"
+ data-testid="toggle-help"
+ id="reference-1"
>
- <!---->
-
- <button
- aria-checked="true"
- aria-describedby="toggle-help-2"
- aria-labelledby="toggle-label-4"
- class="gl-flex-shrink-0 gl-toggle is-checked"
- role="switch"
- type="button"
- >
- <span
- class="toggle-icon"
- >
- <gl-icon-stub
- name="mobile-issue-close"
- size="16"
- />
- </span>
- </button>
-
- <span
- class="gl-help-label"
- data-testid="toggle-help"
- id="toggle-help-2"
- >
-
The latest artifacts created by jobs in the most recent successful pipeline will be stored.
-
- <gl-link-stub
- href="/help/ci/pipelines/job_artifacts"
- >
- Learn more.
- </gl-link-stub>
- </span>
+ <gl-link-stub
+ href="/help/ci/pipelines/job_artifacts"
+ >
+ Learn more.
+ </gl-link-stub>
</span>
</div>
</div>
diff --git a/spec/frontend/avatar_helper_spec.js b/spec/frontend/avatar_helper_spec.js
deleted file mode 100644
index 91bf8e28774..00000000000
--- a/spec/frontend/avatar_helper_spec.js
+++ /dev/null
@@ -1,110 +0,0 @@
-import { TEST_HOST } from 'spec/test_constants';
-import {
- DEFAULT_SIZE_CLASS,
- IDENTICON_BG_COUNT,
- renderAvatar,
- renderIdenticon,
- getIdenticonBackgroundClass,
- getIdenticonTitle,
-} from '~/helpers/avatar_helper';
-import { getFirstCharacterCapitalized } from '~/lib/utils/text_utility';
-
-function matchAll(str) {
- return new RegExp(`^${str}$`);
-}
-
-describe('avatar_helper', () => {
- describe('getIdenticonBackgroundClass', () => {
- it('returns identicon bg class from id that is a number', () => {
- expect(getIdenticonBackgroundClass(1)).toEqual('bg2');
- });
-
- it('returns identicon bg class from id that is a string', () => {
- expect(getIdenticonBackgroundClass('1')).toEqual('bg2');
- });
-
- it('returns identicon bg class from id that is a GraphQL string id', () => {
- expect(getIdenticonBackgroundClass('gid://gitlab/Project/1')).toEqual('bg2');
- });
-
- it('returns identicon bg class from unparsable string', () => {
- expect(getIdenticonBackgroundClass('gid://gitlab/')).toEqual('bg1');
- });
-
- it(`wraps around if id is bigger than ${IDENTICON_BG_COUNT}`, () => {
- expect(getIdenticonBackgroundClass(IDENTICON_BG_COUNT + 4)).toEqual('bg5');
- expect(getIdenticonBackgroundClass(IDENTICON_BG_COUNT * 5 + 6)).toEqual('bg7');
- });
- });
-
- describe('getIdenticonTitle', () => {
- it('returns identicon title from name', () => {
- expect(getIdenticonTitle('Lorem')).toEqual('L');
- expect(getIdenticonTitle('dolar-sit-amit')).toEqual('D');
- expect(getIdenticonTitle('%-with-special-chars')).toEqual('%');
- });
-
- it('returns space if name is falsey', () => {
- expect(getIdenticonTitle('')).toEqual(' ');
- expect(getIdenticonTitle(null)).toEqual(' ');
- });
- });
-
- describe('renderIdenticon', () => {
- it('renders with the first letter as title and bg based on id', () => {
- const entity = {
- id: IDENTICON_BG_COUNT + 3,
- name: 'Xavior',
- };
- const options = {
- sizeClass: 's32',
- };
-
- const result = renderIdenticon(entity, options);
-
- expect(result).toHaveClass(`identicon ${options.sizeClass} bg4`);
- expect(result).toHaveText(matchAll(getFirstCharacterCapitalized(entity.name)));
- });
-
- it('renders with defaults, if no options are given', () => {
- const entity = {
- id: 1,
- name: 'tanuki',
- };
-
- const result = renderIdenticon(entity);
-
- expect(result).toHaveClass(`identicon ${DEFAULT_SIZE_CLASS} bg2`);
- expect(result).toHaveText(matchAll(getFirstCharacterCapitalized(entity.name)));
- });
- });
-
- describe('renderAvatar', () => {
- it('renders an image with the avatarUrl', () => {
- const avatarUrl = `${TEST_HOST}/not-real-assets/test.png`;
-
- const result = renderAvatar({
- avatar_url: avatarUrl,
- });
-
- expect(result).toBeMatchedBy('img');
- expect(result).toHaveAttr('src', avatarUrl);
- expect(result).toHaveClass(DEFAULT_SIZE_CLASS);
- });
-
- it('renders an identicon if no avatarUrl', () => {
- const entity = {
- id: 1,
- name: 'walrus',
- };
- const options = {
- sizeClass: 's16',
- };
-
- const result = renderAvatar(entity, options);
-
- expect(result).toHaveClass(`identicon ${options.sizeClass} bg2`);
- expect(result).toHaveText(matchAll(getFirstCharacterCapitalized(entity.name)));
- });
- });
-});
diff --git a/spec/frontend/behaviors/markdown/paste_markdown_table_spec.js b/spec/frontend/behaviors/markdown/paste_markdown_table_spec.js
index 7044618fd9e..154347b08b5 100644
--- a/spec/frontend/behaviors/markdown/paste_markdown_table_spec.js
+++ b/spec/frontend/behaviors/markdown/paste_markdown_table_spec.js
@@ -77,7 +77,8 @@ describe('PasteMarkdownTable', () => {
data.getData = jest.fn().mockImplementation((type) => {
if (type === 'text/html') {
return '<table><tr><td>First</td><td>Last</td><tr><td>John</td><td>Doe</td><tr><td>Jane</td><td>Doe</td></table>';
- } else if (type === 'text/plain') {
+ }
+ if (type === 'text/plain') {
return 'First\tLast\nJohn\tDoe\nJane\tDoe';
}
@@ -102,7 +103,8 @@ describe('PasteMarkdownTable', () => {
data.getData = jest.fn().mockImplementation((type) => {
if (type === 'text/html') {
return '<table><tr><td>First</td><td>Last</td><tr><td>John</td><td>Doe</td><tr><td>Jane</td><td>/td></table>';
- } else if (type === 'text/plain') {
+ }
+ if (type === 'text/plain') {
return 'First\tLast\nJohn\tDoe\nJane';
}
diff --git a/spec/frontend/blob/components/__snapshots__/blob_edit_header_spec.js.snap b/spec/frontend/blob/components/__snapshots__/blob_edit_header_spec.js.snap
index 1733c4d4bb4..bd7485e9d80 100644
--- a/spec/frontend/blob/components/__snapshots__/blob_edit_header_spec.js.snap
+++ b/spec/frontend/blob/components/__snapshots__/blob_edit_header_spec.js.snap
@@ -2,10 +2,10 @@
exports[`Blob Header Editing rendering matches the snapshot 1`] = `
<div
- class="js-file-title file-title-flex-parent"
+ class="file-title-flex-parent js-file-title"
>
<div
- class="gl-display-flex gl-align-items-center gl-w-full"
+ class="gl-align-items-center gl-display-flex gl-w-full"
>
<gl-form-input-stub
class="form-control js-snippet-file-name"
@@ -14,8 +14,6 @@ exports[`Blob Header Editing rendering matches the snapshot 1`] = `
type="text"
value="foo.md"
/>
-
- <!---->
</div>
</div>
`;
diff --git a/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap b/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap
index 4ae55f34e4c..292a0da2bfe 100644
--- a/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap
+++ b/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap
@@ -2,9 +2,8 @@
exports[`Blob Header Filepath rendering matches the snapshot 1`] = `
<div
- class="file-header-content d-flex align-items-center lh-100"
+ class="align-items-center d-flex file-header-content lh-100"
>
-
<file-icon-stub
aria-hidden="true"
cssclasses="gl-mr-3"
@@ -12,14 +11,12 @@ exports[`Blob Header Filepath rendering matches the snapshot 1`] = `
filename="foo/bar/dummy.md"
size="16"
/>
-
<strong
- class="file-title-name mr-1 js-blob-header-filepath"
+ class="file-title-name js-blob-header-filepath mr-1"
data-qa-selector="file_title_content"
>
foo/bar/dummy.md
</strong>
-
<clipboard-button-stub
category="tertiary"
cssclass="gl-mr-2"
@@ -30,13 +27,10 @@ exports[`Blob Header Filepath rendering matches the snapshot 1`] = `
tooltipplacement="top"
variant="default"
/>
-
<small
class="gl-mr-3"
>
a lot
</small>
-
- <!---->
</div>
`;
diff --git a/spec/frontend/blob/components/__snapshots__/blob_header_spec.js.snap b/spec/frontend/blob/components/__snapshots__/blob_header_spec.js.snap
deleted file mode 100644
index b430dc15557..00000000000
--- a/spec/frontend/blob/components/__snapshots__/blob_header_spec.js.snap
+++ /dev/null
@@ -1,34 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`Blob Header Default Actions rendering matches the snapshot 1`] = `
-<div
- class="js-file-title file-title-flex-parent"
->
- <div
- class="gl-display-flex"
- >
- <table-of-contents-stub
- class="gl-pr-2"
- />
-
- <blob-filepath-stub
- blob="[object Object]"
- showpath="true"
- />
- </div>
-
- <div
- class="gl-display-flex gl-flex-wrap file-actions"
- >
- <viewer-switcher-stub
- docicon="document"
- value="simple"
- />
-
- <default-actions-stub
- activeviewer="simple"
- rawpath="https://testing.com/flightjs/flight/snippets/51/raw"
- />
- </div>
-</div>
-`;
diff --git a/spec/frontend/blob/components/blob_header_spec.js b/spec/frontend/blob/components/blob_header_spec.js
index 47e09bb38bc..922d6a0211b 100644
--- a/spec/frontend/blob/components/blob_header_spec.js
+++ b/spec/frontend/blob/components/blob_header_spec.js
@@ -1,4 +1,6 @@
+import Vue from 'vue';
import { shallowMount, mount } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import BlobHeader from '~/blob/components/blob_header.vue';
import DefaultActions from '~/blob/components/blob_header_default_actions.vue';
@@ -10,8 +12,14 @@ import {
SIMPLE_BLOB_VIEWER_TITLE,
} from '~/blob/components/constants';
import TableContents from '~/blob/components/table_contents.vue';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import WebIdeLink from 'ee_else_ce/vue_shared/components/web_ide_link.vue';
+import userInfoQuery from '~/blob/queries/user_info.query.graphql';
+import applicationInfoQuery from '~/blob/queries/application_info.query.graphql';
+import { Blob, userInfoMock, applicationInfoMock } from './mock_data';
-import { Blob } from './mock_data';
+Vue.use(VueApollo);
describe('Blob Header Default Actions', () => {
let wrapper;
@@ -26,14 +34,29 @@ describe('Blob Header Default Actions', () => {
const findBlobFilePath = () => wrapper.findComponent(BlobFilepath);
const findRichTextEditorBtn = () => wrapper.findByLabelText(RICH_BLOB_VIEWER_TITLE);
const findSimpleTextEditorBtn = () => wrapper.findByLabelText(SIMPLE_BLOB_VIEWER_TITLE);
+ const findWebIdeLink = () => wrapper.findComponent(WebIdeLink);
- function createComponent({
+ async function createComponent({
blobProps = {},
options = {},
propsData = {},
mountFn = shallowMount,
} = {}) {
+ const userInfoMockResolver = jest.fn().mockResolvedValue({
+ data: { ...userInfoMock },
+ });
+
+ const applicationInfoMockResolver = jest.fn().mockResolvedValue({
+ data: { ...applicationInfoMock },
+ });
+
+ const fakeApollo = createMockApollo([
+ [userInfoQuery, userInfoMockResolver],
+ [applicationInfoQuery, applicationInfoMockResolver],
+ ]);
+
wrapper = mountFn(BlobHeader, {
+ apolloProvider: fakeApollo,
provide: {
...defaultProvide,
},
@@ -43,12 +66,40 @@ describe('Blob Header Default Actions', () => {
},
...options,
});
+
+ await waitForPromises();
}
describe('rendering', () => {
- it('matches the snapshot', () => {
- createComponent();
- expect(wrapper.element).toMatchSnapshot();
+ describe('WebIdeLink component', () => {
+ it('renders the WebIdeLink component with the correct props', async () => {
+ const { ideEditPath, editBlobPath, gitpodBlobUrl, pipelineEditorPath } = Blob;
+ const showForkSuggestion = false;
+ await createComponent({ propsData: { showForkSuggestion } });
+
+ expect(findWebIdeLink().props()).toMatchObject({
+ showEditButton: true,
+ editUrl: editBlobPath,
+ webIdeUrl: ideEditPath,
+ needsToFork: showForkSuggestion,
+ showPipelineEditorButton: Boolean(pipelineEditorPath),
+ pipelineEditorUrl: pipelineEditorPath,
+ gitpodUrl: gitpodBlobUrl,
+ showGitpodButton: applicationInfoMock.gitpodEnabled,
+ gitpodEnabled: userInfoMock.currentUser.gitpodEnabled,
+ userPreferencesGitpodPath: userInfoMock.currentUser.preferencesGitpodPath,
+ userProfileEnableGitpodPath: userInfoMock.currentUser.profileEnableGitpodPath,
+ });
+ });
+
+ it.each([[{ archived: true }], [{ editBlobPath: null }]])(
+ 'does not render the WebIdeLink component when blob is archived or does not have an edit path',
+ (blobProps) => {
+ createComponent({ blobProps });
+
+ expect(findWebIdeLink().exists()).toBe(false);
+ },
+ );
});
describe('default render', () => {
diff --git a/spec/frontend/blob/components/mock_data.js b/spec/frontend/blob/components/mock_data.js
index 6ecf5091591..7ed526fba97 100644
--- a/spec/frontend/blob/components/mock_data.js
+++ b/spec/frontend/blob/components/mock_data.js
@@ -30,6 +30,10 @@ export const Blob = {
richViewer: {
...RichViewerMock,
},
+ ideEditPath: 'ide/edit',
+ editBlobPath: 'edit/blob',
+ gitpodBlobUrl: 'gitpod/blob/url',
+ pipelineEditorPath: 'pipeline/editor/path',
};
export const BinaryBlob = {
@@ -60,3 +64,14 @@ export const SimpleBlobContentMock = {
export const mockEnvironmentName = 'my.testing.environment';
export const mockEnvironmentPath = 'https://my.testing.environment';
+
+export const userInfoMock = {
+ currentUser: {
+ id: '123',
+ gitpodEnabled: true,
+ preferencesGitpodPath: '/-/profile/preferences#user_gitpod_enabled',
+ profileEnableGitpodPath: '/-/profile?user%5Bgitpod_enabled%5D=true',
+ },
+};
+
+export const applicationInfoMock = { gitpodEnabled: true };
diff --git a/spec/frontend/blob/line_highlighter_spec.js b/spec/frontend/blob/line_highlighter_spec.js
index de39a8f688a..c7a86d6230a 100644
--- a/spec/frontend/blob/line_highlighter_spec.js
+++ b/spec/frontend/blob/line_highlighter_spec.js
@@ -72,6 +72,15 @@ describe('LineHighlighter', () => {
expect(utils.scrollToElement).toHaveBeenCalledWith('#L5', expect.anything());
});
+ it('does not scroll to the first highlighted line when scrollEnabled is `false`', () => {
+ jest.spyOn(utils, 'scrollToElement');
+ const highlighter = new LineHighlighter();
+ const scrollEnabled = false;
+ highlighter.highlightHash('#L5-25', scrollEnabled);
+
+ expect(utils.scrollToElement).not.toHaveBeenCalled();
+ });
+
it('discards click events', () => {
const clickSpy = jest.fn();
diff --git a/spec/frontend/blob/openapi/index_spec.js b/spec/frontend/blob/openapi/index_spec.js
index 95e86398ab8..c96a021550d 100644
--- a/spec/frontend/blob/openapi/index_spec.js
+++ b/spec/frontend/blob/openapi/index_spec.js
@@ -4,16 +4,16 @@ import { TEST_HOST } from 'helpers/test_constants';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import renderOpenApi from '~/blob/openapi';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
+import setWindowLocation from 'helpers/set_window_location_helper';
describe('OpenAPI blob viewer', () => {
const id = 'js-openapi-viewer';
const mockEndpoint = 'some/endpoint';
let mock;
- beforeEach(async () => {
+ beforeEach(() => {
setHTMLFixture(`<div id="${id}" data-endpoint="${mockEndpoint}"></div>`);
mock = new MockAdapter(axios).onGet().reply(HTTP_STATUS_OK);
- await renderOpenApi();
});
afterEach(() => {
@@ -21,9 +21,28 @@ describe('OpenAPI blob viewer', () => {
mock.restore();
});
- it('initializes SwaggerUI with the correct configuration', () => {
- expect(document.body.innerHTML).toContain(
- `<iframe src="${TEST_HOST}/-/sandbox/swagger" sandbox="allow-scripts allow-popups allow-forms" frameborder="0" width="100%" height="1000"></iframe>`,
- );
+ describe('without config options', () => {
+ beforeEach(async () => {
+ await renderOpenApi();
+ });
+
+ it('initializes SwaggerUI without config options', () => {
+ expect(document.body.innerHTML).toContain(
+ `<iframe src="${TEST_HOST}/-/sandbox/swagger" sandbox="allow-scripts allow-popups allow-forms" frameborder="0" width="100%" height="1000"></iframe>`,
+ );
+ });
+ });
+
+ describe('with config options', () => {
+ beforeEach(async () => {
+ setWindowLocation('?displayOperationId=true');
+ await renderOpenApi();
+ });
+
+ it('initializes SwaggerUI with the correct config options', () => {
+ expect(document.body.innerHTML).toContain(
+ `<iframe src="${TEST_HOST}/-/sandbox/swagger?displayOperationId=true" sandbox="allow-scripts allow-popups allow-forms" frameborder="0" width="100%" height="1000"></iframe>`,
+ );
+ });
});
});
diff --git a/spec/frontend/boards/board_card_inner_spec.js b/spec/frontend/boards/board_card_inner_spec.js
index 1740676161f..95b5712bab0 100644
--- a/spec/frontend/boards/board_card_inner_spec.js
+++ b/spec/frontend/boards/board_card_inner_spec.js
@@ -91,6 +91,7 @@ describe('Board card component', () => {
rootPath: '/',
scopedLabelsAvailable: false,
isEpicBoard,
+ allowSubEpics: isEpicBoard,
issuableType: TYPE_ISSUE,
isGroupBoard,
isApolloBoard: false,
diff --git a/spec/frontend/boards/components/board_card_spec.js b/spec/frontend/boards/components/board_card_spec.js
index 167efb94fcc..f0d40af94fe 100644
--- a/spec/frontend/boards/components/board_card_spec.js
+++ b/spec/frontend/boards/components/board_card_spec.js
@@ -72,6 +72,7 @@ describe('Board card', () => {
issuableType: 'issue',
isGroupBoard: true,
disabled: false,
+ allowSubEpics: false,
isApolloBoard: false,
...provide,
},
diff --git a/spec/frontend/boards/components/boards_selector_spec.js b/spec/frontend/boards/components/boards_selector_spec.js
index b17a5589c07..fa18b47cf54 100644
--- a/spec/frontend/boards/components/boards_selector_spec.js
+++ b/spec/frontend/boards/components/boards_selector_spec.js
@@ -87,6 +87,7 @@ describe('BoardsSelector', () => {
isGroupBoard = false,
isProjectBoard = false,
provide = {},
+ props = {},
} = {}) => {
fakeApollo = createMockApollo([
[projectBoardsQuery, projectBoardsQueryHandler],
@@ -100,6 +101,7 @@ describe('BoardsSelector', () => {
apolloProvider: fakeApollo,
propsData: {
throttleDuration,
+ ...props,
},
attachTo: document.body,
provide: {
@@ -307,4 +309,14 @@ describe('BoardsSelector', () => {
});
});
});
+
+ describe('Apollo boards', () => {
+ it('displays loading state of dropdown while current board is being fetched', () => {
+ createComponent({
+ props: { isCurrentBoardLoading: true },
+ provide: { isApolloBoard: true },
+ });
+ expect(findDropdown().props('loading')).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/boards/components/issue_board_filtered_search_spec.js b/spec/frontend/boards/components/issue_board_filtered_search_spec.js
index 5b5b68d5dbe..16ad54f0854 100644
--- a/spec/frontend/boards/components/issue_board_filtered_search_spec.js
+++ b/spec/frontend/boards/components/issue_board_filtered_search_spec.js
@@ -61,12 +61,7 @@ describe('IssueBoardFilter', () => {
({ isSignedIn }) => {
createComponent({ isSignedIn });
- const tokens = mockTokens(
- fetchLabelsSpy,
- fetchUsersSpy,
- wrapper.vm.fetchMilestones,
- isSignedIn,
- );
+ const tokens = mockTokens(fetchLabelsSpy, fetchUsersSpy, isSignedIn);
expect(findBoardsFilteredSearch().props('tokens')).toEqual(orderBy(tokens, ['title']));
},
diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_title_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_title_spec.js
index 1b526e6fbec..f354067e226 100644
--- a/spec/frontend/boards/components/sidebar/board_sidebar_title_spec.js
+++ b/spec/frontend/boards/components/sidebar/board_sidebar_title_spec.js
@@ -8,6 +8,7 @@ import BoardEditableItem from '~/boards/components/sidebar/board_editable_item.v
import BoardSidebarTitle from '~/boards/components/sidebar/board_sidebar_title.vue';
import { createStore } from '~/boards/stores';
import issueSetTitleMutation from '~/boards/graphql/issue_set_title.mutation.graphql';
+import * as cacheUpdates from '~/boards/graphql/cache_updates';
import updateEpicTitleMutation from '~/sidebar/queries/update_epic_title.mutation.graphql';
import { updateIssueTitleResponse, updateEpicTitleResponse } from '../../mock_data';
@@ -40,6 +41,10 @@ describe('BoardSidebarTitle', () => {
.fn()
.mockResolvedValue(updateEpicTitleResponse);
+ beforeEach(() => {
+ cacheUpdates.setError = jest.fn();
+ });
+
afterEach(() => {
localStorage.clear();
store = null;
@@ -207,8 +212,7 @@ describe('BoardSidebarTitle', () => {
it('collapses sidebar and renders former item title', () => {
expect(findCollapsed().isVisible()).toBe(true);
expect(findTitle().text()).toContain(TEST_ISSUE_B.title);
- expect(storeDispatch).toHaveBeenCalledWith(
- 'setError',
+ expect(cacheUpdates.setError).toHaveBeenCalledWith(
expect.objectContaining({ message: 'An error occurred when updating the title' }),
);
});
diff --git a/spec/frontend/boards/mock_data.js b/spec/frontend/boards/mock_data.js
index 8f57a6eb7da..dfcdb4c05d0 100644
--- a/spec/frontend/boards/mock_data.js
+++ b/spec/frontend/boards/mock_data.js
@@ -827,7 +827,7 @@ export const mockConfidentialToken = {
],
};
-export const mockTokens = (fetchLabels, fetchUsers, fetchMilestones, isSignedIn) => [
+export const mockTokens = (fetchLabels, fetchUsers, isSignedIn) => [
{
icon: 'user',
title: TOKEN_TITLE_ASSIGNEE,
@@ -870,7 +870,8 @@ export const mockTokens = (fetchLabels, fetchUsers, fetchMilestones, isSignedIn)
shouldSkipSort: true,
token: MilestoneToken,
unique: true,
- fetchMilestones,
+ fullPath: 'gitlab-org',
+ isProject: false,
},
{
icon: 'issues',
diff --git a/spec/frontend/branches/components/__snapshots__/delete_merged_branches_spec.js.snap b/spec/frontend/branches/components/__snapshots__/delete_merged_branches_spec.js.snap
index 4da56a865d5..ee8031f2475 100644
--- a/spec/frontend/branches/components/__snapshots__/delete_merged_branches_spec.js.snap
+++ b/spec/frontend/branches/components/__snapshots__/delete_merged_branches_spec.js.snap
@@ -16,102 +16,82 @@ exports[`Delete merged branches component Delete merged branches confirmation mo
toggletext=""
variant="default"
>
-
<ul
aria-labelledby="dropdown-toggle-btn-25"
class="gl-new-dropdown-contents"
data-testid="disclosure-content"
- id="disclosure-26"
+ id="reference-0"
tabindex="-1"
>
<gl-disclosure-dropdown-item-stub
item="[object Object]"
/>
</ul>
-
</gl-base-dropdown-stub>
-
<b-button-stub
- class="gl-display-block gl-md-display-none! gl-button btn-danger-secondary"
+ class="btn-danger-secondary gl-button gl-display-block gl-md-display-none!"
data-testid="delete-merged-branches-button"
size="md"
tag="button"
type="button"
variant="danger"
>
- <!---->
-
- <!---->
-
<span
class="gl-button-text"
>
-
- Delete merged branches
-
+ Delete merged branches
</span>
</b-button-stub>
-
<div>
<form
action="/namespace/project/-/merged_branches"
method="post"
>
<p>
- You are about to
+ You are about to
<strong>
delete all branches
</strong>
- that were merged into
+ that were merged into
<code>
master
</code>
.
</p>
-
<p>
-
This may include merged branches that are not visible on the current screen.
-
</p>
-
<p>
-
A branch won't be deleted if it is protected or associated with an open merge request.
-
</p>
-
<p>
- This bulk action is
+ This bulk action is
<strong>
permanent and cannot be undone or recovered
</strong>
.
</p>
-
<p>
- Please type the following to confirm:
+ Please type the following to confirm:
<code>
delete
</code>
- .
+ .
<b-form-input-stub
aria-labelledby="input-label"
autocomplete="off"
- class="gl-form-input gl-mt-2 gl-form-input-sm"
+ class="gl-form-input gl-form-input-sm gl-mt-2"
debounce="0"
formatter="[Function]"
type="text"
value=""
/>
</p>
-
<input
name="_method"
type="hidden"
value="delete"
/>
-
<input
name="authenticity_token"
type="hidden"
@@ -119,7 +99,7 @@ exports[`Delete merged branches component Delete merged branches confirmation mo
/>
</form>
<div
- class="gl-display-flex gl-flex-direction-row gl-justify-content-end gl-flex-wrap gl-m-0 gl-mr-3"
+ class="gl-display-flex gl-flex-direction-row gl-flex-wrap gl-justify-content-end gl-m-0 gl-mr-3"
>
<b-button-stub
class="gl-button"
@@ -129,19 +109,12 @@ exports[`Delete merged branches component Delete merged branches confirmation mo
type="button"
variant="default"
>
- <!---->
-
- <!---->
-
<span
class="gl-button-text"
>
-
Cancel
-
</span>
</b-button-stub>
-
<b-button-stub
class="gl-button"
data-testid="delete-merged-branches-confirmation-button"
@@ -151,10 +124,6 @@ exports[`Delete merged branches component Delete merged branches confirmation mo
type="button"
variant="danger"
>
- <!---->
-
- <!---->
-
<span
class="gl-button-text"
>
diff --git a/spec/frontend/branches/components/__snapshots__/divergence_graph_spec.js.snap b/spec/frontend/branches/components/__snapshots__/divergence_graph_spec.js.snap
index 2afca66b0c1..81a57653f61 100644
--- a/spec/frontend/branches/components/__snapshots__/divergence_graph_spec.js.snap
+++ b/spec/frontend/branches/components/__snapshots__/divergence_graph_spec.js.snap
@@ -2,7 +2,7 @@
exports[`Branch divergence graph component renders ahead and behind count 1`] = `
<div
- class="divergence-graph px-2 d-none d-md-block"
+ class="d-md-block d-none divergence-graph px-2"
title="10 commits behind main, 10 commits ahead"
>
<graph-bar-stub
@@ -10,11 +10,9 @@ exports[`Branch divergence graph component renders ahead and behind count 1`] =
maxcommits="100"
position="left"
/>
-
<div
- class="graph-separator float-left mt-1"
+ class="float-left graph-separator mt-1"
/>
-
<graph-bar-stub
count="10"
maxcommits="100"
@@ -25,7 +23,7 @@ exports[`Branch divergence graph component renders ahead and behind count 1`] =
exports[`Branch divergence graph component renders distance count 1`] = `
<div
- class="divergence-graph px-2 d-none d-md-block"
+ class="d-md-block d-none divergence-graph px-2"
title="More than 900 commits different with main"
>
<graph-bar-stub
diff --git a/spec/frontend/ci/admin/jobs_table/admin_job_table_app_spec.js b/spec/frontend/ci/admin/jobs_table/admin_job_table_app_spec.js
new file mode 100644
index 00000000000..d14b78d2f4d
--- /dev/null
+++ b/spec/frontend/ci/admin/jobs_table/admin_job_table_app_spec.js
@@ -0,0 +1,445 @@
+import { GlLoadingIcon, GlEmptyState, GlAlert, GlIntersectionObserver } from '@gitlab/ui';
+import { mount, shallowMount } from '@vue/test-utils';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import JobsTableTabs from '~/ci/jobs_page/components/jobs_table_tabs.vue';
+import JobsSkeletonLoader from '~/ci/admin/jobs_table/components/jobs_skeleton_loader.vue';
+import getAllJobsQuery from '~/ci/admin/jobs_table/graphql/queries/get_all_jobs.query.graphql';
+import getAllJobsCount from '~/ci/admin/jobs_table/graphql/queries/get_all_jobs_count.query.graphql';
+import getCancelableJobsQuery from '~/ci/admin/jobs_table/graphql/queries/get_cancelable_jobs_count.query.graphql';
+import AdminJobsTableApp from '~/ci/admin/jobs_table/admin_jobs_table_app.vue';
+import CancelJobs from '~/ci/admin/jobs_table/components/cancel_jobs.vue';
+import JobsTable from '~/ci/jobs_page/components/jobs_table.vue';
+import { createAlert } from '~/alert';
+import { TEST_HOST } from 'spec/test_constants';
+import JobsFilteredSearch from '~/ci/common/private/jobs_filtered_search/app.vue';
+import * as urlUtils from '~/lib/utils/url_utility';
+import {
+ JOBS_FETCH_ERROR_MSG,
+ CANCELABLE_JOBS_ERROR_MSG,
+ LOADING_ARIA_LABEL,
+ RAW_TEXT_WARNING_ADMIN,
+ JOBS_COUNT_ERROR_MESSAGE,
+} from '~/ci/admin/jobs_table/constants';
+import { TOKEN_TYPE_JOBS_RUNNER_TYPE } from '~/vue_shared/components/filtered_search_bar/constants';
+import {
+ mockAllJobsResponsePaginated,
+ mockCancelableJobsCountResponse,
+ mockAllJobsResponseEmpty,
+ statuses,
+ mockFailedSearchToken,
+ mockAllJobsCountResponse,
+} from 'jest/ci/jobs_mock_data';
+
+Vue.use(VueApollo);
+
+jest.mock('~/alert');
+
+describe('Job table app', () => {
+ let wrapper;
+
+ const successHandler = jest.fn().mockResolvedValue(mockAllJobsResponsePaginated);
+ const failedHandler = jest.fn().mockRejectedValue(new Error('GraphQL error'));
+ const cancelHandler = jest.fn().mockResolvedValue(mockCancelableJobsCountResponse);
+ const emptyHandler = jest.fn().mockResolvedValue(mockAllJobsResponseEmpty);
+ const countSuccessHandler = jest.fn().mockResolvedValue(mockAllJobsCountResponse);
+
+ const findSkeletonLoader = () => wrapper.findComponent(JobsSkeletonLoader);
+ const findLoadingSpinner = () => wrapper.findComponent(GlLoadingIcon);
+ const findTable = () => wrapper.findComponent(JobsTable);
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findTabs = () => wrapper.findComponent(JobsTableTabs);
+ const findCancelJobsButton = () => wrapper.findComponent(CancelJobs);
+ const findFilteredSearch = () => wrapper.findComponent(JobsFilteredSearch);
+
+ const mockSearchTokenRunnerType = {
+ type: TOKEN_TYPE_JOBS_RUNNER_TYPE,
+ value: { data: 'INSTANCE_TYPE', operator: '=' },
+ };
+
+ const triggerInfiniteScroll = () =>
+ wrapper.findComponent(GlIntersectionObserver).vm.$emit('appear');
+
+ const createMockApolloProvider = (handler, cancelableHandler, countHandler) => {
+ const requestHandlers = [
+ [getAllJobsQuery, handler],
+ [getCancelableJobsQuery, cancelableHandler],
+ [getAllJobsCount, countHandler],
+ ];
+
+ return createMockApollo(requestHandlers);
+ };
+
+ const createComponent = ({
+ handler = successHandler,
+ cancelableHandler = cancelHandler,
+ countHandler = countSuccessHandler,
+ mountFn = shallowMount,
+ data = {},
+ provideOptions = {},
+ } = {}) => {
+ wrapper = mountFn(AdminJobsTableApp, {
+ data() {
+ return {
+ ...data,
+ };
+ },
+ provide: {
+ jobStatuses: statuses,
+ glFeatures: { adminJobsFilterRunnerType: true },
+ ...provideOptions,
+ },
+ apolloProvider: createMockApolloProvider(handler, cancelableHandler, countHandler),
+ });
+ };
+
+ describe('loading state', () => {
+ it('should display skeleton loader when loading', () => {
+ createComponent();
+
+ expect(findSkeletonLoader().exists()).toBe(true);
+ expect(findTable().exists()).toBe(false);
+ expect(findLoadingSpinner().exists()).toBe(false);
+ });
+
+ it('when switching tabs only the skeleton loader should show', () => {
+ createComponent();
+
+ findTabs().vm.$emit('fetchJobsByStatus', null);
+
+ expect(findSkeletonLoader().exists()).toBe(true);
+ expect(findLoadingSpinner().exists()).toBe(false);
+ });
+ });
+
+ describe('loaded state', () => {
+ beforeEach(async () => {
+ createComponent();
+
+ await waitForPromises();
+ });
+
+ it('should display the jobs table with data', () => {
+ expect(findTable().exists()).toBe(true);
+ expect(findSkeletonLoader().exists()).toBe(false);
+ expect(findLoadingSpinner().exists()).toBe(false);
+ });
+
+ it('should refetch jobs query on fetchJobsByStatus event', async () => {
+ expect(successHandler).toHaveBeenCalledTimes(1);
+
+ await findTabs().vm.$emit('fetchJobsByStatus');
+
+ expect(successHandler).toHaveBeenCalledTimes(2);
+ });
+
+ it('avoids refetching the jobs query when scope has not changed', async () => {
+ expect(successHandler).toHaveBeenCalledTimes(1);
+
+ await findTabs().vm.$emit('fetchJobsByStatus', null);
+
+ expect(successHandler).toHaveBeenCalledTimes(1);
+ });
+
+ it('should refetch jobs count query when the number of jobs and the count do not match', async () => {
+ expect(countSuccessHandler).toHaveBeenCalledTimes(1);
+
+ // after applying a filter, a new count is fetched
+ findFilteredSearch().vm.$emit('filterJobsBySearch', [mockFailedSearchToken]);
+
+ expect(successHandler).toHaveBeenCalledTimes(2);
+
+ // tab is switched to `finished`, no count
+ await findTabs().vm.$emit('fetchJobsByStatus', ['FAILED', 'SUCCESS', 'CANCELED']);
+
+ // tab is switched back to `all`, so the old filtered count has to be overwritten with the new count
+ await findTabs().vm.$emit('fetchJobsByStatus', null);
+
+ expect(successHandler).toHaveBeenCalledTimes(4);
+ });
+
+ describe('when infinite scrolling is triggered', () => {
+ it('does not display a skeleton loader', () => {
+ triggerInfiniteScroll();
+
+ expect(findSkeletonLoader().exists()).toBe(false);
+ });
+
+ it('handles infinite scrolling by calling fetch more', async () => {
+ triggerInfiniteScroll();
+
+ await nextTick();
+
+ const pageSize = 50;
+
+ expect(findLoadingSpinner().exists()).toBe(true);
+ expect(findLoadingSpinner().attributes('aria-label')).toBe(LOADING_ARIA_LABEL);
+
+ await waitForPromises();
+
+ expect(findLoadingSpinner().exists()).toBe(false);
+
+ expect(successHandler).toHaveBeenLastCalledWith({
+ first: pageSize,
+ after: mockAllJobsResponsePaginated.data.jobs.pageInfo.endCursor,
+ });
+ });
+ });
+ });
+
+ describe('empty state', () => {
+ it('should display empty state if there are no jobs and tab scope is null', async () => {
+ createComponent({ handler: emptyHandler, mountFn: mount });
+
+ await waitForPromises();
+
+ expect(findEmptyState().exists()).toBe(true);
+ expect(findTable().exists()).toBe(false);
+ });
+
+ it('should not display empty state if there are jobs and tab scope is not null', async () => {
+ createComponent({ handler: successHandler, mountFn: mount });
+
+ await waitForPromises();
+
+ expect(findEmptyState().exists()).toBe(false);
+ expect(findTable().exists()).toBe(true);
+ });
+ });
+
+ describe('error state', () => {
+ it('should show an alert if there is an error fetching the jobs data', async () => {
+ createComponent({ handler: failedHandler });
+
+ await waitForPromises();
+
+ expect(findAlert().text()).toBe(JOBS_FETCH_ERROR_MSG);
+ expect(findTable().exists()).toBe(false);
+ });
+
+ it('should show an alert if there is an error fetching the jobs count data', async () => {
+ createComponent({ handler: successHandler, countHandler: failedHandler });
+
+ await waitForPromises();
+
+ expect(findAlert().text()).toBe(JOBS_COUNT_ERROR_MESSAGE);
+ });
+
+ it('should show an alert if there is an error fetching the cancelable jobs data', async () => {
+ createComponent({ handler: successHandler, cancelableHandler: failedHandler });
+
+ await waitForPromises();
+
+ expect(findAlert().text()).toBe(CANCELABLE_JOBS_ERROR_MSG);
+ });
+
+ it('jobs table should still load if count query fails', async () => {
+ createComponent({ handler: successHandler, countHandler: failedHandler });
+
+ await waitForPromises();
+
+ expect(findTable().exists()).toBe(true);
+ });
+
+ it('jobs table should still load if cancel query fails', async () => {
+ createComponent({ handler: successHandler, cancelableHandler: failedHandler });
+
+ await waitForPromises();
+
+ expect(findTable().exists()).toBe(true);
+ });
+
+ it('jobs count should be zero if count query fails', async () => {
+ createComponent({ handler: successHandler, countHandler: failedHandler });
+
+ await waitForPromises();
+
+ expect(findTabs().props('allJobsCount')).toBe(0);
+ });
+
+ it('cancel button should be hidden if query fails', async () => {
+ createComponent({ handler: successHandler, cancelableHandler: failedHandler });
+
+ await waitForPromises();
+
+ expect(findCancelJobsButton().exists()).toBe(false);
+ });
+ });
+
+ describe('cancel jobs button', () => {
+ it('should display cancel all jobs button', async () => {
+ createComponent({ cancelableHandler: cancelHandler, mountFn: mount });
+
+ await waitForPromises();
+
+ expect(findCancelJobsButton().exists()).toBe(true);
+ });
+
+ it('should not display cancel all jobs button', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ expect(findCancelJobsButton().exists()).toBe(false);
+ });
+ });
+
+ describe('filtered search', () => {
+ it('should display filtered search', () => {
+ createComponent();
+
+ expect(findFilteredSearch().exists()).toBe(true);
+ });
+
+ // this test should be updated once BE supports tab and filtered search filtering
+ // https://gitlab.com/gitlab-org/gitlab/-/issues/356210
+ it.each`
+ scope | shouldDisplay
+ ${null} | ${true}
+ ${['FAILED', 'SUCCESS', 'CANCELED']} | ${false}
+ `(
+ 'with tab scope $scope the filtered search displays $shouldDisplay',
+ async ({ scope, shouldDisplay }) => {
+ createComponent();
+
+ await waitForPromises();
+
+ await findTabs().vm.$emit('fetchJobsByStatus', scope);
+
+ expect(findFilteredSearch().exists()).toBe(shouldDisplay);
+ },
+ );
+
+ describe.each`
+ searchTokens | expectedQueryParams
+ ${[]} | ${{ runnerTypes: null, statuses: null }}
+ ${[mockFailedSearchToken]} | ${{ runnerTypes: null, statuses: 'FAILED' }}
+ ${[mockFailedSearchToken, mockSearchTokenRunnerType]} | ${{ runnerTypes: 'INSTANCE_TYPE', statuses: 'FAILED' }}
+ `('when filtering jobs by searchTokens', ({ searchTokens, expectedQueryParams }) => {
+ it(`refetches jobs query including filters ${JSON.stringify(
+ expectedQueryParams,
+ )}`, async () => {
+ createComponent();
+
+ expect(successHandler).toHaveBeenCalledTimes(1);
+
+ await findFilteredSearch().vm.$emit('filterJobsBySearch', searchTokens);
+
+ expect(successHandler).toHaveBeenCalledTimes(2);
+ expect(successHandler).toHaveBeenNthCalledWith(2, { first: 50, ...expectedQueryParams });
+ });
+
+ it(`refetches jobs count query including filters ${JSON.stringify(
+ expectedQueryParams,
+ )}`, async () => {
+ createComponent();
+
+ expect(countSuccessHandler).toHaveBeenCalledTimes(1);
+
+ await findFilteredSearch().vm.$emit('filterJobsBySearch', searchTokens);
+
+ expect(countSuccessHandler).toHaveBeenCalledTimes(2);
+ expect(countSuccessHandler).toHaveBeenNthCalledWith(2, expectedQueryParams);
+ });
+ });
+
+ it('shows raw text warning when user inputs raw text', async () => {
+ const expectedWarning = {
+ message: RAW_TEXT_WARNING_ADMIN,
+ type: 'warning',
+ };
+
+ createComponent();
+
+ expect(successHandler).toHaveBeenCalledTimes(1);
+ expect(countSuccessHandler).toHaveBeenCalledTimes(1);
+
+ await findFilteredSearch().vm.$emit('filterJobsBySearch', ['raw text']);
+
+ expect(createAlert).toHaveBeenCalledWith(expectedWarning);
+ expect(successHandler).toHaveBeenCalledTimes(1);
+ expect(countSuccessHandler).toHaveBeenCalledTimes(1);
+ });
+
+ it('updates URL query string when filtering jobs by status', async () => {
+ createComponent();
+
+ jest.spyOn(urlUtils, 'updateHistory');
+
+ await findFilteredSearch().vm.$emit('filterJobsBySearch', [mockFailedSearchToken]);
+
+ expect(urlUtils.updateHistory).toHaveBeenCalledWith({
+ url: `${TEST_HOST}/?statuses=FAILED`,
+ });
+ });
+
+ it('resets query param after clearing tokens', () => {
+ createComponent();
+
+ jest.spyOn(urlUtils, 'updateHistory');
+
+ findFilteredSearch().vm.$emit('filterJobsBySearch', [mockFailedSearchToken]);
+
+ expect(successHandler).toHaveBeenCalledWith({
+ first: 50,
+ statuses: 'FAILED',
+ runnerTypes: null,
+ });
+ expect(urlUtils.updateHistory).toHaveBeenCalledWith({
+ url: `${TEST_HOST}/?statuses=FAILED`,
+ });
+
+ findFilteredSearch().vm.$emit('filterJobsBySearch', []);
+
+ expect(urlUtils.updateHistory).toHaveBeenCalledWith({
+ url: `${TEST_HOST}/`,
+ });
+
+ expect(successHandler).toHaveBeenCalledWith({
+ first: 50,
+ statuses: null,
+ runnerTypes: null,
+ });
+ });
+
+ describe('when feature flag `adminJobsFilterRunnerType` is disabled', () => {
+ const provideOptions = { glFeatures: { adminJobsFilterRunnerType: false } };
+
+ describe.each`
+ searchTokens | expectedQueryParams
+ ${[]} | ${{ statuses: null }}
+ ${[mockFailedSearchToken]} | ${{ statuses: 'FAILED' }}
+ ${[mockFailedSearchToken, mockSearchTokenRunnerType]} | ${{ statuses: 'FAILED' }}
+ `('when filtering jobs by searchTokens', ({ searchTokens, expectedQueryParams }) => {
+ it(`refetches jobs query including filters ${JSON.stringify(
+ expectedQueryParams,
+ )}`, async () => {
+ createComponent({ provideOptions });
+
+ expect(successHandler).toHaveBeenCalledTimes(1);
+
+ await findFilteredSearch().vm.$emit('filterJobsBySearch', searchTokens);
+
+ expect(successHandler).toHaveBeenCalledTimes(2);
+ expect(successHandler).toHaveBeenNthCalledWith(2, { first: 50, ...expectedQueryParams });
+ });
+
+ it(`refetches jobs count query including filters ${JSON.stringify(
+ expectedQueryParams,
+ )}`, async () => {
+ createComponent({ provideOptions });
+
+ expect(countSuccessHandler).toHaveBeenCalledTimes(1);
+
+ await findFilteredSearch().vm.$emit('filterJobsBySearch', searchTokens);
+
+ expect(countSuccessHandler).toHaveBeenCalledTimes(2);
+ expect(countSuccessHandler).toHaveBeenNthCalledWith(2, expectedQueryParams);
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/pages/admin/jobs/components/cancel_jobs_modal_spec.js b/spec/frontend/ci/admin/jobs_table/components/cancel_jobs_modal_spec.js
index d90393d8ab3..c3d1d0266f4 100644
--- a/spec/frontend/pages/admin/jobs/components/cancel_jobs_modal_spec.js
+++ b/spec/frontend/ci/admin/jobs_table/components/cancel_jobs_modal_spec.js
@@ -4,7 +4,7 @@ import { GlModal } from '@gitlab/ui';
import { TEST_HOST } from 'helpers/test_constants';
import axios from '~/lib/utils/axios_utils';
import { redirectTo } from '~/lib/utils/url_utility'; // eslint-disable-line import/no-deprecated
-import CancelJobsModal from '~/pages/admin/jobs/components/cancel_jobs_modal.vue';
+import CancelJobsModal from '~/ci/admin/jobs_table/components/cancel_jobs_modal.vue';
import { setVueErrorHandler } from '../../../../__helpers__/set_vue_error_handler';
jest.mock('~/lib/utils/url_utility', () => ({
diff --git a/spec/frontend/ci/admin/jobs_table/components/cancel_jobs_spec.js b/spec/frontend/ci/admin/jobs_table/components/cancel_jobs_spec.js
new file mode 100644
index 00000000000..2884e4ed521
--- /dev/null
+++ b/spec/frontend/ci/admin/jobs_table/components/cancel_jobs_spec.js
@@ -0,0 +1,54 @@
+import { GlButton } from '@gitlab/ui';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import { TEST_HOST } from 'helpers/test_constants';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import CancelJobs from '~/ci/admin/jobs_table/components/cancel_jobs.vue';
+import CancelJobsModal from '~/ci/admin/jobs_table/components/cancel_jobs_modal.vue';
+import { CANCEL_JOBS_MODAL_ID, CANCEL_BUTTON_TOOLTIP } from '~/ci/admin/jobs_table/constants';
+
+describe('CancelJobs component', () => {
+ let wrapper;
+
+ const findCancelJobs = () => wrapper.findComponent(CancelJobs);
+ const findButton = () => wrapper.findComponent(GlButton);
+ const findModal = () => wrapper.findComponent(CancelJobsModal);
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMountExtended(CancelJobs, {
+ directives: {
+ GlModal: createMockDirective('gl-modal'),
+ GlTooltip: createMockDirective('gl-tooltip'),
+ },
+ propsData: {
+ url: `${TEST_HOST}/cancel_jobs_modal.vue/cancelAll`,
+ ...props,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('has correct inputs', () => {
+ expect(findCancelJobs().props().url).toBe(`${TEST_HOST}/cancel_jobs_modal.vue/cancelAll`);
+ });
+
+ it('has correct button variant', () => {
+ expect(findButton().props().variant).toBe('danger');
+ });
+
+ it('checks that button and modal are connected', () => {
+ const buttonModalDirective = getBinding(findButton().element, 'gl-modal');
+ const modalId = findModal().props('modalId');
+
+ expect(buttonModalDirective.value).toBe(CANCEL_JOBS_MODAL_ID);
+ expect(modalId).toBe(CANCEL_JOBS_MODAL_ID);
+ });
+
+ it('checks that tooltip is displayed', () => {
+ const buttonTooltipDirective = getBinding(findButton().element, 'gl-tooltip');
+
+ expect(buttonTooltipDirective.value).toBe(CANCEL_BUTTON_TOOLTIP);
+ });
+});
diff --git a/spec/frontend/pages/admin/jobs/components/table/cells/project_cell_spec.js b/spec/frontend/ci/admin/jobs_table/components/cells/project_cell_spec.js
index 3366d60d9f3..3e391e74394 100644
--- a/spec/frontend/pages/admin/jobs/components/table/cells/project_cell_spec.js
+++ b/spec/frontend/ci/admin/jobs_table/components/cells/project_cell_spec.js
@@ -1,7 +1,7 @@
import { GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import ProjectCell from '~/pages/admin/jobs/components/table/cell/project_cell.vue';
-import { mockAllJobsNodes } from '../../../../../../jobs/mock_data';
+import ProjectCell from '~/ci/admin/jobs_table/components/cells/project_cell.vue';
+import { mockAllJobsNodes } from 'jest/ci/jobs_mock_data';
const mockJob = mockAllJobsNodes[0];
diff --git a/spec/frontend/pages/admin/jobs/components/table/cells/runner_cell_spec.js b/spec/frontend/ci/admin/jobs_table/components/cells/runner_cell_spec.js
index 2f76ad66dd5..2f1dae71572 100644
--- a/spec/frontend/pages/admin/jobs/components/table/cells/runner_cell_spec.js
+++ b/spec/frontend/ci/admin/jobs_table/components/cells/runner_cell_spec.js
@@ -1,8 +1,8 @@
import { GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import RunnerCell from '~/pages/admin/jobs/components/table/cells/runner_cell.vue';
-import { RUNNER_EMPTY_TEXT } from '~/pages/admin/jobs/components/constants';
-import { allRunnersData } from '../../../../../../ci/runner/mock_data';
+import RunnerCell from '~/ci/admin/jobs_table/components/cells/runner_cell.vue';
+import { RUNNER_EMPTY_TEXT } from '~/ci/admin/jobs_table/constants';
+import { allRunnersData } from 'jest/ci/runner/mock_data';
const mockRunner = allRunnersData.data.runners.nodes[0];
diff --git a/spec/frontend/pages/admin/jobs/components/jobs_skeleton_loader_spec.js b/spec/frontend/ci/admin/jobs_table/components/jobs_skeleton_loader_spec.js
index 03e5cd75420..0d2f5f58121 100644
--- a/spec/frontend/pages/admin/jobs/components/jobs_skeleton_loader_spec.js
+++ b/spec/frontend/ci/admin/jobs_table/components/jobs_skeleton_loader_spec.js
@@ -1,6 +1,6 @@
import { GlSkeletonLoader } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import JobsSkeletonLoader from '~/pages/admin/jobs/components/jobs_skeleton_loader.vue';
+import JobsSkeletonLoader from '~/ci/admin/jobs_table/components/jobs_skeleton_loader.vue';
describe('jobs_skeleton_loader.vue', () => {
let wrapper;
diff --git a/spec/frontend/pages/admin/jobs/components/table/graphql/cache_config_spec.js b/spec/frontend/ci/admin/jobs_table/graphql/cache_config_spec.js
index 59e9eda6343..36fbbafac44 100644
--- a/spec/frontend/pages/admin/jobs/components/table/graphql/cache_config_spec.js
+++ b/spec/frontend/ci/admin/jobs_table/graphql/cache_config_spec.js
@@ -1,9 +1,9 @@
-import cacheConfig from '~/pages/admin/jobs/components/table/graphql/cache_config';
+import cacheConfig from '~/ci/admin/jobs_table/graphql/cache_config';
import {
CIJobConnectionExistingCache,
CIJobConnectionIncomingCache,
CIJobConnectionIncomingCacheRunningStatus,
-} from '../../../../../../jobs/mock_data';
+} from 'jest/ci/jobs_mock_data';
const firstLoadArgs = { first: 3, statuses: 'PENDING' };
const runningArgs = { first: 3, statuses: 'RUNNING' };
diff --git a/spec/frontend/ci/artifacts/components/feedback_banner_spec.js b/spec/frontend/ci/artifacts/components/feedback_banner_spec.js
deleted file mode 100644
index 53e0fdac6f6..00000000000
--- a/spec/frontend/ci/artifacts/components/feedback_banner_spec.js
+++ /dev/null
@@ -1,59 +0,0 @@
-import { GlBanner } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import FeedbackBanner from '~/ci/artifacts/components/feedback_banner.vue';
-import { makeMockUserCalloutDismisser } from 'helpers/mock_user_callout_dismisser';
-import {
- I18N_FEEDBACK_BANNER_TITLE,
- I18N_FEEDBACK_BANNER_BUTTON,
- FEEDBACK_URL,
-} from '~/ci/artifacts/constants';
-
-const mockBannerImagePath = 'banner/image/path';
-
-describe('Artifacts management feedback banner', () => {
- let wrapper;
- let userCalloutDismissSpy;
-
- const findBanner = () => wrapper.findComponent(GlBanner);
-
- const createComponent = ({ shouldShowCallout = true } = {}) => {
- userCalloutDismissSpy = jest.fn();
-
- wrapper = shallowMount(FeedbackBanner, {
- provide: {
- artifactsManagementFeedbackImagePath: mockBannerImagePath,
- },
- stubs: {
- UserCalloutDismisser: makeMockUserCalloutDismisser({
- dismiss: userCalloutDismissSpy,
- shouldShowCallout,
- }),
- },
- });
- };
-
- it('is displayed with the correct props', () => {
- createComponent();
-
- expect(findBanner().props()).toMatchObject({
- title: I18N_FEEDBACK_BANNER_TITLE,
- buttonText: I18N_FEEDBACK_BANNER_BUTTON,
- buttonLink: FEEDBACK_URL,
- svgPath: mockBannerImagePath,
- });
- });
-
- it('dismisses the callout when closed', () => {
- createComponent();
-
- findBanner().vm.$emit('close');
-
- expect(userCalloutDismissSpy).toHaveBeenCalled();
- });
-
- it('is not displayed once it has been dismissed', () => {
- createComponent({ shouldShowCallout: false });
-
- expect(findBanner().exists()).toBe(false);
- });
-});
diff --git a/spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js b/spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js
index e062140246b..1cbb1a714c9 100644
--- a/spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js
+++ b/spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js
@@ -13,7 +13,6 @@ import getJobArtifactsResponse from 'test_fixtures/graphql/ci/artifacts/graphql/
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import waitForPromises from 'helpers/wait_for_promises';
import JobArtifactsTable from '~/ci/artifacts/components/job_artifacts_table.vue';
-import FeedbackBanner from '~/ci/artifacts/components/feedback_banner.vue';
import ArtifactsTableRowDetails from '~/ci/artifacts/components/artifacts_table_row_details.vue';
import ArtifactDeleteModal from '~/ci/artifacts/components/artifact_delete_modal.vue';
import ArtifactsBulkDelete from '~/ci/artifacts/components/artifacts_bulk_delete.vue';
@@ -46,8 +45,6 @@ describe('JobArtifactsTable component', () => {
const mockToastShow = jest.fn();
- const findBanner = () => wrapper.findComponent(FeedbackBanner);
-
const findLoadingState = () => wrapper.findComponent(GlLoadingIcon);
const findTable = () => wrapper.findComponent(GlTable);
const findDetailsRows = () => wrapper.findAllComponents(ArtifactsTableRowDetails);
@@ -162,7 +159,6 @@ describe('JobArtifactsTable component', () => {
projectPath: 'project/path',
projectId,
canDestroyArtifacts,
- artifactsManagementFeedbackImagePath: 'banner/image/path',
},
mocks: {
$toast: {
@@ -175,12 +171,6 @@ describe('JobArtifactsTable component', () => {
});
};
- it('renders feedback banner', () => {
- createComponent();
-
- expect(findBanner().exists()).toBe(true);
- });
-
it('when loading, shows a loading state', () => {
createComponent();
@@ -373,6 +363,7 @@ describe('JobArtifactsTable component', () => {
it('is disabled when job has no metadata.gz', async () => {
const jobWithoutMetadata = {
...job,
+ hasArtifacts: true,
artifacts: { nodes: [archiveArtifact] },
};
@@ -389,6 +380,7 @@ describe('JobArtifactsTable component', () => {
it('is disabled when job has no artifacts', async () => {
const jobWithoutArtifacts = {
...job,
+ hasArtifacts: false,
artifacts: { nodes: [] },
};
diff --git a/spec/frontend/ci/ci_variable_list/ci_variable_list/ci_variable_list_spec.js b/spec/frontend/ci/ci_variable_list/ci_variable_list/ci_variable_list_spec.js
deleted file mode 100644
index 8990a70d4ef..00000000000
--- a/spec/frontend/ci/ci_variable_list/ci_variable_list/ci_variable_list_spec.js
+++ /dev/null
@@ -1,161 +0,0 @@
-import $ from 'jquery';
-import htmlPipelineSchedulesEdit from 'test_fixtures/pipeline_schedules/edit.html';
-import htmlPipelineSchedulesEditWithVariables from 'test_fixtures/pipeline_schedules/edit_with_variables.html';
-import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import VariableList from '~/ci/ci_variable_list/ci_variable_list';
-
-const HIDE_CLASS = 'hide';
-
-describe('VariableList', () => {
- let $wrapper;
- let variableList;
-
- describe('with only key/value inputs', () => {
- describe('with no variables', () => {
- beforeEach(() => {
- setHTMLFixture(htmlPipelineSchedulesEdit);
- $wrapper = $('.js-ci-variable-list-section');
-
- variableList = new VariableList({
- container: $wrapper,
- formField: 'schedule',
- });
- variableList.init();
- });
-
- afterEach(() => {
- resetHTMLFixture();
- });
-
- it('should remove the row when clicking the remove button', () => {
- $wrapper.find('.js-row-remove-button').trigger('click');
-
- expect($wrapper.find('.js-row').length).toBe(0);
- });
-
- it('should add another row when editing the last rows key input', () => {
- const $row = $wrapper.find('.js-row');
- $row.find('.js-ci-variable-input-key').val('foo').trigger('input');
-
- expect($wrapper.find('.js-row').length).toBe(2);
-
- // Check for the correct default in the new row
- const $keyInput = $wrapper.find('.js-row:last-child').find('.js-ci-variable-input-key');
-
- expect($keyInput.val()).toBe('');
- });
-
- it('should add another row when editing the last rows value textarea', () => {
- const $row = $wrapper.find('.js-row');
- $row.find('.js-ci-variable-input-value').val('foo').trigger('input');
-
- expect($wrapper.find('.js-row').length).toBe(2);
-
- // Check for the correct default in the new row
- const $valueInput = $wrapper.find('.js-row:last-child').find('.js-ci-variable-input-key');
-
- expect($valueInput.val()).toBe('');
- });
-
- it('should remove empty row after blurring', () => {
- const $row = $wrapper.find('.js-row');
- $row.find('.js-ci-variable-input-key').val('foo').trigger('input');
-
- expect($wrapper.find('.js-row').length).toBe(2);
-
- $row.find('.js-ci-variable-input-key').val('').trigger('input').trigger('blur');
-
- expect($wrapper.find('.js-row').length).toBe(1);
- });
- });
-
- describe('with persisted variables', () => {
- beforeEach(() => {
- setHTMLFixture(htmlPipelineSchedulesEditWithVariables);
- $wrapper = $('.js-ci-variable-list-section');
-
- variableList = new VariableList({
- container: $wrapper,
- formField: 'schedule',
- });
- variableList.init();
- });
-
- afterEach(() => {
- resetHTMLFixture();
- });
-
- it('should have "Reveal values" button initially when there are already variables', () => {
- expect($wrapper.find('.js-secret-value-reveal-button').text()).toBe('Reveal values');
- });
-
- it('should reveal hidden values', () => {
- const $row = $wrapper.find('.js-row:first-child');
- const $inputValue = $row.find('.js-ci-variable-input-value');
- const $placeholder = $row.find('.js-secret-value-placeholder');
-
- expect($placeholder.hasClass(HIDE_CLASS)).toBe(false);
- expect($inputValue.hasClass(HIDE_CLASS)).toBe(true);
-
- // Reveal values
- $wrapper.find('.js-secret-value-reveal-button').click();
-
- expect($placeholder.hasClass(HIDE_CLASS)).toBe(true);
- expect($inputValue.hasClass(HIDE_CLASS)).toBe(false);
- });
- });
- });
-
- describe('toggleEnableRow method', () => {
- beforeEach(() => {
- setHTMLFixture(htmlPipelineSchedulesEditWithVariables);
- $wrapper = $('.js-ci-variable-list-section');
-
- variableList = new VariableList({
- container: $wrapper,
- formField: 'variables',
- });
- variableList.init();
- });
-
- afterEach(() => {
- resetHTMLFixture();
- });
-
- it('should disable all key inputs', () => {
- expect($wrapper.find('.js-ci-variable-input-key:not([disabled])').length).toBe(3);
-
- variableList.toggleEnableRow(false);
-
- expect($wrapper.find('.js-ci-variable-input-key[disabled]').length).toBe(3);
- });
-
- it('should disable all remove buttons', () => {
- expect($wrapper.find('.js-row-remove-button:not([disabled])').length).toBe(3);
-
- variableList.toggleEnableRow(false);
-
- expect($wrapper.find('.js-row-remove-button[disabled]').length).toBe(3);
- });
-
- it('should enable all remove buttons', () => {
- variableList.toggleEnableRow(false);
-
- expect($wrapper.find('.js-row-remove-button[disabled]').length).toBe(3);
-
- variableList.toggleEnableRow(true);
-
- expect($wrapper.find('.js-row-remove-button:not([disabled])').length).toBe(3);
- });
-
- it('should enable all key inputs', () => {
- variableList.toggleEnableRow(false);
-
- expect($wrapper.find('.js-ci-variable-input-key[disabled]').length).toBe(3);
-
- variableList.toggleEnableRow(true);
-
- expect($wrapper.find('.js-ci-variable-input-key:not([disabled])').length).toBe(3);
- });
- });
-});
diff --git a/spec/frontend/ci/ci_variable_list/ci_variable_list/native_form_variable_list_spec.js b/spec/frontend/ci/ci_variable_list/ci_variable_list/native_form_variable_list_spec.js
deleted file mode 100644
index 3ef5427f288..00000000000
--- a/spec/frontend/ci/ci_variable_list/ci_variable_list/native_form_variable_list_spec.js
+++ /dev/null
@@ -1,41 +0,0 @@
-import $ from 'jquery';
-import htmlPipelineSchedulesEdit from 'test_fixtures/pipeline_schedules/edit.html';
-import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import setupNativeFormVariableList from '~/ci/ci_variable_list/native_form_variable_list';
-
-describe('NativeFormVariableList', () => {
- let $wrapper;
-
- beforeEach(() => {
- setHTMLFixture(htmlPipelineSchedulesEdit);
- $wrapper = $('.js-ci-variable-list-section');
-
- setupNativeFormVariableList({
- container: $wrapper,
- formField: 'schedule',
- });
- });
-
- afterEach(() => {
- resetHTMLFixture();
- });
-
- describe('onFormSubmit', () => {
- it('should clear out the `name` attribute on the inputs for the last empty row on form submission (avoid BE validation)', () => {
- const $row = $wrapper.find('.js-row');
-
- expect($row.find('.js-ci-variable-input-key').attr('name')).toBe(
- 'schedule[variables_attributes][][key]',
- );
-
- expect($row.find('.js-ci-variable-input-value').attr('name')).toBe(
- 'schedule[variables_attributes][][secret_value]',
- );
-
- $wrapper.closest('form').trigger('trigger-submit');
-
- expect($row.find('.js-ci-variable-input-key').attr('name')).toBe('');
- expect($row.find('.js-ci-variable-input-value').attr('name')).toBe('');
- });
- });
-});
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js
index 762c9611dac..ab5d914a6a1 100644
--- a/spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js
@@ -1,42 +1,90 @@
-import { GlDrawer, GlFormSelect } from '@gitlab/ui';
+import { GlDrawer, GlFormCombobox, GlFormInput, GlFormSelect } from '@gitlab/ui';
import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import CiEnvironmentsDropdown from '~/ci/ci_variable_list/components/ci_environments_dropdown.vue';
import CiVariableDrawer from '~/ci/ci_variable_list/components/ci_variable_drawer.vue';
+import { awsTokenList } from '~/ci/ci_variable_list/components/ci_variable_autocomplete_tokens';
import {
ADD_VARIABLE_ACTION,
+ DRAWER_EVENT_LABEL,
+ EDIT_VARIABLE_ACTION,
+ EVENT_ACTION,
variableOptions,
+ projectString,
variableTypes,
} from '~/ci/ci_variable_list/constants';
+import { mockTracking } from 'helpers/tracking_helper';
+import { mockVariablesWithScopes } from '../mocks';
describe('CI Variable Drawer', () => {
let wrapper;
+ let trackingSpy;
+
+ const mockProjectVariable = mockVariablesWithScopes(projectString)[0];
+ const mockProjectVariableFileType = mockVariablesWithScopes(projectString)[1];
+ const mockEnvScope = 'staging';
+ const mockEnvironments = ['*', 'dev', 'staging', 'production'];
+
+ // matches strings made up entirely of 8 or more characters, each of which is a letter
+ // (uppercase or lowercase), digit, or one of the specified special characters
+ const maskableRegex = '^[a-zA-Z0-9_+=/@:.~-]{8,}$';
+
+ // matches strings that consist of 8 or more non-whitespace characters
+ const maskableRawRegex = '^\\S{8,}$';
const defaultProps = {
areEnvironmentsLoading: false,
- hasEnvScopeQuery: true,
+ areScopedVariablesAvailable: true,
+ environments: mockEnvironments,
+ hideEnvironmentScope: false,
+ selectedVariable: {},
mode: ADD_VARIABLE_ACTION,
};
- const createComponent = ({ mountFn = shallowMountExtended, props = {} } = {}) => {
+ const defaultProvide = {
+ isProtectedByDefault: true,
+ environmentScopeLink: '/help/environments',
+ maskableRawRegex,
+ maskableRegex,
+ };
+
+ const createComponent = ({
+ mountFn = shallowMountExtended,
+ props = {},
+ provide = {},
+ stubs = {},
+ } = {}) => {
wrapper = mountFn(CiVariableDrawer, {
propsData: {
...defaultProps,
...props,
},
provide: {
- environmentScopeLink: '/help/environments',
+ ...defaultProvide,
+ ...provide,
},
+ stubs,
});
};
+ const findConfirmBtn = () => wrapper.findByTestId('ci-variable-confirm-btn');
+ const findDisabledEnvironmentScopeDropdown = () => wrapper.findComponent(GlFormInput);
const findDrawer = () => wrapper.findComponent(GlDrawer);
+ const findEnvironmentScopeDropdown = () => wrapper.findComponent(CiEnvironmentsDropdown);
+ const findExpandedCheckbox = () => wrapper.findByTestId('ci-variable-expanded-checkbox');
+ const findKeyField = () => wrapper.findComponent(GlFormCombobox);
+ const findMaskedCheckbox = () => wrapper.findByTestId('ci-variable-masked-checkbox');
+ const findProtectedCheckbox = () => wrapper.findByTestId('ci-variable-protected-checkbox');
+ const findValueField = () => wrapper.findByTestId('ci-variable-value');
+ const findValueLabel = () => wrapper.findByTestId('ci-variable-value-label');
+ const findTitle = () => findDrawer().find('h2');
const findTypeDropdown = () => wrapper.findComponent(GlFormSelect);
describe('validations', () => {
- beforeEach(() => {
- createComponent({ mountFn: mountExtended });
- });
-
describe('type dropdown', () => {
+ beforeEach(() => {
+ createComponent({ mountFn: mountExtended });
+ });
+
it('adds each type option as a dropdown item', () => {
expect(findTypeDropdown().findAll('option')).toHaveLength(variableOptions.length);
@@ -50,20 +98,288 @@ describe('CI Variable Drawer', () => {
variableTypes.envType,
);
});
+
+ it('renders the selected variable type', () => {
+ createComponent({
+ mountFn: mountExtended,
+ props: {
+ areEnvironmentsLoading: true,
+ selectedVariable: mockProjectVariableFileType,
+ },
+ });
+
+ expect(findTypeDropdown().element.value).toBe(variableTypes.fileType);
+ });
+ });
+
+ describe('environment scope dropdown', () => {
+ it('passes correct props to the dropdown', () => {
+ createComponent({
+ props: {
+ areEnvironmentsLoading: true,
+ selectedVariable: { ...mockProjectVariable, environmentScope: mockEnvScope },
+ },
+ stubs: { CiEnvironmentsDropdown },
+ });
+
+ expect(findEnvironmentScopeDropdown().props()).toMatchObject({
+ areEnvironmentsLoading: true,
+ environments: mockEnvironments,
+ selectedEnvironmentScope: mockEnvScope,
+ });
+ });
+
+ it('hides environment scope dropdown when hideEnvironmentScope is true', () => {
+ createComponent({
+ props: { hideEnvironmentScope: true },
+ stubs: { CiEnvironmentsDropdown },
+ });
+
+ expect(findEnvironmentScopeDropdown().exists()).toBe(false);
+ });
+
+ it('disables the environment scope dropdown when areScopedVariablesAvailable is false', () => {
+ createComponent({
+ mountFn: mountExtended,
+ props: { areScopedVariablesAvailable: false },
+ });
+
+ expect(findEnvironmentScopeDropdown().exists()).toBe(false);
+ expect(findDisabledEnvironmentScopeDropdown().attributes('readonly')).toBe('readonly');
+ });
+ });
+
+ describe('protected flag', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('is true by default when isProtectedByDefault is true', () => {
+ expect(findProtectedCheckbox().attributes('checked')).toBeDefined();
+ });
+
+ it('is not checked when isProtectedByDefault is false', () => {
+ createComponent({ provide: { isProtectedByDefault: false } });
+
+ expect(findProtectedCheckbox().attributes('checked')).toBeUndefined();
+ });
+
+ it('inherits value of selected variable when editing', () => {
+ createComponent({
+ props: {
+ selectedVariable: mockProjectVariableFileType,
+ mode: EDIT_VARIABLE_ACTION,
+ },
+ });
+
+ expect(findProtectedCheckbox().attributes('checked')).toBeUndefined();
+ });
+ });
+
+ describe('masked flag', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('is false by default', () => {
+ expect(findMaskedCheckbox().attributes('checked')).toBeUndefined();
+ });
+
+ it('inherits value of selected variable when editing', () => {
+ createComponent({
+ props: {
+ selectedVariable: mockProjectVariableFileType,
+ mode: EDIT_VARIABLE_ACTION,
+ },
+ });
+
+ expect(findMaskedCheckbox().attributes('checked')).toBeDefined();
+ });
+ });
+
+ describe('expanded flag', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('is true by default when adding a variable', () => {
+ expect(findExpandedCheckbox().attributes('checked')).toBeDefined();
+ });
+
+ it('inherits value of selected variable when editing', () => {
+ createComponent({
+ props: {
+ selectedVariable: mockProjectVariableFileType,
+ mode: EDIT_VARIABLE_ACTION,
+ },
+ });
+
+ expect(findExpandedCheckbox().attributes('checked')).toBeUndefined();
+ });
+
+ it("sets the variable's raw value", async () => {
+ await findKeyField().vm.$emit('input', 'NEW_VARIABLE');
+ await findExpandedCheckbox().vm.$emit('change');
+ await findConfirmBtn().vm.$emit('click');
+
+ const sentRawValue = wrapper.emitted('add-variable')[0][0].raw;
+ expect(sentRawValue).toBe(!defaultProps.raw);
+ });
+
+ it('shows help text when variable is not expanded (will be evaluated as raw)', async () => {
+ expect(findExpandedCheckbox().attributes('checked')).toBeDefined();
+ expect(findDrawer().text()).not.toContain(
+ 'Variable value will be evaluated as raw string.',
+ );
+
+ await findExpandedCheckbox().vm.$emit('change');
+
+ expect(findExpandedCheckbox().attributes('checked')).toBeUndefined();
+ expect(findDrawer().text()).toContain('Variable value will be evaluated as raw string.');
+ });
+
+ it('shows help text when variable is expanded and contains the $ character', async () => {
+ expect(findDrawer().text()).not.toContain(
+ 'Unselect "Expand variable reference" if you want to use the variable value as a raw string.',
+ );
+
+ await findValueField().vm.$emit('input', '$NEW_VALUE');
+
+ expect(findDrawer().text()).toContain(
+ 'Unselect "Expand variable reference" if you want to use the variable value as a raw string.',
+ );
+ });
+ });
+
+ describe('key', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+      it('suggests AWS tokens as options', () => {
+ expect(findKeyField().props('tokenList')).toBe(awsTokenList);
+ });
+
+ it('cannot submit with empty key', async () => {
+ expect(findConfirmBtn().attributes('disabled')).toBeDefined();
+
+ await findKeyField().vm.$emit('input', 'NEW_VARIABLE');
+
+ expect(findConfirmBtn().attributes('disabled')).toBeUndefined();
+ });
+ });
+
+ describe('value', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('can submit empty value', async () => {
+ await findKeyField().vm.$emit('input', 'NEW_VARIABLE');
+
+ // value is empty by default
+ expect(findConfirmBtn().attributes('disabled')).toBeUndefined();
+ });
+
+ describe.each`
+ value | canSubmit | trackingErrorProperty
+ ${'secretValue'} | ${true} | ${null}
+ ${'~v@lid:symbols.'} | ${true} | ${null}
+ ${'short'} | ${false} | ${null}
+ ${'multiline\nvalue'} | ${false} | ${'\n'}
+ ${'dollar$ign'} | ${false} | ${'$'}
+ ${'unsupported|char'} | ${false} | ${'|'}
+ `('masking requirements', ({ value, canSubmit, trackingErrorProperty }) => {
+ beforeEach(async () => {
+ createComponent();
+
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ await findKeyField().vm.$emit('input', 'NEW_VARIABLE');
+ await findValueField().vm.$emit('input', value);
+ await findMaskedCheckbox().vm.$emit('input', true);
+ });
+
+ it(`${
+ canSubmit ? 'can submit' : 'shows validation errors and disables submit button'
+ } when value is '${value}'`, () => {
+ if (canSubmit) {
+ expect(findValueLabel().attributes('invalid-feedback')).toBe('');
+ expect(findConfirmBtn().attributes('disabled')).toBeUndefined();
+ } else {
+ expect(findValueLabel().attributes('invalid-feedback')).toBe(
+ 'This variable value does not meet the masking requirements.',
+ );
+ expect(findConfirmBtn().attributes('disabled')).toBeDefined();
+ }
+ });
+
+ it(`${
+ trackingErrorProperty ? 'sends the correct' : 'does not send the'
+ } variable validation tracking event when value is '${value}'`, () => {
+ const trackingEventSent = trackingErrorProperty ? 1 : 0;
+ expect(trackingSpy).toHaveBeenCalledTimes(trackingEventSent);
+
+ if (trackingErrorProperty) {
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, EVENT_ACTION, {
+ label: DRAWER_EVENT_LABEL,
+ property: trackingErrorProperty,
+ });
+ }
+ });
+ });
+
+ it('only sends the tracking event once', async () => {
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ await findKeyField().vm.$emit('input', 'NEW_VARIABLE');
+ await findMaskedCheckbox().vm.$emit('input', true);
+
+ expect(trackingSpy).toHaveBeenCalledTimes(0);
+
+ await findValueField().vm.$emit('input', 'unsupported|char');
+
+ expect(trackingSpy).toHaveBeenCalledTimes(1);
+
+ await findValueField().vm.$emit('input', 'dollar$ign');
+
+ expect(trackingSpy).toHaveBeenCalledTimes(1);
+ });
});
});
describe('drawer events', () => {
- beforeEach(() => {
+ it('emits `close-form` when closing the drawer', async () => {
createComponent();
- });
- it('emits `close-form` when closing the drawer', async () => {
expect(wrapper.emitted('close-form')).toBeUndefined();
await findDrawer().vm.$emit('close');
expect(wrapper.emitted('close-form')).toHaveLength(1);
});
+
+ describe('when adding a variable', () => {
+ beforeEach(() => {
+ createComponent({ stubs: { GlDrawer } });
+ });
+
+      it('title and confirm button render the correct text', () => {
+ expect(findTitle().text()).toBe('Add Variable');
+ expect(findConfirmBtn().text()).toBe('Add Variable');
+ });
+ });
+
+ describe('when editing a variable', () => {
+ beforeEach(() => {
+ createComponent({
+ props: { mode: EDIT_VARIABLE_ACTION },
+ stubs: { GlDrawer },
+ });
+ });
+
+      it('title and confirm button render the correct text', () => {
+ expect(findTitle().text()).toBe('Edit Variable');
+ expect(findConfirmBtn().text()).toBe('Edit Variable');
+ });
+ });
});
});
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js
index f5737c61eea..79dd638e2bd 100644
--- a/spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js
@@ -77,6 +77,21 @@ describe('Ci variable table', () => {
selectedVariable: {},
});
});
+
+ it('passes props down correctly to the ci drawer', async () => {
+ createComponent({ featureFlags: { ciVariableDrawer: true } });
+
+ await findCiVariableTable().vm.$emit('set-selected-variable');
+
+ expect(findCiVariableDrawer().props()).toEqual({
+ areEnvironmentsLoading: defaultProps.areEnvironmentsLoading,
+ areScopedVariablesAvailable: defaultProps.areScopedVariablesAvailable,
+ environments: defaultProps.environments,
+ hideEnvironmentScope: defaultProps.hideEnvironmentScope,
+ mode: ADD_VARIABLE_ACTION,
+ selectedVariable: {},
+ });
+ });
});
describe.each`
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_variable_table_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_table_spec.js
index 39c03a41660..de24c389511 100644
--- a/spec/frontend/ci/ci_variable_list/components/ci_variable_table_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_table_spec.js
@@ -105,9 +105,8 @@ describe('Ci variable table', () => {
index | text
${0} | ${'Key (Click to sort descending)'}
${1} | ${'Value'}
- ${2} | ${'Attributes'}
- ${3} | ${'Environments'}
- ${4} | ${'Actions'}
+ ${2} | ${'Environments'}
+ ${3} | ${'Actions'}
`('renders the $text column', ({ index, text }) => {
expect(findTableColumnText(index)).toEqual(text);
});
diff --git a/spec/frontend/pipelines/pipelines_table_spec.js b/spec/frontend/ci/common/pipelines_table_spec.js
index 950a6b21e16..26dd1a2fcc5 100644
--- a/spec/frontend/pipelines/pipelines_table_spec.js
+++ b/spec/frontend/ci/common/pipelines_table_spec.js
@@ -4,23 +4,23 @@ import { mount } from '@vue/test-utils';
import fixture from 'test_fixtures/pipelines/pipelines.json';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import LegacyPipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/legacy_pipeline_mini_graph.vue';
-import PipelineFailedJobsWidget from '~/pipelines/components/pipelines_list/failure_widget/pipeline_failed_jobs_widget.vue';
-import PipelineOperations from '~/pipelines/components/pipelines_list/pipeline_operations.vue';
-import PipelineTriggerer from '~/pipelines/components/pipelines_list/pipeline_triggerer.vue';
-import PipelineUrl from '~/pipelines/components/pipelines_list/pipeline_url.vue';
-import PipelinesTable from '~/pipelines/components/pipelines_list/pipelines_table.vue';
-import PipelinesTimeago from '~/pipelines/components/pipelines_list/time_ago.vue';
+import LegacyPipelineMiniGraph from '~/ci/pipeline_mini_graph/legacy_pipeline_mini_graph.vue';
+import PipelineFailedJobsWidget from '~/ci/pipelines_page/components/failure_widget/pipeline_failed_jobs_widget.vue';
+import PipelineOperations from '~/ci/pipelines_page/components/pipeline_operations.vue';
+import PipelineTriggerer from '~/ci/pipelines_page/components/pipeline_triggerer.vue';
+import PipelineUrl from '~/ci/pipelines_page/components/pipeline_url.vue';
+import PipelinesTable from '~/ci/common/pipelines_table.vue';
+import PipelinesTimeago from '~/ci/pipelines_page/components/time_ago.vue';
import {
PipelineKeyOptions,
BUTTON_TOOLTIP_RETRY,
BUTTON_TOOLTIP_CANCEL,
TRACKING_CATEGORIES,
-} from '~/pipelines/constants';
+} from '~/ci/constants';
import CiBadgeLink from '~/vue_shared/components/ci_badge_link.vue';
-jest.mock('~/pipelines/event_hub');
+jest.mock('~/ci/event_hub');
describe('Pipelines Table', () => {
let pipeline;
diff --git a/spec/frontend/pipelines/graph_shared/links_layer_spec.js b/spec/frontend/ci/common/private/job_links_layer_spec.js
index 88ba84c395a..c2defc8d770 100644
--- a/spec/frontend/pipelines/graph_shared/links_layer_spec.js
+++ b/spec/frontend/ci/common/private/job_links_layer_spec.js
@@ -1,9 +1,9 @@
import { shallowMount } from '@vue/test-utils';
import mockPipelineResponse from 'test_fixtures/pipelines/pipeline_details.json';
-import LinksInner from '~/pipelines/components/graph_shared/links_inner.vue';
-import LinksLayer from '~/pipelines/components/graph_shared/links_layer.vue';
+import LinksInner from '~/ci/pipeline_details/graph/components/links_inner.vue';
+import LinksLayer from '~/ci/common/private/job_links_layer.vue';
-import { generateResponse } from '../graph/mock_data';
+import { generateResponse } from 'jest/ci/pipeline_details/graph/mock_data';
describe('links layer component', () => {
let wrapper;
diff --git a/spec/frontend/ci/common/private/jobs_filtered_search/jobs_filtered_search_spec.js b/spec/frontend/ci/common/private/jobs_filtered_search/jobs_filtered_search_spec.js
new file mode 100644
index 00000000000..079738557a4
--- /dev/null
+++ b/spec/frontend/ci/common/private/jobs_filtered_search/jobs_filtered_search_spec.js
@@ -0,0 +1,123 @@
+import { GlFilteredSearch } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import {
+ OPERATORS_IS,
+ TOKEN_TITLE_STATUS,
+ TOKEN_TYPE_STATUS,
+ TOKEN_TYPE_JOBS_RUNNER_TYPE,
+ TOKEN_TITLE_JOBS_RUNNER_TYPE,
+} from '~/vue_shared/components/filtered_search_bar/constants';
+import JobsFilteredSearch from '~/ci/common/private/jobs_filtered_search/app.vue';
+import { mockFailedSearchToken } from 'jest/ci/jobs_mock_data';
+
+describe('Jobs filtered search', () => {
+ let wrapper;
+
+ const findFilteredSearch = () => wrapper.findComponent(GlFilteredSearch);
+ const getSearchToken = (type) =>
+ findFilteredSearch()
+ .props('availableTokens')
+ .find((token) => token.type === type);
+
+ const findStatusToken = () => getSearchToken('status');
+ const findRunnerTypeToken = () => getSearchToken('jobs-runner-type');
+
+ const createComponent = (props, provideOptions = {}) => {
+ wrapper = shallowMount(JobsFilteredSearch, {
+ propsData: {
+ ...props,
+ },
+ provide: {
+ glFeatures: { adminJobsFilterRunnerType: true },
+ ...provideOptions,
+ },
+ });
+ };
+
+ it('displays filtered search', () => {
+ createComponent();
+
+ expect(findFilteredSearch().exists()).toBe(true);
+ });
+
+ it('displays status token', () => {
+ createComponent();
+
+ expect(findStatusToken()).toMatchObject({
+ type: TOKEN_TYPE_STATUS,
+ icon: 'status',
+ title: TOKEN_TITLE_STATUS,
+ unique: true,
+ operators: OPERATORS_IS,
+ });
+ });
+
+ it('displays token for runner type', () => {
+ createComponent();
+
+ expect(findRunnerTypeToken()).toMatchObject({
+ type: TOKEN_TYPE_JOBS_RUNNER_TYPE,
+ title: TOKEN_TITLE_JOBS_RUNNER_TYPE,
+ operators: OPERATORS_IS,
+ });
+ });
+
+ it('emits filter token to parent component', () => {
+ createComponent();
+
+ findFilteredSearch().vm.$emit('submit', mockFailedSearchToken);
+
+ expect(wrapper.emitted('filterJobsBySearch')).toEqual([[mockFailedSearchToken]]);
+ });
+
+ it('filtered search value is empty array when no query string is passed', () => {
+ createComponent();
+
+ expect(findFilteredSearch().props('value')).toEqual([]);
+ });
+
+ describe('with query string passed', () => {
+ it('filtered search returns correct data shape', () => {
+ const tokenStatusesValue = 'SUCCESS';
+ const tokenRunnerTypesValue = 'INSTANCE_VALUE';
+
+ createComponent({
+ queryString: { statuses: tokenStatusesValue, runnerTypes: tokenRunnerTypesValue },
+ });
+
+ expect(findFilteredSearch().props('value')).toEqual([
+ { type: TOKEN_TYPE_STATUS, value: { data: tokenStatusesValue, operator: '=' } },
+ {
+ type: TOKEN_TYPE_JOBS_RUNNER_TYPE,
+ value: { data: tokenRunnerTypesValue, operator: '=' },
+ },
+ ]);
+ });
+ });
+
+ describe('when feature flag `adminJobsFilterRunnerType` is disabled', () => {
+ const provideOptions = { glFeatures: { adminJobsFilterRunnerType: false } };
+
+ it('does not display token for runner type', () => {
+ createComponent(null, provideOptions);
+
+ expect(findRunnerTypeToken()).toBeUndefined();
+ });
+
+ describe('with query string passed', () => {
+      it('filtered search returns only the data shape for the `status` search token, not the `jobs runner type` token', () => {
+ const tokenStatusesValue = 'SUCCESS';
+ const tokenRunnerTypesValue = 'INSTANCE_VALUE';
+
+ createComponent(
+ { queryString: { statuses: tokenStatusesValue, runnerTypes: tokenRunnerTypesValue } },
+ provideOptions,
+ );
+
+ expect(findFilteredSearch().props('value')).toEqual([
+ { type: TOKEN_TYPE_STATUS, value: { data: tokenStatusesValue, operator: '=' } },
+ ]);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/jobs/components/filtered_search/tokens/job_status_token_spec.js b/spec/frontend/ci/common/private/jobs_filtered_search/tokens/job_status_token_spec.js
index 6755b854f01..78a1963d939 100644
--- a/spec/frontend/jobs/components/filtered_search/tokens/job_status_token_spec.js
+++ b/spec/frontend/ci/common/private/jobs_filtered_search/tokens/job_status_token_spec.js
@@ -1,7 +1,7 @@
import { GlFilteredSearchToken, GlFilteredSearchSuggestion, GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { stubComponent } from 'helpers/stub_component';
-import JobStatusToken from '~/jobs/components/filtered_search/tokens/job_status_token.vue';
+import JobStatusToken from '~/ci/common/private/jobs_filtered_search/tokens/job_status_token.vue';
import {
TOKEN_TITLE_STATUS,
TOKEN_TYPE_STATUS,
diff --git a/spec/frontend/ci/common/private/jobs_filtered_search/utils_spec.js b/spec/frontend/ci/common/private/jobs_filtered_search/utils_spec.js
new file mode 100644
index 00000000000..8f6d2368bf4
--- /dev/null
+++ b/spec/frontend/ci/common/private/jobs_filtered_search/utils_spec.js
@@ -0,0 +1,22 @@
+import { validateQueryString } from '~/ci/common/private/jobs_filtered_search/utils';
+
+describe('Filtered search utils', () => {
+ describe('validateQueryString', () => {
+ it.each`
+ queryStringObject | expected
+ ${{ statuses: 'SUCCESS' }} | ${{ statuses: 'SUCCESS' }}
+ ${{ statuses: 'failed' }} | ${{ statuses: 'FAILED' }}
+ ${{ runnerTypes: 'instance_type' }} | ${{ runnerTypes: 'INSTANCE_TYPE' }}
+ ${{ runnerTypes: 'wrong_runner_type' }} | ${null}
+ ${{ statuses: 'SUCCESS', runnerTypes: 'instance_type' }} | ${{ statuses: 'SUCCESS', runnerTypes: 'INSTANCE_TYPE' }}
+ ${{ wrong: 'SUCCESS' }} | ${null}
+ ${{ statuses: 'wrong' }} | ${null}
+ ${{ wrong: 'wrong' }} | ${null}
+ `(
+ 'when provided $queryStringObject, the expected result is $expected',
+ ({ queryStringObject, expected }) => {
+ expect(validateQueryString(queryStringObject)).toEqual(expected);
+ },
+ );
+ });
+});
diff --git a/spec/frontend/jobs/components/job/empty_state_spec.js b/spec/frontend/ci/job_details/components/empty_state_spec.js
index 970c2591795..992ed88e81b 100644
--- a/spec/frontend/jobs/components/job/empty_state_spec.js
+++ b/spec/frontend/ci/job_details/components/empty_state_spec.js
@@ -1,7 +1,7 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import EmptyState from '~/jobs/components/job/empty_state.vue';
-import ManualVariablesForm from '~/jobs/components/job/manual_variables_form.vue';
-import { mockFullPath, mockId } from './mock_data';
+import EmptyState from '~/ci/job_details/components/empty_state.vue';
+import ManualVariablesForm from '~/ci/job_details/components/manual_variables_form.vue';
+import { mockFullPath, mockId } from '../mock_data';
describe('Empty State', () => {
let wrapper;
diff --git a/spec/frontend/jobs/components/job/environments_block_spec.js b/spec/frontend/ci/job_details/components/environments_block_spec.js
index ab36f79ea5e..56ae6b44e9a 100644
--- a/spec/frontend/jobs/components/job/environments_block_spec.js
+++ b/spec/frontend/ci/job_details/components/environments_block_spec.js
@@ -1,5 +1,5 @@
import { mount } from '@vue/test-utils';
-import EnvironmentsBlock from '~/jobs/components/job/environments_block.vue';
+import EnvironmentsBlock from '~/ci/job_details/components/environments_block.vue';
const TEST_CLUSTER_NAME = 'test_cluster';
const TEST_CLUSTER_PATH = 'path/to/test_cluster';
diff --git a/spec/frontend/jobs/components/job/erased_block_spec.js b/spec/frontend/ci/job_details/components/erased_block_spec.js
index aeab676fc7e..7eb856f97f1 100644
--- a/spec/frontend/jobs/components/job/erased_block_spec.js
+++ b/spec/frontend/ci/job_details/components/erased_block_spec.js
@@ -1,6 +1,6 @@
import { GlLink } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
-import ErasedBlock from '~/jobs/components/job/erased_block.vue';
+import ErasedBlock from '~/ci/job_details/components/erased_block.vue';
import { getTimeago } from '~/lib/utils/datetime_utility';
describe('Erased block', () => {
diff --git a/spec/frontend/vue_shared/components/header_ci_component_spec.js b/spec/frontend/ci/job_details/components/job_header_spec.js
index da9bc0f8a2f..6fc55732353 100644
--- a/spec/frontend/vue_shared/components/header_ci_component_spec.js
+++ b/spec/frontend/ci/job_details/components/job_header_spec.js
@@ -2,7 +2,7 @@ import { GlButton, GlAvatarLink, GlTooltip } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import CiBadgeLink from '~/vue_shared/components/ci_badge_link.vue';
-import HeaderCi from '~/vue_shared/components/header_ci_component.vue';
+import JobHeader from '~/ci/job_details/components/job_header.vue';
import TimeagoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
describe('Header CI Component', () => {
@@ -16,6 +16,7 @@ describe('Header CI Component', () => {
text: 'failed',
details_path: 'path',
},
+ name: 'Job build_job',
time: '2017-05-08T14:57:39.781Z',
user: {
id: 1234,
@@ -25,7 +26,7 @@ describe('Header CI Component', () => {
email: 'foo@bar.com',
avatar_url: 'link',
},
- hasSidebarButton: true,
+ shouldRenderTriggeredLabel: true,
};
const findCiBadgeLink = () => wrapper.findComponent(CiBadgeLink);
@@ -33,12 +34,12 @@ describe('Header CI Component', () => {
const findUserLink = () => wrapper.findComponent(GlAvatarLink);
const findSidebarToggleBtn = () => wrapper.findComponent(GlButton);
const findStatusTooltip = () => wrapper.findComponent(GlTooltip);
- const findActionButtons = () => wrapper.findByTestId('ci-header-action-buttons');
- const findHeaderItemText = () => wrapper.findByTestId('ci-header-item-text');
+ const findActionButtons = () => wrapper.findByTestId('job-header-action-buttons');
+ const findJobName = () => wrapper.findByTestId('job-name');
const createComponent = (props, slots) => {
wrapper = extendedWrapper(
- shallowMount(HeaderCi, {
+ shallowMount(JobHeader, {
propsData: {
...defaultProps,
...props,
@@ -50,7 +51,7 @@ describe('Header CI Component', () => {
describe('render', () => {
beforeEach(() => {
- createComponent({ itemName: 'Pipeline' });
+ createComponent();
});
it('should render status badge', () => {
@@ -72,7 +73,7 @@ describe('Header CI Component', () => {
describe('user avatar', () => {
beforeEach(() => {
- createComponent({ itemName: 'Pipeline' });
+ createComponent();
});
it('contains the username', () => {
@@ -93,7 +94,6 @@ describe('Header CI Component', () => {
beforeEach(() => {
createComponent({
- itemName: 'Pipeline',
user: { ...defaultProps.user, status: { message: STATUS_MESSAGE } },
});
});
@@ -108,7 +108,6 @@ describe('Header CI Component', () => {
beforeEach(() => {
createComponent({
- itemName: 'Pipeline',
user: { ...defaultProps.user, id: `gid://gitlab/User/${1}` },
});
});
@@ -125,29 +124,19 @@ describe('Header CI Component', () => {
});
});
- describe('with item id', () => {
+ describe('job name', () => {
beforeEach(() => {
- createComponent({ itemName: 'Pipeline', itemId: '123' });
+ createComponent();
});
- it('should render item name and id', () => {
- expect(findHeaderItemText().text()).toBe('Pipeline #123');
- });
- });
-
- describe('without item id', () => {
- beforeEach(() => {
- createComponent({ itemName: 'Job build_job' });
- });
-
- it('should render item name', () => {
- expect(findHeaderItemText().text()).toBe('Job build_job');
+ it('should render the job name', () => {
+ expect(findJobName().text()).toBe('Job build_job');
});
});
describe('slot', () => {
it('should render header action buttons', () => {
- createComponent({ itemName: 'Job build_job' }, { slots: { default: 'Test Actions' } });
+ createComponent({}, { slots: { default: 'Test Actions' } });
expect(findActionButtons().exists()).toBe(true);
expect(findActionButtons().text()).toBe('Test Actions');
@@ -156,10 +145,10 @@ describe('Header CI Component', () => {
describe('shouldRenderTriggeredLabel', () => {
it('should render created keyword when the shouldRenderTriggeredLabel is false', () => {
- createComponent({ shouldRenderTriggeredLabel: false, itemName: 'Job build_job' });
+ createComponent({ shouldRenderTriggeredLabel: false });
expect(wrapper.text()).toContain('created');
- expect(wrapper.text()).not.toContain('triggered');
+ expect(wrapper.text()).not.toContain('started');
});
});
});
diff --git a/spec/frontend/jobs/components/job/job_log_controllers_spec.js b/spec/frontend/ci/job_details/components/job_log_controllers_spec.js
index 7b6d58f63d1..84c664aca34 100644
--- a/spec/frontend/jobs/components/job/job_log_controllers_spec.js
+++ b/spec/frontend/ci/job_details/components/job_log_controllers_spec.js
@@ -1,10 +1,10 @@
import { GlSearchBoxByClick } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
-import JobLogControllers from '~/jobs/components/job/job_log_controllers.vue';
+import JobLogControllers from '~/ci/job_details/components/job_log_controllers.vue';
import HelpPopover from '~/vue_shared/components/help_popover.vue';
import { backoffMockImplementation } from 'helpers/backoff_helper';
import * as commonUtils from '~/lib/utils/common_utils';
-import { mockJobLog } from '../../mock_data';
+import { mockJobLog } from 'jest/ci/jobs_mock_data';
const mockToastShow = jest.fn();
@@ -307,11 +307,9 @@ describe('Job log controllers', () => {
});
it('emits search results', () => {
- const expectedSearchResults = [[[mockJobLog[6].lines[1], mockJobLog[6].lines[2]]]];
-
findJobLogSearch().vm.$emit('submit');
- expect(wrapper.emitted('searchResults')).toEqual(expectedSearchResults);
+ expect(wrapper.emitted('searchResults')).toHaveLength(1);
});
it('clears search results', () => {
diff --git a/spec/frontend/jobs/components/log/collapsible_section_spec.js b/spec/frontend/ci/job_details/components/log/collapsible_section_spec.js
index 5adedea28a5..e3d5c448338 100644
--- a/spec/frontend/jobs/components/log/collapsible_section_spec.js
+++ b/spec/frontend/ci/job_details/components/log/collapsible_section_spec.js
@@ -1,6 +1,7 @@
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
-import CollapsibleSection from '~/jobs/components/log/collapsible_section.vue';
+import CollapsibleSection from '~/ci/job_details/components/log/collapsible_section.vue';
+import LogLineHeader from '~/ci/job_details/components/log/line_header.vue';
import { collapsibleSectionClosed, collapsibleSectionOpened } from './mock_data';
describe('Job Log Collapsible Section', () => {
@@ -10,6 +11,7 @@ describe('Job Log Collapsible Section', () => {
const findCollapsibleLine = () => wrapper.find('.collapsible-line');
const findCollapsibleLineSvg = () => wrapper.find('.collapsible-line svg');
+ const findLogLineHeader = () => wrapper.findComponent(LogLineHeader);
const createComponent = (props = {}) => {
wrapper = mount(CollapsibleSection, {
@@ -68,4 +70,26 @@ describe('Job Log Collapsible Section', () => {
await nextTick();
expect(wrapper.emitted('onClickCollapsibleLine').length).toBe(1);
});
+
+ describe('with search results', () => {
+ it('passes isHighlighted prop correctly', () => {
+ const mockSearchResults = [
+ {
+ content: [{ text: 'foo' }],
+ lineNumber: 1,
+ offset: 5,
+ section: 'prepare-script',
+ section_header: true,
+ },
+ ];
+
+ createComponent({
+ section: collapsibleSectionOpened,
+ jobLogEndpoint,
+ searchResults: mockSearchResults,
+ });
+
+ expect(findLogLineHeader().props('isHighlighted')).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/jobs/components/log/duration_badge_spec.js b/spec/frontend/ci/job_details/components/log/duration_badge_spec.js
index 644d05366a0..0d5f60cefd1 100644
--- a/spec/frontend/jobs/components/log/duration_badge_spec.js
+++ b/spec/frontend/ci/job_details/components/log/duration_badge_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import DurationBadge from '~/jobs/components/log/duration_badge.vue';
+import DurationBadge from '~/ci/job_details/components/log/duration_badge.vue';
describe('Job Log Duration Badge', () => {
let wrapper;
diff --git a/spec/frontend/jobs/components/log/line_header_spec.js b/spec/frontend/ci/job_details/components/log/line_header_spec.js
index c02d8c22655..7d1b05346f2 100644
--- a/spec/frontend/jobs/components/log/line_header_spec.js
+++ b/spec/frontend/ci/job_details/components/log/line_header_spec.js
@@ -1,14 +1,14 @@
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import setWindowLocation from 'helpers/set_window_location_helper';
-import DurationBadge from '~/jobs/components/log/duration_badge.vue';
-import LineHeader from '~/jobs/components/log/line_header.vue';
-import LineNumber from '~/jobs/components/log/line_number.vue';
+import DurationBadge from '~/ci/job_details/components/log/duration_badge.vue';
+import LineHeader from '~/ci/job_details/components/log/line_header.vue';
+import LineNumber from '~/ci/job_details/components/log/line_number.vue';
describe('Job Log Header Line', () => {
let wrapper;
- const data = {
+ const defaultProps = {
line: {
content: [
{
@@ -22,7 +22,7 @@ describe('Job Log Header Line', () => {
path: '/jashkenas/underscore/-/jobs/335',
};
- const createComponent = (props = {}) => {
+ const createComponent = (props = defaultProps) => {
wrapper = mount(LineHeader, {
propsData: {
...props,
@@ -32,7 +32,7 @@ describe('Job Log Header Line', () => {
describe('line', () => {
beforeEach(() => {
- createComponent(data);
+ createComponent();
});
it('renders the line number component', () => {
@@ -40,17 +40,17 @@ describe('Job Log Header Line', () => {
});
    it('renders a span with the provided text', () => {
- expect(wrapper.find('span').text()).toBe(data.line.content[0].text);
+ expect(wrapper.find('span').text()).toBe(defaultProps.line.content[0].text);
});
it('renders the provided style as a class attribute', () => {
- expect(wrapper.find('span').classes()).toContain(data.line.content[0].style);
+ expect(wrapper.find('span').classes()).toContain(defaultProps.line.content[0].style);
});
});
  describe('when isClosed is true', () => {
beforeEach(() => {
- createComponent({ ...data, isClosed: true });
+ createComponent({ ...defaultProps, isClosed: true });
});
it('sets icon name to be chevron-lg-right', () => {
@@ -60,7 +60,7 @@ describe('Job Log Header Line', () => {
  describe('when isClosed is false', () => {
beforeEach(() => {
- createComponent({ ...data, isClosed: false });
+ createComponent({ ...defaultProps, isClosed: false });
});
it('sets icon name to be chevron-lg-down', () => {
@@ -70,7 +70,7 @@ describe('Job Log Header Line', () => {
describe('on click', () => {
beforeEach(() => {
- createComponent(data);
+ createComponent();
});
it('emits toggleLine event', async () => {
@@ -83,7 +83,7 @@ describe('Job Log Header Line', () => {
describe('with duration', () => {
beforeEach(() => {
- createComponent({ ...data, duration: '00:10' });
+ createComponent({ ...defaultProps, duration: '00:10' });
});
it('renders the duration badge', () => {
@@ -96,7 +96,7 @@ describe('Job Log Header Line', () => {
beforeEach(() => {
setWindowLocation(`http://foo.com/root/ci-project/-/jobs/6353#L77`);
- createComponent(data);
+ createComponent();
});
it('highlights line', () => {
@@ -108,12 +108,26 @@ describe('Job Log Header Line', () => {
beforeEach(() => {
setWindowLocation(`http://foo.com/root/ci-project/-/jobs/6353`);
- createComponent(data);
+ createComponent();
});
it('does not highlight line', () => {
expect(wrapper.classes()).not.toContain('gl-bg-gray-700');
});
});
+
+ describe('search results', () => {
+ it('highlights the job log lines', () => {
+ createComponent({ ...defaultProps, isHighlighted: true });
+
+ expect(wrapper.classes()).toContain('gl-bg-gray-700');
+ });
+
+ it('does not highlight the job log lines', () => {
+ createComponent();
+
+ expect(wrapper.classes()).not.toContain('gl-bg-gray-700');
+ });
+ });
});
});
diff --git a/spec/frontend/jobs/components/log/line_number_spec.js b/spec/frontend/ci/job_details/components/log/line_number_spec.js
index 4130c124a30..d5c1d0fd985 100644
--- a/spec/frontend/jobs/components/log/line_number_spec.js
+++ b/spec/frontend/ci/job_details/components/log/line_number_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import LineNumber from '~/jobs/components/log/line_number.vue';
+import LineNumber from '~/ci/job_details/components/log/line_number.vue';
describe('Job Log Line Number', () => {
let wrapper;
diff --git a/spec/frontend/jobs/components/log/line_spec.js b/spec/frontend/ci/job_details/components/log/line_spec.js
index fad7a03beef..b6f3a2b68df 100644
--- a/spec/frontend/jobs/components/log/line_spec.js
+++ b/spec/frontend/ci/job_details/components/log/line_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
-import Line from '~/jobs/components/log/line.vue';
-import LineNumber from '~/jobs/components/log/line_number.vue';
+import Line from '~/ci/job_details/components/log/line.vue';
+import LineNumber from '~/ci/job_details/components/log/line_number.vue';
import setWindowLocation from 'helpers/set_window_location_helper';
const httpUrl = 'http://example.com';
@@ -182,16 +182,6 @@ describe('Job Log Line', () => {
});
describe('job log search', () => {
- const mockSearchResults = [
- {
- offset: 1533,
- content: [{ text: '$ echo "82.71"', style: 'term-fg-l-green term-bold' }],
- section: 'step-script',
- lineNumber: 20,
- },
- { offset: 1560, content: [{ text: '82.71' }], section: 'step-script', lineNumber: 21 },
- ];
-
it('applies highlight class to search result elements', () => {
createComponent({
line: {
@@ -201,7 +191,7 @@ describe('Job Log Line', () => {
lineNumber: 21,
},
path: '/root/ci-project/-/jobs/1089',
- searchResults: mockSearchResults,
+ isHighlighted: true,
});
expect(wrapper.classes()).toContain('gl-bg-gray-700');
@@ -216,7 +206,6 @@ describe('Job Log Line', () => {
lineNumber: 29,
},
path: '/root/ci-project/-/jobs/1089',
- searchResults: mockSearchResults,
});
expect(wrapper.classes()).not.toContain('gl-bg-gray-700');
diff --git a/spec/frontend/jobs/components/log/log_spec.js b/spec/frontend/ci/job_details/components/log/log_spec.js
index 9407b340950..cc1621b87d6 100644
--- a/spec/frontend/jobs/components/log/log_spec.js
+++ b/spec/frontend/ci/job_details/components/log/log_spec.js
@@ -4,9 +4,9 @@ import Vue from 'vue';
import Vuex from 'vuex';
import waitForPromises from 'helpers/wait_for_promises';
import { scrollToElement } from '~/lib/utils/common_utils';
-import Log from '~/jobs/components/log/log.vue';
-import LogLineHeader from '~/jobs/components/log/line_header.vue';
-import { logLinesParser } from '~/jobs/store/utils';
+import Log from '~/ci/job_details/components/log/log.vue';
+import LogLineHeader from '~/ci/job_details/components/log/line_header.vue';
+import { logLinesParser } from '~/ci/job_details/store/utils';
import { jobLog } from './mock_data';
jest.mock('~/lib/utils/common_utils', () => ({
@@ -23,8 +23,11 @@ describe('Job Log', () => {
Vue.use(Vuex);
- const createComponent = () => {
+ const createComponent = (props) => {
wrapper = mount(Log, {
+ propsData: {
+ ...props,
+ },
store,
});
};
@@ -47,6 +50,7 @@ describe('Job Log', () => {
});
const findCollapsibleLine = () => wrapper.findComponent(LogLineHeader);
+ const findAllCollapsibleLines = () => wrapper.findAllComponents(LogLineHeader);
describe('line numbers', () => {
beforeEach(() => {
@@ -131,5 +135,28 @@ describe('Job Log', () => {
expect(wrapper.find('#L6').exists()).toBe(true);
});
});
+
+ describe('with search results', () => {
+ it('passes isHighlighted prop correctly', () => {
+ const mockSearchResults = [
+ {
+ offset: 1002,
+ content: [
+ {
+ text: 'Using Docker executor with image dev.gitlab.org3',
+ },
+ ],
+ section: 'prepare-executor',
+ section_header: true,
+ lineNumber: 2,
+ },
+ ];
+
+ createComponent({ searchResults: mockSearchResults });
+
+ expect(findAllCollapsibleLines().at(0).props('isHighlighted')).toBe(true);
+ expect(findAllCollapsibleLines().at(1).props('isHighlighted')).toBe(false);
+ });
+ });
});
});
diff --git a/spec/frontend/jobs/components/log/mock_data.js b/spec/frontend/ci/job_details/components/log/mock_data.js
index fa51b92a044..fa51b92a044 100644
--- a/spec/frontend/jobs/components/log/mock_data.js
+++ b/spec/frontend/ci/job_details/components/log/mock_data.js
diff --git a/spec/frontend/jobs/components/job/manual_variables_form_spec.js b/spec/frontend/ci/job_details/components/manual_variables_form_spec.js
index 989fe5c11e9..3391cafb4fc 100644
--- a/spec/frontend/jobs/components/job/manual_variables_form_spec.js
+++ b/spec/frontend/ci/job_details/components/manual_variables_form_spec.js
@@ -6,14 +6,14 @@ import { createAlert } from '~/alert';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import { TYPENAME_CI_BUILD } from '~/graphql_shared/constants';
-import { JOB_GRAPHQL_ERRORS } from '~/jobs/constants';
+import { JOB_GRAPHQL_ERRORS } from '~/ci/constants';
import { convertToGraphQLId } from '~/graphql_shared/utils';
import waitForPromises from 'helpers/wait_for_promises';
import { redirectTo } from '~/lib/utils/url_utility'; // eslint-disable-line import/no-deprecated
-import ManualVariablesForm from '~/jobs/components/job/manual_variables_form.vue';
-import getJobQuery from '~/jobs/components/job/graphql/queries/get_job.query.graphql';
-import playJobMutation from '~/jobs/components/job/graphql/mutations/job_play_with_variables.mutation.graphql';
-import retryJobMutation from '~/jobs/components/job/graphql/mutations/job_retry_with_variables.mutation.graphql';
+import ManualVariablesForm from '~/ci/job_details/components/manual_variables_form.vue';
+import getJobQuery from '~/ci/job_details/graphql/queries/get_job.query.graphql';
+import playJobMutation from '~/ci/job_details/graphql/mutations/job_play_with_variables.mutation.graphql';
+import retryJobMutation from '~/ci/job_details/graphql/mutations/job_retry_with_variables.mutation.graphql';
import {
mockFullPath,
@@ -22,7 +22,7 @@ import {
mockJobWithVariablesResponse,
mockJobPlayMutationData,
mockJobRetryMutationData,
-} from './mock_data';
+} from '../mock_data';
const localVue = createLocalVue();
jest.mock('~/alert');
diff --git a/spec/frontend/jobs/components/job/artifacts_block_spec.js b/spec/frontend/ci/job_details/components/sidebar/artifacts_block_spec.js
index f9e52a5ae43..1d61bf3243f 100644
--- a/spec/frontend/jobs/components/job/artifacts_block_spec.js
+++ b/spec/frontend/ci/job_details/components/sidebar/artifacts_block_spec.js
@@ -1,7 +1,7 @@
import { GlPopover } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import { trimText } from 'helpers/text_helper';
-import ArtifactsBlock from '~/jobs/components/job/sidebar/artifacts_block.vue';
+import ArtifactsBlock from '~/ci/job_details/components/sidebar/artifacts_block.vue';
import { getTimeago } from '~/lib/utils/datetime_utility';
describe('Artifacts block', () => {
@@ -16,10 +16,10 @@ describe('Artifacts block', () => {
});
const findArtifactRemoveElt = () => wrapper.findByTestId('artifacts-remove-timeline');
- const findJobLockedElt = () => wrapper.findByTestId('job-locked-message');
+ const findJobLockedElt = () => wrapper.findByTestId('artifacts-locked-message-content');
const findKeepBtn = () => wrapper.findByTestId('keep-artifacts');
const findDownloadBtn = () => wrapper.findByTestId('download-artifacts');
- const findBrowseBtn = () => wrapper.findByTestId('browse-artifacts');
+ const findBrowseBtn = () => wrapper.findByTestId('browse-artifacts-button');
const findArtifactsHelpLink = () => wrapper.findByTestId('artifacts-help-link');
const findPopover = () => wrapper.findComponent(GlPopover);
diff --git a/spec/frontend/jobs/components/job/commit_block_spec.js b/spec/frontend/ci/job_details/components/sidebar/commit_block_spec.js
index 1c28b5079d7..e9a848bcd11 100644
--- a/spec/frontend/jobs/components/job/commit_block_spec.js
+++ b/spec/frontend/ci/job_details/components/sidebar/commit_block_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import CommitBlock from '~/jobs/components/job/sidebar/commit_block.vue';
+import CommitBlock from '~/ci/job_details/components/sidebar/commit_block.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
describe('Commit block', () => {
diff --git a/spec/frontend/ci/job_details/components/sidebar/external_links_block_spec.js b/spec/frontend/ci/job_details/components/sidebar/external_links_block_spec.js
new file mode 100644
index 00000000000..1f2c448f1c6
--- /dev/null
+++ b/spec/frontend/ci/job_details/components/sidebar/external_links_block_spec.js
@@ -0,0 +1,49 @@
+import { GlLink } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import ExternalLinksBlock from '~/ci/job_details/components/sidebar/external_links_block.vue';
+
+describe('External links block', () => {
+ let wrapper;
+
+ const createWrapper = (propsData) => {
+ wrapper = mountExtended(ExternalLinksBlock, {
+ propsData: {
+ ...propsData,
+ },
+ });
+ };
+
+ const findAllLinks = () => wrapper.findAllComponents(GlLink);
+ const findLink = () => findAllLinks().at(0);
+
+ it('renders a list of links', () => {
+ createWrapper({
+ externalLinks: [
+ {
+ label: 'URL 1',
+ url: 'https://url1.example.com/',
+ },
+ {
+ label: 'URL 2',
+ url: 'https://url2.example.com/',
+ },
+ ],
+ });
+
+ expect(findAllLinks()).toHaveLength(2);
+ });
+
+ it('renders a link', () => {
+ createWrapper({
+ externalLinks: [
+ {
+ label: 'Example URL',
+ url: 'https://example.com/',
+ },
+ ],
+ });
+
+ expect(findLink().text()).toBe('Example URL');
+ expect(findLink().attributes('href')).toBe('https://example.com/');
+ });
+});
diff --git a/spec/frontend/jobs/components/job/job_container_item_spec.js b/spec/frontend/ci/job_details/components/sidebar/job_container_item_spec.js
index 39782130d38..0eabaefd5de 100644
--- a/spec/frontend/jobs/components/job/job_container_item_spec.js
+++ b/spec/frontend/ci/job_details/components/sidebar/job_container_item_spec.js
@@ -2,9 +2,9 @@ import { GlIcon, GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import delayedJobFixture from 'test_fixtures/jobs/delayed.json';
-import JobContainerItem from '~/jobs/components/job/sidebar/job_container_item.vue';
+import JobContainerItem from '~/ci/job_details/components/sidebar/job_container_item.vue';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
-import job from '../../mock_data';
+import job from 'jest/ci/jobs_mock_data';
describe('JobContainerItem', () => {
let wrapper;
diff --git a/spec/frontend/jobs/components/job/job_retry_forward_deployment_modal_spec.js b/spec/frontend/ci/job_details/components/sidebar/job_retry_forward_deployment_modal_spec.js
index a44a13259aa..075bccd57cc 100644
--- a/spec/frontend/jobs/components/job/job_retry_forward_deployment_modal_spec.js
+++ b/spec/frontend/ci/job_details/components/sidebar/job_retry_forward_deployment_modal_spec.js
@@ -1,9 +1,8 @@
import { GlLink, GlModal } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import JobRetryForwardDeploymentModal from '~/jobs/components/job/sidebar/job_retry_forward_deployment_modal.vue';
-import { JOB_RETRY_FORWARD_DEPLOYMENT_MODAL } from '~/jobs/constants';
-import createStore from '~/jobs/store';
-import job from '../../mock_data';
+import JobRetryForwardDeploymentModal from '~/ci/job_details/components/sidebar/job_retry_forward_deployment_modal.vue';
+import createStore from '~/ci/job_details/store';
+import job from 'jest/ci/jobs_mock_data';
describe('Job Retry Forward Deployment Modal', () => {
let store;
@@ -32,9 +31,11 @@ describe('Job Retry Forward Deployment Modal', () => {
describe('Modal configuration', () => {
it('should display the correct messages', () => {
const modal = findModal();
- expect(modal.attributes('title')).toMatch(JOB_RETRY_FORWARD_DEPLOYMENT_MODAL.title);
- expect(modal.text()).toMatch(JOB_RETRY_FORWARD_DEPLOYMENT_MODAL.info);
- expect(modal.text()).toMatch(JOB_RETRY_FORWARD_DEPLOYMENT_MODAL.areYouSure);
+ expect(modal.attributes('title')).toMatch('Are you sure you want to retry this job?');
+ expect(modal.text()).toMatch(
+ "You're about to retry a job that failed because it attempted to deploy code that is older than the latest deployment. Retrying this job could result in overwriting the environment with the older source code.",
+ );
+ expect(modal.text()).toMatch('Are you sure you want to proceed?');
});
});
@@ -49,7 +50,7 @@ describe('Job Retry Forward Deployment Modal', () => {
createWrapper({ provide: { retryOutdatedJobDocsUrl } });
expect(findLink().attributes('href')).toBe(retryOutdatedJobDocsUrl);
- expect(findLink().text()).toMatch(JOB_RETRY_FORWARD_DEPLOYMENT_MODAL.moreInfo);
+ expect(findLink().text()).toMatch('More information');
});
});
diff --git a/spec/frontend/jobs/components/job/job_sidebar_retry_button_spec.js b/spec/frontend/ci/job_details/components/sidebar/job_sidebar_retry_button_spec.js
index 8a63bfdc3d6..8fdf6b72ee1 100644
--- a/spec/frontend/jobs/components/job/job_sidebar_retry_button_spec.js
+++ b/spec/frontend/ci/job_details/components/sidebar/job_sidebar_retry_button_spec.js
@@ -1,7 +1,7 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import JobsSidebarRetryButton from '~/jobs/components/job/sidebar/job_sidebar_retry_button.vue';
-import createStore from '~/jobs/store';
-import job from '../../mock_data';
+import JobsSidebarRetryButton from '~/ci/job_details/components/sidebar/job_sidebar_retry_button.vue';
+import createStore from '~/ci/job_details/store';
+import job from 'jest/ci/jobs_mock_data';
describe('Job Sidebar Retry Button', () => {
let store;
diff --git a/spec/frontend/jobs/components/job/jobs_container_spec.js b/spec/frontend/ci/job_details/components/sidebar/jobs_container_spec.js
index 05660880751..b2b675199ed 100644
--- a/spec/frontend/jobs/components/job/jobs_container_spec.js
+++ b/spec/frontend/ci/job_details/components/sidebar/jobs_container_spec.js
@@ -1,7 +1,7 @@
import { GlLink } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import JobsContainer from '~/jobs/components/job/sidebar/jobs_container.vue';
+import JobsContainer from '~/ci/job_details/components/sidebar/jobs_container.vue';
describe('Jobs List block', () => {
let wrapper;
diff --git a/spec/frontend/jobs/components/job/sidebar_detail_row_spec.js b/spec/frontend/ci/job_details/components/sidebar/sidebar_detail_row_spec.js
index 546f5392caf..52c886e3c88 100644
--- a/spec/frontend/jobs/components/job/sidebar_detail_row_spec.js
+++ b/spec/frontend/ci/job_details/components/sidebar/sidebar_detail_row_spec.js
@@ -1,5 +1,5 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import SidebarDetailRow from '~/jobs/components/job/sidebar/sidebar_detail_row.vue';
+import SidebarDetailRow from '~/ci/job_details/components/sidebar/sidebar_detail_row.vue';
import { DOCS_URL } from 'jh_else_ce/lib/utils/url_utility';
describe('Sidebar detail row', () => {
diff --git a/spec/frontend/jobs/components/job/sidebar_header_spec.js b/spec/frontend/ci/job_details/components/sidebar/sidebar_header_spec.js
index cf182330578..1063bec6f3b 100644
--- a/spec/frontend/jobs/components/job/sidebar_header_spec.js
+++ b/spec/frontend/ci/job_details/components/sidebar/sidebar_header_spec.js
@@ -3,10 +3,10 @@ import VueApollo from 'vue-apollo';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import SidebarHeader from '~/jobs/components/job/sidebar/sidebar_header.vue';
-import JobRetryButton from '~/jobs/components/job/sidebar/job_sidebar_retry_button.vue';
-import getJobQuery from '~/jobs/components/job/graphql/queries/get_job.query.graphql';
-import { mockFullPath, mockId, mockJobResponse } from './mock_data';
+import SidebarHeader from '~/ci/job_details/components/sidebar/sidebar_header.vue';
+import JobRetryButton from '~/ci/job_details/components/sidebar/job_sidebar_retry_button.vue';
+import getJobQuery from '~/ci/job_details/graphql/queries/get_job.query.graphql';
+import { mockFullPath, mockId, mockJobResponse } from '../../mock_data';
Vue.use(VueApollo);
@@ -53,6 +53,8 @@ describe('Sidebar Header', () => {
const findCancelButton = () => wrapper.findByTestId('cancel-button');
const findEraseButton = () => wrapper.findByTestId('job-log-erase-link');
+ const findNewIssueButton = () => wrapper.findByTestId('job-new-issue');
+ const findTerminalLink = () => wrapper.findByTestId('terminal-link');
const findJobName = () => wrapper.findByTestId('job-name');
const findRetryButton = () => wrapper.findComponent(JobRetryButton);
@@ -67,6 +69,8 @@ describe('Sidebar Header', () => {
expect(findCancelButton().exists()).toBe(false);
expect(findEraseButton().exists()).toBe(false);
expect(findRetryButton().exists()).toBe(false);
+ expect(findNewIssueButton().exists()).toBe(false);
+ expect(findTerminalLink().exists()).toBe(false);
});
it('renders a retry button with a path', async () => {
@@ -83,5 +87,15 @@ describe('Sidebar Header', () => {
await createComponentWithApollo({ restJob: { erase_path: 'erase/path' } });
expect(findEraseButton().exists()).toBe(true);
});
+
+ it('should render link to new issue', async () => {
+ await createComponentWithApollo({ restJob: { new_issue_path: 'new/issue/path' } });
+ expect(findNewIssueButton().attributes('href')).toBe('new/issue/path');
+ });
+
+ it('should render terminal link', async () => {
+ await createComponentWithApollo({ restJob: { terminal_path: 'terminal/path' } });
+ expect(findTerminalLink().attributes('href')).toBe('terminal/path');
+ });
});
});
diff --git a/spec/frontend/jobs/components/job/job_sidebar_details_container_spec.js b/spec/frontend/ci/job_details/components/sidebar/sidebar_job_details_container_spec.js
index c1028f3929d..e188d99b8b1 100644
--- a/spec/frontend/jobs/components/job/job_sidebar_details_container_spec.js
+++ b/spec/frontend/ci/job_details/components/sidebar/sidebar_job_details_container_spec.js
@@ -1,9 +1,9 @@
import { shallowMount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import DetailRow from '~/jobs/components/job/sidebar/sidebar_detail_row.vue';
-import SidebarJobDetailsContainer from '~/jobs/components/job/sidebar/sidebar_job_details_container.vue';
-import createStore from '~/jobs/store';
-import job from '../../mock_data';
+import DetailRow from '~/ci/job_details/components/sidebar/sidebar_detail_row.vue';
+import SidebarJobDetailsContainer from '~/ci/job_details/components/sidebar/sidebar_job_details_container.vue';
+import createStore from '~/ci/job_details/store';
+import job from 'jest/ci/jobs_mock_data';
describe('Job Sidebar Details Container', () => {
let store;
@@ -53,6 +53,7 @@ describe('Job Sidebar Details Container', () => {
['erased_at', 'Erased: 3 weeks ago'],
['finished_at', 'Finished: 3 weeks ago'],
['queued_duration', 'Queued: 9 seconds'],
+ ['id', 'Job ID: #4757'],
['runner', 'Runner: #1 (ABCDEFGH) local ci runner'],
['coverage', 'Coverage: 20%'],
])('uses %s to render job-%s', async (detail, value) => {
@@ -77,7 +78,7 @@ describe('Job Sidebar Details Container', () => {
createWrapper();
await store.dispatch('receiveJobSuccess', job);
- expect(findAllDetailsRow()).toHaveLength(7);
+ expect(findAllDetailsRow()).toHaveLength(8);
});
describe('duration row', () => {
diff --git a/spec/frontend/jobs/components/job/sidebar_spec.js b/spec/frontend/ci/job_details/components/sidebar/sidebar_spec.js
index fbff64b4d78..88e1f41b270 100644
--- a/spec/frontend/jobs/components/job/sidebar_spec.js
+++ b/spec/frontend/ci/job_details/components/sidebar/sidebar_spec.js
@@ -4,13 +4,14 @@ import MockAdapter from 'axios-mock-adapter';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
-import ArtifactsBlock from '~/jobs/components/job/sidebar/artifacts_block.vue';
-import JobRetryForwardDeploymentModal from '~/jobs/components/job/sidebar/job_retry_forward_deployment_modal.vue';
-import JobsContainer from '~/jobs/components/job/sidebar/jobs_container.vue';
-import Sidebar from '~/jobs/components/job/sidebar/sidebar.vue';
-import StagesDropdown from '~/jobs/components/job/sidebar/stages_dropdown.vue';
-import createStore from '~/jobs/store';
-import job, { jobsInStage } from '../../mock_data';
+import ArtifactsBlock from '~/ci/job_details/components/sidebar/artifacts_block.vue';
+import ExternalLinksBlock from '~/ci/job_details/components/sidebar/external_links_block.vue';
+import JobRetryForwardDeploymentModal from '~/ci/job_details/components/sidebar/job_retry_forward_deployment_modal.vue';
+import JobsContainer from '~/ci/job_details/components/sidebar/jobs_container.vue';
+import Sidebar from '~/ci/job_details/components/sidebar/sidebar.vue';
+import StagesDropdown from '~/ci/job_details/components/sidebar/stages_dropdown.vue';
+import createStore from '~/ci/job_details/store';
+import job, { jobsInStage } from 'jest/ci/jobs_mock_data';
describe('Sidebar details block', () => {
let mock;
@@ -20,8 +21,7 @@ describe('Sidebar details block', () => {
const forwardDeploymentFailure = 'forward_deployment_failure';
const findModal = () => wrapper.findComponent(JobRetryForwardDeploymentModal);
const findArtifactsBlock = () => wrapper.findComponent(ArtifactsBlock);
- const findNewIssueButton = () => wrapper.findByTestId('job-new-issue');
- const findTerminalLink = () => wrapper.findByTestId('terminal-link');
+ const findExternalLinksBlock = () => wrapper.findComponent(ExternalLinksBlock);
const findJobStagesDropdown = () => wrapper.findComponent(StagesDropdown);
const findJobsContainer = () => wrapper.findComponent(JobsContainer);
@@ -48,36 +48,6 @@ describe('Sidebar details block', () => {
});
});
- describe('without terminal path', () => {
- it('does not render terminal link', async () => {
- createWrapper();
- await store.dispatch('receiveJobSuccess', job);
-
- expect(findTerminalLink().exists()).toBe(false);
- });
- });
-
- describe('with terminal path', () => {
- it('renders terminal link', async () => {
- createWrapper();
- await store.dispatch('receiveJobSuccess', { ...job, terminal_path: 'job/43123/terminal' });
-
- expect(findTerminalLink().exists()).toBe(true);
- });
- });
-
- describe('actions', () => {
- beforeEach(() => {
- createWrapper();
- return store.dispatch('receiveJobSuccess', job);
- });
-
- it('should render link to new issue', () => {
- expect(findNewIssueButton().attributes('href')).toBe(job.new_issue_path);
- expect(findNewIssueButton().text()).toBe('New issue');
- });
- });
-
describe('forward deployment failure', () => {
describe('when the relevant data is missing', () => {
it.each`
@@ -213,4 +183,40 @@ describe('Sidebar details block', () => {
expect(findArtifactsBlock().exists()).toBe(true);
});
});
+
+ describe('external links', () => {
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ it('external links block is not shown if there are no external links', () => {
+ expect(findExternalLinksBlock().exists()).toBe(false);
+ });
+
+ it('external links block is shown if there are external links', async () => {
+ store.state.job.annotations = [
+ {
+ name: 'external_links',
+ data: [
+ {
+ external_link: {
+ label: 'URL 1',
+ url: 'https://url1.example.com/',
+ },
+ },
+ {
+ external_link: {
+ label: 'URL 2',
+ url: 'https://url2.example.com/',
+ },
+ },
+ ],
+ },
+ ];
+
+ await nextTick();
+
+ expect(findExternalLinksBlock().exists()).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/jobs/components/job/stages_dropdown_spec.js b/spec/frontend/ci/job_details/components/sidebar/stages_dropdown_spec.js
index c42edc62183..e007896c81e 100644
--- a/spec/frontend/jobs/components/job/stages_dropdown_spec.js
+++ b/spec/frontend/ci/job_details/components/sidebar/stages_dropdown_spec.js
@@ -2,20 +2,20 @@ import { GlDisclosureDropdown, GlLink, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { Mousetrap } from '~/lib/mousetrap';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import StagesDropdown from '~/jobs/components/job/sidebar/stages_dropdown.vue';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import StagesDropdown from '~/ci/job_details/components/sidebar/stages_dropdown.vue';
+import CiBadgeLink from '~/vue_shared/components/ci_badge_link.vue';
import * as copyToClipboard from '~/behaviors/copy_to_clipboard';
import {
mockPipelineWithoutRef,
mockPipelineWithoutMR,
mockPipelineWithAttachedMR,
mockPipelineDetached,
-} from '../../mock_data';
+} from 'jest/ci/jobs_mock_data';
describe('Stages Dropdown', () => {
let wrapper;
- const findStatus = () => wrapper.findComponent(CiIcon);
+ const findStatus = () => wrapper.findComponent(CiBadgeLink);
const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
const findSelectedStageText = () => findDropdown().props('toggleText');
@@ -46,7 +46,8 @@ describe('Stages Dropdown', () => {
});
it('renders pipeline status', () => {
- expect(findStatus().exists()).toBe(true);
+ expect(findStatus().props('status')).toBe(mockPipelineWithoutMR.details.status);
+ expect(findStatus().props('size')).toBe('sm');
});
it('renders dropdown with stages', () => {
diff --git a/spec/frontend/jobs/components/job/trigger_block_spec.js b/spec/frontend/ci/job_details/components/sidebar/trigger_block_spec.js
index 8bb2c1f3ad8..f2b00c42d53 100644
--- a/spec/frontend/jobs/components/job/trigger_block_spec.js
+++ b/spec/frontend/ci/job_details/components/sidebar/trigger_block_spec.js
@@ -1,6 +1,6 @@
import { GlButton, GlTableLite } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
-import TriggerBlock from '~/jobs/components/job/sidebar/trigger_block.vue';
+import TriggerBlock from '~/ci/job_details/components/sidebar/trigger_block.vue';
describe('Trigger block', () => {
let wrapper;
diff --git a/spec/frontend/jobs/components/job/stuck_block_spec.js b/spec/frontend/ci/job_details/components/stuck_block_spec.js
index 0f014a9222b..ec3b2d45a68 100644
--- a/spec/frontend/jobs/components/job/stuck_block_spec.js
+++ b/spec/frontend/ci/job_details/components/stuck_block_spec.js
@@ -1,6 +1,6 @@
import { GlBadge, GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import StuckBlock from '~/jobs/components/job/stuck_block.vue';
+import StuckBlock from '~/ci/job_details/components/stuck_block.vue';
describe('Stuck Block Job component', () => {
let wrapper;
diff --git a/spec/frontend/jobs/components/job/unmet_prerequisites_block_spec.js b/spec/frontend/ci/job_details/components/unmet_prerequisites_block_spec.js
index 1072cdd6781..08966743901 100644
--- a/spec/frontend/jobs/components/job/unmet_prerequisites_block_spec.js
+++ b/spec/frontend/ci/job_details/components/unmet_prerequisites_block_spec.js
@@ -1,6 +1,6 @@
import { GlAlert, GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import UnmetPrerequisitesBlock from '~/jobs/components/job/unmet_prerequisites_block.vue';
+import UnmetPrerequisitesBlock from '~/ci/job_details/components/unmet_prerequisites_block.vue';
describe('Unmet Prerequisites Block Job component', () => {
let wrapper;
diff --git a/spec/frontend/jobs/components/job/job_app_spec.js b/spec/frontend/ci/job_details/job_app_spec.js
index 8f5700ee22d..c2d91771495 100644
--- a/spec/frontend/jobs/components/job/job_app_spec.js
+++ b/spec/frontend/ci/job_details/job_app_spec.js
@@ -5,20 +5,20 @@ import { GlLoadingIcon } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { TEST_HOST } from 'helpers/test_constants';
-import EmptyState from '~/jobs/components/job/empty_state.vue';
-import EnvironmentsBlock from '~/jobs/components/job/environments_block.vue';
-import ErasedBlock from '~/jobs/components/job/erased_block.vue';
-import JobApp from '~/jobs/components/job/job_app.vue';
-import JobLog from '~/jobs/components/log/log.vue';
-import JobLogTopBar from 'ee_else_ce/jobs/components/job/job_log_controllers.vue';
-import Sidebar from '~/jobs/components/job/sidebar/sidebar.vue';
-import StuckBlock from '~/jobs/components/job/stuck_block.vue';
-import UnmetPrerequisitesBlock from '~/jobs/components/job/unmet_prerequisites_block.vue';
-import createStore from '~/jobs/store';
+import EmptyState from '~/ci/job_details/components/empty_state.vue';
+import EnvironmentsBlock from '~/ci/job_details/components/environments_block.vue';
+import ErasedBlock from '~/ci/job_details/components/erased_block.vue';
+import JobApp from '~/ci/job_details/job_app.vue';
+import JobLog from '~/ci/job_details/components/log/log.vue';
+import JobLogTopBar from 'ee_else_ce/ci/job_details/components/job_log_controllers.vue';
+import Sidebar from '~/ci/job_details/components/sidebar/sidebar.vue';
+import StuckBlock from '~/ci/job_details/components/stuck_block.vue';
+import UnmetPrerequisitesBlock from '~/ci/job_details/components/unmet_prerequisites_block.vue';
+import createStore from '~/ci/job_details/store';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
-import { MANUAL_STATUS } from '~/jobs/constants';
-import job from '../../mock_data';
+import { MANUAL_STATUS } from '~/ci/constants';
+import job from 'jest/ci/jobs_mock_data';
import { mockPendingJobData } from './mock_data';
describe('Job App', () => {
diff --git a/spec/frontend/jobs/components/job/mock_data.js b/spec/frontend/ci/job_details/mock_data.js
index fb3a361c9c9..fb3a361c9c9 100644
--- a/spec/frontend/jobs/components/job/mock_data.js
+++ b/spec/frontend/ci/job_details/mock_data.js
diff --git a/spec/frontend/jobs/store/actions_spec.js b/spec/frontend/ci/job_details/store/actions_spec.js
index 73a158d52d8..bb5c1fe32bd 100644
--- a/spec/frontend/jobs/store/actions_spec.js
+++ b/spec/frontend/ci/job_details/store/actions_spec.js
@@ -26,9 +26,9 @@ import {
hideSidebar,
showSidebar,
toggleSidebar,
-} from '~/jobs/store/actions';
-import * as types from '~/jobs/store/mutation_types';
-import state from '~/jobs/store/state';
+} from '~/ci/job_details/store/actions';
+import * as types from '~/ci/job_details/store/mutation_types';
+import state from '~/ci/job_details/store/state';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
diff --git a/spec/frontend/jobs/store/getters_spec.js b/spec/frontend/ci/job_details/store/getters_spec.js
index c13b051c672..dfa5f9d4781 100644
--- a/spec/frontend/jobs/store/getters_spec.js
+++ b/spec/frontend/ci/job_details/store/getters_spec.js
@@ -1,5 +1,5 @@
-import * as getters from '~/jobs/store/getters';
-import state from '~/jobs/store/state';
+import * as getters from '~/ci/job_details/store/getters';
+import state from '~/ci/job_details/store/state';
describe('Job Store Getters', () => {
let localState;
diff --git a/spec/frontend/jobs/store/helpers.js b/spec/frontend/ci/job_details/store/helpers.js
index 402ae58971a..6b186e094e7 100644
--- a/spec/frontend/jobs/store/helpers.js
+++ b/spec/frontend/ci/job_details/store/helpers.js
@@ -1,4 +1,4 @@
-import state from '~/jobs/store/state';
+import state from '~/ci/job_details/store/state';
export const resetStore = (store) => {
store.replaceState(state());
diff --git a/spec/frontend/jobs/store/mutations_spec.js b/spec/frontend/ci/job_details/store/mutations_spec.js
index 89cda3b0544..0835c534fb9 100644
--- a/spec/frontend/jobs/store/mutations_spec.js
+++ b/spec/frontend/ci/job_details/store/mutations_spec.js
@@ -1,6 +1,6 @@
-import * as types from '~/jobs/store/mutation_types';
-import mutations from '~/jobs/store/mutations';
-import state from '~/jobs/store/state';
+import * as types from '~/ci/job_details/store/mutation_types';
+import mutations from '~/ci/job_details/store/mutations';
+import state from '~/ci/job_details/store/state';
describe('Jobs Store Mutations', () => {
let stateCopy;
diff --git a/spec/frontend/jobs/store/utils_spec.js b/spec/frontend/ci/job_details/store/utils_spec.js
index 37a6722c555..4ffba35761e 100644
--- a/spec/frontend/jobs/store/utils_spec.js
+++ b/spec/frontend/ci/job_details/store/utils_spec.js
@@ -7,7 +7,7 @@ import {
isCollapsibleSection,
findOffsetAndRemove,
getIncrementalLineNumber,
-} from '~/jobs/store/utils';
+} from '~/ci/job_details/store/utils';
import {
utilsMockData,
originalTrace,
diff --git a/spec/frontend/ci/job_details/utils_spec.js b/spec/frontend/ci/job_details/utils_spec.js
new file mode 100644
index 00000000000..7b5a97f3939
--- /dev/null
+++ b/spec/frontend/ci/job_details/utils_spec.js
@@ -0,0 +1,265 @@
+import { compactJobLog, filterAnnotations } from '~/ci/job_details/utils';
+import { mockJobLog } from 'jest/ci/jobs_mock_data';
+
+describe('Job utils', () => {
+ describe('compactJobLog', () => {
+ it('compacts job log correctly', () => {
+ const expectedResults = [
+ {
+ content: [
+ {
+ text: 'Running with gitlab-runner 15.0.0 (febb2a09)',
+ },
+ ],
+ lineNumber: 0,
+ offset: 0,
+ },
+ {
+ content: [
+ {
+ text: ' on colima-docker EwM9WzgD',
+ },
+ ],
+ lineNumber: 1,
+ offset: 54,
+ },
+ {
+ content: [
+ {
+ style: 'term-fg-l-cyan term-bold',
+ text: 'Resolving secrets',
+ },
+ ],
+ lineNumber: 2,
+ offset: 91,
+ section: 'resolve-secrets',
+ section_duration: '00:00',
+ section_header: true,
+ },
+ {
+ content: [
+ {
+ style: 'term-fg-l-cyan term-bold',
+ text: 'Preparing the "docker" executor',
+ },
+ ],
+ lineNumber: 4,
+ offset: 218,
+ section: 'prepare-executor',
+ section_duration: '00:01',
+ section_header: true,
+ },
+ {
+ content: [
+ {
+ text: 'Using Docker executor with image ruby:2.7 ...',
+ },
+ ],
+ lineNumber: 5,
+ offset: 317,
+ section: 'prepare-executor',
+ },
+ {
+ content: [
+ {
+ text: 'Pulling docker image ruby:2.7 ...',
+ },
+ ],
+ lineNumber: 6,
+ offset: 372,
+ section: 'prepare-executor',
+ },
+ {
+ content: [
+ {
+ text:
+ 'Using docker image sha256:55106bf6ba7f452c38d01ea760affc6ceb67d4b60068ffadab98d1b7b007668c for ruby:2.7 with digest ruby@sha256:23d08a4bae1a12ee3fce017f83204fcf9a02243443e4a516e65e5ff73810a449 ...',
+ },
+ ],
+ lineNumber: 7,
+ offset: 415,
+ section: 'prepare-executor',
+ },
+ {
+ content: [
+ {
+ style: 'term-fg-l-cyan term-bold',
+ text: 'Preparing environment',
+ },
+ ],
+ lineNumber: 9,
+ offset: 665,
+ section: 'prepare-script',
+ section_duration: '00:01',
+ section_header: true,
+ },
+ {
+ content: [
+ {
+ text: 'Running on runner-ewm9wzgd-project-20-concurrent-0 via 8ea689ec6969...',
+ },
+ ],
+ lineNumber: 10,
+ offset: 752,
+ section: 'prepare-script',
+ },
+ {
+ content: [
+ {
+ style: 'term-fg-l-cyan term-bold',
+ text: 'Getting source from Git repository',
+ },
+ ],
+ lineNumber: 12,
+ offset: 865,
+ section: 'get-sources',
+ section_duration: '00:01',
+ section_header: true,
+ },
+ {
+ content: [
+ {
+ style: 'term-fg-l-green term-bold',
+ text: 'Fetching changes with git depth set to 20...',
+ },
+ ],
+ lineNumber: 13,
+ offset: 962,
+ section: 'get-sources',
+ },
+ {
+ content: [
+ {
+ text: 'Reinitialized existing Git repository in /builds/root/ci-project/.git/',
+ },
+ ],
+ lineNumber: 14,
+ offset: 1019,
+ section: 'get-sources',
+ },
+ {
+ content: [
+ {
+ style: 'term-fg-l-green term-bold',
+ text: 'Checking out e0f63d76 as main...',
+ },
+ ],
+ lineNumber: 15,
+ offset: 1090,
+ section: 'get-sources',
+ },
+ {
+ content: [
+ {
+ style: 'term-fg-l-green term-bold',
+ text: 'Skipping Git submodules setup',
+ },
+ ],
+ lineNumber: 16,
+ offset: 1136,
+ section: 'get-sources',
+ },
+ {
+ content: [
+ {
+ style: 'term-fg-l-cyan term-bold',
+ text: 'Executing "step_script" stage of the job script',
+ },
+ ],
+ lineNumber: 18,
+ offset: 1217,
+ section: 'step-script',
+ section_duration: '00:00',
+ section_header: true,
+ },
+ {
+ content: [
+ {
+ text:
+ 'Using docker image sha256:55106bf6ba7f452c38d01ea760affc6ceb67d4b60068ffadab98d1b7b007668c for ruby:2.7 with digest ruby@sha256:23d08a4bae1a12ee3fce017f83204fcf9a02243443e4a516e65e5ff73810a449 ...',
+ },
+ ],
+ lineNumber: 19,
+ offset: 1327,
+ section: 'step-script',
+ },
+ {
+ content: [
+ {
+ style: 'term-fg-l-green term-bold',
+ text: '$ echo "82.71"',
+ },
+ ],
+ lineNumber: 20,
+ offset: 1533,
+ section: 'step-script',
+ },
+ {
+ content: [
+ {
+ text: '82.71',
+ },
+ ],
+ lineNumber: 21,
+ offset: 1560,
+ section: 'step-script',
+ },
+ {
+ content: [
+ {
+ style: 'term-fg-l-green term-bold',
+ text: 'Job succeeded',
+ },
+ ],
+ lineNumber: 23,
+ offset: 1605,
+ },
+ ];
+
+ expect(compactJobLog(mockJobLog)).toStrictEqual(expectedResults);
+ });
+ });
+
+ describe('filterAnnotations', () => {
+ it('filters annotations by type', () => {
+ const data = [
+ {
+ name: 'b',
+ data: [
+ {
+ dummy: {},
+ },
+ {
+ external_link: {
+ label: 'URL 2',
+ url: 'https://url2.example.com/',
+ },
+ },
+ ],
+ },
+ {
+ name: 'a',
+ data: [
+ {
+ external_link: {
+ label: 'URL 1',
+ url: 'https://url1.example.com/',
+ },
+ },
+ ],
+ },
+ ];
+
+ expect(filterAnnotations(data, 'external_link')).toEqual([
+ {
+ label: 'URL 1',
+ url: 'https://url1.example.com/',
+ },
+ {
+ label: 'URL 2',
+ url: 'https://url2.example.com/',
+ },
+ ]);
+ });
+ });
+});
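A minimal sketch of a filterAnnotations implementation that is consistent with the assertions in the spec above (annotations sorted by name, entries flattened, and only the requested type kept). This is inferred from the test data, not copied from the source.

// Sketch only: behaviour reverse-engineered from the spec's expectations.
export const filterAnnotations = (annotations, type) =>
  [...annotations]
    .sort((a, b) => a.name.localeCompare(b.name)) // 'a' before 'b' => URL 1 before URL 2
    .flatMap(({ data }) => data)
    .filter((entry) => entry[type] !== undefined)
    .map((entry) => entry[type]);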
diff --git a/spec/frontend/jobs/mock_data.js b/spec/frontend/ci/jobs_mock_data.js
index 253e669e889..c428de3b9d8 100644
--- a/spec/frontend/jobs/mock_data.js
+++ b/spec/frontend/ci/jobs_mock_data.js
@@ -989,6 +989,7 @@ export default {
},
erase_path: '/root/ci-mock/-/jobs/4757/erase',
artifacts: [null],
+ annotations: [],
runner: {
id: 1,
short_sha: 'ABCDEFGH',
diff --git a/spec/frontend/jobs/components/table/cells/actions_cell_spec.js b/spec/frontend/ci/jobs_page/components/job_cells/actions_cell_spec.js
index f2d249b6014..1ffd680118e 100644
--- a/spec/frontend/jobs/components/table/cells/actions_cell_spec.js
+++ b/spec/frontend/ci/jobs_page/components/job_cells/actions_cell_spec.js
@@ -5,12 +5,12 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { redirectTo } from '~/lib/utils/url_utility'; // eslint-disable-line import/no-deprecated
-import ActionsCell from '~/jobs/components/table/cells/actions_cell.vue';
-import eventHub from '~/jobs/components/table/event_hub';
-import JobPlayMutation from '~/jobs/components/table/graphql/mutations/job_play.mutation.graphql';
-import JobRetryMutation from '~/jobs/components/table/graphql/mutations/job_retry.mutation.graphql';
-import JobUnscheduleMutation from '~/jobs/components/table/graphql/mutations/job_unschedule.mutation.graphql';
-import JobCancelMutation from '~/jobs/components/table/graphql/mutations/job_cancel.mutation.graphql';
+import ActionsCell from '~/ci/jobs_page/components/job_cells/actions_cell.vue';
+import eventHub from '~/ci/jobs_page/event_hub';
+import JobPlayMutation from '~/ci/jobs_page/graphql/mutations/job_play.mutation.graphql';
+import JobRetryMutation from '~/ci/jobs_page/graphql/mutations/job_retry.mutation.graphql';
+import JobUnscheduleMutation from '~/ci/jobs_page/graphql/mutations/job_unschedule.mutation.graphql';
+import JobCancelMutation from '~/ci/jobs_page/graphql/mutations/job_cancel.mutation.graphql';
import {
mockJobsNodes,
mockJobsNodesAsGuest,
@@ -18,7 +18,7 @@ import {
retryMutationResponse,
unscheduleMutationResponse,
cancelMutationResponse,
-} from '../../../mock_data';
+} from 'jest/ci/jobs_mock_data';
jest.mock('~/lib/utils/url_utility');
diff --git a/spec/frontend/jobs/components/table/cells/duration_cell_spec.js b/spec/frontend/ci/jobs_page/components/job_cells/duration_cell_spec.js
index d015edb0e91..21f14ba0c98 100644
--- a/spec/frontend/jobs/components/table/cells/duration_cell_spec.js
+++ b/spec/frontend/ci/jobs_page/components/job_cells/duration_cell_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import DurationCell from '~/jobs/components/table/cells/duration_cell.vue';
+import DurationCell from '~/ci/jobs_page/components/job_cells/duration_cell.vue';
describe('Duration Cell', () => {
let wrapper;
diff --git a/spec/frontend/jobs/components/table/cells/job_cell_spec.js b/spec/frontend/ci/jobs_page/components/job_cells/job_cell_spec.js
index 73e37eed5f1..cb8f6ed8f9b 100644
--- a/spec/frontend/jobs/components/table/cells/job_cell_spec.js
+++ b/spec/frontend/ci/jobs_page/components/job_cells/job_cell_spec.js
@@ -1,8 +1,8 @@
import { shallowMount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
-import JobCell from '~/jobs/components/table/cells/job_cell.vue';
-import { mockJobsNodes, mockJobsNodesAsGuest } from '../../../mock_data';
+import JobCell from '~/ci/jobs_page/components/job_cells/job_cell.vue';
+import { mockJobsNodes, mockJobsNodesAsGuest } from 'jest/ci/jobs_mock_data';
describe('Job Cell', () => {
let wrapper;
diff --git a/spec/frontend/jobs/components/table/cells/pipeline_cell_spec.js b/spec/frontend/ci/jobs_page/components/job_cells/pipeline_cell_spec.js
index 3d424b20964..6b212846897 100644
--- a/spec/frontend/jobs/components/table/cells/pipeline_cell_spec.js
+++ b/spec/frontend/ci/jobs_page/components/job_cells/pipeline_cell_spec.js
@@ -2,7 +2,7 @@ import { GlAvatar } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
-import PipelineCell from '~/jobs/components/table/cells/pipeline_cell.vue';
+import PipelineCell from '~/ci/jobs_page/components/job_cells/pipeline_cell.vue';
const mockJobWithoutUser = {
id: 'gid://gitlab/Ci::Build/2264',
diff --git a/spec/frontend/jobs/components/table/jobs_table_empty_state_spec.js b/spec/frontend/ci/jobs_page/components/jobs_table_empty_state_spec.js
index 05b066a9edc..f4893c4077f 100644
--- a/spec/frontend/jobs/components/table/jobs_table_empty_state_spec.js
+++ b/spec/frontend/ci/jobs_page/components/jobs_table_empty_state_spec.js
@@ -1,6 +1,6 @@
import { GlEmptyState } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import JobsTableEmptyState from '~/jobs/components/table/jobs_table_empty_state.vue';
+import JobsTableEmptyState from '~/ci/jobs_page/components/jobs_table_empty_state.vue';
describe('Jobs table empty state', () => {
let wrapper;
diff --git a/spec/frontend/jobs/components/table/jobs_table_spec.js b/spec/frontend/ci/jobs_page/components/jobs_table_spec.js
index 654b6d1c130..3adb95bf371 100644
--- a/spec/frontend/jobs/components/table/jobs_table_spec.js
+++ b/spec/frontend/ci/jobs_page/components/jobs_table_spec.js
@@ -1,12 +1,12 @@
import { GlTable } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import JobsTable from '~/jobs/components/table/jobs_table.vue';
+import JobsTable from '~/ci/jobs_page/components/jobs_table.vue';
import CiBadgeLink from '~/vue_shared/components/ci_badge_link.vue';
-import { DEFAULT_FIELDS_ADMIN } from '~/pages/admin/jobs/components/constants';
-import ProjectCell from '~/pages/admin/jobs/components/table/cell/project_cell.vue';
-import RunnerCell from '~/pages/admin/jobs/components/table/cells/runner_cell.vue';
-import { mockJobsNodes, mockAllJobsNodes } from '../../mock_data';
+import { DEFAULT_FIELDS_ADMIN } from '~/ci/admin/jobs_table/constants';
+import ProjectCell from '~/ci/admin/jobs_table/components/cells/project_cell.vue';
+import RunnerCell from '~/ci/admin/jobs_table/components/cells/runner_cell.vue';
+import { mockJobsNodes, mockAllJobsNodes } from 'jest/ci/jobs_mock_data';
describe('Jobs Table', () => {
let wrapper;
@@ -62,6 +62,15 @@ describe('Jobs Table', () => {
});
expect(findAllCoverageJobs()).toHaveLength(jobsThatHaveCoverage.length);
});
+
+ describe('when stage of a job is missing', () => {
+ it('shows no stage', () => {
+ const stagelessJob = { ...mockJobsNodes[0], stage: null };
+ createComponent({ jobs: [stagelessJob] });
+
+ expect(findJobStage().exists()).toBe(false);
+ });
+ });
});
describe('regular user', () => {
diff --git a/spec/frontend/jobs/components/table/jobs_table_tabs_spec.js b/spec/frontend/ci/jobs_page/components/jobs_table_tabs_spec.js
index d20a732508a..c36f3841890 100644
--- a/spec/frontend/jobs/components/table/jobs_table_tabs_spec.js
+++ b/spec/frontend/ci/jobs_page/components/jobs_table_tabs_spec.js
@@ -2,8 +2,8 @@ import { GlTab } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { trimText } from 'helpers/text_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import JobsTableTabs from '~/jobs/components/table/jobs_table_tabs.vue';
-import CancelJobs from '~/pages/admin/jobs/components/cancel_jobs.vue';
+import JobsTableTabs from '~/ci/jobs_page/components/jobs_table_tabs.vue';
+import CancelJobs from '~/ci/admin/jobs_table/components/cancel_jobs.vue';
describe('Jobs Table Tabs', () => {
let wrapper;
diff --git a/spec/frontend/jobs/components/table/graphql/cache_config_spec.js b/spec/frontend/ci/jobs_page/graphql/cache_config_spec.js
index e3b1ca1cce3..cfbd77f4154 100644
--- a/spec/frontend/jobs/components/table/graphql/cache_config_spec.js
+++ b/spec/frontend/ci/jobs_page/graphql/cache_config_spec.js
@@ -1,9 +1,9 @@
-import cacheConfig from '~/jobs/components/table/graphql/cache_config';
+import cacheConfig from '~/ci/jobs_page/graphql/cache_config';
import {
CIJobConnectionExistingCache,
CIJobConnectionIncomingCache,
CIJobConnectionIncomingCacheRunningStatus,
-} from '../../../mock_data';
+} from 'jest/ci/jobs_mock_data';
const firstLoadArgs = { first: 3, statuses: 'PENDING' };
const runningArgs = { first: 3, statuses: 'RUNNING' };
diff --git a/spec/frontend/jobs/components/table/job_table_app_spec.js b/spec/frontend/ci/jobs_page/job_page_app_spec.js
index 032b83ca22b..77443c9d490 100644
--- a/spec/frontend/jobs/components/table/job_table_app_spec.js
+++ b/spec/frontend/ci/jobs_page/job_page_app_spec.js
@@ -7,20 +7,20 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { TEST_HOST } from 'spec/test_constants';
import { createAlert } from '~/alert';
-import getJobsQuery from '~/jobs/components/table/graphql/queries/get_jobs.query.graphql';
-import getJobsCountQuery from '~/jobs/components/table/graphql/queries/get_jobs_count.query.graphql';
-import JobsTable from '~/jobs/components/table/jobs_table.vue';
-import JobsTableApp from '~/jobs/components/table/jobs_table_app.vue';
-import JobsTableTabs from '~/jobs/components/table/jobs_table_tabs.vue';
-import JobsFilteredSearch from '~/jobs/components/filtered_search/jobs_filtered_search.vue';
-import JobsSkeletonLoader from '~/pages/admin/jobs/components/jobs_skeleton_loader.vue';
+import getJobsQuery from '~/ci/jobs_page/graphql/queries/get_jobs.query.graphql';
+import getJobsCountQuery from '~/ci/jobs_page/graphql/queries/get_jobs_count.query.graphql';
+import JobsTable from '~/ci/jobs_page/components/jobs_table.vue';
+import JobsTableApp from '~/ci/jobs_page/jobs_page_app.vue';
+import JobsTableTabs from '~/ci/jobs_page/components/jobs_table_tabs.vue';
+import JobsFilteredSearch from '~/ci/common/private/jobs_filtered_search/app.vue';
+import JobsSkeletonLoader from '~/ci/admin/jobs_table/components/jobs_skeleton_loader.vue';
import * as urlUtils from '~/lib/utils/url_utility';
import {
mockJobsResponsePaginated,
mockJobsResponseEmpty,
mockFailedSearchToken,
mockJobsCountResponse,
-} from '../../mock_data';
+} from 'jest/ci/jobs_mock_data';
const projectPath = 'gitlab-org/gitlab';
Vue.use(VueApollo);
diff --git a/spec/frontend/ci/merge_requests/components/pipelines_table_wrapper_spec.js b/spec/frontend/ci/merge_requests/components/pipelines_table_wrapper_spec.js
new file mode 100644
index 00000000000..df9bf2a4235
--- /dev/null
+++ b/spec/frontend/ci/merge_requests/components/pipelines_table_wrapper_spec.js
@@ -0,0 +1,117 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlLoadingIcon } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+
+import { createAlert } from '~/alert';
+import PipelinesTableWrapper from '~/ci/merge_requests/components/pipelines_table_wrapper.vue';
+import getMergeRequestsPipelines from '~/ci/merge_requests/graphql/queries/get_merge_request_pipelines.query.graphql';
+
+import { mergeRequestPipelinesResponse } from '../mock_data';
+
+Vue.use(VueApollo);
+
+jest.mock('~/alert');
+
+const pipelinesLength = mergeRequestPipelinesResponse.data.project.mergeRequest.pipelines.count;
+
+let wrapper;
+let mergeRequestPipelinesRequest;
+let apolloMock;
+
+const defaultProvide = {
+ graphqlPath: '/api/graphql/',
+ mergeRequestId: 1,
+ targetProjectFullPath: '/group/project',
+};
+
+const createComponent = () => {
+ const handlers = [[getMergeRequestsPipelines, mergeRequestPipelinesRequest]];
+
+ apolloMock = createMockApollo(handlers);
+
+ wrapper = shallowMount(PipelinesTableWrapper, {
+ apolloProvider: apolloMock,
+ provide: {
+ ...defaultProvide,
+ },
+ });
+
+ return waitForPromises();
+};
+
+const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+const findPipelineList = () => wrapper.findAll('li');
+
+beforeEach(() => {
+ mergeRequestPipelinesRequest = jest.fn();
+ mergeRequestPipelinesRequest.mockResolvedValue(mergeRequestPipelinesResponse);
+});
+afterEach(() => {
+ apolloMock = null;
+ createAlert.mockClear();
+});
+
+describe('PipelinesTableWrapper component', () => {
+ describe('When queries are loading', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders the loading icon', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+
+ it('does not render the pipeline list', () => {
+ expect(findPipelineList()).toHaveLength(0);
+ });
+ });
+
+ describe('When there is an error fetching pipelines', () => {
+ beforeEach(async () => {
+ mergeRequestPipelinesRequest.mockRejectedValueOnce({ error: 'API error message' });
+ await createComponent();
+ });
+ it('shows an error message', () => {
+ expect(createAlert).toHaveBeenCalledTimes(1);
+ expect(createAlert).toHaveBeenCalledWith({
+ message: "There was an error fetching this merge request's pipelines.",
+ });
+ });
+ });
+
+ describe('When queries have loaded', () => {
+ beforeEach(async () => {
+ await createComponent();
+ });
+
+ it('does not render the loading icon', () => {
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+
+ it('renders a pipeline list', () => {
+ expect(findPipelineList()).toHaveLength(pipelinesLength);
+ });
+ });
+
+ describe('polling', () => {
+ beforeEach(async () => {
+ await createComponent();
+ });
+
+ it('polls every 10 seconds', () => {
+ expect(mergeRequestPipelinesRequest).toHaveBeenCalledTimes(1);
+
+ jest.advanceTimersByTime(5000);
+
+ expect(mergeRequestPipelinesRequest).toHaveBeenCalledTimes(1);
+
+ jest.advanceTimersByTime(5000);
+
+ expect(mergeRequestPipelinesRequest).toHaveBeenCalledTimes(2);
+ });
+ });
+});
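The polling assertion above (two 5-second timer advances yielding one extra fetch) matches what an Apollo smart query with a 10-second pollInterval would produce. Below is a hedged sketch of that wiring; the query path, injected values, and response shape come from the spec and its mock data, while the query variables and update() mapping are assumptions.

// Hypothetical wiring only; variables() and update() shapes are assumptions.
import getMergeRequestsPipelines from '~/ci/merge_requests/graphql/queries/get_merge_request_pipelines.query.graphql';

export default {
  name: 'PipelinesTableWrapper',
  inject: ['graphqlPath', 'mergeRequestId', 'targetProjectFullPath'],
  data() {
    return { pipelines: [] };
  },
  apollo: {
    pipelines: {
      query: getMergeRequestsPipelines,
      pollInterval: 10000, // matches the two advanceTimersByTime(5000) steps in the spec
      variables() {
        return {
          fullPath: this.targetProjectFullPath,
          mergeRequestIid: this.mergeRequestId,
        };
      },
      update(data) {
        // Mock data nests pipelines under data.project.mergeRequest.pipelines.nodes.
        return data?.project?.mergeRequest?.pipelines?.nodes || [];
      },
    },
  },
};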
diff --git a/spec/frontend/ci/merge_requests/mock_data.js b/spec/frontend/ci/merge_requests/mock_data.js
new file mode 100644
index 00000000000..1d8fdb88aa3
--- /dev/null
+++ b/spec/frontend/ci/merge_requests/mock_data.js
@@ -0,0 +1,30 @@
+const createMergeRequestPipelines = (count = 30) => {
+ const pipelines = [];
+
+ for (let i = 0; i < count; i += 1) {
+ pipelines.push({
+ id: i,
+ iid: i + 10,
+ path: `/project/pipelines/${i}`,
+ });
+ }
+
+ return {
+ count,
+ nodes: pipelines,
+ };
+};
+
+export const mergeRequestPipelinesResponse = {
+ data: {
+ project: {
+ __typename: 'Project',
+ id: 'gid://gitlab/Project/1',
+ mergeRequest: {
+ __typename: 'MergeRequest',
+ id: 'gid://gitlab/MergeRequest/1',
+ pipelines: createMergeRequestPipelines(),
+ },
+ },
+ },
+};
diff --git a/spec/frontend/jobs/mixins/delayed_job_mixin_spec.js b/spec/frontend/ci/mixins/delayed_job_mixin_spec.js
index 098a63719fe..a1dab55bd07 100644
--- a/spec/frontend/jobs/mixins/delayed_job_mixin_spec.js
+++ b/spec/frontend/ci/mixins/delayed_job_mixin_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import delayedJobFixture from 'test_fixtures/jobs/delayed.json';
-import delayedJobMixin from '~/jobs/mixins/delayed_job_mixin';
+import delayedJobMixin from '~/ci/mixins/delayed_job_mixin';
describe('DelayedJobMixin', () => {
let wrapper;
diff --git a/spec/frontend/ci/pipeline_details/dag/components/__snapshots__/dag_graph_spec.js.snap b/spec/frontend/ci/pipeline_details/dag/components/__snapshots__/dag_graph_spec.js.snap
new file mode 100644
index 00000000000..624c89a237c
--- /dev/null
+++ b/spec/frontend/ci/pipeline_details/dag/components/__snapshots__/dag_graph_spec.js.snap
@@ -0,0 +1,743 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`The DAG graph in the basic case renders the graph svg 1`] = `
+<svg
+ height="540"
+ viewBox="0,0,1000,540"
+ width="1000"
+>
+ <g
+ fill="none"
+ stroke-opacity="0.8"
+ >
+ <g
+ class="dag-link gl-cursor-pointer gl-transition-duration-slow gl-transition-property-stroke-opacity gl-transition-timing-function-ease"
+ id="reference-0"
+ >
+ <lineargradient
+ gradientUnits="userSpaceOnUse"
+ id="reference-1"
+ x1="116"
+ x2="361.3333333333333"
+ >
+ <stop
+ offset="0%"
+ stop-color="#e17223"
+ />
+ <stop
+ offset="100%"
+ stop-color="#83ab4a"
+ />
+ </lineargradient>
+ <clippath
+ id="reference-2"
+ >
+ <path
+ d="
+ M100, 129
+ V158
+ H377.3333333333333
+ V100
+ H100
+ Z
+ "
+ />
+ </clippath>
+ <path
+ clip-path="url(#dag-clip63)"
+ d="M108,129L190,129L190,129L369.3333333333333,129"
+ stroke="url(#dag-grad53)"
+ stroke-width="56"
+ style="stroke-linejoin: round;"
+ />
+ </g>
+ <g
+ class="dag-link gl-cursor-pointer gl-transition-duration-slow gl-transition-property-stroke-opacity gl-transition-timing-function-ease"
+ id="reference-3"
+ >
+ <lineargradient
+ gradientUnits="userSpaceOnUse"
+ id="reference-4"
+ x1="377.3333333333333"
+ x2="622.6666666666666"
+ >
+ <stop
+ offset="0%"
+ stop-color="#83ab4a"
+ />
+ <stop
+ offset="100%"
+ stop-color="#6f3500"
+ />
+ </lineargradient>
+ <clippath
+ id="reference-5"
+ >
+ <path
+ d="
+ M361.3333333333333, 129.0000000000002
+ V158.0000000000002
+ H638.6666666666666
+ V100
+ H361.3333333333333
+ Z
+ "
+ />
+ </clippath>
+ <path
+ clip-path="url(#dag-clip64)"
+ d="M369.3333333333333,129L509.3333333333333,129L509.3333333333333,129.0000000000002L630.6666666666666,129.0000000000002"
+ stroke="url(#dag-grad54)"
+ stroke-width="56"
+ style="stroke-linejoin: round;"
+ />
+ </g>
+ <g
+ class="dag-link gl-cursor-pointer gl-transition-duration-slow gl-transition-property-stroke-opacity gl-transition-timing-function-ease"
+ id="reference-6"
+ >
+ <lineargradient
+ gradientUnits="userSpaceOnUse"
+ id="reference-7"
+ x1="116"
+ x2="622.6666666666666"
+ >
+ <stop
+ offset="0%"
+ stop-color="#5772ff"
+ />
+ <stop
+ offset="100%"
+ stop-color="#6f3500"
+ />
+ </lineargradient>
+ <clippath
+ id="reference-8"
+ >
+ <path
+ d="
+ M100, 187.0000000000002
+ V241.00000000000003
+ H638.6666666666666
+ V158.0000000000002
+ H100
+ Z
+ "
+ />
+ </clippath>
+ <path
+ clip-path="url(#dag-clip65)"
+ d="M108,212.00000000000003L306,212.00000000000003L306,187.0000000000002L630.6666666666666,187.0000000000002"
+ stroke="url(#dag-grad55)"
+ stroke-width="56"
+ style="stroke-linejoin: round;"
+ />
+ </g>
+ <g
+ class="dag-link gl-cursor-pointer gl-transition-duration-slow gl-transition-property-stroke-opacity gl-transition-timing-function-ease"
+ id="reference-9"
+ >
+ <lineargradient
+ gradientUnits="userSpaceOnUse"
+ id="reference-10"
+ x1="116"
+ x2="361.3333333333333"
+ >
+ <stop
+ offset="0%"
+ stop-color="#b24800"
+ />
+ <stop
+ offset="100%"
+ stop-color="#006887"
+ />
+ </lineargradient>
+ <clippath
+ id="reference-11"
+ >
+ <path
+ d="
+ M100, 269.9999999999998
+ V324
+ H377.3333333333333
+ V240.99999999999977
+ H100
+ Z
+ "
+ />
+ </clippath>
+ <path
+ clip-path="url(#dag-clip66)"
+ d="M108,295L338.93333333333334,295L338.93333333333334,269.9999999999998L369.3333333333333,269.9999999999998"
+ stroke="url(#dag-grad56)"
+ stroke-width="56"
+ style="stroke-linejoin: round;"
+ />
+ </g>
+ <g
+ class="dag-link gl-cursor-pointer gl-transition-duration-slow gl-transition-property-stroke-opacity gl-transition-timing-function-ease"
+ id="reference-12"
+ >
+ <lineargradient
+ gradientUnits="userSpaceOnUse"
+ id="reference-13"
+ x1="116"
+ x2="361.3333333333333"
+ >
+ <stop
+ offset="0%"
+ stop-color="#25d2d2"
+ />
+ <stop
+ offset="100%"
+ stop-color="#487900"
+ />
+ </lineargradient>
+ <clippath
+ id="reference-14"
+ >
+ <path
+ d="
+ M100, 352.99999999999994
+ V407.00000000000006
+ H377.3333333333333
+ V323.99999999999994
+ H100
+ Z
+ "
+ />
+ </clippath>
+ <path
+ clip-path="url(#dag-clip67)"
+ d="M108,378.00000000000006L144.66666666666669,378.00000000000006L144.66666666666669,352.99999999999994L369.3333333333333,352.99999999999994"
+ stroke="url(#dag-grad57)"
+ stroke-width="56"
+ style="stroke-linejoin: round;"
+ />
+ </g>
+ <g
+ class="dag-link gl-cursor-pointer gl-transition-duration-slow gl-transition-property-stroke-opacity gl-transition-timing-function-ease"
+ id="reference-15"
+ >
+ <lineargradient
+ gradientUnits="userSpaceOnUse"
+ id="reference-16"
+ x1="377.3333333333333"
+ x2="622.6666666666666"
+ >
+ <stop
+ offset="0%"
+ stop-color="#006887"
+ />
+ <stop
+ offset="100%"
+ stop-color="#d84280"
+ />
+ </lineargradient>
+ <clippath
+ id="reference-17"
+ >
+ <path
+ d="
+ M361.3333333333333, 270.0000000000001
+ V299.0000000000001
+ H638.6666666666666
+ V240.99999999999977
+ H361.3333333333333
+ Z
+ "
+ />
+ </clippath>
+ <path
+ clip-path="url(#dag-clip68)"
+ d="M369.3333333333333,269.9999999999998L464,269.9999999999998L464,270.0000000000001L630.6666666666666,270.0000000000001"
+ stroke="url(#dag-grad58)"
+ stroke-width="56"
+ style="stroke-linejoin: round;"
+ />
+ </g>
+ <g
+ class="dag-link gl-cursor-pointer gl-transition-duration-slow gl-transition-property-stroke-opacity gl-transition-timing-function-ease"
+ id="reference-18"
+ >
+ <lineargradient
+ gradientUnits="userSpaceOnUse"
+ id="reference-19"
+ x1="377.3333333333333"
+ x2="622.6666666666666"
+ >
+ <stop
+ offset="0%"
+ stop-color="#487900"
+ />
+ <stop
+ offset="100%"
+ stop-color="#d84280"
+ />
+ </lineargradient>
+ <clippath
+ id="reference-20"
+ >
+ <path
+ d="
+ M361.3333333333333, 328.0000000000001
+ V381.99999999999994
+ H638.6666666666666
+ V299.0000000000001
+ H361.3333333333333
+ Z
+ "
+ />
+ </clippath>
+ <path
+ clip-path="url(#dag-clip69)"
+ d="M369.3333333333333,352.99999999999994L522,352.99999999999994L522,328.0000000000001L630.6666666666666,328.0000000000001"
+ stroke="url(#dag-grad59)"
+ stroke-width="56"
+ style="stroke-linejoin: round;"
+ />
+ </g>
+ <g
+ class="dag-link gl-cursor-pointer gl-transition-duration-slow gl-transition-property-stroke-opacity gl-transition-timing-function-ease"
+ id="reference-21"
+ >
+ <lineargradient
+ gradientUnits="userSpaceOnUse"
+ id="reference-22"
+ x1="377.3333333333333"
+ x2="622.6666666666666"
+ >
+ <stop
+ offset="0%"
+ stop-color="#487900"
+ />
+ <stop
+ offset="100%"
+ stop-color="#3547de"
+ />
+ </lineargradient>
+ <clippath
+ id="reference-23"
+ >
+ <path
+ d="
+ M361.3333333333333, 411
+ V440
+ H638.6666666666666
+ V381.99999999999994
+ H361.3333333333333
+ Z
+ "
+ />
+ </clippath>
+ <path
+ clip-path="url(#dag-clip70)"
+ d="M369.3333333333333,410.99999999999994L580,410.99999999999994L580,411L630.6666666666666,411"
+ stroke="url(#dag-grad60)"
+ stroke-width="56"
+ style="stroke-linejoin: round;"
+ />
+ </g>
+ <g
+ class="dag-link gl-cursor-pointer gl-transition-duration-slow gl-transition-property-stroke-opacity gl-transition-timing-function-ease"
+ id="reference-24"
+ >
+ <lineargradient
+ gradientUnits="userSpaceOnUse"
+ id="reference-25"
+ x1="638.6666666666666"
+ x2="884"
+ >
+ <stop
+ offset="0%"
+ stop-color="#d84280"
+ />
+ <stop
+ offset="100%"
+ stop-color="#006887"
+ />
+ </lineargradient>
+ <clippath
+ id="reference-26"
+ >
+ <path
+ d="
+ M622.6666666666666, 270.1890725105691
+ V299.1890725105691
+ H900
+ V241.0000000000001
+ H622.6666666666666
+ Z
+ "
+ />
+ </clippath>
+ <path
+ clip-path="url(#dag-clip71)"
+ d="M630.6666666666666,270.0000000000001L861.6,270.0000000000001L861.6,270.1890725105691L892,270.1890725105691"
+ stroke="url(#dag-grad61)"
+ stroke-width="56"
+ style="stroke-linejoin: round;"
+ />
+ </g>
+ <g
+ class="dag-link gl-cursor-pointer gl-transition-duration-slow gl-transition-property-stroke-opacity gl-transition-timing-function-ease"
+ id="reference-27"
+ >
+ <lineargradient
+ gradientUnits="userSpaceOnUse"
+ id="reference-28"
+ x1="638.6666666666666"
+ x2="884"
+ >
+ <stop
+ offset="0%"
+ stop-color="#3547de"
+ />
+ <stop
+ offset="100%"
+ stop-color="#275600"
+ />
+ </lineargradient>
+ <clippath
+ id="reference-29"
+ >
+ <path
+ d="
+ M622.6666666666666, 411
+ V440
+ H900
+ V382
+ H622.6666666666666
+ Z
+ "
+ />
+ </clippath>
+ <path
+ clip-path="url(#dag-clip72)"
+ d="M630.6666666666666,411L679.9999999999999,411L679.9999999999999,411L892,411"
+ stroke="url(#dag-grad62)"
+ stroke-width="56"
+ style="stroke-linejoin: round;"
+ />
+ </g>
+ </g>
+ <g>
+ <line
+ class="dag-node gl-cursor-pointer gl-transition-duration-slow gl-transition-property-stroke gl-transition-timing-function-ease"
+ id="reference-30"
+ stroke="#e17223"
+ stroke-linecap="round"
+ stroke-width="16"
+ x1="108"
+ x2="108"
+ y1="104"
+ y2="154.00000000000003"
+ />
+ <line
+ class="dag-node gl-cursor-pointer gl-transition-duration-slow gl-transition-property-stroke gl-transition-timing-function-ease"
+ id="reference-31"
+ stroke="#83ab4a"
+ stroke-linecap="round"
+ stroke-width="16"
+ x1="369"
+ x2="369"
+ y1="104"
+ y2="154"
+ />
+ <line
+ class="dag-node gl-cursor-pointer gl-transition-duration-slow gl-transition-property-stroke gl-transition-timing-function-ease"
+ id="reference-32"
+ stroke="#5772ff"
+ stroke-linecap="round"
+ stroke-width="16"
+ x1="108"
+ x2="108"
+ y1="187.00000000000003"
+ y2="237.00000000000003"
+ />
+ <line
+ class="dag-node gl-cursor-pointer gl-transition-duration-slow gl-transition-property-stroke gl-transition-timing-function-ease"
+ id="reference-33"
+ stroke="#b24800"
+ stroke-linecap="round"
+ stroke-width="16"
+ x1="108"
+ x2="108"
+ y1="270"
+ y2="320.00000000000006"
+ />
+ <line
+ class="dag-node gl-cursor-pointer gl-transition-duration-slow gl-transition-property-stroke gl-transition-timing-function-ease"
+ id="reference-34"
+ stroke="#25d2d2"
+ stroke-linecap="round"
+ stroke-width="16"
+ x1="108"
+ x2="108"
+ y1="353.00000000000006"
+ y2="403.0000000000001"
+ />
+ <line
+ class="dag-node gl-cursor-pointer gl-transition-duration-slow gl-transition-property-stroke gl-transition-timing-function-ease"
+ id="reference-35"
+ stroke="#6f3500"
+ stroke-linecap="round"
+ stroke-width="16"
+ x1="630"
+ x2="630"
+ y1="104.0000000000002"
+ y2="212.00000000000009"
+ />
+ <line
+ class="dag-node gl-cursor-pointer gl-transition-duration-slow gl-transition-property-stroke gl-transition-timing-function-ease"
+ id="reference-36"
+ stroke="#006887"
+ stroke-linecap="round"
+ stroke-width="16"
+ x1="369"
+ x2="369"
+ y1="244.99999999999977"
+ y2="294.99999999999994"
+ />
+ <line
+ class="dag-node gl-cursor-pointer gl-transition-duration-slow gl-transition-property-stroke gl-transition-timing-function-ease"
+ id="reference-37"
+ stroke="#487900"
+ stroke-linecap="round"
+ stroke-width="16"
+ x1="369"
+ x2="369"
+ y1="327.99999999999994"
+ y2="436"
+ />
+ <line
+ class="dag-node gl-cursor-pointer gl-transition-duration-slow gl-transition-property-stroke gl-transition-timing-function-ease"
+ id="reference-38"
+ stroke="#d84280"
+ stroke-linecap="round"
+ stroke-width="16"
+ x1="630"
+ x2="630"
+ y1="245.00000000000009"
+ y2="353"
+ />
+ <line
+ class="dag-node gl-cursor-pointer gl-transition-duration-slow gl-transition-property-stroke gl-transition-timing-function-ease"
+ id="reference-39"
+ stroke="#3547de"
+ stroke-linecap="round"
+ stroke-width="16"
+ x1="630"
+ x2="630"
+ y1="386"
+ y2="436"
+ />
+ <line
+ class="dag-node gl-cursor-pointer gl-transition-duration-slow gl-transition-property-stroke gl-transition-timing-function-ease"
+ id="reference-40"
+ stroke="#006887"
+ stroke-linecap="round"
+ stroke-width="16"
+ x1="892"
+ x2="892"
+ y1="245.18907251056908"
+ y2="295.1890725105691"
+ />
+ <line
+ class="dag-node gl-cursor-pointer gl-transition-duration-slow gl-transition-property-stroke gl-transition-timing-function-ease"
+ id="reference-41"
+ stroke="#275600"
+ stroke-linecap="round"
+ stroke-width="16"
+ x1="892"
+ x2="892"
+ y1="386"
+ y2="436"
+ />
+ </g>
+ <g
+ class="gl-font-sm"
+ >
+ <foreignobject
+ class="gl-overflow-visible"
+ height="58.00000000000003px"
+ requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"
+ width="84"
+ x="8"
+ y="100"
+ >
+ <div
+ class="gl-display-flex gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break gl-pointer-events-none"
+ style="height: 58.00000000000003px; text-align: right;"
+ >
+ build_a
+ </div>
+ </foreignobject>
+ <foreignobject
+ class="gl-overflow-visible"
+ height="25px"
+ requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"
+ width="84"
+ x="369.3333333333333"
+ y="75"
+ >
+ <div
+ class="gl-display-flex gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break gl-pointer-events-none"
+ style="height: 25px; text-align: left;"
+ >
+ test_a
+ </div>
+ </foreignobject>
+ <foreignobject
+ class="gl-overflow-visible"
+ height="58px"
+ requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"
+ width="84"
+ x="8"
+ y="183.00000000000003"
+ >
+ <div
+ class="gl-display-flex gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break gl-pointer-events-none"
+ style="height: 58px; text-align: right;"
+ >
+ test_b
+ </div>
+ </foreignobject>
+ <foreignobject
+ class="gl-overflow-visible"
+ height="58.00000000000006px"
+ requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"
+ width="84"
+ x="8"
+ y="266"
+ >
+ <div
+ class="gl-display-flex gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break gl-pointer-events-none"
+ style="height: 58.00000000000006px; text-align: right;"
+ >
+ post_test_a
+ </div>
+ </foreignobject>
+ <foreignobject
+ class="gl-overflow-visible"
+ height="58.00000000000006px"
+ requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"
+ width="84"
+ x="8"
+ y="349.00000000000006"
+ >
+ <div
+ class="gl-display-flex gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break gl-pointer-events-none"
+ style="height: 58.00000000000006px; text-align: right;"
+ >
+ post_test_b
+ </div>
+ </foreignobject>
+ <foreignobject
+ class="gl-overflow-visible"
+ height="25px"
+ requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"
+ width="84"
+ x="630.6666666666666"
+ y="75.0000000000002"
+ >
+ <div
+ class="gl-display-flex gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break gl-pointer-events-none"
+ style="height: 25px; text-align: right;"
+ >
+ post_test_c
+ </div>
+ </foreignobject>
+ <foreignobject
+ class="gl-overflow-visible"
+ height="25px"
+ requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"
+ width="84"
+ x="369.3333333333333"
+ y="215.99999999999977"
+ >
+ <div
+ class="gl-display-flex gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break gl-pointer-events-none"
+ style="height: 25px; text-align: left;"
+ >
+ staging_a
+ </div>
+ </foreignobject>
+ <foreignobject
+ class="gl-overflow-visible"
+ height="25px"
+ requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"
+ width="84"
+ x="369.3333333333333"
+ y="298.99999999999994"
+ >
+ <div
+ class="gl-display-flex gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break gl-pointer-events-none"
+ style="height: 25px; text-align: left;"
+ >
+ staging_b
+ </div>
+ </foreignobject>
+ <foreignobject
+ class="gl-overflow-visible"
+ height="25px"
+ requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"
+ width="84"
+ x="630.6666666666666"
+ y="216.00000000000009"
+ >
+ <div
+ class="gl-display-flex gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break gl-pointer-events-none"
+ style="height: 25px; text-align: right;"
+ >
+ canary_a
+ </div>
+ </foreignobject>
+ <foreignobject
+ class="gl-overflow-visible"
+ height="25px"
+ requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"
+ width="84"
+ x="630.6666666666666"
+ y="357"
+ >
+ <div
+ class="gl-display-flex gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break gl-pointer-events-none"
+ style="height: 25px; text-align: right;"
+ >
+ canary_c
+ </div>
+ </foreignobject>
+ <foreignobject
+ class="gl-overflow-visible"
+ height="58px"
+ requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"
+ width="84"
+ x="908"
+ y="241.18907251056908"
+ >
+ <div
+ class="gl-display-flex gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break gl-pointer-events-none"
+ style="height: 58px; text-align: left;"
+ >
+ production_a
+ </div>
+ </foreignobject>
+ <foreignobject
+ class="gl-overflow-visible"
+ height="58px"
+ requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"
+ width="84"
+ x="908"
+ y="382"
+ >
+ <div
+ class="gl-display-flex gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break gl-pointer-events-none"
+ style="height: 58px; text-align: left;"
+ >
+ production_d
+ </div>
+ </foreignobject>
+ </g>
+</svg>
+`;
diff --git a/spec/frontend/pipelines/components/dag/dag_annotations_spec.js b/spec/frontend/ci/pipeline_details/dag/components/dag_annotations_spec.js
index 124f02bcec7..d1c338e50c6 100644
--- a/spec/frontend/pipelines/components/dag/dag_annotations_spec.js
+++ b/spec/frontend/ci/pipeline_details/dag/components/dag_annotations_spec.js
@@ -1,8 +1,8 @@
import { GlButton } from '@gitlab/ui';
import { shallowMount, mount } from '@vue/test-utils';
import { nextTick } from 'vue';
-import DagAnnotations from '~/pipelines/components/dag/dag_annotations.vue';
-import { singleNote, multiNote } from './mock_data';
+import DagAnnotations from '~/ci/pipeline_details/dag/components/dag_annotations.vue';
+import { singleNote, multiNote } from '../mock_data';
describe('The DAG annotations', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/components/dag/dag_graph_spec.js b/spec/frontend/ci/pipeline_details/dag/components/dag_graph_spec.js
index 6b46be3dd49..aff83c00e79 100644
--- a/spec/frontend/pipelines/components/dag/dag_graph_spec.js
+++ b/spec/frontend/ci/pipeline_details/dag/components/dag_graph_spec.js
@@ -1,10 +1,10 @@
import { shallowMount } from '@vue/test-utils';
-import { IS_HIGHLIGHTED, LINK_SELECTOR, NODE_SELECTOR } from '~/pipelines/components/dag/constants';
-import DagGraph from '~/pipelines/components/dag/dag_graph.vue';
-import { createSankey } from '~/pipelines/components/dag/drawing_utils';
-import { highlightIn, highlightOut } from '~/pipelines/components/dag/interactions';
-import { removeOrphanNodes } from '~/pipelines/components/parsing_utils';
-import { parsedData } from './mock_data';
+import { IS_HIGHLIGHTED, LINK_SELECTOR, NODE_SELECTOR } from '~/ci/pipeline_details/dag/constants';
+import DagGraph from '~/ci/pipeline_details/dag/components/dag_graph.vue';
+import { createSankey } from '~/ci/pipeline_details/dag/utils/drawing_utils';
+import { highlightIn, highlightOut } from '~/ci/pipeline_details/dag/utils/interactions';
+import { removeOrphanNodes } from '~/ci/pipeline_details/utils/parsing_utils';
+import { parsedData } from '../mock_data';
describe('The DAG graph', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/components/dag/dag_spec.js b/spec/frontend/ci/pipeline_details/dag/dag_spec.js
index 53719065611..de9490be607 100644
--- a/spec/frontend/pipelines/components/dag/dag_spec.js
+++ b/spec/frontend/ci/pipeline_details/dag/dag_spec.js
@@ -1,12 +1,12 @@
import { GlAlert, GlEmptyState } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
-import { ADD_NOTE, REMOVE_NOTE, REPLACE_NOTES } from '~/pipelines/components/dag/constants';
-import Dag from '~/pipelines/components/dag/dag.vue';
-import DagAnnotations from '~/pipelines/components/dag/dag_annotations.vue';
-import DagGraph from '~/pipelines/components/dag/dag_graph.vue';
+import { ADD_NOTE, REMOVE_NOTE, REPLACE_NOTES } from '~/ci/pipeline_details/dag/constants';
+import Dag from '~/ci/pipeline_details/dag/dag.vue';
+import DagAnnotations from '~/ci/pipeline_details/dag/components/dag_annotations.vue';
+import DagGraph from '~/ci/pipeline_details/dag/components/dag_graph.vue';
-import { PARSE_FAILURE, UNSUPPORTED_DATA } from '~/pipelines/constants';
+import { PARSE_FAILURE, UNSUPPORTED_DATA } from '~/ci/pipeline_details/constants';
import {
mockParsedGraphQLNodes,
tooSmallGraph,
diff --git a/spec/frontend/pipelines/components/dag/mock_data.js b/spec/frontend/ci/pipeline_details/dag/mock_data.js
index f27e7cf3d6b..f27e7cf3d6b 100644
--- a/spec/frontend/pipelines/components/dag/mock_data.js
+++ b/spec/frontend/ci/pipeline_details/dag/mock_data.js
diff --git a/spec/frontend/pipelines/components/dag/drawing_utils_spec.js b/spec/frontend/ci/pipeline_details/dag/utils/drawing_utils_spec.js
index 095ded01298..aea8e894bd4 100644
--- a/spec/frontend/pipelines/components/dag/drawing_utils_spec.js
+++ b/spec/frontend/ci/pipeline_details/dag/utils/drawing_utils_spec.js
@@ -1,6 +1,6 @@
-import { createSankey } from '~/pipelines/components/dag/drawing_utils';
-import { parseData } from '~/pipelines/components/parsing_utils';
-import { mockParsedGraphQLNodes } from './mock_data';
+import { createSankey } from '~/ci/pipeline_details/dag/utils/drawing_utils';
+import { parseData } from '~/ci/pipeline_details/utils/parsing_utils';
+import { mockParsedGraphQLNodes } from '../mock_data';
describe('DAG visualization drawing utilities', () => {
const parsed = parseData(mockParsedGraphQLNodes);
diff --git a/spec/frontend/ci/pipeline_details/graph/components/__snapshots__/links_inner_spec.js.snap b/spec/frontend/ci/pipeline_details/graph/components/__snapshots__/links_inner_spec.js.snap
new file mode 100644
index 00000000000..b31c0e59a33
--- /dev/null
+++ b/spec/frontend/ci/pipeline_details/graph/components/__snapshots__/links_inner_spec.js.snap
@@ -0,0 +1,110 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Links Inner component with a large number of needs matches snapshot and has expected path 1`] = `
+<div
+ class="gl-display-flex gl-relative"
+ totalgroups="10"
+>
+ <svg
+ class="gl-absolute gl-pointer-events-none"
+ height="445px"
+ id="reference-0"
+ viewBox="0,0,1019,445"
+ width="1019px"
+ >
+ <path
+ class="gl-fill-transparent gl-stroke-gray-200 gl-transition-duration-slow gl-transition-timing-function-ease"
+ d="M202,118C52,118,52,138,102,138"
+ stroke-width="2"
+ />
+ <path
+ class="gl-fill-transparent gl-stroke-gray-200 gl-transition-duration-slow gl-transition-timing-function-ease"
+ d="M202,118C62,118,62,148,112,148"
+ stroke-width="2"
+ />
+ <path
+ class="gl-fill-transparent gl-stroke-gray-200 gl-transition-duration-slow gl-transition-timing-function-ease"
+ d="M222,138C72,138,72,158,122,158"
+ stroke-width="2"
+ />
+ <path
+ class="gl-fill-transparent gl-stroke-gray-200 gl-transition-duration-slow gl-transition-timing-function-ease"
+ d="M212,128C82,128,82,168,132,168"
+ stroke-width="2"
+ />
+ <path
+ class="gl-fill-transparent gl-stroke-gray-200 gl-transition-duration-slow gl-transition-timing-function-ease"
+ d="M232,148C92,148,92,178,142,178"
+ stroke-width="2"
+ />
+ </svg>
+</div>
+`;
+
+exports[`Links Inner component with a parallel need matches snapshot and has expected path 1`] = `
+<div
+ class="gl-display-flex gl-relative"
+ totalgroups="10"
+>
+ <svg
+ class="gl-absolute gl-pointer-events-none"
+ height="445px"
+ id="reference-0"
+ viewBox="0,0,1019,445"
+ width="1019px"
+ >
+ <path
+ class="gl-fill-transparent gl-stroke-gray-200 gl-transition-duration-slow gl-transition-timing-function-ease"
+ d="M192,108C32,108,32,118,82,118"
+ stroke-width="2"
+ />
+ </svg>
+</div>
+`;
+
+exports[`Links Inner component with one need matches snapshot and has expected path 1`] = `
+<div
+ class="gl-display-flex gl-relative"
+ totalgroups="10"
+>
+ <svg
+ class="gl-absolute gl-pointer-events-none"
+ height="445px"
+ id="reference-0"
+ viewBox="0,0,1019,445"
+ width="1019px"
+ >
+ <path
+ class="gl-fill-transparent gl-stroke-gray-200 gl-transition-duration-slow gl-transition-timing-function-ease"
+ d="M202,118C52,118,52,138,102,138"
+ stroke-width="2"
+ />
+ </svg>
+</div>
+`;
+
+exports[`Links Inner component with same stage needs matches snapshot and has expected path 1`] = `
+<div
+ class="gl-display-flex gl-relative"
+ totalgroups="10"
+>
+ <svg
+ class="gl-absolute gl-pointer-events-none"
+ height="445px"
+ id="reference-0"
+ viewBox="0,0,1019,445"
+ width="1019px"
+ >
+ <path
+ class="gl-fill-transparent gl-stroke-gray-200 gl-transition-duration-slow gl-transition-timing-function-ease"
+ d="M192,108C32,108,32,118,82,118"
+ stroke-width="2"
+ />
+ <path
+ class="gl-fill-transparent gl-stroke-gray-200 gl-transition-duration-slow gl-transition-timing-function-ease"
+ d="M202,118C42,118,42,128,92,128"
+ stroke-width="2"
+ />
+ </svg>
+</div>
+`;
diff --git a/spec/frontend/pipelines/graph/action_component_spec.js b/spec/frontend/ci/pipeline_details/graph/components/action_component_spec.js
index 890255f225e..9e177156d0e 100644
--- a/spec/frontend/pipelines/graph/action_component_spec.js
+++ b/spec/frontend/ci/pipeline_details/graph/components/action_component_spec.js
@@ -5,7 +5,7 @@ import { nextTick } from 'vue';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
-import ActionComponent from '~/pipelines/components/jobs_shared/action_component.vue';
+import ActionComponent from '~/ci/common/private/job_action_component.vue';
describe('pipeline graph action component', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/graph/graph_component_spec.js b/spec/frontend/ci/pipeline_details/graph/components/graph_component_spec.js
index e9bce037800..a98e79c69fe 100644
--- a/spec/frontend/pipelines/graph/graph_component_spec.js
+++ b/spec/frontend/ci/pipeline_details/graph/components/graph_component_spec.js
@@ -1,15 +1,15 @@
import { shallowMount } from '@vue/test-utils';
import mockPipelineResponse from 'test_fixtures/pipelines/pipeline_details.json';
import { mountExtended } from 'helpers/vue_test_utils_helper';
-import { LAYER_VIEW, STAGE_VIEW } from '~/pipelines/components/graph/constants';
-import PipelineGraph from '~/pipelines/components/graph/graph_component.vue';
-import JobItem from '~/pipelines/components/graph/job_item.vue';
-import LinkedPipelinesColumn from '~/pipelines/components/graph/linked_pipelines_column.vue';
-import StageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue';
-import { calculatePipelineLayersInfo } from '~/pipelines/components/graph/utils';
-import LinksLayer from '~/pipelines/components/graph_shared/links_layer.vue';
-
-import { generateResponse, pipelineWithUpstreamDownstream } from './mock_data';
+import { LAYER_VIEW, STAGE_VIEW } from '~/ci/pipeline_details/graph/constants';
+import PipelineGraph from '~/ci/pipeline_details/graph/components/graph_component.vue';
+import JobItem from '~/ci/pipeline_details/graph/components/job_item.vue';
+import LinkedPipelinesColumn from '~/ci/pipeline_details/graph/components/linked_pipelines_column.vue';
+import StageColumnComponent from '~/ci/pipeline_details/graph/components/stage_column_component.vue';
+import { calculatePipelineLayersInfo } from '~/ci/pipeline_details/graph/utils';
+import LinksLayer from '~/ci/common/private/job_links_layer.vue';
+
+import { generateResponse, pipelineWithUpstreamDownstream } from '../mock_data';
describe('graph component', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/graph/graph_view_selector_spec.js b/spec/frontend/ci/pipeline_details/graph/components/graph_view_selector_spec.js
index 65ae9d19978..bf98995de9c 100644
--- a/spec/frontend/pipelines/graph/graph_view_selector_spec.js
+++ b/spec/frontend/ci/pipeline_details/graph/components/graph_view_selector_spec.js
@@ -1,7 +1,7 @@
import { GlAlert, GlButton, GlButtonGroup, GlLoadingIcon } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
-import { LAYER_VIEW, STAGE_VIEW } from '~/pipelines/components/graph/constants';
-import GraphViewSelector from '~/pipelines/components/graph/graph_view_selector.vue';
+import { LAYER_VIEW, STAGE_VIEW } from '~/ci/pipeline_details/graph/constants';
+import GraphViewSelector from '~/ci/pipeline_details/graph/components/graph_view_selector.vue';
describe('the graph view selector component', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/graph/job_group_dropdown_spec.js b/spec/frontend/ci/pipeline_details/graph/components/job_group_dropdown_spec.js
index 1419a7b9982..d5a1cfffe68 100644
--- a/spec/frontend/pipelines/graph/job_group_dropdown_spec.js
+++ b/spec/frontend/ci/pipeline_details/graph/components/job_group_dropdown_spec.js
@@ -1,5 +1,5 @@
import { shallowMount, mount } from '@vue/test-utils';
-import JobGroupDropdown from '~/pipelines/components/graph/job_group_dropdown.vue';
+import JobGroupDropdown from '~/ci/pipeline_details/graph/components/job_group_dropdown.vue';
describe('job group dropdown component', () => {
const group = {
diff --git a/spec/frontend/pipelines/graph/job_item_spec.js b/spec/frontend/ci/pipeline_details/graph/components/job_item_spec.js
index 8a8b0e9aa63..107f0df5c02 100644
--- a/spec/frontend/pipelines/graph/job_item_spec.js
+++ b/spec/frontend/ci/pipeline_details/graph/components/job_item_spec.js
@@ -1,10 +1,10 @@
import MockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
import { GlBadge, GlModal, GlToast } from '@gitlab/ui';
-import JobItem from '~/pipelines/components/graph/job_item.vue';
+import JobItem from '~/ci/pipeline_details/graph/components/job_item.vue';
import axios from '~/lib/utils/axios_utils';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
-import ActionComponent from '~/pipelines/components/jobs_shared/action_component.vue';
+import ActionComponent from '~/ci/common/private/job_action_component.vue';
import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
import {
@@ -15,7 +15,7 @@ import {
mockFailedJob,
triggerJob,
triggerJobWithRetryAction,
-} from './mock_data';
+} from '../mock_data';
describe('pipeline graph job item', () => {
useLocalStorageSpy();
diff --git a/spec/frontend/pipelines/graph/job_name_component_spec.js b/spec/frontend/ci/pipeline_details/graph/components/job_name_component_spec.js
index fca4c43d9fa..ca201aee648 100644
--- a/spec/frontend/pipelines/graph/job_name_component_spec.js
+++ b/spec/frontend/ci/pipeline_details/graph/components/job_name_component_spec.js
@@ -1,5 +1,5 @@
import { mount } from '@vue/test-utils';
-import jobNameComponent from '~/pipelines/components/jobs_shared/job_name_component.vue';
+import jobNameComponent from '~/ci/common/private/job_name_component.vue';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
describe('job name component', () => {
diff --git a/spec/frontend/pipelines/graph/linked_pipeline_spec.js b/spec/frontend/ci/pipeline_details/graph/components/linked_pipeline_spec.js
index 8dae2aac664..5541b0db54a 100644
--- a/spec/frontend/pipelines/graph/linked_pipeline_spec.js
+++ b/spec/frontend/ci/pipeline_details/graph/components/linked_pipeline_spec.js
@@ -6,10 +6,10 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { BV_HIDE_TOOLTIP } from '~/lib/utils/constants';
-import { ACTION_FAILURE, UPSTREAM, DOWNSTREAM } from '~/pipelines/components/graph/constants';
-import LinkedPipelineComponent from '~/pipelines/components/graph/linked_pipeline.vue';
-import CancelPipelineMutation from '~/pipelines/graphql/mutations/cancel_pipeline.mutation.graphql';
-import RetryPipelineMutation from '~/pipelines/graphql/mutations/retry_pipeline.mutation.graphql';
+import { ACTION_FAILURE, UPSTREAM, DOWNSTREAM } from '~/ci/pipeline_details/graph/constants';
+import LinkedPipelineComponent from '~/ci/pipeline_details/graph/components/linked_pipeline.vue';
+import CancelPipelineMutation from '~/ci/pipeline_details/graphql/mutations/cancel_pipeline.mutation.graphql';
+import RetryPipelineMutation from '~/ci/pipeline_details/graphql/mutations/retry_pipeline.mutation.graphql';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import mockPipeline from './linked_pipelines_mock_data';
diff --git a/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js b/spec/frontend/ci/pipeline_details/graph/components/linked_pipelines_column_spec.js
index bcea140f2dd..30f05baceab 100644
--- a/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js
+++ b/spec/frontend/ci/pipeline_details/graph/components/linked_pipelines_column_spec.js
@@ -10,14 +10,14 @@ import {
UPSTREAM,
LAYER_VIEW,
STAGE_VIEW,
-} from '~/pipelines/components/graph/constants';
-import PipelineGraph from '~/pipelines/components/graph/graph_component.vue';
-import LinkedPipeline from '~/pipelines/components/graph/linked_pipeline.vue';
-import LinkedPipelinesColumn from '~/pipelines/components/graph/linked_pipelines_column.vue';
-import * as parsingUtils from '~/pipelines/components/parsing_utils';
-import { LOAD_FAILURE } from '~/pipelines/constants';
-
-import { pipelineWithUpstreamDownstream, wrappedPipelineReturn } from './mock_data';
+} from '~/ci/pipeline_details/graph/constants';
+import PipelineGraph from '~/ci/pipeline_details/graph/components/graph_component.vue';
+import LinkedPipeline from '~/ci/pipeline_details/graph/components/linked_pipeline.vue';
+import LinkedPipelinesColumn from '~/ci/pipeline_details/graph/components/linked_pipelines_column.vue';
+import * as parsingUtils from '~/ci/pipeline_details/utils/parsing_utils';
+import { LOAD_FAILURE } from '~/ci/pipeline_details/constants';
+
+import { pipelineWithUpstreamDownstream, wrappedPipelineReturn } from '../mock_data';
const processedPipeline = pipelineWithUpstreamDownstream(mockPipelineResponse);
diff --git a/spec/frontend/pipelines/graph/linked_pipelines_mock_data.js b/spec/frontend/ci/pipeline_details/graph/components/linked_pipelines_mock_data.js
index f7f5738e46d..f7f5738e46d 100644
--- a/spec/frontend/pipelines/graph/linked_pipelines_mock_data.js
+++ b/spec/frontend/ci/pipeline_details/graph/components/linked_pipelines_mock_data.js
diff --git a/spec/frontend/pipelines/graph_shared/links_inner_spec.js b/spec/frontend/ci/pipeline_details/graph/components/links_inner_spec.js
index b4ffd2658fe..655b2ac74ac 100644
--- a/spec/frontend/pipelines/graph_shared/links_inner_spec.js
+++ b/spec/frontend/ci/pipeline_details/graph/components/links_inner_spec.js
@@ -1,8 +1,8 @@
import { shallowMount } from '@vue/test-utils';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import LinksInner from '~/pipelines/components/graph_shared/links_inner.vue';
-import { parseData } from '~/pipelines/components/parsing_utils';
-import { createJobsHash } from '~/pipelines/utils';
+import LinksInner from '~/ci/pipeline_details/graph/components/links_inner.vue';
+import { parseData } from '~/ci/pipeline_details/utils/parsing_utils';
+import { createJobsHash } from '~/ci/pipeline_details/utils';
import {
jobRect,
largePipelineData,
@@ -11,7 +11,7 @@ import {
pipelineDataWithNoNeeds,
rootRect,
sameStageNeeds,
-} from '../pipeline_graph/mock_data';
+} from 'jest/ci/pipeline_editor/components/graph/mock_data';
describe('Links Inner component', () => {
const containerId = 'pipeline-graph-container';
diff --git a/spec/frontend/pipelines/graph/stage_column_component_spec.js b/spec/frontend/ci/pipeline_details/graph/components/stage_column_component_spec.js
index d4d7f1618c5..cc79205ec41 100644
--- a/spec/frontend/pipelines/graph/stage_column_component_spec.js
+++ b/spec/frontend/ci/pipeline_details/graph/components/stage_column_component_spec.js
@@ -1,7 +1,7 @@
import { mount, shallowMount } from '@vue/test-utils';
-import JobItem from '~/pipelines/components/graph/job_item.vue';
-import StageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue';
-import ActionComponent from '~/pipelines/components/jobs_shared/action_component.vue';
+import JobItem from '~/ci/pipeline_details/graph/components/job_item.vue';
+import StageColumnComponent from '~/ci/pipeline_details/graph/components/stage_column_component.vue';
+import ActionComponent from '~/ci/common/private/job_action_component.vue';
const mockJob = {
id: 4250,
diff --git a/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js b/spec/frontend/ci/pipeline_details/graph/graph_component_wrapper_spec.js
index 7b59d82ae6f..372ed2a4e1c 100644
--- a/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js
+++ b/spec/frontend/ci/pipeline_details/graph/graph_component_wrapper_spec.js
@@ -23,15 +23,15 @@ import {
LAYER_VIEW,
STAGE_VIEW,
VIEW_TYPE_KEY,
-} from '~/pipelines/components/graph/constants';
-import PipelineGraph from '~/pipelines/components/graph/graph_component.vue';
-import PipelineGraphWrapper from '~/pipelines/components/graph/graph_component_wrapper.vue';
-import GraphViewSelector from '~/pipelines/components/graph/graph_view_selector.vue';
-import * as Api from '~/pipelines/components/graph_shared/api';
-import LinksLayer from '~/pipelines/components/graph_shared/links_layer.vue';
-import * as parsingUtils from '~/pipelines/components/parsing_utils';
-import getPipelineHeaderData from '~/pipelines/graphql/queries/get_pipeline_header_data.query.graphql';
-import * as sentryUtils from '~/pipelines/utils';
+} from '~/ci/pipeline_details/graph/constants';
+import PipelineGraph from '~/ci/pipeline_details/graph/components/graph_component.vue';
+import PipelineGraphWrapper from '~/ci/pipeline_details/graph/graph_component_wrapper.vue';
+import GraphViewSelector from '~/ci/pipeline_details/graph/components/graph_view_selector.vue';
+import * as Api from '~/ci/pipeline_details/graph/api_utils';
+import LinksLayer from '~/ci/common/private/job_links_layer.vue';
+import * as parsingUtils from '~/ci/pipeline_details/utils/parsing_utils';
+import getPipelineHeaderData from '~/ci/pipeline_details/header/graphql/queries/get_pipeline_header_data.query.graphql';
+import * as sentryUtils from '~/ci/utils';
import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
import { mockRunningPipelineHeaderData } from '../mock_data';
import {
diff --git a/spec/frontend/pipelines/graph/mock_data.js b/spec/frontend/ci/pipeline_details/graph/mock_data.js
index 8d06d6931ed..a880a9cf4b0 100644
--- a/spec/frontend/pipelines/graph/mock_data.js
+++ b/spec/frontend/ci/pipeline_details/graph/mock_data.js
@@ -1,10 +1,6 @@
import mockPipelineResponse from 'test_fixtures/pipelines/pipeline_details.json';
-import { unwrapPipelineData } from '~/pipelines/components/graph/utils';
-import {
- BUILD_KIND,
- BRIDGE_KIND,
- RETRY_ACTION_TITLE,
-} from '~/pipelines/components/graph/constants';
+import { unwrapPipelineData } from '~/ci/pipeline_details/graph/utils';
+import { BUILD_KIND, BRIDGE_KIND, RETRY_ACTION_TITLE } from '~/ci/pipeline_details/graph/constants';
// We mock this instead of using fixtures for performance reason.
const mockPipelineResponseCopy = JSON.parse(JSON.stringify(mockPipelineResponse));
diff --git a/spec/frontend/pipelines/pipeline_details_header_spec.js b/spec/frontend/ci/pipeline_details/header/pipeline_details_header_spec.js
index 5c75020afad..6e13658a773 100644
--- a/spec/frontend/pipelines/pipeline_details_header_spec.js
+++ b/spec/frontend/ci/pipeline_details/header/pipeline_details_header_spec.js
@@ -5,13 +5,13 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
-import PipelineDetailsHeader from '~/pipelines/components/pipeline_details_header.vue';
-import { BUTTON_TOOLTIP_RETRY, BUTTON_TOOLTIP_CANCEL } from '~/pipelines/constants';
+import PipelineDetailsHeader from '~/ci/pipeline_details/header/pipeline_details_header.vue';
+import { BUTTON_TOOLTIP_RETRY, BUTTON_TOOLTIP_CANCEL } from '~/ci/constants';
import CiBadgeLink from '~/vue_shared/components/ci_badge_link.vue';
-import cancelPipelineMutation from '~/pipelines/graphql/mutations/cancel_pipeline.mutation.graphql';
-import deletePipelineMutation from '~/pipelines/graphql/mutations/delete_pipeline.mutation.graphql';
-import retryPipelineMutation from '~/pipelines/graphql/mutations/retry_pipeline.mutation.graphql';
-import getPipelineDetailsQuery from '~/pipelines/graphql/queries/get_pipeline_header_data.query.graphql';
+import cancelPipelineMutation from '~/ci/pipeline_details/graphql/mutations/cancel_pipeline.mutation.graphql';
+import deletePipelineMutation from '~/ci/pipeline_details/graphql/mutations/delete_pipeline.mutation.graphql';
+import retryPipelineMutation from '~/ci/pipeline_details/graphql/mutations/retry_pipeline.mutation.graphql';
+import getPipelineDetailsQuery from '~/ci/pipeline_details/header/graphql/queries/get_pipeline_header_data.query.graphql';
import {
pipelineHeaderSuccess,
pipelineHeaderRunning,
@@ -23,7 +23,7 @@ import {
pipelineRetryMutationResponseFailed,
pipelineCancelMutationResponseFailed,
pipelineDeleteMutationResponseFailed,
-} from './mock_data';
+} from '../mock_data';
Vue.use(VueApollo);
diff --git a/spec/frontend/pipelines/components/jobs/failed_jobs_table_spec.js b/spec/frontend/ci/pipeline_details/jobs/components/failed_jobs_table_spec.js
index 99a178120cc..7110a35ad4e 100644
--- a/spec/frontend/pipelines/components/jobs/failed_jobs_table_spec.js
+++ b/spec/frontend/ci/pipeline_details/jobs/components/failed_jobs_table_spec.js
@@ -7,9 +7,9 @@ import waitForPromises from 'helpers/wait_for_promises';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import { createAlert } from '~/alert';
import { redirectTo } from '~/lib/utils/url_utility'; // eslint-disable-line import/no-deprecated
-import FailedJobsTable from '~/pipelines/components/jobs/failed_jobs_table.vue';
-import RetryFailedJobMutation from '~/pipelines/graphql/mutations/retry_failed_job.mutation.graphql';
-import { TRACKING_CATEGORIES } from '~/pipelines/constants';
+import FailedJobsTable from '~/ci/pipeline_details/jobs/components/failed_jobs_table.vue';
+import RetryFailedJobMutation from '~/ci/pipeline_details/jobs/graphql/mutations/retry_failed_job.mutation.graphql';
+import { TRACKING_CATEGORIES } from '~/ci/constants';
import {
successRetryMutationResponse,
failedRetryMutationResponse,
diff --git a/spec/frontend/pipelines/components/jobs/failed_jobs_app_spec.js b/spec/frontend/ci/pipeline_details/jobs/failed_jobs_app_spec.js
index 6a2453704db..17b43aa422b 100644
--- a/spec/frontend/pipelines/components/jobs/failed_jobs_app_spec.js
+++ b/spec/frontend/ci/pipeline_details/jobs/failed_jobs_app_spec.js
@@ -5,10 +5,10 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
-import FailedJobsApp from '~/pipelines/components/jobs/failed_jobs_app.vue';
-import FailedJobsTable from '~/pipelines/components/jobs/failed_jobs_table.vue';
-import GetFailedJobsQuery from '~/pipelines/graphql/queries/get_failed_jobs.query.graphql';
-import { mockFailedJobsQueryResponse } from '../../mock_data';
+import FailedJobsApp from '~/ci/pipeline_details/jobs/failed_jobs_app.vue';
+import FailedJobsTable from '~/ci/pipeline_details/jobs/components/failed_jobs_table.vue';
+import GetFailedJobsQuery from '~/ci/pipeline_details/jobs/graphql/queries/get_failed_jobs.query.graphql';
+import { mockFailedJobsQueryResponse } from 'jest/ci/pipeline_details/mock_data';
Vue.use(VueApollo);
diff --git a/spec/frontend/pipelines/components/jobs/jobs_app_spec.js b/spec/frontend/ci/pipeline_details/jobs/jobs_app_spec.js
index 39475788fe2..4a3a901502e 100644
--- a/spec/frontend/pipelines/components/jobs/jobs_app_spec.js
+++ b/spec/frontend/ci/pipeline_details/jobs/jobs_app_spec.js
@@ -5,10 +5,10 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
-import JobsApp from '~/pipelines/components/jobs/jobs_app.vue';
-import JobsTable from '~/jobs/components/table/jobs_table.vue';
-import getPipelineJobsQuery from '~/pipelines/graphql/queries/get_pipeline_jobs.query.graphql';
-import { mockPipelineJobsQueryResponse } from '../../mock_data';
+import JobsApp from '~/ci/pipeline_details/jobs/jobs_app.vue';
+import JobsTable from '~/ci/jobs_page/components/jobs_table.vue';
+import getPipelineJobsQuery from '~/ci/pipeline_details/jobs/graphql/queries/get_pipeline_jobs.query.graphql';
+import { mockPipelineJobsQueryResponse } from '../mock_data';
Vue.use(VueApollo);
diff --git a/spec/frontend/pipelines/linked_pipelines_mock.json b/spec/frontend/ci/pipeline_details/linked_pipelines_mock.json
index a68283032d2..a68283032d2 100644
--- a/spec/frontend/pipelines/linked_pipelines_mock.json
+++ b/spec/frontend/ci/pipeline_details/linked_pipelines_mock.json
diff --git a/spec/frontend/pipelines/mock_data.js b/spec/frontend/ci/pipeline_details/mock_data.js
index 673db3b5178..e32d0a0df47 100644
--- a/spec/frontend/pipelines/mock_data.js
+++ b/spec/frontend/ci/pipeline_details/mock_data.js
@@ -197,108 +197,6 @@ export const mockRunningPipelineHeaderData = {
},
};
-export const stageReply = {
- name: 'deploy',
- title: 'deploy: running',
- latest_statuses: [
- {
- id: 928,
- name: 'stop staging',
- started: false,
- build_path: '/twitter/flight/-/jobs/928',
- cancel_path: '/twitter/flight/-/jobs/928/cancel',
- playable: false,
- created_at: '2018-04-04T20:02:02.728Z',
- updated_at: '2018-04-04T20:02:02.766Z',
- status: {
- icon: 'status_pending',
- text: 'pending',
- label: 'pending',
- group: 'pending',
- tooltip: 'pending',
- has_details: true,
- details_path: '/twitter/flight/-/jobs/928',
- favicon:
- '/assets/ci_favicons/dev/favicon_status_pending-db32e1faf94b9f89530ac519790920d1f18ea8f6af6cd2e0a26cd6840cacf101.ico',
- action: {
- icon: 'cancel',
- title: 'Cancel',
- path: '/twitter/flight/-/jobs/928/cancel',
- method: 'post',
- },
- },
- },
- {
- id: 926,
- name: 'production',
- started: false,
- build_path: '/twitter/flight/-/jobs/926',
- retry_path: '/twitter/flight/-/jobs/926/retry',
- play_path: '/twitter/flight/-/jobs/926/play',
- playable: true,
- created_at: '2018-04-04T20:00:57.202Z',
- updated_at: '2018-04-04T20:11:13.110Z',
- status: {
- icon: 'status_canceled',
- text: 'canceled',
- label: 'manual play action',
- group: 'canceled',
- tooltip: 'canceled',
- has_details: true,
- details_path: '/twitter/flight/-/jobs/926',
- favicon:
- '/assets/ci_favicons/dev/favicon_status_canceled-5491840b9b6feafba0bc599cbd49ee9580321dc809683856cf1b0d51532b1af6.ico',
- action: {
- icon: 'play',
- title: 'Play',
- path: '/twitter/flight/-/jobs/926/play',
- method: 'post',
- },
- },
- },
- {
- id: 217,
- name: 'staging',
- started: '2018-03-07T08:41:46.234Z',
- build_path: '/twitter/flight/-/jobs/217',
- retry_path: '/twitter/flight/-/jobs/217/retry',
- playable: false,
- created_at: '2018-03-07T14:41:58.093Z',
- updated_at: '2018-03-07T14:41:58.093Z',
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- tooltip: 'passed',
- has_details: true,
- details_path: '/twitter/flight/-/jobs/217',
- favicon:
- '/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/twitter/flight/-/jobs/217/retry',
- method: 'post',
- },
- },
- },
- ],
- status: {
- icon: 'status_running',
- text: 'running',
- label: 'running',
- group: 'running',
- tooltip: 'running',
- has_details: true,
- details_path: '/twitter/flight/pipelines/13#deploy',
- favicon:
- '/assets/ci_favicons/dev/favicon_status_running-c3ad2fc53ea6079c174e5b6c1351ff349e99ec3af5a5622fb77b0fe53ea279c1.ico',
- },
- path: '/twitter/flight/pipelines/13#deploy',
- dropdown_path: '/twitter/flight/pipelines/13/stage.json?stage=deploy',
-};
-
export const users = [
{
id: 1,
diff --git a/spec/frontend/pipelines/pipeline_tabs_spec.js b/spec/frontend/ci/pipeline_details/pipeline_tabs_spec.js
index 8d1cd98e981..8d67cdef05c 100644
--- a/spec/frontend/pipelines/pipeline_tabs_spec.js
+++ b/spec/frontend/ci/pipeline_details/pipeline_tabs_spec.js
@@ -1,4 +1,4 @@
-import { createAppOptions } from '~/pipelines/pipeline_tabs';
+import { createAppOptions } from '~/ci/pipeline_details/pipeline_tabs';
jest.mock('~/lib/utils/url_utility', () => ({
removeParams: () => 'gitlab.com',
@@ -6,11 +6,11 @@ jest.mock('~/lib/utils/url_utility', () => ({
setUrlFragment: () => {},
}));
-jest.mock('~/pipelines/utils', () => ({
+jest.mock('~/ci/pipeline_details/utils', () => ({
getPipelineDefaultTab: () => '',
}));
-describe('~/pipelines/pipeline_tabs.js', () => {
+describe('~/ci/pipeline_details/pipeline_tabs.js', () => {
describe('createAppOptions', () => {
const SELECTOR = 'SELECTOR';
diff --git a/spec/frontend/pipelines/pipelines_store_spec.js b/spec/frontend/ci/pipeline_details/pipelines_store_spec.js
index f374ecd0c0a..43e605f4306 100644
--- a/spec/frontend/pipelines/pipelines_store_spec.js
+++ b/spec/frontend/ci/pipeline_details/pipelines_store_spec.js
@@ -1,4 +1,4 @@
-import PipelineStore from '~/pipelines/stores/pipelines_store';
+import PipelineStore from '~/ci/pipeline_details/stores/pipelines_store';
describe('Pipelines Store', () => {
let store;
diff --git a/spec/frontend/pipelines/components/pipeline_tabs_spec.js b/spec/frontend/ci/pipeline_details/tabs/pipeline_tabs_spec.js
index 0951e1ffb46..0f1835b7ec8 100644
--- a/spec/frontend/pipelines/components/pipeline_tabs_spec.js
+++ b/spec/frontend/ci/pipeline_details/tabs/pipeline_tabs_spec.js
@@ -1,8 +1,8 @@
import { GlTab } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
-import PipelineTabs from '~/pipelines/components/pipeline_tabs.vue';
-import { TRACKING_CATEGORIES } from '~/pipelines/constants';
+import PipelineTabs from '~/ci/pipeline_details/tabs/pipeline_tabs.vue';
+import { TRACKING_CATEGORIES } from '~/ci/constants';
describe('The Pipeline Tabs', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/test_reports/empty_state_spec.js b/spec/frontend/ci/pipeline_details/test_reports/empty_state_spec.js
index ee0f8a90a11..ed1d6bc7d37 100644
--- a/spec/frontend/pipelines/test_reports/empty_state_spec.js
+++ b/spec/frontend/ci/pipeline_details/test_reports/empty_state_spec.js
@@ -1,6 +1,6 @@
import { GlEmptyState } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import EmptyState, { i18n } from '~/pipelines/components/test_reports/empty_state.vue';
+import EmptyState, { i18n } from '~/ci/pipeline_details/test_reports/empty_state.vue';
describe('Test report empty state', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/test_reports/mock_data.js b/spec/frontend/ci/pipeline_details/test_reports/mock_data.js
index c3ca1429842..7c9f9287c86 100644
--- a/spec/frontend/pipelines/test_reports/mock_data.js
+++ b/spec/frontend/ci/pipeline_details/test_reports/mock_data.js
@@ -1,4 +1,4 @@
-import { TestStatus } from '~/pipelines/constants';
+import { TestStatus } from '~/ci/pipeline_details/constants';
export default [
{
diff --git a/spec/frontend/pipelines/test_reports/stores/actions_spec.js b/spec/frontend/ci/pipeline_details/test_reports/stores/actions_spec.js
index e05d2151f0a..6636a7f1ed6 100644
--- a/spec/frontend/pipelines/test_reports/stores/actions_spec.js
+++ b/spec/frontend/ci/pipeline_details/test_reports/stores/actions_spec.js
@@ -5,8 +5,8 @@ import testAction from 'helpers/vuex_action_helper';
import { createAlert } from '~/alert';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
-import * as actions from '~/pipelines/stores/test_reports/actions';
-import * as types from '~/pipelines/stores/test_reports/mutation_types';
+import * as actions from '~/ci/pipeline_details/stores/test_reports/actions';
+import * as types from '~/ci/pipeline_details/stores/test_reports/mutation_types';
jest.mock('~/alert');
diff --git a/spec/frontend/pipelines/test_reports/stores/getters_spec.js b/spec/frontend/ci/pipeline_details/test_reports/stores/getters_spec.js
index 70e3a01dbf1..e52e9a07ae0 100644
--- a/spec/frontend/pipelines/test_reports/stores/getters_spec.js
+++ b/spec/frontend/ci/pipeline_details/test_reports/stores/getters_spec.js
@@ -1,10 +1,10 @@
import testReports from 'test_fixtures/pipelines/test_report.json';
-import * as getters from '~/pipelines/stores/test_reports/getters';
+import * as getters from '~/ci/pipeline_details/stores/test_reports/getters';
import {
iconForTestStatus,
formatFilePath,
formattedTime,
-} from '~/pipelines/stores/test_reports/utils';
+} from '~/ci/pipeline_details/stores/test_reports/utils';
describe('Getters TestReports Store', () => {
let state;
diff --git a/spec/frontend/pipelines/test_reports/stores/mutations_spec.js b/spec/frontend/ci/pipeline_details/test_reports/stores/mutations_spec.js
index 685ac6ea3e5..d58515dcc6d 100644
--- a/spec/frontend/pipelines/test_reports/stores/mutations_spec.js
+++ b/spec/frontend/ci/pipeline_details/test_reports/stores/mutations_spec.js
@@ -1,6 +1,6 @@
import testReports from 'test_fixtures/pipelines/test_report.json';
-import * as types from '~/pipelines/stores/test_reports/mutation_types';
-import mutations from '~/pipelines/stores/test_reports/mutations';
+import * as types from '~/ci/pipeline_details/stores/test_reports/mutation_types';
+import mutations from '~/ci/pipeline_details/stores/test_reports/mutations';
import { createAlert } from '~/alert';
jest.mock('~/alert');
diff --git a/spec/frontend/pipelines/test_reports/stores/utils_spec.js b/spec/frontend/ci/pipeline_details/test_reports/stores/utils_spec.js
index 703fe69026c..c0ffc2b34fb 100644
--- a/spec/frontend/pipelines/test_reports/stores/utils_spec.js
+++ b/spec/frontend/ci/pipeline_details/test_reports/stores/utils_spec.js
@@ -1,4 +1,4 @@
-import { formatFilePath, formattedTime } from '~/pipelines/stores/test_reports/utils';
+import { formatFilePath, formattedTime } from '~/ci/pipeline_details/stores/test_reports/utils';
describe('Test reports utils', () => {
describe('formatFilePath', () => {
diff --git a/spec/frontend/pipelines/test_reports/test_case_details_spec.js b/spec/frontend/ci/pipeline_details/test_reports/test_case_details_spec.js
index f8663408817..0f651b9d456 100644
--- a/spec/frontend/pipelines/test_reports/test_case_details_spec.js
+++ b/spec/frontend/ci/pipeline_details/test_reports/test_case_details_spec.js
@@ -1,7 +1,7 @@
import { GlModal, GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import TestCaseDetails from '~/pipelines/components/test_reports/test_case_details.vue';
+import TestCaseDetails from '~/ci/pipeline_details/test_reports/test_case_details.vue';
import CodeBlock from '~/vue_shared/components/code_block.vue';
import ModalCopyButton from '~/vue_shared/components/modal_copy_button.vue';
diff --git a/spec/frontend/pipelines/test_reports/test_reports_spec.js b/spec/frontend/ci/pipeline_details/test_reports/test_reports_spec.js
index de16f496eff..8ff060026da 100644
--- a/spec/frontend/pipelines/test_reports/test_reports_spec.js
+++ b/spec/frontend/ci/pipeline_details/test_reports/test_reports_spec.js
@@ -5,11 +5,11 @@ import Vue from 'vue';
import Vuex from 'vuex';
import testReports from 'test_fixtures/pipelines/test_report.json';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import EmptyState from '~/pipelines/components/test_reports/empty_state.vue';
-import TestReports from '~/pipelines/components/test_reports/test_reports.vue';
-import TestSummary from '~/pipelines/components/test_reports/test_summary.vue';
-import TestSummaryTable from '~/pipelines/components/test_reports/test_summary_table.vue';
-import * as getters from '~/pipelines/stores/test_reports/getters';
+import EmptyState from '~/ci/pipeline_details/test_reports/empty_state.vue';
+import TestReports from '~/ci/pipeline_details/test_reports/test_reports.vue';
+import TestSummary from '~/ci/pipeline_details/test_reports/test_summary.vue';
+import TestSummaryTable from '~/ci/pipeline_details/test_reports/test_summary_table.vue';
+import * as getters from '~/ci/pipeline_details/stores/test_reports/getters';
Vue.use(Vuex);
diff --git a/spec/frontend/pipelines/test_reports/test_suite_table_spec.js b/spec/frontend/ci/pipeline_details/test_reports/test_suite_table_spec.js
index 08b430fa703..5bdea6bbcbf 100644
--- a/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
+++ b/spec/frontend/ci/pipeline_details/test_reports/test_suite_table_spec.js
@@ -4,11 +4,11 @@ import Vue from 'vue';
import Vuex from 'vuex';
import testReports from 'test_fixtures/pipelines/test_report.json';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import SuiteTable, { i18n } from '~/pipelines/components/test_reports/test_suite_table.vue';
-import { TestStatus } from '~/pipelines/constants';
-import * as getters from '~/pipelines/stores/test_reports/getters';
-import { formatFilePath } from '~/pipelines/stores/test_reports/utils';
-import { ARTIFACTS_EXPIRED_ERROR_MESSAGE } from '~/pipelines/stores/test_reports/constants';
+import SuiteTable, { i18n } from '~/ci/pipeline_details/test_reports/test_suite_table.vue';
+import { TestStatus } from '~/ci/pipeline_details/constants';
+import * as getters from '~/ci/pipeline_details/stores/test_reports/getters';
+import { formatFilePath } from '~/ci/pipeline_details/stores/test_reports/utils';
+import { ARTIFACTS_EXPIRED_ERROR_MESSAGE } from '~/ci/pipeline_details/stores/test_reports/constants';
import skippedTestCases from './mock_data';
Vue.use(Vuex);
diff --git a/spec/frontend/pipelines/test_reports/test_summary_spec.js b/spec/frontend/ci/pipeline_details/test_reports/test_summary_spec.js
index 7eed6671fb9..f9182d52c8a 100644
--- a/spec/frontend/pipelines/test_reports/test_summary_spec.js
+++ b/spec/frontend/ci/pipeline_details/test_reports/test_summary_spec.js
@@ -1,7 +1,7 @@
import { mount } from '@vue/test-utils';
import testReports from 'test_fixtures/pipelines/test_report.json';
-import Summary from '~/pipelines/components/test_reports/test_summary.vue';
-import { formattedTime } from '~/pipelines/stores/test_reports/utils';
+import Summary from '~/ci/pipeline_details/test_reports/test_summary.vue';
+import { formattedTime } from '~/ci/pipeline_details/stores/test_reports/utils';
describe('Test reports summary', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/test_reports/test_summary_table_spec.js b/spec/frontend/ci/pipeline_details/test_reports/test_summary_table_spec.js
index a45946d5a03..bb62fbcb32c 100644
--- a/spec/frontend/pipelines/test_reports/test_summary_table_spec.js
+++ b/spec/frontend/ci/pipeline_details/test_reports/test_summary_table_spec.js
@@ -3,8 +3,8 @@ import Vue from 'vue';
// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import testReports from 'test_fixtures/pipelines/test_report.json';
-import SummaryTable from '~/pipelines/components/test_reports/test_summary_table.vue';
-import * as getters from '~/pipelines/stores/test_reports/getters';
+import SummaryTable from '~/ci/pipeline_details/test_reports/test_summary_table.vue';
+import * as getters from '~/ci/pipeline_details/stores/test_reports/getters';
Vue.use(Vuex);
diff --git a/spec/frontend/pipelines/pipeline_graph/utils_spec.js b/spec/frontend/ci/pipeline_details/utils/index_spec.js
index 96b18fcf96f..61230cb52e6 100644
--- a/spec/frontend/pipelines/pipeline_graph/utils_spec.js
+++ b/spec/frontend/ci/pipeline_details/utils/index_spec.js
@@ -1,5 +1,9 @@
-import { createJobsHash, generateJobNeedsDict, getPipelineDefaultTab } from '~/pipelines/utils';
-import { validPipelineTabNames, pipelineTabName } from '~/pipelines/constants';
+import {
+ createJobsHash,
+ generateJobNeedsDict,
+ getPipelineDefaultTab,
+} from '~/ci/pipeline_details/utils';
+import { validPipelineTabNames, pipelineTabName } from '~/ci/pipeline_details/constants';
describe('utils functions', () => {
const jobName1 = 'build_1';
diff --git a/spec/frontend/pipelines/utils_spec.js b/spec/frontend/ci/pipeline_details/utils/parsing_utils_spec.js
index 286d79edc6c..9390f076d3d 100644
--- a/spec/frontend/pipelines/utils_spec.js
+++ b/spec/frontend/ci/pipeline_details/utils/parsing_utils_spec.js
@@ -1,5 +1,5 @@
import mockPipelineResponse from 'test_fixtures/pipelines/pipeline_details.json';
-import { createSankey } from '~/pipelines/components/dag/drawing_utils';
+import { createSankey } from '~/ci/pipeline_details/dag/utils/drawing_utils';
import {
makeLinksFromNodes,
filterByAncestors,
@@ -9,13 +9,13 @@ import {
parseData,
removeOrphanNodes,
getMaxNodes,
-} from '~/pipelines/components/parsing_utils';
-import { createNodeDict } from '~/pipelines/utils';
+} from '~/ci/pipeline_details/utils/parsing_utils';
+import { createNodeDict } from '~/ci/pipeline_details/utils';
-import { mockDownstreamPipelinesRest } from '../vue_merge_request_widget/mock_data';
-import { mockDownstreamPipelinesGraphql } from '../commit/mock_data';
-import { mockParsedGraphQLNodes, missingJob } from './components/dag/mock_data';
-import { generateResponse } from './graph/mock_data';
+import { mockDownstreamPipelinesRest } from '../../../vue_merge_request_widget/mock_data';
+import { mockDownstreamPipelinesGraphql } from '../../../commit/mock_data';
+import { mockParsedGraphQLNodes, missingJob } from '../dag/mock_data';
+import { generateResponse } from '../graph/mock_data';
describe('DAG visualization parsing utilities', () => {
const nodeDict = createNodeDict(mockParsedGraphQLNodes);
diff --git a/spec/frontend/pipelines/unwrapping_utils_spec.js b/spec/frontend/ci/pipeline_details/utils/unwrapping_utils_spec.js
index a6ce7d4049f..99ee2eff1e4 100644
--- a/spec/frontend/pipelines/unwrapping_utils_spec.js
+++ b/spec/frontend/ci/pipeline_details/utils/unwrapping_utils_spec.js
@@ -2,7 +2,7 @@ import {
unwrapGroups,
unwrapNodesWithName,
unwrapStagesWithNeeds,
-} from '~/pipelines/components/unwrapping_utils';
+} from '~/ci/pipeline_details/utils/unwrapping_utils';
const groupsArray = [
{
diff --git a/spec/frontend/pipelines/pipeline_graph/mock_data.js b/spec/frontend/ci/pipeline_editor/components/graph/mock_data.js
index db77e0a0573..db77e0a0573 100644
--- a/spec/frontend/pipelines/pipeline_graph/mock_data.js
+++ b/spec/frontend/ci/pipeline_editor/components/graph/mock_data.js
diff --git a/spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js b/spec/frontend/ci/pipeline_editor/components/graph/pipeline_graph_spec.js
index 123f2e011c3..95edfb01cf0 100644
--- a/spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/graph/pipeline_graph_spec.js
@@ -2,11 +2,11 @@ import { GlAlert } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { setHTMLFixture } from 'helpers/fixtures';
import { CI_CONFIG_STATUS_VALID } from '~/ci/pipeline_editor/constants';
-import LinksInner from '~/pipelines/components/graph_shared/links_inner.vue';
-import LinksLayer from '~/pipelines/components/graph_shared/links_layer.vue';
-import JobPill from '~/pipelines/components/pipeline_graph/job_pill.vue';
-import PipelineGraph from '~/pipelines/components/pipeline_graph/pipeline_graph.vue';
-import StageName from '~/pipelines/components/pipeline_graph/stage_name.vue';
+import LinksInner from '~/ci/pipeline_details/graph/components/links_inner.vue';
+import LinksLayer from '~/ci/common/private/job_links_layer.vue';
+import JobPill from '~/ci/pipeline_editor/components/graph/job_pill.vue';
+import PipelineGraph from '~/ci/pipeline_editor/components/graph/pipeline_graph.vue';
+import StageName from '~/ci/pipeline_editor/components/graph/stage_name.vue';
import { pipelineData, singleStageData } from './mock_data';
describe('pipeline graph component', () => {
diff --git a/spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_header_spec.js b/spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_header_spec.js
index a651664851e..655bfe538c6 100644
--- a/spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_header_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_header_spec.js
@@ -1,4 +1,5 @@
import { shallowMount } from '@vue/test-utils';
+import { GlCard } from '@gitlab/ui';
import PipelineEditorHeader from '~/ci/pipeline_editor/components/header/pipeline_editor_header.vue';
import PipelineStatus from '~/ci/pipeline_editor/components/header/pipeline_status.vue';
import ValidationSegment from '~/ci/pipeline_editor/components/header/validation_segment.vue';
@@ -20,6 +21,9 @@ describe('Pipeline editor header', () => {
isNewCiConfigFile: false,
...props,
},
+ stubs: {
+ GlCard,
+ },
});
};
diff --git a/spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_mini_graph_spec.js b/spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_mini_graph_spec.js
index f5e0b65d615..4ec1dd4b605 100644
--- a/spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_mini_graph_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_mini_graph_spec.js
@@ -4,8 +4,8 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import PipelineEditorMiniGraph from '~/ci/pipeline_editor/components/header/pipeline_editor_mini_graph.vue';
-import LegacyPipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/legacy_pipeline_mini_graph.vue';
-import getLinkedPipelinesQuery from '~/pipelines/graphql/queries/get_linked_pipelines.query.graphql';
+import LegacyPipelineMiniGraph from '~/ci/pipeline_mini_graph/legacy_pipeline_mini_graph.vue';
+import getLinkedPipelinesQuery from '~/ci/pipeline_details/graphql/queries/get_linked_pipelines.query.graphql';
import { PIPELINE_FAILURE } from '~/ci/pipeline_editor/constants';
import { mockLinkedPipelines, mockProjectFullPath, mockProjectPipeline } from '../../mock_data';
diff --git a/spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js b/spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js
index 3bbe14adb88..1a2ed60a6f4 100644
--- a/spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js
@@ -6,8 +6,9 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import PipelineStatus, { i18n } from '~/ci/pipeline_editor/components/header/pipeline_status.vue';
import getPipelineQuery from '~/ci/pipeline_editor/graphql/queries/pipeline.query.graphql';
-import PipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/pipeline_mini_graph.vue';
+import PipelineMiniGraph from '~/ci/pipeline_mini_graph/pipeline_mini_graph.vue';
import PipelineEditorMiniGraph from '~/ci/pipeline_editor/components/header/pipeline_editor_mini_graph.vue';
+import getPipelineEtag from '~/ci/pipeline_editor/graphql/queries/client/pipeline_etag.query.graphql';
import { mockCommitSha, mockProjectPipeline, mockProjectFullPath } from '../../mock_data';
Vue.use(VueApollo);
@@ -21,6 +22,16 @@ describe('Pipeline Status', () => {
const handlers = [[getPipelineQuery, mockPipelineQuery]];
mockApollo = createMockApollo(handlers);
+ mockApollo.clients.defaultClient.cache.writeQuery({
+ query: getPipelineEtag,
+ data: {
+ etags: {
+ __typename: 'EtagValues',
+ pipeline: 'pipelines/1',
+ },
+ },
+ });
+
wrapper = shallowMount(PipelineStatus, {
apolloProvider: mockApollo,
propsData: {
diff --git a/spec/frontend/ci/pipeline_editor/components/pipeline_editor_tabs_spec.js b/spec/frontend/ci/pipeline_editor/components/pipeline_editor_tabs_spec.js
index 77252a5c0b6..69e91f11309 100644
--- a/spec/frontend/ci/pipeline_editor/components/pipeline_editor_tabs_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/pipeline_editor_tabs_spec.js
@@ -19,7 +19,7 @@ import {
VALIDATE_TAB,
VALIDATE_TAB_BADGE_DISMISSED_KEY,
} from '~/ci/pipeline_editor/constants';
-import PipelineGraph from '~/pipelines/components/pipeline_graph/pipeline_graph.vue';
+import PipelineGraph from '~/ci/pipeline_editor/components/graph/pipeline_graph.vue';
import getBlobContent from '~/ci/pipeline_editor/graphql/queries/blob_content.query.graphql';
import {
mockBlobContentQueryResponse,
diff --git a/spec/frontend/ci/pipeline_editor/mock_data.js b/spec/frontend/ci/pipeline_editor/mock_data.js
index 007abde939f..e08c35f1555 100644
--- a/spec/frontend/ci/pipeline_editor/mock_data.js
+++ b/spec/frontend/ci/pipeline_editor/mock_data.js
@@ -1,5 +1,5 @@
import { CI_CONFIG_STATUS_INVALID, CI_CONFIG_STATUS_VALID } from '~/ci/pipeline_editor/constants';
-import { unwrapStagesWithNeeds } from '~/pipelines/components/unwrapping_utils';
+import { unwrapStagesWithNeeds } from '~/ci/pipeline_details/utils/unwrapping_utils';
import { DOCS_URL_IN_EE_DIR } from 'jh_else_ce/lib/utils/url_utility';
export const commonOptions = {
diff --git a/spec/frontend/pipelines/components/pipeline_mini_graph/job_item_spec.js b/spec/frontend/ci/pipeline_mini_graph/job_item_spec.js
index b89f27e5c05..9c14e75caa4 100644
--- a/spec/frontend/pipelines/components/pipeline_mini_graph/job_item_spec.js
+++ b/spec/frontend/ci/pipeline_mini_graph/job_item_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import JobItem from '~/pipelines/components/pipeline_mini_graph/job_item.vue';
+import JobItem from '~/ci/pipeline_mini_graph/job_item.vue';
describe('JobItem', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/components/pipeline_mini_graph/legacy_pipeline_mini_graph_spec.js b/spec/frontend/ci/pipeline_mini_graph/legacy_pipeline_mini_graph_spec.js
index 6661bb079d2..916f3053153 100644
--- a/spec/frontend/pipelines/components/pipeline_mini_graph/legacy_pipeline_mini_graph_spec.js
+++ b/spec/frontend/ci/pipeline_mini_graph/legacy_pipeline_mini_graph_spec.js
@@ -1,7 +1,7 @@
import { mount } from '@vue/test-utils';
import { pipelines } from 'test_fixtures/pipelines/pipelines.json';
-import LegacyPipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/legacy_pipeline_mini_graph.vue';
-import PipelineStages from '~/pipelines/components/pipeline_mini_graph/pipeline_stages.vue';
+import LegacyPipelineMiniGraph from '~/ci/pipeline_mini_graph/legacy_pipeline_mini_graph.vue';
+import PipelineStages from '~/ci/pipeline_mini_graph/pipeline_stages.vue';
import mockLinkedPipelines from './linked_pipelines_mock_data';
const mockStages = pipelines[0].details.stages;
diff --git a/spec/frontend/pipelines/components/pipeline_mini_graph/legacy_pipeline_stage_spec.js b/spec/frontend/ci/pipeline_mini_graph/legacy_pipeline_stage_spec.js
index 3697eaeea1a..30a0b868c5f 100644
--- a/spec/frontend/pipelines/components/pipeline_mini_graph/legacy_pipeline_stage_spec.js
+++ b/spec/frontend/ci/pipeline_mini_graph/legacy_pipeline_stage_spec.js
@@ -5,10 +5,10 @@ import MockAdapter from 'axios-mock-adapter';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
-import LegacyPipelineStage from '~/pipelines/components/pipeline_mini_graph/legacy_pipeline_stage.vue';
-import eventHub from '~/pipelines/event_hub';
+import LegacyPipelineStage from '~/ci/pipeline_mini_graph/legacy_pipeline_stage.vue';
+import eventHub from '~/ci/event_hub';
import waitForPromises from 'helpers/wait_for_promises';
-import { stageReply } from '../../mock_data';
+import { stageReply } from './mock_data';
const dropdownPath = 'path.json';
diff --git a/spec/frontend/pipelines/components/pipeline_mini_graph/linked_pipelines_mini_list_spec.js b/spec/frontend/ci/pipeline_mini_graph/linked_pipelines_mini_list_spec.js
index a4ecb9041c9..0396029cdaf 100644
--- a/spec/frontend/pipelines/components/pipeline_mini_graph/linked_pipelines_mini_list_spec.js
+++ b/spec/frontend/ci/pipeline_mini_graph/linked_pipelines_mini_list_spec.js
@@ -1,7 +1,7 @@
import { mount } from '@vue/test-utils';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
-import LinkedPipelinesMiniList from '~/pipelines/components/pipeline_mini_graph/linked_pipelines_mini_list.vue';
+import LinkedPipelinesMiniList from '~/ci/pipeline_mini_graph/linked_pipelines_mini_list.vue';
import mockData from './linked_pipelines_mock_data';
describe('Linked pipeline mini list', () => {
diff --git a/spec/frontend/pipelines/components/pipeline_mini_graph/linked_pipelines_mock_data.js b/spec/frontend/ci/pipeline_mini_graph/linked_pipelines_mock_data.js
index 117c7f2ae52..117c7f2ae52 100644
--- a/spec/frontend/pipelines/components/pipeline_mini_graph/linked_pipelines_mock_data.js
+++ b/spec/frontend/ci/pipeline_mini_graph/linked_pipelines_mock_data.js
diff --git a/spec/frontend/pipelines/components/pipeline_mini_graph/mock_data.js b/spec/frontend/ci/pipeline_mini_graph/mock_data.js
index 1c13e9eb62b..231375b40dd 100644
--- a/spec/frontend/pipelines/components/pipeline_mini_graph/mock_data.js
+++ b/spec/frontend/ci/pipeline_mini_graph/mock_data.js
@@ -148,3 +148,105 @@ export const mockUpstreamDownstreamQueryResponse = {
export const linkedPipelinesFetchError = 'There was a problem fetching linked pipelines.';
export const stagesFetchError = 'There was a problem fetching the pipeline stages.';
+
+export const stageReply = {
+ name: 'deploy',
+ title: 'deploy: running',
+ latest_statuses: [
+ {
+ id: 928,
+ name: 'stop staging',
+ started: false,
+ build_path: '/twitter/flight/-/jobs/928',
+ cancel_path: '/twitter/flight/-/jobs/928/cancel',
+ playable: false,
+ created_at: '2018-04-04T20:02:02.728Z',
+ updated_at: '2018-04-04T20:02:02.766Z',
+ status: {
+ icon: 'status_pending',
+ text: 'pending',
+ label: 'pending',
+ group: 'pending',
+ tooltip: 'pending',
+ has_details: true,
+ details_path: '/twitter/flight/-/jobs/928',
+ favicon:
+ '/assets/ci_favicons/dev/favicon_status_pending-db32e1faf94b9f89530ac519790920d1f18ea8f6af6cd2e0a26cd6840cacf101.ico',
+ action: {
+ icon: 'cancel',
+ title: 'Cancel',
+ path: '/twitter/flight/-/jobs/928/cancel',
+ method: 'post',
+ },
+ },
+ },
+ {
+ id: 926,
+ name: 'production',
+ started: false,
+ build_path: '/twitter/flight/-/jobs/926',
+ retry_path: '/twitter/flight/-/jobs/926/retry',
+ play_path: '/twitter/flight/-/jobs/926/play',
+ playable: true,
+ created_at: '2018-04-04T20:00:57.202Z',
+ updated_at: '2018-04-04T20:11:13.110Z',
+ status: {
+ icon: 'status_canceled',
+ text: 'canceled',
+ label: 'manual play action',
+ group: 'canceled',
+ tooltip: 'canceled',
+ has_details: true,
+ details_path: '/twitter/flight/-/jobs/926',
+ favicon:
+ '/assets/ci_favicons/dev/favicon_status_canceled-5491840b9b6feafba0bc599cbd49ee9580321dc809683856cf1b0d51532b1af6.ico',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/twitter/flight/-/jobs/926/play',
+ method: 'post',
+ },
+ },
+ },
+ {
+ id: 217,
+ name: 'staging',
+ started: '2018-03-07T08:41:46.234Z',
+ build_path: '/twitter/flight/-/jobs/217',
+ retry_path: '/twitter/flight/-/jobs/217/retry',
+ playable: false,
+ created_at: '2018-03-07T14:41:58.093Z',
+ updated_at: '2018-03-07T14:41:58.093Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/twitter/flight/-/jobs/217',
+ favicon:
+ '/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/twitter/flight/-/jobs/217/retry',
+ method: 'post',
+ },
+ },
+ },
+ ],
+ status: {
+ icon: 'status_running',
+ text: 'running',
+ label: 'running',
+ group: 'running',
+ tooltip: 'running',
+ has_details: true,
+ details_path: '/twitter/flight/pipelines/13#deploy',
+ favicon:
+ '/assets/ci_favicons/dev/favicon_status_running-c3ad2fc53ea6079c174e5b6c1351ff349e99ec3af5a5622fb77b0fe53ea279c1.ico',
+ },
+ path: '/twitter/flight/pipelines/13#deploy',
+ dropdown_path: '/twitter/flight/pipelines/13/stage.json?stage=deploy',
+};
diff --git a/spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_mini_graph_spec.js b/spec/frontend/ci/pipeline_mini_graph/pipeline_mini_graph_spec.js
index b3e157f75f6..6833726a297 100644
--- a/spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_mini_graph_spec.js
+++ b/spec/frontend/ci/pipeline_mini_graph/pipeline_mini_graph_spec.js
@@ -7,10 +7,10 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
-import getLinkedPipelinesQuery from '~/pipelines/graphql/queries/get_linked_pipelines.query.graphql';
-import getPipelineStagesQuery from '~/pipelines/graphql/queries/get_pipeline_stages.query.graphql';
-import LegacyPipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/legacy_pipeline_mini_graph.vue';
-import PipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/pipeline_mini_graph.vue';
+import getLinkedPipelinesQuery from '~/ci/pipeline_details/graphql/queries/get_linked_pipelines.query.graphql';
+import getPipelineStagesQuery from '~/ci/pipeline_mini_graph/graphql/queries/get_pipeline_stages.query.graphql';
+import LegacyPipelineMiniGraph from '~/ci/pipeline_mini_graph/legacy_pipeline_mini_graph.vue';
+import PipelineMiniGraph from '~/ci/pipeline_mini_graph/pipeline_mini_graph.vue';
import * as sharedGraphQlUtils from '~/graphql_shared/utils';
import {
diff --git a/spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_stage_spec.js b/spec/frontend/ci/pipeline_mini_graph/pipeline_stage_spec.js
index 1989aad12b0..96966bcbb84 100644
--- a/spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_stage_spec.js
+++ b/spec/frontend/ci/pipeline_mini_graph/pipeline_stage_spec.js
@@ -4,8 +4,8 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
-import getPipelineStageQuery from '~/pipelines/graphql/queries/get_pipeline_stage.query.graphql';
-import PipelineStage from '~/pipelines/components/pipeline_mini_graph/pipeline_stage.vue';
+import getPipelineStageQuery from '~/ci/pipeline_mini_graph/graphql/queries/get_pipeline_stage.query.graphql';
+import PipelineStage from '~/ci/pipeline_mini_graph/pipeline_stage.vue';
Vue.use(VueApollo);
diff --git a/spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_stages_spec.js b/spec/frontend/ci/pipeline_mini_graph/pipeline_stages_spec.js
index c212087b7e3..bbd39c6fcd9 100644
--- a/spec/frontend/pipelines/components/pipeline_mini_graph/pipeline_stages_spec.js
+++ b/spec/frontend/ci/pipeline_mini_graph/pipeline_stages_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import { pipelines } from 'test_fixtures/pipelines/pipelines.json';
-import LegacyPipelineStage from '~/pipelines/components/pipeline_mini_graph/legacy_pipeline_stage.vue';
-import PipelineStages from '~/pipelines/components/pipeline_mini_graph/pipeline_stages.vue';
+import LegacyPipelineStage from '~/ci/pipeline_mini_graph/legacy_pipeline_stage.vue';
+import PipelineStages from '~/ci/pipeline_mini_graph/pipeline_stages.vue';
const mockStages = pipelines[0].details.stages;
diff --git a/spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_form_spec.js b/spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_form_spec.js
index 79a0cfa0dc9..33cf24c9ed1 100644
--- a/spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_form_spec.js
+++ b/spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_form_spec.js
@@ -97,6 +97,7 @@ describe('Pipeline schedules form', () => {
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
// Variables
const findVariableRows = () => wrapper.findAllByTestId('ci-variable-row');
+ const findVariableTypes = () => wrapper.findAllByTestId('pipeline-form-ci-variable-type');
const findKeyInputs = () => wrapper.findAllByTestId('pipeline-form-ci-variable-key');
const findValueInputs = () => wrapper.findAllByTestId('pipeline-form-ci-variable-value');
const findHiddenValueInputs = () =>
@@ -182,6 +183,16 @@ describe('Pipeline schedules form', () => {
mock.restore();
});
+ it('changes variable type', async () => {
+ expect(findVariableTypes().at(0).props('selected')).toBe('ENV_VAR');
+
+ findVariableTypes().at(0).vm.$emit('select', 'FILE');
+
+ await nextTick();
+
+ expect(findVariableTypes().at(0).props('selected')).toBe('FILE');
+ });
+
it('creates blank variable on input change event', async () => {
expect(findVariableRows()).toHaveLength(1);
diff --git a/spec/frontend/ci/pipeline_schedules/components/table/cells/pipeline_schedule_target_spec.js b/spec/frontend/ci/pipeline_schedules/components/table/cells/pipeline_schedule_target_spec.js
index 5cc3829efbd..70b4c7a5224 100644
--- a/spec/frontend/ci/pipeline_schedules/components/table/cells/pipeline_schedule_target_spec.js
+++ b/spec/frontend/ci/pipeline_schedules/components/table/cells/pipeline_schedule_target_spec.js
@@ -1,5 +1,6 @@
import { GlIcon, GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import { s__ } from '~/locale';
import PipelineScheduleTarget from '~/ci/pipeline_schedules/components/table/cells/pipeline_schedule_target.vue';
import { mockPipelineScheduleNodes } from '../../../mock_data';
@@ -20,18 +21,35 @@ describe('Pipeline schedule target', () => {
const findIcon = () => wrapper.findComponent(GlIcon);
const findLink = () => wrapper.findComponent(GlLink);
+ const findTarget = () => wrapper.findComponent('[data-testid="pipeline-schedule-target"]');
- beforeEach(() => {
- createComponent();
- });
+ describe('with ref', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('displays icon', () => {
+ expect(findIcon().exists()).toBe(true);
+ expect(findIcon().props('name')).toBe('fork');
+ });
- it('displays icon', () => {
- expect(findIcon().exists()).toBe(true);
- expect(findIcon().props('name')).toBe('fork');
+ it('displays ref link', () => {
+ expect(findLink().attributes('href')).toBe(defaultProps.schedule.refPath);
+ expect(findLink().text()).toBe(defaultProps.schedule.refForDisplay);
+ });
});
- it('displays ref link', () => {
- expect(findLink().attributes('href')).toBe(defaultProps.schedule.refPath);
- expect(findLink().text()).toBe(defaultProps.schedule.refForDisplay);
+ describe('without refPath', () => {
+ beforeEach(() => {
+ createComponent({
+ schedule: { ...mockPipelineScheduleNodes[0], refPath: null, refForDisplay: null },
+ });
+ });
+
+ it('displays none for the target', () => {
+ expect(findIcon().exists()).toBe(false);
+ expect(findLink().exists()).toBe(false);
+ expect(findTarget().text()).toBe(s__('PipelineSchedules|None'));
+ });
});
});
diff --git a/spec/frontend/ci/pipeline_schedules/components/take_ownership_modal_legacy_spec.js b/spec/frontend/ci/pipeline_schedules/components/take_ownership_modal_legacy_spec.js
deleted file mode 100644
index e4ff9a0545b..00000000000
--- a/spec/frontend/ci/pipeline_schedules/components/take_ownership_modal_legacy_spec.js
+++ /dev/null
@@ -1,42 +0,0 @@
-import { GlModal } from '@gitlab/ui';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import TakeOwnershipModalLegacy from '~/ci/pipeline_schedules/components/take_ownership_modal_legacy.vue';
-
-describe('Take ownership modal', () => {
- let wrapper;
- const url = `/root/job-log-tester/-/pipeline_schedules/3/take_ownership`;
-
- const createComponent = (props = {}) => {
- wrapper = shallowMountExtended(TakeOwnershipModalLegacy, {
- propsData: {
- ownershipUrl: url,
- ...props,
- },
- });
- };
-
- const findModal = () => wrapper.findComponent(GlModal);
-
- beforeEach(() => {
- createComponent();
- });
-
- it('has a primary action set to a url and a post data-method', () => {
- const actionPrimary = findModal().props('actionPrimary');
-
- expect(actionPrimary.attributes).toEqual(
- expect.objectContaining({
- category: 'primary',
- variant: 'confirm',
- href: url,
- 'data-method': 'post',
- }),
- );
- });
-
- it('shows a take ownership message', () => {
- expect(findModal().text()).toBe(
- 'Only the owner of a pipeline schedule can make changes to it. Do you want to take ownership of this schedule?',
- );
- });
-});
diff --git a/spec/frontend/ci/pipeline_schedules/mock_data.js b/spec/frontend/ci/pipeline_schedules/mock_data.js
index 8d4e0f1bea6..711b120c61e 100644
--- a/spec/frontend/ci/pipeline_schedules/mock_data.js
+++ b/spec/frontend/ci/pipeline_schedules/mock_data.js
@@ -1,8 +1,8 @@
// Fixture located at spec/frontend/fixtures/pipeline_schedules.rb
+import mockGetSinglePipelineScheduleGraphQLResponse from 'test_fixtures/graphql/pipeline_schedules/get_pipeline_schedules.query.graphql.single.json';
import mockGetPipelineSchedulesGraphQLResponse from 'test_fixtures/graphql/pipeline_schedules/get_pipeline_schedules.query.graphql.json';
import mockGetPipelineSchedulesAsGuestGraphQLResponse from 'test_fixtures/graphql/pipeline_schedules/get_pipeline_schedules.query.graphql.as_guest.json';
import mockGetPipelineSchedulesTakeOwnershipGraphQLResponse from 'test_fixtures/graphql/pipeline_schedules/get_pipeline_schedules.query.graphql.take_ownership.json';
-import mockGetSinglePipelineScheduleGraphQLResponse from 'test_fixtures/graphql/pipeline_schedules/get_pipeline_schedules.query.graphql.single.json';
const {
data: {
diff --git a/spec/frontend/pipelines/components/pipelines_list/empty_state/ci_templates_spec.js b/spec/frontend/ci/pipelines_page/components/empty_state/ci_templates_spec.js
index b560eea4882..980a8be24ea 100644
--- a/spec/frontend/pipelines/components/pipelines_list/empty_state/ci_templates_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/empty_state/ci_templates_spec.js
@@ -1,6 +1,6 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
-import CiTemplates from '~/pipelines/components/pipelines_list/empty_state/ci_templates.vue';
+import CiTemplates from '~/ci/pipelines_page/components/empty_state/ci_templates.vue';
const pipelineEditorPath = '/-/ci/editor';
const suggestedCiTemplates = [
diff --git a/spec/frontend/pipelines/components/pipelines_list/empty_state/ios_templates_spec.js b/spec/frontend/ci/pipelines_page/components/empty_state/ios_templates_spec.js
index 700be076e0c..8620d41886e 100644
--- a/spec/frontend/pipelines/components/pipelines_list/empty_state/ios_templates_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/empty_state/ios_templates_spec.js
@@ -3,8 +3,8 @@ import { nextTick } from 'vue';
import { GlPopover, GlButton } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import RunnerInstructionsModal from '~/vue_shared/components/runner_instructions/runner_instructions_modal.vue';
-import IosTemplates from '~/pipelines/components/pipelines_list/empty_state/ios_templates.vue';
-import CiTemplates from '~/pipelines/components/pipelines_list/empty_state/ci_templates.vue';
+import IosTemplates from '~/ci/pipelines_page/components/empty_state/ios_templates.vue';
+import CiTemplates from '~/ci/pipelines_page/components/empty_state/ci_templates.vue';
const pipelineEditorPath = '/-/ci/editor';
const registrationToken = 'SECRET_TOKEN';
diff --git a/spec/frontend/pipelines/empty_state_spec.js b/spec/frontend/ci/pipelines_page/components/empty_state/no_ci_empty_state_spec.js
index 5465e4d77da..0c42723f753 100644
--- a/spec/frontend/pipelines/empty_state_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/empty_state/no_ci_empty_state_spec.js
@@ -2,10 +2,10 @@ import '~/commons';
import { shallowMount } from '@vue/test-utils';
import { GlEmptyState } from '@gitlab/ui';
import { stubExperiments } from 'helpers/experimentation_helper';
-import EmptyState from '~/pipelines/components/pipelines_list/empty_state.vue';
+import EmptyState from '~/ci/pipelines_page/components/empty_state/no_ci_empty_state.vue';
import GitlabExperiment from '~/experimentation/components/gitlab_experiment.vue';
-import PipelinesCiTemplates from '~/pipelines/components/pipelines_list/empty_state/pipelines_ci_templates.vue';
-import IosTemplates from '~/pipelines/components/pipelines_list/empty_state/ios_templates.vue';
+import PipelinesCiTemplates from '~/ci/pipelines_page/components/empty_state/pipelines_ci_templates.vue';
+import IosTemplates from '~/ci/pipelines_page/components/empty_state/ios_templates.vue';
describe('Pipelines Empty State', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/components/pipelines_list/empty_state/pipelines_ci_templates_spec.js b/spec/frontend/ci/pipelines_page/components/empty_state/pipelines_ci_templates_spec.js
index 4bf4257f462..fbef4aa08eb 100644
--- a/spec/frontend/pipelines/components/pipelines_list/empty_state/pipelines_ci_templates_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/empty_state/pipelines_ci_templates_spec.js
@@ -1,8 +1,8 @@
import '~/commons';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
-import PipelinesCiTemplates from '~/pipelines/components/pipelines_list/empty_state/pipelines_ci_templates.vue';
-import CiTemplates from '~/pipelines/components/pipelines_list/empty_state/ci_templates.vue';
+import PipelinesCiTemplates from '~/ci/pipelines_page/components/empty_state/pipelines_ci_templates.vue';
+import CiTemplates from '~/ci/pipelines_page/components/empty_state/ci_templates.vue';
const pipelineEditorPath = '/-/ci/editor';
diff --git a/spec/frontend/pipelines/components/pipelines_list/failure_widget/failed_job_details_spec.js b/spec/frontend/ci/pipelines_page/components/failure_widget/failed_job_details_spec.js
index 479ee854ecf..6967a369338 100644
--- a/spec/frontend/pipelines/components/pipelines_list/failure_widget/failed_job_details_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/failure_widget/failed_job_details_spec.js
@@ -6,9 +6,9 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
-import FailedJobDetails from '~/pipelines/components/pipelines_list/failure_widget/failed_job_details.vue';
-import RetryMrFailedJobMutation from '~/pipelines/graphql/mutations/retry_mr_failed_job.mutation.graphql';
-import { BRIDGE_KIND } from '~/pipelines/components/graph/constants';
+import FailedJobDetails from '~/ci/pipelines_page/components/failure_widget/failed_job_details.vue';
+import RetryMrFailedJobMutation from '~/ci/merge_requests/graphql/mutations/retry_mr_failed_job.mutation.graphql';
+import { BRIDGE_KIND } from '~/ci/pipeline_details/graph/constants';
import { job } from './mock';
Vue.use(VueApollo);
diff --git a/spec/frontend/pipelines/components/pipelines_list/failure_widget/failed_jobs_list_spec.js b/spec/frontend/ci/pipelines_page/components/failure_widget/failed_jobs_list_spec.js
index 967812cc627..af075b02b64 100644
--- a/spec/frontend/pipelines/components/pipelines_list/failure_widget/failed_jobs_list_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/failure_widget/failed_jobs_list_spec.js
@@ -6,10 +6,10 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
-import FailedJobsList from '~/pipelines/components/pipelines_list/failure_widget/failed_jobs_list.vue';
-import FailedJobDetails from '~/pipelines/components/pipelines_list/failure_widget/failed_job_details.vue';
-import * as utils from '~/pipelines/components/pipelines_list/failure_widget/utils';
-import getPipelineFailedJobs from '~/pipelines/graphql/queries/get_pipeline_failed_jobs.query.graphql';
+import FailedJobsList from '~/ci/pipelines_page/components/failure_widget/failed_jobs_list.vue';
+import FailedJobDetails from '~/ci/pipelines_page/components/failure_widget/failed_job_details.vue';
+import * as utils from '~/ci/pipelines_page/components/failure_widget/utils';
+import getPipelineFailedJobs from '~/ci/pipelines_page/graphql/queries/get_pipeline_failed_jobs.query.graphql';
import { failedJobsMock, failedJobsMock2, failedJobsMockEmpty, activeFailedJobsMock } from './mock';
Vue.use(VueApollo);
diff --git a/spec/frontend/pipelines/components/pipelines_list/failure_widget/mock.js b/spec/frontend/ci/pipelines_page/components/failure_widget/mock.js
index 318d787a984..318d787a984 100644
--- a/spec/frontend/pipelines/components/pipelines_list/failure_widget/mock.js
+++ b/spec/frontend/ci/pipelines_page/components/failure_widget/mock.js
diff --git a/spec/frontend/pipelines/components/pipelines_list/failure_widget/pipeline_failed_jobs_widget_spec.js b/spec/frontend/ci/pipelines_page/components/failure_widget/pipeline_failed_jobs_widget_spec.js
index 5bbb874edb0..e52b62feb23 100644
--- a/spec/frontend/pipelines/components/pipelines_list/failure_widget/pipeline_failed_jobs_widget_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/failure_widget/pipeline_failed_jobs_widget_spec.js
@@ -1,7 +1,7 @@
import { GlButton, GlCard, GlIcon, GlPopover } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import PipelineFailedJobsWidget from '~/pipelines/components/pipelines_list/failure_widget/pipeline_failed_jobs_widget.vue';
-import FailedJobsList from '~/pipelines/components/pipelines_list/failure_widget/failed_jobs_list.vue';
+import PipelineFailedJobsWidget from '~/ci/pipelines_page/components/failure_widget/pipeline_failed_jobs_widget.vue';
+import FailedJobsList from '~/ci/pipelines_page/components/failure_widget/failed_jobs_list.vue';
jest.mock('~/alert');
diff --git a/spec/frontend/pipelines/components/pipelines_list/failure_widget/utils_spec.js b/spec/frontend/ci/pipelines_page/components/failure_widget/utils_spec.js
index 44f16478151..5755cd846ac 100644
--- a/spec/frontend/pipelines/components/pipelines_list/failure_widget/utils_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/failure_widget/utils_spec.js
@@ -1,7 +1,4 @@
-import {
- isFailedJob,
- sortJobsByStatus,
-} from '~/pipelines/components/pipelines_list/failure_widget/utils';
+import { isFailedJob, sortJobsByStatus } from '~/ci/pipelines_page/components/failure_widget/utils';
describe('isFailedJob', () => {
describe('when the job argument is undefined', () => {
diff --git a/spec/frontend/pipelines/nav_controls_spec.js b/spec/frontend/ci/pipelines_page/components/nav_controls_spec.js
index 15de7dc51f1..f4858ac27ea 100644
--- a/spec/frontend/pipelines/nav_controls_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/nav_controls_spec.js
@@ -1,5 +1,5 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import NavControls from '~/pipelines/components/pipelines_list/nav_controls.vue';
+import NavControls from '~/ci/pipelines_page/components/nav_controls.vue';
describe('Pipelines Nav Controls', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/pipeline_labels_spec.js b/spec/frontend/ci/pipelines_page/components/pipeline_labels_spec.js
index 6a37e36352b..b5c9a3030e0 100644
--- a/spec/frontend/pipelines/pipeline_labels_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/pipeline_labels_spec.js
@@ -1,7 +1,7 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { trimText } from 'helpers/text_helper';
-import PipelineLabelsComponent from '~/pipelines/components/pipelines_list/pipeline_labels.vue';
-import { mockPipeline } from './mock_data';
+import PipelineLabelsComponent from '~/ci/pipelines_page/components/pipeline_labels.vue';
+import { mockPipeline } from 'jest/ci/pipeline_details/mock_data';
const projectPath = 'test/test';
diff --git a/spec/frontend/pipelines/pipeline_multi_actions_spec.js b/spec/frontend/ci/pipelines_page/components/pipeline_multi_actions_spec.js
index 0fdc45a5931..7ae21db8815 100644
--- a/spec/frontend/pipelines/pipeline_multi_actions_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/pipeline_multi_actions_spec.js
@@ -1,5 +1,12 @@
import { nextTick } from 'vue';
-import { GlAlert, GlDropdown, GlSprintf, GlLoadingIcon, GlSearchBoxByType } from '@gitlab/ui';
+import {
+ GlAlert,
+ GlDisclosureDropdown,
+ GlDisclosureDropdownItem,
+ GlSprintf,
+ GlLoadingIcon,
+ GlSearchBoxByType,
+} from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
@@ -10,13 +17,12 @@ import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import PipelineMultiActions, {
i18n,
-} from '~/pipelines/components/pipelines_list/pipeline_multi_actions.vue';
-import { TRACKING_CATEGORIES } from '~/pipelines/constants';
+} from '~/ci/pipelines_page/components/pipeline_multi_actions.vue';
+import { TRACKING_CATEGORIES } from '~/ci/constants';
describe('Pipeline Multi Actions Dropdown', () => {
let wrapper;
let mockAxios;
- const focusInputMock = jest.fn();
const artifacts = [
{
@@ -58,26 +64,27 @@ describe('Pipeline Multi Actions Dropdown', () => {
pipelineId,
},
stubs: {
+ GlAlert,
GlSprintf,
- GlDropdown,
- GlSearchBoxByType: stubComponent(GlSearchBoxByType, {
- methods: { focusInput: focusInputMock },
- }),
+ GlDisclosureDropdown,
+ GlDisclosureDropdownItem,
+ GlSearchBoxByType: stubComponent(GlSearchBoxByType),
},
}),
);
};
- const findAlert = () => wrapper.findComponent(GlAlert);
- const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findAlert = () => wrapper.findByTestId('artifacts-fetch-error');
+ const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
- const findAllArtifactItems = () => wrapper.findAllByTestId(artifactItemTestId);
const findFirstArtifactItem = () => wrapper.findByTestId(artifactItemTestId);
const findAllArtifactItemsData = () =>
- wrapper.findAllByTestId(artifactItemTestId).wrappers.map((x) => ({
- path: x.attributes('href'),
- name: x.text(),
- }));
+ findDropdown()
+ .props('items')
+ .map(({ text, href }) => ({
+ name: text,
+ path: href,
+ }));
const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
const findEmptyMessage = () => wrapper.findByTestId('artifacts-empty-message');
const findWarning = () => wrapper.findByTestId('artifacts-fetch-warning');
@@ -108,7 +115,7 @@ describe('Pipeline Multi Actions Dropdown', () => {
it('should render a loading spinner and no empty message', async () => {
createComponent();
- findDropdown().vm.$emit('show');
+ findDropdown().vm.$emit('shown');
await nextTick();
expect(findLoadingIcon().exists()).toBe(true);
@@ -123,7 +130,7 @@ describe('Pipeline Multi Actions Dropdown', () => {
createComponent();
- findDropdown().vm.$emit('show');
+ findDropdown().vm.$emit('shown');
await waitForPromises();
});
@@ -135,13 +142,29 @@ describe('Pipeline Multi Actions Dropdown', () => {
it('should focus the search box when opened with artifacts', () => {
findDropdown().vm.$emit('shown');
- expect(focusInputMock).toHaveBeenCalled();
+ expect(findSearchBox().attributes('autofocus')).not.toBe(undefined);
});
- it('should render all the provided artifacts when search query is empty', () => {
+ it('should clear searchQuery when dropdown is closed', async () => {
+ findDropdown().vm.$emit('shown');
+ findSearchBox().vm.$emit('input', 'job-2');
+ await waitForPromises();
+
+ expect(findSearchBox().vm.value).toBe('job-2');
+
+ findDropdown().vm.$emit('hidden');
+ await waitForPromises();
+
+ expect(findSearchBox().vm.value).toBe('');
+ });
+
+ it('should render all the provided artifacts when search query is empty', async () => {
findSearchBox().vm.$emit('input', '');
+ await waitForPromises();
- expect(findAllArtifactItems()).toHaveLength(artifacts.length);
+ expect(findAllArtifactItemsData()).toEqual(
+ artifacts.map(({ name, path }) => ({ name, path })),
+ );
expect(findEmptyMessage().exists()).toBe(false);
});
@@ -149,7 +172,12 @@ describe('Pipeline Multi Actions Dropdown', () => {
findSearchBox().vm.$emit('input', 'job-2');
await waitForPromises();
- expect(findAllArtifactItems()).toHaveLength(1);
+ expect(findAllArtifactItemsData()).toEqual([
+ {
+ name: 'job-2 my-artifact-2',
+ path: '/download/path-two',
+ },
+ ]);
expect(findEmptyMessage().exists()).toBe(false);
});
@@ -164,12 +192,12 @@ describe('Pipeline Multi Actions Dropdown', () => {
mockAxios.resetHistory();
mockAxios.onGet(endpoint).replyOnce(HTTP_STATUS_OK, { artifacts: newArtifacts });
- findDropdown().vm.$emit('show');
+ findDropdown().vm.$emit('shown');
await nextTick();
});
it('should hide list and render a loading spinner on dropdown click', () => {
- expect(findAllArtifactItems()).toHaveLength(0);
+ expect(findAllArtifactItemsData()).toHaveLength(0);
expect(findLoadingIcon().exists()).toBe(true);
});
@@ -189,7 +217,7 @@ describe('Pipeline Multi Actions Dropdown', () => {
beforeEach(async () => {
mockAxios.onGet(endpoint).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
- findDropdown().vm.$emit('show');
+ findDropdown().vm.$emit('shown');
await waitForPromises();
});
@@ -217,7 +245,7 @@ describe('Pipeline Multi Actions Dropdown', () => {
beforeEach(async () => {
mockAxios.onGet(newEndpoint).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
- findDropdown().vm.$emit('show');
+ findDropdown().vm.$emit('shown');
await waitForPromises();
});
@@ -227,7 +255,7 @@ describe('Pipeline Multi Actions Dropdown', () => {
});
it('should clear list', () => {
- expect(findAllArtifactItems()).toHaveLength(0);
+ expect(findAllArtifactItemsData()).toHaveLength(0);
});
});
});
@@ -241,7 +269,7 @@ describe('Pipeline Multi Actions Dropdown', () => {
it('should render empty message and no search box when no artifacts are found', async () => {
createComponent();
- findDropdown().vm.$emit('show');
+ findDropdown().vm.$emit('shown');
await waitForPromises();
expect(findEmptyMessage().exists()).toBe(true);
@@ -258,7 +286,7 @@ describe('Pipeline Multi Actions Dropdown', () => {
it('should render an error message', async () => {
createComponent();
- findDropdown().vm.$emit('show');
+ findDropdown().vm.$emit('shown');
await waitForPromises();
const error = findAlert();
@@ -278,7 +306,7 @@ describe('Pipeline Multi Actions Dropdown', () => {
createComponent();
- findDropdown().vm.$emit('show');
+ findDropdown().vm.$emit('shown');
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_artifacts_dropdown', {
label: TRACKING_CATEGORIES.table,
diff --git a/spec/frontend/pipelines/pipeline_operations_spec.js b/spec/frontend/ci/pipelines_page/components/pipeline_operations_spec.js
index b2191453824..d2eab64b317 100644
--- a/spec/frontend/pipelines/pipeline_operations_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/pipeline_operations_spec.js
@@ -1,8 +1,8 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import PipelinesManualActions from '~/pipelines/components/pipelines_list/pipelines_manual_actions.vue';
-import PipelineMultiActions from '~/pipelines/components/pipelines_list/pipeline_multi_actions.vue';
-import PipelineOperations from '~/pipelines/components/pipelines_list/pipeline_operations.vue';
-import eventHub from '~/pipelines/event_hub';
+import PipelinesManualActions from '~/ci/pipelines_page/components/pipelines_manual_actions.vue';
+import PipelineMultiActions from '~/ci/pipelines_page/components/pipeline_multi_actions.vue';
+import PipelineOperations from '~/ci/pipelines_page/components/pipeline_operations.vue';
+import eventHub from '~/ci/event_hub';
describe('Pipeline operations', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/components/pipelines_list/pipieline_stop_modal_spec.js b/spec/frontend/ci/pipelines_page/components/pipeline_stop_modal_spec.js
index 249126390f1..4d78a923542 100644
--- a/spec/frontend/pipelines/components/pipelines_list/pipieline_stop_modal_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/pipeline_stop_modal_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import { GlSprintf } from '@gitlab/ui';
-import PipelineStopModal from '~/pipelines/components/pipelines_list/pipeline_stop_modal.vue';
-import { mockPipelineHeader } from '../../mock_data';
+import { mockPipelineHeader } from 'jest/ci/pipeline_details/mock_data';
+import PipelineStopModal from '~/ci/pipelines_page/components/pipeline_stop_modal.vue';
describe('PipelineStopModal', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/pipeline_triggerer_spec.js b/spec/frontend/ci/pipelines_page/components/pipeline_triggerer_spec.js
index 856c0484075..cb04171f031 100644
--- a/spec/frontend/pipelines/pipeline_triggerer_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/pipeline_triggerer_spec.js
@@ -1,6 +1,6 @@
import { GlAvatar, GlAvatarLink } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import pipelineTriggerer from '~/pipelines/components/pipelines_list/pipeline_triggerer.vue';
+import pipelineTriggerer from '~/ci/pipelines_page/components/pipeline_triggerer.vue';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
describe('Pipelines Triggerer', () => {
diff --git a/spec/frontend/pipelines/pipeline_url_spec.js b/spec/frontend/ci/pipelines_page/components/pipeline_url_spec.js
index 797ec676ccc..0ee22dda826 100644
--- a/spec/frontend/pipelines/pipeline_url_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/pipeline_url_spec.js
@@ -1,10 +1,14 @@
import { merge } from 'lodash';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import PipelineUrlComponent from '~/pipelines/components/pipelines_list/pipeline_url.vue';
+import PipelineUrlComponent from '~/ci/pipelines_page/components/pipeline_url.vue';
import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue';
-import { TRACKING_CATEGORIES } from '~/pipelines/constants';
-import { mockPipeline, mockPipelineBranch, mockPipelineTag } from './mock_data';
+import { TRACKING_CATEGORIES } from '~/ci/constants';
+import {
+ mockPipeline,
+ mockPipelineBranch,
+ mockPipelineTag,
+} from 'jest/ci/pipeline_details/mock_data';
const projectPath = 'test/test';
diff --git a/spec/frontend/pipelines/pipelines_artifacts_spec.js b/spec/frontend/ci/pipelines_page/components/pipelines_artifacts_spec.js
index 1abc2887682..557403b3de9 100644
--- a/spec/frontend/pipelines/pipelines_artifacts_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/pipelines_artifacts_spec.js
@@ -5,7 +5,7 @@ import {
GlSprintf,
} from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import PipelineArtifacts from '~/pipelines/components/pipelines_list/pipelines_artifacts.vue';
+import PipelineArtifacts from '~/ci/pipelines_page/components/pipelines_artifacts.vue';
describe('Pipelines Artifacts dropdown', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js b/spec/frontend/ci/pipelines_page/components/pipelines_filtered_search_spec.js
index 51a4487a3ef..4cd85b86e31 100644
--- a/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/pipelines_filtered_search_spec.js
@@ -5,13 +5,13 @@ import { nextTick } from 'vue';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import Api from '~/api';
import axios from '~/lib/utils/axios_utils';
-import PipelinesFilteredSearch from '~/pipelines/components/pipelines_list/pipelines_filtered_search.vue';
+import PipelinesFilteredSearch from '~/ci/pipelines_page/components/pipelines_filtered_search.vue';
import {
FILTERED_SEARCH_TERM,
OPERATORS_IS,
} from '~/vue_shared/components/filtered_search_bar/constants';
-import { TRACKING_CATEGORIES } from '~/pipelines/constants';
-import { users, mockSearch, branches, tags } from '../mock_data';
+import { TRACKING_CATEGORIES } from '~/ci/constants';
+import { users, mockSearch, branches, tags } from 'jest/ci/pipeline_details/mock_data';
describe('Pipelines filtered search', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/pipelines_manual_actions_spec.js b/spec/frontend/ci/pipelines_page/components/pipelines_manual_actions_spec.js
index 82cab88c9eb..a24e136f1ff 100644
--- a/spec/frontend/pipelines/pipelines_manual_actions_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/pipelines_manual_actions_spec.js
@@ -11,9 +11,9 @@ import { createAlert } from '~/alert';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
-import PipelinesManualActions from '~/pipelines/components/pipelines_list/pipelines_manual_actions.vue';
-import getPipelineActionsQuery from '~/pipelines/graphql/queries/get_pipeline_actions.query.graphql';
-import { TRACKING_CATEGORIES } from '~/pipelines/constants';
+import PipelinesManualActions from '~/ci/pipelines_page/components/pipelines_manual_actions.vue';
+import getPipelineActionsQuery from '~/ci/pipelines_page/graphql/queries/get_pipeline_actions.query.graphql';
+import { TRACKING_CATEGORIES } from '~/ci/constants';
import GlCountdown from '~/vue_shared/components/gl_countdown.vue';
Vue.use(VueApollo);
diff --git a/spec/frontend/pipelines/time_ago_spec.js b/spec/frontend/ci/pipelines_page/components/time_ago_spec.js
index d2aa340a980..f7203f8d1b4 100644
--- a/spec/frontend/pipelines/time_ago_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/time_ago_spec.js
@@ -1,7 +1,7 @@
import { GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import TimeAgo from '~/pipelines/components/pipelines_list/time_ago.vue';
+import TimeAgo from '~/ci/pipelines_page/components/time_ago.vue';
describe('Timeago component', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/pipelines_spec.js b/spec/frontend/ci/pipelines_page/pipelines_spec.js
index cc85d6d99e0..5d1f431e57c 100644
--- a/spec/frontend/pipelines/pipelines_spec.js
+++ b/spec/frontend/ci/pipelines_page/pipelines_spec.js
@@ -24,12 +24,12 @@ import { createAlert, VARIANT_WARNING } from '~/alert';
import setSortPreferenceMutation from '~/issues/list/queries/set_sort_preference.mutation.graphql';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
-import NavigationControls from '~/pipelines/components/pipelines_list/nav_controls.vue';
-import PipelinesComponent from '~/pipelines/components/pipelines_list/pipelines.vue';
-import PipelinesCiTemplates from '~/pipelines/components/pipelines_list/empty_state/pipelines_ci_templates.vue';
-import PipelinesTableComponent from '~/pipelines/components/pipelines_list/pipelines_table.vue';
-import { RAW_TEXT_WARNING, TRACKING_CATEGORIES } from '~/pipelines/constants';
-import Store from '~/pipelines/stores/pipelines_store';
+import NavigationControls from '~/ci/pipelines_page/components/nav_controls.vue';
+import PipelinesComponent from '~/ci/pipelines_page/pipelines.vue';
+import PipelinesCiTemplates from '~/ci/pipelines_page/components/empty_state/pipelines_ci_templates.vue';
+import PipelinesTableComponent from '~/ci/common/pipelines_table.vue';
+import { RAW_TEXT_WARNING, TRACKING_CATEGORIES } from '~/ci/constants';
+import Store from '~/ci/pipeline_details/stores/pipelines_store';
import NavigationTabs from '~/vue_shared/components/navigation_tabs.vue';
import TablePagination from '~/vue_shared/components/pagination/table_pagination.vue';
import {
@@ -37,7 +37,8 @@ import {
setIdTypePreferenceMutationResponseWithErrors,
} from 'jest/issues/list/mock_data';
-import { stageReply, users, mockSearch, branches } from './mock_data';
+import { stageReply } from 'jest/ci/pipeline_mini_graph/mock_data';
+import { users, mockSearch, branches } from '../pipeline_details/mock_data';
jest.mock('@sentry/browser');
jest.mock('~/alert');
diff --git a/spec/frontend/pipelines/tokens/pipeline_branch_name_token_spec.js b/spec/frontend/ci/pipelines_page/tokens/pipeline_branch_name_token_spec.js
index d518519a424..ea615d85c4b 100644
--- a/spec/frontend/pipelines/tokens/pipeline_branch_name_token_spec.js
+++ b/spec/frontend/ci/pipelines_page/tokens/pipeline_branch_name_token_spec.js
@@ -3,8 +3,8 @@ import { nextTick } from 'vue';
import { shallowMount } from '@vue/test-utils';
import waitForPromises from 'helpers/wait_for_promises';
import Api from '~/api';
-import PipelineBranchNameToken from '~/pipelines/components/pipelines_list/tokens/pipeline_branch_name_token.vue';
-import { branches, mockBranchesAfterMap } from '../mock_data';
+import PipelineBranchNameToken from '~/ci/pipelines_page/tokens/pipeline_branch_name_token.vue';
+import { branches, mockBranchesAfterMap } from 'jest/ci/pipeline_details/mock_data';
describe('Pipeline Branch Name Token', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/tokens/pipeline_source_token_spec.js b/spec/frontend/ci/pipelines_page/tokens/pipeline_source_token_spec.js
index 60abb63a7e0..0ea2b641b33 100644
--- a/spec/frontend/pipelines/tokens/pipeline_source_token_spec.js
+++ b/spec/frontend/ci/pipelines_page/tokens/pipeline_source_token_spec.js
@@ -1,8 +1,8 @@
import { GlFilteredSearchToken, GlFilteredSearchSuggestion } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import { PIPELINE_SOURCES } from 'ee_else_ce/pipelines/components/pipelines_list/tokens/constants';
+import { PIPELINE_SOURCES } from 'ee_else_ce/ci/pipelines_page/tokens/constants';
import { stubComponent } from 'helpers/stub_component';
-import PipelineSourceToken from '~/pipelines/components/pipelines_list/tokens/pipeline_source_token.vue';
+import PipelineSourceToken from '~/ci/pipelines_page/tokens/pipeline_source_token.vue';
describe('Pipeline Source Token', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/tokens/pipeline_status_token_spec.js b/spec/frontend/ci/pipelines_page/tokens/pipeline_status_token_spec.js
index cf4ccb5ce43..b8f98666438 100644
--- a/spec/frontend/pipelines/tokens/pipeline_status_token_spec.js
+++ b/spec/frontend/ci/pipelines_page/tokens/pipeline_status_token_spec.js
@@ -1,7 +1,7 @@
import { GlFilteredSearchToken, GlFilteredSearchSuggestion, GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { stubComponent } from 'helpers/stub_component';
-import PipelineStatusToken from '~/pipelines/components/pipelines_list/tokens/pipeline_status_token.vue';
+import PipelineStatusToken from '~/ci/pipelines_page/tokens/pipeline_status_token.vue';
import {
TOKEN_TITLE_STATUS,
TOKEN_TYPE_STATUS,
diff --git a/spec/frontend/pipelines/tokens/pipeline_tag_name_token_spec.js b/spec/frontend/ci/pipelines_page/tokens/pipeline_tag_name_token_spec.js
index 88c88d8f16f..d23d9f07df3 100644
--- a/spec/frontend/pipelines/tokens/pipeline_tag_name_token_spec.js
+++ b/spec/frontend/ci/pipelines_page/tokens/pipeline_tag_name_token_spec.js
@@ -1,8 +1,8 @@
import { GlFilteredSearchToken, GlFilteredSearchSuggestion, GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Api from '~/api';
-import PipelineTagNameToken from '~/pipelines/components/pipelines_list/tokens/pipeline_tag_name_token.vue';
-import { tags, mockTagsAfterMap } from '../mock_data';
+import PipelineTagNameToken from '~/ci/pipelines_page/tokens/pipeline_tag_name_token.vue';
+import { tags, mockTagsAfterMap } from 'jest/ci/pipeline_details/mock_data';
describe('Pipeline Branch Name Token', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/tokens/pipeline_trigger_author_token_spec.js b/spec/frontend/ci/pipelines_page/tokens/pipeline_trigger_author_token_spec.js
index e9ec684a350..eccb90b0c94 100644
--- a/spec/frontend/pipelines/tokens/pipeline_trigger_author_token_spec.js
+++ b/spec/frontend/ci/pipelines_page/tokens/pipeline_trigger_author_token_spec.js
@@ -2,8 +2,8 @@ import { GlFilteredSearchToken, GlFilteredSearchSuggestion, GlLoadingIcon } from
import { shallowMount } from '@vue/test-utils';
import { stubComponent } from 'helpers/stub_component';
import Api from '~/api';
-import PipelineTriggerAuthorToken from '~/pipelines/components/pipelines_list/tokens/pipeline_trigger_author_token.vue';
-import { users } from '../mock_data';
+import PipelineTriggerAuthorToken from '~/ci/pipelines_page/tokens/pipeline_trigger_author_token.vue';
+import { users } from 'jest/ci/pipeline_details/mock_data';
describe('Pipeline Trigger Author Token', () => {
let wrapper;
diff --git a/spec/frontend/ci/reports/components/__snapshots__/issue_status_icon_spec.js.snap b/spec/frontend/ci/reports/components/__snapshots__/issue_status_icon_spec.js.snap
index b5a4cb42463..2de634a6209 100644
--- a/spec/frontend/ci/reports/components/__snapshots__/issue_status_icon_spec.js.snap
+++ b/spec/frontend/ci/reports/components/__snapshots__/issue_status_icon_spec.js.snap
@@ -2,10 +2,9 @@
exports[`IssueStatusIcon renders "failed" state correctly 1`] = `
<div
- class="report-block-list-icon failed"
+ class="failed report-block-list-icon"
>
<gl-icon-stub
- data-qa-selector="status_failed_icon"
name="status_failed_borderless"
size="24"
/>
@@ -14,10 +13,9 @@ exports[`IssueStatusIcon renders "failed" state correctly 1`] = `
exports[`IssueStatusIcon renders "neutral" state correctly 1`] = `
<div
- class="report-block-list-icon neutral"
+ class="neutral report-block-list-icon"
>
<gl-icon-stub
- data-qa-selector="status_neutral_icon"
name="dash"
size="24"
/>
@@ -29,7 +27,6 @@ exports[`IssueStatusIcon renders "success" state correctly 1`] = `
class="report-block-list-icon success"
>
<gl-icon-stub
- data-qa-selector="status_success_icon"
name="status_success_borderless"
size="24"
/>
diff --git a/spec/frontend/ci/runner/components/cells/runner_summary_cell_spec.js b/spec/frontend/ci/runner/components/cells/runner_summary_cell_spec.js
index ad20d7682ed..bc77b7b89dd 100644
--- a/spec/frontend/ci/runner/components/cells/runner_summary_cell_spec.js
+++ b/spec/frontend/ci/runner/components/cells/runner_summary_cell_spec.js
@@ -13,7 +13,6 @@ import {
INSTANCE_TYPE,
I18N_INSTANCE_TYPE,
PROJECT_TYPE,
- I18N_NO_DESCRIPTION,
I18N_CREATED_AT_LABEL,
I18N_CREATED_AT_BY_LABEL,
} from '~/ci/runner/constants';
@@ -102,15 +101,6 @@ describe('RunnerTypeCell', () => {
it('Displays the runner description', () => {
expect(wrapper.text()).toContain(mockRunner.description);
- expect(wrapper.findByText(I18N_NO_DESCRIPTION).exists()).toBe(false);
- });
-
- it('Displays "No description" for missing runner description', () => {
- createComponent({
- runner: { description: null },
- });
-
- expect(wrapper.findByText(I18N_NO_DESCRIPTION).classes()).toContain('gl-text-secondary');
});
it('Displays last contact', () => {
diff --git a/spec/frontend/ci/runner/components/runner_create_form_spec.js b/spec/frontend/ci/runner/components/runner_create_form_spec.js
index c452e32b0e4..3c5f8c4d6a9 100644
--- a/spec/frontend/ci/runner/components/runner_create_form_spec.js
+++ b/spec/frontend/ci/runner/components/runner_create_form_spec.js
@@ -61,6 +61,7 @@ describe('RunnerCreateForm', () => {
createComponent();
expect(findRunnerFormFields().props('value')).toEqual(defaultRunnerModel);
+ expect(findRunnerFormFields().props('runnerType')).toEqual(INSTANCE_TYPE);
});
it('shows a submit button', () => {
diff --git a/spec/frontend/ci/runner/components/runner_form_fields_spec.js b/spec/frontend/ci/runner/components/runner_form_fields_spec.js
index 93be4d9d35e..7e39a6b72f9 100644
--- a/spec/frontend/ci/runner/components/runner_form_fields_spec.js
+++ b/spec/frontend/ci/runner/components/runner_form_fields_spec.js
@@ -132,8 +132,8 @@ describe('RunnerFormFields', () => {
it('when runner is of project type, locked checkbox can be checked', async () => {
createComponent({
+ runnerType: PROJECT_TYPE,
value: {
- runnerType: PROJECT_TYPE,
locked: false,
},
});
@@ -144,7 +144,6 @@ describe('RunnerFormFields', () => {
expect(wrapper.emitted('input').at(-1)).toEqual([
{
- runnerType: PROJECT_TYPE,
locked: true,
},
]);
diff --git a/spec/frontend/ci/runner/components/runner_managers_table_spec.js b/spec/frontend/ci/runner/components/runner_managers_table_spec.js
index cde6ee6eea0..d5782e21a2f 100644
--- a/spec/frontend/ci/runner/components/runner_managers_table_spec.js
+++ b/spec/frontend/ci/runner/components/runner_managers_table_spec.js
@@ -60,8 +60,8 @@ describe('RunnerJobs', () => {
it('shows status', () => {
createComponent();
- expect(findCellText({ field: 'status', i: 0 })).toBe(s__('Runners|Online'));
- expect(findCellText({ field: 'status', i: 1 })).toBe(s__('Runners|Online'));
+ expect(findCellText({ field: 'status', i: 0 })).toContain(s__('Runners|Online'));
+ expect(findCellText({ field: 'status', i: 0 })).toContain(s__('Runners|Idle'));
});
it('shows version', () => {
diff --git a/spec/frontend/ci/runner/components/runner_update_form_spec.js b/spec/frontend/ci/runner/components/runner_update_form_spec.js
index 5851078a8d3..2ba1c31fe52 100644
--- a/spec/frontend/ci/runner/components/runner_update_form_spec.js
+++ b/spec/frontend/ci/runner/components/runner_update_form_spec.js
@@ -15,6 +15,7 @@ import RunnerUpdateForm from '~/ci/runner/components/runner_update_form.vue';
import runnerUpdateMutation from '~/ci/runner/graphql/edit/runner_update.mutation.graphql';
import { captureException } from '~/ci/runner/sentry_utils';
import { saveAlertToLocalStorage } from '~/ci/runner/local_storage_alert/save_alert_to_local_storage';
+import { INSTANCE_TYPE } from '~/ci/runner/constants';
import { runnerFormData } from '../mock_data';
jest.mock('~/ci/runner/local_storage_alert/save_alert_to_local_storage');
@@ -119,6 +120,7 @@ describe('RunnerUpdateForm', () => {
it('shows runner fields', () => {
expect(findRunnerFormFields().props('value')).toEqual(runnerToModel(mockRunner));
+ expect(findRunnerFormFields().props('runnerType')).toEqual(INSTANCE_TYPE);
});
it('form has not been submitted', () => {
diff --git a/spec/frontend/ci_secure_files/components/metadata/__snapshots__/modal_spec.js.snap b/spec/frontend/ci_secure_files/components/metadata/__snapshots__/modal_spec.js.snap
index 79194c20ff5..d0c1987829f 100644
--- a/spec/frontend/ci_secure_files/components/metadata/__snapshots__/modal_spec.js.snap
+++ b/spec/frontend/ci_secure_files/components/metadata/__snapshots__/modal_spec.js.snap
@@ -13,22 +13,16 @@ exports[`Secure File Metadata Modal when a .cer file is supplied matches cer the
<div
data-testid="slot-default"
>
-
<table
aria-busy=""
aria-colcount="2"
- class="table b-table gl-table"
+ class="b-table gl-table table"
role="table"
>
- <!---->
- <!---->
<thead
- class=""
role="rowgroup"
>
- <!---->
<tr
- class=""
role="row"
>
<th
@@ -56,14 +50,11 @@ exports[`Secure File Metadata Modal when a .cer file is supplied matches cer the
<tbody
role="rowgroup"
>
- <!---->
<tr
- class=""
role="row"
>
<td
aria-colindex="1"
- class=""
role="cell"
>
<strong>
@@ -72,21 +63,16 @@ exports[`Secure File Metadata Modal when a .cer file is supplied matches cer the
</td>
<td
aria-colindex="2"
- class=""
role="cell"
>
-
- Apple Distribution: Team Name (ABC123XYZ)
-
+ Apple Distribution: Team Name (ABC123XYZ)
</td>
</tr>
<tr
- class=""
role="row"
>
<td
aria-colindex="1"
- class=""
role="cell"
>
<strong>
@@ -95,21 +81,16 @@ exports[`Secure File Metadata Modal when a .cer file is supplied matches cer the
</td>
<td
aria-colindex="2"
- class=""
role="cell"
>
-
- 33669367788748363528491290218354043267
-
+ 33669367788748363528491290218354043267
</td>
</tr>
<tr
- class=""
role="row"
>
<td
aria-colindex="1"
- class=""
role="cell"
>
<strong>
@@ -118,21 +99,16 @@ exports[`Secure File Metadata Modal when a .cer file is supplied matches cer the
</td>
<td
aria-colindex="2"
- class=""
role="cell"
>
-
- Team Name (ABC123XYZ)
-
+ Team Name (ABC123XYZ)
</td>
</tr>
<tr
- class=""
role="row"
>
<td
aria-colindex="1"
- class=""
role="cell"
>
<strong>
@@ -141,21 +117,16 @@ exports[`Secure File Metadata Modal when a .cer file is supplied matches cer the
</td>
<td
aria-colindex="2"
- class=""
role="cell"
>
-
- Apple Worldwide Developer Relations Certification Authority - G3
-
+ Apple Worldwide Developer Relations Certification Authority - G3
</td>
</tr>
<tr
- class=""
role="row"
>
<td
aria-colindex="1"
- class=""
role="cell"
>
<strong>
@@ -164,18 +135,12 @@ exports[`Secure File Metadata Modal when a .cer file is supplied matches cer the
</td>
<td
aria-colindex="2"
- class=""
role="cell"
>
-
- April 26, 2023 at 7:20:39 PM GMT
-
+ April 26, 2023 at 7:20:39 PM GMT
</td>
</tr>
- <!---->
- <!---->
</tbody>
- <!---->
</table>
</div>
</div>
@@ -194,22 +159,16 @@ exports[`Secure File Metadata Modal when a .mobileprovision file is supplied mat
<div
data-testid="slot-default"
>
-
<table
aria-busy=""
aria-colcount="2"
- class="table b-table gl-table"
+ class="b-table gl-table table"
role="table"
>
- <!---->
- <!---->
<thead
- class=""
role="rowgroup"
>
- <!---->
<tr
- class=""
role="row"
>
<th
@@ -237,14 +196,11 @@ exports[`Secure File Metadata Modal when a .mobileprovision file is supplied mat
<tbody
role="rowgroup"
>
- <!---->
<tr
- class=""
role="row"
>
<td
aria-colindex="1"
- class=""
role="cell"
>
<strong>
@@ -253,21 +209,16 @@ exports[`Secure File Metadata Modal when a .mobileprovision file is supplied mat
</td>
<td
aria-colindex="2"
- class=""
role="cell"
>
-
- 6b9fcce1-b9a9-4b37-b2ce-ec4da2044abf
-
+ 6b9fcce1-b9a9-4b37-b2ce-ec4da2044abf
</td>
</tr>
<tr
- class=""
role="row"
>
<td
aria-colindex="1"
- class=""
role="cell"
>
<strong>
@@ -276,21 +227,16 @@ exports[`Secure File Metadata Modal when a .mobileprovision file is supplied mat
</td>
<td
aria-colindex="2"
- class=""
role="cell"
>
-
- iOS
-
+ iOS
</td>
</tr>
<tr
- class=""
role="row"
>
<td
aria-colindex="1"
- class=""
role="cell"
>
<strong>
@@ -299,21 +245,16 @@ exports[`Secure File Metadata Modal when a .mobileprovision file is supplied mat
</td>
<td
aria-colindex="2"
- class=""
role="cell"
>
-
- Team Name (ABC123XYZ)
-
+ Team Name (ABC123XYZ)
</td>
</tr>
<tr
- class=""
role="row"
>
<td
aria-colindex="1"
- class=""
role="cell"
>
<strong>
@@ -322,21 +263,16 @@ exports[`Secure File Metadata Modal when a .mobileprovision file is supplied mat
</td>
<td
aria-colindex="2"
- class=""
role="cell"
>
-
- iOS Demo - match Development com.gitlab.ios-demo
-
+ iOS Demo - match Development com.gitlab.ios-demo
</td>
</tr>
<tr
- class=""
role="row"
>
<td
aria-colindex="1"
- class=""
role="cell"
>
<strong>
@@ -345,21 +281,16 @@ exports[`Secure File Metadata Modal when a .mobileprovision file is supplied mat
</td>
<td
aria-colindex="2"
- class=""
role="cell"
>
-
- 33669367788748363528491290218354043267
-
+ 33669367788748363528491290218354043267
</td>
</tr>
<tr
- class=""
role="row"
>
<td
aria-colindex="1"
- class=""
role="cell"
>
<strong>
@@ -368,18 +299,12 @@ exports[`Secure File Metadata Modal when a .mobileprovision file is supplied mat
</td>
<td
aria-colindex="2"
- class=""
role="cell"
>
-
- August 1, 2023 at 11:15:13 PM GMT
-
+ August 1, 2023 at 11:15:13 PM GMT
</td>
</tr>
- <!---->
- <!---->
</tbody>
- <!---->
</table>
</div>
</div>
diff --git a/spec/frontend/clusters/components/__snapshots__/new_cluster_spec.js.snap b/spec/frontend/clusters/components/__snapshots__/new_cluster_spec.js.snap
index 21ffda8578a..f90acb5cb22 100644
--- a/spec/frontend/clusters/components/__snapshots__/new_cluster_spec.js.snap
+++ b/spec/frontend/clusters/components/__snapshots__/new_cluster_spec.js.snap
@@ -1,9 +1,20 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`NewCluster renders the cluster component correctly 1`] = `
-"<div class=\\"gl-pt-4\\">
- <h4>Enter your Kubernetes cluster certificate details</h4>
- <p>Enter details about your cluster. <b-link-stub href=\\"/help/user/project/clusters/add_existing_cluster\\" class=\\"gl-link\\">How do I use a certificate to connect to my cluster?</b-link-stub>
+<div
+ class="gl-pt-4"
+>
+ <h4>
+ Enter your Kubernetes cluster certificate details
+ </h4>
+ <p>
+ Enter details about your cluster.
+ <b-link-stub
+ class="gl-link"
+ href="/help/user/project/clusters/add_existing_cluster"
+ >
+ How do I use a certificate to connect to my cluster?
+ </b-link-stub>
</p>
-</div>"
+</div>
`;
diff --git a/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap b/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
index 67b0ecdf7eb..b5fc3247165 100644
--- a/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
+++ b/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
@@ -5,42 +5,27 @@ exports[`Remove cluster confirmation modal renders buttons with modal included 1
class="gl-display-flex"
>
<button
- class="btn gl-mr-3 btn-danger btn-md gl-button"
+ class="btn btn-danger btn-md gl-button gl-mr-3"
data-testid="remove-integration-and-resources-button"
type="button"
>
- <!---->
-
- <!---->
-
<span
class="gl-button-text"
>
-
- Remove integration and resources
-
+ Remove integration and resources
</span>
</button>
-
<button
- class="btn btn-danger btn-md gl-button btn-danger-secondary"
+ class="btn btn-danger btn-danger-secondary btn-md gl-button"
data-testid="remove-integration-button"
type="button"
>
- <!---->
-
- <!---->
-
<span
class="gl-button-text"
>
-
- Remove integration
-
+ Remove integration
</span>
</button>
-
- <!---->
</div>
`;
@@ -49,63 +34,44 @@ exports[`Remove cluster confirmation modal two buttons open modal with "cleanup"
class="gl-display-flex"
>
<button
- class="btn gl-mr-3 btn-danger btn-md gl-button"
+ class="btn btn-danger btn-md gl-button gl-mr-3"
data-testid="remove-integration-and-resources-button"
type="button"
>
- <!---->
-
- <!---->
-
<span
class="gl-button-text"
>
-
- Remove integration and resources
-
+ Remove integration and resources
</span>
</button>
-
<button
- class="btn btn-danger btn-md gl-button btn-danger-secondary"
+ class="btn btn-danger btn-danger-secondary btn-md gl-button"
data-testid="remove-integration-button"
type="button"
>
- <!---->
-
- <!---->
-
<span
class="gl-button-text"
>
-
- Remove integration
-
+ Remove integration
</span>
</button>
-
<div
kind="danger"
>
<p>
You are about to remove your cluster integration and all GitLab-created resources associated with this cluster.
</p>
-
<div>
-
This will permanently delete the following resources:
-
<ul>
<li>
Any project namespaces
</li>
-
<li>
<code>
clusterroles
</code>
</li>
-
<li>
<code>
clusterrolebindings
@@ -113,15 +79,13 @@ exports[`Remove cluster confirmation modal two buttons open modal with "cleanup"
</li>
</ul>
</div>
-
<strong>
- To remove your integration and resources, type
+ To remove your integration and resources, type
<code>
my-test-cluster
</code>
- to confirm:
+ to confirm:
</strong>
-
<form
action="clusterPath"
class="gl-mb-5"
@@ -132,27 +96,23 @@ exports[`Remove cluster confirmation modal two buttons open modal with "cleanup"
type="hidden"
value="delete"
/>
-
<input
name="authenticity_token"
type="hidden"
/>
-
<input
name="cleanup"
type="hidden"
value="true"
/>
-
<input
autocomplete="off"
- class="gl-form-input form-control"
- id="__BVID__14"
+ class="form-control gl-form-input"
+ id="reference-0"
name="confirm_cluster_name_input"
type="text"
/>
</form>
-
<span>
If you do not wish to delete all associated GitLab resources, you can simply remove the integration.
</span>
@@ -165,58 +125,40 @@ exports[`Remove cluster confirmation modal two buttons open modal without "clean
class="gl-display-flex"
>
<button
- class="btn gl-mr-3 btn-danger btn-md gl-button"
+ class="btn btn-danger btn-md gl-button gl-mr-3"
data-testid="remove-integration-and-resources-button"
type="button"
>
- <!---->
-
- <!---->
-
<span
class="gl-button-text"
>
-
- Remove integration and resources
-
+ Remove integration and resources
</span>
</button>
-
<button
- class="btn btn-danger btn-md gl-button btn-danger-secondary"
+ class="btn btn-danger btn-danger-secondary btn-md gl-button"
data-testid="remove-integration-button"
type="button"
>
- <!---->
-
- <!---->
-
<span
class="gl-button-text"
>
-
- Remove integration
-
+ Remove integration
</span>
</button>
-
<div
kind="danger"
>
<p>
You are about to remove your cluster integration.
</p>
-
- <!---->
-
<strong>
- To remove your integration, type
+ To remove your integration, type
<code>
my-test-cluster
</code>
- to confirm:
+ to confirm:
</strong>
-
<form
action="clusterPath"
class="gl-mb-5"
@@ -227,28 +169,23 @@ exports[`Remove cluster confirmation modal two buttons open modal without "clean
type="hidden"
value="delete"
/>
-
<input
name="authenticity_token"
type="hidden"
/>
-
<input
name="cleanup"
type="hidden"
value="true"
/>
-
<input
autocomplete="off"
- class="gl-form-input form-control"
- id="__BVID__21"
+ class="form-control gl-form-input"
+ id="reference-0"
name="confirm_cluster_name_input"
type="text"
/>
</form>
-
- <!---->
</div>
</div>
`;
diff --git a/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap b/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap
index 36d2c2cabc5..1c2bdd2f8bc 100644
--- a/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap
+++ b/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap
@@ -2,14 +2,13 @@
exports[`Code navigation popover component renders popover 1`] = `
<div
- class="popover code-navigation-popover popover-font-size-normal gl-popover bs-popover-bottom show"
+ class="bs-popover-bottom code-navigation-popover gl-popover popover popover-font-size-normal show"
style="left: 0px; top: 0px;"
>
<div
class="arrow"
style="left: 0px;"
/>
-
<gl-tabs-stub
contentclass="gl-py-0"
navclass="gl-hidden"
@@ -21,13 +20,11 @@ exports[`Code navigation popover component renders popover 1`] = `
titlelinkclass=""
>
<div
- class="overflow-auto code-navigation-popover-container"
+ class="code-navigation-popover-container overflow-auto"
>
- <div
- class=""
- >
+ <div>
<pre
- class="border-0 bg-transparent m-0 code highlight text-wrap"
+ class="bg-transparent border-0 code highlight m-0 text-wrap"
>
<span
class="line"
@@ -39,9 +36,8 @@ exports[`Code navigation popover component renders popover 1`] = `
function
</span>
<span>
- main() {
+ main() {
</span>
-
<br />
</span>
<span
@@ -51,15 +47,13 @@ exports[`Code navigation popover component renders popover 1`] = `
<span>
}
</span>
-
<br />
</span>
</pre>
</div>
</div>
-
<div
- class="popover-body border-top"
+ class="border-top popover-body"
>
<gl-button-stub
buttontextclasses=""
@@ -72,25 +66,19 @@ exports[`Code navigation popover component renders popover 1`] = `
target="_blank"
variant="default"
>
-
Go to definition
-
</gl-button-stub>
</div>
</gl-tab-stub>
-
<gl-tab-stub
class="py-2"
data-testid="references-tab"
titlelinkclass=""
>
-
<p
class="gl-my-4 gl-px-4"
>
-
No references found
-
</p>
</gl-tab-stub>
</gl-tabs-stub>
diff --git a/spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap b/spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap
index 60c87aa10eb..24b2677f497 100644
--- a/spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap
+++ b/spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap
@@ -13,23 +13,20 @@ exports[`Comment templates list item component renders list item 1`] = `
>
test
</h6>
-
<div
class="gl-ml-auto"
>
<div
- class="gl-new-dropdown gl-disclosure-dropdown"
+ class="gl-disclosure-dropdown gl-new-dropdown"
>
<button
- aria-controls="base-dropdown-7"
- aria-labelledby="actions-toggle-3"
- class="btn btn-default btn-md gl-button btn-default-tertiary gl-new-dropdown-toggle gl-new-dropdown-icon-only gl-new-dropdown-toggle-no-caret"
+ aria-controls="reference-1"
+ aria-labelledby="reference-0"
+ class="btn btn-default btn-default-tertiary btn-md gl-button gl-new-dropdown-icon-only gl-new-dropdown-toggle gl-new-dropdown-toggle-no-caret"
data-testid="base-dropdown-toggle"
- id="actions-toggle-3"
+ id="reference-0"
type="button"
>
- <!---->
-
<svg
aria-hidden="true"
class="gl-button-icon gl-icon s16"
@@ -40,36 +37,29 @@ exports[`Comment templates list item component renders list item 1`] = `
href="file-mock#ellipsis_v"
/>
</svg>
-
<span
class="gl-button-text"
>
<span
class="gl-new-dropdown-button-text gl-sr-only"
>
-
- Comment template actions
-
+ Comment template actions
</span>
-
- <!---->
</span>
</button>
-
<div
class="gl-new-dropdown-panel gl-w-31!"
data-testid="base-dropdown-menu"
- id="base-dropdown-7"
+ id="reference-1"
>
<div
class="gl-new-dropdown-inner"
>
-
<ul
- aria-labelledby="actions-toggle-3"
+ aria-labelledby="reference-0"
class="gl-new-dropdown-contents"
data-testid="disclosure-content"
- id="disclosure-4"
+ id="reference-2"
tabindex="-1"
>
<li
@@ -86,9 +76,7 @@ exports[`Comment templates list item component renders list item 1`] = `
<span
class="gl-new-dropdown-item-text-wrapper"
>
-
- Edit
-
+ Edit
</span>
</button>
</li>
@@ -106,36 +94,25 @@ exports[`Comment templates list item component renders list item 1`] = `
<span
class="gl-new-dropdown-item-text-wrapper"
>
-
- Delete
-
+ Delete
</span>
</button>
</li>
</ul>
-
</div>
</div>
</div>
-
<div
class="gl-tooltip"
>
-
Comment template actions
-
</div>
</div>
</div>
-
<div
- class="gl-font-monospace gl-white-space-pre-line gl-font-sm gl-mt-n5"
+ class="gl-font-monospace gl-font-sm gl-mt-n5 gl-white-space-pre-line"
>
-
/assign_reviewer
-
</div>
-
- <!---->
</li>
`;
diff --git a/spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js b/spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js
index 3b3e5098857..891cd0a6b83 100644
--- a/spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js
+++ b/spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js
@@ -7,11 +7,11 @@ import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
import CommitBoxPipelineMiniGraph from '~/projects/commit_box/info/components/commit_box_pipeline_mini_graph.vue';
-import PipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/pipeline_mini_graph.vue';
-import LegacyPipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/legacy_pipeline_mini_graph.vue';
+import PipelineMiniGraph from '~/ci/pipeline_mini_graph/pipeline_mini_graph.vue';
+import LegacyPipelineMiniGraph from '~/ci/pipeline_mini_graph/legacy_pipeline_mini_graph.vue';
import { COMMIT_BOX_POLL_INTERVAL } from '~/projects/commit_box/info/constants';
-import getLinkedPipelinesQuery from '~/pipelines/graphql/queries/get_linked_pipelines.query.graphql';
-import getPipelineStagesQuery from '~/pipelines/graphql/queries/get_pipeline_stages.query.graphql';
+import getLinkedPipelinesQuery from '~/ci/pipeline_details/graphql/queries/get_linked_pipelines.query.graphql';
+import getPipelineStagesQuery from '~/ci/pipeline_mini_graph/graphql/queries/get_pipeline_stages.query.graphql';
import * as sharedGraphQlUtils from '~/graphql_shared/utils';
import {
mockDownstreamQueryResponse,
diff --git a/spec/frontend/commit/pipelines/pipelines_table_spec.js b/spec/frontend/commit/pipelines/legacy_pipelines_table_wrapper_spec.js
index 009ec68ddcf..4af292e3588 100644
--- a/spec/frontend/commit/pipelines/pipelines_table_spec.js
+++ b/spec/frontend/commit/pipelines/legacy_pipelines_table_wrapper_spec.js
@@ -7,7 +7,7 @@ import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import { stubComponent } from 'helpers/stub_component';
import waitForPromises from 'helpers/wait_for_promises';
import Api from '~/api';
-import PipelinesTable from '~/commit/pipelines/pipelines_table.vue';
+import LegacyPipelinesTableWraper from '~/commit/pipelines/legacy_pipelines_table_wrapper.vue';
import {
HTTP_STATUS_BAD_REQUEST,
HTTP_STATUS_INTERNAL_SERVER_ERROR,
@@ -15,7 +15,7 @@ import {
HTTP_STATUS_UNAUTHORIZED,
} from '~/lib/utils/http_status';
import { createAlert } from '~/alert';
-import { TOAST_MESSAGE } from '~/pipelines/constants';
+import { TOAST_MESSAGE } from '~/ci/pipeline_details/constants';
import axios from '~/lib/utils/axios_utils';
const $toast = {
@@ -42,7 +42,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
const createComponent = ({ props = {} } = {}) => {
wrapper = extendedWrapper(
- mount(PipelinesTable, {
+ mount(LegacyPipelinesTableWraper, {
propsData: {
endpoint: 'endpoint.json',
emptyStateSvgPath: 'foo',
diff --git a/spec/frontend/confidential_merge_request/components/__snapshots__/project_form_group_spec.js.snap b/spec/frontend/confidential_merge_request/components/__snapshots__/project_form_group_spec.js.snap
index d9f161b47b1..b17987dad89 100644
--- a/spec/frontend/confidential_merge_request/components/__snapshots__/project_form_group_spec.js.snap
+++ b/spec/frontend/confidential_merge_request/components/__snapshots__/project_form_group_spec.js.snap
@@ -7,17 +7,13 @@ exports[`Confidential merge request project form group component renders empty s
<label>
Project
</label>
-
<div>
- <!---->
-
<p
- class="gl-text-gray-600 gl-mt-1 gl-mb-0"
+ class="gl-mb-0 gl-mt-1 gl-text-gray-600"
>
-
- No forks are available to you.
+ No forks are available to you.
<br />
- To protect this issue's confidentiality,
+ To protect this issue's confidentiality,
<a
class="help-link"
href="https://test.com"
@@ -25,9 +21,9 @@ exports[`Confidential merge request project form group component renders empty s
>
fork this project
</a>
- and set the fork's visibility to private.
+ and set the fork's visibility to private.
<gl-link-stub
- class="gl-w-auto gl-p-0 gl-display-inline-block gl-bg-transparent"
+ class="gl-bg-transparent gl-display-inline-block gl-p-0 gl-w-auto"
href="/help"
target="_blank"
>
@@ -36,7 +32,6 @@ exports[`Confidential merge request project form group component renders empty s
>
Read more
</span>
-
<gl-icon-stub
name="question-o"
size="16"
@@ -54,21 +49,17 @@ exports[`Confidential merge request project form group component renders fork dr
<label>
Project
</label>
-
<div>
<dropdown-stub
projects="[object Object],[object Object]"
selectedproject="[object Object]"
/>
-
<p
- class="gl-text-gray-600 gl-mt-1 gl-mb-0"
+ class="gl-mb-0 gl-mt-1 gl-text-gray-600"
>
-
- To protect this issue's confidentiality, a private fork of this project was selected.
-
+ To protect this issue's confidentiality, a private fork of this project was selected.
<gl-link-stub
- class="gl-w-auto gl-p-0 gl-display-inline-block gl-bg-transparent"
+ class="gl-bg-transparent gl-display-inline-block gl-p-0 gl-w-auto"
href="/help"
target="_blank"
>
@@ -77,7 +68,6 @@ exports[`Confidential merge request project form group component renders fork dr
>
Read more
</span>
-
<gl-icon-stub
name="question-o"
size="16"
diff --git a/spec/frontend/content_editor/components/__snapshots__/toolbar_button_spec.js.snap b/spec/frontend/content_editor/components/__snapshots__/toolbar_button_spec.js.snap
index a328f79e4e7..a708f7d5f47 100644
--- a/spec/frontend/content_editor/components/__snapshots__/toolbar_button_spec.js.snap
+++ b/spec/frontend/content_editor/components/__snapshots__/toolbar_button_spec.js.snap
@@ -1,9 +1,19 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`content_editor/components/toolbar_button displays tertiary, medium button with a provided label and icon 1`] = `
-"<b-button-stub size=\\"sm\\" tag=\\"button\\" type=\\"button\\" variant=\\"default\\" aria-label=\\"Bold\\" title=\\"Bold\\" class=\\"gl-mr-3 gl-button btn-default-tertiary btn-icon\\">
- <!---->
- <gl-icon-stub name=\\"bold\\" size=\\"16\\" class=\\"gl-button-icon\\"></gl-icon-stub>
- <!---->
-</b-button-stub>"
+<b-button-stub
+ aria-label="Bold"
+ class="btn-default-tertiary btn-icon gl-button gl-mr-3"
+ size="sm"
+ tag="button"
+ title="Bold"
+ type="button"
+ variant="default"
+>
+ <gl-icon-stub
+ class="gl-button-icon"
+ name="bold"
+ size="16"
+ />
+</b-button-stub>
`;
diff --git a/spec/frontend/content_editor/components/wrappers/__snapshots__/table_of_contents_spec.js.snap b/spec/frontend/content_editor/components/wrappers/__snapshots__/table_of_contents_spec.js.snap
index a9d42769789..e058f05fec4 100644
--- a/spec/frontend/content_editor/components/wrappers/__snapshots__/table_of_contents_spec.js.snap
+++ b/spec/frontend/content_editor/components/wrappers/__snapshots__/table_of_contents_spec.js.snap
@@ -2,23 +2,18 @@
exports[`content/components/wrappers/table_of_contents collects all headings and renders a nested list of headings 1`] = `
<div
- class="table-of-contents gl-border-1 gl-border-solid gl-border-gray-100 gl-mb-5 gl-p-4!"
+ class="gl-border-1 gl-border-gray-100 gl-border-solid gl-mb-5 gl-p-4! table-of-contents"
data-testid="table-of-contents"
>
-
Table of contents
-
<li
dir="auto"
>
<a
href="#"
>
-
- Heading 1
-
+ Heading 1
</a>
-
<ul
dir="auto"
>
@@ -28,11 +23,8 @@ exports[`content/components/wrappers/table_of_contents collects all headings and
<a
href="#"
>
-
- Heading 1.1
-
+ Heading 1.1
</a>
-
<ul
dir="auto"
>
@@ -42,12 +34,8 @@ exports[`content/components/wrappers/table_of_contents collects all headings and
<a
href="#"
>
-
- Heading 1.1.1
-
+ Heading 1.1.1
</a>
-
- <!---->
</li>
</ul>
</li>
@@ -57,11 +45,8 @@ exports[`content/components/wrappers/table_of_contents collects all headings and
<a
href="#"
>
-
- Heading 1.2
-
+ Heading 1.2
</a>
-
<ul
dir="auto"
>
@@ -71,12 +56,8 @@ exports[`content/components/wrappers/table_of_contents collects all headings and
<a
href="#"
>
-
- Heading 1.2.1
-
+ Heading 1.2.1
</a>
-
- <!---->
</li>
</ul>
</li>
@@ -86,12 +67,8 @@ exports[`content/components/wrappers/table_of_contents collects all headings and
<a
href="#"
>
-
- Heading 1.3
-
+ Heading 1.3
</a>
-
- <!---->
</li>
<li
dir="auto"
@@ -99,11 +76,8 @@ exports[`content/components/wrappers/table_of_contents collects all headings and
<a
href="#"
>
-
- Heading 1.4
-
+ Heading 1.4
</a>
-
<ul
dir="auto"
>
@@ -113,12 +87,8 @@ exports[`content/components/wrappers/table_of_contents collects all headings and
<a
href="#"
>
-
- Heading 1.4.1
-
+ Heading 1.4.1
</a>
-
- <!---->
</li>
</ul>
</li>
@@ -130,12 +100,8 @@ exports[`content/components/wrappers/table_of_contents collects all headings and
<a
href="#"
>
-
- Heading 2
-
+ Heading 2
</a>
-
- <!---->
</li>
</div>
`;
diff --git a/spec/frontend/content_editor/components/wrappers/code_block_spec.js b/spec/frontend/content_editor/components/wrappers/code_block_spec.js
index e802681dfc6..0093393eceb 100644
--- a/spec/frontend/content_editor/components/wrappers/code_block_spec.js
+++ b/spec/frontend/content_editor/components/wrappers/code_block_spec.js
@@ -11,6 +11,9 @@ import CodeBlockWrapper from '~/content_editor/components/wrappers/code_block.vu
import codeBlockLanguageLoader from '~/content_editor/services/code_block_language_loader';
import { emitEditorEvent, createTestEditor, mockChainedCommands } from '../../test_utils';
+// Disabled due to eslint reporting errors for inline snapshots
+/* eslint-disable no-irregular-whitespace */
+
const SAMPLE_README_CONTENT = `# Sample README
This is a sample README.
@@ -212,12 +215,20 @@ describe('content/components/wrappers/code_block', () => {
it('shows a code suggestion block', () => {
expect(findCodeSuggestionBoxText()).toContain('Suggested change From line 5 to 5');
- expect(findCodeDeleted()).toMatchInlineSnapshot(
- `"<code data-line-number=\\"5\\">## Usage\u200b</code>"`,
- );
- expect(findCodeAdded()).toMatchInlineSnapshot(
- `"<code data-line-number=\\"5\\">\u200b</code>"`,
- );
+ expect(findCodeDeleted()).toMatchInlineSnapshot(`
+ <code
+ data-line-number="5"
+ >
+ ## Usage​
+ </code>
+ `);
+ expect(findCodeAdded()).toMatchInlineSnapshot(`
+ <code
+ data-line-number="5"
+ >
+ ​
+ </code>
+ `);
});
describe('decrement line start button', () => {
@@ -232,9 +243,11 @@ describe('content/components/wrappers/code_block', () => {
expect(findCodeSuggestionBoxText()).toContain('Suggested change From line 4 to 5');
expect(findCodeDeleted()).toMatchInlineSnapshot(`
- "<code data-line-number=\\"4\\">\u200b
+ <code
+ data-line-number="4"
+ >
+ ​
</code>
- <code data-line-number=\\"5\\">## Usage\u200b</code>"
`);
});
@@ -248,15 +261,11 @@ describe('content/components/wrappers/code_block', () => {
expect(findCodeSuggestionBoxText()).toContain('Suggested change From line 1 to 5');
expect(findCodeDeleted()).toMatchInlineSnapshot(`
- "<code data-line-number=\\"1\\"># Sample README\u200b
- </code>
- <code data-line-number=\\"2\\">\u200b
- </code>
- <code data-line-number=\\"3\\">This is a sample README.\u200b
- </code>
- <code data-line-number=\\"4\\">\u200b
+ <code
+ data-line-number="1"
+ >
+ # Sample README​
</code>
- <code data-line-number=\\"5\\">## Usage\u200b</code>"
`);
expect(button.attributes('disabled')).toBe('disabled');
@@ -291,9 +300,11 @@ describe('content/components/wrappers/code_block', () => {
expect(findCodeSuggestionBoxText()).toContain('Suggested change From line 4 to 5');
expect(findCodeDeleted()).toMatchInlineSnapshot(`
- "<code data-line-number=\\"4\\">\u200b
+ <code
+ data-line-number="4"
+ >
+ ​
</code>
- <code data-line-number=\\"5\\">## Usage\u200b</code>"
`);
});
});
@@ -326,9 +337,11 @@ describe('content/components/wrappers/code_block', () => {
expect(findCodeSuggestionBoxText()).toContain('Suggested change From line 5 to 6');
expect(findCodeDeleted()).toMatchInlineSnapshot(`
- "<code data-line-number=\\"5\\">## Usage\u200b
+ <code
+ data-line-number="5"
+ >
+ ## Usage​
</code>
- <code data-line-number=\\"6\\">\u200b</code>"
`);
});
});
@@ -345,9 +358,11 @@ describe('content/components/wrappers/code_block', () => {
expect(findCodeSuggestionBoxText()).toContain('Suggested change From line 5 to 6');
expect(findCodeDeleted()).toMatchInlineSnapshot(`
- "<code data-line-number=\\"5\\">## Usage\u200b
+ <code
+ data-line-number="5"
+ >
+ ## Usage​
</code>
- <code data-line-number=\\"6\\">\u200b</code>"
`);
});
@@ -361,15 +376,11 @@ describe('content/components/wrappers/code_block', () => {
expect(findCodeSuggestionBoxText()).toContain('Suggested change From line 5 to 9');
expect(findCodeDeleted()).toMatchInlineSnapshot(`
- "<code data-line-number=\\"5\\">## Usage\u200b
- </code>
- <code data-line-number=\\"6\\">\u200b
- </code>
- <code data-line-number=\\"7\\">\`\`\`yaml\u200b
- </code>
- <code data-line-number=\\"8\\">foo: bar\u200b
+ <code
+ data-line-number="5"
+ >
+ ## Usage​
</code>
- <code data-line-number=\\"9\\">\`\`\`\u200b</code>"
`);
expect(button.attributes('disabled')).toBe('disabled');
diff --git a/spec/frontend/content_editor/services/markdown_serializer_spec.js b/spec/frontend/content_editor/services/markdown_serializer_spec.js
index 7be8114902a..3eb00f69345 100644
--- a/spec/frontend/content_editor/services/markdown_serializer_spec.js
+++ b/spec/frontend/content_editor/services/markdown_serializer_spec.js
@@ -175,7 +175,7 @@ describe('markdownSerializer', () => {
inlineDiff({ type: 'deletion' }, '-10 lines'),
),
),
- ).toBe('{+\\+30 lines+}{-\\-10 lines-}');
+ ).toBe('{++30 lines+}{--10 lines-}');
});
it('correctly serializes highlight', () => {
diff --git a/spec/frontend/contribution_events/components/contribution_event/contribution_event_created_spec.js b/spec/frontend/contribution_events/components/contribution_event/contribution_event_created_spec.js
index 4be4aa50dfc..50b12244a55 100644
--- a/spec/frontend/contribution_events/components/contribution_event/contribution_event_created_spec.js
+++ b/spec/frontend/contribution_events/components/contribution_event/contribution_event_created_spec.js
@@ -23,15 +23,15 @@ describe('ContributionEventCreated', () => {
};
describe.each`
- event | expectedMessage | expectedIconName | expectedIconClass
- ${eventProjectCreated()} | ${'Created project %{resourceParentLink}.'} | ${'status_open'} | ${'gl-text-green-500'}
- ${eventMilestoneCreated()} | ${'Opened milestone %{targetLink} in %{resourceParentLink}.'} | ${'status_open'} | ${'gl-text-green-500'}
- ${eventIssueCreated()} | ${'Opened issue %{targetLink} in %{resourceParentLink}.'} | ${'status_open'} | ${'gl-text-green-500'}
- ${eventMergeRequestCreated()} | ${'Opened merge request %{targetLink} in %{resourceParentLink}.'} | ${'status_open'} | ${'gl-text-green-500'}
- ${eventWikiPageCreated()} | ${'Created wiki page %{targetLink} in %{resourceParentLink}.'} | ${'status_open'} | ${'gl-text-green-500'}
- ${eventDesignCreated()} | ${'Added design %{targetLink} in %{resourceParentLink}.'} | ${'upload'} | ${null}
- ${{ resource_parent: { type: 'unsupported type' } }} | ${'Created resource.'} | ${'status_open'} | ${'gl-text-green-500'}
- ${{ target: { type: 'unsupported type' } }} | ${'Created resource.'} | ${'status_open'} | ${'gl-text-green-500'}
+ event | expectedMessage | expectedIconName | expectedIconClass
+ ${eventProjectCreated()} | ${'Created project %{resourceParentLink}.'} | ${'status_open'} | ${'gl-text-green-500'}
+ ${eventMilestoneCreated()} | ${'Opened milestone %{targetLink} in %{resourceParentLink}.'} | ${'status_open'} | ${'gl-text-green-500'}
+ ${eventIssueCreated()} | ${'Opened issue %{targetLink} in %{resourceParentLink}.'} | ${'status_open'} | ${'gl-text-green-500'}
+ ${eventMergeRequestCreated()} | ${'Opened merge request %{targetLink} in %{resourceParentLink}.'} | ${'status_open'} | ${'gl-text-green-500'}
+ ${eventWikiPageCreated()} | ${'Created wiki page %{targetLink} in %{resourceParentLink}.'} | ${'status_open'} | ${'gl-text-green-500'}
+ ${eventDesignCreated()} | ${'Added design %{targetLink} in %{resourceParentLink}.'} | ${'upload'} | ${null}
+ ${{ resource_parent: { type: 'unsupported type' }, target: { type: null } }} | ${'Created resource.'} | ${'status_open'} | ${'gl-text-green-500'}
+ ${{ target: { type: 'unsupported type' } }} | ${'Created resource.'} | ${'status_open'} | ${'gl-text-green-500'}
`(
'when event target type is $event.target.type',
({ event, expectedMessage, expectedIconName, expectedIconClass }) => {
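The tagged-template table in the hunk above is plain Jest `describe.each`; the following is a minimal, self-contained sketch (not GitLab code, with made-up rows) of how such a table expands into one suite per row:

// Each row becomes its own describe block; the column headers name the
// variables destructured in the callback, and `$event.target.type` in the
// title is Jest's nested-value interpolation.
describe.each`
  event                            | expectedMessage
  ${{ target: { type: 'Issue' } }} | ${'Opened issue %{targetLink} in %{resourceParentLink}.'}
  ${{ target: { type: null } }}    | ${'Created resource.'}
`('when event target type is $event.target.type', ({ event, expectedMessage }) => {
  it('receives the row values', () => {
    expect(typeof expectedMessage).toBe('string');
    expect(event.target).toBeDefined();
  });
});
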
diff --git a/spec/frontend/contribution_events/components/contribution_event/contribution_event_destroyed_spec.js b/spec/frontend/contribution_events/components/contribution_event/contribution_event_destroyed_spec.js
new file mode 100644
index 00000000000..b296b75ce0a
--- /dev/null
+++ b/spec/frontend/contribution_events/components/contribution_event/contribution_event_destroyed_spec.js
@@ -0,0 +1,32 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import ContributionEventDestroyed from '~/contribution_events/components/contribution_event/contribution_event_destroyed.vue';
+import ContributionEventBase from '~/contribution_events/components/contribution_event/contribution_event_base.vue';
+import { eventDesignDestroyed, eventWikiPageDestroyed, eventMilestoneDestroyed } from '../../utils';
+
+describe('ContributionEventDestroyed', () => {
+ let wrapper;
+
+ const createComponent = ({ propsData }) => {
+ wrapper = shallowMountExtended(ContributionEventDestroyed, {
+ propsData,
+ });
+ };
+
+ describe.each`
+ event | expectedMessage | iconName
+ ${eventDesignDestroyed()} | ${'Archived design in %{resourceParentLink}.'} | ${'archive'}
+ ${eventWikiPageDestroyed()} | ${'Deleted wiki page in %{resourceParentLink}.'} | ${'remove'}
+ ${eventMilestoneDestroyed()} | ${'Deleted milestone in %{resourceParentLink}.'} | ${'remove'}
+ ${{ target: { type: 'unsupported type' } }} | ${'Deleted resource.'} | ${'remove'}
+ `('when event target type is $event.target.type', ({ event, expectedMessage, iconName }) => {
+ it('renders `ContributionEventBase` with correct props', () => {
+ createComponent({ propsData: { event } });
+
+ expect(wrapper.findComponent(ContributionEventBase).props()).toMatchObject({
+ event,
+ message: expectedMessage,
+ iconName,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/contribution_events/components/contribution_event/contribution_event_updated_spec.js b/spec/frontend/contribution_events/components/contribution_event/contribution_event_updated_spec.js
new file mode 100644
index 00000000000..e8e25b24dc9
--- /dev/null
+++ b/spec/frontend/contribution_events/components/contribution_event/contribution_event_updated_spec.js
@@ -0,0 +1,31 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import ContributionEventUpdated from '~/contribution_events/components/contribution_event/contribution_event_updated.vue';
+import ContributionEventBase from '~/contribution_events/components/contribution_event/contribution_event_base.vue';
+import { eventDesignUpdated, eventWikiPageUpdated } from '../../utils';
+
+describe('ContributionEventUpdated', () => {
+ let wrapper;
+
+ const createComponent = ({ propsData }) => {
+ wrapper = shallowMountExtended(ContributionEventUpdated, {
+ propsData,
+ });
+ };
+
+ describe.each`
+ event | expectedMessage
+ ${eventDesignUpdated()} | ${'Updated design %{targetLink} in %{resourceParentLink}.'}
+ ${eventWikiPageUpdated()} | ${'Updated wiki page %{targetLink} in %{resourceParentLink}.'}
+ ${{ target: { type: 'unsupported type' } }} | ${'Updated resource.'}
+ `('when event target type is $event.target.type', ({ event, expectedMessage }) => {
+ it('renders `ContributionEventBase` with correct props', () => {
+ createComponent({ propsData: { event } });
+
+ expect(wrapper.findComponent(ContributionEventBase).props()).toMatchObject({
+ event,
+ message: expectedMessage,
+ iconName: 'pencil',
+ });
+ });
+ });
+});
diff --git a/spec/frontend/contribution_events/components/contribution_events_spec.js b/spec/frontend/contribution_events/components/contribution_events_spec.js
index 7493d248e2b..dc460a698bd 100644
--- a/spec/frontend/contribution_events/components/contribution_events_spec.js
+++ b/spec/frontend/contribution_events/components/contribution_events_spec.js
@@ -11,6 +11,8 @@ import ContributionEventCreated from '~/contribution_events/components/contribut
import ContributionEventClosed from '~/contribution_events/components/contribution_event/contribution_event_closed.vue';
import ContributionEventReopened from '~/contribution_events/components/contribution_event/contribution_event_reopened.vue';
import ContributionEventCommented from '~/contribution_events/components/contribution_event/contribution_event_commented.vue';
+import ContributionEventUpdated from '~/contribution_events/components/contribution_event/contribution_event_updated.vue';
+import ContributionEventDestroyed from '~/contribution_events/components/contribution_event/contribution_event_destroyed.vue';
import {
eventApproved,
eventExpired,
@@ -23,6 +25,8 @@ import {
eventClosed,
eventReopened,
eventCommented,
+ eventUpdated,
+ eventDestroyed,
} from '../utils';
describe('ContributionEvents', () => {
@@ -43,6 +47,8 @@ describe('ContributionEvents', () => {
eventClosed(),
eventReopened(),
eventCommented(),
+ eventUpdated(),
+ eventDestroyed(),
],
},
});
@@ -61,6 +67,8 @@ describe('ContributionEvents', () => {
${ContributionEventClosed} | ${eventClosed()}
${ContributionEventReopened} | ${eventReopened()}
${ContributionEventCommented} | ${eventCommented()}
+ ${ContributionEventUpdated} | ${eventUpdated()}
+ ${ContributionEventDestroyed} | ${eventDestroyed()}
`(
'renders `$expectedComponent.name` component and passes expected event',
({ expectedComponent, expectedEvent }) => {
diff --git a/spec/frontend/contribution_events/components/target_link_spec.js b/spec/frontend/contribution_events/components/target_link_spec.js
index 40650b3585c..968a9d3bd3d 100644
--- a/spec/frontend/contribution_events/components/target_link_spec.js
+++ b/spec/frontend/contribution_events/components/target_link_spec.js
@@ -49,7 +49,7 @@ describe('TargetLink', () => {
});
});
- describe('when target is not defined', () => {
+ describe('when target type is not defined', () => {
beforeEach(() => {
createComponent({ propsData: { event: eventJoined() } });
});
diff --git a/spec/frontend/contribution_events/utils.js b/spec/frontend/contribution_events/utils.js
index 8b34506c6ac..f91a4dd800b 100644
--- a/spec/frontend/contribution_events/utils.js
+++ b/spec/frontend/contribution_events/utils.js
@@ -10,6 +10,8 @@ import {
EVENT_TYPE_CLOSED,
EVENT_TYPE_REOPENED,
EVENT_TYPE_COMMENTED,
+ EVENT_TYPE_UPDATED,
+ EVENT_TYPE_DESTROYED,
PUSH_EVENT_REF_TYPE_BRANCH,
PUSH_EVENT_REF_TYPE_TAG,
EVENT_TYPE_CREATED,
@@ -32,22 +34,12 @@ import {
COMMIT_NOTEABLE_TYPE,
} from '~/notes/constants';
+// Private finders
const findEventByAction = (action) => () => events.find((event) => event.action === action);
const findEventByActionAndTargetType = (action, targetType) => () =>
events.find((event) => event.action === action && event.target?.type === targetType);
const findEventByActionAndIssueType = (action, issueType) => () =>
events.find((event) => event.action === action && event.target.issue_type === issueType);
-
-export const eventApproved = findEventByAction(EVENT_TYPE_APPROVED);
-
-export const eventExpired = findEventByAction(EVENT_TYPE_EXPIRED);
-
-export const eventJoined = findEventByAction(EVENT_TYPE_JOINED);
-
-export const eventLeft = findEventByAction(EVENT_TYPE_LEFT);
-
-export const eventMerged = findEventByAction(EVENT_TYPE_MERGED);
-
const findPushEvent = ({
isNew = false,
isRemoved = false,
@@ -62,6 +54,45 @@ const findPushEvent = ({
ref.type === refType &&
commit.count === commitCount,
);
+const findEventByActionAndNoteableType = (action, noteableType) => () =>
+ events.find((event) => event.action === action && event.noteable?.type === noteableType);
+const findCommentedSnippet = (resourceParentType) => () =>
+ events.find(
+ (event) =>
+ event.action === EVENT_TYPE_COMMENTED &&
+ event.noteable?.type === SNIPPET_NOTEABLE_TYPE &&
+ event.resource_parent?.type === resourceParentType,
+ );
+const findUpdatedEvent = (targetType) =>
+ findEventByActionAndTargetType(EVENT_TYPE_UPDATED, targetType);
+const findDestroyedEvent = (targetType) =>
+ findEventByActionAndTargetType(EVENT_TYPE_DESTROYED, targetType);
+
+// Finders that are used by EE
+export const findCreatedEvent = (targetType) =>
+ findEventByActionAndTargetType(EVENT_TYPE_CREATED, targetType);
+export const findWorkItemCreatedEvent = (issueType) =>
+ findEventByActionAndIssueType(EVENT_TYPE_CREATED, issueType);
+export const findClosedEvent = (targetType) =>
+ findEventByActionAndTargetType(EVENT_TYPE_CREATED, targetType);
+export const findWorkItemClosedEvent = (issueType) =>
+ findEventByActionAndIssueType(EVENT_TYPE_CLOSED, issueType);
+export const findReopenedEvent = (targetType) =>
+ findEventByActionAndTargetType(EVENT_TYPE_REOPENED, targetType);
+export const findWorkItemReopenedEvent = (issueType) =>
+ findEventByActionAndIssueType(EVENT_TYPE_REOPENED, issueType);
+export const findCommentedEvent = (noteableType) =>
+ findEventByActionAndNoteableType(EVENT_TYPE_COMMENTED, noteableType);
+
+export const eventApproved = findEventByAction(EVENT_TYPE_APPROVED);
+
+export const eventExpired = findEventByAction(EVENT_TYPE_EXPIRED);
+
+export const eventJoined = findEventByAction(EVENT_TYPE_JOINED);
+
+export const eventLeft = findEventByAction(EVENT_TYPE_LEFT);
+
+export const eventMerged = findEventByAction(EVENT_TYPE_MERGED);
export const eventPushedNewBranch = findPushEvent({ isNew: true });
export const eventPushedNewTag = findPushEvent({ isNew: true, refType: PUSH_EVENT_REF_TYPE_TAG });
@@ -77,13 +108,7 @@ export const eventBulkPushedBranch = findPushEvent({ commitCount: 5 });
export const eventPrivate = () => ({ ...events[0], action: EVENT_TYPE_PRIVATE });
export const eventCreated = findEventByAction(EVENT_TYPE_CREATED);
-
-export const findCreatedEvent = (targetType) =>
- findEventByActionAndTargetType(EVENT_TYPE_CREATED, targetType);
-export const findWorkItemCreatedEvent = (issueType) =>
- findEventByActionAndIssueType(EVENT_TYPE_CREATED, issueType);
-
-export const eventProjectCreated = findCreatedEvent(undefined);
+export const eventProjectCreated = findCreatedEvent(null);
export const eventMilestoneCreated = findCreatedEvent(TARGET_TYPE_MILESTONE);
export const eventIssueCreated = findCreatedEvent(TARGET_TYPE_ISSUE);
export const eventMergeRequestCreated = findCreatedEvent(TARGET_TYPE_MERGE_REQUEST);
@@ -93,12 +118,6 @@ export const eventTaskCreated = findWorkItemCreatedEvent(WORK_ITEM_ISSUE_TYPE_TA
export const eventIncidentCreated = findWorkItemCreatedEvent(WORK_ITEM_ISSUE_TYPE_INCIDENT);
export const eventClosed = findEventByAction(EVENT_TYPE_CLOSED);
-
-export const findClosedEvent = (targetType) =>
- findEventByActionAndTargetType(EVENT_TYPE_CREATED, targetType);
-export const findWorkItemClosedEvent = (issueType) =>
- findEventByActionAndIssueType(EVENT_TYPE_CLOSED, issueType);
-
export const eventMilestoneClosed = findClosedEvent(TARGET_TYPE_MILESTONE);
export const eventIssueClosed = findClosedEvent(TARGET_TYPE_ISSUE);
export const eventMergeRequestClosed = findClosedEvent(TARGET_TYPE_MERGE_REQUEST);
@@ -108,12 +127,6 @@ export const eventTaskClosed = findWorkItemClosedEvent(WORK_ITEM_ISSUE_TYPE_TASK
export const eventIncidentClosed = findWorkItemClosedEvent(WORK_ITEM_ISSUE_TYPE_INCIDENT);
export const eventReopened = findEventByAction(EVENT_TYPE_REOPENED);
-
-export const findReopenedEvent = (targetType) =>
- findEventByActionAndTargetType(EVENT_TYPE_REOPENED, targetType);
-export const findWorkItemReopenedEvent = (issueType) =>
- findEventByActionAndIssueType(EVENT_TYPE_REOPENED, issueType);
-
export const eventMilestoneReopened = findReopenedEvent(TARGET_TYPE_MILESTONE);
export const eventMergeRequestReopened = findReopenedEvent(TARGET_TYPE_MERGE_REQUEST);
export const eventWikiPageReopened = findReopenedEvent(TARGET_TYPE_WIKI);
@@ -123,19 +136,6 @@ export const eventTaskReopened = findWorkItemReopenedEvent(WORK_ITEM_ISSUE_TYPE_
export const eventIncidentReopened = findWorkItemReopenedEvent(WORK_ITEM_ISSUE_TYPE_INCIDENT);
export const eventCommented = findEventByAction(EVENT_TYPE_COMMENTED);
-
-const findEventByActionAndNoteableType = (action, noteableType) => () =>
- events.find((event) => event.action === action && event.noteable?.type === noteableType);
-export const findCommentedEvent = (noteableType) =>
- findEventByActionAndNoteableType(EVENT_TYPE_COMMENTED, noteableType);
-export const findCommentedSnippet = (resourceParentType) => () =>
- events.find(
- (event) =>
- event.action === EVENT_TYPE_COMMENTED &&
- event.noteable?.type === SNIPPET_NOTEABLE_TYPE &&
- event.resource_parent?.type === resourceParentType,
- );
-
export const eventCommentedIssue = findCommentedEvent(ISSUE_NOTEABLE_TYPE);
export const eventCommentedMergeRequest = findCommentedEvent(MERGE_REQUEST_NOTEABLE_TYPE);
export const eventCommentedSnippet = findCommentedEvent(SNIPPET_NOTEABLE_TYPE);
@@ -153,3 +153,12 @@ export const eventCommentedCommit = () => ({
first_line_in_markdown: '\u003cp\u003eMy title 9\u003c/p\u003e',
},
});
+
+export const eventUpdated = findEventByAction(EVENT_TYPE_UPDATED);
+export const eventDesignUpdated = findUpdatedEvent(TARGET_TYPE_DESIGN);
+export const eventWikiPageUpdated = findUpdatedEvent(TARGET_TYPE_WIKI);
+
+export const eventDestroyed = findEventByAction(EVENT_TYPE_DESTROYED);
+export const eventDesignDestroyed = findDestroyedEvent(TARGET_TYPE_DESIGN);
+export const eventWikiPageDestroyed = findDestroyedEvent(TARGET_TYPE_WIKI);
+export const eventMilestoneDestroyed = findDestroyedEvent(TARGET_TYPE_MILESTONE);
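The helpers moved around in the hunk above are curried finders over a shared `events` fixture; here is a rough usage sketch (the fixture shape and the string literals below are placeholders for the real `events` array and the `EVENT_TYPE_*` / `TARGET_TYPE_*` constants):

// Each finder takes its search keys up front and returns a thunk, so the
// fixture lookup happens lazily when a spec calls e.g. eventMilestoneDestroyed().
const events = [
  { action: 'updated', target: { type: 'WikiPage::Meta' } },
  { action: 'destroyed', target: { type: 'Milestone' } },
];

const findEventByActionAndTargetType = (action, targetType) => () =>
  events.find((event) => event.action === action && event.target?.type === targetType);

const eventMilestoneDestroyed = findEventByActionAndTargetType('destroyed', 'Milestone');

// Called inside a test body, not at module load time:
// eventMilestoneDestroyed() => { action: 'destroyed', target: { type: 'Milestone' } }
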
diff --git a/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap b/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap
index 5cfb4702be7..8b76a627c1e 100644
--- a/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap
+++ b/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap
@@ -3,7 +3,7 @@
exports[`Contributors charts should render charts and a RefSelector when loading completed and there is chart data 1`] = `
<div>
<div
- class="gl-border-b gl-border-gray-100 gl-mb-6 gl-bg-gray-10 gl-p-5"
+ class="gl-bg-gray-10 gl-border-b gl-border-gray-100 gl-mb-6 gl-p-5"
>
<div
class="gl-display-flex"
@@ -20,26 +20,19 @@ exports[`Contributors charts should render charts and a RefSelector when loading
value="main"
/>
</div>
-
<a
class="btn btn-default btn-md gl-button"
data-testid="history-button"
href="some/path"
>
- <!---->
-
- <!---->
-
<span
class="gl-button-text"
>
History
-
</span>
</a>
</div>
</div>
-
<div
data-testid="contributors-charts"
>
@@ -48,11 +41,9 @@ exports[`Contributors charts should render charts and a RefSelector when loading
>
Commits to main
</h4>
-
<span>
Excluding merge commits. Limited to 6,000 commits.
</span>
-
<glareachart-stub
annotations=""
class="gl-mb-5"
@@ -70,27 +61,22 @@ exports[`Contributors charts should render charts and a RefSelector when loading
thresholds=""
width="auto"
/>
-
<div
class="row"
>
<div
- class="col-lg-6 col-12 gl-my-5"
+ class="col-12 col-lg-6 gl-my-5"
>
<h4
class="gl-mb-2 gl-mt-0"
>
John
</h4>
-
<p
class="gl-mb-3"
>
-
- 2 commits (jawnnypoo@gmail.com)
-
+ 2 commits (jawnnypoo@gmail.com)
</p>
-
<glareachart-stub
annotations=""
data="[object Object]"
diff --git a/spec/frontend/custom_emoji/components/__snapshots__/list_spec.js.snap b/spec/frontend/custom_emoji/components/__snapshots__/list_spec.js.snap
index 4e87d4d8192..c69547deb1c 100644
--- a/spec/frontend/custom_emoji/components/__snapshots__/list_spec.js.snap
+++ b/spec/frontend/custom_emoji/components/__snapshots__/list_spec.js.snap
@@ -3,14 +3,11 @@
exports[`Custom emoji settings list component renders table of custom emoji 1`] = `
<div>
<div
- class="tabs gl-tabs"
+ class="gl-tabs tabs"
>
- <!---->
- <div
- class=""
- >
+ <div>
<ul
- class="nav gl-tabs-nav"
+ class="gl-tabs-nav nav"
role="tablist"
>
<div
@@ -23,22 +20,12 @@ exports[`Custom emoji settings list component renders table of custom emoji 1`]
href="/new"
to="/new"
>
- <!---->
-
- <!---->
-
<span
class="gl-button-text"
>
-
- New custom emoji
-
+ New custom emoji
</span>
</a>
-
- <!---->
-
- <!---->
</div>
<div
class="gl-actions-tabs-end"
@@ -50,30 +37,19 @@ exports[`Custom emoji settings list component renders table of custom emoji 1`]
href="/new"
to="/new"
>
- <!---->
-
- <!---->
-
<span
class="gl-button-text"
>
-
- New custom emoji
-
+ New custom emoji
</span>
</a>
-
- <!---->
-
- <!---->
</div>
</ul>
</div>
<div
- class="tab-content gl-pt-0 gl-tab-content"
+ class="gl-pt-0 gl-tab-content tab-content"
>
<transition-stub
- css="true"
enteractiveclass=""
enterclass=""
entertoclass="show"
@@ -89,14 +65,12 @@ exports[`Custom emoji settings list component renders table of custom emoji 1`]
role="tabpanel"
style="display: none;"
>
-
<table
aria-busy=""
aria-colcount="4"
- class="table b-table gl-table gl-table-layout-fixed"
+ class="b-table gl-table gl-table-layout-fixed table"
role="table"
>
- <!---->
<colgroup>
<col
style="width: 70px;"
@@ -110,12 +84,9 @@ exports[`Custom emoji settings list component renders table of custom emoji 1`]
/>
</colgroup>
<thead
- class=""
role="rowgroup"
>
- <!---->
<tr
- class=""
role="row"
>
<th
@@ -162,9 +133,7 @@ exports[`Custom emoji settings list component renders table of custom emoji 1`]
<tbody
role="rowgroup"
>
- <!---->
<tr
- class=""
role="row"
>
<td
@@ -180,7 +149,7 @@ exports[`Custom emoji settings list component renders table of custom emoji 1`]
</td>
<td
aria-colindex="2"
- class="gl-vertical-align-middle! gl-font-monospace"
+ class="gl-font-monospace gl-vertical-align-middle!"
role="cell"
>
<strong
@@ -194,26 +163,17 @@ exports[`Custom emoji settings list component renders table of custom emoji 1`]
class="gl-vertical-align-middle!"
role="cell"
>
-
- created-at
-
+ created-at
</td>
<td
aria-colindex="4"
- class=""
role="cell"
/>
</tr>
- <!---->
- <!---->
</tbody>
- <!---->
</table>
-
- <!---->
</div>
</transition-stub>
- <!---->
</div>
</div>
</div>
diff --git a/spec/frontend/design_management/components/__snapshots__/design_presentation_spec.js.snap b/spec/frontend/design_management/components/__snapshots__/design_presentation_spec.js.snap
index 560533891c9..8560b80ac9c 100644
--- a/spec/frontend/design_management/components/__snapshots__/design_presentation_spec.js.snap
+++ b/spec/frontend/design_management/components/__snapshots__/design_presentation_spec.js.snap
@@ -2,17 +2,16 @@
exports[`Design management design presentation component currentCommentForm is equal to current annotation position when isAnnotating is true 1`] = `
<div
- class="gl-h-full gl-w-full gl-p-5 overflow-auto gl-relative"
+ class="gl-h-full gl-p-5 gl-relative gl-w-full overflow-auto"
>
<div
- class="gl-h-full gl-w-full gl-display-flex gl-align-items-center gl-relative"
+ class="gl-align-items-center gl-display-flex gl-h-full gl-relative gl-w-full"
>
<design-image-stub
image="test.jpg"
name="test"
scale="1"
/>
-
<design-overlay-stub
currentcommentform="[object Object]"
dimensions="[object Object]"
@@ -25,17 +24,16 @@ exports[`Design management design presentation component currentCommentForm is e
exports[`Design management design presentation component currentCommentForm is null when isAnnotating is false 1`] = `
<div
- class="gl-h-full gl-w-full gl-p-5 overflow-auto gl-relative"
+ class="gl-h-full gl-p-5 gl-relative gl-w-full overflow-auto"
>
<div
- class="gl-h-full gl-w-full gl-display-flex gl-align-items-center gl-relative"
+ class="gl-align-items-center gl-display-flex gl-h-full gl-relative gl-w-full"
>
<design-image-stub
image="test.jpg"
name="test"
scale="1"
/>
-
<design-overlay-stub
dimensions="[object Object]"
notes=""
@@ -47,17 +45,16 @@ exports[`Design management design presentation component currentCommentForm is n
exports[`Design management design presentation component currentCommentForm is null when isAnnotating is true but annotation position is falsey 1`] = `
<div
- class="gl-h-full gl-w-full gl-p-5 overflow-auto gl-relative"
+ class="gl-h-full gl-p-5 gl-relative gl-w-full overflow-auto"
>
<div
- class="gl-h-full gl-w-full gl-display-flex gl-align-items-center gl-relative"
+ class="gl-align-items-center gl-display-flex gl-h-full gl-relative gl-w-full"
>
<design-image-stub
image="test.jpg"
name="test"
scale="1"
/>
-
<design-overlay-stub
dimensions="[object Object]"
notes=""
@@ -69,31 +66,26 @@ exports[`Design management design presentation component currentCommentForm is n
exports[`Design management design presentation component renders empty state when no image provided 1`] = `
<div
- class="gl-h-full gl-w-full gl-p-5 overflow-auto gl-relative"
+ class="gl-h-full gl-p-5 gl-relative gl-w-full overflow-auto"
>
<div
- class="gl-h-full gl-w-full gl-display-flex gl-align-items-center gl-relative"
- >
- <!---->
-
- <!---->
- </div>
+ class="gl-align-items-center gl-display-flex gl-h-full gl-relative gl-w-full"
+ />
</div>
`;
exports[`Design management design presentation component renders image and overlay when image provided 1`] = `
<div
- class="gl-h-full gl-w-full gl-p-5 overflow-auto gl-relative"
+ class="gl-h-full gl-p-5 gl-relative gl-w-full overflow-auto"
>
<div
- class="gl-h-full gl-w-full gl-display-flex gl-align-items-center gl-relative"
+ class="gl-align-items-center gl-display-flex gl-h-full gl-relative gl-w-full"
>
<design-image-stub
image="test.jpg"
name="test"
scale="1"
/>
-
<design-overlay-stub
dimensions="[object Object]"
notes=""
diff --git a/spec/frontend/design_management/components/__snapshots__/image_spec.js.snap b/spec/frontend/design_management/components/__snapshots__/image_spec.js.snap
index 1f4e579f075..573ad5f872f 100644
--- a/spec/frontend/design_management/components/__snapshots__/image_spec.js.snap
+++ b/spec/frontend/design_management/components/__snapshots__/image_spec.js.snap
@@ -4,11 +4,9 @@ exports[`Design management large image component renders SVG with proper height
<div
class="gl-mx-auto gl-my-auto js-design-image"
>
- <!---->
-
<img
alt="test"
- class="mh-100 img-fluid"
+ class="img-fluid mh-100"
src="mockImage.svg"
/>
</div>
@@ -18,11 +16,9 @@ exports[`Design management large image component renders image 1`] = `
<div
class="gl-mx-auto gl-my-auto js-design-image"
>
- <!---->
-
<img
alt="test"
- class="mh-100 img-fluid"
+ class="img-fluid mh-100"
src="test.jpg"
/>
</div>
@@ -33,12 +29,10 @@ exports[`Design management large image component renders loading state 1`] = `
class="gl-mx-auto gl-my-auto js-design-image"
isloading="true"
>
- <!---->
-
<img
alt=""
- class="mh-100 img-fluid"
- src=""
+ class="img-fluid mh-100"
+ src="null"
/>
</div>
`;
@@ -55,8 +49,6 @@ exports[`Design management large image component sets correct classes and styles
<div
class="gl-mx-auto gl-my-auto js-design-image"
>
- <!---->
-
<img
alt="test"
class="mh-100"
@@ -70,8 +62,6 @@ exports[`Design management large image component zoom sets image style when zoom
<div
class="gl-mx-auto gl-my-auto js-design-image"
>
- <!---->
-
<img
alt="test"
class="mh-100"
diff --git a/spec/frontend/design_management/components/design_notes/__snapshots__/design_note_signed_out_spec.js.snap b/spec/frontend/design_management/components/design_notes/__snapshots__/design_note_signed_out_spec.js.snap
index ab37cb90bd3..8bc9bce7e80 100644
--- a/spec/frontend/design_management/components/design_notes/__snapshots__/design_note_signed_out_spec.js.snap
+++ b/spec/frontend/design_management/components/design_notes/__snapshots__/design_note_signed_out_spec.js.snap
@@ -4,19 +4,19 @@ exports[`DesignNoteSignedOut renders message containing register and sign-in lin
<div
class="disabled-comment text-center"
>
- Please
+ Please
<gl-link-stub
href="/users/sign_up?redirect_to_referer=yes"
>
register
</gl-link-stub>
- or
+ or
<gl-link-stub
href="/users/sign_in?redirect_to_referer=yes"
>
sign in
</gl-link-stub>
- to reply.
+ to reply.
</div>
`;
@@ -24,18 +24,18 @@ exports[`DesignNoteSignedOut renders message containing register and sign-in lin
<div
class="disabled-comment text-center"
>
- Please
+ Please
<gl-link-stub
href="/users/sign_up?redirect_to_referer=yes"
>
register
</gl-link-stub>
- or
+ or
<gl-link-stub
href="/users/sign_in?redirect_to_referer=yes"
>
sign in
</gl-link-stub>
- to start a new discussion.
+ to start a new discussion.
</div>
`;
diff --git a/spec/frontend/design_management/components/design_notes/__snapshots__/design_reply_form_spec.js.snap b/spec/frontend/design_management/components/design_notes/__snapshots__/design_reply_form_spec.js.snap
index 4dc8eaea174..206187c3530 100644
--- a/spec/frontend/design_management/components/design_notes/__snapshots__/design_reply_form_spec.js.snap
+++ b/spec/frontend/design_management/components/design_notes/__snapshots__/design_reply_form_spec.js.snap
@@ -1,17 +1,33 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Design reply form component renders button text as "Comment" when creating a comment 1`] = `
-"<button data-track-action=\\"click_button\\" data-qa-selector=\\"save_comment_button\\" type=\\"submit\\" disabled=\\"disabled\\" class=\\"btn gl-mr-3 gl-w-auto! btn-confirm btn-md disabled gl-button\\">
- <!---->
- <!----> <span class=\\"gl-button-text\\">
- Comment
- </span></button>"
+<button
+ class="btn btn-confirm btn-md disabled gl-button gl-mr-3 gl-w-auto!"
+ data-qa-selector="save_comment_button"
+ data-track-action="click_button"
+ disabled=""
+ type="submit"
+>
+ <span
+ class="gl-button-text"
+ >
+ Comment
+ </span>
+</button>
`;
exports[`Design reply form component renders button text as "Save comment" when creating a comment 1`] = `
-"<button data-track-action=\\"click_button\\" data-qa-selector=\\"save_comment_button\\" type=\\"submit\\" disabled=\\"disabled\\" class=\\"btn gl-mr-3 gl-w-auto! btn-confirm btn-md disabled gl-button\\">
- <!---->
- <!----> <span class=\\"gl-button-text\\">
- Save comment
- </span></button>"
+<button
+ class="btn btn-confirm btn-md disabled gl-button gl-mr-3 gl-w-auto!"
+ data-qa-selector="save_comment_button"
+ data-track-action="click_button"
+ disabled=""
+ type="submit"
+>
+ <span
+ class="gl-button-text"
+ >
+ Save comment
+ </span>
+</button>
`;
diff --git a/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap b/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
index 0bbb44bb517..53359b02b4c 100644
--- a/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
+++ b/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
@@ -9,36 +9,27 @@ exports[`Design management list item component when item appears in view after i
`;
exports[`Design management list item component with notes renders item with multiple comments 1`] = `
-<router-link-stub
- ariacurrentvalue="page"
- class="card gl-cursor-pointer text-plain js-design-list-item design-list-item gl-mb-0"
- event="click"
- tag="a"
- to="[object Object]"
+<a
+ class="card design-list-item gl-cursor-pointer gl-mb-0 js-design-list-item text-plain"
>
<div
- class="card-body gl-p-0 gl-display-flex gl-align-items-center gl-justify-content-center gl-overflow-hidden gl-relative gl-rounded-top-base"
+ class="card-body gl-align-items-center gl-display-flex gl-justify-content-center gl-overflow-hidden gl-p-0 gl-relative gl-rounded-top-base"
>
- <!---->
-
<gl-intersection-observer-stub
class="gl-flex-grow-1"
>
- <!---->
-
<img
alt="test"
- class="gl-display-block gl-mx-auto gl-max-w-full gl-max-h-full gl-w-auto design-img"
+ class="design-img gl-display-block gl-max-h-full gl-max-w-full gl-mx-auto gl-w-auto"
data-qa-filename="test"
data-qa-selector="design_image"
data-testid="design-img-1"
- src=""
+ src="null"
/>
</gl-intersection-observer-stub>
</div>
-
<div
- class="card-footer gl-display-flex gl-w-full gl-bg-white gl-py-3 gl-px-4"
+ class="card-footer gl-bg-white gl-display-flex gl-px-4 gl-py-3 gl-w-full"
>
<div
class="gl-display-flex gl-flex-direction-column str-truncated-100"
@@ -51,12 +42,10 @@ exports[`Design management list item component with notes renders item with mult
>
test
</span>
-
<span
class="str-truncated-100"
>
-
- Updated
+ Updated
<timeago-stub
cssclass=""
datetimeformat="DATE_WITH_TIME_FORMAT"
@@ -65,60 +54,47 @@ exports[`Design management list item component with notes renders item with mult
/>
</span>
</div>
-
<div
- class="gl-ml-auto gl-display-flex gl-align-items-center gl-text-gray-500"
+ class="gl-align-items-center gl-display-flex gl-ml-auto gl-text-gray-500"
>
<gl-icon-stub
class="gl-ml-2"
name="comments"
size="16"
/>
-
<span
aria-label="2 comments"
class="gl-ml-2"
>
-
2
-
</span>
</div>
</div>
-</router-link-stub>
+</a>
`;
exports[`Design management list item component with notes renders item with single comment 1`] = `
-<router-link-stub
- ariacurrentvalue="page"
- class="card gl-cursor-pointer text-plain js-design-list-item design-list-item gl-mb-0"
- event="click"
- tag="a"
- to="[object Object]"
+<a
+ class="card design-list-item gl-cursor-pointer gl-mb-0 js-design-list-item text-plain"
>
<div
- class="card-body gl-p-0 gl-display-flex gl-align-items-center gl-justify-content-center gl-overflow-hidden gl-relative gl-rounded-top-base"
+ class="card-body gl-align-items-center gl-display-flex gl-justify-content-center gl-overflow-hidden gl-p-0 gl-relative gl-rounded-top-base"
>
- <!---->
-
<gl-intersection-observer-stub
class="gl-flex-grow-1"
>
- <!---->
-
<img
alt="test"
- class="gl-display-block gl-mx-auto gl-max-w-full gl-max-h-full gl-w-auto design-img"
+ class="design-img gl-display-block gl-max-h-full gl-max-w-full gl-mx-auto gl-w-auto"
data-qa-filename="test"
data-qa-selector="design_image"
data-testid="design-img-1"
- src=""
+ src="null"
/>
</gl-intersection-observer-stub>
</div>
-
<div
- class="card-footer gl-display-flex gl-w-full gl-bg-white gl-py-3 gl-px-4"
+ class="card-footer gl-bg-white gl-display-flex gl-px-4 gl-py-3 gl-w-full"
>
<div
class="gl-display-flex gl-flex-direction-column str-truncated-100"
@@ -131,12 +107,10 @@ exports[`Design management list item component with notes renders item with sing
>
test
</span>
-
<span
class="str-truncated-100"
>
-
- Updated
+ Updated
<timeago-stub
cssclass=""
datetimeformat="DATE_WITH_TIME_FORMAT"
@@ -145,25 +119,21 @@ exports[`Design management list item component with notes renders item with sing
/>
</span>
</div>
-
<div
- class="gl-ml-auto gl-display-flex gl-align-items-center gl-text-gray-500"
+ class="gl-align-items-center gl-display-flex gl-ml-auto gl-text-gray-500"
>
<gl-icon-stub
class="gl-ml-2"
name="comments"
size="16"
/>
-
<span
aria-label="1 comment"
class="gl-ml-2"
>
-
1
-
</span>
</div>
</div>
-</router-link-stub>
+</a>
`;
diff --git a/spec/frontend/design_management/components/list/item_spec.js b/spec/frontend/design_management/components/list/item_spec.js
index 4a0ad5a045b..14e8a5579ba 100644
--- a/spec/frontend/design_management/components/list/item_spec.js
+++ b/spec/frontend/design_management/components/list/item_spec.js
@@ -1,5 +1,5 @@
import { GlIcon, GlLoadingIcon, GlIntersectionObserver } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { shallowMount, RouterLinkStub } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueRouter from 'vue-router';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
@@ -49,7 +49,7 @@ describe('Design management list item component', () => {
imageLoading,
};
},
- stubs: ['router-link'],
+ stubs: { RouterLink: RouterLinkStub },
}),
);
}
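The `stubs: { RouterLink: RouterLinkStub }` change above is standard Vue Test Utils usage; a minimal sketch with a throwaway component (not the real design list item) shows why the snapshots now render an `<a>` instead of `<router-link-stub>`:

import { shallowMount, RouterLinkStub } from '@vue/test-utils';

// Throwaway component for illustration only.
const Card = {
  template: '<router-link to="/designs/1">test</router-link>',
};

describe('RouterLinkStub sketch', () => {
  it('renders a plain <a> and keeps the `to` prop assertable', () => {
    const wrapper = shallowMount(Card, {
      stubs: { RouterLink: RouterLinkStub },
    });

    expect(wrapper.element.tagName).toBe('A');
    expect(wrapper.findComponent(RouterLinkStub).props('to')).toBe('/designs/1');
  });
});
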
diff --git a/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap b/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap
index 3c4aa0f4d3c..cf8aac22f67 100644
--- a/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap
@@ -2,14 +2,14 @@
exports[`Design management toolbar component renders design and updated data 1`] = `
<header
- class="gl-display-flex gl-align-items-center gl-justify-content-space-between gl-bg-white gl-py-4 gl-pl-4 js-design-header"
+ class="gl-align-items-center gl-bg-white gl-display-flex gl-justify-content-space-between gl-pl-4 gl-py-4 js-design-header"
>
<div
- class="gl-display-flex gl-align-items-center"
+ class="gl-align-items-center gl-display-flex"
>
<a
aria-label="Go back to designs"
- class="gl-mr-5 gl-display-flex gl-align-items-center gl-justify-content-center text-plain"
+ class="gl-align-items-center gl-display-flex gl-justify-content-center gl-mr-5 text-plain"
data-testid="close-design"
>
<gl-icon-stub
@@ -17,16 +17,14 @@ exports[`Design management toolbar component renders design and updated data 1`]
size="16"
/>
</a>
-
<div
- class="gl-overflow-hidden gl-display-flex gl-align-items-center"
+ class="gl-align-items-center gl-display-flex gl-overflow-hidden"
>
<h2
- class="gl-m-0 str-truncated-100 gl-font-base"
+ class="gl-font-base gl-m-0 str-truncated-100"
>
test.jpg
</h2>
-
<small
class="gl-text-gray-500"
>
@@ -34,12 +32,10 @@ exports[`Design management toolbar component renders design and updated data 1`]
</small>
</div>
</div>
-
<design-navigation-stub
- class="gl-ml-auto gl-flex-shrink-0"
- id="1"
+ class="gl-flex-shrink-0 gl-ml-auto"
+ id="reference-0"
/>
-
<gl-button-stub
aria-label="Download design"
buttontextclasses=""
@@ -50,7 +46,6 @@ exports[`Design management toolbar component renders design and updated data 1`]
title="Download design"
variant="default"
/>
-
<delete-button-stub
buttoncategory="secondary"
buttonclass=""
diff --git a/spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap b/spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap
index 191bcc2d484..e6a74c49d50 100644
--- a/spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap
+++ b/spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap
@@ -12,15 +12,12 @@ exports[`Design management upload button component renders inverted upload desig
title="Adding a design with the same filename replaces the file in a new version."
variant="confirm"
>
-
Upload designs
-
</gl-button-stub>
-
<input
accept="image/*"
class="gl-display-none"
- multiple="multiple"
+ multiple=""
name="design_file"
type="file"
/>
@@ -37,15 +34,12 @@ exports[`Design management upload button component renders upload design button
title="Adding a design with the same filename replaces the file in a new version."
variant="confirm"
>
-
Upload designs
-
</gl-button-stub>
-
<input
accept="image/*"
class="gl-display-none"
- multiple="multiple"
+ multiple=""
name="design_file"
type="file"
/>
diff --git a/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap b/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
index f0615f61059..224e35e9f5e 100644
--- a/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
@@ -2,86 +2,70 @@
exports[`Design management design index page renders design index 1`] = `
<div
- class="design-detail js-design-detail fixed-top gl-w-full gl-display-flex gl-justify-content-center gl-flex-direction-column gl-lg-flex-direction-row"
+ class="design-detail fixed-top gl-display-flex gl-flex-direction-column gl-justify-content-center gl-lg-flex-direction-row gl-w-full js-design-detail"
>
<div
- class="gl-display-flex gl-overflow-hidden gl-flex-grow-1 gl-flex-direction-column gl-relative"
+ class="gl-display-flex gl-flex-direction-column gl-flex-grow-1 gl-overflow-hidden gl-relative"
>
<div
iid="1"
project-path="project-path"
/>
-
- <!---->
-
<design-presentation-stub
discussions="[object Object],[object Object]"
image="test.jpg"
imagename="test.jpg"
scale="1"
/>
-
<div
- class="design-scaler-wrapper gl-absolute gl-mb-6 gl-display-flex gl-justify-content-center gl-align-items-center"
+ class="design-scaler-wrapper gl-absolute gl-align-items-center gl-display-flex gl-justify-content-center gl-mb-6"
>
<design-scaler-stub
maxscale="2"
/>
</div>
</div>
-
<div
- class="image-notes gl-pt-0"
+ class="gl-pt-0 image-notes"
>
<div
- class="gl-py-4 gl-mb-4 gl-display-flex gl-justify-content-space-between gl-align-items-center gl-border-b-1 gl-border-b-solid gl-border-b-gray-100"
+ class="gl-align-items-center gl-border-b-1 gl-border-b-gray-100 gl-border-b-solid gl-display-flex gl-justify-content-space-between gl-mb-4 gl-py-4"
>
<span>
To Do
</span>
-
<design-todo-button-stub
design="[object Object]"
/>
</div>
-
<h2
class="gl-font-weight-bold gl-mt-0"
>
-
- My precious issue
-
+ My precious issue
</h2>
-
<a
- class="gl-text-gray-400 gl-text-decoration-none gl-mb-6 gl-display-block"
+ class="gl-display-block gl-mb-6 gl-text-decoration-none gl-text-gray-400"
href="full-issue-url"
>
ull-issue-path
</a>
-
<description-form-stub
design="[object Object]"
designvariables="[object Object]"
markdownpreviewpath="/project-path/preview_markdown?target_type=Issue"
/>
-
<participants-stub
class="gl-mb-4"
lazy="true"
numberoflessparticipants="8"
participants="[object Object]"
/>
-
- <!---->
-
<design-note-signed-out-stub
class="gl-mb-4"
isadddiscussion="true"
registerpath=""
signinpath=""
/>
-
<design-discussion-stub
data-testid="unresolved-discussion"
designid="gid::/gitlab/Design/1"
@@ -92,7 +76,6 @@ exports[`Design management design index page renders design index 1`] = `
registerpath=""
signinpath=""
/>
-
<gl-accordion-stub
class="gl-mb-5"
headerlevel="3"
@@ -113,23 +96,21 @@ exports[`Design management design index page renders design index 1`] = `
/>
</gl-accordion-item-stub>
</gl-accordion-stub>
-
</div>
</div>
`;
exports[`Design management design index page with error GlAlert is rendered in correct position with correct content 1`] = `
<div
- class="design-detail js-design-detail fixed-top gl-w-full gl-display-flex gl-justify-content-center gl-flex-direction-column gl-lg-flex-direction-row"
+ class="design-detail fixed-top gl-display-flex gl-flex-direction-column gl-justify-content-center gl-lg-flex-direction-row gl-w-full js-design-detail"
>
<div
- class="gl-display-flex gl-overflow-hidden gl-flex-grow-1 gl-flex-direction-column gl-relative"
+ class="gl-display-flex gl-flex-direction-column gl-flex-grow-1 gl-overflow-hidden gl-relative"
>
<div
iid="1"
project-path="project-path"
/>
-
<div
class="gl-p-5"
>
@@ -144,82 +125,64 @@ exports[`Design management design index page with error GlAlert is rendered in c
title=""
variant="danger"
>
-
woops
-
</gl-alert-stub>
</div>
-
<design-presentation-stub
discussions=""
image="test.jpg"
imagename="test.jpg"
scale="1"
/>
-
<div
- class="design-scaler-wrapper gl-absolute gl-mb-6 gl-display-flex gl-justify-content-center gl-align-items-center"
+ class="design-scaler-wrapper gl-absolute gl-align-items-center gl-display-flex gl-justify-content-center gl-mb-6"
>
<design-scaler-stub
maxscale="2"
/>
</div>
</div>
-
<div
- class="image-notes gl-pt-0"
+ class="gl-pt-0 image-notes"
>
<div
- class="gl-py-4 gl-mb-4 gl-display-flex gl-justify-content-space-between gl-align-items-center gl-border-b-1 gl-border-b-solid gl-border-b-gray-100"
+ class="gl-align-items-center gl-border-b-1 gl-border-b-gray-100 gl-border-b-solid gl-display-flex gl-justify-content-space-between gl-mb-4 gl-py-4"
>
<span>
To Do
</span>
-
<design-todo-button-stub
design="[object Object]"
/>
</div>
-
<h2
class="gl-font-weight-bold gl-mt-0"
>
-
- My precious issue
-
+ My precious issue
</h2>
-
<a
- class="gl-text-gray-400 gl-text-decoration-none gl-mb-6 gl-display-block"
+ class="gl-display-block gl-mb-6 gl-text-decoration-none gl-text-gray-400"
href="full-issue-url"
>
ull-issue-path
</a>
-
<description-form-stub
design="[object Object]"
designvariables="[object Object]"
markdownpreviewpath="/project-path/preview_markdown?target_type=Issue"
/>
-
<participants-stub
class="gl-mb-4"
lazy="true"
numberoflessparticipants="8"
participants="[object Object]"
/>
-
- <!---->
-
<design-note-signed-out-stub
class="gl-mb-4"
isadddiscussion="true"
registerpath=""
signinpath=""
/>
-
- <!---->
-
</div>
</div>
`;
diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js
index c1f0966f9c6..e10aad6214c 100644
--- a/spec/frontend/diffs/components/app_spec.js
+++ b/spec/frontend/diffs/components/app_spec.js
@@ -11,7 +11,7 @@ import CommitWidget from '~/diffs/components/commit_widget.vue';
import CompareVersions from '~/diffs/components/compare_versions.vue';
import DiffFile from '~/diffs/components/diff_file.vue';
import NoChanges from '~/diffs/components/no_changes.vue';
-import findingsDrawer from '~/diffs/components/shared/findings_drawer.vue';
+import FindingsDrawer from '~/diffs/components/shared/findings_drawer.vue';
import DiffsFileTree from '~/diffs/components/diffs_file_tree.vue';
import CollapsedFilesWarning from '~/diffs/components/collapsed_files_warning.vue';
@@ -251,6 +251,11 @@ describe('diffs/components/app', () => {
await nextTick();
expect(store.state.diffs.currentDiffFileId).toBe('ABC');
});
+
+ it('renders findings-drawer', () => {
+ createComponent();
+ expect(wrapper.findComponent(FindingsDrawer).exists()).toBe(true);
+ });
});
it('marks current diff file based on currently highlighted row', async () => {
@@ -755,20 +760,4 @@ describe('diffs/components/app', () => {
);
});
});
-
- describe('findings-drawer', () => {
- it('does not render findings-drawer when codeQualityInlineDrawer flag is off', () => {
- createComponent();
- expect(wrapper.findComponent(findingsDrawer).exists()).toBe(false);
- });
-
- it('does render findings-drawer when codeQualityInlineDrawer flag is on', () => {
- createComponent({}, () => {}, {
- glFeatures: {
- codeQualityInlineDrawer: true,
- },
- });
- expect(wrapper.findComponent(findingsDrawer).exists()).toBe(true);
- });
- });
});
diff --git a/spec/frontend/diffs/components/diff_inline_findings_item_spec.js b/spec/frontend/diffs/components/diff_inline_findings_item_spec.js
index 72d96d3435f..cda3273d51e 100644
--- a/spec/frontend/diffs/components/diff_inline_findings_item_spec.js
+++ b/spec/frontend/diffs/components/diff_inline_findings_item_spec.js
@@ -1,4 +1,4 @@
-import { GlIcon, GlLink } from '@gitlab/ui';
+import { GlIcon } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import DiffInlineFindingsItem from '~/diffs/components/diff_inline_findings_item.vue';
import { SEVERITY_CLASSES, SEVERITY_ICONS } from '~/ci/reports/codequality_report/constants';
@@ -8,19 +8,13 @@ let wrapper;
const [codeQualityFinding] = multipleFindingsArrCodeQualityScale;
const findIcon = () => wrapper.findComponent(GlIcon);
-const findButton = () => wrapper.findComponent(GlLink);
const findDescriptionPlainText = () => wrapper.findByTestId('description-plain-text');
-const findDescriptionLinkSection = () => wrapper.findByTestId('description-button-section');
describe('DiffCodeQuality', () => {
- const createWrapper = ({ glFeatures = {}, link = true } = {}) => {
+ const createWrapper = () => {
return shallowMountExtended(DiffInlineFindingsItem, {
propsData: {
finding: codeQualityFinding,
- link,
- },
- provide: {
- glFeatures,
},
});
};
@@ -36,42 +30,9 @@ describe('DiffCodeQuality', () => {
});
});
- describe('with codeQualityInlineDrawer flag false', () => {
- it('should render severity + description in plain text', () => {
- wrapper = createWrapper({
- glFeatures: {
- codeQualityInlineDrawer: false,
- },
- });
- expect(findDescriptionPlainText().text()).toContain(codeQualityFinding.severity);
- expect(findDescriptionPlainText().text()).toContain(codeQualityFinding.description);
- });
- });
-
- describe('with codeQualityInlineDrawer flag true', () => {
- const [{ description, severity }] = multipleFindingsArrCodeQualityScale;
- const renderedText = `${severity} - ${description}`;
- it('when link prop is true, should render gl-link', () => {
- wrapper = createWrapper({
- glFeatures: {
- codeQualityInlineDrawer: true,
- },
- });
-
- expect(findButton().exists()).toBe(true);
- expect(findButton().text()).toBe(renderedText);
- });
-
- it('when link prop is false, should not render gl-link', () => {
- wrapper = createWrapper({
- glFeatures: {
- codeQualityInlineDrawer: true,
- },
- link: false,
- });
-
- expect(findButton().exists()).toBe(false);
- expect(findDescriptionLinkSection().text()).toBe(renderedText);
- });
+ it('should render severity + description in plain text', () => {
+ wrapper = createWrapper();
+ expect(findDescriptionPlainText().text()).toContain(codeQualityFinding.severity);
+ expect(findDescriptionPlainText().text()).toContain(codeQualityFinding.description);
});
});
diff --git a/spec/frontend/diffs/components/diff_inline_findings_spec.js b/spec/frontend/diffs/components/diff_inline_findings_spec.js
index 65b2abe7dd5..f654a2e2d4f 100644
--- a/spec/frontend/diffs/components/diff_inline_findings_spec.js
+++ b/spec/frontend/diffs/components/diff_inline_findings_spec.js
@@ -2,7 +2,7 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import DiffInlineFindings from '~/diffs/components/diff_inline_findings.vue';
import DiffInlineFindingsItem from '~/diffs/components/diff_inline_findings_item.vue';
import { NEW_CODE_QUALITY_FINDINGS } from '~/diffs/i18n';
-import { multipleCodeQualityNoSast } from '../mock_data/inline_findings';
+import { multipleFindingsArrCodeQualityScale } from '../mock_data/inline_findings';
let wrapper;
const heading = () => wrapper.findByTestId('diff-inline-findings-heading');
@@ -13,7 +13,7 @@ describe('DiffInlineFindings', () => {
return shallowMountExtended(DiffInlineFindings, {
propsData: {
title: NEW_CODE_QUALITY_FINDINGS,
- findings: multipleCodeQualityNoSast.codeQuality,
+ findings: multipleFindingsArrCodeQualityScale,
},
});
};
@@ -25,7 +25,7 @@ describe('DiffInlineFindings', () => {
it('renders the correct number of DiffInlineFindingsItem components with correct props', () => {
wrapper = createWrapper();
- expect(diffInlineFindingsItems()).toHaveLength(multipleCodeQualityNoSast.codeQuality.length);
+ expect(diffInlineFindingsItems()).toHaveLength(multipleFindingsArrCodeQualityScale.length);
expect(diffInlineFindingsItems().wrappers[0].props('finding')).toEqual(
wrapper.props('findings')[0],
);
diff --git a/spec/frontend/diffs/components/diff_row_spec.js b/spec/frontend/diffs/components/diff_row_spec.js
index 8a67d7b152c..30510958704 100644
--- a/spec/frontend/diffs/components/diff_row_spec.js
+++ b/spec/frontend/diffs/components/diff_row_spec.js
@@ -71,7 +71,8 @@ describe('DiffRow', () => {
const hits = coverageFileData[file]?.[line];
if (hits) {
return { text: `Test coverage: ${hits} hits`, class: 'coverage' };
- } else if (hits === 0) {
+ }
+ if (hits === 0) {
return { text: 'No test coverage', class: 'no-coverage' };
}
diff --git a/spec/frontend/diffs/components/inline_findings_spec.js b/spec/frontend/diffs/components/inline_findings_spec.js
index 71cc6ae49fd..102287a23b6 100644
--- a/spec/frontend/diffs/components/inline_findings_spec.js
+++ b/spec/frontend/diffs/components/inline_findings_spec.js
@@ -2,7 +2,7 @@ import { mountExtended } from 'helpers/vue_test_utils_helper';
import InlineFindings from '~/diffs/components/inline_findings.vue';
import DiffInlineFindings from '~/diffs/components/diff_inline_findings.vue';
import { NEW_CODE_QUALITY_FINDINGS } from '~/diffs/i18n';
-import { threeCodeQualityFindingsRaw } from '../mock_data/inline_findings';
+import { threeCodeQualityFindings } from '../mock_data/inline_findings';
let wrapper;
@@ -12,7 +12,7 @@ describe('InlineFindings', () => {
const createWrapper = () => {
return mountExtended(InlineFindings, {
propsData: {
- codeQuality: threeCodeQualityFindingsRaw,
+ codeQuality: threeCodeQualityFindings,
},
});
};
@@ -28,6 +28,6 @@ describe('InlineFindings', () => {
it('renders diff inline findings component with correct props for codequality array', () => {
wrapper = createWrapper();
expect(diffInlineFindings().props('title')).toBe(NEW_CODE_QUALITY_FINDINGS);
- expect(diffInlineFindings().props('findings')).toBe(threeCodeQualityFindingsRaw);
+ expect(diffInlineFindings().props('findings')).toBe(threeCodeQualityFindings);
});
});
diff --git a/spec/frontend/diffs/components/shared/__snapshots__/findings_drawer_spec.js.snap b/spec/frontend/diffs/components/shared/__snapshots__/findings_drawer_spec.js.snap
index 51bd8f380ee..afa2a7d9678 100644
--- a/spec/frontend/diffs/components/shared/__snapshots__/findings_drawer_spec.js.snap
+++ b/spec/frontend/diffs/components/shared/__snapshots__/findings_drawer_spec.js.snap
@@ -9,15 +9,13 @@ exports[`FindingsDrawer matches the snapshot 1`] = `
zindex="252"
>
<h2
- class="gl-font-size-h2 gl-mt-0 gl-mb-0"
+ class="gl-font-size-h2 gl-mb-0 gl-mt-0"
data-testid="findings-drawer-heading"
>
-
- Unused method argument - \`c\`. If it's necessary, use \`_\` or \`_c\` as an argument name to indicate that it won't be used.
-
+ Unused method argument - \`c\`. If it's necessary, use \`_\` or \`_c\` as an argument name to indicate that it won't be used.
</h2>
<ul
- class="gl-list-style-none gl-border-b-initial gl-mb-0 gl-pb-0!"
+ class="gl-border-b-initial gl-list-style-none gl-mb-0 gl-pb-0!"
>
<li
class="gl-mb-4"
@@ -28,19 +26,14 @@ exports[`FindingsDrawer matches the snapshot 1`] = `
>
Severity:
</span>
-
<gl-icon-stub
- class="inline-findings-severity-icon gl-text-orange-300"
+ class="gl-text-orange-300 inline-findings-severity-icon"
data-testid="findings-drawer-severity-icon"
name="severity-low"
size="12"
/>
-
-
- minor
-
+ minor
</li>
-
<li
class="gl-mb-4"
data-testid="findings-drawer-engine"
@@ -50,11 +43,8 @@ exports[`FindingsDrawer matches the snapshot 1`] = `
>
Engine:
</span>
-
- testengine name
-
+ testengine name
</li>
-
<li
class="gl-mb-4"
data-testid="findings-drawer-category"
@@ -64,21 +54,17 @@ exports[`FindingsDrawer matches the snapshot 1`] = `
>
Category:
</span>
-
- testcategory 1
-
+ testcategory 1
</li>
-
<li
class="gl-mb-4"
data-testid="findings-drawer-other-locations"
>
<span
- class="gl-font-weight-bold gl-mb-3 gl-display-block"
+ class="gl-display-block gl-font-weight-bold gl-mb-3"
>
Other locations:
</span>
-
<ul
class="gl-pl-6"
>
@@ -115,7 +101,6 @@ exports[`FindingsDrawer matches the snapshot 1`] = `
</ul>
</li>
</ul>
-
<span
class="drawer-body gl-display-block gl-px-3 gl-py-0!"
data-testid="findings-drawer-body"
diff --git a/spec/frontend/diffs/mock_data/inline_findings.js b/spec/frontend/diffs/mock_data/inline_findings.js
index 85fb48b86d5..ae1ae909238 100644
--- a/spec/frontend/diffs/mock_data/inline_findings.js
+++ b/spec/frontend/diffs/mock_data/inline_findings.js
@@ -4,6 +4,7 @@ export const multipleFindingsArrCodeQualityScale = [
description: 'mocked minor Issue',
line: 2,
scale: 'codeQuality',
+ text: 'mocked minor Issue',
},
{
severity: 'major',
@@ -43,6 +44,7 @@ export const multipleFindingsArrSastScale = [
description: 'mocked low Issue',
line: 2,
scale: 'sast',
+ text: 'mocked low Issue',
},
{
severity: 'medium',
@@ -76,48 +78,6 @@ export const multipleFindingsArrSastScale = [
},
];
-export const multipleCodeQualityNoSast = {
- codeQuality: multipleFindingsArrCodeQualityScale,
- sast: [],
-};
-
-export const multipleSastNoCodeQuality = {
- codeQuality: [],
- sast: multipleFindingsArrSastScale,
-};
-
-export const fiveCodeQualityFindings = {
- filePath: 'index.js',
- codequality: multipleFindingsArrCodeQualityScale.slice(0, 5),
-};
-
-export const threeCodeQualityFindings = {
- filePath: 'index.js',
- codequality: multipleFindingsArrCodeQualityScale.slice(0, 3),
-};
-export const threeCodeQualityFindingsRaw = [multipleFindingsArrCodeQualityScale.slice(0, 3)];
-
-export const singularCodeQualityFinding = {
- filePath: 'index.js',
- codequality: [multipleFindingsArrCodeQualityScale[0]],
-};
-
-export const singularFindingSast = {
- filePath: 'index.js',
- sast: [multipleFindingsArrSastScale[0]],
-};
-
-export const threeSastFindings = {
- filePath: 'index.js',
- sast: multipleFindingsArrSastScale.slice(0, 3),
-};
-
-export const oneCodeQualityTwoSastFindings = {
- filePath: 'index.js',
- sast: multipleFindingsArrSastScale.slice(0, 2),
- codequality: [multipleFindingsArrCodeQualityScale[0]],
-};
-
export const diffCodeQuality = {
diffFile: { file_hash: '123' },
diffLines: [
@@ -151,3 +111,19 @@ export const diffCodeQuality = {
},
],
};
+
+export const singularCodeQualityFinding = [multipleFindingsArrCodeQualityScale[0]];
+export const singularSastFinding = [multipleFindingsArrSastScale[0]];
+export const twoSastFindings = multipleFindingsArrSastScale.slice(0, 2);
+export const fiveCodeQualityFindings = multipleFindingsArrCodeQualityScale.slice(0, 5);
+export const threeCodeQualityFindings = multipleFindingsArrCodeQualityScale.slice(0, 3);
+
+export const filePath = 'testPath';
+export const scale = 'exampleScale';
+
+export const dropdownIcon = {
+ id: 'noise.rb-2',
+ key: 'mockedkey',
+ name: 'severity-medium',
+ class: 'gl-text-orange-400',
+};
diff --git a/spec/frontend/drawio/drawio_editor_spec.js b/spec/frontend/drawio/drawio_editor_spec.js
index 5a77b9d4689..0b863edc13b 100644
--- a/spec/frontend/drawio/drawio_editor_spec.js
+++ b/spec/frontend/drawio/drawio_editor_spec.js
@@ -5,6 +5,7 @@ import {
DRAWIO_IFRAME_TIMEOUT,
DIAGRAM_MAX_SIZE,
} from '~/drawio/constants';
+import { base64EncodeUnicode } from '~/lib/utils/text_utility';
import { createAlert, VARIANT_SUCCESS } from '~/alert';
const DRAWIO_EDITOR_URL =
@@ -19,8 +20,8 @@ describe('drawio/drawio_editor', () => {
let editorFacade;
let drawioIFrameReceivedMessages;
const diagramURL = `${window.location.origin}/uploads/diagram.drawio.svg`;
- const testSvg = '<svg></svg>';
- const testEncodedSvg = `data:image/svg+xml;base64,${btoa(testSvg)}`;
+ const testSvg = '<svg>😀</svg>';
+ const testEncodedSvg = `data:image/svg+xml;base64,${base64EncodeUnicode(testSvg)}`;
const filename = 'diagram.drawio.svg';
const findDrawioIframe = () => document.getElementById(DRAWIO_FRAME_ID);
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/include.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/include.yml
index 909911debf1..3076105ffde 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/include.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/include.yml
@@ -33,6 +33,17 @@ include:
rules:
- exists:
- file.md
+ - local: builds.yml
+ rules:
+ - if: $INCLUDE_BUILDS == "true"
+ changes:
+ - 'test.yml'
+ - local: builds.yml
+ rules:
+ - changes:
+ paths:
+ - 'test.yml'
+ compare_to: 'master'
# valid trigger:include
trigger:include accepts project and file properties:
diff --git a/spec/frontend/emoji/index_spec.js b/spec/frontend/emoji/index_spec.js
index 1b948cce73a..1a12bd303f1 100644
--- a/spec/frontend/emoji/index_spec.js
+++ b/spec/frontend/emoji/index_spec.js
@@ -134,9 +134,11 @@ describe('emoji', () => {
const emojiKey = 'bomb';
const markup = glEmojiTag(emojiKey);
- expect(trimText(markup)).toMatchInlineSnapshot(
- `"<gl-emoji data-name=\\"bomb\\"></gl-emoji>"`,
- );
+ expect(trimText(markup)).toMatchInlineSnapshot(`
+ <gl-emoji
+ data-name="bomb"
+ />
+ `);
});
it('bomb emoji with sprite fallback readiness', () => {
@@ -144,9 +146,12 @@ describe('emoji', () => {
const markup = glEmojiTag(emojiKey, {
sprite: true,
});
- expect(trimText(markup)).toMatchInlineSnapshot(
- `"<gl-emoji data-fallback-sprite-class=\\"emoji-bomb\\" data-name=\\"bomb\\"></gl-emoji>"`,
- );
+ expect(trimText(markup)).toMatchInlineSnapshot(`
+ <gl-emoji
+ data-fallback-sprite-class="emoji-bomb"
+ data-name="bomb"
+ />
+ `);
});
});
diff --git a/spec/frontend/environments/edit_environment_spec.js b/spec/frontend/environments/edit_environment_spec.js
index b55bbb34c65..9989c946800 100644
--- a/spec/frontend/environments/edit_environment_spec.js
+++ b/spec/frontend/environments/edit_environment_spec.js
@@ -7,7 +7,6 @@ import EditEnvironment from '~/environments/components/edit_environment.vue';
import { createAlert } from '~/alert';
import { visitUrl } from '~/lib/utils/url_utility';
import getEnvironment from '~/environments/graphql/queries/environment.query.graphql';
-import getEnvironmentWithFluxResource from '~/environments/graphql/queries/environment_with_flux_resource.query.graphql';
import updateEnvironment from '~/environments/graphql/mutations/update_environment.mutation.graphql';
import { __ } from '~/locale';
import createMockApollo from '../__helpers__/mock_apollo_helper';
@@ -44,9 +43,6 @@ describe('~/environments/components/edit.vue', () => {
let wrapper;
const getEnvironmentQuery = jest.fn().mockResolvedValue({ data: resolvedEnvironment });
- const getEnvironmentWithFluxResourceQuery = jest
- .fn()
- .mockResolvedValue({ data: resolvedEnvironment });
const updateEnvironmentSuccess = jest
.fn()
@@ -60,24 +56,17 @@ describe('~/environments/components/edit.vue', () => {
const mocks = [
[getEnvironment, getEnvironmentQuery],
- [getEnvironmentWithFluxResource, getEnvironmentWithFluxResourceQuery],
[updateEnvironment, mutationHandler],
];
return createMockApollo(mocks);
};
- const createWrapperWithApollo = async ({
- mutationHandler = updateEnvironmentSuccess,
- fluxResourceForEnvironment = false,
- } = {}) => {
+ const createWrapperWithApollo = async ({ mutationHandler = updateEnvironmentSuccess } = {}) => {
wrapper = mountExtended(EditEnvironment, {
propsData: { environment: {} },
provide: {
...provide,
- glFeatures: {
- fluxResourceForEnvironment,
- },
},
apolloProvider: createMockApolloProvider(mutationHandler),
});
@@ -170,11 +159,4 @@ describe('~/environments/components/edit.vue', () => {
});
});
});
-
- describe('when `fluxResourceForEnvironment` is enabled', () => {
- it('calls the `getEnvironmentWithFluxResource` query', () => {
- createWrapperWithApollo({ fluxResourceForEnvironment: true });
- expect(getEnvironmentWithFluxResourceQuery).toHaveBeenCalled();
- });
- });
});
diff --git a/spec/frontend/environments/environment_form_spec.js b/spec/frontend/environments/environment_form_spec.js
index 1b80b596db7..22dd7437d82 100644
--- a/spec/frontend/environments/environment_form_spec.js
+++ b/spec/frontend/environments/environment_form_spec.js
@@ -53,11 +53,7 @@ describe('~/environments/components/form.vue', () => {
},
});
- const createWrapperWithApollo = ({
- propsData = {},
- fluxResourceForEnvironment = false,
- queryResult = null,
- } = {}) => {
+ const createWrapperWithApollo = ({ propsData = {}, queryResult = null } = {}) => {
Vue.use(VueApollo);
const requestHandlers = [
@@ -83,9 +79,6 @@ describe('~/environments/components/form.vue', () => {
return mountExtended(EnvironmentForm, {
provide: {
...PROVIDE,
- glFeatures: {
- fluxResourceForEnvironment,
- },
},
propsData: {
...DEFAULT_PROPS,
@@ -422,39 +415,30 @@ describe('~/environments/components/form.vue', () => {
});
describe('flux resource selector', () => {
- it("doesn't render if `fluxResourceForEnvironment` feature flag is disabled", () => {
+ beforeEach(() => {
wrapper = createWrapperWithApollo();
+ });
+
+ it("doesn't render flux resource selector by default", () => {
expect(findFluxResourceSelector().exists()).toBe(false);
});
- describe('when `fluxResourceForEnvironment` feature flag is enabled', () => {
- beforeEach(() => {
- wrapper = createWrapperWithApollo({
- fluxResourceForEnvironment: true,
- });
+ describe('when the agent was selected', () => {
+ beforeEach(async () => {
+ await selectAgent();
});
- it("doesn't render flux resource selector by default", () => {
+ it("doesn't render flux resource selector", () => {
expect(findFluxResourceSelector().exists()).toBe(false);
});
- describe('when the agent was selected', () => {
- beforeEach(async () => {
- await selectAgent();
- });
-
- it("doesn't render flux resource selector", () => {
- expect(findFluxResourceSelector().exists()).toBe(false);
- });
-
- it('renders the flux resource selector when the namespace is selected', async () => {
- await findNamespaceSelector().vm.$emit('select', 'agent');
+ it('renders the flux resource selector when the namespace is selected', async () => {
+ await findNamespaceSelector().vm.$emit('select', 'agent');
- expect(findFluxResourceSelector().props()).toEqual({
- namespace: 'agent',
- fluxResourcePath: '',
- configuration,
- });
+ expect(findFluxResourceSelector().props()).toEqual({
+ namespace: 'agent',
+ fluxResourcePath: '',
+ configuration,
});
});
});
@@ -522,7 +506,6 @@ describe('~/environments/components/form.vue', () => {
beforeEach(() => {
wrapper = createWrapperWithApollo({
propsData: { environment: environmentWithAgentAndNamespace },
- fluxResourceForEnvironment: true,
});
});
diff --git a/spec/frontend/environments/new_environment_item_spec.js b/spec/frontend/environments/new_environment_item_spec.js
index bfcc4f4ebb6..7ee31bf2c62 100644
--- a/spec/frontend/environments/new_environment_item_spec.js
+++ b/spec/frontend/environments/new_environment_item_spec.js
@@ -13,7 +13,6 @@ import Deployment from '~/environments/components/deployment.vue';
import DeployBoardWrapper from '~/environments/components/deploy_board_wrapper.vue';
import KubernetesOverview from '~/environments/components/kubernetes_overview.vue';
import getEnvironmentClusterAgent from '~/environments/graphql/queries/environment_cluster_agent.query.graphql';
-import getEnvironmentClusterAgentWithFluxResource from '~/environments/graphql/queries/environment_cluster_agent_with_flux_resource.query.graphql';
import {
resolvedEnvironment,
rolloutStatus,
@@ -27,7 +26,6 @@ Vue.use(VueApollo);
describe('~/environments/components/new_environment_item.vue', () => {
let wrapper;
let queryResponseHandler;
- let queryWithFluxResourceResponseHandler;
const projectPath = '/1';
@@ -39,27 +37,14 @@ describe('~/environments/components/new_environment_item.vue', () => {
environment: {
id: '1',
kubernetesNamespace: 'default',
+ fluxResourcePath: fluxResourcePathMock,
clusterAgent,
},
},
},
};
queryResponseHandler = jest.fn().mockResolvedValue(response);
- queryWithFluxResourceResponseHandler = jest.fn().mockResolvedValue({
- data: {
- project: {
- id: response.data.project.id,
- environment: {
- ...response.data.project.environment,
- fluxResourcePath: fluxResourcePathMock,
- },
- },
- },
- });
- return createMockApollo([
- [getEnvironmentClusterAgent, queryResponseHandler],
- [getEnvironmentClusterAgentWithFluxResource, queryWithFluxResourceResponseHandler],
- ]);
+ return createMockApollo([[getEnvironmentClusterAgent, queryResponseHandler]]);
};
const createWrapper = ({ propsData = {}, provideData = {}, apolloProvider } = {}) =>
@@ -554,25 +539,6 @@ describe('~/environments/components/new_environment_item.vue', () => {
});
});
- it('should request agent data with Flux resource when `fluxResourceForEnvironment` feature flag is enabled', async () => {
- wrapper = createWrapper({
- propsData: { environment: resolvedEnvironment },
- provideData: {
- glFeatures: {
- fluxResourceForEnvironment: true,
- },
- },
- apolloProvider: createApolloProvider(agent),
- });
-
- await expandCollapsedSection();
-
- expect(queryWithFluxResourceResponseHandler).toHaveBeenCalledWith({
- environmentName: resolvedEnvironment.name,
- projectFullPath: projectPath,
- });
- });
-
it('should render if the environment has an agent associated', async () => {
wrapper = createWrapper({
propsData: { environment: resolvedEnvironment },
@@ -588,14 +554,9 @@ describe('~/environments/components/new_environment_item.vue', () => {
});
});
- it('should render with the namespace if `fluxResourceForEnvironment` feature flag is enabled and the environment has an agent associated', async () => {
+ it('should render with the namespace if the environment has an agent associated', async () => {
wrapper = createWrapper({
propsData: { environment: resolvedEnvironment },
- provideData: {
- glFeatures: {
- fluxResourceForEnvironment: true,
- },
- },
apolloProvider: createApolloProvider(agent),
});
diff --git a/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js b/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js
index 6156addd63f..b503a6f829e 100644
--- a/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js
+++ b/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js
@@ -1,7 +1,6 @@
-import { GlLoadingIcon, GlSearchBoxByType, GlDropdownItem } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { GlCollapsibleListbox } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
-import { nextTick } from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import NewEnvironmentsDropdown from '~/feature_flags/components/new_environments_dropdown.vue';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
@@ -13,87 +12,78 @@ describe('New Environments Dropdown', () => {
let wrapper;
let axiosMock;
- beforeEach(() => {
+ const createWrapper = (axiosResult = []) => {
axiosMock = new MockAdapter(axios);
- wrapper = shallowMount(NewEnvironmentsDropdown, {
+ axiosMock.onGet(TEST_HOST).reply(HTTP_STATUS_OK, axiosResult);
+
+ wrapper = shallowMountExtended(NewEnvironmentsDropdown, {
provide: { environmentsEndpoint: TEST_HOST },
+ stubs: {
+ GlCollapsibleListbox,
+ },
});
- });
+ };
+
+ const findListbox = () => wrapper.findComponent(GlCollapsibleListbox);
+ const findCreateEnvironmentButton = () => wrapper.findByTestId('add-environment-button');
afterEach(() => {
axiosMock.restore();
});
describe('before results', () => {
+ beforeEach(() => {
+ createWrapper();
+ });
+
it('should show a loading icon', () => {
- axiosMock.onGet(TEST_HOST).reply(() => {
- expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
- });
- wrapper.findComponent(GlSearchBoxByType).vm.$emit('focus');
- return axios.waitForAll();
+ expect(findListbox().props('searching')).toBe(true);
});
it('should not show any dropdown items', () => {
- axiosMock.onGet(TEST_HOST).reply(() => {
- expect(wrapper.findAllComponents(GlDropdownItem)).toHaveLength(0);
- });
- wrapper.findComponent(GlSearchBoxByType).vm.$emit('focus');
- return axios.waitForAll();
+ expect(findListbox().props('items')).toEqual([]);
});
});
describe('with empty results', () => {
- let item;
beforeEach(async () => {
- axiosMock.onGet(TEST_HOST).reply(HTTP_STATUS_OK, []);
- wrapper.findComponent(GlSearchBoxByType).vm.$emit('focus');
- wrapper.findComponent(GlSearchBoxByType).vm.$emit('input', TEST_SEARCH);
+ createWrapper();
+ findListbox().vm.$emit('search', TEST_SEARCH);
await axios.waitForAll();
- await nextTick();
- item = wrapper.findComponent(GlDropdownItem);
});
it('should display a Create item label', () => {
- expect(item.text()).toBe('Create production');
- });
-
- it('should display that no matching items are found', () => {
- expect(wrapper.findComponent({ ref: 'noResults' }).exists()).toBe(true);
+ expect(findCreateEnvironmentButton().text()).toBe(`Create ${TEST_SEARCH}`);
});
it('should emit a new scope when selected', () => {
- item.vm.$emit('click');
+ findCreateEnvironmentButton().vm.$emit('click');
expect(wrapper.emitted('add')).toEqual([[TEST_SEARCH]]);
});
});
describe('with results', () => {
- let items;
- beforeEach(() => {
- axiosMock.onGet(TEST_HOST).reply(HTTP_STATUS_OK, ['prod', 'production']);
- wrapper.findComponent(GlSearchBoxByType).vm.$emit('focus');
- wrapper.findComponent(GlSearchBoxByType).vm.$emit('input', 'prod');
- return axios.waitForAll().then(() => {
- items = wrapper.findAllComponents(GlDropdownItem);
- });
+ beforeEach(async () => {
+ createWrapper(['prod', 'production']);
+ findListbox().vm.$emit('search', TEST_SEARCH);
+ await axios.waitForAll();
});
- it('should display one item per result', () => {
- expect(items).toHaveLength(2);
+ it('should populate results properly', () => {
+ expect(findListbox().props().items).toHaveLength(2);
});
- it('should emit an add if an item is clicked', () => {
- items.at(0).vm.$emit('click');
+ it('should emit an add on selection', () => {
+ findListbox().vm.$emit('select', ['prod']);
expect(wrapper.emitted('add')).toEqual([['prod']]);
});
- it('should not display a create label', () => {
- items = items.filter((i) => i.text().startsWith('Create'));
- expect(items).toHaveLength(0);
- });
-
it('should not display a message about no results', () => {
expect(wrapper.findComponent({ ref: 'noResults' }).exists()).toBe(false);
});
+
+ it('should not display a footer with the create button', () => {
+ expect(findCreateEnvironmentButton().exists()).toBe(false);
+ });
});
});
diff --git a/spec/frontend/feature_flags/components/strategy_spec.js b/spec/frontend/feature_flags/components/strategy_spec.js
index ca6e338ac6c..90021829212 100644
--- a/spec/frontend/feature_flags/components/strategy_spec.js
+++ b/spec/frontend/feature_flags/components/strategy_spec.js
@@ -1,11 +1,14 @@
import { GlAlert, GlFormSelect, GlLink, GlToken, GlButton } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
import { last } from 'lodash';
// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import Api from '~/api';
+import axios from '~/lib/utils/axios_utils';
import NewEnvironmentsDropdown from '~/feature_flags/components/new_environments_dropdown.vue';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import Strategy from '~/feature_flags/components/strategy.vue';
import StrategyParameters from '~/feature_flags/components/strategy_parameters.vue';
import {
@@ -22,16 +25,18 @@ import { userList } from '../mock_data';
jest.mock('~/api');
+const TEST_HOST = '/test';
const provide = {
strategyTypeDocsPagePath: 'link-to-strategy-docs',
environmentsScopeDocsPath: 'link-scope-docs',
- environmentsEndpoint: '',
+ environmentsEndpoint: TEST_HOST,
};
Vue.use(Vuex);
describe('Feature flags strategy', () => {
let wrapper;
+ let axiosMock;
const findStrategyParameters = () => wrapper.findComponent(StrategyParameters);
const findDocsLinks = () => wrapper.findAllComponents(GlLink);
@@ -45,6 +50,8 @@ describe('Feature flags strategy', () => {
provide,
},
) => {
+ axiosMock = new MockAdapter(axios);
+ axiosMock.onGet(TEST_HOST).reply(HTTP_STATUS_OK, []);
wrapper = mount(Strategy, { store: createStore({ projectId: '1' }), ...opts });
};
@@ -52,6 +59,10 @@ describe('Feature flags strategy', () => {
Api.searchFeatureFlagUserLists.mockResolvedValue({ data: [userList] });
});
+ afterEach(() => {
+ axiosMock.restore();
+ });
+
describe('helper links', () => {
const propsData = { strategy: {}, index: 0, userLists: [userList] };
factory({ propsData, provide });
diff --git a/spec/frontend/filtered_search/filtered_search_manager_spec.js b/spec/frontend/filtered_search/filtered_search_manager_spec.js
index 8c16ff100eb..c55099d89d9 100644
--- a/spec/frontend/filtered_search/filtered_search_manager_spec.js
+++ b/spec/frontend/filtered_search/filtered_search_manager_spec.js
@@ -24,7 +24,7 @@ describe('Filtered Search Manager', () => {
let manager;
let tokensContainer;
const page = 'issues';
- const placeholder = 'Search or filter results...';
+ const placeholder = 'Search or filter results…';
function dispatchBackspaceEvent(element, eventType) {
const event = new Event(eventType);
diff --git a/spec/frontend/fixtures/abuse_reports.rb b/spec/frontend/fixtures/abuse_reports.rb
deleted file mode 100644
index ad0fb9be8dc..00000000000
--- a/spec/frontend/fixtures/abuse_reports.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Admin::AbuseReportsController, '(JavaScript fixtures)', type: :controller do
- include JavaScriptFixturesHelpers
- include AdminModeHelper
-
- let(:admin) { create(:admin) }
- let!(:abuse_report) { create(:abuse_report) }
- let!(:abuse_report_with_short_message) { create(:abuse_report, message: 'SHORT MESSAGE') }
- let!(:abuse_report_with_long_message) { create(:abuse_report, message: "LONG MESSAGE\n" * 50) }
-
- render_views
-
- before do
- stub_feature_flags(abuse_reports_list: false)
-
- sign_in(admin)
- enable_admin_mode!(admin)
- end
-
- it 'abuse_reports/abuse_reports_list.html' do
- get :index
-
- expect(response).to be_successful
- end
-end
diff --git a/spec/frontend/fixtures/issues.rb b/spec/frontend/fixtures/issues.rb
index 73594ddf686..9e6fcea2d17 100644
--- a/spec/frontend/fixtures/issues.rb
+++ b/spec/frontend/fixtures/issues.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Projects::IssuesController, '(JavaScript fixtures)', :with_license, type: :controller do
include JavaScriptFixturesHelpers
- let(:user) { create(:user, feed_token: 'feedtoken:coldfeed') }
+ let(:user) { create(:user, :no_super_sidebar, feed_token: 'feedtoken:coldfeed') }
let(:namespace) { create(:namespace, name: 'frontend-fixtures') }
let(:project) { create(:project_empty_repo, namespace: namespace, path: 'issues-project') }
diff --git a/spec/frontend/fixtures/jobs.rb b/spec/frontend/fixtures/jobs.rb
index 6c0b87c5a68..1502999ac9c 100644
--- a/spec/frontend/fixtures/jobs.rb
+++ b/spec/frontend/fixtures/jobs.rb
@@ -89,28 +89,28 @@ RSpec.describe 'Jobs (JavaScript fixtures)' do
end
end
- it_behaves_like 'graphql queries', 'jobs/components/table/graphql/queries', 'get_jobs.query.graphql' do
+ it_behaves_like 'graphql queries', 'ci/jobs_page/graphql/queries', 'get_jobs.query.graphql' do
let(:variables) { { fullPath: 'frontend-fixtures/builds-project' } }
let(:success_path) { %w[project jobs] }
end
- it_behaves_like 'graphql queries', 'jobs/components/table/graphql/queries', 'get_jobs_count.query.graphql', true do
+ it_behaves_like 'graphql queries', 'ci/jobs_page/graphql/queries', 'get_jobs_count.query.graphql', true do
let(:variables) { { fullPath: 'frontend-fixtures/builds-project' } }
let(:success_path) { %w[project jobs] }
end
- it_behaves_like 'graphql queries', 'pages/admin/jobs/components/table/graphql/queries', 'get_all_jobs.query.graphql' do
+ it_behaves_like 'graphql queries', 'ci/admin/jobs_table/graphql/queries', 'get_all_jobs.query.graphql' do
let(:user) { create(:admin) }
let(:success_path) { 'jobs' }
end
- it_behaves_like 'graphql queries', 'pages/admin/jobs/components/table/graphql/queries', 'get_cancelable_jobs_count.query.graphql', true do
+ it_behaves_like 'graphql queries', 'ci/admin/jobs_table/graphql/queries', 'get_cancelable_jobs_count.query.graphql', true do
let(:variables) { { statuses: %w[PENDING RUNNING] } }
let(:user) { create(:admin) }
let(:success_path) { %w[cancelable count] }
end
- it_behaves_like 'graphql queries', 'pages/admin/jobs/components/table/graphql/queries', 'get_all_jobs_count.query.graphql', true do
+ it_behaves_like 'graphql queries', 'ci/admin/jobs_table/graphql/queries', 'get_all_jobs_count.query.graphql', true do
let(:user) { create(:admin) }
let(:success_path) { 'jobs' }
end
diff --git a/spec/frontend/fixtures/pipeline_header.rb b/spec/frontend/fixtures/pipeline_header.rb
index 3fdc45b1194..744df18a403 100644
--- a/spec/frontend/fixtures/pipeline_header.rb
+++ b/spec/frontend/fixtures/pipeline_header.rb
@@ -12,7 +12,7 @@ RSpec.describe "GraphQL Pipeline Header", '(JavaScript fixtures)', type: :reques
let_it_be(:user) { project.first_owner }
let_it_be(:commit) { create(:commit, project: project) }
- let(:query_path) { 'pipelines/graphql/queries/get_pipeline_header_data.query.graphql' }
+ let(:query_path) { 'ci/pipeline_details/header/graphql/queries/get_pipeline_header_data.query.graphql' }
context 'with successful pipeline' do
let_it_be(:pipeline) do
diff --git a/spec/frontend/fixtures/pipeline_schedules.rb b/spec/frontend/fixtures/pipeline_schedules.rb
index 7bba7910b87..4c95e7ecd20 100644
--- a/spec/frontend/fixtures/pipeline_schedules.rb
+++ b/spec/frontend/fixtures/pipeline_schedules.rb
@@ -16,35 +16,6 @@ RSpec.describe 'Pipeline schedules (JavaScript fixtures)' do
let!(:pipeline_schedule_variable1) { create(:ci_pipeline_schedule_variable, key: 'foo', value: 'foovalue', pipeline_schedule: pipeline_schedule_populated) }
let!(:pipeline_schedule_variable2) { create(:ci_pipeline_schedule_variable, key: 'bar', value: 'barvalue', pipeline_schedule: pipeline_schedule_populated) }
- describe Projects::PipelineSchedulesController, type: :controller do
- render_views
-
- before do
- sign_in(user)
- stub_feature_flags(pipeline_schedules_vue: false)
- end
-
- it 'pipeline_schedules/edit.html' do
- get :edit, params: {
- namespace_id: project.namespace.to_param,
- project_id: project,
- id: pipeline_schedule.id
- }
-
- expect(response).to be_successful
- end
-
- it 'pipeline_schedules/edit_with_variables.html' do
- get :edit, params: {
- namespace_id: project.namespace.to_param,
- project_id: project,
- id: pipeline_schedule_populated.id
- }
-
- expect(response).to be_successful
- end
- end
-
describe GraphQL::Query, type: :request do
before do
pipeline_schedule.pipelines << build(:ci_pipeline, project: project)
diff --git a/spec/frontend/fixtures/pipelines.rb b/spec/frontend/fixtures/pipelines.rb
index 24a6f6f7de6..151d4a763c0 100644
--- a/spec/frontend/fixtures/pipelines.rb
+++ b/spec/frontend/fixtures/pipelines.rb
@@ -71,7 +71,7 @@ RSpec.describe Projects::PipelinesController, '(JavaScript fixtures)', type: :co
end
let_it_be(:query) do
- get_graphql_query_as_string("pipelines/graphql/queries/#{get_pipeline_actions_query}")
+ get_graphql_query_as_string("ci/pipelines_page/graphql/queries/#{get_pipeline_actions_query}")
end
it "#{fixtures_path}#{get_pipeline_actions_query}.json" do
diff --git a/spec/frontend/fixtures/snippet.rb b/spec/frontend/fixtures/snippet.rb
index 0510746a944..23df89a244c 100644
--- a/spec/frontend/fixtures/snippet.rb
+++ b/spec/frontend/fixtures/snippet.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
RSpec.describe SnippetsController, '(JavaScript fixtures)', type: :controller do
include JavaScriptFixturesHelpers
- let(:namespace) { create(:namespace, name: 'frontend-fixtures') }
+ let(:user) { create(:user, :no_super_sidebar) }
+ let(:namespace) { create(:namespace, name: 'frontend-fixtures', owner: user) }
let(:project) { create(:project, :repository, namespace: namespace, path: 'branches-project') }
- let(:user) { project.first_owner }
let(:snippet) { create(:personal_snippet, :public, title: 'snippet.md', content: '# snippet', file_name: 'snippet.md', author: user) }
render_views
diff --git a/spec/frontend/groups/components/empty_states/groups_dashboard_empty_state_spec.js b/spec/frontend/groups/components/empty_states/groups_dashboard_empty_state_spec.js
new file mode 100644
index 00000000000..d2afbad802c
--- /dev/null
+++ b/spec/frontend/groups/components/empty_states/groups_dashboard_empty_state_spec.js
@@ -0,0 +1,29 @@
+import { GlEmptyState } from '@gitlab/ui';
+
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import GroupsDashboardEmptyState from '~/groups/components/empty_states/groups_dashboard_empty_state.vue';
+
+let wrapper;
+
+const defaultProvide = {
+ groupsEmptyStateIllustration: '/assets/illustrations/empty-state/empty-groups-md.svg',
+};
+
+const createComponent = () => {
+ wrapper = shallowMountExtended(GroupsDashboardEmptyState, {
+ provide: defaultProvide,
+ });
+};
+
+describe('GroupsDashboardEmptyState', () => {
+ it('renders empty state', () => {
+ createComponent();
+
+ expect(wrapper.findComponent(GlEmptyState).props()).toMatchObject({
+ title: 'A group is a collection of several projects',
+ description:
+ "If you organize your projects under a group, it works like a folder. You can manage your group member's permissions and access to each project in the group.",
+ svgPath: defaultProvide.groupsEmptyStateIllustration,
+ });
+ });
+});
diff --git a/spec/frontend/groups/components/empty_states/groups_explore_empty_state_spec.js b/spec/frontend/groups/components/empty_states/groups_explore_empty_state_spec.js
new file mode 100644
index 00000000000..f4c425902f5
--- /dev/null
+++ b/spec/frontend/groups/components/empty_states/groups_explore_empty_state_spec.js
@@ -0,0 +1,27 @@
+import { GlEmptyState } from '@gitlab/ui';
+
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import GroupsExploreEmptyState from '~/groups/components/empty_states/groups_explore_empty_state.vue';
+
+let wrapper;
+
+const defaultProvide = {
+ groupsEmptyStateIllustration: '/assets/illustrations/empty-state/empty-groups-md.svg',
+};
+
+const createComponent = () => {
+ wrapper = shallowMountExtended(GroupsExploreEmptyState, {
+ provide: defaultProvide,
+ });
+};
+
+describe('GroupsExploreEmptyState', () => {
+ it('renders empty state', () => {
+ createComponent();
+
+ expect(wrapper.findComponent(GlEmptyState).props()).toMatchObject({
+ title: 'No public groups',
+ svgPath: defaultProvide.groupsEmptyStateIllustration,
+ });
+ });
+});
diff --git a/spec/frontend/ide/components/file_templates/dropdown_spec.js b/spec/frontend/ide/components/file_templates/dropdown_spec.js
deleted file mode 100644
index 9ccdaf8b916..00000000000
--- a/spec/frontend/ide/components/file_templates/dropdown_spec.js
+++ /dev/null
@@ -1,168 +0,0 @@
-import { GlLoadingIcon } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import Vue, { nextTick } from 'vue';
-import $ from 'jquery';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
-import Dropdown from '~/ide/components/file_templates/dropdown.vue';
-
-Vue.use(Vuex);
-
-describe('IDE file templates dropdown component', () => {
- let wrapper;
- let element;
- let fetchTemplateTypesMock;
-
- const defaultProps = {
- label: 'label',
- };
-
- const findItemButtons = () => wrapper.findAll('button');
- const findSearch = () => wrapper.find('input[type="search"]');
- const triggerDropdown = () => $(element).trigger('show.bs.dropdown');
-
- const createComponent = ({ props, state } = {}) => {
- fetchTemplateTypesMock = jest.fn();
- const fakeStore = new Vuex.Store({
- modules: {
- fileTemplates: {
- namespaced: true,
- state: {
- templates: [],
- isLoading: false,
- ...state,
- },
- actions: {
- fetchTemplateTypes: fetchTemplateTypesMock,
- },
- },
- },
- });
-
- wrapper = shallowMount(Dropdown, {
- propsData: {
- ...defaultProps,
- ...props,
- },
- store: fakeStore,
- });
-
- ({ element } = wrapper);
- };
-
- it('calls clickItem on click', async () => {
- const itemData = { name: 'test.yml ' };
- createComponent({ props: { data: [itemData] } });
- const item = findItemButtons().at(0);
- item.trigger('click');
-
- await nextTick();
- expect(wrapper.emitted().click[0][0]).toBe(itemData);
- });
-
- it('renders dropdown title', () => {
- const title = 'Test title';
- createComponent({ props: { title } });
-
- expect(wrapper.find('.dropdown-title').text()).toContain(title);
- });
-
- describe('in async mode', () => {
- const defaultAsyncProps = { ...defaultProps, isAsyncData: true };
-
- it('calls `fetchTemplateTypes` on dropdown event', () => {
- createComponent({ props: defaultAsyncProps });
-
- triggerDropdown();
-
- expect(fetchTemplateTypesMock).toHaveBeenCalled();
- });
-
- it('does not call `fetchTemplateTypes` on dropdown event if destroyed', () => {
- createComponent({ props: defaultAsyncProps });
- wrapper.destroy();
-
- triggerDropdown();
-
- expect(fetchTemplateTypesMock).not.toHaveBeenCalled();
- });
-
- it('shows loader when isLoading is true', () => {
- createComponent({ props: defaultAsyncProps, state: { isLoading: true } });
-
- expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
- });
-
- it('renders templates', () => {
- const templates = [{ name: 'file-1' }, { name: 'file-2' }];
- createComponent({
- props: { ...defaultAsyncProps, data: [{ name: 'should-never-appear ' }] },
- state: {
- templates,
- },
- });
- const items = findItemButtons();
-
- expect(items.wrappers.map((x) => x.text())).toEqual(templates.map((x) => x.name));
- });
-
- it('searches template data', async () => {
- const templates = [{ name: 'match 1' }, { name: 'other' }, { name: 'match 2' }];
- const matches = ['match 1', 'match 2'];
- createComponent({
- props: { ...defaultAsyncProps, data: matches, searchable: true },
- state: { templates },
- });
- findSearch().setValue('match');
- await nextTick();
- const items = findItemButtons();
-
- expect(items.length).toBe(matches.length);
- expect(items.wrappers.map((x) => x.text())).toEqual(matches);
- });
-
- it('does not render input when `searchable` is true & `showLoading` is true', () => {
- createComponent({
- props: { ...defaultAsyncProps, searchable: true },
- state: { isLoading: true },
- });
-
- expect(findSearch().exists()).toBe(false);
- });
- });
-
- describe('in sync mode', () => {
- it('renders props data', () => {
- const data = [{ name: 'file-1' }, { name: 'file-2' }];
- createComponent({
- props: { data },
- state: {
- templates: [{ name: 'should-never-appear ' }],
- },
- });
-
- const items = findItemButtons();
-
- expect(items.length).toBe(data.length);
- expect(items.wrappers.map((x) => x.text())).toEqual(data.map((x) => x.name));
- });
-
- it('renders input when `searchable` is true', () => {
- createComponent({ props: { searchable: true } });
-
- expect(findSearch().exists()).toBe(true);
- });
-
- it('searches data', async () => {
- const data = [{ name: 'match 1' }, { name: 'other' }, { name: 'match 2' }];
- const matches = ['match 1', 'match 2'];
- createComponent({ props: { searchable: true, data } });
- findSearch().setValue('match');
- await nextTick();
- const items = findItemButtons();
-
- expect(items.length).toBe(matches.length);
- expect(items.wrappers.map((x) => x.text())).toEqual(matches);
- });
- });
-});
diff --git a/spec/frontend/ide/components/pipelines/__snapshots__/list_spec.js.snap b/spec/frontend/ide/components/pipelines/__snapshots__/list_spec.js.snap
index 069b6927bac..f7b690fb3a4 100644
--- a/spec/frontend/ide/components/pipelines/__snapshots__/list_spec.js.snap
+++ b/spec/frontend/ide/components/pipelines/__snapshots__/list_spec.js.snap
@@ -4,10 +4,8 @@ exports[`IDE pipelines list when loaded renders empty state when no latestPipeli
<div
class="ide-pipeline"
>
- <!---->
-
<div
- class="gl-h-full gl-display-flex gl-flex-direction-column gl-justify-content-center"
+ class="gl-display-flex gl-flex-direction-column gl-h-full gl-justify-content-center"
>
<empty-state-stub />
</div>
diff --git a/spec/frontend/ide/init_gitlab_web_ide_spec.js b/spec/frontend/ide/init_gitlab_web_ide_spec.js
index f8af8459025..efbbd6c7514 100644
--- a/spec/frontend/ide/init_gitlab_web_ide_spec.js
+++ b/spec/frontend/ide/init_gitlab_web_ide_spec.js
@@ -18,6 +18,7 @@ jest.mock('~/lib/utils/csrf', () => ({
const ROOT_ELEMENT_ID = 'ide';
const TEST_NONCE = 'test123nonce';
+const TEST_USERNAME = 'lipsum';
const TEST_PROJECT_PATH = 'group1/project1';
const TEST_BRANCH_NAME = '12345-foo-patch';
const TEST_USER_PREFERENCES_PATH = '/user/preferences';
@@ -69,6 +70,7 @@ describe('ide/init_gitlab_web_ide', () => {
};
beforeEach(() => {
+ gon.current_username = TEST_USERNAME;
process.env.GITLAB_WEB_IDE_PUBLIC_PATH = TEST_GITLAB_WEB_IDE_PUBLIC_PATH;
confirmAction.mockImplementation(
@@ -100,6 +102,7 @@ describe('ide/init_gitlab_web_ide', () => {
mrId: TEST_MR_ID,
mrTargetProject: '',
forkInfo: null,
+ username: gon.current_username,
gitlabUrl: TEST_HOST,
nonce: TEST_NONCE,
httpHeaders: {
diff --git a/spec/frontend/ide/lib/gitlab_web_ide/setup_root_element_spec.js b/spec/frontend/ide/lib/gitlab_web_ide/setup_root_element_spec.js
index 35cf41b31f5..011f2564cec 100644
--- a/spec/frontend/ide/lib/gitlab_web_ide/setup_root_element_spec.js
+++ b/spec/frontend/ide/lib/gitlab_web_ide/setup_root_element_spec.js
@@ -24,8 +24,8 @@ describe('~/ide/lib/gitlab_web_ide/setup_root_element', () => {
expect(result).toBe(findIDERoot());
expect(result).toMatchInlineSnapshot(`
<div
- class="gl--flex-center gl-relative gl-h-full"
- id="ide-test-root"
+ class="gl--flex-center gl-h-full gl-relative"
+ id="reference-0"
/>
`);
});
diff --git a/spec/frontend/import_entities/import_groups/components/import_table_spec.js b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
index dae5671777c..03d0920994c 100644
--- a/spec/frontend/import_entities/import_groups/components/import_table_spec.js
+++ b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
@@ -317,7 +317,7 @@ describe('import table', () => {
});
it('updates page size when selected in Dropdown', async () => {
- const otherOption = findPaginationDropdown().findAll('li p').at(1);
+ const otherOption = findPaginationDropdown().findAll('.gl-new-dropdown-item-content').at(1);
expect(otherOption.text()).toMatchInterpolatedText('50 items per page');
bulkImportSourceGroupsQueryMock.mockResolvedValue({
diff --git a/spec/frontend/incidents/components/incidents_list_spec.js b/spec/frontend/incidents/components/incidents_list_spec.js
index a0710ddb06c..470d63e7c2a 100644
--- a/spec/frontend/incidents/components/incidents_list_spec.js
+++ b/spec/frontend/incidents/components/incidents_list_spec.js
@@ -46,7 +46,7 @@ describe('Incidents List', () => {
const findLoader = () => wrapper.findComponent(GlLoadingIcon);
const findTimeAgo = () => wrapper.findAllComponents(TimeAgoTooltip);
const findAssignees = () => wrapper.findAll('[data-testid="incident-assignees"]');
- const findCreateIncidentBtn = () => wrapper.find('[data-testid="createIncidentBtn"]');
+ const findCreateIncidentBtn = () => wrapper.find('[data-testid="create-incident-button"]');
const findClosedIcon = () => wrapper.findAll("[data-testid='incident-closed']");
const findEmptyState = () => wrapper.findComponent(GlEmptyState);
const findSeverity = () => wrapper.findAllComponents(SeverityToken);
diff --git a/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap b/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap
index b5f8f0023f9..f8a7c47e634 100644
--- a/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap
+++ b/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap
@@ -2,14 +2,11 @@
exports[`Alert integration settings form should match the default snapshot 1`] = `
<div>
- <!---->
-
<p>
<gl-sprintf-stub
message="Create a GitLab incident for each PagerDuty incident by %{linkStart}configuring a webhook in PagerDuty%{linkEnd}"
/>
</p>
-
<form>
<gl-form-group-stub
class="col-8 col-md-9 gl-p-0"
@@ -17,13 +14,12 @@ exports[`Alert integration settings form should match the default snapshot 1`] =
optionaltext="(optional)"
>
<gl-toggle-stub
- id="active"
+ id="reference-0"
label="Active"
labelposition="top"
value="true"
/>
</gl-form-group-stub>
-
<gl-form-group-stub
class="col-8 col-md-9 gl-p-0"
label="Webhook URL"
@@ -33,7 +29,7 @@ exports[`Alert integration settings form should match the default snapshot 1`] =
>
<gl-form-input-group-stub
data-testid="webhook-url"
- id="url"
+ id="reference-1"
inputclass=""
predefinedoptions="[object Object]"
readonly=""
@@ -48,7 +44,6 @@ exports[`Alert integration settings form should match the default snapshot 1`] =
variant="default"
/>
</gl-form-input-group-stub>
-
<gl-button-stub
buttontextclasses=""
category="primary"
@@ -60,11 +55,8 @@ exports[`Alert integration settings form should match the default snapshot 1`] =
tabindex="0"
variant="default"
>
-
Reset webhook URL
-
</gl-button-stub>
-
<gl-modal-stub
actioncancel="[object Object]"
actionprimary="[object Object]"
@@ -76,9 +68,7 @@ exports[`Alert integration settings form should match the default snapshot 1`] =
title="Reset webhook URL"
titletag="h4"
>
-
Resetting the webhook URL for this project will require updating this integration's settings in PagerDuty.
-
</gl-modal-stub>
</gl-form-group-stub>
</form>
diff --git a/spec/frontend/integrations/index/mock_data.js b/spec/frontend/integrations/index/mock_data.js
index c07b320c0d3..65c1e5643e9 100644
--- a/spec/frontend/integrations/index/mock_data.js
+++ b/spec/frontend/integrations/index/mock_data.js
@@ -1,6 +1,7 @@
export const mockActiveIntegrations = [
{
active: true,
+ configured: true,
title: 'Asana',
description: 'Asana - Teamwork without email',
updated_at: '2021-03-18T00:27:09.634Z',
@@ -10,6 +11,7 @@ export const mockActiveIntegrations = [
},
{
active: true,
+ configured: true,
title: 'Jira',
description: 'Jira issue tracker',
updated_at: '2021-01-29T06:41:25.806Z',
@@ -22,6 +24,7 @@ export const mockActiveIntegrations = [
export const mockInactiveIntegrations = [
{
active: false,
+ configured: false,
title: 'Webex Teams',
description: 'Receive event notifications in Webex Teams',
updated_at: null,
@@ -31,6 +34,7 @@ export const mockInactiveIntegrations = [
},
{
active: false,
+ configured: false,
title: 'YouTrack',
description: 'YouTrack issue tracker',
updated_at: null,
@@ -40,6 +44,7 @@ export const mockInactiveIntegrations = [
},
{
active: false,
+ configured: false,
title: 'Atlassian Bamboo CI',
description: 'A continuous integration and build server',
updated_at: null,
@@ -49,6 +54,7 @@ export const mockInactiveIntegrations = [
},
{
active: false,
+ configured: false,
title: 'Prometheus',
description: 'A monitoring tool for Kubernetes',
updated_at: null,
diff --git a/spec/frontend/invite_members/components/invite_members_modal_spec.js b/spec/frontend/invite_members/components/invite_members_modal_spec.js
index 1a9b0fae52a..526487f6460 100644
--- a/spec/frontend/invite_members/components/invite_members_modal_spec.js
+++ b/spec/frontend/invite_members/components/invite_members_modal_spec.js
@@ -1,4 +1,4 @@
-import { GlLink, GlModal, GlSprintf, GlFormGroup, GlCollapse, GlIcon } from '@gitlab/ui';
+import { GlModal, GlSprintf, GlFormGroup, GlCollapse, GlIcon } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
import { stubComponent } from 'helpers/stub_component';
@@ -12,7 +12,6 @@ import ModalConfetti from '~/invite_members/components/confetti.vue';
import MembersTokenSelect from '~/invite_members/components/members_token_select.vue';
import UserLimitNotification from '~/invite_members/components/user_limit_notification.vue';
import {
- INVITE_MEMBERS_FOR_TASK,
MEMBERS_MODAL_CELEBRATE_INTRO,
MEMBERS_MODAL_CELEBRATE_TITLE,
MEMBERS_PLACEHOLDER,
@@ -31,7 +30,6 @@ import {
HTTP_STATUS_CREATED,
HTTP_STATUS_INTERNAL_SERVER_ERROR,
} from '~/lib/utils/http_status';
-import { getParameterValues } from '~/lib/utils/url_utility';
import {
displaySuccessfulInvitationAlert,
reloadOnInvitationSuccess,
@@ -54,10 +52,6 @@ import {
jest.mock('~/invite_members/utils/trigger_successful_invite_alert');
jest.mock('~/experimentation/experiment_tracking');
-jest.mock('~/lib/utils/url_utility', () => ({
- ...jest.requireActual('~/lib/utils/url_utility'),
- getParameterValues: jest.fn(() => []),
-}));
describe('InviteMembersModal', () => {
let wrapper;
@@ -129,7 +123,6 @@ describe('InviteMembersModal', () => {
});
const findModal = () => wrapper.findComponent(GlModal);
- const findBase = () => wrapper.findComponent(InviteModalBase);
const findIntroText = () => wrapper.findByTestId('modal-base-intro-text').text();
const findEmptyInvitesAlert = () => wrapper.findByTestId('empty-invites-alert');
const findMemberErrorAlert = () => wrapper.findByTestId('alert-member-error');
@@ -155,10 +148,6 @@ describe('InviteMembersModal', () => {
findMembersFormGroup().attributes('invalid-feedback');
const membersFormGroupDescription = () => findMembersFormGroup().attributes('description');
const findMembersSelect = () => wrapper.findComponent(MembersTokenSelect);
- const findTasksToBeDone = () => wrapper.findByTestId('invite-members-modal-tasks-to-be-done');
- const findTasks = () => wrapper.findByTestId('invite-members-modal-tasks');
- const findProjectSelect = () => wrapper.findByTestId('invite-members-modal-project-select');
- const findNoProjectsAlert = () => wrapper.findByTestId('invite-members-modal-no-projects-alert');
const findCelebrationEmoji = () => wrapper.findComponent(GlEmoji);
const triggerOpenModal = async ({ mode = 'default', source } = {}) => {
eventHub.$emit('openModal', { mode, source });
@@ -168,131 +157,11 @@ describe('InviteMembersModal', () => {
findMembersSelect().vm.$emit('input', val);
await nextTick();
};
- const triggerTasks = async (val) => {
- findTasks().vm.$emit('input', val);
- await nextTick();
- };
- const triggerAccessLevel = async (val) => {
- findBase().vm.$emit('access-level', val);
- await nextTick();
- };
const removeMembersToken = async (val) => {
findMembersSelect().vm.$emit('token-remove', val);
await nextTick();
};
- describe('rendering the tasks to be done', () => {
- const setupComponent = async (props = {}, urlParameter = ['invite_members_for_task']) => {
- getParameterValues.mockImplementation(() => urlParameter);
- createComponent(props);
-
- await triggerAccessLevel(30);
- };
-
- const setupComponentWithTasks = async (...args) => {
- await setupComponent(...args);
- await triggerTasks(['ci', 'code']);
- };
-
- afterAll(() => {
- getParameterValues.mockImplementation(() => []);
- });
-
- it('renders the tasks to be done', async () => {
- await setupComponent();
-
- expect(findTasksToBeDone().exists()).toBe(true);
- });
-
- describe('when the selected access level is lower than 30', () => {
- it('does not render the tasks to be done', async () => {
- await setupComponent();
- await triggerAccessLevel(20);
-
- expect(findTasksToBeDone().exists()).toBe(false);
- });
- });
-
- describe('when the url does not contain the parameter `open_modal=invite_members_for_task`', () => {
- it('does not render the tasks to be done', async () => {
- await setupComponent({}, []);
-
- expect(findTasksToBeDone().exists()).toBe(false);
- });
- });
-
- describe('rendering the tasks', () => {
- it('renders the tasks', async () => {
- await setupComponent();
-
- expect(findTasks().exists()).toBe(true);
- });
-
- it('does not render an alert', async () => {
- await setupComponent();
-
- expect(findNoProjectsAlert().exists()).toBe(false);
- });
-
- describe('when there are no projects passed in the data', () => {
- it('does not render the tasks', async () => {
- await setupComponent({ projects: [] });
-
- expect(findTasks().exists()).toBe(false);
- });
-
- it('renders an alert with a link to the new projects path', async () => {
- await setupComponent({ projects: [] });
-
- expect(findNoProjectsAlert().exists()).toBe(true);
- expect(findNoProjectsAlert().findComponent(GlLink).attributes('href')).toBe(
- newProjectPath,
- );
- });
- });
- });
-
- describe('rendering the project dropdown', () => {
- it('renders the project select', async () => {
- await setupComponentWithTasks();
-
- expect(findProjectSelect().exists()).toBe(true);
- });
-
- describe('when the modal is shown for a project', () => {
- it('does not render the project select', async () => {
- await setupComponentWithTasks({ isProject: true });
-
- expect(findProjectSelect().exists()).toBe(false);
- });
- });
-
- describe('when no tasks are selected', () => {
- it('does not render the project select', async () => {
- await setupComponent();
-
- expect(findProjectSelect().exists()).toBe(false);
- });
- });
- });
-
- describe('tracking events', () => {
- it('tracks the submit for invite_members_for_task', async () => {
- await setupComponentWithTasks();
-
- await triggerMembersTokenSelect([user1]);
-
- trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
-
- clickInviteButton();
-
- expectTracking(INVITE_MEMBERS_FOR_TASK.submit, 'selected_tasks_to_be_done', 'ci,code');
-
- unmockTracking();
- });
- });
- });
-
describe('rendering with tracking considerations', () => {
describe('when inviting to a project', () => {
describe('when inviting members', () => {
@@ -624,6 +493,18 @@ describe('InviteMembersModal', () => {
expect(membersFormGroupInvalidFeedback()).toBe('');
expect(findMembersSelect().props('exceptionState')).not.toBe(false);
});
+
+ it('displays invite limit error message', async () => {
+ mockInvitationsApi(HTTP_STATUS_CREATED, invitationsApiResponse.INVITE_LIMIT);
+
+ clickInviteButton();
+
+ await waitForPromises();
+
+ expect(membersFormGroupInvalidFeedback()).toBe(
+ invitationsApiResponse.INVITE_LIMIT.message,
+ );
+ });
});
});
diff --git a/spec/frontend/invite_members/mock_data/api_responses.js b/spec/frontend/invite_members/mock_data/api_responses.js
index e3e2426fcfc..4f773009f37 100644
--- a/spec/frontend/invite_members/mock_data/api_responses.js
+++ b/spec/frontend/invite_members/mock_data/api_responses.js
@@ -47,6 +47,11 @@ const EMAIL_TAKEN = {
status: 'error',
};
+const INVITE_LIMIT = {
+ message: 'Invite limit of 5 per day exceeded.',
+ status: 'error',
+};
+
export const GROUPS_INVITATIONS_PATH = '/api/v4/groups/1/invitations';
export const invitationsApiResponse = {
@@ -56,6 +61,7 @@ export const invitationsApiResponse = {
MULTIPLE_RESTRICTED,
EMAIL_TAKEN,
EXPANDED_RESTRICTED,
+ INVITE_LIMIT,
};
export const IMPORT_PROJECT_MEMBERS_PATH = '/api/v4/projects/1/import_project_members/2';
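For reference, a minimal Jest sketch of how the new INVITE_LIMIT fixture can be wired to the invitations endpoint with axios-mock-adapter; the `jest/` import alias and the bare POST payload are assumptions of this sketch, while the path, fixture, and status helper come from the specs above.

import AxiosMockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_CREATED } from '~/lib/utils/http_status';
// The `jest/` alias for spec/frontend is an assumption of this sketch.
import {
  GROUPS_INVITATIONS_PATH,
  invitationsApiResponse,
} from 'jest/invite_members/mock_data/api_responses';

describe('INVITE_LIMIT fixture (sketch)', () => {
  it('is returned by the mocked invitations endpoint', async () => {
    const mock = new AxiosMockAdapter(axios);
    mock
      .onPost(GROUPS_INVITATIONS_PATH)
      .reply(HTTP_STATUS_CREATED, invitationsApiResponse.INVITE_LIMIT);

    // Payload is illustrative; only the mocked response matters here.
    const { data } = await axios.post(GROUPS_INVITATIONS_PATH, { email: 'user@example.com' });

    expect(data.message).toBe('Invite limit of 5 per day exceeded.');
    expect(data.status).toBe('error');

    mock.restore();
  });
});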
diff --git a/spec/frontend/invite_members/mock_data/member_modal.js b/spec/frontend/invite_members/mock_data/member_modal.js
index 67fb1dcbfbd..8cde13bf69c 100644
--- a/spec/frontend/invite_members/mock_data/member_modal.js
+++ b/spec/frontend/invite_members/mock_data/member_modal.js
@@ -6,14 +6,6 @@ export const propsData = {
accessLevels: { Guest: 10, Reporter: 20, Developer: 30, Maintainer: 40, Owner: 50 },
defaultAccessLevel: 30,
helpLink: 'https://example.com',
- tasksToBeDoneOptions: [
- { text: 'First task', value: 'first' },
- { text: 'Second task', value: 'second' },
- ],
- projects: [
- { text: 'First project', value: '1' },
- { text: 'Second project', value: '2' },
- ],
};
export const inviteSource = 'unknown';
@@ -51,8 +43,6 @@ export const postData = {
expires_at: undefined,
invite_source: inviteSource,
format: 'json',
- tasks_to_be_done: [],
- tasks_project_id: '',
};
export const emailPostData = {
@@ -60,8 +50,6 @@ export const emailPostData = {
expires_at: undefined,
email: `${user3.name}`,
invite_source: inviteSource,
- tasks_to_be_done: [],
- tasks_project_id: '',
format: 'json',
};
@@ -71,8 +59,6 @@ export const singleUserPostData = {
user_id: `${user1.id}`,
email: `${user3.name}`,
invite_source: inviteSource,
- tasks_to_be_done: [],
- tasks_project_id: '',
format: 'json',
};
diff --git a/spec/frontend/invite_members/utils/member_utils_spec.js b/spec/frontend/invite_members/utils/member_utils_spec.js
index b6fc70038bb..abae43c3dbb 100644
--- a/spec/frontend/invite_members/utils/member_utils_spec.js
+++ b/spec/frontend/invite_members/utils/member_utils_spec.js
@@ -1,10 +1,4 @@
-import {
- memberName,
- triggerExternalAlert,
- qualifiesForTasksToBeDone,
-} from '~/invite_members/utils/member_utils';
-import setWindowLocation from 'helpers/set_window_location_helper';
-import { getParameterValues } from '~/lib/utils/url_utility';
+import { memberName, triggerExternalAlert } from '~/invite_members/utils/member_utils';
jest.mock('~/lib/utils/url_utility');
@@ -24,17 +18,3 @@ describe('Trigger External Alert', () => {
expect(triggerExternalAlert()).toBe(false);
});
});
-
-describe('Qualifies For Tasks To Be Done', () => {
- it.each([
- ['invite_members_for_task', true],
- ['blah', false],
- ])(`returns name from supplied member token: %j`, (value, result) => {
- setWindowLocation(`blah/blah?open_modal=${value}`);
- getParameterValues.mockImplementation(() => {
- return [value];
- });
-
- expect(qualifiesForTasksToBeDone()).toBe(result);
- });
-});
diff --git a/spec/frontend/issuable/components/csv_export_modal_spec.js b/spec/frontend/issuable/components/csv_export_modal_spec.js
index ccd53e64c4d..118ba9ab378 100644
--- a/spec/frontend/issuable/components/csv_export_modal_spec.js
+++ b/spec/frontend/issuable/components/csv_export_modal_spec.js
@@ -53,7 +53,7 @@ describe('CsvExportModal', () => {
href: 'export/csv/path',
variant: 'confirm',
'data-method': 'post',
- 'data-qa-selector': `export_issues_button`,
+ 'data-testid': 'export-issues-button',
'data-track-action': 'click_button',
'data-track-label': dataTrackLabel,
},
diff --git a/spec/frontend/issuable/components/issuable_header_warnings_spec.js b/spec/frontend/issuable/components/issuable_header_warnings_spec.js
deleted file mode 100644
index 34f36bdf6cb..00000000000
--- a/spec/frontend/issuable/components/issuable_header_warnings_spec.js
+++ /dev/null
@@ -1,105 +0,0 @@
-import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import mrStore from '~/mr_notes/stores';
-import createIssueStore from '~/notes/stores';
-import IssuableHeaderWarnings from '~/issuable/components/issuable_header_warnings.vue';
-
-const ISSUABLE_TYPE_ISSUE = 'issue';
-const ISSUABLE_TYPE_MR = 'merge_request';
-
-jest.mock('~/mr_notes/stores', () => jest.requireActual('helpers/mocks/mr_notes/stores'));
-
-describe('IssuableHeaderWarnings', () => {
- let wrapper;
-
- const findConfidentialIcon = () => wrapper.findByTestId('confidential');
- const findLockedIcon = () => wrapper.findByTestId('locked');
- const findHiddenIcon = () => wrapper.findByTestId('hidden');
-
- const renderTestMessage = (renders) => (renders ? 'renders' : 'does not render');
-
- const createComponent = ({ store, provide }) => {
- wrapper = shallowMountExtended(IssuableHeaderWarnings, {
- mocks: {
- $store: store,
- },
- provide,
- directives: {
- GlTooltip: createMockDirective('gl-tooltip'),
- },
- });
- };
-
- describe.each`
- issuableType
- ${ISSUABLE_TYPE_ISSUE} | ${ISSUABLE_TYPE_MR}
- `(`when issuableType=$issuableType`, ({ issuableType }) => {
- describe.each`
- lockStatus | confidentialStatus | hiddenStatus
- ${true} | ${true} | ${false}
- ${true} | ${false} | ${false}
- ${false} | ${true} | ${false}
- ${false} | ${false} | ${false}
- ${true} | ${true} | ${true}
- ${true} | ${false} | ${true}
- ${false} | ${true} | ${true}
- ${false} | ${false} | ${true}
- `(
- `when locked=$lockStatus, confidential=$confidentialStatus, and hidden=$hiddenStatus`,
- ({ lockStatus, confidentialStatus, hiddenStatus }) => {
- const store = issuableType === ISSUABLE_TYPE_ISSUE ? createIssueStore() : mrStore;
-
- beforeEach(() => {
- // TODO: simplify to single assignment after issue store is mock
- if (store === mrStore) {
- store.getters.getNoteableData = {};
- }
-
- store.getters.getNoteableData.confidential = confidentialStatus;
- store.getters.getNoteableData.discussion_locked = lockStatus;
- store.getters.getNoteableData.targetType = issuableType;
-
- createComponent({ store, provide: { hidden: hiddenStatus } });
- });
-
- it(`${renderTestMessage(lockStatus)} the locked icon`, () => {
- const lockedIcon = findLockedIcon();
-
- expect(lockedIcon.exists()).toBe(lockStatus);
-
- if (lockStatus) {
- expect(lockedIcon.attributes('title')).toBe(
- `This ${issuableType.replace('_', ' ')} is locked. Only project members can comment.`,
- );
- expect(getBinding(lockedIcon.element, 'gl-tooltip')).not.toBeUndefined();
- }
- });
-
- it(`${renderTestMessage(confidentialStatus)} the confidential icon`, () => {
- const confidentialEl = findConfidentialIcon();
- expect(confidentialEl.exists()).toBe(confidentialStatus);
-
- if (confidentialStatus && !hiddenStatus) {
- expect(confidentialEl.props()).toMatchObject({
- workspaceType: 'project',
- issuableType: 'issue',
- });
- }
- });
-
- it(`${renderTestMessage(confidentialStatus)} the hidden icon`, () => {
- const hiddenIcon = findHiddenIcon();
-
- expect(hiddenIcon.exists()).toBe(hiddenStatus);
-
- if (hiddenStatus) {
- expect(hiddenIcon.attributes('title')).toBe(
- `This ${issuableType.replace('_', ' ')} is hidden because its author has been banned`,
- );
- expect(getBinding(hiddenIcon.element, 'gl-tooltip')).not.toBeUndefined();
- }
- });
- },
- );
- });
-});
diff --git a/spec/frontend/issuable/components/status_badge_spec.js b/spec/frontend/issuable/components/status_badge_spec.js
new file mode 100644
index 00000000000..cdc848626c7
--- /dev/null
+++ b/spec/frontend/issuable/components/status_badge_spec.js
@@ -0,0 +1,43 @@
+import { GlBadge, GlIcon } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import StatusBadge from '~/issuable/components/status_badge.vue';
+
+describe('StatusBadge component', () => {
+ let wrapper;
+
+ const mountComponent = (propsData) => {
+ wrapper = shallowMount(StatusBadge, { propsData });
+ };
+
+ const findBadge = () => wrapper.findComponent(GlBadge);
+
+ describe.each`
+ issuableType | badgeText | state | badgeVariant | badgeIcon
+ ${'merge_request'} | ${'Open'} | ${'opened'} | ${'success'} | ${'merge-request-open'}
+ ${'merge_request'} | ${'Closed'} | ${'closed'} | ${'danger'} | ${'merge-request-close'}
+ ${'merge_request'} | ${'Merged'} | ${'merged'} | ${'info'} | ${'merge'}
+ ${'issue'} | ${'Open'} | ${'opened'} | ${'success'} | ${'issues'}
+ ${'issue'} | ${'Closed'} | ${'closed'} | ${'info'} | ${'issue-closed'}
+ ${'epic'} | ${'Open'} | ${'opened'} | ${'success'} | ${'epic'}
+ ${'epic'} | ${'Closed'} | ${'closed'} | ${'info'} | ${'epic-closed'}
+ `(
+ 'when issuableType=$issuableType and state=$state',
+ ({ issuableType, badgeText, state, badgeVariant, badgeIcon }) => {
+ beforeEach(() => {
+ mountComponent({ state, issuableType });
+ });
+
+ it(`renders badge with text '${badgeText}'`, () => {
+ expect(findBadge().text()).toBe(badgeText);
+ });
+
+      it(`sets badge variant as '${badgeVariant}'`, () => {
+ expect(findBadge().props('variant')).toBe(badgeVariant);
+ });
+
+ it(`sets badge icon as '${badgeIcon}'`, () => {
+ expect(findBadge().findComponent(GlIcon).props('name')).toBe(badgeIcon);
+ });
+ },
+ );
+});
diff --git a/spec/frontend/issuable/components/status_box_spec.js b/spec/frontend/issuable/components/status_box_spec.js
deleted file mode 100644
index 0d47595c9e6..00000000000
--- a/spec/frontend/issuable/components/status_box_spec.js
+++ /dev/null
@@ -1,50 +0,0 @@
-import { GlBadge, GlIcon } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import StatusBox from '~/issuable/components/status_box.vue';
-
-let wrapper;
-
-function factory(propsData) {
- wrapper = shallowMount(StatusBox, { propsData, stubs: { GlBadge } });
-}
-
-describe('Merge request status box component', () => {
- const findBadge = () => wrapper.findComponent(GlBadge);
-
- describe.each`
- issuableType | badgeText | initialState | badgeClass | badgeVariant | badgeIcon
- ${'merge_request'} | ${'Open'} | ${'opened'} | ${'issuable-status-badge-open'} | ${'success'} | ${'merge-request-open'}
- ${'merge_request'} | ${'Closed'} | ${'closed'} | ${'issuable-status-badge-closed'} | ${'danger'} | ${'merge-request-close'}
- ${'merge_request'} | ${'Merged'} | ${'merged'} | ${'issuable-status-badge-merged'} | ${'info'} | ${'merge'}
- ${'issue'} | ${'Open'} | ${'opened'} | ${'issuable-status-badge-open'} | ${'success'} | ${'issues'}
- ${'issue'} | ${'Closed'} | ${'closed'} | ${'issuable-status-badge-closed'} | ${'info'} | ${'issue-closed'}
- ${'epic'} | ${'Open'} | ${'opened'} | ${'issuable-status-badge-open'} | ${'success'} | ${'epic'}
- ${'epic'} | ${'Closed'} | ${'closed'} | ${'issuable-status-badge-closed'} | ${'info'} | ${'epic-closed'}
- `(
- 'with issuableType set to "$issuableType" and state set to "$initialState"',
- ({ issuableType, badgeText, initialState, badgeClass, badgeVariant, badgeIcon }) => {
- beforeEach(() => {
- factory({
- initialState,
- issuableType,
- });
- });
-
- it(`renders badge with text '${badgeText}'`, () => {
- expect(findBadge().text()).toBe(badgeText);
- });
-
- it(`sets badge css class as '${badgeClass}'`, () => {
- expect(findBadge().classes()).toContain(badgeClass);
- });
-
- it(`sets badge variant as '${badgeVariant}`, () => {
- expect(findBadge().props('variant')).toBe(badgeVariant);
- });
-
- it(`sets badge icon as '${badgeIcon}'`, () => {
- expect(findBadge().findComponent(GlIcon).props('name')).toBe(badgeIcon);
- });
- },
- );
-});
diff --git a/spec/frontend/issuable/popover/components/issue_popover_spec.js b/spec/frontend/issuable/popover/components/issue_popover_spec.js
index 0596433ce9a..2db3a83572c 100644
--- a/spec/frontend/issuable/popover/components/issue_popover_spec.js
+++ b/spec/frontend/issuable/popover/components/issue_popover_spec.js
@@ -8,7 +8,7 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import IssueDueDate from '~/boards/components/issue_due_date.vue';
import IssueMilestone from '~/issuable/components/issue_milestone.vue';
-import StatusBox from '~/issuable/components/status_box.vue';
+import StatusBadge from '~/issuable/components/status_badge.vue';
import IssuePopover from '~/issuable/popover/components/issue_popover.vue';
import WorkItemTypeIcon from '~/work_items/components/work_item_type_icon.vue';
@@ -52,9 +52,9 @@ describe('Issue Popover', () => {
});
it('shows status badge', () => {
- expect(wrapper.findComponent(StatusBox).props()).toEqual({
+ expect(wrapper.findComponent(StatusBadge).props()).toEqual({
issuableType: 'issue',
- initialState: issueQueryResponse.data.project.issue.state,
+ state: issueQueryResponse.data.project.issue.state,
});
});
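A minimal sketch of the renamed StatusBadge prop surface that the popover spec now relies on (`state` instead of the old `initialState`); the prop names and values are taken from the status_badge_spec.js table above.

import { shallowMount } from '@vue/test-utils';
import StatusBadge from '~/issuable/components/status_badge.vue';

it('accepts the renamed `state` prop (sketch)', () => {
  const wrapper = shallowMount(StatusBadge, {
    propsData: { issuableType: 'issue', state: 'opened' },
  });

  // Prop names match the new status_badge_spec.js above.
  expect(wrapper.props()).toMatchObject({ issuableType: 'issue', state: 'opened' });
});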
diff --git a/spec/frontend/issues/dashboard/components/issues_dashboard_app_spec.js b/spec/frontend/issues/dashboard/components/issues_dashboard_app_spec.js
index 4686a4fe0c4..f6c9fab76d1 100644
--- a/spec/frontend/issues/dashboard/components/issues_dashboard_app_spec.js
+++ b/spec/frontend/issues/dashboard/components/issues_dashboard_app_spec.js
@@ -137,7 +137,6 @@ describe('IssuesDashboardApp component', () => {
issuablesLoading: false,
namespace: 'dashboard',
recentSearchesStorageKey: 'issues',
- searchInputPlaceholder: i18n.searchPlaceholder,
showPaginationControls: true,
sortOptions: getSortOptions({
hasBlockedIssuesFeature: defaultProvide.hasBlockedIssuesFeature,
diff --git a/spec/frontend/issues/dashboard/mock_data.js b/spec/frontend/issues/dashboard/mock_data.js
index adcd4268449..1e3abd5a018 100644
--- a/spec/frontend/issues/dashboard/mock_data.js
+++ b/spec/frontend/issues/dashboard/mock_data.js
@@ -19,6 +19,7 @@ export const issuesQueryResponse = {
reference: 'group/project#123456',
state: 'opened',
title: 'Issue title',
+ titleHtml: 'Issue title',
type: 'issue',
updatedAt: '2021-05-22T04:08:01Z',
upvotes: 3,
diff --git a/spec/frontend/issues/list/components/issue_card_time_info_spec.js b/spec/frontend/issues/list/components/issue_card_time_info_spec.js
index e80ffea0591..8286f84b98a 100644
--- a/spec/frontend/issues/list/components/issue_card_time_info_spec.js
+++ b/spec/frontend/issues/list/components/issue_card_time_info_spec.js
@@ -3,13 +3,14 @@ import { shallowMount } from '@vue/test-utils';
import { useFakeDate } from 'helpers/fake_date';
import { STATUS_CLOSED, STATUS_OPEN } from '~/issues/constants';
import IssueCardTimeInfo from '~/issues/list/components/issue_card_time_info.vue';
+import { WIDGET_TYPE_MILESTONE, WIDGET_TYPE_START_AND_DUE_DATE } from '~/work_items/constants';
describe('CE IssueCardTimeInfo component', () => {
useFakeDate(2020, 11, 11); // 2020 Dec 11
let wrapper;
- const issue = {
+ const issueObject = {
milestone: {
dueDate: '2020-12-17',
startDate: '2020-12-10',
@@ -20,22 +21,41 @@ describe('CE IssueCardTimeInfo component', () => {
humanTimeEstimate: '1w',
};
+ const workItemObject = {
+ widgets: [
+ {
+ type: WIDGET_TYPE_MILESTONE,
+ milestone: {
+ dueDate: '2020-12-17',
+ startDate: '2020-12-10',
+ title: 'My milestone',
+ webPath: '/milestone/webPath',
+ },
+ },
+ {
+ type: WIDGET_TYPE_START_AND_DUE_DATE,
+ dueDate: '2020-12-12',
+ },
+ ],
+ };
+
const findMilestone = () => wrapper.find('[data-testid="issuable-milestone"]');
const findMilestoneTitle = () => findMilestone().findComponent(GlLink).attributes('title');
const findDueDate = () => wrapper.find('[data-testid="issuable-due-date"]');
const mountComponent = ({
+ issue = issueObject,
state = STATUS_OPEN,
- dueDate = issue.dueDate,
- milestoneDueDate = issue.milestone.dueDate,
- milestoneStartDate = issue.milestone.startDate,
+ dueDate = issueObject.dueDate,
+ milestoneDueDate = issueObject.milestone.dueDate,
+ milestoneStartDate = issueObject.milestone.startDate,
} = {}) =>
shallowMount(IssueCardTimeInfo, {
propsData: {
issue: {
...issue,
milestone: {
- ...issue.milestone,
+ ...issueObject.milestone,
dueDate: milestoneDueDate,
startDate: milestoneStartDate,
},
@@ -45,63 +65,70 @@ describe('CE IssueCardTimeInfo component', () => {
},
});
- describe('milestone', () => {
- it('renders', () => {
- wrapper = mountComponent();
+ describe.each`
+ type | obj
+ ${'issue'} | ${issueObject}
+ ${'work item'} | ${workItemObject}
+ `('with $type object', ({ obj }) => {
+ describe('milestone', () => {
+ it('renders', () => {
+ wrapper = mountComponent({ issue: obj });
- const milestone = findMilestone();
+ const milestone = findMilestone();
- expect(milestone.text()).toBe(issue.milestone.title);
- expect(milestone.findComponent(GlIcon).props('name')).toBe('clock');
- expect(milestone.findComponent(GlLink).attributes('href')).toBe(issue.milestone.webPath);
- });
+ expect(milestone.text()).toBe('My milestone');
+ expect(milestone.findComponent(GlIcon).props('name')).toBe('clock');
+ expect(milestone.findComponent(GlLink).attributes('href')).toBe('/milestone/webPath');
+ });
- describe.each`
- time | text | milestoneDueDate | milestoneStartDate | expected
- ${'due date is in past'} | ${'Past due'} | ${'2020-09-09'} | ${null} | ${'Sep 9, 2020 (Past due)'}
- ${'due date is today'} | ${'Today'} | ${'2020-12-11'} | ${null} | ${'Dec 11, 2020 (Today)'}
- ${'start date is in future'} | ${'Upcoming'} | ${'2021-03-01'} | ${'2021-02-01'} | ${'Mar 1, 2021 (Upcoming)'}
- ${'due date is in future'} | ${'2 weeks remaining'} | ${'2020-12-25'} | ${null} | ${'Dec 25, 2020 (2 weeks remaining)'}
- `('when $description', ({ text, milestoneDueDate, milestoneStartDate, expected }) => {
- it(`renders with "${text}"`, () => {
- wrapper = mountComponent({ milestoneDueDate, milestoneStartDate });
-
- expect(findMilestoneTitle()).toBe(expected);
+ describe.each`
+ time | text | milestoneDueDate | milestoneStartDate | expected
+ ${'due date is in past'} | ${'Past due'} | ${'2020-09-09'} | ${null} | ${'Sep 9, 2020 (Past due)'}
+ ${'due date is today'} | ${'Today'} | ${'2020-12-11'} | ${null} | ${'Dec 11, 2020 (Today)'}
+ ${'start date is in future'} | ${'Upcoming'} | ${'2021-03-01'} | ${'2021-02-01'} | ${'Mar 1, 2021 (Upcoming)'}
+ ${'due date is in future'} | ${'2 weeks remaining'} | ${'2020-12-25'} | ${null} | ${'Dec 25, 2020 (2 weeks remaining)'}
+      `('when $time', ({ text, milestoneDueDate, milestoneStartDate, expected }) => {
+ it(`renders with "${text}"`, () => {
+ wrapper = mountComponent({ issue: obj, milestoneDueDate, milestoneStartDate });
+
+ expect(findMilestoneTitle()).toBe(expected);
+ });
});
});
- });
- describe('due date', () => {
- describe('when upcoming', () => {
- it('renders', () => {
- wrapper = mountComponent();
+ describe('due date', () => {
+ describe('when upcoming', () => {
+ it('renders', () => {
+ wrapper = mountComponent({ issue: obj });
- const dueDate = findDueDate();
+ const dueDate = findDueDate();
- expect(dueDate.text()).toBe('Dec 12, 2020');
- expect(dueDate.attributes('title')).toBe('Due date');
- expect(dueDate.findComponent(GlIcon).props('name')).toBe('calendar');
- expect(dueDate.classes()).not.toContain('gl-text-red-500');
+ expect(dueDate.text()).toBe('Dec 12, 2020');
+ expect(dueDate.attributes('title')).toBe('Due date');
+ expect(dueDate.findComponent(GlIcon).props('name')).toBe('calendar');
+ expect(dueDate.classes()).not.toContain('gl-text-red-500');
+ });
});
- });
- describe('when in the past', () => {
- describe('when issue is open', () => {
- it('renders in red', () => {
- wrapper = mountComponent({ dueDate: '2020-10-10' });
+ describe('when in the past', () => {
+ describe('when issue is open', () => {
+ it('renders in red', () => {
+ wrapper = mountComponent({ issue: obj, dueDate: '2020-10-10' });
- expect(findDueDate().classes()).toContain('gl-text-red-500');
+ expect(findDueDate().classes()).toContain('gl-text-red-500');
+ });
});
- });
- describe('when issue is closed', () => {
- it('does not render in red', () => {
- wrapper = mountComponent({
- dueDate: '2020-10-10',
- state: STATUS_CLOSED,
- });
+ describe('when issue is closed', () => {
+ it('does not render in red', () => {
+ wrapper = mountComponent({
+ issue: obj,
+ dueDate: '2020-10-10',
+ state: STATUS_CLOSED,
+ });
- expect(findDueDate().classes()).not.toContain('gl-text-red-500');
+ expect(findDueDate().classes()).not.toContain('gl-text-red-500');
+ });
});
});
});
@@ -112,7 +139,7 @@ describe('CE IssueCardTimeInfo component', () => {
const timeEstimate = wrapper.find('[data-testid="time-estimate"]');
- expect(timeEstimate.text()).toBe(issue.humanTimeEstimate);
+ expect(timeEstimate.text()).toBe(issueObject.humanTimeEstimate);
expect(timeEstimate.attributes('title')).toBe('Estimate');
expect(timeEstimate.findComponent(GlIcon).props('name')).toBe('timer');
});
diff --git a/spec/frontend/issues/list/components/issues_list_app_spec.js b/spec/frontend/issues/list/components/issues_list_app_spec.js
index de027a21c8f..f830168ce5d 100644
--- a/spec/frontend/issues/list/components/issues_list_app_spec.js
+++ b/spec/frontend/issues/list/components/issues_list_app_spec.js
@@ -237,7 +237,6 @@ describe('CE IssuesListApp component', () => {
expect(findIssuableList().props()).toMatchObject({
namespace: defaultProvide.fullPath,
recentSearchesStorageKey: 'issues',
- searchInputPlaceholder: IssuesListApp.i18n.searchPlaceholder,
sortOptions: getSortOptions({
hasBlockedIssuesFeature: defaultProvide.hasBlockedIssuesFeature,
hasIssuableHealthStatusFeature: defaultProvide.hasIssuableHealthStatusFeature,
diff --git a/spec/frontend/issues/list/mock_data.js b/spec/frontend/issues/list/mock_data.js
index b9a8bc171db..73fda11f38c 100644
--- a/spec/frontend/issues/list/mock_data.js
+++ b/spec/frontend/issues/list/mock_data.js
@@ -49,6 +49,7 @@ export const getIssuesQueryResponse = {
moved: false,
state: 'opened',
title: 'Issue title',
+ titleHtml: 'Issue title',
updatedAt: '2021-05-22T04:08:01Z',
closedAt: null,
upvotes: 3,
diff --git a/spec/frontend/issues/new/components/__snapshots__/type_popover_spec.js.snap b/spec/frontend/issues/new/components/__snapshots__/type_popover_spec.js.snap
index 1a199ed2ee9..a4bd9608e34 100644
--- a/spec/frontend/issues/new/components/__snapshots__/type_popover_spec.js.snap
+++ b/spec/frontend/issues/new/components/__snapshots__/type_popover_spec.js.snap
@@ -3,15 +3,14 @@
exports[`Issue type info popover renders 1`] = `
<span
class="gl-ml-2"
- id="popovercontainer"
+ id="reference-0"
>
<gl-icon-stub
class="gl-text-blue-600"
- id="issue-type-info"
+ id="reference-1"
name="question-o"
size="16"
/>
-
<gl-popover-stub
container="popovercontainer"
cssclasses=""
@@ -20,7 +19,7 @@ exports[`Issue type info popover renders 1`] = `
triggers="focus hover"
>
<ul
- class="gl-list-style-none gl-p-0 gl-m-0"
+ class="gl-list-style-none gl-m-0 gl-p-0"
>
<li
class="gl-mb-3"
@@ -30,19 +29,16 @@ exports[`Issue type info popover renders 1`] = `
>
Issue
</div>
-
<span>
For general work
</span>
</li>
-
<li>
<div
class="gl-font-weight-bold"
>
Incident
</div>
-
<span>
For investigating IT service disruptions or outages
</span>
diff --git a/spec/frontend/service_desk/components/empty_state_with_any_issues_spec.js b/spec/frontend/issues/service_desk/components/empty_state_with_any_issues_spec.js
index ce8a78767d4..90f0847f37b 100644
--- a/spec/frontend/service_desk/components/empty_state_with_any_issues_spec.js
+++ b/spec/frontend/issues/service_desk/components/empty_state_with_any_issues_spec.js
@@ -1,13 +1,13 @@
import { GlEmptyState } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import EmptyStateWithAnyIssues from '~/service_desk/components/empty_state_with_any_issues.vue';
+import EmptyStateWithAnyIssues from '~/issues/service_desk/components/empty_state_with_any_issues.vue';
import {
noSearchResultsTitle,
noSearchResultsDescription,
infoBannerUserNote,
noOpenIssuesTitle,
noClosedIssuesTitle,
-} from '~/service_desk/constants';
+} from '~/issues/service_desk/constants';
describe('EmptyStateWithAnyIssues component', () => {
let wrapper;
diff --git a/spec/frontend/service_desk/components/empty_state_without_any_issues_spec.js b/spec/frontend/issues/service_desk/components/empty_state_without_any_issues_spec.js
index c67f9588ed4..7f281d6fbfe 100644
--- a/spec/frontend/service_desk/components/empty_state_without_any_issues_spec.js
+++ b/spec/frontend/issues/service_desk/components/empty_state_without_any_issues_spec.js
@@ -1,7 +1,11 @@
import { GlEmptyState, GlLink } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
-import EmptyStateWithoutAnyIssues from '~/service_desk/components/empty_state_without_any_issues.vue';
-import { infoBannerTitle, noIssuesSignedOutButtonText, learnMore } from '~/service_desk/constants';
+import EmptyStateWithoutAnyIssues from '~/issues/service_desk/components/empty_state_without_any_issues.vue';
+import {
+ infoBannerTitle,
+ noIssuesSignedOutButtonText,
+ learnMore,
+} from '~/issues/service_desk/constants';
describe('EmptyStateWithoutAnyIssues component', () => {
let wrapper;
diff --git a/spec/frontend/service_desk/components/info_banner_spec.js b/spec/frontend/issues/service_desk/components/info_banner_spec.js
index 7487d5d8b64..593455f5deb 100644
--- a/spec/frontend/service_desk/components/info_banner_spec.js
+++ b/spec/frontend/issues/service_desk/components/info_banner_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import { GlLink, GlButton } from '@gitlab/ui';
-import InfoBanner from '~/service_desk/components/info_banner.vue';
-import { infoBannerAdminNote, enableServiceDesk } from '~/service_desk/constants';
+import InfoBanner from '~/issues/service_desk/components/info_banner.vue';
+import { infoBannerAdminNote, enableServiceDesk } from '~/issues/service_desk/constants';
describe('InfoBanner', () => {
let wrapper;
diff --git a/spec/frontend/issues/service_desk/components/service_desk_list_app_spec.js b/spec/frontend/issues/service_desk/components/service_desk_list_app_spec.js
new file mode 100644
index 00000000000..d28b4f2fe76
--- /dev/null
+++ b/spec/frontend/issues/service_desk/components/service_desk_list_app_spec.js
@@ -0,0 +1,717 @@
+import { shallowMount } from '@vue/test-utils';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import { cloneDeep } from 'lodash';
+import VueRouter from 'vue-router';
+import * as Sentry from '@sentry/browser';
+import AxiosMockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import setWindowLocation from 'helpers/set_window_location_helper';
+import { TEST_HOST } from 'helpers/test_constants';
+import { joinPaths } from '~/lib/utils/url_utility';
+import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
+import waitForPromises from 'helpers/wait_for_promises';
+import { scrollUp } from '~/lib/utils/scroll_utils';
+import IssuableList from '~/vue_shared/issuable/list/components/issuable_list_root.vue';
+import { issuableListTabs } from '~/vue_shared/issuable/list/constants';
+import { TYPENAME_USER } from '~/graphql_shared/constants';
+import { convertToGraphQLId, getIdFromGraphQLId } from '~/graphql_shared/utils';
+import { getSortKey, getSortOptions } from '~/issues/list/utils';
+import { STATUS_CLOSED, STATUS_OPEN, STATUS_ALL } from '~/issues/service_desk/constants';
+import getServiceDeskIssuesQuery from 'ee_else_ce/issues/service_desk/queries/get_service_desk_issues.query.graphql';
+import getServiceDeskIssuesCountsQuery from 'ee_else_ce/issues/service_desk/queries/get_service_desk_issues_counts.query.graphql';
+import setSortingPreferenceMutation from '~/issues/service_desk/queries/set_sorting_preference.mutation.graphql';
+import ServiceDeskListApp from '~/issues/service_desk/components/service_desk_list_app.vue';
+import InfoBanner from '~/issues/service_desk/components/info_banner.vue';
+import EmptyStateWithAnyIssues from '~/issues/service_desk/components/empty_state_with_any_issues.vue';
+import EmptyStateWithoutAnyIssues from '~/issues/service_desk/components/empty_state_without_any_issues.vue';
+import { createAlert, VARIANT_INFO } from '~/alert';
+import {
+ TOKEN_TYPE_ASSIGNEE,
+ TOKEN_TYPE_AUTHOR,
+ TOKEN_TYPE_CONFIDENTIAL,
+ TOKEN_TYPE_LABEL,
+ TOKEN_TYPE_MILESTONE,
+ TOKEN_TYPE_MY_REACTION,
+ TOKEN_TYPE_RELEASE,
+ TOKEN_TYPE_SEARCH_WITHIN,
+} from '~/vue_shared/components/filtered_search_bar/constants';
+import {
+ CREATED_DESC,
+ UPDATED_DESC,
+ RELATIVE_POSITION_ASC,
+ RELATIVE_POSITION,
+ urlSortParams,
+} from '~/issues/list/constants';
+import {
+ getServiceDeskIssuesQueryResponse,
+ getServiceDeskIssuesQueryEmptyResponse,
+ getServiceDeskIssuesCountsQueryResponse,
+ setSortPreferenceMutationResponse,
+ setSortPreferenceMutationResponseWithErrors,
+ filteredTokens,
+ urlParams,
+ locationSearch,
+} from '../mock_data';
+
+jest.mock('@sentry/browser');
+jest.mock('~/alert');
+jest.mock('~/lib/utils/scroll_utils', () => ({ scrollUp: jest.fn() }));
+
+describe('CE ServiceDeskListApp', () => {
+ let wrapper;
+ let router;
+ let axiosMock;
+
+ Vue.use(VueApollo);
+ Vue.use(VueRouter);
+
+ const defaultProvide = {
+ releasesPath: 'releases/path',
+ autocompleteAwardEmojisPath: 'autocomplete/award/emojis/path',
+ hasBlockedIssuesFeature: false,
+ hasIterationsFeature: true,
+ hasIssueWeightsFeature: true,
+ hasIssuableHealthStatusFeature: true,
+ groupPath: 'group/path',
+ emptyStateSvgPath: 'empty-state.svg',
+ isProject: true,
+ isSignedIn: true,
+ fullPath: 'path/to/project',
+ isServiceDeskSupported: true,
+ hasAnyIssues: true,
+ initialSort: CREATED_DESC,
+ isIssueRepositioningDisabled: false,
+ issuablesLoading: false,
+ showPaginationControls: true,
+ useKeysetPagination: true,
+ hasPreviousPage: getServiceDeskIssuesQueryResponse.data.project.issues.pageInfo.hasPreviousPage,
+ hasNextPage: getServiceDeskIssuesQueryResponse.data.project.issues.pageInfo.hasNextPage,
+ };
+
+ let defaultQueryResponse = getServiceDeskIssuesQueryResponse;
+ if (IS_EE) {
+ defaultQueryResponse = cloneDeep(getServiceDeskIssuesQueryResponse);
+ defaultQueryResponse.data.project.issues.nodes[0].healthStatus = null;
+ defaultQueryResponse.data.project.issues.nodes[0].weight = 5;
+ }
+
+ const mockServiceDeskIssuesQueryResponseHandler = jest
+ .fn()
+ .mockResolvedValue(defaultQueryResponse);
+ const mockServiceDeskIssuesQueryEmptyResponseHandler = jest
+ .fn()
+ .mockResolvedValue(getServiceDeskIssuesQueryEmptyResponse);
+ const mockServiceDeskIssuesCountsQueryResponseHandler = jest
+ .fn()
+ .mockResolvedValue(getServiceDeskIssuesCountsQueryResponse);
+
+ const findIssuableList = () => wrapper.findComponent(IssuableList);
+ const findInfoBanner = () => wrapper.findComponent(InfoBanner);
+ const findLabelsToken = () =>
+ findIssuableList()
+ .props('searchTokens')
+ .find((token) => token.type === TOKEN_TYPE_LABEL);
+
+ const createComponent = ({
+ provide = {},
+ serviceDeskIssuesQueryResponseHandler = mockServiceDeskIssuesQueryResponseHandler,
+ serviceDeskIssuesCountsQueryResponseHandler = mockServiceDeskIssuesCountsQueryResponseHandler,
+ sortPreferenceMutationResponse = jest.fn().mockResolvedValue(setSortPreferenceMutationResponse),
+ } = {}) => {
+ const requestHandlers = [
+ [getServiceDeskIssuesQuery, serviceDeskIssuesQueryResponseHandler],
+ [getServiceDeskIssuesCountsQuery, serviceDeskIssuesCountsQueryResponseHandler],
+ [setSortingPreferenceMutation, sortPreferenceMutationResponse],
+ ];
+
+ router = new VueRouter({ mode: 'history' });
+
+ return shallowMount(ServiceDeskListApp, {
+ apolloProvider: createMockApollo(
+ requestHandlers,
+ {},
+ {
+ typePolicies: {
+ Query: {
+ fields: {
+ project: {
+ merge: true,
+ },
+ },
+ },
+ },
+ },
+ ),
+ router,
+ provide: {
+ ...defaultProvide,
+ ...provide,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ setWindowLocation(TEST_HOST);
+ axiosMock = new AxiosMockAdapter(axios);
+ wrapper = createComponent();
+ return waitForPromises();
+ });
+
+ afterEach(() => {
+ axiosMock.reset();
+ });
+
+ it('renders the issuable list with skeletons while fetching service desk issues', async () => {
+ wrapper = createComponent();
+ await nextTick();
+
+ expect(findIssuableList().props('issuablesLoading')).toBe(true);
+
+ await waitForPromises();
+
+ expect(findIssuableList().props('issuablesLoading')).toBe(false);
+ });
+
+ it('fetches service desk issues and renders them in the issuable list', () => {
+ expect(findIssuableList().props()).toMatchObject({
+ namespace: 'service-desk',
+ recentSearchesStorageKey: 'service-desk-issues',
+ issuables: defaultQueryResponse.data.project.issues.nodes,
+ tabs: issuableListTabs,
+ currentTab: STATUS_OPEN,
+ tabCounts: {
+ opened: 1,
+ closed: 1,
+ all: 1,
+ },
+ sortOptions: getSortOptions({
+ hasBlockedIssuesFeature: defaultProvide.hasBlockedIssuesFeature,
+ hasIssuableHealthStatusFeature: defaultProvide.hasIssuableHealthStatusFeature,
+ hasIssueWeightsFeature: defaultProvide.hasIssueWeightsFeature,
+ }),
+ initialSortBy: CREATED_DESC,
+ isManualOrdering: false,
+ });
+ });
+
+ describe('InfoBanner', () => {
+ it('renders when Service Desk is supported and has any number of issues', () => {
+ expect(findInfoBanner().exists()).toBe(true);
+ });
+
+ it('does not render when Service Desk is not supported and has any number of issues', () => {
+ wrapper = createComponent({ provide: { isServiceDeskSupported: false } });
+
+ expect(findInfoBanner().exists()).toBe(false);
+ });
+
+    it('does not render when there are no issues', () => {
+ wrapper = createComponent({
+ serviceDeskIssuesQueryResponseHandler: mockServiceDeskIssuesQueryEmptyResponseHandler,
+ });
+
+ expect(findInfoBanner().exists()).toBe(false);
+ });
+ });
+
+ describe('Empty states', () => {
+ describe('when there are issues', () => {
+ it('shows EmptyStateWithAnyIssues component', () => {
+ setWindowLocation(locationSearch);
+ wrapper = createComponent({
+ serviceDeskIssuesQueryResponseHandler: mockServiceDeskIssuesQueryEmptyResponseHandler,
+ });
+
+ expect(wrapper.findComponent(EmptyStateWithAnyIssues).props()).toEqual({
+ hasSearch: true,
+ isOpenTab: true,
+ });
+ });
+ });
+
+ describe('when there are no issues', () => {
+ it('shows EmptyStateWithoutAnyIssues component', () => {
+ wrapper = createComponent({
+ provide: { hasAnyIssues: false },
+ serviceDeskIssuesQueryResponseHandler: mockServiceDeskIssuesQueryEmptyResponseHandler,
+ });
+
+ expect(wrapper.findComponent(EmptyStateWithoutAnyIssues).exists()).toBe(true);
+ });
+ });
+ });
+
+ describe('Initial url params', () => {
+ describe('search', () => {
+ it('is set from the url params', () => {
+ setWindowLocation(locationSearch);
+ wrapper = createComponent();
+
+ expect(router.history.current.query).toMatchObject({ search: 'find issues' });
+ });
+ });
+
+ describe('sort', () => {
+ describe('when initial sort value uses old enum values', () => {
+ const oldEnumSortValues = Object.values(urlSortParams);
+
+ it.each(oldEnumSortValues)('initial sort is set with value %s', async (sort) => {
+ wrapper = createComponent({ provide: { initialSort: sort } });
+ await waitForPromises();
+
+ expect(findIssuableList().props('initialSortBy')).toBe(getSortKey(sort));
+ });
+ });
+
+ describe('when initial sort value uses new GraphQL enum values', () => {
+ const graphQLEnumSortValues = Object.keys(urlSortParams);
+
+ it.each(graphQLEnumSortValues)('initial sort is set with value %s', async (sort) => {
+ wrapper = createComponent({ provide: { initialSort: sort.toLowerCase() } });
+ await waitForPromises();
+
+ expect(findIssuableList().props('initialSortBy')).toBe(sort);
+ });
+ });
+
+ describe('when initial sort value is invalid', () => {
+ it.each(['', 'asdf', null, undefined])(
+ 'initial sort is set to value CREATED_DESC',
+ async (sort) => {
+ wrapper = createComponent({ provide: { initialSort: sort } });
+ await waitForPromises();
+
+ expect(findIssuableList().props('initialSortBy')).toBe(CREATED_DESC);
+ },
+ );
+ });
+
+ describe('when sort is manual and issue repositioning is disabled', () => {
+ beforeEach(async () => {
+ wrapper = createComponent({
+ provide: { initialSort: RELATIVE_POSITION, isIssueRepositioningDisabled: true },
+ });
+ await waitForPromises();
+ });
+
+ it('changes the sort to the default of created descending', () => {
+ expect(findIssuableList().props('initialSortBy')).toBe(CREATED_DESC);
+ });
+
+ it('shows an alert to tell the user that manual reordering is disabled', () => {
+ expect(createAlert).toHaveBeenCalledWith({
+ message: ServiceDeskListApp.i18n.issueRepositioningMessage,
+ variant: VARIANT_INFO,
+ });
+ });
+ });
+ });
+
+ describe('state', () => {
+ it('is set from the url params', async () => {
+ const initialState = STATUS_ALL;
+ setWindowLocation(`?state=${initialState}`);
+ wrapper = createComponent();
+ await waitForPromises();
+
+ expect(findIssuableList().props('currentTab')).toBe(initialState);
+ });
+ });
+
+ describe('filter tokens', () => {
+ it('are set from the url params', () => {
+ setWindowLocation(locationSearch);
+ wrapper = createComponent();
+
+ expect(findIssuableList().props('initialFilterValue')).toEqual(filteredTokens);
+ });
+ });
+ });
+
+ describe('Tokens', () => {
+ const mockCurrentUser = {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ avatar_url: 'avatar/url',
+ };
+
+ describe('when user is signed out', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ provide: { isSignedIn: false } });
+ return waitForPromises();
+ });
+
+ it('does not render My-Reaction or Confidential tokens', () => {
+ expect(findIssuableList().props('searchTokens')).not.toMatchObject([
+ { type: TOKEN_TYPE_AUTHOR, preloadedUsers: [mockCurrentUser] },
+ { type: TOKEN_TYPE_ASSIGNEE, preloadedUsers: [mockCurrentUser] },
+ { type: TOKEN_TYPE_MY_REACTION },
+ { type: TOKEN_TYPE_CONFIDENTIAL },
+ ]);
+ });
+ });
+
+ describe('when all tokens are available', () => {
+ beforeEach(() => {
+ window.gon = {
+ current_user_id: mockCurrentUser.id,
+ current_user_fullname: mockCurrentUser.name,
+ current_username: mockCurrentUser.username,
+ current_user_avatar_url: mockCurrentUser.avatar_url,
+ };
+
+ wrapper = createComponent();
+ return waitForPromises();
+ });
+
+ it('renders all tokens alphabetically', () => {
+ const preloadedUsers = [
+ { ...mockCurrentUser, id: convertToGraphQLId(TYPENAME_USER, mockCurrentUser.id) },
+ ];
+
+ expect(findIssuableList().props('searchTokens')).toMatchObject([
+ { type: TOKEN_TYPE_ASSIGNEE, preloadedUsers },
+ { type: TOKEN_TYPE_CONFIDENTIAL },
+ { type: TOKEN_TYPE_LABEL },
+ { type: TOKEN_TYPE_MILESTONE },
+ { type: TOKEN_TYPE_MY_REACTION },
+ { type: TOKEN_TYPE_RELEASE },
+ { type: TOKEN_TYPE_SEARCH_WITHIN },
+ ]);
+ });
+ });
+ });
+
+ describe('Events', () => {
+ describe('when "click-tab" event is emitted by IssuableList', () => {
+ beforeEach(async () => {
+ wrapper = createComponent();
+ router.push = jest.fn();
+ await waitForPromises();
+
+ findIssuableList().vm.$emit('click-tab', STATUS_CLOSED);
+ });
+
+ it('updates ui to the new tab', () => {
+ expect(findIssuableList().props('currentTab')).toBe(STATUS_CLOSED);
+ });
+
+ it('updates url to the new tab', () => {
+ expect(router.push).toHaveBeenCalledWith({
+ query: expect.objectContaining({ state: STATUS_CLOSED }),
+ });
+ });
+ });
+
+ describe('when "reorder" event is emitted by IssuableList', () => {
+ const issueOne = {
+ ...defaultQueryResponse.data.project.issues.nodes[0],
+ id: 'gid://gitlab/Issue/1',
+ iid: '101',
+ reference: 'group/project#1',
+ webPath: '/group/project/-/issues/1',
+ };
+ const issueTwo = {
+ ...defaultQueryResponse.data.project.issues.nodes[0],
+ id: 'gid://gitlab/Issue/2',
+ iid: '102',
+ reference: 'group/project#2',
+ webPath: '/group/project/-/issues/2',
+ };
+ const issueThree = {
+ ...defaultQueryResponse.data.project.issues.nodes[0],
+ id: 'gid://gitlab/Issue/3',
+ iid: '103',
+ reference: 'group/project#3',
+ webPath: '/group/project/-/issues/3',
+ };
+ const issueFour = {
+ ...defaultQueryResponse.data.project.issues.nodes[0],
+ id: 'gid://gitlab/Issue/4',
+ iid: '104',
+ reference: 'group/project#4',
+ webPath: '/group/project/-/issues/4',
+ };
+ const response = () => ({
+ data: {
+ project: {
+ id: '1',
+ issues: {
+ ...defaultQueryResponse.data.project.issues,
+ nodes: [issueOne, issueTwo, issueThree, issueFour],
+ },
+ },
+ },
+ });
+
+ describe('when successful', () => {
+ describe.each`
+ description | issueToMove | oldIndex | newIndex | moveBeforeId | moveAfterId
+ ${'to the beginning of the list'} | ${issueThree} | ${2} | ${0} | ${null} | ${issueOne.id}
+ ${'down the list'} | ${issueOne} | ${0} | ${1} | ${issueTwo.id} | ${issueThree.id}
+ ${'up the list'} | ${issueThree} | ${2} | ${1} | ${issueOne.id} | ${issueTwo.id}
+ ${'to the end of the list'} | ${issueTwo} | ${1} | ${3} | ${issueFour.id} | ${null}
+ `(
+ 'when moving issue $description',
+ ({ issueToMove, oldIndex, newIndex, moveBeforeId, moveAfterId }) => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ serviceDeskIssuesQueryResponseHandler: jest.fn().mockResolvedValue(response()),
+ });
+ return waitForPromises();
+ });
+
+ it('makes API call to reorder the issue', async () => {
+ findIssuableList().vm.$emit('reorder', { oldIndex, newIndex });
+ await waitForPromises();
+
+ expect(axiosMock.history.put[0]).toMatchObject({
+ url: joinPaths(issueToMove.webPath, 'reorder'),
+ data: JSON.stringify({
+ move_before_id: getIdFromGraphQLId(moveBeforeId),
+ move_after_id: getIdFromGraphQLId(moveAfterId),
+ }),
+ });
+ });
+ },
+ );
+ });
+
+ describe('when unsuccessful', () => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ serviceDeskIssuesQueryResponseHandler: jest.fn().mockResolvedValue(response()),
+ });
+ return waitForPromises();
+ });
+
+ it('displays an error message', async () => {
+ axiosMock
+ .onPut(joinPaths(issueOne.webPath, 'reorder'))
+ .reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
+
+ findIssuableList().vm.$emit('reorder', { oldIndex: 0, newIndex: 1 });
+ await waitForPromises();
+
+ expect(findIssuableList().props('error')).toBe(ServiceDeskListApp.i18n.reorderError);
+ expect(Sentry.captureException).toHaveBeenCalledWith(
+ new Error('Request failed with status code 500'),
+ );
+ });
+ });
+ });
+
+ describe('when "sort" event is emitted by IssuableList', () => {
+ it.each(Object.keys(urlSortParams))(
+ 'updates to the new sort when payload is `%s`',
+ async (sortKey) => {
+ // Ensure initial sort key is different so we can trigger an update when emitting a sort key
+ wrapper =
+ sortKey === CREATED_DESC
+ ? createComponent({ provide: { initialSort: UPDATED_DESC } })
+ : createComponent();
+ router.push = jest.fn();
+ await waitForPromises();
+
+ findIssuableList().vm.$emit('sort', sortKey);
+
+ expect(router.push).toHaveBeenCalledWith({
+ query: expect.objectContaining({ sort: urlSortParams[sortKey] }),
+ });
+ },
+ );
+
+ describe('when issue repositioning is disabled', () => {
+ const initialSort = CREATED_DESC;
+
+ beforeEach(async () => {
+ wrapper = createComponent({
+ provide: { initialSort, isIssueRepositioningDisabled: true },
+ });
+ router.push = jest.fn();
+ await waitForPromises();
+
+ findIssuableList().vm.$emit('sort', RELATIVE_POSITION_ASC);
+ });
+
+ it('does not update the sort to manual', () => {
+ expect(router.push).not.toHaveBeenCalled();
+ });
+
+ it('shows an alert to tell the user that manual reordering is disabled', () => {
+ expect(createAlert).toHaveBeenCalledWith({
+ message: ServiceDeskListApp.i18n.issueRepositioningMessage,
+ variant: VARIANT_INFO,
+ });
+ });
+ });
+
+ describe('when user is signed in', () => {
+ it('calls mutation to save sort preference', async () => {
+ const mutationMock = jest.fn().mockResolvedValue(setSortPreferenceMutationResponse);
+ wrapper = createComponent({ sortPreferenceMutationResponse: mutationMock });
+ await waitForPromises();
+
+ findIssuableList().vm.$emit('sort', UPDATED_DESC);
+
+ expect(mutationMock).toHaveBeenCalledWith({ input: { issuesSort: UPDATED_DESC } });
+ });
+
+ it('captures error when mutation response has errors', async () => {
+ const mutationMock = jest
+ .fn()
+ .mockResolvedValue(setSortPreferenceMutationResponseWithErrors);
+ wrapper = createComponent({ sortPreferenceMutationResponse: mutationMock });
+ await waitForPromises();
+
+ findIssuableList().vm.$emit('sort', UPDATED_DESC);
+ await waitForPromises();
+
+ expect(Sentry.captureException).toHaveBeenCalledWith(new Error('oh no!'));
+ });
+ });
+
+ describe('when user is signed out', () => {
+ it('does not call mutation to save sort preference', async () => {
+ const mutationMock = jest.fn().mockResolvedValue(setSortPreferenceMutationResponse);
+ wrapper = createComponent({
+ provide: { isSignedIn: false },
+ sortPreferenceMutationResponse: mutationMock,
+ });
+ await waitForPromises();
+
+ findIssuableList().vm.$emit('sort', CREATED_DESC);
+
+ expect(mutationMock).not.toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe.each`
+ event | params
+ ${'next-page'} | ${{ page_after: 'endcursor', page_before: undefined, first_page_size: 20, last_page_size: undefined }}
+ ${'previous-page'} | ${{ page_after: undefined, page_before: 'startcursor', first_page_size: undefined, last_page_size: 20 }}
+ `('when "$event" event is emitted by IssuableList', ({ event, params }) => {
+ beforeEach(async () => {
+ wrapper = createComponent({
+ data: {
+ pageInfo: {
+ endCursor: 'endCursor',
+ startCursor: 'startCursor',
+ },
+ },
+ });
+ await waitForPromises();
+ router.push = jest.fn();
+
+ findIssuableList().vm.$emit(event);
+ });
+
+ it('scrolls to the top', () => {
+ expect(scrollUp).toHaveBeenCalled();
+ });
+
+ it('updates url', () => {
+ expect(router.push).toHaveBeenCalledWith({
+ query: expect.objectContaining(params),
+ });
+ });
+ });
+
+ describe('when "filter" event is emitted by IssuableList', () => {
+ it('updates IssuableList with url params', async () => {
+ wrapper = createComponent();
+ router.push = jest.fn();
+ await waitForPromises();
+
+ findIssuableList().vm.$emit('filter', filteredTokens);
+ await nextTick();
+
+ expect(router.push).toHaveBeenCalledWith({
+ query: expect.objectContaining(urlParams),
+ });
+ });
+ });
+
+ describe('when "page-size-change" event is emitted by IssuableList', () => {
+ it('updates url params with new page size', async () => {
+ wrapper = createComponent();
+ router.push = jest.fn();
+ await waitForPromises();
+
+ findIssuableList().vm.$emit('page-size-change', 50);
+ await nextTick();
+
+ expect(router.push).toHaveBeenCalledTimes(1);
+ expect(router.push).toHaveBeenCalledWith({
+ query: expect.objectContaining({ first_page_size: 50 }),
+ });
+ });
+ });
+ });
+
+ describe('Errors', () => {
+ describe.each`
+ error | responseHandler | message
+ ${'fetching issues'} | ${'serviceDeskIssuesQueryResponseHandler'} | ${'An error occurred while loading issues'}
+ ${'fetching issue counts'} | ${'serviceDeskIssuesCountsQueryResponseHandler'} | ${'An error occurred while getting issue counts'}
+ `('when there is an error $error', ({ responseHandler, message }) => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ [responseHandler]: jest.fn().mockRejectedValue(new Error('ERROR')),
+ });
+ return waitForPromises();
+ });
+
+ it('shows an error message', () => {
+ expect(findIssuableList().props('error')).toBe(message);
+ });
+
+ it('is captured with Sentry', () => {
+ expect(Sentry.captureException).toHaveBeenCalledWith(new Error('ERROR'));
+ });
+ });
+
+ it('clears error message when "dismiss-alert" event is emitted from IssuableList', async () => {
+ wrapper = createComponent({
+ serviceDeskIssuesQueryResponseHandler: jest.fn().mockRejectedValue(new Error()),
+ });
+ await waitForPromises();
+ findIssuableList().vm.$emit('dismiss-alert');
+ await nextTick();
+
+ expect(findIssuableList().props('error')).toBe('');
+ });
+ });
+
+ describe('When providing token for labels', () => {
+ it('passes function to fetchLatestLabels property if frontend caching is enabled', async () => {
+ wrapper = createComponent({
+ provide: {
+ glFeatures: {
+ frontendCaching: true,
+ },
+ },
+ });
+ await waitForPromises();
+
+ expect(typeof findLabelsToken().fetchLatestLabels).toBe('function');
+ });
+
+ it('passes null to fetchLatestLabels property if frontend caching is disabled', async () => {
+ wrapper = createComponent({
+ provide: {
+ glFeatures: {
+ frontendCaching: false,
+ },
+ },
+ });
+ await waitForPromises();
+
+ expect(findLabelsToken().fetchLatestLabels).toBe(null);
+ });
+ });
+});
diff --git a/spec/frontend/service_desk/mock_data.js b/spec/frontend/issues/service_desk/mock_data.js
index dc875cb5c1e..1e2f209d732 100644
--- a/spec/frontend/service_desk/mock_data.js
+++ b/spec/frontend/issues/service_desk/mock_data.js
@@ -74,6 +74,7 @@ export const getServiceDeskIssuesQueryResponse = {
username: 'support-bot',
webUrl: 'url/hsimpson',
},
+ externalAuthor: 'client@client.com',
labels: {
nodes: [
{
@@ -134,6 +135,22 @@ export const getServiceDeskIssuesCountsQueryResponse = {
},
};
+export const setSortPreferenceMutationResponse = {
+ data: {
+ userPreferencesUpdate: {
+ errors: [],
+ },
+ },
+};
+
+export const setSortPreferenceMutationResponseWithErrors = {
+ data: {
+ userPreferencesUpdate: {
+ errors: ['oh no!'],
+ },
+ },
+};
+
export const filteredTokens = [
{ type: FILTERED_SEARCH_TERM, value: { data: 'find issues', operator: 'undefined' } },
{ type: TOKEN_TYPE_ASSIGNEE, value: { data: 'bart', operator: OPERATOR_IS } },
diff --git a/spec/frontend/issues/show/components/app_spec.js b/spec/frontend/issues/show/components/app_spec.js
index de183f94277..8999952c54c 100644
--- a/spec/frontend/issues/show/components/app_spec.js
+++ b/spec/frontend/issues/show/components/app_spec.js
@@ -1,23 +1,14 @@
-import { GlIcon, GlIntersectionObserver } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
-import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
-import {
- issuableStatusText,
- STATUS_CLOSED,
- STATUS_OPEN,
- STATUS_REOPENED,
- TYPE_EPIC,
- TYPE_INCIDENT,
- TYPE_ISSUE,
-} from '~/issues/constants';
+import { TYPE_EPIC, TYPE_INCIDENT, TYPE_ISSUE } from '~/issues/constants';
import IssuableApp from '~/issues/show/components/app.vue';
import DescriptionComponent from '~/issues/show/components/description.vue';
import EditedComponent from '~/issues/show/components/edited.vue';
import FormComponent from '~/issues/show/components/form.vue';
+import StickyHeader from '~/issues/show/components/sticky_header.vue';
import TitleComponent from '~/issues/show/components/title.vue';
import IncidentTabs from '~/issues/show/components/incidents/incident_tabs.vue';
import PinnedLinks from '~/issues/show/components/pinned_links.vue';
@@ -44,22 +35,15 @@ describe('Issuable output', () => {
let axiosMock;
let wrapper;
- const findStickyHeader = () => wrapper.findByTestId('issue-sticky-header');
- const findLockedBadge = () => wrapper.findByTestId('locked');
- const findConfidentialBadge = () => wrapper.findByTestId('confidential');
- const findHiddenBadge = () => wrapper.findByTestId('hidden');
-
+ const findStickyHeader = () => wrapper.findComponent(StickyHeader);
const findTitle = () => wrapper.findComponent(TitleComponent);
const findDescription = () => wrapper.findComponent(DescriptionComponent);
const findEdited = () => wrapper.findComponent(EditedComponent);
const findForm = () => wrapper.findComponent(FormComponent);
const findPinnedLinks = () => wrapper.findComponent(PinnedLinks);
- const createComponent = ({ props = {}, options = {}, data = {} } = {}) => {
- wrapper = shallowMountExtended(IssuableApp, {
- directives: {
- GlTooltip: createMockDirective('gl-tooltip'),
- },
+ const createComponent = ({ props = {}, options = {} } = {}) => {
+ wrapper = shallowMount(IssuableApp, {
propsData: { ...appProps, ...props },
provide: {
fullPath: 'gitlab-org/incidents',
@@ -69,11 +53,6 @@ describe('Issuable output', () => {
HighlightBar: true,
IncidentTabs: true,
},
- data() {
- return {
- ...data,
- };
- },
...options,
});
@@ -81,13 +60,6 @@ describe('Issuable output', () => {
return waitForPromises();
};
- const createComponentAndScroll = async (props) => {
- await createComponent({ props });
- global.pageYOffset = 100;
- wrapper.findComponent(GlIntersectionObserver).vm.$emit('disappear');
- await nextTick();
- };
-
const emitHubEvent = (event) => {
eventHub.$emit(event);
return waitForPromises();
@@ -332,104 +304,36 @@ describe('Issuable output', () => {
describe('when title is in view', () => {
it('is not shown', async () => {
await createComponent();
- wrapper.findComponent(GlIntersectionObserver).vm.$emit('disappear');
- expect(findStickyHeader().exists()).toBe(false);
+ wrapper.findComponent(StickyHeader).vm.$emit('show');
+
+ expect(findStickyHeader().props('show')).toBe(false);
});
});
- describe('when title is not in view', () => {
- it.each([TYPE_INCIDENT, TYPE_ISSUE, TYPE_EPIC])(
- 'shows with title when issuableType="%s"',
- async (issuableType) => {
- await createComponentAndScroll({ issuableType });
-
- expect(findStickyHeader().text()).toContain('this is a title');
- },
- );
-
- it.each`
- issuableType | issuableStatus | statusIcon
- ${TYPE_INCIDENT} | ${STATUS_OPEN} | ${'issues'}
- ${TYPE_INCIDENT} | ${STATUS_CLOSED} | ${'issue-closed'}
- ${TYPE_ISSUE} | ${STATUS_OPEN} | ${'issues'}
- ${TYPE_ISSUE} | ${STATUS_CLOSED} | ${'issue-closed'}
- ${TYPE_EPIC} | ${STATUS_OPEN} | ${'epic'}
- ${TYPE_EPIC} | ${STATUS_CLOSED} | ${'epic-closed'}
- `(
- 'shows with state icon "$statusIcon" for $issuableType when status is $issuableStatus',
- async ({ issuableType, issuableStatus, statusIcon }) => {
- await createComponentAndScroll({ issuableType, issuableStatus });
-
- expect(findStickyHeader().findComponent(GlIcon).props('name')).toBe(statusIcon);
- },
- );
-
- it.each`
- title | issuableStatus
- ${'shows with Open when status is opened'} | ${STATUS_OPEN}
- ${'shows with Closed when status is closed'} | ${STATUS_CLOSED}
- ${'shows with Open when status is reopened'} | ${STATUS_REOPENED}
- `('$title', async ({ issuableStatus }) => {
- await createComponentAndScroll({ issuableStatus });
-
- expect(findStickyHeader().text()).toContain(issuableStatusText[issuableStatus]);
- });
+ describe.each([TYPE_INCIDENT, TYPE_ISSUE, TYPE_EPIC])(
+ 'when title is not in view',
+ (issuableType) => {
+ beforeEach(async () => {
+ await createComponent({ props: { issuableType } });
- it.each`
- title | isConfidential
- ${'does not show confidential badge when issue is not confidential'} | ${false}
- ${'shows confidential badge when issue is confidential'} | ${true}
- `('$title', async ({ isConfidential }) => {
- await createComponentAndScroll({ isConfidential });
- const confidentialEl = findConfidentialBadge();
-
- expect(confidentialEl.exists()).toBe(isConfidential);
-
- if (isConfidential) {
- expect(confidentialEl.props()).toMatchObject({
- workspaceType: 'project',
- issuableType: 'issue',
- });
- }
- });
+ global.pageYOffset = 100;
+ wrapper.findComponent(StickyHeader).vm.$emit('show');
+ await nextTick();
+ });
- it.each`
- title | isLocked
- ${'does not show locked badge when issue is not locked'} | ${false}
- ${'shows locked badge when issue is locked'} | ${true}
- `('$title', async ({ isLocked }) => {
- await createComponentAndScroll({ isLocked });
- const lockedBadge = findLockedBadge();
-
- expect(lockedBadge.exists()).toBe(isLocked);
-
- if (isLocked) {
- expect(lockedBadge.attributes('title')).toBe(
- 'This issue is locked. Only project members can comment.',
- );
- expect(getBinding(lockedBadge.element, 'gl-tooltip')).not.toBeUndefined();
- }
- });
+ it(`shows when issuableType=${issuableType}`, () => {
+ expect(findStickyHeader().props('show')).toBe(true);
+ });
- it.each`
- title | isHidden
- ${'does not show hidden badge when issue is not hidden'} | ${false}
- ${'shows hidden badge when issue is hidden'} | ${true}
- `('$title', async ({ isHidden }) => {
- await createComponentAndScroll({ isHidden });
- const hiddenBadge = findHiddenBadge();
-
- expect(hiddenBadge.exists()).toBe(isHidden);
-
- if (isHidden) {
- expect(hiddenBadge.attributes('title')).toBe(
- 'This issue is hidden because its author has been banned',
- );
- expect(getBinding(hiddenBadge.element, 'gl-tooltip')).not.toBeUndefined();
- }
- });
- });
+ it('hides again when title is back in view', async () => {
+ wrapper.findComponent(StickyHeader).vm.$emit('hide');
+ await nextTick();
+
+ expect(findStickyHeader().props('show')).toBe(false);
+ });
+ },
+ );
});
describe('Composable description component', () => {
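A minimal sketch of the show/hide event contract the reworked app_spec.js exercises on StickyHeader; the default props mirror the sticky_header_spec.js defaults below, and the constants come from ~/issues/constants. It is illustrative only, not part of either spec.

import { shallowMount } from '@vue/test-utils';
import { STATUS_OPEN, TYPE_ISSUE } from '~/issues/constants';
import StickyHeader from '~/issues/show/components/sticky_header.vue';

it('emits show and hide events consumed by the parent app (sketch)', () => {
  const wrapper = shallowMount(StickyHeader, {
    propsData: {
      issuableStatus: STATUS_OPEN,
      issuableType: TYPE_ISSUE,
      show: false,
      title: 'A sticky issue',
      titleHtml: '',
    },
  });

  // The parent app listens for these events to toggle its `show` prop.
  wrapper.vm.$emit('show');
  wrapper.vm.$emit('hide');

  expect(wrapper.emitted('show')).toHaveLength(1);
  expect(wrapper.emitted('hide')).toHaveLength(1);
});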
diff --git a/spec/frontend/issues/show/components/sticky_header_spec.js b/spec/frontend/issues/show/components/sticky_header_spec.js
new file mode 100644
index 00000000000..0c54ae45e70
--- /dev/null
+++ b/spec/frontend/issues/show/components/sticky_header_spec.js
@@ -0,0 +1,135 @@
+import { GlIcon } from '@gitlab/ui';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import {
+ issuableStatusText,
+ STATUS_CLOSED,
+ STATUS_OPEN,
+ STATUS_REOPENED,
+ TYPE_EPIC,
+ TYPE_INCIDENT,
+ TYPE_ISSUE,
+} from '~/issues/constants';
+import StickyHeader from '~/issues/show/components/sticky_header.vue';
+import ConfidentialityBadge from '~/vue_shared/components/confidentiality_badge.vue';
+
+describe('StickyHeader component', () => {
+ let wrapper;
+
+ const findConfidentialBadge = () => wrapper.findComponent(ConfidentialityBadge);
+ const findHiddenBadge = () => wrapper.findByTestId('hidden');
+ const findLockedBadge = () => wrapper.findByTestId('locked');
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMountExtended(StickyHeader, {
+ directives: {
+ GlTooltip: createMockDirective('gl-tooltip'),
+ },
+ propsData: {
+ issuableStatus: STATUS_OPEN,
+ issuableType: TYPE_ISSUE,
+ show: true,
+ title: 'A sticky issue',
+ titleHtml: '',
+ ...props,
+ },
+ });
+ };
+
+ it.each`
+ issuableType | issuableStatus | statusIcon
+ ${TYPE_INCIDENT} | ${STATUS_OPEN} | ${'issues'}
+ ${TYPE_INCIDENT} | ${STATUS_CLOSED} | ${'issue-closed'}
+ ${TYPE_ISSUE} | ${STATUS_OPEN} | ${'issues'}
+ ${TYPE_ISSUE} | ${STATUS_CLOSED} | ${'issue-closed'}
+ ${TYPE_EPIC} | ${STATUS_OPEN} | ${'epic'}
+ ${TYPE_EPIC} | ${STATUS_CLOSED} | ${'epic-closed'}
+ `(
+ 'shows with state icon "$statusIcon" for $issuableType when status is $issuableStatus',
+ ({ issuableType, issuableStatus, statusIcon }) => {
+ createComponent({ issuableType, issuableStatus });
+
+ expect(wrapper.findComponent(GlIcon).props('name')).toBe(statusIcon);
+ },
+ );
+
+ it.each`
+ title | issuableStatus
+ ${'shows with Open when status is opened'} | ${STATUS_OPEN}
+ ${'shows with Closed when status is closed'} | ${STATUS_CLOSED}
+ ${'shows with Open when status is reopened'} | ${STATUS_REOPENED}
+ `('$title', ({ issuableStatus }) => {
+ createComponent({ issuableStatus });
+
+ expect(wrapper.text()).toContain(issuableStatusText[issuableStatus]);
+ });
+
+ it.each`
+ title | isConfidential
+ ${'does not show confidential badge when issue is not confidential'} | ${false}
+ ${'shows confidential badge when issue is confidential'} | ${true}
+ `('$title', ({ isConfidential }) => {
+ createComponent({ isConfidential });
+ const confidentialBadge = findConfidentialBadge();
+
+ expect(confidentialBadge.exists()).toBe(isConfidential);
+
+ if (isConfidential) {
+ expect(confidentialBadge.props()).toMatchObject({
+ workspaceType: 'project',
+ issuableType: 'issue',
+ });
+ }
+ });
+
+ it.each`
+ title | isLocked
+ ${'does not show locked badge when issue is not locked'} | ${false}
+ ${'shows locked badge when issue is locked'} | ${true}
+ `('$title', ({ isLocked }) => {
+ createComponent({ isLocked });
+ const lockedBadge = findLockedBadge();
+
+ expect(lockedBadge.exists()).toBe(isLocked);
+
+ if (isLocked) {
+ expect(lockedBadge.attributes('title')).toBe(
+ 'This issue is locked. Only project members can comment.',
+ );
+ expect(getBinding(lockedBadge.element, 'gl-tooltip')).not.toBeUndefined();
+ }
+ });
+
+ it.each`
+ title | isHidden
+ ${'does not show hidden badge when issue is not hidden'} | ${false}
+ ${'shows hidden badge when issue is hidden'} | ${true}
+ `('$title', ({ isHidden }) => {
+ createComponent({ isHidden });
+ const hiddenBadge = findHiddenBadge();
+
+ expect(hiddenBadge.exists()).toBe(isHidden);
+
+ if (isHidden) {
+ expect(hiddenBadge.attributes('title')).toBe(
+ 'This issue is hidden because its author has been banned',
+ );
+ expect(getBinding(hiddenBadge.element, 'gl-tooltip')).not.toBeUndefined();
+ }
+ });
+
+ it('shows with title', () => {
+ createComponent();
+ const title = wrapper.find('a');
+
+ expect(title.text()).toContain('A sticky issue');
+ expect(title.attributes('href')).toBe('#top');
+ });
+
+ it('shows title containing markup', () => {
+ const titleHtml = '<b>A sticky issue</b>';
+ createComponent({ titleHtml });
+
+ expect(wrapper.find('a').html()).toContain(titleHtml);
+ });
+});
diff --git a/spec/frontend/issues/show/components/task_list_item_actions_spec.js b/spec/frontend/issues/show/components/task_list_item_actions_spec.js
index 93cb7b5ae16..b2e57bf49d0 100644
--- a/spec/frontend/issues/show/components/task_list_item_actions_spec.js
+++ b/spec/frontend/issues/show/components/task_list_item_actions_spec.js
@@ -1,5 +1,6 @@
-import { GlDisclosureDropdown, GlDisclosureDropdownItem } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { GlDisclosureDropdown } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { TYPE_EPIC, TYPE_INCIDENT, TYPE_ISSUE } from '~/issues/constants';
import TaskListItemActions from '~/issues/show/components/task_list_item_actions.vue';
import eventHub from '~/issues/show/event_hub';
@@ -9,26 +10,24 @@ describe('TaskListItemActions component', () => {
let wrapper;
const findGlDisclosureDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
- const findConvertToTaskItem = () => wrapper.findAllComponents(GlDisclosureDropdownItem).at(0);
- const findDeleteItem = () => wrapper.findAllComponents(GlDisclosureDropdownItem).at(1);
+ const findConvertToTaskItem = () => wrapper.findByTestId('convert');
+ const findDeleteItem = () => wrapper.findByTestId('delete');
- const mountComponent = () => {
+ const mountComponent = ({ issuableType = TYPE_ISSUE } = {}) => {
const li = document.createElement('li');
li.dataset.sourcepos = '3:1-3:10';
li.appendChild(document.createElement('div'));
document.body.appendChild(li);
- wrapper = shallowMount(TaskListItemActions, {
- provide: { canUpdate: true },
+ wrapper = shallowMountExtended(TaskListItemActions, {
+ provide: { canUpdate: true, issuableType },
attachTo: document.querySelector('div'),
});
};
- beforeEach(() => {
+ it('renders dropdown', () => {
mountComponent();
- });
- it('renders dropdown', () => {
expect(findGlDisclosureDropdown().props()).toMatchObject({
category: 'tertiary',
icon: 'ellipsis_v',
@@ -38,15 +37,36 @@ describe('TaskListItemActions component', () => {
});
});
- it('emits event when `Convert to task` dropdown item is clicked', () => {
- findConvertToTaskItem().vm.$emit('action');
+ describe('"Convert to task" dropdown item', () => {
+ describe.each`
+ issuableType | exists
+ ${TYPE_EPIC} | ${false}
+ ${TYPE_INCIDENT} | ${true}
+ ${TYPE_ISSUE} | ${true}
+ `(`when $issuableType`, ({ issuableType, exists }) => {
+ it(`${exists ? 'renders' : 'does not render'}`, () => {
+ mountComponent({ issuableType });
- expect(eventHub.$emit).toHaveBeenCalledWith('convert-task-list-item', '3:1-3:10');
+ expect(findConvertToTaskItem().exists()).toBe(exists);
+ });
+ });
});
- it('emits event when `Delete` dropdown item is clicked', () => {
- findDeleteItem().vm.$emit('action');
+ describe('events', () => {
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ it('emits event when `Convert to task` dropdown item is clicked', () => {
+ findConvertToTaskItem().vm.$emit('action');
+
+ expect(eventHub.$emit).toHaveBeenCalledWith('convert-task-list-item', '3:1-3:10');
+ });
- expect(eventHub.$emit).toHaveBeenCalledWith('delete-task-list-item', '3:1-3:10');
+ it('emits event when `Delete` dropdown item is clicked', () => {
+ findDeleteItem().vm.$emit('action');
+
+ expect(eventHub.$emit).toHaveBeenCalledWith('delete-task-list-item', '3:1-3:10');
+ });
});
});
diff --git a/spec/frontend/issues/show/issue_spec.js b/spec/frontend/issues/show/issue_spec.js
deleted file mode 100644
index 561035242eb..00000000000
--- a/spec/frontend/issues/show/issue_spec.js
+++ /dev/null
@@ -1,43 +0,0 @@
-import MockAdapter from 'axios-mock-adapter';
-import waitForPromises from 'helpers/wait_for_promises';
-import { initIssueApp } from '~/issues/show';
-import * as parseData from '~/issues/show/utils/parse_data';
-import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
-import createStore from '~/notes/stores';
-import { appProps } from './mock_data/mock_data';
-
-const mock = new MockAdapter(axios);
-mock.onGet().reply(HTTP_STATUS_OK);
-
-jest.mock('~/lib/utils/poll');
-
-const setupHTML = (initialData) => {
- document.body.innerHTML = `<div id="js-issuable-app"></div>`;
- document.getElementById('js-issuable-app').dataset.initial = JSON.stringify(initialData);
-};
-
-describe('Issue show index', () => {
- describe('initIssueApp', () => {
- // quarantine: https://gitlab.com/gitlab-org/gitlab/-/issues/390368
- // eslint-disable-next-line jest/no-disabled-tests
- it.skip('should initialize app with no potential XSS attack', async () => {
- const alertSpy = jest.spyOn(window, 'alert').mockImplementation(() => {});
- const parseDataSpy = jest.spyOn(parseData, 'parseIssuableData');
-
- setupHTML({
- ...appProps,
- initialDescriptionHtml: '<svg onload=window.alert(1)>',
- });
-
- const initialDataEl = document.getElementById('js-issuable-app');
- const issuableData = parseData.parseIssuableData(initialDataEl);
- initIssueApp(issuableData, createStore());
-
- await waitForPromises();
-
- expect(parseDataSpy).toHaveBeenCalled();
- expect(alertSpy).not.toHaveBeenCalled();
- });
- });
-});
diff --git a/spec/frontend/issues/show/mock_data/mock_data.js b/spec/frontend/issues/show/mock_data/mock_data.js
index ed969a08ac5..37aa18ced8d 100644
--- a/spec/frontend/issues/show/mock_data/mock_data.js
+++ b/spec/frontend/issues/show/mock_data/mock_data.js
@@ -1,8 +1,9 @@
import { TEST_HOST } from 'helpers/test_constants';
export const initialRequest = {
- title: '<p>this is a title</p>',
+ title: '<gl-emoji title="party-parrot"></gl-emoji>this is a title',
title_text: 'this is a title',
+ title_html: '<gl-emoji title="party-parrot"></gl-emoji>this is a title',
description: '<p>this is a description!</p>',
description_text: 'this is a description',
task_completion_status: { completed_count: 2, count: 4 },
diff --git a/spec/frontend/issues/show/store_spec.js b/spec/frontend/issues/show/store_spec.js
deleted file mode 100644
index 20d3a6cdaae..00000000000
--- a/spec/frontend/issues/show/store_spec.js
+++ /dev/null
@@ -1,39 +0,0 @@
-import Store from '~/issues/show/stores';
-import updateDescription from '~/issues/show/utils/update_description';
-
-jest.mock('~/issues/show/utils/update_description');
-
-describe('Store', () => {
- let store;
-
- beforeEach(() => {
- store = new Store({
- descriptionHtml: '<p>This is a description</p>',
- });
- });
-
- describe('updateState', () => {
- beforeEach(() => {
- document.body.innerHTML = `
- <div class="detail-page-description content-block">
- <details open>
- <summary>One</summary>
- </details>
- <details>
- <summary>Two</summary>
- </details>
- </div>
- `;
- });
-
- afterEach(() => {
- document.getElementsByTagName('html')[0].innerHTML = '';
- });
-
- it('calls updateDetailsState', () => {
- store.updateState({ description: '' });
-
- expect(updateDescription).toHaveBeenCalledTimes(1);
- });
- });
-});
diff --git a/spec/frontend/jira_connect/branches/components/source_branch_dropdown_spec.js b/spec/frontend/jira_connect/branches/components/source_branch_dropdown_spec.js
index cf2dacb50d8..95658f66d09 100644
--- a/spec/frontend/jira_connect/branches/components/source_branch_dropdown_spec.js
+++ b/spec/frontend/jira_connect/branches/components/source_branch_dropdown_spec.js
@@ -1,58 +1,45 @@
-import { GlCollapsibleListbox } from '@gitlab/ui';
-import { mount, shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
+import { GlCollapsibleListbox } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
+
import SourceBranchDropdown from '~/jira_connect/branches/components/source_branch_dropdown.vue';
import { BRANCHES_PER_PAGE } from '~/jira_connect/branches/constants';
import getProjectQuery from '~/jira_connect/branches/graphql/queries/get_project.query.graphql';
-import { mockProjects } from '../mock_data';
-
-const mockProject = {
- id: 'test',
- repository: {
- branchNames: ['main', 'f-test', 'release'],
- rootRef: 'main',
- },
-};
-const mockSelectedProject = mockProjects[0];
-
-const mockProjectQueryResponse = {
- data: {
- project: mockProject,
- },
-};
-const mockGetProjectQuery = jest.fn().mockResolvedValue(mockProjectQueryResponse);
-const mockQueryLoading = jest.fn().mockReturnValue(new Promise(() => {}));
+import {
+ mockBranchNames,
+ mockBranchNames2,
+ mockProjects,
+ mockProjectQueryResponse,
+} from '../mock_data';
+
+Vue.use(VueApollo);
describe('SourceBranchDropdown', () => {
let wrapper;
+ const mockSelectedProject = mockProjects[0];
+ const querySuccessHandler = jest.fn().mockResolvedValue(mockProjectQueryResponse());
+ const queryLoadingHandler = jest.fn().mockReturnValue(new Promise(() => {}));
+
const findListbox = () => wrapper.findComponent(GlCollapsibleListbox);
- const assertListboxItems = () => {
+ const assertListboxItems = (branchNames = mockBranchNames) => {
const listboxItems = findListbox().props('items');
- expect(listboxItems).toHaveLength(mockProject.repository.branchNames.length);
- expect(listboxItems.map((item) => item.text)).toEqual(mockProject.repository.branchNames);
+ expect(listboxItems).toHaveLength(branchNames.length);
+ expect(listboxItems.map((item) => item.text)).toEqual(branchNames);
};
- function createMockApolloProvider({ getProjectQueryLoading = false } = {}) {
- Vue.use(VueApollo);
-
- const mockApollo = createMockApollo([
- [getProjectQuery, getProjectQueryLoading ? mockQueryLoading : mockGetProjectQuery],
- ]);
+ const createComponent = ({ props, handler = querySuccessHandler } = {}) => {
+ const mockApollo = createMockApollo([[getProjectQuery, handler]]);
- return mockApollo;
- }
-
- function createComponent({ mockApollo, props, mountFn = shallowMount } = {}) {
- wrapper = mountFn(SourceBranchDropdown, {
- apolloProvider: mockApollo || createMockApolloProvider(),
+ wrapper = shallowMount(SourceBranchDropdown, {
+ apolloProvider: mockApollo,
propsData: props,
});
- }
+ };
describe('when `selectedProject` prop is not specified', () => {
beforeEach(() => {
@@ -78,6 +65,7 @@ describe('SourceBranchDropdown', () => {
loading: false,
searchable: true,
searching: false,
+ selected: null,
toggleText: 'Select a branch',
});
});
@@ -92,23 +80,26 @@ describe('SourceBranchDropdown', () => {
describe('when branches are loading', () => {
it('sets loading prop to true', () => {
createComponent({
- mockApollo: createMockApolloProvider({ getProjectQueryLoading: true }),
props: { selectedProject: mockSelectedProject },
+ handler: queryLoadingHandler,
});
- expect(findListbox().props('loading')).toEqual(true);
+ expect(findListbox().props('loading')).toBe(true);
});
});
describe('when branches have loaded', () => {
describe('when searching branches', () => {
it('triggers a refetch', async () => {
- createComponent({ mountFn: mount, props: { selectedProject: mockSelectedProject } });
+ createComponent({ props: { selectedProject: mockSelectedProject } });
await waitForPromises();
const mockSearchTerm = 'mai';
+ expect(querySuccessHandler).toHaveBeenCalledTimes(1);
+
await findListbox().vm.$emit('search', mockSearchTerm);
- expect(mockGetProjectQuery).toHaveBeenCalledWith({
+ expect(querySuccessHandler).toHaveBeenCalledTimes(2);
+ expect(querySuccessHandler).toHaveBeenLastCalledWith({
branchNamesLimit: BRANCHES_PER_PAGE,
branchNamesOffset: 0,
branchNamesSearchPattern: `*${mockSearchTerm}*`,
@@ -129,10 +120,15 @@ describe('SourceBranchDropdown', () => {
loading: false,
searchable: true,
searching: false,
+ selected: null,
toggleText: 'Select a branch',
});
});
+ it('disables infinite scroll', () => {
+ expect(findListbox().props('infiniteScroll')).toBe(false);
+ });
+
it('omits monospace styling from listbox', () => {
expect(findListbox().classes()).not.toContain('gl-font-monospace');
});
@@ -142,19 +138,19 @@ describe('SourceBranchDropdown', () => {
});
it("emits `change` event with the repository's `rootRef` by default", () => {
- expect(wrapper.emitted('change')[0]).toEqual([mockProject.repository.rootRef]);
+ expect(wrapper.emitted('change')[0]).toEqual([mockBranchNames[0]]);
});
describe('when selecting a listbox item', () => {
it('emits `change` event with the selected branch name', () => {
- const mockBranchName = mockProject.repository.branchNames[1];
+ const mockBranchName = mockBranchNames[1];
findListbox().vm.$emit('select', mockBranchName);
expect(wrapper.emitted('change')[1]).toEqual([mockBranchName]);
});
});
describe('when `selectedBranchName` prop is specified', () => {
- const mockBranchName = mockProject.repository.branchNames[2];
+ const mockBranchName = mockBranchNames[2];
beforeEach(() => {
wrapper.setProps({
@@ -162,6 +158,10 @@ describe('SourceBranchDropdown', () => {
});
});
+ it('sets listbox selected to `selectedBranchName`', () => {
+ expect(findListbox().props('selected')).toBe(mockBranchName);
+ });
+
it('sets listbox text to `selectedBranchName` value', () => {
expect(findListbox().props('toggleText')).toBe(mockBranchName);
});
@@ -170,6 +170,66 @@ describe('SourceBranchDropdown', () => {
expect(findListbox().classes()).toContain('gl-font-monospace');
});
});
+
+ describe('when full page of branches returns', () => {
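+ // Build exactly BRANCHES_PER_PAGE branch names so the dropdown treats the response as a full page.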
+ const fullPageBranchNames = Array(BRANCHES_PER_PAGE)
+ .fill(1)
+ .map((_, i) => mockBranchNames[i % mockBranchNames.length]);
+
+ beforeEach(async () => {
+ createComponent({
+ props: { selectedProject: mockSelectedProject },
+ handler: () => Promise.resolve(mockProjectQueryResponse(fullPageBranchNames)),
+ });
+ await waitForPromises();
+ });
+
+ it('enables infinite scroll', () => {
+ expect(findListbox().props('infiniteScroll')).toBe(true);
+ });
+ });
+ });
+
+ describe('when loading more branches from infinite scroll', () => {
+ const queryLoadMoreHandler = jest.fn();
+
+ beforeEach(async () => {
+ queryLoadMoreHandler.mockResolvedValueOnce(mockProjectQueryResponse());
+ queryLoadMoreHandler.mockResolvedValueOnce(mockProjectQueryResponse(mockBranchNames2));
+ createComponent({
+ props: { selectedProject: mockSelectedProject },
+ handler: queryLoadMoreHandler,
+ });
+
+ await waitForPromises();
+
+ await findListbox().vm.$emit('bottom-reached');
+ });
+
+ it('sets loading more prop to true', () => {
+ expect(findListbox().props('infiniteScrollLoading')).toBe(true);
+ });
+
+ it('triggers load more query', () => {
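+ // The offset of 3 matches mockBranchNames.length, i.e. the branches already loaded.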
+ expect(queryLoadMoreHandler).toHaveBeenLastCalledWith({
+ branchNamesLimit: BRANCHES_PER_PAGE,
+ branchNamesOffset: 3,
+ branchNamesSearchPattern: '*',
+ projectPath: 'test-path',
+ });
+ });
+
+ it('renders available source branches as listbox items', async () => {
+ await waitForPromises();
+
+ assertListboxItems([...mockBranchNames, ...mockBranchNames2]);
+ });
+
+ it('sets loading more prop to false once done', async () => {
+ await waitForPromises();
+
+ expect(findListbox().props('infiniteScrollLoading')).toBe(false);
+ });
});
});
});
diff --git a/spec/frontend/jira_connect/branches/mock_data.js b/spec/frontend/jira_connect/branches/mock_data.js
index 742ab5392c8..1720e0118c8 100644
--- a/spec/frontend/jira_connect/branches/mock_data.js
+++ b/spec/frontend/jira_connect/branches/mock_data.js
@@ -1,3 +1,6 @@
+export const mockBranchNames = ['main', 'f-test', 'release'];
+export const mockBranchNames2 = ['dev', 'dev-1', 'dev-2'];
+
export const mockProjects = [
{
id: 'test',
@@ -28,3 +31,15 @@ export const mockProjects = [
},
},
];
+
+export const mockProjectQueryResponse = (branchNames = mockBranchNames) => ({
+ data: {
+ project: {
+ id: 'gid://gitlab/Project/27',
+ repository: {
+ branchNames,
+ rootRef: 'main',
+ },
+ },
+ },
+});
diff --git a/spec/frontend/jira_connect/subscriptions/components/__snapshots__/group_item_name_spec.js.snap b/spec/frontend/jira_connect/subscriptions/components/__snapshots__/group_item_name_spec.js.snap
index 21c903f064d..af9f827117f 100644
--- a/spec/frontend/jira_connect/subscriptions/components/__snapshots__/group_item_name_spec.js.snap
+++ b/spec/frontend/jira_connect/subscriptions/components/__snapshots__/group_item_name_spec.js.snap
@@ -2,16 +2,15 @@
exports[`GroupItemName template matches the snapshot 1`] = `
<div
- class="gl-display-flex gl-align-items-center"
+ class="gl-align-items-center gl-display-flex"
>
<gl-icon-stub
class="gl-mr-3"
name="folder-o"
size="16"
/>
-
<div
- class="gl-display-none gl-flex-shrink-0 gl-sm-display-flex gl-mr-3"
+ class="gl-display-none gl-flex-shrink-0 gl-mr-3 gl-sm-display-flex"
>
<gl-avatar-stub
alt="avatar"
@@ -22,19 +21,15 @@ exports[`GroupItemName template matches the snapshot 1`] = `
src="avatar.png"
/>
</div>
-
<div>
<span
- class="gl-mr-3 gl-text-gray-900! gl-font-weight-bold"
+ class="gl-font-weight-bold gl-mr-3 gl-text-gray-900!"
>
-
Gitlab Org
-
</span>
-
<div>
<p
- class="gl-mt-2! gl-mb-0 gl-text-gray-600"
+ class="gl-mb-0 gl-mt-2! gl-text-gray-600"
>
Open source software to collaborate on code
</p>
diff --git a/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap b/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap
index abd849b387e..263deb3b616 100644
--- a/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap
+++ b/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap
@@ -4,23 +4,17 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
<table
aria-busy=""
aria-colcount="3"
- class="table b-table gl-table b-table-fixed"
+ class="b-table b-table-fixed gl-table table"
role="table"
>
- <!---->
- <!---->
<thead
- class=""
role="rowgroup"
>
- <!---->
<tr
- class=""
role="row"
>
<th
aria-colindex="1"
- class=""
role="columnheader"
scope="col"
>
@@ -31,7 +25,6 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
<th
aria-colindex="2"
aria-label="Arrow"
- class=""
role="columnheader"
scope="col"
>
@@ -39,7 +32,6 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
</th>
<th
aria-colindex="3"
- class=""
role="columnheader"
scope="col"
>
@@ -52,21 +44,17 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
<tbody
role="rowgroup"
>
- <!---->
<tr
- class=""
role="row"
>
<td
aria-colindex="1"
- class=""
role="cell"
>
Jane Doe
</td>
<td
aria-colindex="2"
- class=""
role="cell"
>
<svg
@@ -82,33 +70,26 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
</td>
<td
aria-colindex="3"
- class=""
role="cell"
>
<div
aria-label="The GitLab user to which the Jira user Jane Doe will be mapped"
- class="dropdown b-dropdown gl-dropdown w-100 btn-group"
+ class="b-dropdown btn-group dropdown gl-dropdown w-100"
>
- <!---->
<button
aria-expanded="false"
aria-haspopup="menu"
- class="btn dropdown-toggle btn-default btn-md gl-button gl-dropdown-toggle"
+ class="btn btn-default btn-md dropdown-toggle gl-button gl-dropdown-toggle"
type="button"
>
- <!---->
-
- <!---->
-
<span
class="gl-dropdown-button-text"
>
janedoe
</span>
-
<svg
aria-hidden="true"
- class="gl-button-icon dropdown-chevron gl-icon s16"
+ class="dropdown-chevron gl-button-icon gl-icon s16"
data-testid="chevron-down-icon"
role="img"
>
@@ -125,21 +106,15 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
<div
class="gl-dropdown-inner"
>
- <!---->
-
- <!---->
-
<div
class="gl-dropdown-contents"
>
- <!---->
-
<div
class="gl-search-box-by-type"
>
<svg
aria-hidden="true"
- class="gl-search-box-by-type-search-icon gl-icon s16"
+ class="gl-icon gl-search-box-by-type-search-icon s16"
data-testid="search-icon"
role="img"
>
@@ -147,17 +122,13 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
href="file-mock#search"
/>
</svg>
-
<input
aria-label="Search"
- class="gl-form-input form-control gl-search-box-by-type-input"
+ class="form-control gl-form-input gl-search-box-by-type-input"
placeholder="Search"
type="search"
/>
-
- <!---->
</div>
-
<li
class="gl-dropdown-text text-secondary"
role="presentation"
@@ -165,33 +136,26 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
<p
class="b-dropdown-text"
>
-
- No matches found
-
+ No matches found
</p>
</li>
</div>
-
- <!---->
</div>
</ul>
</div>
</td>
</tr>
<tr
- class=""
role="row"
>
<td
aria-colindex="1"
- class=""
role="cell"
>
Fred Chopin
</td>
<td
aria-colindex="2"
- class=""
role="cell"
>
<svg
@@ -207,33 +171,26 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
</td>
<td
aria-colindex="3"
- class=""
role="cell"
>
<div
aria-label="The GitLab user to which the Jira user Fred Chopin will be mapped"
- class="dropdown b-dropdown gl-dropdown w-100 btn-group"
+ class="b-dropdown btn-group dropdown gl-dropdown w-100"
>
- <!---->
<button
aria-expanded="false"
aria-haspopup="menu"
- class="btn dropdown-toggle btn-default btn-md gl-button gl-dropdown-toggle"
+ class="btn btn-default btn-md dropdown-toggle gl-button gl-dropdown-toggle"
type="button"
>
- <!---->
-
- <!---->
-
<span
class="gl-dropdown-button-text"
>
mrgitlab
</span>
-
<svg
aria-hidden="true"
- class="gl-button-icon dropdown-chevron gl-icon s16"
+ class="dropdown-chevron gl-button-icon gl-icon s16"
data-testid="chevron-down-icon"
role="img"
>
@@ -250,21 +207,15 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
<div
class="gl-dropdown-inner"
>
- <!---->
-
- <!---->
-
<div
class="gl-dropdown-contents"
>
- <!---->
-
<div
class="gl-search-box-by-type"
>
<svg
aria-hidden="true"
- class="gl-search-box-by-type-search-icon gl-icon s16"
+ class="gl-icon gl-search-box-by-type-search-icon s16"
data-testid="search-icon"
role="img"
>
@@ -272,17 +223,13 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
href="file-mock#search"
/>
</svg>
-
<input
aria-label="Search"
- class="gl-form-input form-control gl-search-box-by-type-input"
+ class="form-control gl-form-input gl-search-box-by-type-input"
placeholder="Search"
type="search"
/>
-
- <!---->
</div>
-
<li
class="gl-dropdown-text text-secondary"
role="presentation"
@@ -290,22 +237,15 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
<p
class="b-dropdown-text"
>
-
- No matches found
-
+ No matches found
</p>
</li>
</div>
-
- <!---->
</div>
</ul>
</div>
</td>
</tr>
- <!---->
- <!---->
</tbody>
- <!---->
</table>
`;
diff --git a/spec/frontend/jobs/components/filtered_search/jobs_filtered_search_spec.js b/spec/frontend/jobs/components/filtered_search/jobs_filtered_search_spec.js
deleted file mode 100644
index 5ecddc7efd6..00000000000
--- a/spec/frontend/jobs/components/filtered_search/jobs_filtered_search_spec.js
+++ /dev/null
@@ -1,71 +0,0 @@
-import { GlFilteredSearch } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import {
- OPERATORS_IS,
- TOKEN_TITLE_STATUS,
- TOKEN_TYPE_STATUS,
-} from '~/vue_shared/components/filtered_search_bar/constants';
-import JobsFilteredSearch from '~/jobs/components/filtered_search/jobs_filtered_search.vue';
-import { mockFailedSearchToken } from '../../mock_data';
-
-describe('Jobs filtered search', () => {
- let wrapper;
-
- const findFilteredSearch = () => wrapper.findComponent(GlFilteredSearch);
- const getSearchToken = (type) =>
- findFilteredSearch()
- .props('availableTokens')
- .find((token) => token.type === type);
-
- const findStatusToken = () => getSearchToken('status');
-
- const createComponent = (props) => {
- wrapper = shallowMount(JobsFilteredSearch, {
- propsData: {
- ...props,
- },
- });
- };
-
- it('displays filtered search', () => {
- createComponent();
-
- expect(findFilteredSearch().exists()).toBe(true);
- });
-
- it('displays status token', () => {
- createComponent();
-
- expect(findStatusToken()).toMatchObject({
- type: TOKEN_TYPE_STATUS,
- icon: 'status',
- title: TOKEN_TITLE_STATUS,
- unique: true,
- operators: OPERATORS_IS,
- });
- });
-
- it('emits filter token to parent component', () => {
- createComponent();
-
- findFilteredSearch().vm.$emit('submit', mockFailedSearchToken);
-
- expect(wrapper.emitted('filterJobsBySearch')).toEqual([[mockFailedSearchToken]]);
- });
-
- it('filtered search value is empty array when no query string is passed', () => {
- createComponent();
-
- expect(findFilteredSearch().props('value')).toEqual([]);
- });
-
- it('filtered search returns correct data shape when passed query string', () => {
- const value = 'SUCCESS';
-
- createComponent({ queryString: { statuses: value } });
-
- expect(findFilteredSearch().props('value')).toEqual([
- { type: TOKEN_TYPE_STATUS, value: { data: value, operator: '=' } },
- ]);
- });
-});
diff --git a/spec/frontend/jobs/components/filtered_search/utils_spec.js b/spec/frontend/jobs/components/filtered_search/utils_spec.js
deleted file mode 100644
index 8440ab42b86..00000000000
--- a/spec/frontend/jobs/components/filtered_search/utils_spec.js
+++ /dev/null
@@ -1,19 +0,0 @@
-import { validateQueryString } from '~/jobs/components/filtered_search/utils';
-
-describe('Filtered search utils', () => {
- describe('validateQueryString', () => {
- it.each`
- queryStringObject | expected
- ${{ statuses: 'SUCCESS' }} | ${{ statuses: 'SUCCESS' }}
- ${{ statuses: 'failed' }} | ${{ statuses: 'FAILED' }}
- ${{ wrong: 'SUCCESS' }} | ${null}
- ${{ statuses: 'wrong' }} | ${null}
- ${{ wrong: 'wrong' }} | ${null}
- `(
- 'when provided $queryStringObject, the expected result is $expected',
- ({ queryStringObject, expected }) => {
- expect(validateQueryString(queryStringObject)).toEqual(expected);
- },
- );
- });
-});
diff --git a/spec/frontend/lib/utils/array_utility_spec.js b/spec/frontend/lib/utils/array_utility_spec.js
index 64ddd400114..94461c72106 100644
--- a/spec/frontend/lib/utils/array_utility_spec.js
+++ b/spec/frontend/lib/utils/array_utility_spec.js
@@ -42,4 +42,40 @@ describe('array_utility', () => {
expect(arrayUtils.getDuplicateItemsFromArray(array)).toEqual(result);
});
});
+
+ describe('toggleArrayItem', () => {
+ it('adds an item to the array if it does not exist', () => {
+ expect(arrayUtils.toggleArrayItem([], 'item')).toStrictEqual(['item']);
+ });
+
+ it('removes an item from the array if it already exists', () => {
+ expect(arrayUtils.toggleArrayItem(['item'], 'item')).toStrictEqual([]);
+ });
+
+ describe('pass by value', () => {
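+ // Items are matched by reference, so an equal-looking new object is appended rather than removed.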
+ it('does not toggle the array item when passed a new object', () => {
+ expect(arrayUtils.toggleArrayItem([{ a: 1 }], { a: 1 })).toStrictEqual([
+ { a: 1 },
+ { a: 1 },
+ ]);
+ });
+
+ it('does not toggle the array item when passed a new array', () => {
+ expect(arrayUtils.toggleArrayItem([[1]], [1])).toStrictEqual([[1], [1]]);
+ });
+ });
+
+ describe('pass by reference', () => {
+ const array = [1];
+ const object = { a: 1 };
+
+ it('toggles the array item when passed an object reference', () => {
+ expect(arrayUtils.toggleArrayItem([object], object)).toStrictEqual([]);
+ });
+
+ it('toggles the array item when passed an array reference', () => {
+ expect(arrayUtils.toggleArrayItem([array], array)).toStrictEqual([]);
+ });
+ });
+ });
});
diff --git a/spec/frontend/lib/utils/breadcrumbs_spec.js b/spec/frontend/lib/utils/breadcrumbs_spec.js
new file mode 100644
index 00000000000..3c29e3723d3
--- /dev/null
+++ b/spec/frontend/lib/utils/breadcrumbs_spec.js
@@ -0,0 +1,84 @@
+import { createWrapper } from '@vue/test-utils';
+import Vue from 'vue';
+import { injectVueAppBreadcrumbs } from '~/lib/utils/breadcrumbs';
+import { resetHTMLFixture, setHTMLFixture } from 'helpers/fixtures';
+import createMockApollo from 'helpers/mock_apollo_helper';
+
+describe('Breadcrumbs utils', () => {
+ const breadcrumbsHTML = `
+ <nav>
+ <ul class="js-breadcrumbs-list">
+ <li>
+ <a href="/group-name" data-testid="existing-crumb">Group name</a>
+ </li>
+ <li>
+ <a href="/group-name/project-name/-/subpage" data-testid="last-crumb">Subpage</a>
+ </li>
+ </ul>
+ </nav>
+ `;
+
+ const emptyBreadcrumbsHTML = `
+ <nav>
+ <ul class="js-breadcrumbs-list" data-testid="breadcrumbs-list">
+ </ul>
+ </nav>
+ `;
+
+ const mockRouter = jest.fn();
+ let MockComponent;
+ let mockApolloProvider;
+
+ beforeEach(() => {
+ MockComponent = Vue.component('MockComponent', {
+ render: (createElement) =>
+ createElement('span', {
+ attrs: {
+ 'data-testid': 'mock-component',
+ },
+ }),
+ });
+ mockApolloProvider = createMockApollo();
+ });
+
+ afterEach(() => {
+ resetHTMLFixture();
+ MockComponent = null;
+ });
+
+ describe('injectVueAppBreadcrumbs', () => {
+ describe('without any breadcrumbs', () => {
+ beforeEach(() => {
+ setHTMLFixture(emptyBreadcrumbsHTML);
+ });
+
+ it('returns early and stops trying to inject', () => {
+ expect(injectVueAppBreadcrumbs(mockRouter, MockComponent)).toBe(false);
+ });
+ });
+
+ describe('with breadcrumbs', () => {
+ beforeEach(() => {
+ setHTMLFixture(breadcrumbsHTML);
+ });
+
+ describe.each`
+ testLabel | apolloProvider
+ ${'set'} | ${mockApolloProvider}
+ ${'not set'} | ${null}
+ `('given the apollo provider is $testLabel', ({ apolloProvider }) => {
+ beforeEach(() => {
+ createWrapper(injectVueAppBreadcrumbs(mockRouter, MockComponent, apolloProvider));
+ });
+
+ it('returns a new breadcrumbs component replacing the injected HTML', () => {
+ // Using `querySelectorAll` because we're not testing a full Vue app.
+ // We are testing a partial Vue app added into the page's HTML.
+ expect(document.querySelectorAll('[data-testid="existing-crumb"]')).toHaveLength(1);
+ expect(document.querySelectorAll('[data-testid="last-crumb"]')).toHaveLength(0);
+ expect(document.querySelectorAll('[data-testid="mock-component"]')).toHaveLength(1);
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/lib/utils/common_utils_spec.js b/spec/frontend/lib/utils/common_utils_spec.js
index 444d4a96f9c..8697249ebf5 100644
--- a/spec/frontend/lib/utils/common_utils_spec.js
+++ b/spec/frontend/lib/utils/common_utils_spec.js
@@ -1174,4 +1174,43 @@ describe('common_utils', () => {
});
});
});
+
+ describe('cloneWithoutReferences', () => {
+ it('clones the provided object', () => {
+ const obj = {
+ foo: 'bar',
+ cool: 1337,
+ nested: {
+ peanut: 'butter',
+ },
+ arrays: [0, 1, 2],
+ };
+
+ const cloned = commonUtils.cloneWithoutReferences(obj);
+
+ expect(cloned).toMatchObject({
+ foo: 'bar',
+ cool: 1337,
+ nested: {
+ peanut: 'butter',
+ },
+ arrays: [0, 1, 2],
+ });
+ });
+
+ it('does not persist object references after cloning', () => {
+ const ref = {
+ foo: 'bar',
+ };
+
+ const obj = {
+ ref,
+ };
+
+ const cloned = commonUtils.cloneWithoutReferences(obj);
+
+ expect(cloned.ref).toMatchObject({ foo: 'bar' });
+ expect(cloned.ref === ref).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/lib/utils/datetime_range_spec.js b/spec/frontend/lib/utils/datetime_range_spec.js
deleted file mode 100644
index 996a8e2e47b..00000000000
--- a/spec/frontend/lib/utils/datetime_range_spec.js
+++ /dev/null
@@ -1,382 +0,0 @@
-import _ from 'lodash';
-import {
- getRangeType,
- convertToFixedRange,
- isEqualTimeRanges,
- findTimeRange,
- timeRangeToParams,
- timeRangeFromParams,
-} from '~/lib/utils/datetime_range';
-
-const MOCK_NOW = Date.UTC(2020, 0, 23, 20);
-
-const MOCK_NOW_ISO_STRING = new Date(MOCK_NOW).toISOString();
-
-const mockFixedRange = {
- label: 'January 2020',
- start: '2020-01-01T00:00:00.000Z',
- end: '2020-01-31T23:59:00.000Z',
-};
-
-const mockAnchoredRange = {
- label: 'First two minutes of 2020',
- anchor: '2020-01-01T00:00:00.000Z',
- direction: 'after',
- duration: {
- seconds: 60 * 2,
- },
-};
-
-const mockRollingRange = {
- label: 'Next 2 minutes',
- direction: 'after',
- duration: {
- seconds: 60 * 2,
- },
-};
-
-const mockOpenRange = {
- label: '2020 so far',
- anchor: '2020-01-01T00:00:00.000Z',
- direction: 'after',
-};
-
-describe('Date time range utils', () => {
- describe('getRangeType', () => {
- it('infers correctly the range type from the input object', () => {
- const rangeTypes = {
- fixed: [{ start: MOCK_NOW_ISO_STRING, end: MOCK_NOW_ISO_STRING }],
- anchored: [{ anchor: MOCK_NOW_ISO_STRING, duration: { seconds: 0 } }],
- rolling: [{ duration: { seconds: 0 } }],
- open: [{ anchor: MOCK_NOW_ISO_STRING }],
- invalid: [
- {},
- { start: MOCK_NOW_ISO_STRING },
- { end: MOCK_NOW_ISO_STRING },
- { start: 'NOT_A_DATE', end: 'NOT_A_DATE' },
- { duration: { seconds: 'NOT_A_NUMBER' } },
- { duration: { seconds: Infinity } },
- { duration: { minutes: 20 } },
- { anchor: MOCK_NOW_ISO_STRING, duration: { seconds: 'NOT_A_NUMBER' } },
- { anchor: MOCK_NOW_ISO_STRING, duration: { seconds: Infinity } },
- { junk: 'exists' },
- ],
- };
-
- Object.entries(rangeTypes).forEach(([type, examples]) => {
- examples.forEach((example) => expect(getRangeType(example)).toEqual(type));
- });
- });
- });
-
- describe('convertToFixedRange', () => {
- beforeEach(() => {
- jest.spyOn(Date, 'now').mockImplementation(() => MOCK_NOW);
- });
-
- afterEach(() => {
- Date.now.mockRestore();
- });
-
- describe('When a fixed range is input', () => {
- it('converts a fixed range to an equal fixed range', () => {
- expect(convertToFixedRange(mockFixedRange)).toEqual({
- start: mockFixedRange.start,
- end: mockFixedRange.end,
- });
- });
-
- it('throws an error when fixed range does not contain an end time', () => {
- const aFixedRangeMissingEnd = _.omit(mockFixedRange, 'end');
-
- expect(() => convertToFixedRange(aFixedRangeMissingEnd)).toThrow();
- });
-
- it('throws an error when fixed range does not contain a start time', () => {
- const aFixedRangeMissingStart = _.omit(mockFixedRange, 'start');
-
- expect(() => convertToFixedRange(aFixedRangeMissingStart)).toThrow();
- });
-
- it('throws an error when the dates cannot be parsed', () => {
- const wrongStart = { ...mockFixedRange, start: 'I_CANNOT_BE_PARSED' };
- const wrongEnd = { ...mockFixedRange, end: 'I_CANNOT_BE_PARSED' };
-
- expect(() => convertToFixedRange(wrongStart)).toThrow();
- expect(() => convertToFixedRange(wrongEnd)).toThrow();
- });
- });
-
- describe('When an anchored range is input', () => {
- it('converts to a fixed range', () => {
- expect(convertToFixedRange(mockAnchoredRange)).toEqual({
- start: '2020-01-01T00:00:00.000Z',
- end: '2020-01-01T00:02:00.000Z',
- });
- });
-
- it('converts to a fixed range with a `before` direction', () => {
- expect(convertToFixedRange({ ...mockAnchoredRange, direction: 'before' })).toEqual({
- start: '2019-12-31T23:58:00.000Z',
- end: '2020-01-01T00:00:00.000Z',
- });
- });
-
- it('converts to a fixed range without an explicit direction, defaulting to `before`', () => {
- const defaultDirectionRange = _.omit(mockAnchoredRange, 'direction');
-
- expect(convertToFixedRange(defaultDirectionRange)).toEqual({
- start: '2019-12-31T23:58:00.000Z',
- end: '2020-01-01T00:00:00.000Z',
- });
- });
-
- it('throws an error when the anchor cannot be parsed', () => {
- const wrongAnchor = { ...mockAnchoredRange, anchor: 'I_CANNOT_BE_PARSED' };
-
- expect(() => convertToFixedRange(wrongAnchor)).toThrow();
- });
- });
-
- describe('when a rolling range is input', () => {
- it('converts to a fixed range', () => {
- expect(convertToFixedRange(mockRollingRange)).toEqual({
- start: '2020-01-23T20:00:00.000Z',
- end: '2020-01-23T20:02:00.000Z',
- });
- });
-
- it('converts to a fixed range with an implicit `before` direction', () => {
- const noDirection = _.omit(mockRollingRange, 'direction');
-
- expect(convertToFixedRange(noDirection)).toEqual({
- start: '2020-01-23T19:58:00.000Z',
- end: '2020-01-23T20:00:00.000Z',
- });
- });
-
- it('throws an error when the duration is not in the right format', () => {
- const wrongDuration = { ...mockRollingRange, duration: { minutes: 20 } };
-
- expect(() => convertToFixedRange(wrongDuration)).toThrow();
- });
-
- it('throws an error when the anchor is not valid', () => {
- const wrongAnchor = { ...mockRollingRange, anchor: 'CAN_T_PARSE_THIS' };
-
- expect(() => convertToFixedRange(wrongAnchor)).toThrow();
- });
- });
-
- describe('when an open range is input', () => {
- it('converts to a fixed range with an `after` direction', () => {
- expect(convertToFixedRange(mockOpenRange)).toEqual({
- start: '2020-01-01T00:00:00.000Z',
- end: '2020-01-23T20:00:00.000Z',
- });
- });
-
- it('converts to a fixed range with the explicit `before` direction', () => {
- const beforeOpenRange = { ...mockOpenRange, direction: 'before' };
-
- expect(convertToFixedRange(beforeOpenRange)).toEqual({
- start: '1970-01-01T00:00:00.000Z',
- end: '2020-01-01T00:00:00.000Z',
- });
- });
-
- it('converts to a fixed range with the implicit `before` direction', () => {
- const noDirectionOpenRange = _.omit(mockOpenRange, 'direction');
-
- expect(convertToFixedRange(noDirectionOpenRange)).toEqual({
- start: '1970-01-01T00:00:00.000Z',
- end: '2020-01-01T00:00:00.000Z',
- });
- });
-
- it('throws an error when the anchor cannot be parsed', () => {
- const wrongAnchor = { ...mockOpenRange, anchor: 'CAN_T_PARSE_THIS' };
-
- expect(() => convertToFixedRange(wrongAnchor)).toThrow();
- });
- });
- });
-
- describe('isEqualTimeRanges', () => {
- it('equal only compares relevant properies', () => {
- expect(
- isEqualTimeRanges(
- {
- ...mockFixedRange,
- label: 'A label',
- default: true,
- },
- {
- ...mockFixedRange,
- label: 'Another label',
- default: false,
- anotherKey: 'anotherValue',
- },
- ),
- ).toBe(true);
-
- expect(
- isEqualTimeRanges(
- {
- ...mockAnchoredRange,
- label: 'A label',
- default: true,
- },
- {
- ...mockAnchoredRange,
- anotherKey: 'anotherValue',
- },
- ),
- ).toBe(true);
- });
- });
-
- describe('findTimeRange', () => {
- const timeRanges = [
- {
- label: 'Before 2020',
- anchor: '2020-01-01T00:00:00.000Z',
- },
- {
- label: 'Last 30 minutes',
- duration: { seconds: 60 * 30 },
- },
- {
- label: 'In 2019',
- start: '2019-01-01T00:00:00.000Z',
- end: '2019-12-31T12:59:59.999Z',
- },
- {
- label: 'Next 2 minutes',
- direction: 'after',
- duration: {
- seconds: 60 * 2,
- },
- },
- ];
-
- it('finds a time range', () => {
- const tr0 = {
- anchor: '2020-01-01T00:00:00.000Z',
- };
- expect(findTimeRange(tr0, timeRanges)).toBe(timeRanges[0]);
-
- const tr1 = {
- duration: { seconds: 60 * 30 },
- };
- expect(findTimeRange(tr1, timeRanges)).toBe(timeRanges[1]);
-
- const tr1Direction = {
- direction: 'before',
- duration: {
- seconds: 60 * 30,
- },
- };
- expect(findTimeRange(tr1Direction, timeRanges)).toBe(timeRanges[1]);
-
- const tr2 = {
- someOtherLabel: 'Added arbitrarily',
- start: '2019-01-01T00:00:00.000Z',
- end: '2019-12-31T12:59:59.999Z',
- };
- expect(findTimeRange(tr2, timeRanges)).toBe(timeRanges[2]);
-
- const tr3 = {
- direction: 'after',
- duration: {
- seconds: 60 * 2,
- },
- };
- expect(findTimeRange(tr3, timeRanges)).toBe(timeRanges[3]);
- });
-
- it('doesnot finds a missing time range', () => {
- const nonExistant = {
- direction: 'before',
- duration: {
- seconds: 200,
- },
- };
- expect(findTimeRange(nonExistant, timeRanges)).toBeUndefined();
- });
- });
-
- describe('conversion to/from params', () => {
- const mockFixedParams = {
- start: '2020-01-01T00:00:00.000Z',
- end: '2020-01-31T23:59:00.000Z',
- };
-
- const mockAnchoredParams = {
- anchor: '2020-01-01T00:00:00.000Z',
- direction: 'after',
- duration_seconds: '120',
- };
-
- const mockRollingParams = {
- direction: 'after',
- duration_seconds: '120',
- };
-
- describe('timeRangeToParams', () => {
- it('converts fixed ranges to params', () => {
- expect(timeRangeToParams(mockFixedRange)).toEqual(mockFixedParams);
- });
-
- it('converts anchored ranges to params', () => {
- expect(timeRangeToParams(mockAnchoredRange)).toEqual(mockAnchoredParams);
- });
-
- it('converts rolling ranges to params', () => {
- expect(timeRangeToParams(mockRollingRange)).toEqual(mockRollingParams);
- });
- });
-
- describe('timeRangeFromParams', () => {
- it('converts fixed ranges from params', () => {
- const params = { ...mockFixedParams, other_param: 'other_value' };
- const expectedRange = _.omit(mockFixedRange, 'label');
-
- expect(timeRangeFromParams(params)).toEqual(expectedRange);
- });
-
- it('converts anchored ranges to params', () => {
- const expectedRange = _.omit(mockRollingRange, 'label');
-
- expect(timeRangeFromParams(mockRollingParams)).toEqual(expectedRange);
- });
-
- it('converts rolling ranges from params', () => {
- const params = { ...mockRollingParams, other_param: 'other_value' };
- const expectedRange = _.omit(mockRollingRange, 'label');
-
- expect(timeRangeFromParams(params)).toEqual(expectedRange);
- });
-
- it('converts rolling ranges from params with a default direction', () => {
- const params = {
- ...mockRollingParams,
- direction: 'before',
- other_param: 'other_value',
- };
- const expectedRange = _.omit(mockRollingRange, 'label', 'direction');
-
- expect(timeRangeFromParams(params)).toEqual(expectedRange);
- });
-
- it('converts to null when for no relevant params', () => {
- const range = {
- useless_param_1: 'value1',
- useless_param_2: 'value2',
- };
-
- expect(timeRangeFromParams(range)).toBe(null);
- });
- });
- });
-});
diff --git a/spec/frontend/lib/utils/secret_detection_spec.js b/spec/frontend/lib/utils/secret_detection_spec.js
index 3213ecf3fe1..761062f0340 100644
--- a/spec/frontend/lib/utils/secret_detection_spec.js
+++ b/spec/frontend/lib/utils/secret_detection_spec.js
@@ -28,6 +28,7 @@ describe('containsSensitiveToken', () => {
'token: feed_token=ABCDEFGHIJKLMNOPQRSTUVWXYZ',
'token: feed_token=glft-ABCDEFGHIJKLMNOPQRSTUVWXYZ',
'token: feed_token=glft-a8cc74ccb0de004d09a968705ba49099229b288b3de43f26c473a9d8d7fb7693-1234',
+ 'token: gloas-a8cc74ccb0de004d09a968705ba49099229b288b3de43f26c473a9d8d7fb7693',
'https://example.com/feed?feed_token=123456789_abcdefghij',
'glpat-1234567890 and feed_token=ABCDEFGHIJKLMNOPQRSTUVWXYZ',
];
diff --git a/spec/frontend/lib/utils/text_utility_spec.js b/spec/frontend/lib/utils/text_utility_spec.js
index b7d6bbd3991..6821ed56857 100644
--- a/spec/frontend/lib/utils/text_utility_spec.js
+++ b/spec/frontend/lib/utils/text_utility_spec.js
@@ -221,23 +221,6 @@ describe('text_utility', () => {
});
});
- describe('getFirstCharacterCapitalized', () => {
- it('returns the first character capitalized, if first character is alphabetic', () => {
- expect(textUtils.getFirstCharacterCapitalized('loremIpsumDolar')).toEqual('L');
- expect(textUtils.getFirstCharacterCapitalized('Sit amit !')).toEqual('S');
- });
-
- it('returns the first character, if first character is non-alphabetic', () => {
- expect(textUtils.getFirstCharacterCapitalized(' lorem')).toEqual(' ');
- expect(textUtils.getFirstCharacterCapitalized('%#!')).toEqual('%');
- });
-
- it('returns an empty string, if string is falsey', () => {
- expect(textUtils.getFirstCharacterCapitalized('')).toEqual('');
- expect(textUtils.getFirstCharacterCapitalized(null)).toEqual('');
- });
- });
-
describe('slugifyWithUnderscore', () => {
it('should replaces whitespaces with underscore and convert to lower case', () => {
expect(textUtils.slugifyWithUnderscore('My Input String')).toEqual('my_input_string');
diff --git a/spec/frontend/lib/utils/url_utility_spec.js b/spec/frontend/lib/utils/url_utility_spec.js
index 450eeefd898..ecd2d7f888d 100644
--- a/spec/frontend/lib/utils/url_utility_spec.js
+++ b/spec/frontend/lib/utils/url_utility_spec.js
@@ -1,11 +1,8 @@
-import * as Sentry from '@sentry/browser';
import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
import * as urlUtils from '~/lib/utils/url_utility';
import { safeUrls, unsafeUrls } from './mock_data';
-jest.mock('@sentry/browser');
-
const shas = {
valid: [
'ad9be38573f9ee4c4daec22673478c2dd1d81cd8',
@@ -434,11 +431,10 @@ describe('URL utility', () => {
it('does not navigate to unsafe urls', () => {
// eslint-disable-next-line no-script-url
const url = 'javascript:alert(document.domain)';
- urlUtils.visitUrl(url);
- expect(Sentry.captureException).toHaveBeenCalledWith(
- new RangeError(`Only http and https protocols are allowed: ${url}`),
- );
+ expect(() => {
+ urlUtils.visitUrl(url);
+ }).toThrow(new RangeError(`Only http and https protocols are allowed: ${url}`));
});
it('navigates to a page', () => {
diff --git a/spec/frontend/members/components/table/__snapshots__/member_activity_spec.js.snap b/spec/frontend/members/components/table/__snapshots__/member_activity_spec.js.snap
index a0d9bae8a0b..3ad02d3851d 100644
--- a/spec/frontend/members/components/table/__snapshots__/member_activity_spec.js.snap
+++ b/spec/frontend/members/components/table/__snapshots__/member_activity_spec.js.snap
@@ -2,21 +2,14 @@
exports[`MemberActivity with a member that does not have all of the fields renders \`User created\` field 1`] = `
<div>
- <!---->
-
<div>
<strong>
Access granted:
</strong>
-
<span>
-
- Aug 06, 2020
-
+ Aug 06, 2020
</span>
</div>
-
- <!---->
</div>
`;
@@ -26,35 +19,24 @@ exports[`MemberActivity with a member that has all fields renders \`User created
<strong>
User created:
</strong>
-
<span>
-
- Mar 10, 2022
-
+ Mar 10, 2022
</span>
</div>
-
<div>
<strong>
Access granted:
</strong>
-
<span>
-
- Jul 17, 2020
-
+ Jul 17, 2020
</span>
</div>
-
<div>
<strong>
Last activity:
</strong>
-
<span>
-
- Mar 15, 2022
-
+ Mar 15, 2022
</span>
</div>
</div>
diff --git a/spec/frontend/merge_request_tabs_spec.js b/spec/frontend/merge_request_tabs_spec.js
index 3b8c9dd3bf3..6c4ea7063ad 100644
--- a/spec/frontend/merge_request_tabs_spec.js
+++ b/spec/frontend/merge_request_tabs_spec.js
@@ -281,6 +281,14 @@ describe('MergeRequestTabs', () => {
testContext.class.expandViewContainer();
expect($('.content-wrapper .container-limited')).toHaveLength(0);
});
+
+ it('adds the diff-specific width-limiter', () => {
+ testContext.class.expandViewContainer();
+
+ expect(testContext.class.contentWrapper.classList.contains('diffs-container-limited')).toBe(
+ true,
+ );
+ });
});
describe('resetViewContainer', () => {
@@ -302,6 +310,14 @@ describe('MergeRequestTabs', () => {
expect($('.content-wrapper .container-limited')).toHaveLength(1);
});
+
+ it('removes the diff-specific width-limiter', () => {
+ testContext.class.resetViewContainer();
+
+ expect(testContext.class.contentWrapper.classList.contains('diffs-container-limited')).toBe(
+ false,
+ );
+ });
});
describe('tabShown', () => {
diff --git a/spec/frontend/merge_requests/components/compare_app_spec.js b/spec/frontend/merge_requests/components/compare_app_spec.js
index ba129363ffd..887f79f9fad 100644
--- a/spec/frontend/merge_requests/components/compare_app_spec.js
+++ b/spec/frontend/merge_requests/components/compare_app_spec.js
@@ -1,10 +1,14 @@
-import { shallowMount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import waitForPromises from 'helpers/wait_for_promises';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import axios from '~/lib/utils/axios_utils';
import CompareApp from '~/merge_requests/components/compare_app.vue';
let wrapper;
+let mock;
function factory(provideData = {}) {
- wrapper = shallowMount(CompareApp, {
+ wrapper = shallowMountExtended(CompareApp, {
provide: {
inputs: {
project: {
@@ -16,6 +20,7 @@ function factory(provideData = {}) {
name: 'branch',
},
},
+ branchCommitPath: '/commit',
toggleClass: {
project: 'project',
branch: 'branch',
@@ -29,7 +34,18 @@ function factory(provideData = {}) {
});
}
+const findCommitBox = () => wrapper.findByTestId('commit-box');
+
describe('Merge requests compare app component', () => {
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
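+ // Stub the endpoint behind the injected branchCommitPath ('/commit').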
+ mock.onGet('/commit').reply(200, 'commit content');
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
it('shows commit box when selected branch is empty', () => {
factory({
currentBranch: {
@@ -38,9 +54,41 @@ describe('Merge requests compare app component', () => {
},
});
- const commitBox = wrapper.find('[data-testid="commit-box"]');
+ const commitBox = findCommitBox();
expect(commitBox.exists()).toBe(true);
expect(commitBox.text()).toBe('Select a branch to compare');
});
+
+ it('emits select-branch on selected event', () => {
+ factory({
+ currentBranch: {
+ text: '',
+ value: '',
+ },
+ });
+
+ wrapper.findByTestId('compare-dropdown').vm.$emit('selected', { value: 'main' });
+
+ expect(wrapper.emitted('select-branch')).toEqual([['main']]);
+ });
+
+ describe('currentBranch watcher', () => {
+ it('changes selected value', async () => {
+ factory({
+ currentBranch: {
+ text: '',
+ value: '',
+ },
+ });
+
+ expect(findCommitBox().text()).toBe('Select a branch to compare');
+
+ wrapper.setProps({ currentBranch: { text: 'main', value: 'main ' } });
+
+ await waitForPromises();
+
+ expect(findCommitBox().text()).toBe('commit content');
+ });
+ });
});
diff --git a/spec/frontend/merge_requests/components/header_metadata_spec.js b/spec/frontend/merge_requests/components/header_metadata_spec.js
new file mode 100644
index 00000000000..2823b4b9d97
--- /dev/null
+++ b/spec/frontend/merge_requests/components/header_metadata_spec.js
@@ -0,0 +1,93 @@
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import HeaderMetadata from '~/merge_requests/components/header_metadata.vue';
+import mrStore from '~/mr_notes/stores';
+import ConfidentialityBadge from '~/vue_shared/components/confidentiality_badge.vue';
+
+jest.mock('~/mr_notes/stores', () => jest.requireActual('helpers/mocks/mr_notes/stores'));
+
+describe('HeaderMetadata component', () => {
+ let wrapper;
+
+ const findConfidentialIcon = () => wrapper.findComponent(ConfidentialityBadge);
+ const findLockedIcon = () => wrapper.findByTestId('locked');
+ const findHiddenIcon = () => wrapper.findByTestId('hidden');
+
+ const renderTestMessage = (renders) => (renders ? 'renders' : 'does not render');
+
+ const createComponent = ({ store, provide }) => {
+ wrapper = shallowMountExtended(HeaderMetadata, {
+ mocks: {
+ $store: store,
+ },
+ provide,
+ directives: {
+ GlTooltip: createMockDirective('gl-tooltip'),
+ },
+ });
+ };
+
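+ // Cover every combination of the locked, confidential, and hidden flags.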
+ describe.each`
+ lockStatus | confidentialStatus | hiddenStatus
+ ${true} | ${true} | ${false}
+ ${true} | ${false} | ${false}
+ ${false} | ${true} | ${false}
+ ${false} | ${false} | ${false}
+ ${true} | ${true} | ${true}
+ ${true} | ${false} | ${true}
+ ${false} | ${true} | ${true}
+ ${false} | ${false} | ${true}
+ `(
+ `when locked=$lockStatus, confidential=$confidentialStatus, and hidden=$hiddenStatus`,
+ ({ lockStatus, confidentialStatus, hiddenStatus }) => {
+ const store = mrStore;
+
+ beforeEach(() => {
+ store.getters.getNoteableData = {};
+ store.getters.getNoteableData.confidential = confidentialStatus;
+ store.getters.getNoteableData.discussion_locked = lockStatus;
+ store.getters.getNoteableData.targetType = 'merge_request';
+
+ createComponent({ store, provide: { hidden: hiddenStatus } });
+ });
+
+ it(`${renderTestMessage(lockStatus)} the locked icon`, () => {
+ const lockedIcon = findLockedIcon();
+
+ expect(lockedIcon.exists()).toBe(lockStatus);
+
+ if (lockStatus) {
+ expect(lockedIcon.attributes('title')).toBe(
+ `This merge request is locked. Only project members can comment.`,
+ );
+ expect(getBinding(lockedIcon.element, 'gl-tooltip')).not.toBeUndefined();
+ }
+ });
+
+ it(`${renderTestMessage(confidentialStatus)} the confidential icon`, () => {
+ const confidentialIcon = findConfidentialIcon();
+ expect(confidentialIcon.exists()).toBe(confidentialStatus);
+
+ if (confidentialStatus && !hiddenStatus) {
+ expect(confidentialIcon.props()).toMatchObject({
+ workspaceType: 'project',
+ issuableType: 'issue',
+ });
+ }
+ });
+
+ it(`${renderTestMessage(hiddenStatus)} the hidden icon`, () => {
+ const hiddenIcon = findHiddenIcon();
+
+ expect(hiddenIcon.exists()).toBe(hiddenStatus);
+
+ if (hiddenStatus) {
+ expect(hiddenIcon.attributes('title')).toBe(
+ `This merge request is hidden because its author has been banned`,
+ );
+ expect(getBinding(hiddenIcon.element, 'gl-tooltip')).not.toBeUndefined();
+ }
+ });
+ },
+ );
+});
diff --git a/spec/frontend/nav/components/top_nav_new_dropdown_spec.js b/spec/frontend/nav/components/top_nav_new_dropdown_spec.js
index 2cd65307b0b..432ee5e9ecd 100644
--- a/spec/frontend/nav/components/top_nav_new_dropdown_spec.js
+++ b/spec/frontend/nav/components/top_nav_new_dropdown_spec.js
@@ -57,7 +57,8 @@ describe('~/nav/components/top_nav_menu_sections.vue', () => {
if (type === 'divider') {
return { type };
- } else if (type === 'header') {
+ }
+ if (type === 'header') {
return { type, text: child.text() };
}
diff --git a/spec/frontend/notes/components/__snapshots__/notes_app_spec.js.snap b/spec/frontend/notes/components/__snapshots__/notes_app_spec.js.snap
index a4611149432..277ff2aa441 100644
--- a/spec/frontend/notes/components/__snapshots__/notes_app_spec.js.snap
+++ b/spec/frontend/notes/components/__snapshots__/notes_app_spec.js.snap
@@ -1,17 +1,37 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`note_app when sort direction is asc shows skeleton notes after the loaded discussions 1`] = `
-"<ul id=\\"notes-list\\" class=\\"notes main-notes-list timeline\\">
- <noteable-discussion-stub discussion=\\"[object Object]\\" renderdifffile=\\"true\\" helppagepath=\\"\\" isoverviewtab=\\"true\\" shouldscrolltonote=\\"true\\"></noteable-discussion-stub>
- <skeleton-loading-container-stub class=\\"note-skeleton\\"></skeleton-loading-container-stub>
- <!---->
-</ul>"
+<ul
+ class="main-notes-list notes timeline"
+ id="reference-0"
+>
+ <noteable-discussion-stub
+ discussion="[object Object]"
+ helppagepath=""
+ isoverviewtab="true"
+ renderdifffile="true"
+ shouldscrolltonote="true"
+ />
+ <skeleton-loading-container-stub
+ class="note-skeleton"
+ />
+</ul>
`;
exports[`note_app when sort direction is desc shows skeleton notes before the loaded discussions 1`] = `
-"<ul id=\\"notes-list\\" class=\\"notes main-notes-list timeline\\">
- <skeleton-loading-container-stub class=\\"note-skeleton\\"></skeleton-loading-container-stub>
- <noteable-discussion-stub discussion=\\"[object Object]\\" renderdifffile=\\"true\\" helppagepath=\\"\\" isoverviewtab=\\"true\\" shouldscrolltonote=\\"true\\"></noteable-discussion-stub>
- <!---->
-</ul>"
+<ul
+ class="main-notes-list notes timeline"
+ id="reference-0"
+>
+ <skeleton-loading-container-stub
+ class="note-skeleton"
+ />
+ <noteable-discussion-stub
+ discussion="[object Object]"
+ helppagepath=""
+ isoverviewtab="true"
+ renderdifffile="true"
+ shouldscrolltonote="true"
+ />
+</ul>
`;
diff --git a/spec/frontend/notes/components/comment_form_spec.js b/spec/frontend/notes/components/comment_form_spec.js
index 0728646246d..9b1678c0a8a 100644
--- a/spec/frontend/notes/components/comment_form_spec.js
+++ b/spec/frontend/notes/components/comment_form_spec.js
@@ -153,21 +153,18 @@ describe('issue_comment_form component', () => {
mountComponent({ mountFunction: mount, initialData: { note: 'hello world' } });
jest.spyOn(wrapper.vm, 'saveNote').mockResolvedValue();
- jest.spyOn(wrapper.vm, 'stopPolling');
findCloseReopenButton().trigger('click');
expect(wrapper.vm.isSubmitting).toBe(true);
expect(wrapper.vm.note).toBe('');
expect(wrapper.vm.saveNote).toHaveBeenCalled();
- expect(wrapper.vm.stopPolling).toHaveBeenCalled();
});
it('tracks event', () => {
mountComponent({ mountFunction: mount, initialData: { note: 'hello world' } });
jest.spyOn(wrapper.vm, 'saveNote').mockResolvedValue();
- jest.spyOn(wrapper.vm, 'stopPolling');
findCloseReopenButton().trigger('click');
@@ -302,7 +299,6 @@ describe('issue_comment_form component', () => {
const saveNotePromise = Promise.resolve();
jest.spyOn(wrapper.vm, 'saveNote').mockReturnValue(saveNotePromise);
- jest.spyOn(wrapper.vm, 'stopPolling');
const actionButton = findCloseReopenButton();
@@ -351,7 +347,6 @@ describe('issue_comment_form component', () => {
it('should make textarea disabled while requesting', async () => {
mountComponent({ mountFunction: mount });
- jest.spyOn(wrapper.vm, 'stopPolling');
jest.spyOn(wrapper.vm, 'saveNote').mockResolvedValue();
findMarkdownEditor().vm.$emit('input', 'hello world');
diff --git a/spec/frontend/notes/components/notes_app_spec.js b/spec/frontend/notes/components/notes_app_spec.js
index caf47febedd..d49ab0d71db 100644
--- a/spec/frontend/notes/components/notes_app_spec.js
+++ b/spec/frontend/notes/components/notes_app_spec.js
@@ -288,7 +288,6 @@ describe('note_app', () => {
wrapper.vm.$store.hotUpdate({
actions: {
toggleAward: toggleAwardAction,
- stopPolling() {},
},
});
diff --git a/spec/frontend/notes/stores/actions_spec.js b/spec/frontend/notes/stores/actions_spec.js
index 0205f606297..104c297b44e 100644
--- a/spec/frontend/notes/stores/actions_spec.js
+++ b/spec/frontend/notes/stores/actions_spec.js
@@ -8,11 +8,7 @@ import { createAlert } from '~/alert';
import toast from '~/vue_shared/plugins/global_toast';
import { EVENT_ISSUABLE_VUE_APP_CHANGE } from '~/issuable/constants';
import axios from '~/lib/utils/axios_utils';
-import {
- HTTP_STATUS_INTERNAL_SERVER_ERROR,
- HTTP_STATUS_OK,
- HTTP_STATUS_SERVICE_UNAVAILABLE,
-} from '~/lib/utils/http_status';
+import { HTTP_STATUS_OK, HTTP_STATUS_SERVICE_UNAVAILABLE } from '~/lib/utils/http_status';
import * as notesConstants from '~/notes/constants';
import createStore from '~/notes/stores';
import * as actions from '~/notes/stores/actions';
@@ -24,7 +20,6 @@ import updateMergeRequestLockMutation from '~/sidebar/queries/update_merge_reque
import promoteTimelineEvent from '~/notes/graphql/promote_timeline_event.mutation.graphql';
import mrWidgetEventHub from '~/vue_merge_request_widget/event_hub';
import notesEventHub from '~/notes/event_hub';
-import waitForPromises from 'helpers/wait_for_promises';
import { resetStore } from '../helpers';
import {
discussionMock,
@@ -262,13 +257,7 @@ describe('Actions Notes Store', () => {
});
describe('initPolling', () => {
- afterEach(() => {
- gon.features = {};
- });
-
it('creates the Action Cable subscription', () => {
- gon.features = { actionCableNotes: true };
-
store.dispatch('setNotesData', notesDataMock);
store.dispatch('initPolling');
@@ -290,8 +279,6 @@ describe('Actions Notes Store', () => {
const response = { notes: [], last_fetched_at: '123456' };
const successMock = () =>
axiosMock.onGet(notesDataMock.notesPath).reply(HTTP_STATUS_OK, response);
- const failureMock = () =>
- axiosMock.onGet(notesDataMock.notesPath).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
beforeEach(() => {
return store.dispatch('setNotesData', notesDataMock);
@@ -304,153 +291,6 @@ describe('Actions Notes Store', () => {
expect(store.state.lastFetchedAt).toBe('123456');
});
-
- it('shows an alert when fetching fails', async () => {
- failureMock();
-
- await store.dispatch('fetchUpdatedNotes');
-
- expect(createAlert).toHaveBeenCalledTimes(1);
- });
- });
-
- describe('poll', () => {
- const pollInterval = 6000;
- const pollResponse = { notes: [], last_fetched_at: '123456' };
- const pollHeaders = { 'poll-interval': `${pollInterval}` };
- const successMock = () =>
- axiosMock.onGet(notesDataMock.notesPath).reply(HTTP_STATUS_OK, pollResponse, pollHeaders);
- const failureMock = () =>
- axiosMock.onGet(notesDataMock.notesPath).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
- const advanceAndRAF = (time) => {
- if (time) {
- jest.advanceTimersByTime(time);
- }
-
- return waitForPromises();
- };
- const advanceXMoreIntervals = (number) => {
- const timeoutLength = pollInterval * number;
-
- return advanceAndRAF(timeoutLength);
- };
- const startPolling = async () => {
- await store.dispatch('poll');
- await advanceAndRAF(2);
- };
- const cleanUp = () => {
- jest.clearAllTimers();
-
- return store.dispatch('stopPolling');
- };
-
- beforeEach(() => {
- return store.dispatch('setNotesData', notesDataMock);
- });
-
- afterEach(() => {
- return cleanUp();
- });
-
- it('calls service with last fetched state', async () => {
- successMock();
-
- await startPolling();
-
- expect(store.state.lastFetchedAt).toBe('123456');
-
- await advanceXMoreIntervals(1);
-
- expect(axiosMock.history.get).toHaveLength(2);
- expect(axiosMock.history.get[1].headers).toMatchObject({
- 'X-Last-Fetched-At': '123456',
- });
- });
-
- describe('polling side effects', () => {
- it('retries twice', async () => {
- failureMock();
-
- await startPolling();
-
- // This is the first request, not a retry
- expect(axiosMock.history.get).toHaveLength(1);
-
- await advanceXMoreIntervals(1);
-
- // Retry #1
- expect(axiosMock.history.get).toHaveLength(2);
-
- await advanceXMoreIntervals(1);
-
- // Retry #2
- expect(axiosMock.history.get).toHaveLength(3);
-
- await advanceXMoreIntervals(10);
-
- // There are no more retries
- expect(axiosMock.history.get).toHaveLength(3);
- });
-
- it('shows the error display on the second failure', async () => {
- failureMock();
-
- await startPolling();
-
- expect(axiosMock.history.get).toHaveLength(1);
- expect(createAlert).not.toHaveBeenCalled();
-
- await advanceXMoreIntervals(1);
-
- expect(axiosMock.history.get).toHaveLength(2);
- expect(createAlert).toHaveBeenCalled();
- expect(createAlert).toHaveBeenCalledTimes(1);
- });
-
- it('resets the failure counter on success', async () => {
- // We can't get access to the actual counter in the polling closure.
- // So we can infer that it's reset by ensuring that the error is only
- // shown when we cause two failures in a row - no successes between
-
- axiosMock
- .onGet(notesDataMock.notesPath)
- .replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR) // cause one error
- .onGet(notesDataMock.notesPath)
- .replyOnce(HTTP_STATUS_OK, pollResponse, pollHeaders) // then a success
- .onGet(notesDataMock.notesPath)
- .reply(HTTP_STATUS_INTERNAL_SERVER_ERROR); // and then more errors
-
- await startPolling(); // Failure #1
- await advanceXMoreIntervals(1); // Success #1
- await advanceXMoreIntervals(1); // Failure #2
-
- // That was the first failure AFTER a success, so we should NOT see the error displayed
- expect(createAlert).not.toHaveBeenCalled();
-
- // Now we'll allow another failure
- await advanceXMoreIntervals(1); // Failure #3
-
- // Since this is the second failure in a row, the error should happen
- expect(createAlert).toHaveBeenCalledTimes(1);
- });
-
- it('hides the error display if it exists on success', async () => {
- failureMock();
-
- await startPolling();
- await advanceXMoreIntervals(2);
-
- // After two errors, the error should be displayed
- expect(createAlert).toHaveBeenCalledTimes(1);
-
- axiosMock.reset();
- successMock();
-
- await advanceXMoreIntervals(1);
-
- expect(mockAlertDismiss).toHaveBeenCalledTimes(1);
- });
- });
});
describe('setNotesFetchedState', () => {
@@ -996,11 +836,7 @@ describe('Actions Notes Store', () => {
[mutationTypes.SET_RESOLVING_DISCUSSION, false],
]);
- expect(dispatch.mock.calls).toEqual([
- ['stopPolling'],
- ['resolveDiscussion', { discussionId }],
- ['restartPolling'],
- ]);
+ expect(dispatch.mock.calls).toEqual([['resolveDiscussion', { discussionId }]]);
expect(createAlert).not.toHaveBeenCalled();
});
});
@@ -1015,7 +851,6 @@ describe('Actions Notes Store', () => {
[mutationTypes.SET_RESOLVING_DISCUSSION, true],
[mutationTypes.SET_RESOLVING_DISCUSSION, false],
]);
- expect(dispatch.mock.calls).toEqual([['stopPolling'], ['restartPolling']]);
expect(createAlert).toHaveBeenCalledWith({
message: TEST_ERROR_MESSAGE,
parent: flashContainer,
@@ -1033,7 +868,6 @@ describe('Actions Notes Store', () => {
[mutationTypes.SET_RESOLVING_DISCUSSION, true],
[mutationTypes.SET_RESOLVING_DISCUSSION, false],
]);
- expect(dispatch.mock.calls).toEqual([['stopPolling'], ['restartPolling']]);
expect(createAlert).toHaveBeenCalledWith({
message: 'Something went wrong while applying the suggestion. Please try again.',
parent: flashContainer,
@@ -1081,10 +915,8 @@ describe('Actions Notes Store', () => {
]);
expect(dispatch.mock.calls).toEqual([
- ['stopPolling'],
['resolveDiscussion', { discussionId: discussionIds[0] }],
['resolveDiscussion', { discussionId: discussionIds[1] }],
- ['restartPolling'],
]);
expect(createAlert).not.toHaveBeenCalled();
@@ -1104,7 +936,6 @@ describe('Actions Notes Store', () => {
[mutationTypes.SET_RESOLVING_DISCUSSION, false],
]);
- expect(dispatch.mock.calls).toEqual([['stopPolling'], ['restartPolling']]);
expect(createAlert).toHaveBeenCalledWith({
message: TEST_ERROR_MESSAGE,
parent: flashContainer,
@@ -1125,7 +956,6 @@ describe('Actions Notes Store', () => {
[mutationTypes.SET_RESOLVING_DISCUSSION, false],
]);
- expect(dispatch.mock.calls).toEqual([['stopPolling'], ['restartPolling']]);
expect(createAlert).toHaveBeenCalledWith({
message:
'Something went wrong while applying the batch of suggestions. Please try again.',
diff --git a/spec/frontend/observability/client_spec.js b/spec/frontend/observability/client_spec.js
index 10fdc8c33c4..056175eac07 100644
--- a/spec/frontend/observability/client_spec.js
+++ b/spec/frontend/observability/client_spec.js
@@ -1,15 +1,19 @@
import MockAdapter from 'axios-mock-adapter';
+import * as Sentry from '@sentry/browser';
import { buildClient } from '~/observability/client';
import axios from '~/lib/utils/axios_utils';
jest.mock('~/lib/utils/axios_utils');
+jest.mock('@sentry/browser');
describe('buildClient', () => {
let client;
let axiosMock;
const tracingUrl = 'https://example.com/tracing';
- const EXPECTED_ERROR_MESSAGE = 'traces are missing/invalid in the response';
+ const provisioningUrl = 'https://example.com/provisioning';
+
+ const FETCHING_TRACES_ERROR = 'traces are missing/invalid in the response';
beforeEach(() => {
axiosMock = new MockAdapter(axios);
@@ -17,7 +21,7 @@ describe('buildClient', () => {
client = buildClient({
tracingUrl,
- provisioningUrl: 'https://example.com/provisioning',
+ provisioningUrl,
});
});
@@ -25,10 +29,85 @@ describe('buildClient', () => {
axiosMock.restore();
});
+ describe('isTracingEnabled', () => {
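+ // isTracingEnabled probes the provisioning endpoint: a successful response means
+ // tracing is provisioned, a 404 means it is not, and any other failure is rethrown
+ // and reported to Sentry.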
+ it('returns true if the request succeeds', async () => {
+ axiosMock.onGet(provisioningUrl).reply(200, {
+ status: 'ready',
+ });
+
+ const enabled = await client.isTracingEnabled();
+
+ expect(enabled).toBe(true);
+ });
+
+ it('returns false if response is 404', async () => {
+ axiosMock.onGet(provisioningUrl).reply(404);
+
+ const enabled = await client.isTracingEnabled();
+
+ expect(enabled).toBe(false);
+ });
+
+ // We currently ignore the 'status' payload and only check that the request was successful.
+ // We might improve this as part of https://gitlab.com/gitlab-org/opstrace/opstrace/-/issues/2315
+ it('returns true for any status', async () => {
+ axiosMock.onGet(provisioningUrl).reply(200, {
+ status: 'not ready',
+ });
+
+ const enabled = await client.isTracingEnabled();
+
+ expect(enabled).toBe(true);
+ });
+
+ it('throws in case of any non-404 error', async () => {
+ axiosMock.onGet(provisioningUrl).reply(500);
+
+ const e = 'Request failed with status code 500';
+ await expect(client.isTracingEnabled()).rejects.toThrow(e);
+ expect(Sentry.captureException).toHaveBeenCalledWith(new Error(e));
+ });
+
+ it('throws in case of unexpected response', async () => {
+ axiosMock.onGet(provisioningUrl).reply(200, {});
+
+ const e = 'Failed to check provisioning';
+ await expect(client.isTracingEnabled()).rejects.toThrow(e);
+ expect(Sentry.captureException).toHaveBeenCalledWith(new Error(e));
+ });
+ });
+
+ describe('enableTraces', () => {
+ it('makes a PUT request to the provisioning URL', async () => {
+ let putConfig;
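+ // Capture the request config from the mock adapter so the assertion below can
+ // verify the PUT request is sent with credentials.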
+ axiosMock.onPut(provisioningUrl).reply((config) => {
+ putConfig = config;
+ return [200];
+ });
+
+ await client.enableTraces();
+
+ expect(putConfig.withCredentials).toBe(true);
+ });
+
+ it('reports an error if the request fails', async () => {
+ axiosMock.onPut(provisioningUrl).reply(401);
+
+ const e = 'Request failed with status code 401';
+
+ await expect(client.enableTraces()).rejects.toThrow(e);
+ expect(Sentry.captureException).toHaveBeenCalledWith(new Error(e));
+ });
+ });
+
describe('fetchTrace', () => {
it('fetches the trace from the tracing URL', async () => {
const mockTraces = [
- { trace_id: 'trace-1', spans: [{ duration_nano: 1000 }, { duration_nano: 2000 }] },
+ {
+ trace_id: 'trace-1',
+ duration_nano: 3000,
+ spans: [{ duration_nano: 1000 }, { duration_nano: 2000 }],
+ },
];
axiosMock.onGet(tracingUrl).reply(200, {
@@ -42,34 +121,37 @@ describe('buildClient', () => {
withCredentials: true,
params: { trace_id: 'trace-1' },
});
- expect(result).toEqual({
- ...mockTraces[0],
- duration: 1,
- });
+ expect(result).toEqual(mockTraces[0]);
});
it('rejects if trace id is missing', () => {
return expect(client.fetchTrace()).rejects.toThrow('traceId is required.');
});
- it('rejects if traces are empty', () => {
+ it('rejects if traces are empty', async () => {
axiosMock.onGet(tracingUrl).reply(200, { traces: [] });
- return expect(client.fetchTrace('trace-1')).rejects.toThrow(EXPECTED_ERROR_MESSAGE);
+ await expect(client.fetchTrace('trace-1')).rejects.toThrow(FETCHING_TRACES_ERROR);
+ expect(Sentry.captureException).toHaveBeenCalledWith(new Error(FETCHING_TRACES_ERROR));
});
- it('rejects if traces are invalid', () => {
+ it('rejects if traces are invalid', async () => {
axiosMock.onGet(tracingUrl).reply(200, { traces: 'invalid' });
- return expect(client.fetchTraces()).rejects.toThrow(EXPECTED_ERROR_MESSAGE);
+ await expect(client.fetchTraces()).rejects.toThrow(FETCHING_TRACES_ERROR);
+ expect(Sentry.captureException).toHaveBeenCalledWith(new Error(FETCHING_TRACES_ERROR));
});
});
describe('fetchTraces', () => {
it('fetches traces from the tracing URL', async () => {
const mockTraces = [
- { trace_id: 'trace-1', spans: [{ duration_nano: 1000 }, { duration_nano: 2000 }] },
- { trace_id: 'trace-2', spans: [{ duration_nano: 2000 }] },
+ {
+ trace_id: 'trace-1',
+ duration_nano: 3000,
+ spans: [{ duration_nano: 1000 }, { duration_nano: 2000 }],
+ },
+ { trace_id: 'trace-2', duration_nano: 3000, spans: [{ duration_nano: 2000 }] },
];
axiosMock.onGet(tracingUrl).reply(200, {
@@ -83,28 +165,21 @@ describe('buildClient', () => {
withCredentials: true,
params: new URLSearchParams(),
});
- expect(result).toEqual([
- {
- ...mockTraces[0],
- duration: 1,
- },
- {
- ...mockTraces[1],
- duration: 2,
- },
- ]);
+ expect(result).toEqual(mockTraces);
});
- it('rejects if traces are missing', () => {
+ it('rejects if traces are missing', async () => {
axiosMock.onGet(tracingUrl).reply(200, {});
- return expect(client.fetchTraces()).rejects.toThrow(EXPECTED_ERROR_MESSAGE);
+ await expect(client.fetchTraces()).rejects.toThrow(FETCHING_TRACES_ERROR);
+ expect(Sentry.captureException).toHaveBeenCalledWith(new Error(FETCHING_TRACES_ERROR));
});
- it('rejects if traces are invalid', () => {
+ it('rejects if traces are invalid', async () => {
axiosMock.onGet(tracingUrl).reply(200, { traces: 'invalid' });
- return expect(client.fetchTraces()).rejects.toThrow(EXPECTED_ERROR_MESSAGE);
+ await expect(client.fetchTraces()).rejects.toThrow(FETCHING_TRACES_ERROR);
+ expect(Sentry.captureException).toHaveBeenCalledWith(new Error(FETCHING_TRACES_ERROR));
});
describe('query filter', () => {
diff --git a/spec/frontend/organizations/groups_and_projects/components/app_spec.js b/spec/frontend/organizations/groups_and_projects/components/app_spec.js
index 64182b74e4f..e2301de8607 100644
--- a/spec/frontend/organizations/groups_and_projects/components/app_spec.js
+++ b/spec/frontend/organizations/groups_and_projects/components/app_spec.js
@@ -1,10 +1,9 @@
import { GlCollapsibleListbox, GlSorting, GlSortingItem } from '@gitlab/ui';
import App from '~/organizations/groups_and_projects/components/app.vue';
-import GroupsPage from '~/organizations/groups_and_projects/components/groups_page.vue';
-import ProjectsPage from '~/organizations/groups_and_projects/components/projects_page.vue';
+import GroupsView from '~/organizations/shared/components/groups_view.vue';
+import ProjectsView from '~/organizations/shared/components/projects_view.vue';
+import { RESOURCE_TYPE_GROUPS, RESOURCE_TYPE_PROJECTS } from '~/organizations/constants';
import {
- DISPLAY_QUERY_GROUPS,
- DISPLAY_QUERY_PROJECTS,
SORT_ITEM_CREATED,
SORT_DIRECTION_DESC,
} from '~/organizations/groups_and_projects/constants';
@@ -36,10 +35,10 @@ describe('GroupsAndProjectsApp', () => {
describe.each`
display | expectedComponent | expectedDisplayListboxSelectedProp
- ${null} | ${GroupsPage} | ${DISPLAY_QUERY_GROUPS}
- ${'unsupported_value'} | ${GroupsPage} | ${DISPLAY_QUERY_GROUPS}
- ${DISPLAY_QUERY_GROUPS} | ${GroupsPage} | ${DISPLAY_QUERY_GROUPS}
- ${DISPLAY_QUERY_PROJECTS} | ${ProjectsPage} | ${DISPLAY_QUERY_PROJECTS}
+ ${null} | ${GroupsView} | ${RESOURCE_TYPE_GROUPS}
+ ${'unsupported_value'} | ${GroupsView} | ${RESOURCE_TYPE_GROUPS}
+ ${RESOURCE_TYPE_GROUPS} | ${GroupsView} | ${RESOURCE_TYPE_GROUPS}
+ ${RESOURCE_TYPE_PROJECTS} | ${ProjectsView} | ${RESOURCE_TYPE_PROJECTS}
`(
'when `display` query string is $display',
({ display, expectedComponent, expectedDisplayListboxSelectedProp }) => {
@@ -122,11 +121,11 @@ describe('GroupsAndProjectsApp', () => {
beforeEach(() => {
createComponent();
- findListbox().vm.$emit('select', DISPLAY_QUERY_PROJECTS);
+ findListbox().vm.$emit('select', RESOURCE_TYPE_PROJECTS);
});
it('updates `display` query string', () => {
- expect(routerMock.push).toHaveBeenCalledWith({ query: { display: DISPLAY_QUERY_PROJECTS } });
+ expect(routerMock.push).toHaveBeenCalledWith({ query: { display: RESOURCE_TYPE_PROJECTS } });
});
});
diff --git a/spec/frontend/organizations/groups_and_projects/components/groups_page_spec.js b/spec/frontend/organizations/groups_and_projects/components/groups_page_spec.js
deleted file mode 100644
index 537f8114fcf..00000000000
--- a/spec/frontend/organizations/groups_and_projects/components/groups_page_spec.js
+++ /dev/null
@@ -1,88 +0,0 @@
-import VueApollo from 'vue-apollo';
-import Vue from 'vue';
-import { GlLoadingIcon } from '@gitlab/ui';
-import GroupsPage from '~/organizations/groups_and_projects/components/groups_page.vue';
-import { formatGroups } from '~/organizations/groups_and_projects/utils';
-import resolvers from '~/organizations/groups_and_projects/graphql/resolvers';
-import GroupsList from '~/vue_shared/components/groups_list/groups_list.vue';
-import { createAlert } from '~/alert';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import createMockApollo from 'helpers/mock_apollo_helper';
-import waitForPromises from 'helpers/wait_for_promises';
-import { organizationGroups } from '../mock_data';
-
-jest.mock('~/alert');
-
-Vue.use(VueApollo);
-jest.useFakeTimers();
-
-describe('GroupsPage', () => {
- let wrapper;
- let mockApollo;
-
- const createComponent = ({ mockResolvers = resolvers } = {}) => {
- mockApollo = createMockApollo([], mockResolvers);
-
- wrapper = shallowMountExtended(GroupsPage, { apolloProvider: mockApollo });
- };
-
- afterEach(() => {
- mockApollo = null;
- });
-
- describe('when API call is loading', () => {
- beforeEach(() => {
- const mockResolvers = {
- Query: {
- organization: jest.fn().mockReturnValueOnce(new Promise(() => {})),
- },
- };
-
- createComponent({ mockResolvers });
- });
-
- it('renders loading icon', () => {
- expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
- });
- });
-
- describe('when API call is successful', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('renders `GroupsList` component and passes correct props', async () => {
- jest.runAllTimers();
- await waitForPromises();
-
- expect(wrapper.findComponent(GroupsList).props()).toEqual({
- groups: formatGroups(organizationGroups.nodes),
- showGroupIcon: true,
- });
- });
- });
-
- describe('when API call is not successful', () => {
- const error = new Error();
-
- beforeEach(() => {
- const mockResolvers = {
- Query: {
- organization: jest.fn().mockRejectedValueOnce(error),
- },
- };
-
- createComponent({ mockResolvers });
- });
-
- it('displays error alert', async () => {
- await waitForPromises();
-
- expect(createAlert).toHaveBeenCalledWith({
- message: GroupsPage.i18n.errorMessage,
- error,
- captureError: true,
- });
- });
- });
-});
diff --git a/spec/frontend/organizations/groups_and_projects/components/projects_page_spec.js b/spec/frontend/organizations/groups_and_projects/components/projects_page_spec.js
deleted file mode 100644
index 7cadcab5021..00000000000
--- a/spec/frontend/organizations/groups_and_projects/components/projects_page_spec.js
+++ /dev/null
@@ -1,88 +0,0 @@
-import VueApollo from 'vue-apollo';
-import Vue from 'vue';
-import { GlLoadingIcon } from '@gitlab/ui';
-import ProjectsPage from '~/organizations/groups_and_projects/components/projects_page.vue';
-import { formatProjects } from '~/organizations/groups_and_projects/utils';
-import resolvers from '~/organizations/groups_and_projects/graphql/resolvers';
-import ProjectsList from '~/vue_shared/components/projects_list/projects_list.vue';
-import { createAlert } from '~/alert';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import createMockApollo from 'helpers/mock_apollo_helper';
-import waitForPromises from 'helpers/wait_for_promises';
-import { organizationProjects } from '../mock_data';
-
-jest.mock('~/alert');
-
-Vue.use(VueApollo);
-jest.useFakeTimers();
-
-describe('ProjectsPage', () => {
- let wrapper;
- let mockApollo;
-
- const createComponent = ({ mockResolvers = resolvers } = {}) => {
- mockApollo = createMockApollo([], mockResolvers);
-
- wrapper = shallowMountExtended(ProjectsPage, { apolloProvider: mockApollo });
- };
-
- afterEach(() => {
- mockApollo = null;
- });
-
- describe('when API call is loading', () => {
- beforeEach(() => {
- const mockResolvers = {
- Query: {
- organization: jest.fn().mockReturnValueOnce(new Promise(() => {})),
- },
- };
-
- createComponent({ mockResolvers });
- });
-
- it('renders loading icon', () => {
- expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
- });
- });
-
- describe('when API call is successful', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('renders `ProjectsList` component and passes correct props', async () => {
- jest.runAllTimers();
- await waitForPromises();
-
- expect(wrapper.findComponent(ProjectsList).props()).toEqual({
- projects: formatProjects(organizationProjects.nodes),
- showProjectIcon: true,
- });
- });
- });
-
- describe('when API call is not successful', () => {
- const error = new Error();
-
- beforeEach(() => {
- const mockResolvers = {
- Query: {
- organization: jest.fn().mockRejectedValueOnce(error),
- },
- };
-
- createComponent({ mockResolvers });
- });
-
- it('displays error alert', async () => {
- await waitForPromises();
-
- expect(createAlert).toHaveBeenCalledWith({
- message: ProjectsPage.i18n.errorMessage,
- error,
- captureError: true,
- });
- });
- });
-});
diff --git a/spec/frontend/organizations/groups_and_projects/mock_data.js b/spec/frontend/organizations/groups_and_projects/mock_data.js
deleted file mode 100644
index eb829a24f50..00000000000
--- a/spec/frontend/organizations/groups_and_projects/mock_data.js
+++ /dev/null
@@ -1,252 +0,0 @@
-export const organization = {
- id: 'gid://gitlab/Organization/1',
- __typename: 'Organization',
-};
-
-export const organizationProjects = {
- nodes: [
- {
- id: 'gid://gitlab/Project/8',
- nameWithNamespace: 'Twitter / Typeahead.Js',
- webUrl: 'http://127.0.0.1:3000/twitter/Typeahead.Js',
- topics: ['JavaScript', 'Vue.js'],
- forksCount: 4,
- avatarUrl: null,
- starCount: 0,
- visibility: 'public',
- openIssuesCount: 48,
- descriptionHtml:
- '<p data-sourcepos="1:1-1:59" dir="auto">Optio et reprehenderit enim doloremque deserunt et commodi.</p>',
- issuesAccessLevel: 'enabled',
- forkingAccessLevel: 'enabled',
- isForked: true,
- accessLevel: {
- integerValue: 30,
- },
- },
- {
- id: 'gid://gitlab/Project/7',
- nameWithNamespace: 'Flightjs / Flight',
- webUrl: 'http://127.0.0.1:3000/flightjs/Flight',
- topics: [],
- forksCount: 0,
- avatarUrl: null,
- starCount: 0,
- visibility: 'private',
- openIssuesCount: 37,
- descriptionHtml:
- '<p data-sourcepos="1:1-1:49" dir="auto">Dolor dicta rerum et ut eius voluptate earum qui.</p>',
- issuesAccessLevel: 'enabled',
- forkingAccessLevel: 'enabled',
- isForked: false,
- accessLevel: {
- integerValue: 20,
- },
- },
- {
- id: 'gid://gitlab/Project/6',
- nameWithNamespace: 'Jashkenas / Underscore',
- webUrl: 'http://127.0.0.1:3000/jashkenas/Underscore',
- topics: [],
- forksCount: 0,
- avatarUrl: null,
- starCount: 0,
- visibility: 'private',
- openIssuesCount: 34,
- descriptionHtml:
- '<p data-sourcepos="1:1-1:52" dir="auto">Incidunt est aliquam autem nihil eveniet quis autem.</p>',
- issuesAccessLevel: 'enabled',
- forkingAccessLevel: 'enabled',
- isForked: false,
- accessLevel: {
- integerValue: 40,
- },
- },
- {
- id: 'gid://gitlab/Project/5',
- nameWithNamespace: 'Commit451 / Lab Coat',
- webUrl: 'http://127.0.0.1:3000/Commit451/lab-coat',
- topics: [],
- forksCount: 0,
- avatarUrl: null,
- starCount: 0,
- visibility: 'internal',
- openIssuesCount: 49,
- descriptionHtml:
- '<p data-sourcepos="1:1-1:34" dir="auto">Sint eos dolorem impedit rerum et.</p>',
- issuesAccessLevel: 'enabled',
- forkingAccessLevel: 'enabled',
- isForked: false,
- accessLevel: {
- integerValue: 10,
- },
- },
- {
- id: 'gid://gitlab/Project/1',
- nameWithNamespace: 'Toolbox / Gitlab Smoke Tests',
- webUrl: 'http://127.0.0.1:3000/toolbox/gitlab-smoke-tests',
- topics: [],
- forksCount: 0,
- avatarUrl: null,
- starCount: 0,
- visibility: 'internal',
- openIssuesCount: 34,
- descriptionHtml:
- '<p data-sourcepos="1:1-1:40" dir="auto">Veritatis error laboriosam libero autem.</p>',
- issuesAccessLevel: 'enabled',
- forkingAccessLevel: 'enabled',
- isForked: false,
- accessLevel: {
- integerValue: 30,
- },
- },
- ],
-};
-
-export const organizationGroups = {
- nodes: [
- {
- id: 'gid://gitlab/Group/29',
- fullName: 'Commit451',
- parent: null,
- webUrl: 'http://127.0.0.1:3000/groups/Commit451',
- descriptionHtml:
- '<p data-sourcepos="1:1-1:52" dir="auto">Autem praesentium vel ut ratione itaque ullam culpa.</p>',
- avatarUrl: null,
- descendantGroupsCount: 0,
- projectsCount: 3,
- groupMembersCount: 2,
- visibility: 'public',
- accessLevel: {
- integerValue: 30,
- },
- },
- {
- id: 'gid://gitlab/Group/33',
- fullName: 'Flightjs',
- parent: null,
- webUrl: 'http://127.0.0.1:3000/groups/flightjs',
- descriptionHtml:
- '<p data-sourcepos="1:1-1:60" dir="auto">Ipsa reiciendis deleniti officiis illum nostrum quo aliquam.</p>',
- avatarUrl: null,
- descendantGroupsCount: 4,
- projectsCount: 3,
- groupMembersCount: 1,
- visibility: 'private',
- accessLevel: {
- integerValue: 20,
- },
- },
- {
- id: 'gid://gitlab/Group/24',
- fullName: 'Gitlab Org',
- parent: null,
- webUrl: 'http://127.0.0.1:3000/groups/gitlab-org',
- descriptionHtml:
- '<p data-sourcepos="1:1-1:64" dir="auto">Dolorem dolorem omnis impedit cupiditate pariatur officia velit.</p>',
- avatarUrl: null,
- descendantGroupsCount: 1,
- projectsCount: 1,
- groupMembersCount: 2,
- visibility: 'internal',
- accessLevel: {
- integerValue: 10,
- },
- },
- {
- id: 'gid://gitlab/Group/27',
- fullName: 'Gnuwget',
- parent: null,
- webUrl: 'http://127.0.0.1:3000/groups/gnuwgetf',
- descriptionHtml:
- '<p data-sourcepos="1:1-1:47" dir="auto">Culpa soluta aut eius dolores est vel sapiente.</p>',
- avatarUrl: null,
- descendantGroupsCount: 4,
- projectsCount: 2,
- groupMembersCount: 3,
- visibility: 'public',
- accessLevel: {
- integerValue: 40,
- },
- },
- {
- id: 'gid://gitlab/Group/31',
- fullName: 'Jashkenas',
- parent: null,
- webUrl: 'http://127.0.0.1:3000/groups/jashkenas',
- descriptionHtml: '<p data-sourcepos="1:1-1:25" dir="auto">Ut ut id aliquid nostrum.</p>',
- avatarUrl: null,
- descendantGroupsCount: 3,
- projectsCount: 3,
- groupMembersCount: 10,
- visibility: 'private',
- accessLevel: {
- integerValue: 10,
- },
- },
- {
- id: 'gid://gitlab/Group/22',
- fullName: 'Toolbox',
- parent: null,
- webUrl: 'http://127.0.0.1:3000/groups/toolbox',
- descriptionHtml:
- '<p data-sourcepos="1:1-1:46" dir="auto">Quo voluptatem magnam facere voluptates alias.</p>',
- avatarUrl: null,
- descendantGroupsCount: 2,
- projectsCount: 3,
- groupMembersCount: 40,
- visibility: 'internal',
- accessLevel: {
- integerValue: 30,
- },
- },
- {
- id: 'gid://gitlab/Group/35',
- fullName: 'Twitter',
- parent: null,
- webUrl: 'http://127.0.0.1:3000/groups/twitter',
- descriptionHtml:
- '<p data-sourcepos="1:1-1:40" dir="auto">Quae nulla consequatur assumenda id quo.</p>',
- avatarUrl: null,
- descendantGroupsCount: 20,
- projectsCount: 30,
- groupMembersCount: 100,
- visibility: 'public',
- accessLevel: {
- integerValue: 40,
- },
- },
- {
- id: 'gid://gitlab/Group/73',
- fullName: 'test',
- parent: null,
- webUrl: 'http://127.0.0.1:3000/groups/test',
- descriptionHtml: '',
- avatarUrl: null,
- descendantGroupsCount: 1,
- projectsCount: 1,
- groupMembersCount: 1,
- visibility: 'private',
- accessLevel: {
- integerValue: 30,
- },
- },
- {
- id: 'gid://gitlab/Group/74',
- fullName: 'Twitter / test subgroup',
- parent: {
- id: 'gid://gitlab/Group/35',
- },
- webUrl: 'http://127.0.0.1:3000/groups/twitter/test-subgroup',
- descriptionHtml: '',
- avatarUrl: null,
- descendantGroupsCount: 4,
- projectsCount: 4,
- groupMembersCount: 4,
- visibility: 'internal',
- accessLevel: {
- integerValue: 20,
- },
- },
- ],
-};
diff --git a/spec/frontend/organizations/shared/components/groups_view_spec.js b/spec/frontend/organizations/shared/components/groups_view_spec.js
new file mode 100644
index 00000000000..8d6ea60ffd2
--- /dev/null
+++ b/spec/frontend/organizations/shared/components/groups_view_spec.js
@@ -0,0 +1,146 @@
+import VueApollo from 'vue-apollo';
+import Vue from 'vue';
+import { GlEmptyState, GlLoadingIcon } from '@gitlab/ui';
+import GroupsView from '~/organizations/shared/components/groups_view.vue';
+import { formatGroups } from '~/organizations/shared/utils';
+import resolvers from '~/organizations/shared/graphql/resolvers';
+import GroupsList from '~/vue_shared/components/groups_list/groups_list.vue';
+import { createAlert } from '~/alert';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { organizationGroups } from '~/organizations/mock_data';
+
+jest.mock('~/alert');
+
+Vue.use(VueApollo);
+jest.useFakeTimers();
+
+describe('GroupsView', () => {
+ let wrapper;
+ let mockApollo;
+
+ const defaultProvide = {
+ groupsEmptyStateSvgPath: 'illustrations/empty-state/empty-groups-md.svg',
+ newGroupPath: '/groups/new',
+ };
+
+ const createComponent = ({ mockResolvers = resolvers, propsData = {} } = {}) => {
+ mockApollo = createMockApollo([], mockResolvers);
+
+ wrapper = shallowMountExtended(GroupsView, {
+ apolloProvider: mockApollo,
+ provide: defaultProvide,
+ propsData,
+ });
+ };
+
+ afterEach(() => {
+ mockApollo = null;
+ });
+
+ describe('when API call is loading', () => {
+ beforeEach(() => {
+ const mockResolvers = {
+ Query: {
+ organization: jest.fn().mockReturnValueOnce(new Promise(() => {})),
+ },
+ };
+
+ createComponent({ mockResolvers });
+ });
+
+ it('renders loading icon', () => {
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ });
+ });
+
+ describe('when API call is successful', () => {
+ describe('when there are no groups', () => {
+ it('renders empty state without buttons by default', async () => {
+ const mockResolvers = {
+ Query: {
+ organization: jest.fn().mockResolvedValueOnce({
+ groups: { nodes: [] },
+ }),
+ },
+ };
+ createComponent({ mockResolvers });
+
+ jest.runAllTimers();
+ await waitForPromises();
+
+ expect(wrapper.findComponent(GlEmptyState).props()).toMatchObject({
+ title: "You don't have any groups yet.",
+ description:
+ 'A group is a collection of several projects. If you organize your projects under a group, it works like a folder.',
+ svgHeight: 144,
+ svgPath: defaultProvide.groupsEmptyStateSvgPath,
+ primaryButtonLink: null,
+ primaryButtonText: null,
+ });
+ });
+
+ describe('when `shouldShowEmptyStateButtons` is `true` and `groupsEmptyStateSvgPath` is set', () => {
+ it('renders empty state with buttons', async () => {
+ const mockResolvers = {
+ Query: {
+ organization: jest.fn().mockResolvedValueOnce({
+ groups: { nodes: [] },
+ }),
+ },
+ };
+ createComponent({ mockResolvers, propsData: { shouldShowEmptyStateButtons: true } });
+
+ jest.runAllTimers();
+ await waitForPromises();
+
+ expect(wrapper.findComponent(GlEmptyState).props()).toMatchObject({
+ primaryButtonLink: defaultProvide.newGroupPath,
+ primaryButtonText: 'New group',
+ });
+ });
+ });
+ });
+
+ describe('when there are groups', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders `GroupsList` component and passes correct props', async () => {
+ jest.runAllTimers();
+ await waitForPromises();
+
+ expect(wrapper.findComponent(GroupsList).props()).toEqual({
+ groups: formatGroups(organizationGroups.nodes),
+ showGroupIcon: true,
+ });
+ });
+ });
+ });
+
+ describe('when API call is not successful', () => {
+ const error = new Error();
+
+ beforeEach(() => {
+ const mockResolvers = {
+ Query: {
+ organization: jest.fn().mockRejectedValueOnce(error),
+ },
+ };
+
+ createComponent({ mockResolvers });
+ });
+
+ it('displays error alert', async () => {
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith({
+ message: GroupsView.i18n.errorMessage,
+ error,
+ captureError: true,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/organizations/shared/components/projects_view_spec.js b/spec/frontend/organizations/shared/components/projects_view_spec.js
new file mode 100644
index 00000000000..490b0c89348
--- /dev/null
+++ b/spec/frontend/organizations/shared/components/projects_view_spec.js
@@ -0,0 +1,146 @@
+import VueApollo from 'vue-apollo';
+import Vue from 'vue';
+import { GlLoadingIcon, GlEmptyState } from '@gitlab/ui';
+import ProjectsView from '~/organizations/shared/components/projects_view.vue';
+import { formatProjects } from '~/organizations/shared/utils';
+import resolvers from '~/organizations/shared/graphql/resolvers';
+import ProjectsList from '~/vue_shared/components/projects_list/projects_list.vue';
+import { createAlert } from '~/alert';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { organizationProjects } from '~/organizations/mock_data';
+
+jest.mock('~/alert');
+
+Vue.use(VueApollo);
+jest.useFakeTimers();
+
+describe('ProjectsView', () => {
+ let wrapper;
+ let mockApollo;
+
+ const defaultProvide = {
+ projectsEmptyStateSvgPath: 'illustrations/empty-state/empty-projects-md.svg',
+ newProjectPath: '/projects/new',
+ };
+
+ const createComponent = ({ mockResolvers = resolvers, propsData = {} } = {}) => {
+ mockApollo = createMockApollo([], mockResolvers);
+
+ wrapper = shallowMountExtended(ProjectsView, {
+ apolloProvider: mockApollo,
+ provide: defaultProvide,
+ propsData,
+ });
+ };
+
+ afterEach(() => {
+ mockApollo = null;
+ });
+
+ describe('when API call is loading', () => {
+ beforeEach(() => {
+ const mockResolvers = {
+ Query: {
+ organization: jest.fn().mockReturnValueOnce(new Promise(() => {})),
+ },
+ };
+
+ createComponent({ mockResolvers });
+ });
+
+ it('renders loading icon', () => {
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ });
+ });
+
+ describe('when API call is successful', () => {
+ describe('when there are no projects', () => {
+ it('renders empty state without buttons by default', async () => {
+ const mockResolvers = {
+ Query: {
+ organization: jest.fn().mockResolvedValueOnce({
+ projects: { nodes: [] },
+ }),
+ },
+ };
+ createComponent({ mockResolvers });
+
+ jest.runAllTimers();
+ await waitForPromises();
+
+ expect(wrapper.findComponent(GlEmptyState).props()).toMatchObject({
+ title: "You don't have any projects yet.",
+ description:
+ 'Projects are where you can store your code, access issues, wiki, and other features of Gitlab.',
+ svgHeight: 144,
+ svgPath: defaultProvide.projectsEmptyStateSvgPath,
+ primaryButtonLink: null,
+ primaryButtonText: null,
+ });
+ });
+
+ describe('when `shouldShowEmptyStateButtons` is `true` and `projectsEmptyStateSvgPath` is set', () => {
+ it('renders empty state with buttons', async () => {
+ const mockResolvers = {
+ Query: {
+ organization: jest.fn().mockResolvedValueOnce({
+ projects: { nodes: [] },
+ }),
+ },
+ };
+ createComponent({ mockResolvers, propsData: { shouldShowEmptyStateButtons: true } });
+
+ jest.runAllTimers();
+ await waitForPromises();
+
+ expect(wrapper.findComponent(GlEmptyState).props()).toMatchObject({
+ primaryButtonLink: defaultProvide.newProjectPath,
+ primaryButtonText: 'New project',
+ });
+ });
+ });
+ });
+
+ describe('when there are projects', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders `ProjectsList` component and passes correct props', async () => {
+ jest.runAllTimers();
+ await waitForPromises();
+
+ expect(wrapper.findComponent(ProjectsList).props()).toEqual({
+ projects: formatProjects(organizationProjects.nodes),
+ showProjectIcon: true,
+ });
+ });
+ });
+ });
+
+ describe('when API call is not successful', () => {
+ const error = new Error();
+
+ beforeEach(() => {
+ const mockResolvers = {
+ Query: {
+ organization: jest.fn().mockRejectedValueOnce(error),
+ },
+ };
+
+ createComponent({ mockResolvers });
+ });
+
+ it('displays error alert', async () => {
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith({
+ message: ProjectsView.i18n.errorMessage,
+ error,
+ captureError: true,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/organizations/groups_and_projects/utils_spec.js b/spec/frontend/organizations/shared/utils_spec.js
index 2cb1ee02061..778a18ab2bc 100644
--- a/spec/frontend/organizations/groups_and_projects/utils_spec.js
+++ b/spec/frontend/organizations/shared/utils_spec.js
@@ -1,7 +1,7 @@
-import { formatProjects, formatGroups } from '~/organizations/groups_and_projects/utils';
-import { ACTION_EDIT, ACTION_DELETE } from '~/vue_shared/components/projects_list/constants';
+import { formatProjects, formatGroups } from '~/organizations/shared/utils';
+import { ACTION_EDIT, ACTION_DELETE } from '~/vue_shared/components/list_actions/constants';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
-import { organizationProjects, organizationGroups } from './mock_data';
+import { organizationProjects, organizationGroups } from '~/organizations/mock_data';
describe('formatProjects', () => {
it('correctly formats the projects', () => {
@@ -17,7 +17,7 @@ describe('formatProjects', () => {
accessLevel: firstMockProject.accessLevel.integerValue,
},
},
- actions: [ACTION_EDIT, ACTION_DELETE],
+ availableActions: [ACTION_EDIT, ACTION_DELETE],
});
expect(formattedProjects.length).toBe(organizationProjects.nodes.length);
});
@@ -29,7 +29,11 @@ describe('formatGroups', () => {
const formattedGroups = formatGroups(organizationGroups.nodes);
const [firstFormattedGroup] = formattedGroups;
- expect(firstFormattedGroup.id).toBe(getIdFromGraphQLId(firstMockGroup.id));
+ expect(firstFormattedGroup).toMatchObject({
+ id: getIdFromGraphQLId(firstMockGroup.id),
+ editPath: `${firstFormattedGroup.webUrl}/-/edit`,
+ availableActions: [ACTION_EDIT, ACTION_DELETE],
+ });
expect(formattedGroups.length).toBe(organizationGroups.nodes.length);
});
});
diff --git a/spec/frontend/organizations/show/components/app_spec.js b/spec/frontend/organizations/show/components/app_spec.js
new file mode 100644
index 00000000000..46496e40bdd
--- /dev/null
+++ b/spec/frontend/organizations/show/components/app_spec.js
@@ -0,0 +1,49 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import App from '~/organizations/show/components/app.vue';
+import OrganizationAvatar from '~/organizations/show/components/organization_avatar.vue';
+import GroupsAndProjects from '~/organizations/show/components/groups_and_projects.vue';
+import AssociationCount from '~/organizations/show/components/association_counts.vue';
+
+describe('OrganizationShowApp', () => {
+ let wrapper;
+
+ const defaultPropsData = {
+ organization: {
+ id: 1,
+ name: 'GitLab',
+ },
+ associationCounts: {
+ groups: 10,
+ projects: 5,
+ users: 6,
+ },
+ groupsAndProjectsOrganizationPath: '/-/organizations/default/groups_and_projects',
+ };
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(App, { propsData: defaultPropsData });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders organization avatar and passes organization prop', () => {
+ expect(wrapper.findComponent(OrganizationAvatar).props('organization')).toEqual(
+ defaultPropsData.organization,
+ );
+ });
+
+ it('renders groups and projects component and passes `groupsAndProjectsOrganizationPath` prop', () => {
+ expect(
+ wrapper.findComponent(GroupsAndProjects).props('groupsAndProjectsOrganizationPath'),
+ ).toEqual(defaultPropsData.groupsAndProjectsOrganizationPath);
+ });
+
+ it('renders associations count component and passes expected props', () => {
+ expect(wrapper.findComponent(AssociationCount).props()).toEqual({
+ associationCounts: defaultPropsData.associationCounts,
+ groupsAndProjectsOrganizationPath: defaultPropsData.groupsAndProjectsOrganizationPath,
+ });
+ });
+});
diff --git a/spec/frontend/organizations/show/components/association_count_card_spec.js b/spec/frontend/organizations/show/components/association_count_card_spec.js
new file mode 100644
index 00000000000..752a02110b6
--- /dev/null
+++ b/spec/frontend/organizations/show/components/association_count_card_spec.js
@@ -0,0 +1,48 @@
+import { GlCard, GlLink } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import AssociationCountCard from '~/organizations/show/components/association_count_card.vue';
+
+describe('AssociationCountCard', () => {
+ let wrapper;
+
+ const defaultPropsData = {
+ title: 'Groups',
+ iconName: 'group',
+ count: 1050,
+ linkHref: '/-/organizations/default/groups_and_projects?display=groups',
+ };
+
+ const createComponent = ({ propsData = {} } = {}) => {
+ wrapper = shallowMountExtended(AssociationCountCard, {
+ propsData: { ...defaultPropsData, ...propsData },
+ });
+ };
+
+ const findCard = () => wrapper.findComponent(GlCard);
+ const findLink = () => findCard().findComponent(GlLink);
+
+ it('renders card with title, link and count', () => {
+ createComponent();
+
+ const card = findCard();
+ const link = findLink();
+
+ expect(card.text()).toContain(defaultPropsData.title);
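+ // the count of 1050 is expected to render in its abbreviated form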
+ expect(card.text()).toContain('1k');
+ expect(link.text()).toBe('View all');
+ expect(link.attributes('href')).toBe(defaultPropsData.linkHref);
+ });
+
+ describe('when `linkText` prop is set', () => {
+ const linkText = 'Manage';
+ beforeEach(() => {
+ createComponent({
+ propsData: { linkText },
+ });
+ });
+
+ it('sets link text', () => {
+ expect(findLink().text()).toBe(linkText);
+ });
+ });
+});
diff --git a/spec/frontend/organizations/show/components/association_counts_spec.js b/spec/frontend/organizations/show/components/association_counts_spec.js
new file mode 100644
index 00000000000..80e57ede502
--- /dev/null
+++ b/spec/frontend/organizations/show/components/association_counts_spec.js
@@ -0,0 +1,61 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import AssociationCounts from '~/organizations/show/components/association_counts.vue';
+import AssociationCountCard from '~/organizations/show/components/association_count_card.vue';
+
+describe('AssociationCounts', () => {
+ let wrapper;
+
+ const defaultPropsData = {
+ associationCounts: {
+ groups: 10,
+ projects: 5,
+ users: 6,
+ },
+ groupsAndProjectsOrganizationPath: '/-/organizations/default/groups_and_projects',
+ };
+
+ const createComponent = ({ propsData = {} } = {}) => {
+ wrapper = shallowMountExtended(AssociationCounts, {
+ propsData: { ...defaultPropsData, ...propsData },
+ });
+ };
+
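+ // Cards render in a fixed order (groups, projects, users), so the specs index into them by position.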
+ const findAssociationCountCardAt = (index) =>
+ wrapper.findAllComponents(AssociationCountCard).at(index);
+
+ it('renders groups association count card', () => {
+ createComponent();
+
+ expect(findAssociationCountCardAt(0).props()).toEqual({
+ title: 'Groups',
+ iconName: 'group',
+ count: defaultPropsData.associationCounts.groups,
+ linkText: 'View all',
+ linkHref: '/-/organizations/default/groups_and_projects?display=groups',
+ });
+ });
+
+ it('renders projects association count card', () => {
+ createComponent();
+
+ expect(findAssociationCountCardAt(1).props()).toEqual({
+ title: 'Projects',
+ iconName: 'project',
+ count: defaultPropsData.associationCounts.projects,
+ linkText: 'View all',
+ linkHref: '/-/organizations/default/groups_and_projects?display=projects',
+ });
+ });
+
+ it('renders users association count card', () => {
+ createComponent();
+
+ expect(findAssociationCountCardAt(2).props()).toEqual({
+ title: 'Users',
+ iconName: 'users',
+ count: defaultPropsData.associationCounts.users,
+ linkText: 'Manage',
+ linkHref: '/',
+ });
+ });
+});
diff --git a/spec/frontend/organizations/show/components/groups_and_projects_spec.js b/spec/frontend/organizations/show/components/groups_and_projects_spec.js
new file mode 100644
index 00000000000..83970d4e76d
--- /dev/null
+++ b/spec/frontend/organizations/show/components/groups_and_projects_spec.js
@@ -0,0 +1,106 @@
+import { GlCollapsibleListbox, GlLink } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import GroupsAndProjects from '~/organizations/show/components/groups_and_projects.vue';
+import { createRouter } from '~/organizations/show';
+import GroupsView from '~/organizations/shared/components/groups_view.vue';
+import ProjectsView from '~/organizations/shared/components/projects_view.vue';
+
+describe('OrganizationShowGroupsAndProjects', () => {
+ const router = createRouter();
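+ // $router is replaced below with a spy so query-string updates can be asserted
+ // without performing real navigation.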
+ const routerMock = {
+ push: jest.fn(),
+ };
+ const defaultPropsData = {
+ groupsAndProjectsOrganizationPath: '/-/organizations/default/groups_and_projects',
+ };
+
+ let wrapper;
+
+ const createComponent = ({ routeQuery = {} } = {}) => {
+ wrapper = shallowMountExtended(GroupsAndProjects, {
+ router,
+ mocks: { $route: { path: '/', query: routeQuery }, $router: routerMock },
+ propsData: defaultPropsData,
+ });
+ };
+
+ const findCollapsibleListbox = () => wrapper.findComponent(GlCollapsibleListbox);
+
+ it('renders listbox with expected props', () => {
+ createComponent();
+
+ expect(findCollapsibleListbox().props()).toMatchObject({
+ items: [
+ {
+ value: 'frequently_visited_projects',
+ text: 'Frequently visited projects',
+ },
+ {
+ value: 'frequently_visited_groups',
+ text: 'Frequently visited groups',
+ },
+ ],
+ selected: 'frequently_visited_projects',
+ });
+ });
+
+ describe.each`
+ displayQueryParam | expectedViewAllLinkQuery | expectedViewComponent | expectedDisplayListboxSelectedProp
+ ${'frequently_visited_projects'} | ${'?display=projects'} | ${ProjectsView} | ${'frequently_visited_projects'}
+ ${'frequently_visited_groups'} | ${'?display=groups'} | ${GroupsView} | ${'frequently_visited_groups'}
+ ${'unsupported'} | ${'?display=projects'} | ${ProjectsView} | ${'frequently_visited_projects'}
+ `(
+ 'when display query param is $displayQueryParam',
+ ({
+ displayQueryParam,
+ expectedViewAllLinkQuery,
+ expectedViewComponent,
+ expectedDisplayListboxSelectedProp,
+ }) => {
+ beforeEach(() => {
+ createComponent({ routeQuery: { display: displayQueryParam } });
+ });
+
+ it('sets listbox `selected` prop correctly', () => {
+ expect(findCollapsibleListbox().props('selected')).toBe(expectedDisplayListboxSelectedProp);
+ });
+
+ it('renders "View all" link with correct href', () => {
+ expect(wrapper.findComponent(GlLink).attributes('href')).toBe(
+ `${defaultPropsData.groupsAndProjectsOrganizationPath}${expectedViewAllLinkQuery}`,
+ );
+ });
+
+ it('renders expected view', () => {
+ expect(
+ wrapper.findComponent(expectedViewComponent).props('shouldShowEmptyStateButtons'),
+ ).toBe(true);
+ });
+ },
+ );
+
+ it('renders label and associates listbox with it', () => {
+ createComponent();
+
+ const expectedId = 'display-listbox-label';
+
+ expect(wrapper.findByTestId('label').attributes('id')).toBe(expectedId);
+ expect(findCollapsibleListbox().props('toggleAriaLabelledBy')).toBe(expectedId);
+ });
+
+ describe('when listbox item is selected', () => {
+ const selectValue = 'frequently_visited_groups';
+
+ beforeEach(() => {
+ createComponent();
+
+ findCollapsibleListbox().vm.$emit('select', selectValue);
+ });
+
+ it('updates `display` query param', () => {
+ expect(routerMock.push).toHaveBeenCalledWith({
+ query: { display: selectValue },
+ });
+ });
+ });
+});
diff --git a/spec/frontend/organizations/show/components/organization_avatar_spec.js b/spec/frontend/organizations/show/components/organization_avatar_spec.js
new file mode 100644
index 00000000000..c98fa14e49b
--- /dev/null
+++ b/spec/frontend/organizations/show/components/organization_avatar_spec.js
@@ -0,0 +1,64 @@
+import { GlAvatar, GlIcon } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import OrganizationAvatar from '~/organizations/show/components/organization_avatar.vue';
+import {
+ VISIBILITY_TYPE_ICON,
+ ORGANIZATION_VISIBILITY_TYPE,
+ VISIBILITY_LEVEL_PUBLIC_STRING,
+} from '~/visibility_level/constants';
+import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
+
+describe('OrganizationAvatar', () => {
+ let wrapper;
+
+ const defaultPropsData = {
+ organization: {
+ id: 1,
+ name: 'GitLab',
+ },
+ };
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(OrganizationAvatar, {
+ propsData: defaultPropsData,
+ directives: {
+ GlTooltip: createMockDirective('gl-tooltip'),
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders avatar', () => {
+ expect(wrapper.findComponent(GlAvatar).props()).toMatchObject({
+ entityId: defaultPropsData.organization.id,
+ entityName: defaultPropsData.organization.name,
+ });
+ });
+
+ it('renders organization name', () => {
+ expect(
+ wrapper.findByRole('heading', { name: defaultPropsData.organization.name }).exists(),
+ ).toBe(true);
+ });
+
+ it('renders visibility icon', () => {
+ const icon = wrapper.findComponent(GlIcon);
+ const tooltip = getBinding(icon.element, 'gl-tooltip');
+
+ expect(icon.props('name')).toBe(VISIBILITY_TYPE_ICON[VISIBILITY_LEVEL_PUBLIC_STRING]);
+ expect(tooltip.value).toBe(ORGANIZATION_VISIBILITY_TYPE[VISIBILITY_LEVEL_PUBLIC_STRING]);
+ });
+
+ it('renders button to copy organization ID', () => {
+ expect(wrapper.findComponent(ClipboardButton).props()).toMatchObject({
+ category: 'tertiary',
+ title: 'Copy organization ID',
+ text: '1',
+ size: 'small',
+ });
+ });
+});
diff --git a/spec/frontend/organizations/show/utils_spec.js b/spec/frontend/organizations/show/utils_spec.js
new file mode 100644
index 00000000000..583f105c8c0
--- /dev/null
+++ b/spec/frontend/organizations/show/utils_spec.js
@@ -0,0 +1,20 @@
+import { buildDisplayListboxItem } from '~/organizations/show/utils';
+import { RESOURCE_TYPE_PROJECTS } from '~/organizations/constants';
+import { FILTER_FREQUENTLY_VISITED } from '~/organizations/show/constants';
+
+describe('buildDisplayListboxItem', () => {
+ it('returns list item in correct format', () => {
+ const text = 'Frequently visited projects';
+
+ expect(
+ buildDisplayListboxItem({
+ filter: FILTER_FREQUENTLY_VISITED,
+ resourceType: RESOURCE_TYPE_PROJECTS,
+ text,
+ }),
+ ).toEqual({
+ text,
+ value: 'frequently_visited_projects',
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/__snapshots__/tags_loader_spec.js.snap b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/__snapshots__/tags_loader_spec.js.snap
index 5f191ef5561..771fb9e4e08 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/__snapshots__/tags_loader_spec.js.snap
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/__snapshots__/tags_loader_spec.js.snap
@@ -10,7 +10,6 @@ exports[`TagsLoader component has the correct markup 1`] = `
x="0"
y="12.5"
/>
-
<rect
height="20"
rx="4"
@@ -18,13 +17,11 @@ exports[`TagsLoader component has the correct markup 1`] = `
x="25"
y="10"
/>
-
<circle
cx="290"
cy="20"
r="10"
/>
-
<rect
height="20"
rx="4"
@@ -32,7 +29,6 @@ exports[`TagsLoader component has the correct markup 1`] = `
x="315"
y="10"
/>
-
<rect
height="20"
rx="4"
@@ -40,7 +36,6 @@ exports[`TagsLoader component has the correct markup 1`] = `
x="500"
y="10"
/>
-
<rect
height="20"
rx="4"
@@ -48,7 +43,6 @@ exports[`TagsLoader component has the correct markup 1`] = `
x="630"
y="10"
/>
-
<rect
height="40"
rx="4"
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/__snapshots__/group_empty_state_spec.js.snap b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/__snapshots__/group_empty_state_spec.js.snap
index 56579847468..3e136c750cd 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/__snapshots__/group_empty_state_spec.js.snap
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/__snapshots__/group_empty_state_spec.js.snap
@@ -3,7 +3,7 @@
exports[`Registry Group Empty state to match the default snapshot 1`] = `
<div>
<p>
- With the Container Registry, every project can have its own space to store its Docker images. Push at least one Docker image in one of this group's projects in order to show up here.
+ With the Container Registry, every project can have its own space to store its Docker images. Push at least one Docker image in one of this group's projects in order to show up here.
<gl-link-stub
href="baz"
target="_blank"
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/__snapshots__/project_empty_state_spec.js.snap b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/__snapshots__/project_empty_state_spec.js.snap
index 4b52e84d1a6..5a6d84734a0 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/__snapshots__/project_empty_state_spec.js.snap
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/__snapshots__/project_empty_state_spec.js.snap
@@ -3,7 +3,7 @@
exports[`Registry Project Empty state to match the default snapshot 1`] = `
<div>
<p>
- With the Container Registry, every project can have its own space to store its Docker images.
+ With the Container Registry, every project can have its own space to store its Docker images.
<gl-link-stub
href="baz"
target="_blank"
@@ -11,29 +11,26 @@ exports[`Registry Project Empty state to match the default snapshot 1`] = `
More Information
</gl-link-stub>
</p>
-
<h5>
CLI Commands
</h5>
-
<p>
- If you are not already logged in, you need to authenticate to the Container Registry by using your GitLab username and password. If you have
+ If you are not already logged in, you need to authenticate to the Container Registry by using your GitLab username and password. If you have
<gl-link-stub
href="barBaz"
target="_blank"
>
Two-Factor Authentication
</gl-link-stub>
- enabled, use a
+ enabled, use a
<gl-link-stub
href="fooBaz"
target="_blank"
>
Personal Access Token
</gl-link-stub>
- instead of a password.
+ instead of a password.
</p>
-
<gl-form-input-group-stub
class="gl-mb-4"
inputclass=""
@@ -47,15 +44,11 @@ exports[`Registry Project Empty state to match the default snapshot 1`] = `
value="bazbaz"
/>
</gl-form-input-group-stub>
-
<p
class="gl-mb-4"
>
-
- You can add an image to this registry with the following commands:
-
+ You can add an image to this registry with the following commands:
</p>
-
<gl-form-input-group-stub
class="gl-mb-4"
inputclass=""
@@ -69,7 +62,6 @@ exports[`Registry Project Empty state to match the default snapshot 1`] = `
value="foofoo"
/>
</gl-form-input-group-stub>
-
<gl-form-input-group-stub
inputclass=""
predefinedoptions="[object Object]"
diff --git a/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js b/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js
index f590cff0312..dd70fca9dd2 100644
--- a/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js
+++ b/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js
@@ -1,12 +1,12 @@
import {
GlAlert,
- GlDropdown,
- GlDropdownItem,
GlFormInputGroup,
GlFormGroup,
GlModal,
GlSprintf,
GlSkeletonLoader,
+ GlDisclosureDropdown,
+ GlDisclosureDropdownItem,
} from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
@@ -18,12 +18,13 @@ import waitForPromises from 'helpers/wait_for_promises';
import { GRAPHQL_PAGE_SIZE } from '~/packages_and_registries/dependency_proxy/constants';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_ACCEPTED } from '~/lib/utils/http_status';
-
+import setWindowLocation from 'helpers/set_window_location_helper';
+import { TEST_HOST } from 'helpers/test_constants';
import DependencyProxyApp from '~/packages_and_registries/dependency_proxy/app.vue';
import TitleArea from '~/vue_shared/components/registry/title_area.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import ManifestsList from '~/packages_and_registries/dependency_proxy/components/manifests_list.vue';
-
+import createRouter from '~/packages_and_registries/dependency_proxy/router';
import getDependencyProxyDetailsQuery from '~/packages_and_registries/dependency_proxy/graphql/queries/get_dependency_proxy_details.query.graphql';
import { proxyDetailsQuery, proxyData, pagination, proxyManifests } from './mock_data';
@@ -37,6 +38,7 @@ Vue.use(VueApollo);
describe('DependencyProxyApp', () => {
let wrapper;
+ let router;
let apolloProvider;
let resolver;
let mock;
@@ -53,15 +55,14 @@ describe('DependencyProxyApp', () => {
const requestHandlers = [[getDependencyProxyDetailsQuery, resolver]];
apolloProvider = createMockApollo(requestHandlers);
+ router = createRouter('/');
wrapper = shallowMountExtended(DependencyProxyApp, {
apolloProvider,
provide,
+ router,
stubs: {
GlAlert,
- GlDropdown,
- GlDropdownItem,
- GlFormInputGroup,
GlFormGroup,
GlModal,
GlSprintf,
@@ -79,7 +80,7 @@ describe('DependencyProxyApp', () => {
const findProxyCountText = () => wrapper.findByTestId('proxy-count');
const findManifestList = () => wrapper.findComponent(ManifestsList);
const findLoader = () => wrapper.findComponent(GlSkeletonLoader);
- const findClearCacheDropdownList = () => wrapper.findComponent(GlDropdown);
+ const findClearCacheDropdownList = () => wrapper.findComponent(GlDisclosureDropdown);
const findClearCacheModal = () => wrapper.findComponent(GlModal);
const findClearCacheAlert = () => wrapper.findComponent(GlAlert);
const findSettingsLink = () => wrapper.findByTestId('settings-link');
@@ -94,6 +95,7 @@ describe('DependencyProxyApp', () => {
mock = new MockAdapter(axios);
mock.onDelete(expectedUrl).reply(HTTP_STATUS_ACCEPTED, {});
+ setWindowLocation(TEST_HOST);
});
afterEach(() => {
@@ -123,6 +125,13 @@ describe('DependencyProxyApp', () => {
return waitForPromises();
});
+ it('resolver is called with right arguments', () => {
+ expect(resolver).toHaveBeenCalledWith({
+ first: GRAPHQL_PAGE_SIZE,
+ fullPath: provideDefaults.groupPath,
+ });
+ });
+
it('renders a form group with a label', () => {
expect(findFormGroup().attributes('label')).toBe(
DependencyProxyApp.i18n.proxyImagePrefix,
@@ -225,6 +234,7 @@ describe('DependencyProxyApp', () => {
fullPath: provideDefaults.groupPath,
last: GRAPHQL_PAGE_SIZE,
});
+ expect(window.location.search).toBe(`?before=${pagination().startCursor}`);
});
});
@@ -252,6 +262,7 @@ describe('DependencyProxyApp', () => {
first: GRAPHQL_PAGE_SIZE,
fullPath: provideDefaults.groupPath,
});
+ expect(window.location.search).toBe(`?after=${pagination().endCursor}`);
});
});
@@ -270,7 +281,7 @@ describe('DependencyProxyApp', () => {
expect(findClearCacheDropdownList().exists()).toBe(true);
const clearCacheDropdownItem = findClearCacheDropdownList().findComponent(
- GlDropdownItem,
+ GlDisclosureDropdownItem,
);
expect(clearCacheDropdownItem.text()).toBe('Clear cache');
@@ -315,6 +326,48 @@ describe('DependencyProxyApp', () => {
});
});
});
+
+ describe('pagination params', () => {
+ it('after is set from the url params', async () => {
+ setWindowLocation('?after=1234');
+ createComponent();
+ await waitForPromises();
+
+ expect(resolver).toHaveBeenCalledWith({
+ first: GRAPHQL_PAGE_SIZE,
+ after: '1234',
+ fullPath: provideDefaults.groupPath,
+ });
+ });
+
+ it('before is set from the url params', async () => {
+ setWindowLocation('?before=1234');
+ createComponent();
+ await waitForPromises();
+
+ expect(resolver).toHaveBeenCalledWith({
+ first: null,
+ last: GRAPHQL_PAGE_SIZE,
+ before: '1234',
+ fullPath: provideDefaults.groupPath,
+ });
+ });
+
+ describe('when url params are changed', () => {
+ it('after is set from the url params', async () => {
+ createComponent();
+ await waitForPromises();
+ router.push('?after=1234');
+ await waitForPromises();
+
+ expect(resolver).toHaveBeenCalledWith({
+ first: GRAPHQL_PAGE_SIZE,
+ after: '1234',
+ fullPath: provideDefaults.groupPath,
+ });
+ });
+ });
+ });
});
});
});
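The new pagination tests assert that the resolver's GraphQL variables follow the URL's ?before/?after cursors. A minimal sketch of that derivation, assuming a page size of 20 and not taken from the component's actual code:

    const GRAPHQL_PAGE_SIZE = 20; // assumed to match the constant imported in the spec

    // Derives the query variables the spec expects from the current route query.
    const graphqlVariables = (groupPath, { before, after } = {}) => {
      if (before) {
        // Backward paging: drop `first` and drive the query with `last`/`before`.
        return { fullPath: groupPath, first: null, last: GRAPHQL_PAGE_SIZE, before };
      }
      // Initial load and forward paging.
      return { fullPath: groupPath, first: GRAPHQL_PAGE_SIZE, ...(after ? { after } : {}) };
    };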
diff --git a/spec/frontend/packages_and_registries/dependency_proxy/utils_spec.js b/spec/frontend/packages_and_registries/dependency_proxy/utils_spec.js
new file mode 100644
index 00000000000..72072c08537
--- /dev/null
+++ b/spec/frontend/packages_and_registries/dependency_proxy/utils_spec.js
@@ -0,0 +1,25 @@
+import { getPageParams } from '~/packages_and_registries/dependency_proxy/utils';
+
+describe('getPageParams', () => {
+ it('should return the previous page params if before cursor is available', () => {
+ const pageInfo = { before: 'abc123' };
+ expect(getPageParams(pageInfo)).toEqual({
+ first: null,
+ before: pageInfo.before,
+ last: 20,
+ });
+ });
+
+ it('should return the next page params if after cursor is available', () => {
+ const pageInfo = { after: 'abc123' };
+ expect(getPageParams(pageInfo)).toEqual({
+ after: pageInfo.after,
+ first: 20,
+ });
+ });
+
+ it('should return an empty object if both before and after cursors are not available', () => {
+ const pageInfo = {};
+ expect(getPageParams(pageInfo)).toEqual({});
+ });
+});
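The expected objects above describe getPageParams completely. A sketch consistent with them — the page size of 20 is inferred from the expectations, not read from the utility itself:

    const PAGE_SIZE = 20; // inferred from the expected `first`/`last` values

    // Picks backward params when a `before` cursor exists, forward params when
    // an `after` cursor exists, and an empty object otherwise.
    const getPageParams = (pageInfo = {}) => {
      if (pageInfo.before) {
        return { first: null, before: pageInfo.before, last: PAGE_SIZE };
      }
      if (pageInfo.after) {
        return { after: pageInfo.after, first: PAGE_SIZE };
      }
      return {};
    };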
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/__snapshots__/file_sha_spec.js.snap b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/__snapshots__/file_sha_spec.js.snap
index f95564e3fad..8e757c136ec 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/__snapshots__/file_sha_spec.js.snap
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/__snapshots__/file_sha_spec.js.snap
@@ -2,18 +2,13 @@
exports[`FileSha renders 1`] = `
<div
- class="gl-display-flex gl-align-items-center gl-font-monospace gl-font-sm gl-word-break-all gl-py-2 gl-border-b-solid gl-border-gray-100 gl-border-b-1"
+ class="gl-align-items-center gl-border-b-1 gl-border-b-solid gl-border-gray-100 gl-display-flex gl-font-monospace gl-font-sm gl-py-2 gl-word-break-all"
>
- <!---->
-
<span>
<div
class="gl-px-4"
>
-
- bar:
- foo
-
+ bar: foo
<gl-button-stub
aria-label="Copy SHA"
aria-live="polite"
@@ -22,7 +17,7 @@ exports[`FileSha renders 1`] = `
data-clipboard-handle-tooltip="false"
data-clipboard-text="foo"
icon="copy-to-clipboard"
- id="clipboard-button-1"
+ id="reference-0"
size="small"
title="Copy SHA"
variant="default"
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/__snapshots__/terraform_installation_spec.js.snap b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/__snapshots__/terraform_installation_spec.js.snap
index 03236737572..30fe6544057 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/__snapshots__/terraform_installation_spec.js.snap
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/__snapshots__/terraform_installation_spec.js.snap
@@ -7,7 +7,6 @@ exports[`TerraformInstallation renders all the messages 1`] = `
>
Provision instructions
</h3>
-
<code-instruction-stub
copytext="Copy Terraform Command"
instruction="module \\"my_module_name\\" {
@@ -19,13 +18,11 @@ exports[`TerraformInstallation renders all the messages 1`] = `
trackingaction=""
trackinglabel=""
/>
-
<h3
class="gl-font-lg"
>
Registry setup
</h3>
-
<code-instruction-stub
copytext="Copy Terraform Setup Command"
instruction="credentials \\"bar.dev\\" {
@@ -36,7 +33,6 @@ exports[`TerraformInstallation renders all the messages 1`] = `
trackingaction=""
trackinglabel=""
/>
-
<gl-sprintf-stub
message="For more information on the Terraform registry, %{linkStart}see our documentation%{linkEnd}."
/>
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/__snapshots__/packages_list_app_spec.js.snap b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/__snapshots__/packages_list_app_spec.js.snap
index d0841c6110f..7f26ed778a5 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/__snapshots__/packages_list_app_spec.js.snap
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/__snapshots__/packages_list_app_spec.js.snap
@@ -6,12 +6,10 @@ exports[`packages_list_app renders 1`] = `
count="1"
helpurl="foo"
/>
-
<infrastructure-search-stub />
-
<div>
<section
- class="gl-display-flex empty-state gl-text-center gl-flex-direction-column"
+ class="empty-state gl-display-flex gl-flex-direction-column gl-text-center"
>
<div
class="gl-max-w-full"
@@ -21,15 +19,14 @@ exports[`packages_list_app renders 1`] = `
>
<img
alt=""
- class="gl-max-w-full gl-dark-invert-keep-hue"
+ class="gl-dark-invert-keep-hue gl-max-w-full"
role="img"
src="helpSvg"
/>
</div>
</div>
-
<div
- class="gl-max-w-full gl-m-auto"
+ class="gl-m-auto gl-max-w-full"
data-testid="gl-empty-state-content"
>
<div
@@ -38,15 +35,12 @@ exports[`packages_list_app renders 1`] = `
<h1
class="gl-font-size-h-display gl-line-height-36 h4"
>
-
- There are no packages yet
-
+ There are no packages yet
</h1>
-
<p
class="gl-mt-3"
>
- Learn how to
+ Learn how to
<b-link-stub
class="gl-link"
href="helpUrl"
@@ -54,16 +48,11 @@ exports[`packages_list_app renders 1`] = `
>
publish and share your packages
</b-link-stub>
- with GitLab.
+ with GitLab.
</p>
-
<div
class="gl-display-flex gl-flex-wrap gl-justify-content-center"
- >
- <!---->
-
- <!---->
- </div>
+ />
</div>
</div>
</section>
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/__snapshots__/package_list_row_spec.js.snap b/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/__snapshots__/package_list_row_spec.js.snap
index 250b33cbb14..edba81da1f5 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/__snapshots__/package_list_row_spec.js.snap
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/__snapshots__/package_list_row_spec.js.snap
@@ -2,28 +2,26 @@
exports[`packages_list_row renders 1`] = `
<div
- class="gl-display-flex gl-flex-direction-column gl-border-b-solid gl-border-t-solid gl-border-t-1 gl-border-b-1 gl-border-t-transparent gl-border-b-gray-100"
+ class="gl-border-b-1 gl-border-b-gray-100 gl-border-b-solid gl-border-t-1 gl-border-t-solid gl-border-t-transparent gl-display-flex gl-flex-direction-column"
data-testid="package-row"
>
<div
- class="gl-display-flex gl-align-items-center gl-py-3"
+ class="gl-align-items-center gl-display-flex gl-py-3"
>
- <!---->
-
<div
- class="gl-display-flex gl-xs-flex-direction-column gl-justify-content-space-between gl-align-items-stretch gl-flex-grow-1"
+ class="gl-align-items-stretch gl-display-flex gl-flex-grow-1 gl-justify-content-space-between gl-xs-flex-direction-column"
>
<div
- class="gl-display-flex gl-flex-direction-column gl-xs-mb-3 gl-min-w-0 gl-flex-grow-1"
+ class="gl-display-flex gl-flex-direction-column gl-flex-grow-1 gl-min-w-0 gl-xs-mb-3"
>
<div
- class="gl-display-flex gl-align-items-center gl-text-body gl-font-weight-bold gl-min-h-6 gl-min-w-0"
+ class="gl-align-items-center gl-display-flex gl-font-weight-bold gl-min-h-6 gl-min-w-0 gl-text-body"
>
<div
- class="gl-display-flex gl-align-items-center gl-mr-3 gl-min-w-0"
+ class="gl-align-items-center gl-display-flex gl-min-w-0 gl-mr-3"
>
<gl-link-stub
- class="gl-text-body gl-min-w-0"
+ class="gl-min-w-0 gl-text-body"
data-testid="details-link"
href="foo"
>
@@ -32,17 +30,10 @@ exports[`packages_list_row renders 1`] = `
text="Test package"
/>
</gl-link-stub>
-
- <!---->
-
- <!---->
</div>
-
- <!---->
</div>
-
<div
- class="gl-display-flex gl-align-items-center gl-text-gray-500 gl-min-h-6 gl-min-w-0 gl-flex-grow-1"
+ class="gl-align-items-center gl-display-flex gl-flex-grow-1 gl-min-h-6 gl-min-w-0 gl-text-gray-500"
>
<div
class="gl-display-flex"
@@ -50,31 +41,25 @@ exports[`packages_list_row renders 1`] = `
<span>
1.0.0
</span>
-
- <!---->
-
<div />
-
<package-path-stub
path="foo/bar/baz"
/>
</div>
</div>
</div>
-
<div
- class="gl-display-flex gl-flex-direction-column gl-sm-align-items-flex-end gl-justify-content-space-between gl-text-gray-500 gl-flex-shrink-0"
+ class="gl-display-flex gl-flex-direction-column gl-flex-shrink-0 gl-justify-content-space-between gl-sm-align-items-flex-end gl-text-gray-500"
>
<div
- class="gl-display-flex gl-align-items-center gl-sm-text-body gl-sm-font-weight-bold gl-min-h-6"
+ class="gl-align-items-center gl-display-flex gl-min-h-6 gl-sm-font-weight-bold gl-sm-text-body"
>
<publish-method-stub
packageentity="[object Object]"
/>
</div>
-
<div
- class="gl-display-flex gl-align-items-center gl-min-h-6"
+ class="gl-align-items-center gl-display-flex gl-min-h-6"
>
<span>
<gl-sprintf-stub
@@ -84,9 +69,8 @@ exports[`packages_list_row renders 1`] = `
</div>
</div>
</div>
-
<div
- class="gl-w-9 gl-display-flex gl-justify-content-end gl-pr-1"
+ class="gl-display-flex gl-justify-content-end gl-pr-1 gl-w-9"
>
<gl-button-stub
aria-label="Remove package"
@@ -100,7 +84,5 @@ exports[`packages_list_row renders 1`] = `
/>
</div>
</div>
-
- <!---->
</div>
`;
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/conan_installation_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/conan_installation_spec.js.snap
index b3d0d88be4d..cfdaebd889d 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/conan_installation_spec.js.snap
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/conan_installation_spec.js.snap
@@ -6,7 +6,6 @@ exports[`ConanInstallation renders all the messages 1`] = `
options="[object Object]"
packagetype="conan"
/>
-
<code-instruction-stub
copytext="Copy Conan Command"
instruction="conan install @gitlab-org/package-15 --remote=gitlab"
@@ -14,13 +13,11 @@ exports[`ConanInstallation renders all the messages 1`] = `
trackingaction="copy_conan_command"
trackinglabel="code_instruction"
/>
-
<h3
class="gl-font-lg"
>
Registry setup
</h3>
-
<code-instruction-stub
copytext="Copy Conan Setup Command"
instruction="conan remote add gitlab http://gdk.test:3000/api/v4/projects/1/packages/conan"
@@ -28,7 +25,7 @@ exports[`ConanInstallation renders all the messages 1`] = `
trackingaction="copy_conan_setup_command"
trackinglabel="code_instruction"
/>
- For more information on the Conan registry,
+ For more information on the Conan registry,
<gl-link-stub
href="/help/user/packages/conan_repository/index"
target="_blank"
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/dependency_row_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/dependency_row_spec.js.snap
index f83df7b11f4..37401786d21 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/dependency_row_spec.js.snap
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/dependency_row_spec.js.snap
@@ -5,25 +5,21 @@ exports[`DependencyRow renders full dependency 1`] = `
class="gl-responsive-table-row"
>
<div
- class="table-section section-50"
+ class="section-50 table-section"
>
<strong
class="gl-text-body"
>
Ninject.Extensions.Factory
</strong>
-
<span
data-testid="target-framework"
>
-
(.NETCoreApp3.1)
-
</span>
</div>
-
<div
- class="table-section section-50 gl-display-flex gl-md-justify-content-end"
+ class="gl-display-flex gl-md-justify-content-end section-50 table-section"
data-testid="version-pattern"
>
<span
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/file_sha_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/file_sha_spec.js.snap
index f95564e3fad..8e757c136ec 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/file_sha_spec.js.snap
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/file_sha_spec.js.snap
@@ -2,18 +2,13 @@
exports[`FileSha renders 1`] = `
<div
- class="gl-display-flex gl-align-items-center gl-font-monospace gl-font-sm gl-word-break-all gl-py-2 gl-border-b-solid gl-border-gray-100 gl-border-b-1"
+ class="gl-align-items-center gl-border-b-1 gl-border-b-solid gl-border-gray-100 gl-display-flex gl-font-monospace gl-font-sm gl-py-2 gl-word-break-all"
>
- <!---->
-
<span>
<div
class="gl-px-4"
>
-
- bar:
- foo
-
+ bar: foo
<gl-button-stub
aria-label="Copy SHA"
aria-live="polite"
@@ -22,7 +17,7 @@ exports[`FileSha renders 1`] = `
data-clipboard-handle-tooltip="false"
data-clipboard-text="foo"
icon="copy-to-clipboard"
- id="clipboard-button-1"
+ id="reference-0"
size="small"
title="Copy SHA"
variant="default"
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/maven_installation_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/maven_installation_spec.js.snap
index 9b429c39faa..23cdf4864de 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/maven_installation_spec.js.snap
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/maven_installation_spec.js.snap
@@ -6,7 +6,6 @@ exports[`MavenInstallation groovy renders all the messages 1`] = `
options="[object Object],[object Object],[object Object]"
packagetype="maven"
/>
-
<code-instruction-stub
class="gl-mb-5"
copytext="Copy Gradle Groovy DSL install command"
@@ -15,7 +14,6 @@ exports[`MavenInstallation groovy renders all the messages 1`] = `
trackingaction="copy_gradle_install_command"
trackinglabel="code_instruction"
/>
-
<code-instruction-stub
copytext="Copy add Gradle Groovy DSL repository command"
instruction="maven {
@@ -35,7 +33,6 @@ exports[`MavenInstallation kotlin renders all the messages 1`] = `
options="[object Object],[object Object],[object Object]"
packagetype="maven"
/>
-
<code-instruction-stub
class="gl-mb-5"
copytext="Copy Gradle Kotlin DSL install command"
@@ -44,7 +41,6 @@ exports[`MavenInstallation kotlin renders all the messages 1`] = `
trackingaction="copy_kotlin_install_command"
trackinglabel="code_instruction"
/>
-
<code-instruction-stub
copytext="Copy add Gradle Kotlin DSL repository command"
instruction="maven(\\"http://gdk.test:3000/api/v4/projects/1/packages/maven\\")"
@@ -62,81 +58,72 @@ exports[`MavenInstallation maven renders all the messages 1`] = `
options="[object Object],[object Object],[object Object]"
packagetype="maven"
/>
-
<p>
- Copy and paste this inside your
+ Copy and paste this inside your
<code>
pom.xml
</code>
-
<code>
dependencies
</code>
- block.
+ block.
</p>
-
<code-instruction-stub
copytext="Copy Maven XML"
- instruction="<dependency>
- <groupId>appGroup</groupId>
- <artifactId>appName</artifactId>
- <version>appVersion</version>
-</dependency>"
+ instruction=<dependency>
+ <groupid>
+ appGroup
+ </groupid>
+ <artifactid>
+ appName
+ </artifactid>
+ <version>
+ appVersion
+ </version>
+ </dependency>
label=""
multiline="true"
trackingaction="copy_maven_xml"
trackinglabel="code_instruction"
/>
-
<code-instruction-stub
- class="gl-w-20 gl-mt-5"
+ class="gl-mt-5 gl-w-20"
copytext="Copy Maven command"
instruction="mvn install"
label="Maven Command"
trackingaction="copy_maven_command"
trackinglabel="code_instruction"
/>
-
<h3
class="gl-font-lg"
>
Registry setup
</h3>
-
<p>
- If you haven't already done so, you will need to add the below to your
+ If you haven't already done so, you will need to add the below to your
<code>
pom.xml
</code>
- file.
+ file.
</p>
-
<code-instruction-stub
copytext="Copy Maven registry XML"
- instruction="<repositories>
- <repository>
- <id>gitlab-maven</id>
- <url>http://gdk.test:3000/api/v4/projects/1/packages/maven</url>
- </repository>
-</repositories>
-
-<distributionManagement>
- <repository>
- <id>gitlab-maven</id>
- <url>http://gdk.test:3000/api/v4/projects/1/packages/maven</url>
- </repository>
-
- <snapshotRepository>
- <id>gitlab-maven</id>
- <url>http://gdk.test:3000/api/v4/projects/1/packages/maven</url>
- </snapshotRepository>
-</distributionManagement>"
+ instruction=<repositories>
+ <repository>
+ <id>
+ gitlab-maven
+ </id>
+ <url>
+ http://gdk.test:3000/api/v4/projects/1/packages/maven
+ </url>
+ </repository>
+ </repositories>
label=""
multiline="true"
trackingaction="copy_maven_setup_xml"
trackinglabel="code_instruction"
/>
- For more information on the Maven registry,
+ For more information on the Maven registry,
<gl-link-stub
href="/help/user/packages/maven_repository/index"
target="_blank"
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/npm_installation_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/npm_installation_spec.js.snap
index 4520ae9c328..7e36bfb5dc0 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/npm_installation_spec.js.snap
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/npm_installation_spec.js.snap
@@ -6,7 +6,6 @@ exports[`NpmInstallation renders all the messages 1`] = `
options="[object Object],[object Object]"
packagetype="npm"
/>
-
<code-instruction-stub
copytext="Copy npm command"
instruction="npm i @gitlab-org/package-15"
@@ -14,13 +13,11 @@ exports[`NpmInstallation renders all the messages 1`] = `
trackingaction="copy_npm_install_command"
trackinglabel="code_instruction"
/>
-
<h3
class="gl-font-lg"
>
Registry setup
</h3>
-
<gl-form-radio-group-stub
checked="instance"
disabledfield="disabled"
@@ -29,7 +26,6 @@ exports[`NpmInstallation renders all the messages 1`] = `
textfield="text"
valuefield="value"
/>
-
<code-instruction-stub
copytext="Copy npm setup command"
instruction="echo @gitlab-org:registry=npmInstanceUrl/ >> .npmrc"
@@ -37,13 +33,13 @@ exports[`NpmInstallation renders all the messages 1`] = `
trackingaction="copy_npm_setup_command"
trackinglabel="code_instruction"
/>
- You may also need to setup authentication using an auth token.
+ You may also need to setup authentication using an auth token.
<gl-link-stub
href="/help/user/packages/npm_registry/index"
target="_blank"
>
See the documentation
</gl-link-stub>
- to find out more.
+ to find out more.
</div>
`;
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/nuget_installation_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/nuget_installation_spec.js.snap
index 92930a6309a..554d4e08523 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/nuget_installation_spec.js.snap
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/nuget_installation_spec.js.snap
@@ -6,7 +6,6 @@ exports[`NugetInstallation renders all the messages 1`] = `
options="[object Object]"
packagetype="nuget"
/>
-
<code-instruction-stub
copytext="Copy NuGet Command"
instruction="nuget install @gitlab-org/package-15 -Source \\"GitLab\\""
@@ -14,13 +13,11 @@ exports[`NugetInstallation renders all the messages 1`] = `
trackingaction="copy_nuget_install_command"
trackinglabel="code_instruction"
/>
-
<h3
class="gl-font-lg"
>
Registry setup
</h3>
-
<code-instruction-stub
copytext="Copy NuGet Setup Command"
instruction="nuget source Add -Name \\"GitLab\\" -Source \\"http://gdk.test:3000/api/v4/projects/1/packages/nuget/index.json\\" -UserName <your_username> -Password <your_token>"
@@ -28,7 +25,7 @@ exports[`NugetInstallation renders all the messages 1`] = `
trackingaction="copy_nuget_setup_command"
trackinglabel="code_instruction"
/>
- For more information on the NuGet registry,
+ For more information on the NuGet registry,
<gl-link-stub
href="/help/user/packages/nuget_repository/index"
target="_blank"
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap
index 99ee6ce01b2..05a5a718e52 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap
@@ -3,45 +3,37 @@
exports[`PypiInstallation renders all the messages 1`] = `
<div>
<div
- class="gl-display-flex gl-justify-content-space-between gl-align-items-center"
+ class="gl-align-items-center gl-display-flex gl-justify-content-space-between"
>
<h3
class="gl-font-lg"
>
Installation
</h3>
-
<div>
<div
class="gl-new-dropdown"
>
<button
- aria-controls="base-dropdown-10"
+ aria-controls="reference-1"
aria-haspopup="listbox"
- aria-labelledby="dropdown-toggle-btn-8"
+ aria-labelledby="reference-0"
class="btn btn-default btn-md gl-button gl-new-dropdown-toggle"
data-testid="base-dropdown-toggle"
- id="dropdown-toggle-btn-8"
+ id="reference-0"
type="button"
>
- <!---->
-
- <!---->
-
<span
class="gl-button-text"
>
<span
class="gl-new-dropdown-button-text"
>
-
- Show PyPi commands
-
+ Show PyPi commands
</span>
-
<svg
aria-hidden="true"
- class="gl-button-icon gl-new-dropdown-chevron gl-icon s16"
+ class="gl-button-icon gl-icon gl-new-dropdown-chevron s16"
data-testid="chevron-down-icon"
role="img"
>
@@ -51,24 +43,18 @@ exports[`PypiInstallation renders all the messages 1`] = `
</svg>
</span>
</button>
-
<div
class="gl-new-dropdown-panel gl-w-31!"
data-testid="base-dropdown-menu"
- id="base-dropdown-10"
+ id="reference-1"
>
<div
class="gl-new-dropdown-inner"
>
-
- <!---->
-
- <!---->
-
<ul
- aria-labelledby="dropdown-toggle-btn-8"
- class="gl-new-dropdown-contents gl-new-dropdown-contents-with-scrim-overlay gl-new-dropdown-contents"
- id="listbox-9"
+ aria-labelledby="reference-0"
+ class="gl-new-dropdown-contents gl-new-dropdown-contents-with-scrim-overlay"
+ id="reference-2"
role="listbox"
tabindex="-1"
>
@@ -81,11 +67,9 @@ exports[`PypiInstallation renders all the messages 1`] = `
class="top-scrim top-scrim-light"
/>
</li>
-
<li
aria-hidden="true"
/>
-
<li
aria-selected="true"
class="gl-new-dropdown-item"
@@ -94,11 +78,11 @@ exports[`PypiInstallation renders all the messages 1`] = `
tabindex="-1"
>
<span
- class="gl-new-dropdown-item-content gl-bg-gray-50!"
+ class="gl-bg-gray-50! gl-new-dropdown-item-content"
>
<svg
aria-hidden="true"
- class="gl-icon s16 gl-new-dropdown-item-check-icon gl-mt-3 gl-align-self-start"
+ class="gl-align-self-start gl-icon gl-mt-3 gl-new-dropdown-item-check-icon s16"
data-testid="dropdown-item-checkbox"
role="img"
>
@@ -106,25 +90,16 @@ exports[`PypiInstallation renders all the messages 1`] = `
href="file-mock#mobile-issue-close"
/>
</svg>
-
<span
class="gl-new-dropdown-item-text-wrapper"
>
-
- Show PyPi commands
-
+ Show PyPi commands
</span>
</span>
</li>
-
- <!---->
-
- <!---->
-
<li
aria-hidden="true"
/>
-
<li
aria-hidden="true"
class="bottom-scrim-wrapper"
@@ -135,56 +110,43 @@ exports[`PypiInstallation renders all the messages 1`] = `
/>
</li>
</ul>
-
- <!---->
-
</div>
</div>
</div>
</div>
</div>
-
<fieldset
class="form-group gl-form-group"
- id="installation-pip-command-group"
+ id="reference-3"
>
<legend
- class="bv-no-focus-ring col-form-label pt-0 col-form-label"
- id="installation-pip-command-group__BV_label_"
+ class="bv-no-focus-ring col-form-label pt-0"
+ id="reference-4"
tabindex="-1"
- >
-
-
-
- <!---->
-
- <!---->
- </legend>
+ />
<div>
<div
data-testid="pip-command"
- id="installation-pip-command"
+ id="reference-5"
>
<label
- for="instruction-input_11"
+ for="reference-6"
>
Pip Command
</label>
-
<div
class="gl-mb-3"
>
<div
- class="input-group gl-mb-3"
+ class="gl-mb-3 input-group"
>
<input
class="form-control gl-font-monospace"
data-testid="instruction-input"
- id="instruction-input_11"
- readonly="readonly"
+ id="reference-6"
+ readonly=""
type="text"
/>
-
<span
class="input-group-append"
data-testid="instruction-button"
@@ -192,15 +154,13 @@ exports[`PypiInstallation renders all the messages 1`] = `
<button
aria-label="Copy Pip command"
aria-live="polite"
- class="btn input-group-text btn-default btn-md gl-button btn-default-secondary btn-icon"
+ class="btn btn-default btn-default-secondary btn-icon btn-md gl-button input-group-text"
data-clipboard-handle-tooltip="false"
data-clipboard-text="pip install @gitlab-org/package-15 --index-url http://__token__:<your_personal_token>@gdk.test:3000/api/v4/projects/1/packages/pypi/simple"
- id="clipboard-button-12"
+ id="reference-7"
title="Copy Pip command"
type="button"
>
- <!---->
-
<svg
aria-hidden="true"
class="gl-button-icon gl-icon s16"
@@ -211,21 +171,17 @@ exports[`PypiInstallation renders all the messages 1`] = `
href="file-mock#copy-to-clipboard"
/>
</svg>
-
- <!---->
</button>
</span>
</div>
</div>
</div>
- <!---->
- <!---->
<small
class="form-text text-muted"
- id="installation-pip-command-group__BV_description_"
+ id="reference-8"
tabindex="-1"
>
- You will need a
+ You will need a
<a
class="gl-link"
data-testid="access-token-link"
@@ -237,39 +193,31 @@ exports[`PypiInstallation renders all the messages 1`] = `
</small>
</div>
</fieldset>
-
<h3
class="gl-font-lg"
>
Registry setup
</h3>
-
<p>
- If you haven't already done so, you will need to add the below to your
+ If you haven't already done so, you will need to add the below to your
<code>
.pypirc
</code>
- file.
+ file.
</p>
-
<div
data-testid="pypi-setup-content"
>
- <!---->
-
<div>
<pre
class="gl-font-monospace"
data-testid="multiline-instruction"
>
- [gitlab]
-repository = http://gdk.test:3000/api/v4/projects/1/packages/pypi
-username = __token__
-password = &lt;your personal access token&gt;
+ [gitlab]repository = http://gdk.test:3000/api/v4/projects/1/packages/pypiusername = __token__password = &lt;your personal access token&gt;
</pre>
</div>
</div>
- For more information on the PyPi registry,
+ For more information on the PyPi registry,
<a
class="gl-link"
data-testid="pypi-docs-link"
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap
index e0e6c101029..40fcd290b33 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap
@@ -2,36 +2,35 @@
exports[`packages_list_row renders 1`] = `
<div
- class="gl-display-flex gl-flex-direction-column gl-border-b-solid gl-border-t-solid gl-border-t-1 gl-border-b-1 gl-border-t-transparent gl-border-b-gray-100"
+ class="gl-border-b-1 gl-border-b-gray-100 gl-border-b-solid gl-border-t-1 gl-border-t-solid gl-border-t-transparent gl-display-flex gl-flex-direction-column"
data-testid="package-row"
>
<div
- class="gl-display-flex gl-align-items-center gl-py-3"
+ class="gl-align-items-center gl-display-flex gl-py-3"
>
<div
- class="gl-w-7 gl-display-flex gl-justify-content-start gl-pl-2"
+ class="gl-display-flex gl-justify-content-start gl-pl-2 gl-w-7"
>
<gl-form-checkbox-stub
class="gl-m-0"
- id="2"
+ id="reference-0"
/>
</div>
-
<div
- class="gl-display-flex gl-xs-flex-direction-column gl-justify-content-space-between gl-align-items-stretch gl-flex-grow-1"
+ class="gl-align-items-stretch gl-display-flex gl-flex-grow-1 gl-justify-content-space-between gl-xs-flex-direction-column"
>
<div
- class="gl-display-flex gl-flex-direction-column gl-xs-mb-3 gl-min-w-0 gl-flex-grow-1"
+ class="gl-display-flex gl-flex-direction-column gl-flex-grow-1 gl-min-w-0 gl-xs-mb-3"
>
<div
- class="gl-display-flex gl-align-items-center gl-text-body gl-font-weight-bold gl-min-h-6 gl-min-w-0"
+ class="gl-align-items-center gl-display-flex gl-font-weight-bold gl-min-h-6 gl-min-w-0 gl-text-body"
>
<div
- class="gl-display-flex gl-align-items-center gl-mr-3 gl-min-w-0"
+ class="gl-align-items-center gl-display-flex gl-min-w-0 gl-mr-3"
>
<router-link-stub
ariacurrentvalue="page"
- class="gl-text-body gl-min-w-0"
+ class="gl-min-w-0 gl-text-body"
data-testid="details-link"
event="click"
tag="a"
@@ -42,18 +41,13 @@ exports[`packages_list_row renders 1`] = `
text="@gitlab-org/package-15"
/>
</router-link-stub>
-
- <!---->
</div>
-
- <!---->
</div>
-
<div
- class="gl-display-flex gl-align-items-center gl-text-gray-500 gl-min-h-6 gl-min-w-0 gl-flex-grow-1"
+ class="gl-align-items-center gl-display-flex gl-flex-grow-1 gl-min-h-6 gl-min-w-0 gl-text-gray-500"
>
<div
- class="gl-display-flex gl-align-items-center"
+ class="gl-align-items-center gl-display-flex"
data-testid="left-secondary-infos"
>
<gl-truncate-stub
@@ -62,7 +56,6 @@ exports[`packages_list_row renders 1`] = `
text="1.0.0"
withtooltip="true"
/>
-
<span
class="gl-ml-2"
data-testid="package-type"
@@ -72,25 +65,22 @@ exports[`packages_list_row renders 1`] = `
</div>
</div>
</div>
-
<div
- class="gl-display-flex gl-flex-direction-column gl-sm-align-items-flex-end gl-justify-content-space-between gl-text-gray-500 gl-flex-shrink-0"
+ class="gl-display-flex gl-flex-direction-column gl-flex-shrink-0 gl-justify-content-space-between gl-sm-align-items-flex-end gl-text-gray-500"
>
<div
- class="gl-display-flex gl-align-items-center gl-sm-text-body gl-sm-font-weight-bold gl-min-h-6"
+ class="gl-align-items-center gl-display-flex gl-min-h-6 gl-sm-font-weight-bold gl-sm-text-body"
>
<publish-method-stub />
</div>
-
<div
- class="gl-display-flex gl-align-items-center gl-min-h-6"
+ class="gl-align-items-center gl-display-flex gl-min-h-6"
>
<span
data-testid="right-secondary"
>
- Published
+ Published
<time
- class=""
datetime="2020-05-17T14:23:32Z"
title="May 17, 2020 2:23pm UTC"
>
@@ -100,9 +90,8 @@ exports[`packages_list_row renders 1`] = `
</div>
</div>
</div>
-
<div
- class="gl-w-9 gl-display-flex gl-justify-content-end gl-pr-1"
+ class="gl-display-flex gl-justify-content-end gl-pr-1 gl-w-9"
>
<gl-disclosure-dropdown-stub
autoclose="true"
@@ -125,15 +114,11 @@ exports[`packages_list_row renders 1`] = `
<span
class="gl-text-red-500"
>
-
Delete package
-
</span>
</gl-disclosure-dropdown-item-stub>
</gl-disclosure-dropdown-stub>
</div>
</div>
-
- <!---->
</div>
`;
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/publish_method_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/publish_method_spec.js.snap
index 4407c4a2003..f202635d717 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/publish_method_spec.js.snap
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/publish_method_spec.js.snap
@@ -2,27 +2,24 @@
exports[`publish_method renders 1`] = `
<div
- class="gl-display-flex gl-align-items-center"
+ class="gl-align-items-center gl-display-flex"
>
<gl-icon-stub
class="gl-mr-2"
name="git-merge"
size="16"
/>
-
<span
class="gl-mr-2"
data-testid="pipeline-ref"
>
master
</span>
-
<gl-icon-stub
class="gl-mr-2"
name="commit"
size="16"
/>
-
<gl-link-stub
class="gl-mr-2"
data-testid="pipeline-sha"
@@ -30,7 +27,6 @@ exports[`publish_method renders 1`] = `
>
b83d6e39
</gl-link-stub>
-
<clipboard-button-stub
category="tertiary"
size="small"
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js b/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js
index fad8863e3d9..acf8b718400 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js
@@ -37,7 +37,6 @@ describe('packages_list', () => {
const defaultProps = {
list: [firstPackage, secondPackage],
isLoading: false,
- pageInfo: {},
groupSettings: defaultPackageGroupSettings,
};
@@ -113,7 +112,6 @@ describe('packages_list', () => {
expect(findRegistryList().props()).toMatchObject({
title: '2 packages',
items: defaultProps.list,
- pagination: defaultProps.pageInfo,
hiddenDelete: false,
isLoading: false,
});
@@ -314,22 +312,4 @@ describe('packages_list', () => {
expect(emptySlot.exists()).toBe(true);
});
});
-
- describe('pagination', () => {
- beforeEach(() => {
- mountComponent({ props: { pageInfo: { hasPreviousPage: true } } });
- });
-
- it('emits prev-page events when the prev event is fired', () => {
- findRegistryList().vm.$emit('prev-page');
-
- expect(wrapper.emitted('prev-page')).toHaveLength(1);
- });
-
- it('emits next-page events when the next event is fired', () => {
- findRegistryList().vm.$emit('next-page');
-
- expect(wrapper.emitted('next-page')).toHaveLength(1);
- });
- });
});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js b/spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js
index 82fa5b76367..f4e36f51c27 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js
@@ -3,19 +3,12 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { sortableFields } from '~/packages_and_registries/package_registry/utils';
import component from '~/packages_and_registries/package_registry/components/list/package_search.vue';
import PackageTypeToken from '~/packages_and_registries/package_registry/components/list/tokens/package_type_token.vue';
-import RegistrySearch from '~/vue_shared/components/registry/registry_search.vue';
-import UrlSync from '~/vue_shared/components/url_sync.vue';
import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
-import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
+import PersistedSearch from '~/packages_and_registries/shared/components/persisted_search.vue';
import { LIST_KEY_CREATED_AT } from '~/packages_and_registries/package_registry/constants';
-import { getQueryParams, extractFilterAndSorting } from '~/packages_and_registries/shared/utils';
import { TOKEN_TYPE_TYPE } from '~/vue_shared/components/filtered_search_bar/constants';
-jest.mock('~/packages_and_registries/shared/utils');
-
-useMockLocationHelper();
-
describe('Package Search', () => {
let wrapper;
@@ -24,8 +17,7 @@ describe('Package Search', () => {
sorting: { sort: 'desc' },
};
- const findRegistrySearch = () => wrapper.findComponent(RegistrySearch);
- const findUrlSync = () => wrapper.findComponent(UrlSync);
+ const findPersistedSearch = () => wrapper.findComponent(PersistedSearch);
const findLocalStorageSync = () => wrapper.findComponent(LocalStorageSync);
const mountComponent = (isGroupPage = false) => {
@@ -36,34 +28,23 @@ describe('Package Search', () => {
};
},
stubs: {
- UrlSync,
LocalStorageSync,
},
});
};
- beforeEach(() => {
- extractFilterAndSorting.mockReturnValue(defaultQueryParamsMock);
- });
-
it('has a registry search component', async () => {
mountComponent();
await nextTick();
- expect(findRegistrySearch().exists()).toBe(true);
+ expect(findPersistedSearch().exists()).toBe(true);
});
it('registry search is mounted after mount', () => {
mountComponent();
- expect(findRegistrySearch().exists()).toBe(false);
- });
-
- it('has a UrlSync component', () => {
- mountComponent();
-
- expect(findUrlSync().exists()).toBe(true);
+ expect(findPersistedSearch().exists()).toBe(false);
});
it('has a LocalStorageSync component', () => {
@@ -87,7 +68,7 @@ describe('Package Search', () => {
await nextTick();
- expect(findRegistrySearch().props()).toMatchObject({
+ expect(findPersistedSearch().props()).toMatchObject({
tokens: expect.arrayContaining([
expect.objectContaining({
token: PackageTypeToken,
@@ -99,85 +80,63 @@ describe('Package Search', () => {
});
});
- it('on sorting:changed emits update event and update internal sort', async () => {
- const payload = { sort: 'foo' };
+ it('on update event re-emits update event and updates internal sort', async () => {
+ const payload = {
+ sort: 'CREATED_FOO',
+ filters: defaultQueryParamsMock.filters,
+ sorting: { sort: 'foo', orderBy: 'created_at' },
+ };
mountComponent();
await nextTick();
- findRegistrySearch().vm.$emit('sorting:changed', payload);
+ findPersistedSearch().vm.$emit('update', payload);
await nextTick();
- expect(findRegistrySearch().props('sorting')).toEqual({ sort: 'foo', orderBy: 'created_at' });
+ expect(findLocalStorageSync().props('value')).toEqual({ sort: 'foo', orderBy: 'created_at' });
- // there is always a first call on mounted that emits up default values
- expect(wrapper.emitted('update')[1]).toEqual([
+ expect(wrapper.emitted('update')[0]).toEqual([
{
filters: {
packageName: '',
packageType: undefined,
},
- sort: 'CREATED_FOO',
+ sort: payload.sort,
+ sorting: payload.sorting,
},
]);
});
- it('on filter:changed updates the filters', async () => {
- const payload = ['foo'];
+ it('on update event, re-emits update event with formatted filters', async () => {
+ const payload = {
+ sort: 'CREATED_FOO',
+ filters: [
+ { type: 'type', value: { data: 'Generic', operator: '=' }, id: 'token-3' },
+ { id: 'token-4', type: 'filtered-search-term', value: { data: 'gl' } },
+ { id: 'token-5', type: 'filtered-search-term', value: { data: '' } },
+ ],
+ sorting: { sort: 'foo', orderBy: 'created_at' },
+ };
mountComponent();
await nextTick();
- findRegistrySearch().vm.$emit('filter:changed', payload);
+ findPersistedSearch().vm.$emit('update', payload);
await nextTick();
- expect(findRegistrySearch().props('filters')).toEqual(['foo']);
- });
-
- it('on filter:submit emits update event', async () => {
- mountComponent();
-
- await nextTick();
-
- findRegistrySearch().vm.$emit('filter:submit');
-
- expect(wrapper.emitted('update')[1]).toEqual([
+ expect(wrapper.emitted('update')[0]).toEqual([
{
filters: {
- packageName: '',
- packageType: undefined,
+ packageName: 'gl',
+ packageType: 'GENERIC',
},
- sort: 'CREATED_DESC',
+ sort: payload.sort,
+ sorting: payload.sorting,
},
]);
});
-
- it('on query:changed calls updateQuery from UrlSync', async () => {
- jest.spyOn(UrlSync.methods, 'updateQuery').mockImplementation(() => {});
-
- mountComponent();
-
- await nextTick();
-
- findRegistrySearch().vm.$emit('query:changed');
-
- expect(UrlSync.methods.updateQuery).toHaveBeenCalled();
- });
-
- it('sets the component sorting and filtering based on the querystring', async () => {
- mountComponent();
-
- await nextTick();
-
- expect(getQueryParams).toHaveBeenCalled();
-
- expect(findRegistrySearch().props()).toMatchObject({
- filters: defaultQueryParamsMock.filters,
- sorting: defaultQueryParamsMock.sorting,
- });
- });
});
diff --git a/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js b/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js
index 0d262036ee7..0ce2b86b9a4 100644
--- a/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js
@@ -17,7 +17,7 @@ import {
EMPTY_LIST_HELP_URL,
PACKAGE_HELP_URL,
} from '~/packages_and_registries/package_registry/constants';
-
+import PersistedPagination from '~/packages_and_registries/shared/components/persisted_pagination.vue';
import getPackagesQuery from '~/packages_and_registries/package_registry/graphql/queries/get_packages.query.graphql';
import destroyPackagesMutation from '~/packages_and_registries/package_registry/graphql/mutations/destroy_packages.mutation.graphql';
import { packagesListQuery, packageData, pagination } from '../mock_data';
@@ -53,6 +53,7 @@ describe('PackagesListApp', () => {
const findEmptyState = () => wrapper.findComponent(GlEmptyState);
const findDeletePackages = () => wrapper.findComponent(DeletePackages);
const findSettingsLink = () => wrapper.findComponent(GlButton);
+ const findPagination = () => wrapper.findComponent(PersistedPagination);
const mountComponent = ({
resolver = jest.fn().mockResolvedValue(packagesListQuery()),
@@ -99,6 +100,15 @@ describe('PackagesListApp', () => {
expect(resolver).not.toHaveBeenCalled();
});
+ it('has persisted pagination', async () => {
+ const resolver = jest.fn().mockResolvedValue(packagesListQuery());
+
+ mountComponent({ resolver });
+ await waitForFirstRequest();
+
+ expect(findPagination().props('pagination')).toEqual(pagination());
+ });
+
it('has a package title', async () => {
mountComponent();
@@ -194,7 +204,6 @@ describe('PackagesListApp', () => {
expect(findListComponent().props()).toMatchObject({
list: expect.arrayContaining([expect.objectContaining({ id: packageData().id })]),
isLoading: false,
- pageInfo: expect.objectContaining({ endCursor: pagination().endCursor }),
groupSettings: expect.objectContaining({
mavenPackageRequestsForwarding: true,
npmPackageRequestsForwarding: true,
@@ -203,9 +212,9 @@ describe('PackagesListApp', () => {
});
});
- it('when list emits next-page fetches the next set of records', async () => {
+ it('when pagination emits next event fetches the next set of records', async () => {
await waitForFirstRequest();
- findListComponent().vm.$emit('next-page');
+ findPagination().vm.$emit('next');
await waitForPromises();
expect(resolver).toHaveBeenCalledWith(
@@ -213,9 +222,9 @@ describe('PackagesListApp', () => {
);
});
- it('when list emits prev-page fetches the prev set of records', async () => {
+ it('when pagination emits prev event fetches the prev set of records', async () => {
await waitForFirstRequest();
- findListComponent().vm.$emit('prev-page');
+ findPagination().vm.$emit('prev');
await waitForPromises();
expect(resolver).toHaveBeenCalledWith(
diff --git a/spec/frontend/packages_and_registries/package_registry/utils_spec.js b/spec/frontend/packages_and_registries/package_registry/utils_spec.js
index 019f94aaec2..ecb5a8a77f1 100644
--- a/spec/frontend/packages_and_registries/package_registry/utils_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/utils_spec.js
@@ -1,4 +1,9 @@
-import { getPackageTypeLabel } from '~/packages_and_registries/package_registry/utils';
+import {
+ getPackageTypeLabel,
+ getNextPageParams,
+ getPreviousPageParams,
+ getPageParams,
+} from '~/packages_and_registries/package_registry/utils';
describe('Packages shared utils', () => {
describe('getPackageTypeLabel', () => {
@@ -21,3 +26,48 @@ describe('Packages shared utils', () => {
});
});
});
+
+describe('getNextPageParams', () => {
+ it('should return the next page params with the provided cursor', () => {
+ const cursor = 'abc123';
+ expect(getNextPageParams(cursor)).toEqual({
+ after: cursor,
+ first: 20,
+ });
+ });
+});
+
+describe('getPreviousPageParams', () => {
+ it('should return the previous page params with the provided cursor', () => {
+ const cursor = 'abc123';
+ expect(getPreviousPageParams(cursor)).toEqual({
+ first: null,
+ before: cursor,
+ last: 20,
+ });
+ });
+});
+
+describe('getPageParams', () => {
+ it('should return the previous page params if before cursor is available', () => {
+ const pageInfo = { before: 'abc123' };
+ expect(getPageParams(pageInfo)).toEqual({
+ first: null,
+ before: pageInfo.before,
+ last: 20,
+ });
+ });
+
+ it('should return the next page params if after cursor is available', () => {
+ const pageInfo = { after: 'abc123' };
+ expect(getPageParams(pageInfo)).toEqual({
+ after: pageInfo.after,
+ first: 20,
+ });
+ });
+
+ it('should return an empty object if both before and after cursors are not available', () => {
+ const pageInfo = {};
+ expect(getPageParams(pageInfo)).toEqual({});
+ });
+});
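These cursor helpers mirror the dependency proxy utility sketched earlier; getPageParams would simply dispatch to them. A hedged sketch of the two helpers, with the page size again inferred from the expectations rather than the source:

    const PAGE_SIZE = 20; // inferred from the expected objects

    // Forward paging after a cursor.
    const getNextPageParams = (cursor) => ({ after: cursor, first: PAGE_SIZE });

    // Backward paging before a cursor; `first` is explicitly cleared.
    const getPreviousPageParams = (cursor) => ({ first: null, before: cursor, last: PAGE_SIZE });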
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/container_expiration_policy_form_spec.js.snap b/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/container_expiration_policy_form_spec.js.snap
index 5d08574234c..d3298984f9d 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/container_expiration_policy_form_spec.js.snap
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/container_expiration_policy_form_spec.js.snap
@@ -2,7 +2,7 @@
exports[`Container Expiration Policy Settings Form Cadence matches snapshot 1`] = `
<expiration-dropdown-stub
- class="gl-mr-7 gl-mb-0!"
+ class="gl-mb-0! gl-mr-7"
data-testid="cadence-dropdown"
description=""
dropdownclass=""
diff --git a/spec/frontend/packages_and_registries/shared/components/__snapshots__/publish_method_spec.js.snap b/spec/frontend/packages_and_registries/shared/components/__snapshots__/publish_method_spec.js.snap
index 5f243799bae..084e4a2b2f3 100644
--- a/spec/frontend/packages_and_registries/shared/components/__snapshots__/publish_method_spec.js.snap
+++ b/spec/frontend/packages_and_registries/shared/components/__snapshots__/publish_method_spec.js.snap
@@ -2,27 +2,24 @@
exports[`publish_method renders 1`] = `
<div
- class="gl-display-flex gl-align-items-center"
+ class="gl-align-items-center gl-display-flex"
>
<gl-icon-stub
class="gl-mr-2"
name="git-merge"
size="16"
/>
-
<span
class="gl-mr-2"
data-testid="pipeline-ref"
>
branch-name
</span>
-
<gl-icon-stub
class="gl-mr-2"
name="commit"
size="16"
/>
-
<gl-link-stub
class="gl-mr-2"
data-testid="pipeline-sha"
@@ -30,7 +27,6 @@ exports[`publish_method renders 1`] = `
>
sha-baz
</gl-link-stub>
-
<clipboard-button-stub
category="tertiary"
size="small"
diff --git a/spec/frontend/packages_and_registries/shared/components/__snapshots__/registry_breadcrumb_spec.js.snap b/spec/frontend/packages_and_registries/shared/components/__snapshots__/registry_breadcrumb_spec.js.snap
index e9ee6ebdb5c..e67bded6a7e 100644
--- a/spec/frontend/packages_and_registries/shared/components/__snapshots__/registry_breadcrumb_spec.js.snap
+++ b/spec/frontend/packages_and_registries/shared/components/__snapshots__/registry_breadcrumb_spec.js.snap
@@ -12,34 +12,22 @@ exports[`Registry Breadcrumb when is not rootRoute renders 1`] = `
class="gl-breadcrumb-item"
>
<a
- class=""
target="_self"
>
- <!---->
- <span>
-
- </span>
+ <span />
</a>
</li>
-
- <!---->
<li
class="gl-breadcrumb-item"
>
<a
aria-current="page"
- class=""
href="#"
target="_self"
>
- <!---->
- <span>
-
- </span>
+ <span />
</a>
</li>
-
- <!---->
</ol>
</nav>
`;
@@ -57,17 +45,11 @@ exports[`Registry Breadcrumb when is rootRoute renders 1`] = `
>
<a
aria-current="page"
- class=""
target="_self"
>
- <!---->
- <span>
-
- </span>
+ <span />
</a>
</li>
-
- <!---->
</ol>
</nav>
`;
diff --git a/spec/frontend/packages_and_registries/shared/components/cli_commands_spec.js b/spec/frontend/packages_and_registries/shared/components/cli_commands_spec.js
index 328f83394f9..9041cb757ab 100644
--- a/spec/frontend/packages_and_registries/shared/components/cli_commands_spec.js
+++ b/spec/frontend/packages_and_registries/shared/components/cli_commands_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown } from '@gitlab/ui';
+import { GlDisclosureDropdown } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue from 'vue';
// eslint-disable-next-line no-restricted-imports
@@ -23,7 +23,7 @@ Vue.use(Vuex);
describe('cli_commands', () => {
let wrapper;
- const findDropdownButton = () => wrapper.findComponent(GlDropdown);
+ const findDropdownButton = () => wrapper.findComponent(GlDisclosureDropdown);
const findCodeInstruction = () => wrapper.findAllComponents(CodeInstruction);
const mountComponent = () => {
diff --git a/spec/frontend/packages_and_registries/shared/components/persisted_search_spec.js b/spec/frontend/packages_and_registries/shared/components/persisted_search_spec.js
index 296caf091d5..615fba2e282 100644
--- a/spec/frontend/packages_and_registries/shared/components/persisted_search_spec.js
+++ b/spec/frontend/packages_and_registries/shared/components/persisted_search_spec.js
@@ -86,6 +86,7 @@ describe('Persisted Search', () => {
after: '123',
before: null,
},
+ sorting: defaultQueryParamsMock.sorting,
},
]);
});
@@ -109,6 +110,7 @@ describe('Persisted Search', () => {
{
filters: [],
sort: 'TEST_DESC',
+ sorting: defaultQueryParamsMock.sorting,
pageInfo: {
before: '456',
after: null,
@@ -136,6 +138,7 @@ describe('Persisted Search', () => {
filters: ['foo'],
sort: 'TEST_DESC',
pageInfo: {},
+ sorting: payload,
},
]);
});
@@ -169,6 +172,7 @@ describe('Persisted Search', () => {
after: '123',
before: null,
},
+ sorting: defaultQueryParamsMock.sorting,
},
]);
});
diff --git a/spec/frontend/pages/admin/abuse_reports/abuse_reports_spec.js b/spec/frontend/pages/admin/abuse_reports/abuse_reports_spec.js
deleted file mode 100644
index 6cf30e84288..00000000000
--- a/spec/frontend/pages/admin/abuse_reports/abuse_reports_spec.js
+++ /dev/null
@@ -1,48 +0,0 @@
-import $ from 'jquery';
-import htmlAbuseReportsList from 'test_fixtures/abuse_reports/abuse_reports_list.html';
-import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import AbuseReports from '~/pages/admin/abuse_reports/abuse_reports';
-
-describe('Abuse Reports', () => {
- const MAX_MESSAGE_LENGTH = 500;
-
- let $messages;
-
- const assertMaxLength = ($message) => {
- expect($message.text().length).toEqual(MAX_MESSAGE_LENGTH);
- };
- const findMessage = (searchText) =>
- $messages.filter((index, element) => element.innerText.indexOf(searchText) > -1).first();
-
- beforeEach(() => {
- setHTMLFixture(htmlAbuseReportsList);
- new AbuseReports(); // eslint-disable-line no-new
- $messages = $('.abuse-reports .message');
- });
-
- afterEach(() => {
- resetHTMLFixture();
- });
-
- it('should truncate long messages', () => {
- const $longMessage = findMessage('LONG MESSAGE');
-
- expect($longMessage.data('originalMessage')).toEqual(expect.anything());
- assertMaxLength($longMessage);
- });
-
- it('should not truncate short messages', () => {
- const $shortMessage = findMessage('SHORT MESSAGE');
-
- expect($shortMessage.data('originalMessage')).not.toEqual(expect.anything());
- });
-
- it('should allow clicking a truncated message to expand and collapse the full message', () => {
- const $longMessage = findMessage('LONG MESSAGE');
- $longMessage.click();
-
- expect($longMessage.data('originalMessage').length).toEqual($longMessage.text().length);
- $longMessage.click();
- assertMaxLength($longMessage);
- });
-});
diff --git a/spec/frontend/pages/admin/jobs/components/cancel_jobs_spec.js b/spec/frontend/pages/admin/jobs/components/cancel_jobs_spec.js
index d94de48f238..2884e4ed521 100644
--- a/spec/frontend/pages/admin/jobs/components/cancel_jobs_spec.js
+++ b/spec/frontend/pages/admin/jobs/components/cancel_jobs_spec.js
@@ -2,12 +2,9 @@ import { GlButton } from '@gitlab/ui';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { TEST_HOST } from 'helpers/test_constants';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import CancelJobs from '~/pages/admin/jobs/components/cancel_jobs.vue';
-import CancelJobsModal from '~/pages/admin/jobs/components/cancel_jobs_modal.vue';
-import {
- CANCEL_JOBS_MODAL_ID,
- CANCEL_BUTTON_TOOLTIP,
-} from '~/pages/admin/jobs/components/constants';
+import CancelJobs from '~/ci/admin/jobs_table/components/cancel_jobs.vue';
+import CancelJobsModal from '~/ci/admin/jobs_table/components/cancel_jobs_modal.vue';
+import { CANCEL_JOBS_MODAL_ID, CANCEL_BUTTON_TOOLTIP } from '~/ci/admin/jobs_table/constants';
describe('CancelJobs component', () => {
let wrapper;
diff --git a/spec/frontend/pages/admin/jobs/components/table/admin_job_table_app_spec.js b/spec/frontend/pages/admin/jobs/components/table/admin_job_table_app_spec.js
index 71ebf64f43c..d14b78d2f4d 100644
--- a/spec/frontend/pages/admin/jobs/components/table/admin_job_table_app_spec.js
+++ b/spec/frontend/pages/admin/jobs/components/table/admin_job_table_app_spec.js
@@ -4,17 +4,17 @@ import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import JobsTableTabs from '~/jobs/components/table/jobs_table_tabs.vue';
-import JobsSkeletonLoader from '~/pages/admin/jobs/components/jobs_skeleton_loader.vue';
-import getAllJobsQuery from '~/pages/admin/jobs/components/table/graphql/queries/get_all_jobs.query.graphql';
-import getAllJobsCount from '~/pages/admin/jobs/components/table/graphql/queries/get_all_jobs_count.query.graphql';
-import getCancelableJobsQuery from '~/pages/admin/jobs/components/table/graphql/queries/get_cancelable_jobs_count.query.graphql';
-import AdminJobsTableApp from '~/pages/admin/jobs/components/table/admin_jobs_table_app.vue';
-import CancelJobs from '~/pages/admin/jobs/components/cancel_jobs.vue';
-import JobsTable from '~/jobs/components/table/jobs_table.vue';
+import JobsTableTabs from '~/ci/jobs_page/components/jobs_table_tabs.vue';
+import JobsSkeletonLoader from '~/ci/admin/jobs_table/components/jobs_skeleton_loader.vue';
+import getAllJobsQuery from '~/ci/admin/jobs_table/graphql/queries/get_all_jobs.query.graphql';
+import getAllJobsCount from '~/ci/admin/jobs_table/graphql/queries/get_all_jobs_count.query.graphql';
+import getCancelableJobsQuery from '~/ci/admin/jobs_table/graphql/queries/get_cancelable_jobs_count.query.graphql';
+import AdminJobsTableApp from '~/ci/admin/jobs_table/admin_jobs_table_app.vue';
+import CancelJobs from '~/ci/admin/jobs_table/components/cancel_jobs.vue';
+import JobsTable from '~/ci/jobs_page/components/jobs_table.vue';
import { createAlert } from '~/alert';
import { TEST_HOST } from 'spec/test_constants';
-import JobsFilteredSearch from '~/jobs/components/filtered_search/jobs_filtered_search.vue';
+import JobsFilteredSearch from '~/ci/common/private/jobs_filtered_search/app.vue';
import * as urlUtils from '~/lib/utils/url_utility';
import {
JOBS_FETCH_ERROR_MSG,
@@ -22,7 +22,8 @@ import {
LOADING_ARIA_LABEL,
RAW_TEXT_WARNING_ADMIN,
JOBS_COUNT_ERROR_MESSAGE,
-} from '~/pages/admin/jobs/components/constants';
+} from '~/ci/admin/jobs_table/constants';
+import { TOKEN_TYPE_JOBS_RUNNER_TYPE } from '~/vue_shared/components/filtered_search_bar/constants';
import {
mockAllJobsResponsePaginated,
mockCancelableJobsCountResponse,
@@ -30,7 +31,7 @@ import {
statuses,
mockFailedSearchToken,
mockAllJobsCountResponse,
-} from '../../../../../jobs/mock_data';
+} from 'jest/ci/jobs_mock_data';
Vue.use(VueApollo);
@@ -54,6 +55,11 @@ describe('Job table app', () => {
const findCancelJobsButton = () => wrapper.findComponent(CancelJobs);
const findFilteredSearch = () => wrapper.findComponent(JobsFilteredSearch);
+ const mockSearchTokenRunnerType = {
+ type: TOKEN_TYPE_JOBS_RUNNER_TYPE,
+ value: { data: 'INSTANCE_TYPE', operator: '=' },
+ };
+
const triggerInfiniteScroll = () =>
wrapper.findComponent(GlIntersectionObserver).vm.$emit('appear');
@@ -73,6 +79,7 @@ describe('Job table app', () => {
countHandler = countSuccessHandler,
mountFn = shallowMount,
data = {},
+ provideOptions = {},
} = {}) => {
wrapper = mountFn(AdminJobsTableApp, {
data() {
@@ -82,6 +89,8 @@ describe('Job table app', () => {
},
provide: {
jobStatuses: statuses,
+ glFeatures: { adminJobsFilterRunnerType: true },
+ ...provideOptions,
},
apolloProvider: createMockApolloProvider(handler, cancelableHandler, countHandler),
});
@@ -304,24 +313,37 @@ describe('Job table app', () => {
},
);
- it('refetches jobs query when filtering', async () => {
- createComponent();
+ describe.each`
+ searchTokens | expectedQueryParams
+ ${[]} | ${{ runnerTypes: null, statuses: null }}
+ ${[mockFailedSearchToken]} | ${{ runnerTypes: null, statuses: 'FAILED' }}
+ ${[mockFailedSearchToken, mockSearchTokenRunnerType]} | ${{ runnerTypes: 'INSTANCE_TYPE', statuses: 'FAILED' }}
+ `('when filtering jobs by searchTokens', ({ searchTokens, expectedQueryParams }) => {
+ it(`refetches jobs query including filters ${JSON.stringify(
+ expectedQueryParams,
+ )}`, async () => {
+ createComponent();
- expect(successHandler).toHaveBeenCalledTimes(1);
+ expect(successHandler).toHaveBeenCalledTimes(1);
- await findFilteredSearch().vm.$emit('filterJobsBySearch', [mockFailedSearchToken]);
+ await findFilteredSearch().vm.$emit('filterJobsBySearch', searchTokens);
- expect(successHandler).toHaveBeenCalledTimes(2);
- });
+ expect(successHandler).toHaveBeenCalledTimes(2);
+ expect(successHandler).toHaveBeenNthCalledWith(2, { first: 50, ...expectedQueryParams });
+ });
- it('refetches jobs count query when filtering', async () => {
- createComponent();
+ it(`refetches jobs count query including filters ${JSON.stringify(
+ expectedQueryParams,
+ )}`, async () => {
+ createComponent();
- expect(countSuccessHandler).toHaveBeenCalledTimes(1);
+ expect(countSuccessHandler).toHaveBeenCalledTimes(1);
- await findFilteredSearch().vm.$emit('filterJobsBySearch', [mockFailedSearchToken]);
+ await findFilteredSearch().vm.$emit('filterJobsBySearch', searchTokens);
- expect(countSuccessHandler).toHaveBeenCalledTimes(2);
+ expect(countSuccessHandler).toHaveBeenCalledTimes(2);
+ expect(countSuccessHandler).toHaveBeenNthCalledWith(2, expectedQueryParams);
+ });
});
it('shows raw text warning when user inputs raw text', async () => {
@@ -364,6 +386,7 @@ describe('Job table app', () => {
expect(successHandler).toHaveBeenCalledWith({
first: 50,
statuses: 'FAILED',
+ runnerTypes: null,
});
expect(urlUtils.updateHistory).toHaveBeenCalledWith({
url: `${TEST_HOST}/?statuses=FAILED`,
@@ -378,6 +401,44 @@ describe('Job table app', () => {
expect(successHandler).toHaveBeenCalledWith({
first: 50,
statuses: null,
+ runnerTypes: null,
+ });
+ });
+
+ describe('when feature flag `adminJobsFilterRunnerType` is disabled', () => {
+ const provideOptions = { glFeatures: { adminJobsFilterRunnerType: false } };
+
+ describe.each`
+ searchTokens | expectedQueryParams
+ ${[]} | ${{ statuses: null }}
+ ${[mockFailedSearchToken]} | ${{ statuses: 'FAILED' }}
+ ${[mockFailedSearchToken, mockSearchTokenRunnerType]} | ${{ statuses: 'FAILED' }}
+ `('when filtering jobs by searchTokens', ({ searchTokens, expectedQueryParams }) => {
+ it(`refetches jobs query including filters ${JSON.stringify(
+ expectedQueryParams,
+ )}`, async () => {
+ createComponent({ provideOptions });
+
+ expect(successHandler).toHaveBeenCalledTimes(1);
+
+ await findFilteredSearch().vm.$emit('filterJobsBySearch', searchTokens);
+
+ expect(successHandler).toHaveBeenCalledTimes(2);
+ expect(successHandler).toHaveBeenNthCalledWith(2, { first: 50, ...expectedQueryParams });
+ });
+
+ it(`refetches jobs count query including filters ${JSON.stringify(
+ expectedQueryParams,
+ )}`, async () => {
+ createComponent({ provideOptions });
+
+ expect(countSuccessHandler).toHaveBeenCalledTimes(1);
+
+ await findFilteredSearch().vm.$emit('filterJobsBySearch', searchTokens);
+
+ expect(countSuccessHandler).toHaveBeenCalledTimes(2);
+ expect(countSuccessHandler).toHaveBeenNthCalledWith(2, expectedQueryParams);
+ });
});
});
});
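The describe.each blocks above assert that the jobs and jobs-count queries receive statuses and, only while the adminJobsFilterRunnerType flag is enabled, runnerTypes. An illustrative sketch of that token-to-variable mapping; prepareJobFilterVariables and the local token-type constants are hypothetical helpers, not the component's actual implementation:

// Hypothetical helper mirroring the variable shapes asserted above.
const TOKEN_TYPE_STATUS = 'status';
const TOKEN_TYPE_JOBS_RUNNER_TYPE = 'jobs-runner-type';

function prepareJobFilterVariables(searchTokens, { adminJobsFilterRunnerType }) {
  const findTokenData = (type) =>
    searchTokens.find((token) => token.type === type)?.value?.data ?? null;

  const variables = { statuses: findTokenData(TOKEN_TYPE_STATUS) };

  // With the flag disabled the runnerTypes key is omitted entirely,
  // matching the disabled-flag expectations in the spec.
  if (adminJobsFilterRunnerType) {
    variables.runnerTypes = findTokenData(TOKEN_TYPE_JOBS_RUNNER_TYPE);
  }

  return variables;
}

// Example, mirroring one describe.each row:
// prepareJobFilterVariables(
//   [{ type: TOKEN_TYPE_STATUS, value: { data: 'FAILED', operator: '=' } }],
//   { adminJobsFilterRunnerType: true },
// );
// => { statuses: 'FAILED', runnerTypes: null }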
diff --git a/spec/frontend/pages/import/bitbucket_server/components/bitbucket_server_status_table_spec.js b/spec/frontend/pages/import/bitbucket_server/components/bitbucket_server_status_table_spec.js
index 40d5dff9d06..50bc5bc590b 100644
--- a/spec/frontend/pages/import/bitbucket_server/components/bitbucket_server_status_table_spec.js
+++ b/spec/frontend/pages/import/bitbucket_server/components/bitbucket_server_status_table_spec.js
@@ -12,11 +12,7 @@ const BitbucketStatusTableStub = {
describe('BitbucketServerStatusTable', () => {
let wrapper;
- const findReconfigureButton = () =>
- wrapper
- .findAllComponents(GlButton)
- .filter((w) => w.props().variant === 'info')
- .at(0);
+ const findReconfigureButton = () => wrapper.findComponent(GlButton);
function createComponent(bitbucketStatusTableStub = true) {
wrapper = shallowMount(BitbucketServerStatusTable, {
diff --git a/spec/frontend/pages/projects/pipeline_schedules/shared/components/pipeline_schedule_callout_spec.js b/spec/frontend/pages/projects/pipeline_schedules/shared/components/pipeline_schedule_callout_spec.js
deleted file mode 100644
index e20c2fa47a7..00000000000
--- a/spec/frontend/pages/projects/pipeline_schedules/shared/components/pipeline_schedule_callout_spec.js
+++ /dev/null
@@ -1,92 +0,0 @@
-import { GlButton } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import { nextTick } from 'vue';
-import Cookies from '~/lib/utils/cookies';
-import PipelineSchedulesCallout from '~/pages/projects/pipeline_schedules/shared/components/pipeline_schedules_callout.vue';
-
-const cookieKey = 'pipeline_schedules_callout_dismissed';
-const docsUrl = 'help/ci/scheduled_pipelines';
-const illustrationUrl = 'pages/projects/pipeline_schedules/shared/icons/intro_illustration.svg';
-
-describe('Pipeline Schedule Callout', () => {
- let wrapper;
-
- const createComponent = () => {
- wrapper = shallowMount(PipelineSchedulesCallout, {
- provide: {
- docsUrl,
- illustrationUrl,
- },
- });
- };
-
- const findInnerContentOfCallout = () => wrapper.find('[data-testid="innerContent"]');
- const findDismissCalloutBtn = () => wrapper.findComponent(GlButton);
-
- describe(`when ${cookieKey} cookie is set`, () => {
- beforeEach(async () => {
- Cookies.set(cookieKey, true);
- createComponent();
-
- await nextTick();
- });
-
- it('does not render the callout', () => {
- expect(findInnerContentOfCallout().exists()).toBe(false);
- });
- });
-
- describe('when cookie is not set', () => {
- beforeEach(() => {
- Cookies.remove(cookieKey);
- createComponent();
- });
-
- it('renders the callout container', () => {
- expect(findInnerContentOfCallout().exists()).toBe(true);
- });
-
- it('renders the callout title', () => {
- expect(wrapper.find('h4').text()).toBe('Scheduling Pipelines');
- });
-
- it('renders the callout text', () => {
- expect(wrapper.find('p').text()).toContain('runs pipelines in the future');
- });
-
- it('renders the documentation url', () => {
- expect(wrapper.find('a').attributes('href')).toBe(docsUrl);
- });
-
- describe('methods', () => {
- it('#dismissCallout sets calloutDismissed to true', async () => {
- expect(wrapper.vm.calloutDismissed).toBe(false);
-
- findDismissCalloutBtn().vm.$emit('click');
-
- await nextTick();
-
- expect(findInnerContentOfCallout().exists()).toBe(false);
- });
-
- it('sets cookie on dismiss', () => {
- const setCookiesSpy = jest.spyOn(Cookies, 'set');
-
- findDismissCalloutBtn().vm.$emit('click');
-
- expect(setCookiesSpy).toHaveBeenCalledWith('pipeline_schedules_callout_dismissed', true, {
- expires: 365,
- secure: false,
- });
- });
- });
-
- it('is hidden when close button is clicked', async () => {
- findDismissCalloutBtn().vm.$emit('click');
-
- await nextTick();
-
- expect(findInnerContentOfCallout().exists()).toBe(false);
- });
- });
-});
diff --git a/spec/frontend/pipelines/components/dag/__snapshots__/dag_graph_spec.js.snap b/spec/frontend/pipelines/components/dag/__snapshots__/dag_graph_spec.js.snap
deleted file mode 100644
index cb5f6ff5307..00000000000
--- a/spec/frontend/pipelines/components/dag/__snapshots__/dag_graph_spec.js.snap
+++ /dev/null
@@ -1,230 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`The DAG graph in the basic case renders the graph svg 1`] = `
-"<svg viewBox=\\"0,0,1000,540\\" width=\\"1000\\" height=\\"540\\">
- <g fill=\\"none\\" stroke-opacity=\\"0.8\\">
- <g id=\\"dag-link43\\" class=\\"dag-link gl-transition-property-stroke-opacity gl-cursor-pointer gl-transition-duration-slow gl-transition-timing-function-ease\\">
- <linearGradient id=\\"dag-grad53\\" gradientUnits=\\"userSpaceOnUse\\" x1=\\"116\\" x2=\\"361.3333333333333\\">
- <stop offset=\\"0%\\" stop-color=\\"#e17223\\"></stop>
- <stop offset=\\"100%\\" stop-color=\\"#83ab4a\\"></stop>
- </linearGradient>
- <clipPath id=\\"dag-clip63\\">
- <path d=\\"
- M100, 129
- V158
- H377.3333333333333
- V100
- H100
- Z
- \\"></path>
- </clipPath>
- <path d=\\"M108,129L190,129L190,129L369.3333333333333,129\\" stroke=\\"url(#dag-grad53)\\" style=\\"stroke-linejoin: round;\\" stroke-width=\\"56\\" clip-path=\\"url(#dag-clip63)\\"></path>
- </g>
- <g id=\\"dag-link44\\" class=\\"dag-link gl-transition-property-stroke-opacity gl-cursor-pointer gl-transition-duration-slow gl-transition-timing-function-ease\\">
- <linearGradient id=\\"dag-grad54\\" gradientUnits=\\"userSpaceOnUse\\" x1=\\"377.3333333333333\\" x2=\\"622.6666666666666\\">
- <stop offset=\\"0%\\" stop-color=\\"#83ab4a\\"></stop>
- <stop offset=\\"100%\\" stop-color=\\"#6f3500\\"></stop>
- </linearGradient>
- <clipPath id=\\"dag-clip64\\">
- <path d=\\"
- M361.3333333333333, 129.0000000000002
- V158.0000000000002
- H638.6666666666666
- V100
- H361.3333333333333
- Z
- \\"></path>
- </clipPath>
- <path d=\\"M369.3333333333333,129L509.3333333333333,129L509.3333333333333,129.0000000000002L630.6666666666666,129.0000000000002\\" stroke=\\"url(#dag-grad54)\\" style=\\"stroke-linejoin: round;\\" stroke-width=\\"56\\" clip-path=\\"url(#dag-clip64)\\"></path>
- </g>
- <g id=\\"dag-link45\\" class=\\"dag-link gl-transition-property-stroke-opacity gl-cursor-pointer gl-transition-duration-slow gl-transition-timing-function-ease\\">
- <linearGradient id=\\"dag-grad55\\" gradientUnits=\\"userSpaceOnUse\\" x1=\\"116\\" x2=\\"622.6666666666666\\">
- <stop offset=\\"0%\\" stop-color=\\"#5772ff\\"></stop>
- <stop offset=\\"100%\\" stop-color=\\"#6f3500\\"></stop>
- </linearGradient>
- <clipPath id=\\"dag-clip65\\">
- <path d=\\"
- M100, 187.0000000000002
- V241.00000000000003
- H638.6666666666666
- V158.0000000000002
- H100
- Z
- \\"></path>
- </clipPath>
- <path d=\\"M108,212.00000000000003L306,212.00000000000003L306,187.0000000000002L630.6666666666666,187.0000000000002\\" stroke=\\"url(#dag-grad55)\\" style=\\"stroke-linejoin: round;\\" stroke-width=\\"56\\" clip-path=\\"url(#dag-clip65)\\"></path>
- </g>
- <g id=\\"dag-link46\\" class=\\"dag-link gl-transition-property-stroke-opacity gl-cursor-pointer gl-transition-duration-slow gl-transition-timing-function-ease\\">
- <linearGradient id=\\"dag-grad56\\" gradientUnits=\\"userSpaceOnUse\\" x1=\\"116\\" x2=\\"361.3333333333333\\">
- <stop offset=\\"0%\\" stop-color=\\"#b24800\\"></stop>
- <stop offset=\\"100%\\" stop-color=\\"#006887\\"></stop>
- </linearGradient>
- <clipPath id=\\"dag-clip66\\">
- <path d=\\"
- M100, 269.9999999999998
- V324
- H377.3333333333333
- V240.99999999999977
- H100
- Z
- \\"></path>
- </clipPath>
- <path d=\\"M108,295L338.93333333333334,295L338.93333333333334,269.9999999999998L369.3333333333333,269.9999999999998\\" stroke=\\"url(#dag-grad56)\\" style=\\"stroke-linejoin: round;\\" stroke-width=\\"56\\" clip-path=\\"url(#dag-clip66)\\"></path>
- </g>
- <g id=\\"dag-link47\\" class=\\"dag-link gl-transition-property-stroke-opacity gl-cursor-pointer gl-transition-duration-slow gl-transition-timing-function-ease\\">
- <linearGradient id=\\"dag-grad57\\" gradientUnits=\\"userSpaceOnUse\\" x1=\\"116\\" x2=\\"361.3333333333333\\">
- <stop offset=\\"0%\\" stop-color=\\"#25d2d2\\"></stop>
- <stop offset=\\"100%\\" stop-color=\\"#487900\\"></stop>
- </linearGradient>
- <clipPath id=\\"dag-clip67\\">
- <path d=\\"
- M100, 352.99999999999994
- V407.00000000000006
- H377.3333333333333
- V323.99999999999994
- H100
- Z
- \\"></path>
- </clipPath>
- <path d=\\"M108,378.00000000000006L144.66666666666669,378.00000000000006L144.66666666666669,352.99999999999994L369.3333333333333,352.99999999999994\\" stroke=\\"url(#dag-grad57)\\" style=\\"stroke-linejoin: round;\\" stroke-width=\\"56\\" clip-path=\\"url(#dag-clip67)\\"></path>
- </g>
- <g id=\\"dag-link48\\" class=\\"dag-link gl-transition-property-stroke-opacity gl-cursor-pointer gl-transition-duration-slow gl-transition-timing-function-ease\\">
- <linearGradient id=\\"dag-grad58\\" gradientUnits=\\"userSpaceOnUse\\" x1=\\"377.3333333333333\\" x2=\\"622.6666666666666\\">
- <stop offset=\\"0%\\" stop-color=\\"#006887\\"></stop>
- <stop offset=\\"100%\\" stop-color=\\"#d84280\\"></stop>
- </linearGradient>
- <clipPath id=\\"dag-clip68\\">
- <path d=\\"
- M361.3333333333333, 270.0000000000001
- V299.0000000000001
- H638.6666666666666
- V240.99999999999977
- H361.3333333333333
- Z
- \\"></path>
- </clipPath>
- <path d=\\"M369.3333333333333,269.9999999999998L464,269.9999999999998L464,270.0000000000001L630.6666666666666,270.0000000000001\\" stroke=\\"url(#dag-grad58)\\" style=\\"stroke-linejoin: round;\\" stroke-width=\\"56\\" clip-path=\\"url(#dag-clip68)\\"></path>
- </g>
- <g id=\\"dag-link49\\" class=\\"dag-link gl-transition-property-stroke-opacity gl-cursor-pointer gl-transition-duration-slow gl-transition-timing-function-ease\\">
- <linearGradient id=\\"dag-grad59\\" gradientUnits=\\"userSpaceOnUse\\" x1=\\"377.3333333333333\\" x2=\\"622.6666666666666\\">
- <stop offset=\\"0%\\" stop-color=\\"#487900\\"></stop>
- <stop offset=\\"100%\\" stop-color=\\"#d84280\\"></stop>
- </linearGradient>
- <clipPath id=\\"dag-clip69\\">
- <path d=\\"
- M361.3333333333333, 328.0000000000001
- V381.99999999999994
- H638.6666666666666
- V299.0000000000001
- H361.3333333333333
- Z
- \\"></path>
- </clipPath>
- <path d=\\"M369.3333333333333,352.99999999999994L522,352.99999999999994L522,328.0000000000001L630.6666666666666,328.0000000000001\\" stroke=\\"url(#dag-grad59)\\" style=\\"stroke-linejoin: round;\\" stroke-width=\\"56\\" clip-path=\\"url(#dag-clip69)\\"></path>
- </g>
- <g id=\\"dag-link50\\" class=\\"dag-link gl-transition-property-stroke-opacity gl-cursor-pointer gl-transition-duration-slow gl-transition-timing-function-ease\\">
- <linearGradient id=\\"dag-grad60\\" gradientUnits=\\"userSpaceOnUse\\" x1=\\"377.3333333333333\\" x2=\\"622.6666666666666\\">
- <stop offset=\\"0%\\" stop-color=\\"#487900\\"></stop>
- <stop offset=\\"100%\\" stop-color=\\"#3547de\\"></stop>
- </linearGradient>
- <clipPath id=\\"dag-clip70\\">
- <path d=\\"
- M361.3333333333333, 411
- V440
- H638.6666666666666
- V381.99999999999994
- H361.3333333333333
- Z
- \\"></path>
- </clipPath>
- <path d=\\"M369.3333333333333,410.99999999999994L580,410.99999999999994L580,411L630.6666666666666,411\\" stroke=\\"url(#dag-grad60)\\" style=\\"stroke-linejoin: round;\\" stroke-width=\\"56\\" clip-path=\\"url(#dag-clip70)\\"></path>
- </g>
- <g id=\\"dag-link51\\" class=\\"dag-link gl-transition-property-stroke-opacity gl-cursor-pointer gl-transition-duration-slow gl-transition-timing-function-ease\\">
- <linearGradient id=\\"dag-grad61\\" gradientUnits=\\"userSpaceOnUse\\" x1=\\"638.6666666666666\\" x2=\\"884\\">
- <stop offset=\\"0%\\" stop-color=\\"#d84280\\"></stop>
- <stop offset=\\"100%\\" stop-color=\\"#006887\\"></stop>
- </linearGradient>
- <clipPath id=\\"dag-clip71\\">
- <path d=\\"
- M622.6666666666666, 270.1890725105691
- V299.1890725105691
- H900
- V241.0000000000001
- H622.6666666666666
- Z
- \\"></path>
- </clipPath>
- <path d=\\"M630.6666666666666,270.0000000000001L861.6,270.0000000000001L861.6,270.1890725105691L892,270.1890725105691\\" stroke=\\"url(#dag-grad61)\\" style=\\"stroke-linejoin: round;\\" stroke-width=\\"56\\" clip-path=\\"url(#dag-clip71)\\"></path>
- </g>
- <g id=\\"dag-link52\\" class=\\"dag-link gl-transition-property-stroke-opacity gl-cursor-pointer gl-transition-duration-slow gl-transition-timing-function-ease\\">
- <linearGradient id=\\"dag-grad62\\" gradientUnits=\\"userSpaceOnUse\\" x1=\\"638.6666666666666\\" x2=\\"884\\">
- <stop offset=\\"0%\\" stop-color=\\"#3547de\\"></stop>
- <stop offset=\\"100%\\" stop-color=\\"#275600\\"></stop>
- </linearGradient>
- <clipPath id=\\"dag-clip72\\">
- <path d=\\"
- M622.6666666666666, 411
- V440
- H900
- V382
- H622.6666666666666
- Z
- \\"></path>
- </clipPath>
- <path d=\\"M630.6666666666666,411L679.9999999999999,411L679.9999999999999,411L892,411\\" stroke=\\"url(#dag-grad62)\\" style=\\"stroke-linejoin: round;\\" stroke-width=\\"56\\" clip-path=\\"url(#dag-clip72)\\"></path>
- </g>
- </g>
- <g>
- <line class=\\"dag-node gl-transition-property-stroke gl-cursor-pointer gl-transition-duration-slow gl-transition-timing-function-ease\\" id=\\"dag-node73\\" stroke=\\"#e17223\\" stroke-width=\\"16\\" stroke-linecap=\\"round\\" x1=\\"108\\" x2=\\"108\\" y1=\\"104\\" y2=\\"154.00000000000003\\"></line>
- <line class=\\"dag-node gl-transition-property-stroke gl-cursor-pointer gl-transition-duration-slow gl-transition-timing-function-ease\\" id=\\"dag-node74\\" stroke=\\"#83ab4a\\" stroke-width=\\"16\\" stroke-linecap=\\"round\\" x1=\\"369\\" x2=\\"369\\" y1=\\"104\\" y2=\\"154\\"></line>
- <line class=\\"dag-node gl-transition-property-stroke gl-cursor-pointer gl-transition-duration-slow gl-transition-timing-function-ease\\" id=\\"dag-node75\\" stroke=\\"#5772ff\\" stroke-width=\\"16\\" stroke-linecap=\\"round\\" x1=\\"108\\" x2=\\"108\\" y1=\\"187.00000000000003\\" y2=\\"237.00000000000003\\"></line>
- <line class=\\"dag-node gl-transition-property-stroke gl-cursor-pointer gl-transition-duration-slow gl-transition-timing-function-ease\\" id=\\"dag-node76\\" stroke=\\"#b24800\\" stroke-width=\\"16\\" stroke-linecap=\\"round\\" x1=\\"108\\" x2=\\"108\\" y1=\\"270\\" y2=\\"320.00000000000006\\"></line>
- <line class=\\"dag-node gl-transition-property-stroke gl-cursor-pointer gl-transition-duration-slow gl-transition-timing-function-ease\\" id=\\"dag-node77\\" stroke=\\"#25d2d2\\" stroke-width=\\"16\\" stroke-linecap=\\"round\\" x1=\\"108\\" x2=\\"108\\" y1=\\"353.00000000000006\\" y2=\\"403.0000000000001\\"></line>
- <line class=\\"dag-node gl-transition-property-stroke gl-cursor-pointer gl-transition-duration-slow gl-transition-timing-function-ease\\" id=\\"dag-node78\\" stroke=\\"#6f3500\\" stroke-width=\\"16\\" stroke-linecap=\\"round\\" x1=\\"630\\" x2=\\"630\\" y1=\\"104.0000000000002\\" y2=\\"212.00000000000009\\"></line>
- <line class=\\"dag-node gl-transition-property-stroke gl-cursor-pointer gl-transition-duration-slow gl-transition-timing-function-ease\\" id=\\"dag-node79\\" stroke=\\"#006887\\" stroke-width=\\"16\\" stroke-linecap=\\"round\\" x1=\\"369\\" x2=\\"369\\" y1=\\"244.99999999999977\\" y2=\\"294.99999999999994\\"></line>
- <line class=\\"dag-node gl-transition-property-stroke gl-cursor-pointer gl-transition-duration-slow gl-transition-timing-function-ease\\" id=\\"dag-node80\\" stroke=\\"#487900\\" stroke-width=\\"16\\" stroke-linecap=\\"round\\" x1=\\"369\\" x2=\\"369\\" y1=\\"327.99999999999994\\" y2=\\"436\\"></line>
- <line class=\\"dag-node gl-transition-property-stroke gl-cursor-pointer gl-transition-duration-slow gl-transition-timing-function-ease\\" id=\\"dag-node81\\" stroke=\\"#d84280\\" stroke-width=\\"16\\" stroke-linecap=\\"round\\" x1=\\"630\\" x2=\\"630\\" y1=\\"245.00000000000009\\" y2=\\"353\\"></line>
- <line class=\\"dag-node gl-transition-property-stroke gl-cursor-pointer gl-transition-duration-slow gl-transition-timing-function-ease\\" id=\\"dag-node82\\" stroke=\\"#3547de\\" stroke-width=\\"16\\" stroke-linecap=\\"round\\" x1=\\"630\\" x2=\\"630\\" y1=\\"386\\" y2=\\"436\\"></line>
- <line class=\\"dag-node gl-transition-property-stroke gl-cursor-pointer gl-transition-duration-slow gl-transition-timing-function-ease\\" id=\\"dag-node83\\" stroke=\\"#006887\\" stroke-width=\\"16\\" stroke-linecap=\\"round\\" x1=\\"892\\" x2=\\"892\\" y1=\\"245.18907251056908\\" y2=\\"295.1890725105691\\"></line>
- <line class=\\"dag-node gl-transition-property-stroke gl-cursor-pointer gl-transition-duration-slow gl-transition-timing-function-ease\\" id=\\"dag-node84\\" stroke=\\"#275600\\" stroke-width=\\"16\\" stroke-linecap=\\"round\\" x1=\\"892\\" x2=\\"892\\" y1=\\"386\\" y2=\\"436\\"></line>
- </g>
- <g class=\\"gl-font-sm\\">
- <foreignObject requiredFeatures=\\"http://www.w3.org/TR/SVG11/feature#Extensibility\\" height=\\"58.00000000000003px\\" width=\\"84\\" x=\\"8\\" y=\\"100\\" class=\\"gl-overflow-visible\\">
- <div class=\\"gl-display-flex gl-pointer-events-none gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break\\" style=\\"height: 58.00000000000003px; text-align: right;\\">build_a</div>
- </foreignObject>
- <foreignObject requiredFeatures=\\"http://www.w3.org/TR/SVG11/feature#Extensibility\\" height=\\"25px\\" width=\\"84\\" x=\\"369.3333333333333\\" y=\\"75\\" class=\\"gl-overflow-visible\\">
- <div class=\\"gl-display-flex gl-pointer-events-none gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break\\" style=\\"height: 25px; text-align: left;\\">test_a</div>
- </foreignObject>
- <foreignObject requiredFeatures=\\"http://www.w3.org/TR/SVG11/feature#Extensibility\\" height=\\"58px\\" width=\\"84\\" x=\\"8\\" y=\\"183.00000000000003\\" class=\\"gl-overflow-visible\\">
- <div class=\\"gl-display-flex gl-pointer-events-none gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break\\" style=\\"height: 58px; text-align: right;\\">test_b</div>
- </foreignObject>
- <foreignObject requiredFeatures=\\"http://www.w3.org/TR/SVG11/feature#Extensibility\\" height=\\"58.00000000000006px\\" width=\\"84\\" x=\\"8\\" y=\\"266\\" class=\\"gl-overflow-visible\\">
- <div class=\\"gl-display-flex gl-pointer-events-none gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break\\" style=\\"height: 58.00000000000006px; text-align: right;\\">post_test_a</div>
- </foreignObject>
- <foreignObject requiredFeatures=\\"http://www.w3.org/TR/SVG11/feature#Extensibility\\" height=\\"58.00000000000006px\\" width=\\"84\\" x=\\"8\\" y=\\"349.00000000000006\\" class=\\"gl-overflow-visible\\">
- <div class=\\"gl-display-flex gl-pointer-events-none gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break\\" style=\\"height: 58.00000000000006px; text-align: right;\\">post_test_b</div>
- </foreignObject>
- <foreignObject requiredFeatures=\\"http://www.w3.org/TR/SVG11/feature#Extensibility\\" height=\\"25px\\" width=\\"84\\" x=\\"630.6666666666666\\" y=\\"75.0000000000002\\" class=\\"gl-overflow-visible\\">
- <div class=\\"gl-display-flex gl-pointer-events-none gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break\\" style=\\"height: 25px; text-align: right;\\">post_test_c</div>
- </foreignObject>
- <foreignObject requiredFeatures=\\"http://www.w3.org/TR/SVG11/feature#Extensibility\\" height=\\"25px\\" width=\\"84\\" x=\\"369.3333333333333\\" y=\\"215.99999999999977\\" class=\\"gl-overflow-visible\\">
- <div class=\\"gl-display-flex gl-pointer-events-none gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break\\" style=\\"height: 25px; text-align: left;\\">staging_a</div>
- </foreignObject>
- <foreignObject requiredFeatures=\\"http://www.w3.org/TR/SVG11/feature#Extensibility\\" height=\\"25px\\" width=\\"84\\" x=\\"369.3333333333333\\" y=\\"298.99999999999994\\" class=\\"gl-overflow-visible\\">
- <div class=\\"gl-display-flex gl-pointer-events-none gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break\\" style=\\"height: 25px; text-align: left;\\">staging_b</div>
- </foreignObject>
- <foreignObject requiredFeatures=\\"http://www.w3.org/TR/SVG11/feature#Extensibility\\" height=\\"25px\\" width=\\"84\\" x=\\"630.6666666666666\\" y=\\"216.00000000000009\\" class=\\"gl-overflow-visible\\">
- <div class=\\"gl-display-flex gl-pointer-events-none gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break\\" style=\\"height: 25px; text-align: right;\\">canary_a</div>
- </foreignObject>
- <foreignObject requiredFeatures=\\"http://www.w3.org/TR/SVG11/feature#Extensibility\\" height=\\"25px\\" width=\\"84\\" x=\\"630.6666666666666\\" y=\\"357\\" class=\\"gl-overflow-visible\\">
- <div class=\\"gl-display-flex gl-pointer-events-none gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break\\" style=\\"height: 25px; text-align: right;\\">canary_c</div>
- </foreignObject>
- <foreignObject requiredFeatures=\\"http://www.w3.org/TR/SVG11/feature#Extensibility\\" height=\\"58px\\" width=\\"84\\" x=\\"908\\" y=\\"241.18907251056908\\" class=\\"gl-overflow-visible\\">
- <div class=\\"gl-display-flex gl-pointer-events-none gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break\\" style=\\"height: 58px; text-align: left;\\">production_a</div>
- </foreignObject>
- <foreignObject requiredFeatures=\\"http://www.w3.org/TR/SVG11/feature#Extensibility\\" height=\\"58px\\" width=\\"84\\" x=\\"908\\" y=\\"382\\" class=\\"gl-overflow-visible\\">
- <div class=\\"gl-display-flex gl-pointer-events-none gl-flex-direction-column gl-justify-content-center gl-overflow-wrap-break\\" style=\\"height: 58px; text-align: left;\\">production_d</div>
- </foreignObject>
- </g>
-</svg>"
-`;
diff --git a/spec/frontend/pipelines/graph_shared/__snapshots__/links_inner_spec.js.snap b/spec/frontend/pipelines/graph_shared/__snapshots__/links_inner_spec.js.snap
deleted file mode 100644
index 82206e907ff..00000000000
--- a/spec/frontend/pipelines/graph_shared/__snapshots__/links_inner_spec.js.snap
+++ /dev/null
@@ -1,30 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`Links Inner component with a large number of needs matches snapshot and has expected path 1`] = `
-"<div class=\\"gl-display-flex gl-relative\\" totalgroups=\\"10\\"><svg id=\\"link-svg\\" viewBox=\\"0,0,1019,445\\" width=\\"1019px\\" height=\\"445px\\" class=\\"gl-absolute gl-pointer-events-none\\">
- <path d=\\"M202,118C52,118,52,138,102,138\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
- <path d=\\"M202,118C62,118,62,148,112,148\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
- <path d=\\"M222,138C72,138,72,158,122,158\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
- <path d=\\"M212,128C82,128,82,168,132,168\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
- <path d=\\"M232,148C92,148,92,178,142,178\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
- </svg> </div>"
-`;
-
-exports[`Links Inner component with a parallel need matches snapshot and has expected path 1`] = `
-"<div class=\\"gl-display-flex gl-relative\\" totalgroups=\\"10\\"><svg id=\\"link-svg\\" viewBox=\\"0,0,1019,445\\" width=\\"1019px\\" height=\\"445px\\" class=\\"gl-absolute gl-pointer-events-none\\">
- <path d=\\"M192,108C32,108,32,118,82,118\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
- </svg> </div>"
-`;
-
-exports[`Links Inner component with one need matches snapshot and has expected path 1`] = `
-"<div class=\\"gl-display-flex gl-relative\\" totalgroups=\\"10\\"><svg id=\\"link-svg\\" viewBox=\\"0,0,1019,445\\" width=\\"1019px\\" height=\\"445px\\" class=\\"gl-absolute gl-pointer-events-none\\">
- <path d=\\"M202,118C52,118,52,138,102,138\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
- </svg> </div>"
-`;
-
-exports[`Links Inner component with same stage needs matches snapshot and has expected path 1`] = `
-"<div class=\\"gl-display-flex gl-relative\\" totalgroups=\\"10\\"><svg id=\\"link-svg\\" viewBox=\\"0,0,1019,445\\" width=\\"1019px\\" height=\\"445px\\" class=\\"gl-absolute gl-pointer-events-none\\">
- <path d=\\"M192,108C32,108,32,118,82,118\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
- <path d=\\"M202,118C42,118,42,128,92,128\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
- </svg> </div>"
-`;
diff --git a/spec/frontend/pipelines/notification/mock_data.js b/spec/frontend/pipelines/notification/mock_data.js
deleted file mode 100644
index e36f391a854..00000000000
--- a/spec/frontend/pipelines/notification/mock_data.js
+++ /dev/null
@@ -1,33 +0,0 @@
-const randomWarning = {
- content: 'another random warning',
- id: 'gid://gitlab/Ci::PipelineMessage/272',
-};
-
-const rootTypeWarning = {
- content: 'root `types` will be removed in 15.0.',
- id: 'gid://gitlab/Ci::PipelineMessage/273',
-};
-
-const typeWarning = {
- content: '`type` will be removed in 15.0.',
- id: 'gid://gitlab/Ci::PipelineMessage/274',
-};
-
-function createWarningMock(warnings) {
- return {
- data: {
- project: {
- id: 'gid://gitlab/Project/28"',
- pipeline: {
- id: 'gid://gitlab/Ci::Pipeline/183',
- warningMessages: warnings,
- },
- },
- },
- };
-}
-
-export const mockWarningsWithoutDeprecation = createWarningMock([randomWarning]);
-export const mockWarningsRootType = createWarningMock([rootTypeWarning]);
-export const mockWarningsType = createWarningMock([typeWarning]);
-export const mockWarningsTypesAll = createWarningMock([rootTypeWarning, typeWarning]);
diff --git a/spec/frontend/profile/preferences/components/__snapshots__/diffs_colors_preview_spec.js.snap b/spec/frontend/profile/preferences/components/__snapshots__/diffs_colors_preview_spec.js.snap
index f675b6cf15c..7d5e0cccb38 100644
--- a/spec/frontend/profile/preferences/components/__snapshots__/diffs_colors_preview_spec.js.snap
+++ b/spec/frontend/profile/preferences/components/__snapshots__/diffs_colors_preview_spec.js.snap
@@ -7,7 +7,6 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
<label>
Preview
</label>
-
<table
class="code"
>
@@ -16,71 +15,66 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
class="line_holder parallel"
>
<td
- class="old_line diff-line-num old"
+ class="diff-line-num old old_line"
>
<a
data-linenumber="1"
/>
</td>
-
<td
- class="line_content parallel left-side old"
+ class="left-side line_content old parallel"
>
<span>
<span
class="c1"
>
- #
+ #
<span
- class="idiff deletion"
+ class="deletion idiff"
>
Removed
</span>
- content
+ content
</span>
</span>
</td>
-
<td
- class="new_line diff-line-num new"
+ class="diff-line-num new new_line"
>
<a
data-linenumber="1"
/>
</td>
-
<td
- class="line_content parallel right-side new"
+ class="line_content new parallel right-side"
>
<span>
<span
class="c1"
>
- #
+ #
<span
- class="idiff addition"
+ class="addition idiff"
>
Added
</span>
- content
+ content
</span>
</span>
</td>
</tr>
-
<tr
class="line_holder parallel"
>
<td
- class="old_line diff-line-num old"
+ class="diff-line-num old old_line"
>
<a
data-linenumber="2"
/>
</td>
-
<td
- class="line_content parallel left-side old"
+ class="left-side line_content old parallel"
>
<span>
<span
@@ -88,13 +82,11 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
>
v
</span>
-
<span
class="o"
>
=
</span>
-
<span
class="mi"
>
@@ -102,17 +94,15 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
</span>
</span>
</td>
-
<td
- class="new_line diff-line-num new"
+ class="diff-line-num new new_line"
>
<a
data-linenumber="2"
/>
</td>
-
<td
- class="line_content parallel right-side new"
+ class="line_content new parallel right-side"
>
<span>
<span
@@ -120,13 +110,11 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
>
v
</span>
-
<span
class="o"
>
=
</span>
-
<span
class="mi"
>
@@ -135,20 +123,18 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
</span>
</td>
</tr>
-
<tr
class="line_holder parallel"
>
<td
- class="old_line diff-line-num old"
+ class="diff-line-num old old_line"
>
<a
data-linenumber="3"
/>
</td>
-
<td
- class="line_content parallel left-side old"
+ class="left-side line_content old parallel"
>
<span>
<span
@@ -156,13 +142,11 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
>
s
</span>
-
<span
class="o"
>
=
</span>
-
<span
class="s"
>
@@ -170,17 +154,15 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
</span>
</span>
</td>
-
<td
- class="new_line diff-line-num new"
+ class="diff-line-num new new_line"
>
<a
data-linenumber="3"
/>
</td>
-
<td
- class="line_content parallel right-side new"
+ class="line_content new parallel right-side"
>
<span>
<span
@@ -188,13 +170,11 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
>
s
</span>
-
<span
class="o"
>
=
</span>
-
<span
class="s"
>
@@ -203,52 +183,46 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
</span>
</td>
</tr>
-
<tr
class="line_holder parallel"
>
<td
- class="old_line diff-line-num old"
+ class="diff-line-num old old_line"
>
<a
data-linenumber="4"
/>
</td>
-
<td
- class="line_content parallel left-side old"
+ class="left-side line_content old parallel"
>
<span />
</td>
-
<td
- class="new_line diff-line-num new"
+ class="diff-line-num new new_line"
>
<a
data-linenumber="4"
/>
</td>
-
<td
- class="line_content parallel right-side new"
+ class="line_content new parallel right-side"
>
<span />
</td>
</tr>
-
<tr
class="line_holder parallel"
>
<td
- class="old_line diff-line-num old"
+ class="diff-line-num old old_line"
>
<a
data-linenumber="5"
/>
</td>
-
<td
- class="line_content parallel left-side old"
+ class="left-side line_content old parallel"
>
<span>
<span
@@ -256,19 +230,16 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
>
for
</span>
-
<span
class="n"
>
i
</span>
-
<span
class="ow"
>
in
</span>
-
<span
class="nb"
>
@@ -294,7 +265,6 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
>
,
</span>
-
<span
class="mi"
>
@@ -307,17 +277,15 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
</span>
</span>
</td>
-
<td
- class="new_line diff-line-num new"
+ class="diff-line-num new new_line"
>
<a
data-linenumber="5"
/>
</td>
-
<td
- class="line_content parallel right-side new"
+ class="line_content new parallel right-side"
>
<span>
<span
@@ -325,19 +293,16 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
>
for
</span>
-
<span
class="n"
>
i
</span>
-
<span
class="ow"
>
in
</span>
-
<span
class="nb"
>
@@ -363,7 +328,6 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
>
,
</span>
-
<span
class="mi"
>
@@ -377,25 +341,21 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
</span>
</td>
</tr>
-
<tr
class="line_holder parallel"
>
<td
- class="old_line diff-line-num old"
+ class="diff-line-num old old_line"
>
<a
data-linenumber="6"
/>
</td>
-
<td
- class="line_content parallel left-side old"
+ class="left-side line_content old parallel"
>
<span>
- <span>
-
- </span>
+ <span />
<span
class="k"
>
@@ -411,13 +371,11 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
>
i
</span>
-
<span
class="o"
>
+
</span>
-
<span
class="mi"
>
@@ -430,22 +388,18 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
</span>
</span>
</td>
-
<td
- class="new_line diff-line-num new"
+ class="diff-line-num new new_line"
>
<a
data-linenumber="6"
/>
</td>
-
<td
- class="line_content parallel right-side new"
+ class="line_content new parallel right-side"
>
<span>
- <span>
-
- </span>
+ <span />
<span
class="k"
>
@@ -461,13 +415,11 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
>
i
</span>
-
<span
class="o"
>
+
</span>
-
<span
class="mi"
>
@@ -481,52 +433,46 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
</span>
</td>
</tr>
-
<tr
class="line_holder parallel"
>
<td
- class="old_line diff-line-num old"
+ class="diff-line-num old old_line"
>
<a
data-linenumber="7"
/>
</td>
-
<td
- class="line_content parallel left-side old"
+ class="left-side line_content old parallel"
>
<span />
</td>
-
<td
- class="new_line diff-line-num new"
+ class="diff-line-num new new_line"
>
<a
data-linenumber="7"
/>
</td>
-
<td
- class="line_content parallel right-side new"
+ class="line_content new parallel right-side"
>
<span />
</td>
</tr>
-
<tr
class="line_holder parallel"
>
<td
- class="old_line diff-line-num old"
+ class="diff-line-num old old_line"
>
<a
data-linenumber="8"
/>
</td>
-
<td
- class="line_content parallel left-side old"
+ class="left-side line_content old parallel"
>
<span>
<span
@@ -534,7 +480,6 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
>
class
</span>
-
<span
class="nc"
>
@@ -557,17 +502,15 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
</span>
</span>
</td>
-
<td
- class="new_line diff-line-num new"
+ class="diff-line-num new new_line"
>
<a
data-linenumber="8"
/>
</td>
-
<td
- class="line_content parallel right-side new"
+ class="line_content new parallel right-side"
>
<span>
<span
@@ -575,7 +518,6 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
>
class
</span>
-
<span
class="nc"
>
@@ -599,31 +541,26 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
</span>
</td>
</tr>
-
<tr
class="line_holder parallel"
>
<td
- class="old_line diff-line-num old"
+ class="diff-line-num old old_line"
>
<a
data-linenumber="9"
/>
</td>
-
<td
- class="line_content parallel left-side old"
+ class="left-side line_content old parallel"
>
<span>
- <span>
-
- </span>
+ <span />
<span
class="k"
>
def
</span>
-
<span
class="nf"
>
@@ -644,7 +581,6 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
>
,
</span>
-
<span
class="n"
>
@@ -657,28 +593,23 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
</span>
</span>
</td>
-
<td
- class="new_line diff-line-num new"
+ class="diff-line-num new new_line"
>
<a
data-linenumber="9"
/>
</td>
-
<td
- class="line_content parallel right-side new"
+ class="line_content new parallel right-side"
>
<span>
- <span>
-
- </span>
+ <span />
<span
class="k"
>
def
</span>
-
<span
class="nf"
>
@@ -699,7 +630,6 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
>
,
</span>
-
<span
class="n"
>
@@ -713,25 +643,21 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
</span>
</td>
</tr>
-
<tr
class="line_holder parallel"
>
<td
- class="old_line diff-line-num old"
+ class="diff-line-num old old_line"
>
<a
data-linenumber="10"
/>
</td>
-
<td
- class="line_content parallel left-side old"
+ class="left-side line_content old parallel"
>
<span>
- <span>
-
- </span>
+ <span />
<span
class="bp"
>
@@ -747,13 +673,11 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
>
val
</span>
-
<span
class="o"
>
=
</span>
-
<span
class="n"
>
@@ -761,22 +685,18 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
</span>
</span>
</td>
-
<td
- class="new_line diff-line-num new"
+ class="diff-line-num new new_line"
>
<a
data-linenumber="10"
/>
</td>
-
<td
- class="line_content parallel right-side new"
+ class="line_content new parallel right-side"
>
<span>
- <span>
-
- </span>
+ <span />
<span
class="bp"
>
@@ -792,13 +712,11 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
>
val
</span>
-
<span
class="o"
>
=
</span>
-
<span
class="n"
>
@@ -807,25 +725,21 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
</span>
</td>
</tr>
-
<tr
class="line_holder parallel"
>
<td
- class="old_line diff-line-num old"
+ class="diff-line-num old old_line"
>
<a
data-linenumber="11"
/>
</td>
-
<td
- class="line_content parallel left-side old"
+ class="left-side line_content old parallel"
>
<span>
- <span>
-
- </span>
+ <span />
<span
class="bp"
>
@@ -841,13 +755,11 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
>
next
</span>
-
<span
class="o"
>
=
</span>
-
<span
class="bp"
>
@@ -855,22 +767,18 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
</span>
</span>
</td>
-
<td
- class="new_line diff-line-num new"
+ class="diff-line-num new new_line"
>
<a
data-linenumber="11"
/>
</td>
-
<td
- class="line_content parallel right-side new"
+ class="line_content new parallel right-side"
>
<span>
- <span>
-
- </span>
+ <span />
<span
class="bp"
>
@@ -886,13 +794,11 @@ exports[`DiffsColorsPreview component renders diff colors preview 1`] = `
>
next
</span>
-
<span
class="o"
>
=
</span>
-
<span
class="bp"
>
diff --git a/spec/frontend/projects/commit/components/form_modal_spec.js b/spec/frontend/projects/commit/components/form_modal_spec.js
index d40e2d7a48c..7ea3a74418d 100644
--- a/spec/frontend/projects/commit/components/form_modal_spec.js
+++ b/spec/frontend/projects/commit/components/form_modal_spec.js
@@ -72,11 +72,11 @@ describe('CommitFormModal', () => {
it('Shows modal', () => {
createComponent();
- const rootEmit = jest.spyOn(wrapper.vm.$root, '$emit');
+ const rootWrapper = createWrapper(wrapper.vm.$root);
- wrapper.vm.show();
+ eventHub.$emit(mockData.modalPropsData.openModal);
- expect(rootEmit).toHaveBeenCalledWith(BV_SHOW_MODAL, mockData.modalPropsData.modalId);
+ expect(rootWrapper.emitted(BV_SHOW_MODAL)[0]).toContain(mockData.modalPropsData.modalId);
});
it('Clears the modal state once modal is hidden', () => {
@@ -150,8 +150,9 @@ describe('CommitFormModal', () => {
it('Action primary button dispatches submit action', () => {
getByText(mockData.modalPropsData.i18n.actionPrimaryText).trigger('click');
+ const formSubmitSpy = jest.spyOn(findForm().element, 'submit');
- expect(wrapper.vm.$refs.form.$el.submit).toHaveBeenCalled();
+ expect(formSubmitSpy).toHaveBeenCalled();
});
it('Changes the start_branch input value', async () => {
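The form_modal hunk above drops the jest.spyOn($root, '$emit') pattern in favour of wrapping the root instance with createWrapper and reading its emitted() log. A self-contained sketch of that pattern, using a stand-in DummyModal and a 'dummy-show' event rather than the real CommitFormModal and BV_SHOW_MODAL wiring:

import { createWrapper, shallowMount } from '@vue/test-utils';

// Stand-in component: proxies a show() call to a root-level event,
// the way Bootstrap-Vue modals are commonly opened.
const DummyModal = {
  methods: {
    show() {
      this.$root.$emit('dummy-show', 'dummy-modal-id');
    },
  },
  render(h) {
    return h('div');
  },
};

describe('DummyModal', () => {
  it('emits the show event on $root with the modal id', () => {
    const wrapper = shallowMount(DummyModal);
    // When mounted standalone the component is its own $root, so the
    // root wrapper's emitted() log captures the event.
    const rootWrapper = createWrapper(wrapper.vm.$root);

    wrapper.vm.show();

    expect(rootWrapper.emitted('dummy-show')[0]).toContain('dummy-modal-id');
  });
});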
diff --git a/spec/frontend/projects/compare/components/revision_dropdown_legacy_spec.js b/spec/frontend/projects/compare/components/revision_dropdown_legacy_spec.js
deleted file mode 100644
index e289569f8ce..00000000000
--- a/spec/frontend/projects/compare/components/revision_dropdown_legacy_spec.js
+++ /dev/null
@@ -1,136 +0,0 @@
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import AxiosMockAdapter from 'axios-mock-adapter';
-import waitForPromises from 'helpers/wait_for_promises';
-import { createAlert } from '~/alert';
-import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_NOT_FOUND, HTTP_STATUS_OK } from '~/lib/utils/http_status';
-import RevisionDropdown from '~/projects/compare/components/revision_dropdown_legacy.vue';
-
-const defaultProps = {
- refsProjectPath: 'some/refs/path',
- revisionText: 'Target',
- paramsName: 'from',
- paramsBranch: 'main',
-};
-
-jest.mock('~/alert');
-
-describe('RevisionDropdown component', () => {
- let wrapper;
- let axiosMock;
-
- const createComponent = (props = {}) => {
- wrapper = shallowMount(RevisionDropdown, {
- propsData: {
- ...defaultProps,
- ...props,
- },
- });
- };
-
- beforeEach(() => {
- axiosMock = new AxiosMockAdapter(axios);
- createComponent();
- });
-
- afterEach(() => {
- axiosMock.restore();
- });
-
- const findGlDropdown = () => wrapper.findComponent(GlDropdown);
- const findBranchesDropdownItem = () =>
- wrapper.findAllComponents('[data-testid="branches-dropdown-item"]');
- const findTagsDropdownItem = () =>
- wrapper.findAllComponents('[data-testid="tags-dropdown-item"]');
-
- it('sets hidden input', () => {
- expect(wrapper.find('input[type="hidden"]').attributes('value')).toBe(
- defaultProps.paramsBranch,
- );
- });
-
- it('update the branches on success', async () => {
- const Branches = ['branch-1', 'branch-2'];
- const Tags = ['tag-1', 'tag-2', 'tag-3'];
-
- axiosMock.onGet(defaultProps.refsProjectPath).replyOnce(HTTP_STATUS_OK, {
- Branches,
- Tags,
- });
-
- createComponent();
-
- expect(findBranchesDropdownItem()).toHaveLength(0);
- expect(findTagsDropdownItem()).toHaveLength(0);
-
- await waitForPromises();
-
- Branches.forEach((branch, index) => {
- expect(findBranchesDropdownItem().at(index).text()).toBe(branch);
- });
-
- Tags.forEach((tag, index) => {
- expect(findTagsDropdownItem().at(index).text()).toBe(tag);
- });
-
- expect(findBranchesDropdownItem()).toHaveLength(Branches.length);
- expect(findTagsDropdownItem()).toHaveLength(Tags.length);
- });
-
- it('sets branches and tags to be an empty array when no tags or branches are given', async () => {
- axiosMock.onGet(defaultProps.refsProjectPath).replyOnce(HTTP_STATUS_OK, {
- Branches: undefined,
- Tags: undefined,
- });
-
- await waitForPromises();
-
- expect(findBranchesDropdownItem()).toHaveLength(0);
- expect(findTagsDropdownItem()).toHaveLength(0);
- });
-
- it('shows an alert on error', async () => {
- axiosMock.onGet('some/invalid/path').replyOnce(HTTP_STATUS_NOT_FOUND);
-
- await waitForPromises();
-
- expect(createAlert).toHaveBeenCalled();
- });
-
- describe('GlDropdown component', () => {
- it('renders props', () => {
- expect(wrapper.props()).toEqual(expect.objectContaining(defaultProps));
- });
-
- it('display default text', () => {
- createComponent({
- paramsBranch: null,
- });
- expect(findGlDropdown().props('text')).toBe('Select branch/tag');
- });
-
- it('display params branch text', () => {
- expect(findGlDropdown().props('text')).toBe(defaultProps.paramsBranch);
- });
-
- it('emits a "selectRevision" event when a revision is selected', async () => {
- const findGlDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
- const findFirstGlDropdownItem = () => findGlDropdownItems().at(0);
- const branchName = 'some-branch';
-
- axiosMock.onGet(defaultProps.refsProjectPath).replyOnce(HTTP_STATUS_OK, {
- Branches: [branchName],
- });
-
- createComponent();
- await waitForPromises();
-
- findFirstGlDropdownItem().vm.$emit('click');
-
- expect(wrapper.emitted()).toEqual({
- selectRevision: [[{ direction: 'from', revision: branchName }]],
- });
- });
- });
-});
diff --git a/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap b/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
index 4893ee26178..479530c1d38 100644
--- a/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
+++ b/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
@@ -10,12 +10,10 @@ exports[`Project remove modal initialized matches the snapshot 1`] = `
type="hidden"
value="delete"
/>
-
<input
name="authenticity_token"
type="hidden"
/>
-
<delete-modal-stub
confirmphrase="foo"
forkscount="3"
@@ -23,7 +21,6 @@ exports[`Project remove modal initialized matches the snapshot 1`] = `
mergerequestscount="2"
starscount="4"
/>
-
<gl-button-stub
buttontextclasses=""
category="primary"
diff --git a/spec/frontend/projects/pipelines/charts/components/__snapshots__/ci_cd_analytics_area_chart_spec.js.snap b/spec/frontend/projects/pipelines/charts/components/__snapshots__/ci_cd_analytics_area_chart_spec.js.snap
index 61bcd44724c..efce72271e0 100644
--- a/spec/frontend/projects/pipelines/charts/components/__snapshots__/ci_cd_analytics_area_chart_spec.js.snap
+++ b/spec/frontend/projects/pipelines/charts/components/__snapshots__/ci_cd_analytics_area_chart_spec.js.snap
@@ -7,7 +7,6 @@ exports[`CiCdAnalyticsAreaChart matches the snapshot 1`] = `
<p>
Some title
</p>
-
<glareachart-stub
annotations=""
data="[object Object],[object Object]"
diff --git a/spec/frontend/projects/pipelines/charts/components/__snapshots__/statistics_list_spec.js.snap b/spec/frontend/projects/pipelines/charts/components/__snapshots__/statistics_list_spec.js.snap
index 5ec0ad794fb..16d291804cc 100644
--- a/spec/frontend/projects/pipelines/charts/components/__snapshots__/statistics_list_spec.js.snap
+++ b/spec/frontend/projects/pipelines/charts/components/__snapshots__/statistics_list_spec.js.snap
@@ -6,7 +6,6 @@ exports[`StatisticsList displays the counts data with labels 1`] = `
<span>
Total:
</span>
-
<strong>
4 pipelines
</strong>
@@ -15,7 +14,6 @@ exports[`StatisticsList displays the counts data with labels 1`] = `
<span>
Successful:
</span>
-
<strong>
2 pipelines
</strong>
@@ -24,20 +22,16 @@ exports[`StatisticsList displays the counts data with labels 1`] = `
<span>
Failed:
</span>
-
<gl-link-stub
href="/flightjs/Flight/-/pipelines?page=1&scope=all&status=failed"
>
-
- 2 pipelines
-
+ 2 pipelines
</gl-link-stub>
</li>
<li>
<span>
Success ratio:
</span>
-
<strong>
50.00%
</strong>
diff --git a/spec/frontend/projects/settings/access_dropdown_spec.js b/spec/frontend/projects/settings/access_dropdown_spec.js
deleted file mode 100644
index a94d7669b2b..00000000000
--- a/spec/frontend/projects/settings/access_dropdown_spec.js
+++ /dev/null
@@ -1,204 +0,0 @@
-import $ from 'jquery';
-import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import AccessDropdown from '~/projects/settings/access_dropdown';
-import { LEVEL_TYPES } from '~/projects/settings/constants';
-
-describe('AccessDropdown', () => {
- const defaultLabel = 'dummy default label';
- let dropdown;
-
- beforeEach(() => {
- setHTMLFixture(`
- <div id="dummy-dropdown">
- <span class="dropdown-toggle-text"></span>
- </div>
- `);
- const $dropdown = $('#dummy-dropdown');
- $dropdown.data('defaultLabel', defaultLabel);
- const options = {
- $dropdown,
- accessLevelsData: {
- roles: [
- {
- id: 42,
- text: 'Dummy Role',
- },
- ],
- },
- };
- dropdown = new AccessDropdown(options);
- });
-
- afterEach(() => {
- resetHTMLFixture();
- });
-
- describe('toggleLabel', () => {
- let $dropdownToggleText;
- const dummyItems = [
- { type: LEVEL_TYPES.ROLE, access_level: 42 },
- { type: LEVEL_TYPES.USER },
- { type: LEVEL_TYPES.USER },
- { type: LEVEL_TYPES.GROUP },
- { type: LEVEL_TYPES.GROUP },
- { type: LEVEL_TYPES.GROUP },
- { type: LEVEL_TYPES.DEPLOY_KEY },
- { type: LEVEL_TYPES.DEPLOY_KEY },
- { type: LEVEL_TYPES.DEPLOY_KEY },
- ];
-
- beforeEach(() => {
- $dropdownToggleText = $('.dropdown-toggle-text');
- });
-
- it('displays number of items', () => {
- dropdown.setSelectedItems(dummyItems);
- $dropdownToggleText.addClass('is-default');
-
- const label = dropdown.toggleLabel();
-
- expect(label).toBe('1 role, 2 users, 3 deploy keys, 3 groups');
- expect($dropdownToggleText).not.toHaveClass('is-default');
- });
-
- describe('without selected items', () => {
- beforeEach(() => {
- dropdown.setSelectedItems([]);
- });
-
- it('falls back to default label', () => {
- const label = dropdown.toggleLabel();
-
- expect(label).toBe(defaultLabel);
- expect($dropdownToggleText).toHaveClass('is-default');
- });
- });
-
- describe('with only role', () => {
- beforeEach(() => {
- dropdown.setSelectedItems(dummyItems.filter((item) => item.type === LEVEL_TYPES.ROLE));
- $dropdownToggleText.addClass('is-default');
- });
-
- it('displays the role name', () => {
- const label = dropdown.toggleLabel();
-
- expect(label).toBe('Dummy Role');
- expect($dropdownToggleText).not.toHaveClass('is-default');
- });
- });
-
- describe('with only users', () => {
- beforeEach(() => {
- dropdown.setSelectedItems(dummyItems.filter((item) => item.type === LEVEL_TYPES.USER));
- $dropdownToggleText.addClass('is-default');
- });
-
- it('displays number of users', () => {
- const label = dropdown.toggleLabel();
-
- expect(label).toBe('2 users');
- expect($dropdownToggleText).not.toHaveClass('is-default');
- });
- });
-
- describe('with only groups', () => {
- beforeEach(() => {
- dropdown.setSelectedItems(dummyItems.filter((item) => item.type === LEVEL_TYPES.GROUP));
- $dropdownToggleText.addClass('is-default');
- });
-
- it('displays number of groups', () => {
- const label = dropdown.toggleLabel();
-
- expect(label).toBe('3 groups');
- expect($dropdownToggleText).not.toHaveClass('is-default');
- });
- });
-
- describe('with users and groups', () => {
- beforeEach(() => {
- const selectedTypes = [LEVEL_TYPES.GROUP, LEVEL_TYPES.USER];
- dropdown.setSelectedItems(dummyItems.filter((item) => selectedTypes.includes(item.type)));
- $dropdownToggleText.addClass('is-default');
- });
-
- it('displays number of groups', () => {
- const label = dropdown.toggleLabel();
-
- expect(label).toBe('2 users, 3 groups');
- expect($dropdownToggleText).not.toHaveClass('is-default');
- });
- });
-
- describe('with users and deploy keys', () => {
- beforeEach(() => {
- const selectedTypes = [LEVEL_TYPES.DEPLOY_KEY, LEVEL_TYPES.USER];
- dropdown.setSelectedItems(dummyItems.filter((item) => selectedTypes.includes(item.type)));
- $dropdownToggleText.addClass('is-default');
- });
-
- it('displays number of deploy keys', () => {
- const label = dropdown.toggleLabel();
-
- expect(label).toBe('2 users, 3 deploy keys');
- expect($dropdownToggleText).not.toHaveClass('is-default');
- });
- });
- });
-
- describe('userRowHtml', () => {
- it('escapes users name', () => {
- const user = {
- avatar_url: '',
- name: '<img src=x onerror=alert(document.domain)>',
- username: 'test',
- };
- const template = dropdown.userRowHtml(user);
-
- expect(template).not.toContain(user.name);
- });
-
- it('show user avatar correctly', () => {
- const user = {
- id: 613,
- avatar_url: 'some_valid_avatar.png',
- name: 'test',
- username: 'test',
- };
- const template = dropdown.userRowHtml(user);
-
- expect(template).toContain(user.avatar_url);
- expect(template).not.toContain('identicon');
- });
-
- it('show identicon when user do not have avatar', () => {
- const user = {
- id: 613,
- avatar_url: '',
- name: 'test',
- username: 'test',
- };
- const template = dropdown.userRowHtml(user);
-
- expect(template).toContain('identicon');
- });
- });
-
- describe('deployKeyRowHtml', () => {
- const deployKey = {
- id: 1,
- title: 'title <script>alert(document.domain)</script>',
- fullname: 'fullname <script>alert(document.domain)</script>',
- avatar_url: '',
- username: '',
- };
-
- it('escapes deploy key title and fullname', () => {
- const template = dropdown.deployKeyRowHtml(deployKey);
-
- expect(template).not.toContain(deployKey.title);
- expect(template).not.toContain(deployKey.fullname);
- });
- });
-});
diff --git a/spec/frontend/projects/settings/components/new_access_dropdown_spec.js b/spec/frontend/projects/settings/components/new_access_dropdown_spec.js
index ce696ee321b..0ed2e51e8c3 100644
--- a/spec/frontend/projects/settings/components/new_access_dropdown_spec.js
+++ b/spec/frontend/projects/settings/components/new_access_dropdown_spec.js
@@ -14,13 +14,11 @@ import AccessDropdown, { i18n } from '~/projects/settings/components/access_drop
import { ACCESS_LEVELS, LEVEL_TYPES } from '~/projects/settings/constants';
jest.mock('~/projects/settings/api/access_dropdown_api', () => ({
- getGroups: jest.fn().mockResolvedValue({
- data: [
- { id: 4, name: 'group4' },
- { id: 5, name: 'group5' },
- { id: 6, name: 'group6' },
- ],
- }),
+ getGroups: jest.fn().mockResolvedValue([
+ { id: 4, name: 'group4' },
+ { id: 5, name: 'group5' },
+ { id: 6, name: 'group6' },
+ ]),
getUsers: jest.fn().mockResolvedValue({
data: [
{ id: 7, name: 'user7' },
@@ -50,6 +48,7 @@ jest.mock('~/projects/settings/api/access_dropdown_api', () => ({
describe('Access Level Dropdown', () => {
let wrapper;
+ const defaultToggleClass = 'gl-text-gray-500!';
const mockAccessLevelsData = [
{
id: 1,
@@ -63,6 +62,10 @@ describe('Access Level Dropdown', () => {
id: 3,
text: 'role3',
},
+ {
+ id: 0,
+ text: 'No one',
+ },
];
const createComponent = ({
@@ -140,7 +143,7 @@ describe('Access Level Dropdown', () => {
});
it('renders dropdown item for each access level type', () => {
- expect(findAllDropdownItems()).toHaveLength(12);
+ expect(findAllDropdownItems()).toHaveLength(13);
});
it.each`
@@ -177,26 +180,26 @@ describe('Access Level Dropdown', () => {
const customLabel = 'Set the access level';
createComponent({ label: customLabel });
expect(findDropdownToggleLabel()).toBe(customLabel);
- expect(findDropdown().props('toggleClass')).toBe('gl-text-gray-500!');
+ expect(findDropdown().props('toggleClass')[defaultToggleClass]).toBe(true);
});
it('when no items selected, displays a default fallback label and has default CSS class', () => {
- expect(findDropdownToggleLabel()).toBe(i18n.selectUsers);
- expect(findDropdown().props('toggleClass')).toBe('gl-text-gray-500!');
+ expect(findDropdownToggleLabel()).toBe(i18n.defaultLabel);
+ expect(findDropdown().props('toggleClass')[defaultToggleClass]).toBe(true);
});
- it('displays a number of selected items for each group level', async () => {
+ it('displays selected items for each group level', async () => {
dropdownItems.wrappers.forEach((item) => {
item.trigger('click');
});
await nextTick();
- expect(findDropdownToggleLabel()).toBe('3 roles, 3 users, 3 deploy keys, 3 groups');
+ expect(findDropdownToggleLabel()).toBe('No role, 3 users, 3 deploy keys, 3 groups');
});
it('with only role selected displays the role name and has no class applied', async () => {
await findItemByNameAndClick('role1');
expect(findDropdownToggleLabel()).toBe('role1');
- expect(findDropdown().props('toggleClass')).toBe('');
+ expect(findDropdown().props('toggleClass')[defaultToggleClass]).toBe(false);
});
it('with only groups selected displays the number of selected groups', async () => {
@@ -204,14 +207,14 @@ describe('Access Level Dropdown', () => {
await findItemByNameAndClick('group5');
await findItemByNameAndClick('group6');
expect(findDropdownToggleLabel()).toBe('3 groups');
- expect(findDropdown().props('toggleClass')).toBe('');
+ expect(findDropdown().props('toggleClass')[defaultToggleClass]).toBe(false);
});
it('with only users selected displays the number of selected users', async () => {
await findItemByNameAndClick('user7');
await findItemByNameAndClick('user8');
expect(findDropdownToggleLabel()).toBe('2 users');
- expect(findDropdown().props('toggleClass')).toBe('');
+ expect(findDropdown().props('toggleClass')[defaultToggleClass]).toBe(false);
});
it('with users and groups selected displays the number of selected users & groups', async () => {
@@ -220,7 +223,7 @@ describe('Access Level Dropdown', () => {
await findItemByNameAndClick('user7');
await findItemByNameAndClick('user9');
expect(findDropdownToggleLabel()).toBe('2 users, 2 groups');
- expect(findDropdown().props('toggleClass')).toBe('');
+ expect(findDropdown().props('toggleClass')[defaultToggleClass]).toBe(false);
});
it('with users and deploy keys selected displays the number of selected users & keys', async () => {
@@ -228,7 +231,7 @@ describe('Access Level Dropdown', () => {
await findItemByNameAndClick('key10');
await findItemByNameAndClick('key11');
expect(findDropdownToggleLabel()).toBe('1 user, 2 deploy keys');
- expect(findDropdown().props('toggleClass')).toBe('');
+ expect(findDropdown().props('toggleClass')[defaultToggleClass]).toBe(false);
});
});
@@ -393,4 +396,20 @@ describe('Access Level Dropdown', () => {
expect(wrapper.emitted('hidden')[0][0]).toStrictEqual([{ access_level: 2 }]);
});
});
+
+ describe('when no license and accessLevel is MERGE', () => {
+ beforeEach(async () => {
+ createComponent({ hasLicense: false, accessLevel: ACCESS_LEVELS.MERGE });
+ await waitForPromises();
+ });
+
+ it('dropdown is single-select', () => {
+ const dropdownItems = findAllDropdownItems();
+
+ findDropdownItemWithText(dropdownItems, mockAccessLevelsData[0].text).trigger('click');
+ findDropdownItemWithText(dropdownItems, mockAccessLevelsData[1].text).trigger('click');
+
+ expect(wrapper.emitted('select')[1]).toHaveLength(1);
+ });
+ });
});
diff --git a/spec/frontend/projects/settings_service_desk/components/custom_email_form_spec.js b/spec/frontend/projects/settings_service_desk/components/custom_email_form_spec.js
index ded8b181c4e..9b012995ea4 100644
--- a/spec/frontend/projects/settings_service_desk/components/custom_email_form_spec.js
+++ b/spec/frontend/projects/settings_service_desk/components/custom_email_form_spec.js
@@ -1,6 +1,8 @@
import { mount } from '@vue/test-utils';
+import { GlLink } from '@gitlab/ui';
import { nextTick } from 'vue';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { helpPagePath } from '~/helpers/help_page_helper';
import CustomEmailForm from '~/projects/settings_service_desk/components/custom_email_form.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import { I18N_FORM_FORWARDING_CLIPBOARD_BUTTON_TITLE } from '~/projects/settings_service_desk/custom_email_constants';
@@ -15,6 +17,7 @@ describe('CustomEmailForm', () => {
const findForm = () => wrapper.find('form');
const findClipboardButton = () => wrapper.findComponent(ClipboardButton);
+ const findLink = () => wrapper.findComponent(GlLink);
const findInputByTestId = (testId) => wrapper.findByTestId(testId).find('input');
const findCustomEmailInput = () => findInputByTestId('form-custom-email');
const findSmtpAddressInput = () => findInputByTestId('form-smtp-address');
@@ -35,6 +38,16 @@ describe('CustomEmailForm', () => {
wrapper = extendedWrapper(mount(CustomEmailForm, { propsData: { ...defaultProps, ...props } }));
};
+ it('displays help page link', () => {
+ createWrapper();
+
+ expect(findLink().attributes('href')).toBe(
+ helpPagePath('user/project/service_desk/configure.html', {
+ anchor: 'custom-email-address',
+ }),
+ );
+ });
+
it('renders a copy to clipboard button', () => {
createWrapper();
diff --git a/spec/frontend/projects/settings_service_desk/components/custom_email_wrapper_spec.js b/spec/frontend/projects/settings_service_desk/components/custom_email_wrapper_spec.js
index e54d09cf82f..174e05ceeee 100644
--- a/spec/frontend/projects/settings_service_desk/components/custom_email_wrapper_spec.js
+++ b/spec/frontend/projects/settings_service_desk/components/custom_email_wrapper_spec.js
@@ -1,7 +1,8 @@
import { nextTick } from 'vue';
-import { GlLink, GlLoadingIcon, GlAlert } from '@gitlab/ui';
+import { GlLoadingIcon, GlAlert } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import axios from '~/lib/utils/axios_utils';
import waitForPromises from 'helpers/wait_for_promises';
import { HTTP_STATUS_OK, HTTP_STATUS_NOT_FOUND } from '~/lib/utils/http_status';
@@ -40,19 +41,21 @@ describe('CustomEmailWrapper', () => {
const showToast = jest.fn();
const createWrapper = (props = {}) => {
- wrapper = mount(CustomEmailWrapper, {
- propsData: { ...defaultProps, ...props },
- mocks: {
- $toast: {
- show: showToast,
+ wrapper = extendedWrapper(
+ mount(CustomEmailWrapper, {
+ propsData: { ...defaultProps, ...props },
+ mocks: {
+ $toast: {
+ show: showToast,
+ },
},
- },
- });
+ }),
+ );
};
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findAlert = () => wrapper.findComponent(GlAlert);
- const findFeedbackLink = () => wrapper.findComponent(GlLink);
+ const findFeedbackLink = () => wrapper.findByTestId('feedback-link');
const findCustomEmailForm = () => wrapper.findComponent(CustomEmailForm);
const findCustomEmail = () => wrapper.findComponent(CustomEmail);
const findCustomEmailConfirmModal = () => wrapper.findComponent(CustomEmailConfirmModal);
diff --git a/spec/frontend/protected_branches/protected_branch_create_spec.js b/spec/frontend/protected_branches/protected_branch_create_spec.js
index 4b634c52b01..e2a0f02e0cf 100644
--- a/spec/frontend/protected_branches/protected_branch_create_spec.js
+++ b/spec/frontend/protected_branches/protected_branch_create_spec.js
@@ -1,5 +1,8 @@
+import MockAdapter from 'axios-mock-adapter';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import ProtectedBranchCreate from '~/protected_branches/protected_branch_create';
+import { ACCESS_LEVELS } from '~/protected_branches/constants';
+import axios from '~/lib/utils/axios_utils';
const FORCE_PUSH_TOGGLE_TESTID = 'force-push-toggle';
const CODE_OWNER_TOGGLE_TESTID = 'code-owner-toggle';
@@ -9,7 +12,12 @@ const IS_LOADING_CLASS = 'toggle-loading';
describe('ProtectedBranchCreate', () => {
beforeEach(() => {
- jest.spyOn(ProtectedBranchCreate.prototype, 'buildDropdowns').mockImplementation();
+ // eslint-disable-next-line no-unused-vars
+ const mock = new MockAdapter(axios);
+ window.gon = {
+ merge_access_levels: { roles: [] },
+ push_access_levels: { roles: [] },
+ };
});
const findForcePushToggle = () =>
@@ -34,6 +42,12 @@ describe('ProtectedBranchCreate', () => {
data-label="Toggle code owner approval"
data-is-checked="${codeOwnerToggleChecked}"
data-testid="${CODE_OWNER_TOGGLE_TESTID}"></span>
+ <div class="merge_access_levels-container">
+ <div class="js-allowed-to-merge"/>
+ </div>
+ <div class="push_access_levels-container">
+ <div class="js-allowed-to-push"/>
+ </div>
<input type="submit" />
</form>
`);
@@ -85,14 +99,6 @@ describe('ProtectedBranchCreate', () => {
forcePushToggleChecked: false,
codeOwnerToggleChecked: true,
});
-
- // Mock access levels. This should probably be improved in future iterations.
- protectedBranchCreate.merge_access_levels_dropdown = {
- getSelectedItems: () => [],
- };
- protectedBranchCreate.push_access_levels_dropdown = {
- getSelectedItems: () => [],
- };
});
afterEach(() => {
@@ -116,4 +122,31 @@ describe('ProtectedBranchCreate', () => {
});
});
});
+
+ describe('access dropdown', () => {
+ let protectedBranchCreate;
+
+ beforeEach(() => {
+ protectedBranchCreate = create();
+ });
+
+ it('should be initialized', () => {
+ expect(protectedBranchCreate[`${ACCESS_LEVELS.MERGE}_dropdown`]).toBeDefined();
+ expect(protectedBranchCreate[`${ACCESS_LEVELS.PUSH}_dropdown`]).toBeDefined();
+ });
+
+ describe('`select` event is emitted', () => {
+ const selected = ['foo', 'bar'];
+
+ it('should update selected merged access items', () => {
+ protectedBranchCreate[`${ACCESS_LEVELS.MERGE}_dropdown`].$emit('select', selected);
+ expect(protectedBranchCreate.selectedItems[ACCESS_LEVELS.MERGE]).toEqual(selected);
+ });
+
+ it('should update selected push access items', () => {
+ protectedBranchCreate[`${ACCESS_LEVELS.PUSH}_dropdown`].$emit('select', selected);
+ expect(protectedBranchCreate.selectedItems[ACCESS_LEVELS.PUSH]).toEqual(selected);
+ });
+ });
+ });
});
diff --git a/spec/frontend/protected_branches/protected_branch_edit_spec.js b/spec/frontend/protected_branches/protected_branch_edit_spec.js
index e1966908452..6422856ba22 100644
--- a/spec/frontend/protected_branches/protected_branch_edit_spec.js
+++ b/spec/frontend/protected_branches/protected_branch_edit_spec.js
@@ -20,7 +20,7 @@ describe('ProtectedBranchEdit', () => {
let mock;
beforeEach(() => {
- jest.spyOn(ProtectedBranchEdit.prototype, 'buildDropdowns').mockImplementation();
+ jest.spyOn(ProtectedBranchEdit.prototype, 'initDropdowns').mockImplementation();
mock = new MockAdapter(axios);
});
diff --git a/spec/frontend/protected_tags/mock_data.js b/spec/frontend/protected_tags/mock_data.js
new file mode 100644
index 00000000000..dacdecdfe74
--- /dev/null
+++ b/spec/frontend/protected_tags/mock_data.js
@@ -0,0 +1,18 @@
+export const mockAccessLevels = [
+ {
+ id: 30,
+ text: 'Developers + Maintainers',
+ },
+ {
+ id: 40,
+ text: 'Maintainers',
+ },
+ {
+ id: 60,
+ text: 'Instance admins',
+ },
+ {
+ id: 0,
+ text: 'No one',
+ },
+];
diff --git a/spec/frontend/protected_tags/protected_tag_edit_spec.js b/spec/frontend/protected_tags/protected_tag_edit_spec.js
new file mode 100644
index 00000000000..f56b3a70d1b
--- /dev/null
+++ b/spec/frontend/protected_tags/protected_tag_edit_spec.js
@@ -0,0 +1,113 @@
+import MockAdapter from 'axios-mock-adapter';
+import { ACCESS_LEVELS, LEVEL_TYPES } from '~/protected_tags/constants';
+import ProtectedTagEdit, { i18n } from '~/protected_tags/protected_tag_edit.vue';
+import AccessDropdown from '~/projects/settings/components/access_dropdown.vue';
+import axios from '~/lib/utils/axios_utils';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { HTTP_STATUS_BAD_REQUEST, HTTP_STATUS_OK } from '~/lib/utils/http_status';
+import waitForPromises from 'helpers/wait_for_promises';
+import { createAlert } from '~/alert';
+import { mockAccessLevels } from './mock_data';
+
+jest.mock('~/alert');
+
+describe('Protected Tag Edit', () => {
+ let wrapper;
+ let mockAxios;
+
+ const url = 'http://some.url';
+ const toggleClass = 'js-allowed-to-create gl-max-w-34';
+
+ const findAccessDropdown = () => wrapper.findComponent(AccessDropdown);
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(ProtectedTagEdit, {
+ propsData: {
+ url,
+ accessLevelsData: mockAccessLevels,
+ searchEnabled: false,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ window.gon = {
+ api_version: 'v4',
+ deploy_access_levels: {
+ roles: [],
+ },
+ };
+ mockAxios = new MockAdapter(axios);
+ createComponent();
+ });
+
+ afterEach(() => {
+ mockAxios.restore();
+ });
+
+ it('renders access dropdown with correct props', () => {
+ expect(findAccessDropdown().props()).toMatchObject({
+ toggleClass,
+ accessLevel: ACCESS_LEVELS.CREATE,
+ accessLevelsData: mockAccessLevels,
+ searchEnabled: false,
+ });
+ });
+
+ describe('when dropdown is closed and has no changes', () => {
+ it('does not make a patch request to update permission', () => {
+ jest.spyOn(axios, 'patch');
+
+ findAccessDropdown().vm.$emit('hidden', []);
+
+ expect(axios.patch).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when dropdown is closed and has changes', () => {
+ it('makes patch request to update permission', () => {
+ jest.spyOn(axios, 'patch');
+
+ const newPermissions = [{ id: 1, access_level: 30 }];
+ findAccessDropdown().vm.$emit('hidden', newPermissions);
+
+ expect(axios.patch).toHaveBeenCalled();
+ });
+ });
+
+ describe('when permission is updated successfully', () => {
+ beforeEach(async () => {
+ const updatedPermissions = [
+ { user_id: 1, id: 1 },
+ { group_id: 1, id: 2 },
+ { access_level: 3, id: 3 },
+ ];
+ mockAxios.onPatch().replyOnce(HTTP_STATUS_OK, { [ACCESS_LEVELS.CREATE]: updatedPermissions });
+ findAccessDropdown().vm.$emit('hidden', [{ user_id: 1 }]);
+ await waitForPromises();
+ });
+
+ it('should update selected items', () => {
+ const newPreselected = [
+ { user_id: 1, id: 1, type: LEVEL_TYPES.USER },
+ { group_id: 1, id: 2, type: LEVEL_TYPES.GROUP },
+ { access_level: 3, id: 3, type: LEVEL_TYPES.ROLE },
+ ];
+ expect(findAccessDropdown().props('preselectedItems')).toEqual(newPreselected);
+ });
+ });
+
+ describe('when permission update fails', () => {
+ beforeEach(async () => {
+ mockAxios.onPatch().replyOnce(HTTP_STATUS_BAD_REQUEST, {});
+ findAccessDropdown().vm.$emit('hidden', [{ user_id: 1 }]);
+ await waitForPromises();
+ });
+
+ it('should show error message', () => {
+ expect(createAlert).toHaveBeenCalledWith({
+ message: i18n.failureMessage,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/releases/__snapshots__/util_spec.js.snap b/spec/frontend/releases/__snapshots__/util_spec.js.snap
index 79792a4a0ea..c02c1bb959c 100644
--- a/spec/frontend/releases/__snapshots__/util_spec.js.snap
+++ b/spec/frontend/releases/__snapshots__/util_spec.js.snap
@@ -54,7 +54,19 @@ Object {
},
"commitPath": "http://localhost/releases-namespace/releases-project/-/commit/b83d6e391c22777fca1ed3012fce84f633d7fed0",
"createdAt": 2019-01-03T00:00:00.000Z,
- "descriptionHtml": "<p data-sourcepos=\\"1:1-1:23\\" dir=\\"auto\\">An okay release <gl-emoji title=\\"shrug\\" data-name=\\"shrug\\" data-unicode-version=\\"9.0\\">🤷</gl-emoji></p>",
+ "descriptionHtml": <p
+ data-sourcepos="1:1-1:23"
+ dir="auto"
+ >
+ An okay release
+ <gl-emoji
+ data-name="shrug"
+ data-unicode-version="9.0"
+ title="shrug"
+ >
+ 🤷
+ </gl-emoji>
+ </p>,
"evidences": Array [],
"historicalRelease": false,
"milestones": Array [],
@@ -148,7 +160,22 @@ Object {
},
"commitPath": "http://localhost/releases-namespace/releases-project/-/commit/b83d6e391c22777fca1ed3012fce84f633d7fed0",
"createdAt": 2018-12-03T00:00:00.000Z,
- "descriptionHtml": "<p data-sourcepos=\\"1:1-1:33\\" dir=\\"auto\\">Best. Release. <strong>Ever.</strong> <gl-emoji title=\\"rocket\\" data-name=\\"rocket\\" data-unicode-version=\\"6.0\\">🚀</gl-emoji></p>",
+ "descriptionHtml": <p
+ data-sourcepos="1:1-1:33"
+ dir="auto"
+ >
+ Best. Release.
+ <strong>
+ Ever.
+ </strong>
+ <gl-emoji
+ data-name="rocket"
+ data-unicode-version="6.0"
+ title="rocket"
+ >
+ 🚀
+ </gl-emoji>
+ </p>,
"evidences": Array [
Object {
"__typename": "ReleaseEvidence",
@@ -368,7 +395,22 @@ Object {
},
"commitPath": "http://localhost/releases-namespace/releases-project/-/commit/b83d6e391c22777fca1ed3012fce84f633d7fed0",
"createdAt": 2018-12-03T00:00:00.000Z,
- "descriptionHtml": "<p data-sourcepos=\\"1:1-1:33\\" dir=\\"auto\\">Best. Release. <strong>Ever.</strong> <gl-emoji title=\\"rocket\\" data-name=\\"rocket\\" data-unicode-version=\\"6.0\\">🚀</gl-emoji></p>",
+ "descriptionHtml": <p
+ data-sourcepos="1:1-1:33"
+ dir="auto"
+ >
+ Best. Release.
+ <strong>
+ Ever.
+ </strong>
+ <gl-emoji
+ data-name="rocket"
+ data-unicode-version="6.0"
+ title="rocket"
+ >
+ 🚀
+ </gl-emoji>
+ </p>,
"evidences": Array [
Object {
"__typename": "ReleaseEvidence",
diff --git a/spec/frontend/releases/components/__snapshots__/issuable_stats_spec.js.snap b/spec/frontend/releases/components/__snapshots__/issuable_stats_spec.js.snap
index e53ea6b2ec6..8f811d31af8 100644
--- a/spec/frontend/releases/components/__snapshots__/issuable_stats_spec.js.snap
+++ b/spec/frontend/releases/components/__snapshots__/issuable_stats_spec.js.snap
@@ -1,9 +1,68 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`~/releases/components/issuable_stats.vue matches snapshot 1`] = `
-"<div class=\\"gl-display-flex gl-flex-direction-column gl-flex-shrink-0 gl-mr-6 gl-mb-5\\"><span class=\\"gl-mb-2\\">
+<div
+ class="gl-display-flex gl-flex-direction-column gl-flex-shrink-0 gl-mb-5 gl-mr-6"
+>
+ <span
+ class="gl-mb-2"
+ >
Items
- <span class=\\"badge badge-muted badge-pill gl-badge sm\\"><!----> 10</span></span>
- <div class=\\"gl-display-flex\\"><span data-testid=\\"open-stat\\" class=\\"gl-white-space-pre-wrap\\">Open: <a href=\\"path/to/opened/items\\" class=\\"gl-link\\">1</a></span> <span class=\\"gl-mx-2\\">•</span> <span data-testid=\\"merged-stat\\" class=\\"gl-white-space-pre-wrap\\">Merged: <a href=\\"path/to/merged/items\\" class=\\"gl-link\\">7</a></span> <span class=\\"gl-mx-2\\">•</span> <span data-testid=\\"closed-stat\\" class=\\"gl-white-space-pre-wrap\\">Closed: <a href=\\"path/to/closed/items\\" class=\\"gl-link\\">2</a></span></div>
-</div>"
+ <span
+ class="badge badge-muted badge-pill gl-badge sm"
+ >
+ 10
+ </span>
+ </span>
+ <div
+ class="gl-display-flex"
+ >
+ <span
+ class="gl-white-space-pre-wrap"
+ data-testid="open-stat"
+ >
+ Open:
+ <a
+ class="gl-link"
+ href="path/to/opened/items"
+ >
+ 1
+ </a>
+ </span>
+ <span
+ class="gl-mx-2"
+ >
+ •
+ </span>
+ <span
+ class="gl-white-space-pre-wrap"
+ data-testid="merged-stat"
+ >
+ Merged:
+ <a
+ class="gl-link"
+ href="path/to/merged/items"
+ >
+ 7
+ </a>
+ </span>
+ <span
+ class="gl-mx-2"
+ >
+ •
+ </span>
+ <span
+ class="gl-white-space-pre-wrap"
+ data-testid="closed-stat"
+ >
+ Closed:
+ <a
+ class="gl-link"
+ href="path/to/closed/items"
+ >
+ 2
+ </a>
+ </span>
+ </div>
+</div>
`;
diff --git a/spec/frontend/releases/components/release_block_milestone_info_spec.js b/spec/frontend/releases/components/release_block_milestone_info_spec.js
index b8030ae1fd2..26068b392d1 100644
--- a/spec/frontend/releases/components/release_block_milestone_info_spec.js
+++ b/spec/frontend/releases/components/release_block_milestone_info_spec.js
@@ -41,10 +41,10 @@ describe('Release block milestone info', () => {
const progressBar = milestoneProgressBarContainer().findComponent(GlProgressBar);
expect(progressBar.exists()).toBe(true);
- expect(progressBar.attributes()).toEqual(
+ expect(progressBar.vm.$attrs).toEqual(
expect.objectContaining({
- value: '4',
- max: '9',
+ value: 4,
+ max: 9,
}),
);
});
diff --git a/spec/frontend/repository/components/__snapshots__/directory_download_links_spec.js.snap b/spec/frontend/repository/components/__snapshots__/directory_download_links_spec.js.snap
index 836ae5c22e6..02f75edd57a 100644
--- a/spec/frontend/repository/components/__snapshots__/directory_download_links_spec.js.snap
+++ b/spec/frontend/repository/components/__snapshots__/directory_download_links_spec.js.snap
@@ -2,14 +2,13 @@
exports[`Repository directory download links component renders downloads links for path app 1`] = `
<section
- class="border-top pt-1 mt-1"
+ class="border-top mt-1 pt-1"
>
<h5
- class="m-0 dropdown-bold-header"
+ class="dropdown-bold-header m-0"
>
Download this directory
</h5>
-
<div
class="dropdown-menu-content"
>
@@ -24,9 +23,7 @@ exports[`Repository directory download links component renders downloads links f
size="small"
variant="confirm"
>
-
zip
-
</gl-button-stub>
<gl-button-stub
buttontextclasses=""
@@ -36,9 +33,7 @@ exports[`Repository directory download links component renders downloads links f
size="small"
variant="default"
>
-
tar
-
</gl-button-stub>
</div>
</div>
@@ -47,14 +42,13 @@ exports[`Repository directory download links component renders downloads links f
exports[`Repository directory download links component renders downloads links for path app/assets 1`] = `
<section
- class="border-top pt-1 mt-1"
+ class="border-top mt-1 pt-1"
>
<h5
- class="m-0 dropdown-bold-header"
+ class="dropdown-bold-header m-0"
>
Download this directory
</h5>
-
<div
class="dropdown-menu-content"
>
@@ -69,9 +63,7 @@ exports[`Repository directory download links component renders downloads links f
size="small"
variant="confirm"
>
-
zip
-
</gl-button-stub>
<gl-button-stub
buttontextclasses=""
@@ -81,9 +73,7 @@ exports[`Repository directory download links component renders downloads links f
size="small"
variant="default"
>
-
tar
-
</gl-button-stub>
</div>
</div>
diff --git a/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap b/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
index ede04390586..3f901dc61b8 100644
--- a/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
+++ b/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
@@ -2,10 +2,10 @@
exports[`Repository last commit component renders commit widget 1`] = `
<div
- class="well-segment commit gl-p-5 gl-w-full gl-display-flex"
+ class="commit gl-display-flex gl-p-5 gl-w-full well-segment"
>
<user-avatar-link-stub
- class="gl-my-2 gl-mr-4"
+ class="gl-mr-4 gl-my-2"
imgalt=""
imgcssclasses=""
imgcsswrapperclasses=""
@@ -18,9 +18,8 @@ exports[`Repository last commit component renders commit widget 1`] = `
tooltiptext=""
username=""
/>
-
<div
- class="commit-detail flex-list gl-display-flex gl-justify-content-space-between gl-align-items-center gl-flex-grow-1 gl-min-w-0"
+ class="commit-detail flex-list gl-align-items-center gl-display-flex gl-flex-grow-1 gl-justify-content-space-between gl-min-w-0"
>
<div
class="commit-content"
@@ -32,9 +31,6 @@ exports[`Repository last commit component renders commit widget 1`] = `
>
Commit title
</gl-link-stub>
-
- <!---->
-
<div
class="committer"
>
@@ -42,12 +38,9 @@ exports[`Repository last commit component renders commit widget 1`] = `
class="commit-author-link js-user-link"
href="/test"
>
-
- Test
+ Test
</gl-link-stub>
-
- authored
-
+ authored
<timeago-tooltip-stub
cssclass=""
datetimeformat="DATE_WITH_TIME_FORMAT"
@@ -55,36 +48,24 @@ exports[`Repository last commit component renders commit widget 1`] = `
tooltipplacement="bottom"
/>
</div>
-
- <!---->
</div>
-
<div
class="gl-flex-grow-1"
/>
-
<div
- class="commit-actions gl-display-flex gl-flex-align gl-align-items-center gl-flex-direction-row"
+ class="commit-actions gl-align-items-center gl-display-flex gl-flex-align gl-flex-direction-row"
>
- <!---->
-
<div
class="ci-status-link"
>
- <gl-link-stub
+ <ci-badge-link-stub
+ aria-label="Pipeline: failed"
class="js-commit-pipeline"
- href="https://test.com/pipeline"
- title="Pipeline: failed"
- >
- <ci-icon-stub
- aria-label="Pipeline: failed"
- cssclasses=""
- size="24"
- status="[object Object]"
- />
- </gl-link-stub>
+ details-path="https://test.com/pipeline"
+ size="lg"
+ status="[object Object]"
+ />
</div>
-
<gl-button-group-stub
class="gl-ml-4 js-commit-sha-group"
>
@@ -100,7 +81,6 @@ exports[`Repository last commit component renders commit widget 1`] = `
>
12345678
</gl-button-stub>
-
<clipboard-button-stub
category="secondary"
class="input-group-text"
diff --git a/spec/frontend/repository/components/blob_content_viewer_spec.js b/spec/frontend/repository/components/blob_content_viewer_spec.js
index 5ac2627dc5d..cc077e20e0b 100644
--- a/spec/frontend/repository/components/blob_content_viewer_spec.js
+++ b/spec/frontend/repository/components/blob_content_viewer_spec.js
@@ -8,11 +8,11 @@ import MockAdapter from 'axios-mock-adapter';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import { createAlert } from '~/alert';
import BlobContent from '~/blob/components/blob_content.vue';
import BlobHeader from '~/blob/components/blob_header.vue';
import BlobButtonGroup from '~/repository/components/blob_button_group.vue';
import BlobContentViewer from '~/repository/components/blob_content_viewer.vue';
-import WebIdeLink from 'ee_else_ce/vue_shared/components/web_ide_link.vue';
import ForkSuggestion from '~/repository/components/fork_suggestion.vue';
import { loadViewer } from '~/repository/components/blob_viewers';
import DownloadViewer from '~/repository/components/blob_viewers/download_viewer.vue';
@@ -20,8 +20,6 @@ import EmptyViewer from '~/repository/components/blob_viewers/empty_viewer.vue';
import SourceViewer from '~/vue_shared/components/source_viewer/source_viewer.vue';
import blobInfoQuery from 'shared_queries/repository/blob_info.query.graphql';
import projectInfoQuery from '~/repository/queries/project_info.query.graphql';
-import userInfoQuery from '~/repository/queries/user_info.query.graphql';
-import applicationInfoQuery from '~/repository/queries/application_info.query.graphql';
import CodeIntelligence from '~/code_navigation/components/app.vue';
import * as urlUtility from '~/lib/utils/url_utility';
import { isLoggedIn, handleLocationHash } from '~/lib/utils/common_utils';
@@ -34,8 +32,6 @@ import {
simpleViewerMock,
richViewerMock,
projectMock,
- userInfoMock,
- applicationInfoMock,
userPermissionsMock,
propsMock,
refMock,
@@ -46,12 +42,11 @@ jest.mock('~/repository/components/blob_viewers');
jest.mock('~/lib/utils/url_utility');
jest.mock('~/lib/utils/common_utils');
jest.mock('~/blob/line_highlighter');
+jest.mock('~/alert');
let wrapper;
let blobInfoMockResolver;
-let userInfoMockResolver;
let projectInfoMockResolver;
-let applicationInfoMockResolver;
Vue.use(Vuex);
@@ -95,7 +90,7 @@ const createComponent = async (mockData = {}, mountFn = shallowMount, mockRoute
const projectInfo = {
__typename: 'Project',
- id: '123',
+ id: projectMock.id,
userPermissions: {
pushCode,
forkProject,
@@ -121,19 +116,9 @@ const createComponent = async (mockData = {}, mountFn = shallowMount, mockRoute
data: { isBinary, project: blobInfo },
});
- userInfoMockResolver = jest.fn().mockResolvedValue({
- data: { ...userInfoMock },
- });
-
- applicationInfoMockResolver = jest.fn().mockResolvedValue({
- data: { ...applicationInfoMock },
- });
-
const fakeApollo = createMockApollo([
[blobInfoQuery, blobInfoMockResolver],
- [userInfoQuery, userInfoMockResolver],
[projectInfoQuery, projectInfoMockResolver],
- [applicationInfoQuery, applicationInfoMockResolver],
]);
wrapper = extendedWrapper(
@@ -167,7 +152,6 @@ const execImmediately = (callback) => {
describe('Blob content viewer component', () => {
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findBlobHeader = () => wrapper.findComponent(BlobHeader);
- const findWebIdeLink = () => wrapper.findComponent(WebIdeLink);
const findBlobContent = () => wrapper.findComponent(BlobContent);
const findBlobButtonGroup = () => wrapper.findComponent(BlobButtonGroup);
const findForkSuggestion = () => wrapper.findComponent(ForkSuggestion);
@@ -197,9 +181,22 @@ describe('Blob content viewer component', () => {
expect(findBlobHeader().props('hasRenderError')).toEqual(false);
expect(findBlobHeader().props('hideViewerSwitcher')).toEqual(true);
expect(findBlobHeader().props('blob')).toEqual(simpleViewerMock);
+ expect(findBlobHeader().props('showForkSuggestion')).toEqual(false);
+ expect(findBlobHeader().props('projectPath')).toEqual(propsMock.projectPath);
+ expect(findBlobHeader().props('projectId')).toEqual(projectMock.id);
expect(mockRouterPush).not.toHaveBeenCalled();
});
+ it('creates an alert when the BlobHeader component emits an error', async () => {
+ await createComponent();
+
+ findBlobHeader().vm.$emit('error');
+
+ expect(createAlert).toHaveBeenCalledWith({
+ message: 'An error occurred while loading the file. Please try again.',
+ });
+ });
+
it('copies blob text to clipboard', async () => {
jest.spyOn(navigator.clipboard, 'writeText');
await createComponent();
@@ -401,45 +398,6 @@ describe('Blob content viewer component', () => {
});
describe('BlobHeader action slot', () => {
- const { ideEditPath, editBlobPath } = simpleViewerMock;
-
- it('renders WebIdeLink button in simple viewer', async () => {
- await createComponent({ inject: { BlobContent: true, BlobReplace: true } }, mount);
-
- expect(findWebIdeLink().props()).toMatchObject({
- editUrl: editBlobPath,
- webIdeUrl: ideEditPath,
- showEditButton: true,
- showGitpodButton: applicationInfoMock.gitpodEnabled,
- gitpodEnabled: userInfoMock.currentUser.gitpodEnabled,
- showPipelineEditorButton: true,
- gitpodUrl: simpleViewerMock.gitpodBlobUrl,
- pipelineEditorUrl: simpleViewerMock.pipelineEditorPath,
- userPreferencesGitpodPath: userInfoMock.currentUser.preferencesGitpodPath,
- userProfileEnableGitpodPath: userInfoMock.currentUser.profileEnableGitpodPath,
- });
- });
-
- it('renders WebIdeLink button in rich viewer', async () => {
- await createComponent({ blob: richViewerMock }, mount);
-
- expect(findWebIdeLink().props()).toMatchObject({
- editUrl: editBlobPath,
- webIdeUrl: ideEditPath,
- showEditButton: true,
- });
- });
-
- it('renders WebIdeLink button for binary files', async () => {
- mockAxios.onGet(legacyViewerUrl).replyOnce(HTTP_STATUS_OK, axiosMockResponse);
- await createComponent({}, mount);
- expect(findWebIdeLink().props()).toMatchObject({
- editUrl: editBlobPath,
- webIdeUrl: ideEditPath,
- showEditButton: false,
- });
- });
-
describe('blob header binary file', () => {
it('passes the correct isBinary value when viewing a binary file', async () => {
mockAxios.onGet(legacyViewerUrl).replyOnce(HTTP_STATUS_OK, axiosMockResponse);
@@ -465,7 +423,6 @@ describe('Blob content viewer component', () => {
expect(findBlobHeader().props('hideViewerSwitcher')).toBe(true);
expect(findBlobHeader().props('isBinary')).toBe(true);
- expect(findWebIdeLink().props('showEditButton')).toBe(false);
});
});
@@ -538,12 +495,12 @@ describe('Blob content viewer component', () => {
beforeEach(() => createComponent({}, mount));
it('simple edit redirects to the simple editor', () => {
- findWebIdeLink().vm.$emit('edit', 'simple');
+ findBlobHeader().vm.$emit('edit', 'simple');
expect(urlUtility.redirectTo).toHaveBeenCalledWith(simpleViewerMock.editBlobPath); // eslint-disable-line import/no-deprecated
});
it('IDE edit redirects to the IDE editor', () => {
- findWebIdeLink().vm.$emit('edit', 'ide');
+ findBlobHeader().vm.$emit('edit', 'ide');
expect(urlUtility.redirectTo).toHaveBeenCalledWith(simpleViewerMock.ideEditPath); // eslint-disable-line import/no-deprecated
});
@@ -572,7 +529,7 @@ describe('Blob content viewer component', () => {
mount,
);
- findWebIdeLink().vm.$emit('edit', 'simple');
+ findBlobHeader().vm.$emit('edit', 'simple');
await nextTick();
expect(findForkSuggestion().exists()).toBe(showForkSuggestion);
diff --git a/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap b/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
index 85bf683fdf6..17ebdf8725d 100644
--- a/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
+++ b/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
@@ -5,10 +5,10 @@ exports[`Repository table row component renders a symlink table row 1`] = `
class="tree-item"
>
<td
- class="tree-item-file-name cursor-default position-relative"
+ class="cursor-default position-relative tree-item-file-name"
>
<a
- class="tree-item-link str-truncated"
+ class="str-truncated tree-item-link"
data-qa-selector="file_name_link"
href="https://test.com"
title="test"
@@ -26,11 +26,6 @@ exports[`Repository table row component renders a symlink table row 1`] = `
test
</span>
</a>
-
- <!---->
-
- <!---->
-
<gl-icon-stub
class="ml-1"
name="lock"
@@ -38,30 +33,25 @@ exports[`Repository table row component renders a symlink table row 1`] = `
title="Locked by Root"
/>
</td>
-
<td
- class="d-none d-sm-table-cell tree-commit cursor-default gl-text-secondary"
+ class="cursor-default d-none d-sm-table-cell gl-text-secondary tree-commit"
>
<gl-link-stub
- class="str-truncated-100 tree-commit-link gl-text-secondary"
+ class="gl-text-secondary str-truncated-100 tree-commit-link"
/>
-
- <gl-intersection-observer-stub>
- <!---->
- </gl-intersection-observer-stub>
+ <gl-intersection-observer-stub />
</td>
-
<td
- class="tree-time-ago text-right cursor-default gl-text-secondary"
+ class="cursor-default gl-text-secondary text-right tree-time-ago"
>
- <timeago-tooltip-stub
- cssclass=""
- datetimeformat="DATE_WITH_TIME_FORMAT"
- time="2019-01-01"
- tooltipplacement="top"
- />
-
- <!---->
+ <gl-intersection-observer-stub>
+ <timeago-tooltip-stub
+ cssclass=""
+ datetimeformat="DATE_WITH_TIME_FORMAT"
+ time="2019-01-01"
+ tooltipplacement="top"
+ />
+ </gl-intersection-observer-stub>
</td>
</tr>
`;
@@ -71,10 +61,10 @@ exports[`Repository table row component renders table row 1`] = `
class="tree-item"
>
<td
- class="tree-item-file-name cursor-default position-relative"
+ class="cursor-default position-relative tree-item-file-name"
>
<a
- class="tree-item-link str-truncated"
+ class="str-truncated tree-item-link"
data-qa-selector="file_name_link"
href="https://test.com"
title="test"
@@ -92,11 +82,6 @@ exports[`Repository table row component renders table row 1`] = `
test
</span>
</a>
-
- <!---->
-
- <!---->
-
<gl-icon-stub
class="ml-1"
name="lock"
@@ -104,30 +89,25 @@ exports[`Repository table row component renders table row 1`] = `
title="Locked by Root"
/>
</td>
-
<td
- class="d-none d-sm-table-cell tree-commit cursor-default gl-text-secondary"
+ class="cursor-default d-none d-sm-table-cell gl-text-secondary tree-commit"
>
<gl-link-stub
- class="str-truncated-100 tree-commit-link gl-text-secondary"
+ class="gl-text-secondary str-truncated-100 tree-commit-link"
/>
-
- <gl-intersection-observer-stub>
- <!---->
- </gl-intersection-observer-stub>
+ <gl-intersection-observer-stub />
</td>
-
<td
- class="tree-time-ago text-right cursor-default gl-text-secondary"
+ class="cursor-default gl-text-secondary text-right tree-time-ago"
>
- <timeago-tooltip-stub
- cssclass=""
- datetimeformat="DATE_WITH_TIME_FORMAT"
- time="2019-01-01"
- tooltipplacement="top"
- />
-
- <!---->
+ <gl-intersection-observer-stub>
+ <timeago-tooltip-stub
+ cssclass=""
+ datetimeformat="DATE_WITH_TIME_FORMAT"
+ time="2019-01-01"
+ tooltipplacement="top"
+ />
+ </gl-intersection-observer-stub>
</td>
</tr>
`;
@@ -137,10 +117,10 @@ exports[`Repository table row component renders table row for path with special
class="tree-item"
>
<td
- class="tree-item-file-name cursor-default position-relative"
+ class="cursor-default position-relative tree-item-file-name"
>
<a
- class="tree-item-link str-truncated"
+ class="str-truncated tree-item-link"
data-qa-selector="file_name_link"
href="https://test.com"
title="test"
@@ -158,11 +138,6 @@ exports[`Repository table row component renders table row for path with special
test
</span>
</a>
-
- <!---->
-
- <!---->
-
<gl-icon-stub
class="ml-1"
name="lock"
@@ -170,30 +145,25 @@ exports[`Repository table row component renders table row for path with special
title="Locked by Root"
/>
</td>
-
<td
- class="d-none d-sm-table-cell tree-commit cursor-default gl-text-secondary"
+ class="cursor-default d-none d-sm-table-cell gl-text-secondary tree-commit"
>
<gl-link-stub
- class="str-truncated-100 tree-commit-link gl-text-secondary"
+ class="gl-text-secondary str-truncated-100 tree-commit-link"
/>
-
- <gl-intersection-observer-stub>
- <!---->
- </gl-intersection-observer-stub>
+ <gl-intersection-observer-stub />
</td>
-
<td
- class="tree-time-ago text-right cursor-default gl-text-secondary"
+ class="cursor-default gl-text-secondary text-right tree-time-ago"
>
- <timeago-tooltip-stub
- cssclass=""
- datetimeformat="DATE_WITH_TIME_FORMAT"
- time="2019-01-01"
- tooltipplacement="top"
- />
-
- <!---->
+ <gl-intersection-observer-stub>
+ <timeago-tooltip-stub
+ cssclass=""
+ datetimeformat="DATE_WITH_TIME_FORMAT"
+ time="2019-01-01"
+ tooltipplacement="top"
+ />
+ </gl-intersection-observer-stub>
</td>
</tr>
`;
diff --git a/spec/frontend/repository/mock_data.js b/spec/frontend/repository/mock_data.js
index e20849d1085..c60b6ace598 100644
--- a/spec/frontend/repository/mock_data.js
+++ b/spec/frontend/repository/mock_data.js
@@ -73,17 +73,6 @@ export const projectMock = {
},
};
-export const userInfoMock = {
- currentUser: {
- id: '123',
- gitpodEnabled: true,
- preferencesGitpodPath: '/-/profile/preferences#user_gitpod_enabled',
- profileEnableGitpodPath: '/-/profile?user%5Bgitpod_enabled%5D=true',
- },
-};
-
-export const applicationInfoMock = { gitpodEnabled: true };
-
export const propsMock = { path: 'some_file.js', projectPath: 'some/path' };
export const refMock = 'default-ref';
diff --git a/spec/frontend/search/mock_data.js b/spec/frontend/search/mock_data.js
index a063f20aca6..7bddc4b1c48 100644
--- a/spec/frontend/search/mock_data.js
+++ b/spec/frontend/search/mock_data.js
@@ -194,7 +194,7 @@ export const MOCK_DATA_FOR_NAVIGATION_ACTION_MUTATION = {
label: 'Projects',
scope: 'projects',
link: '/search?scope=projects&search=et',
- count_link: '/search/count?scope=projects&search=et',
+ count_link: null,
},
};
diff --git a/spec/frontend/search/sidebar/components/app_spec.js b/spec/frontend/search/sidebar/components/app_spec.js
index a4559c2dc34..8e23f9c1680 100644
--- a/spec/frontend/search/sidebar/components/app_spec.js
+++ b/spec/frontend/search/sidebar/components/app_spec.js
@@ -2,14 +2,26 @@ import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
+import {
+ SEARCH_TYPE_ZOEKT,
+ SEARCH_TYPE_ADVANCED,
+ SEARCH_TYPE_BASIC,
+} from '~/search/sidebar/constants';
import { MOCK_QUERY } from 'jest/search/mock_data';
+import { toggleSuperSidebarCollapsed } from '~/super_sidebar/super_sidebar_collapsed_state_manager';
import GlobalSearchSidebar from '~/search/sidebar/components/app.vue';
import IssuesFilters from '~/search/sidebar/components/issues_filters.vue';
import MergeRequestsFilters from '~/search/sidebar/components/merge_requests_filters.vue';
import BlobsFilters from '~/search/sidebar/components/blobs_filters.vue';
import ProjectsFilters from '~/search/sidebar/components/projects_filters.vue';
+import NotesFilters from '~/search/sidebar/components/notes_filters.vue';
+import CommitsFilters from '~/search/sidebar/components/commits_filters.vue';
import ScopeLegacyNavigation from '~/search/sidebar/components/scope_legacy_navigation.vue';
+import SmallScreenDrawerNavigation from '~/search/sidebar/components/small_screen_drawer_navigation.vue';
import ScopeSidebarNavigation from '~/search/sidebar/components/scope_sidebar_navigation.vue';
+import DomElementListener from '~/vue_shared/components/dom_element_listener.vue';
+
+jest.mock('~/super_sidebar/super_sidebar_collapsed_state_manager');
Vue.use(Vuex);
@@ -20,7 +32,7 @@ describe('GlobalSearchSidebar', () => {
currentScope: jest.fn(() => 'issues'),
};
- const createComponent = (initialState = {}, ff = false) => {
+ const createComponent = (initialState = {}) => {
const store = new Vuex.Store({
state: {
urlQuery: MOCK_QUERY,
@@ -33,7 +45,8 @@ describe('GlobalSearchSidebar', () => {
store,
provide: {
glFeatures: {
- searchProjectsHideArchived: ff,
+ searchNotesHideArchivedProjects: true,
+ searchCommitsHideArchivedProjects: true,
},
},
});
@@ -44,69 +57,111 @@ describe('GlobalSearchSidebar', () => {
const findMergeRequestsFilters = () => wrapper.findComponent(MergeRequestsFilters);
const findBlobsFilters = () => wrapper.findComponent(BlobsFilters);
const findProjectsFilters = () => wrapper.findComponent(ProjectsFilters);
+ const findNotesFilters = () => wrapper.findComponent(NotesFilters);
+ const findCommitsFilters = () => wrapper.findComponent(CommitsFilters);
const findScopeLegacyNavigation = () => wrapper.findComponent(ScopeLegacyNavigation);
+ const findSmallScreenDrawerNavigation = () => wrapper.findComponent(SmallScreenDrawerNavigation);
const findScopeSidebarNavigation = () => wrapper.findComponent(ScopeSidebarNavigation);
+ const findDomElementListener = () => wrapper.findComponent(DomElementListener);
describe('renders properly', () => {
describe('always', () => {
beforeEach(() => {
createComponent();
});
+
it(`shows section`, () => {
expect(findSidebarSection().exists()).toBe(true);
});
});
describe.each`
- scope | filter
- ${'issues'} | ${findIssuesFilters}
- ${'merge_requests'} | ${findMergeRequestsFilters}
- ${'blobs'} | ${findBlobsFilters}
- `('with sidebar $scope scope:', ({ scope, filter }) => {
+ scope | filter | searchType | isShown
+ ${'issues'} | ${findIssuesFilters} | ${SEARCH_TYPE_BASIC} | ${true}
+ ${'merge_requests'} | ${findMergeRequestsFilters} | ${SEARCH_TYPE_BASIC} | ${true}
+ ${'projects'} | ${findProjectsFilters} | ${SEARCH_TYPE_BASIC} | ${true}
+ ${'blobs'} | ${findBlobsFilters} | ${SEARCH_TYPE_BASIC} | ${false}
+ ${'blobs'} | ${findBlobsFilters} | ${SEARCH_TYPE_ADVANCED} | ${true}
+ ${'blobs'} | ${findBlobsFilters} | ${SEARCH_TYPE_ZOEKT} | ${false}
+ ${'notes'} | ${findNotesFilters} | ${SEARCH_TYPE_BASIC} | ${false}
+ ${'notes'} | ${findNotesFilters} | ${SEARCH_TYPE_ADVANCED} | ${true}
+ ${'commits'} | ${findCommitsFilters} | ${SEARCH_TYPE_BASIC} | ${false}
+ ${'commits'} | ${findCommitsFilters} | ${SEARCH_TYPE_ADVANCED} | ${true}
+ `('with sidebar $scope scope:', ({ scope, filter, searchType, isShown }) => {
beforeEach(() => {
getterSpies.currentScope = jest.fn(() => scope);
- createComponent({ urlQuery: { scope } });
+ createComponent({ urlQuery: { scope }, searchType });
});
- it(`shows filter ${filter.name.replace('find', '')}`, () => {
- expect(filter().exists()).toBe(true);
+ it(`renders correctly filter ${filter.name.replace(
+ 'find',
+ '',
+ )} when search_type ${searchType}`, () => {
+ expect(filter().exists()).toBe(isShown);
});
});
- describe.each`
- featureFlag
- ${false}
- ${true}
- `('with sidebar $scope scope:', ({ featureFlag }) => {
+ describe('filters for blobs will not load if zoekt is enabled', () => {
+ beforeEach(() => {
+ createComponent({ urlQuery: { scope: 'blobs' }, searchType: SEARCH_TYPE_ZOEKT });
+ });
+
+ it("doesn't render blobs filters", () => {
+ expect(findBlobsFilters().exists()).toBe(false);
+ });
+ });
+
+ describe('with sidebar scope: projects', () => {
beforeEach(() => {
getterSpies.currentScope = jest.fn(() => 'projects');
- createComponent({ urlQuery: { scope: 'projects' } }, featureFlag);
+ createComponent({ urlQuery: { scope: 'projects' } });
});
- it(`shows filter ProjectsFilters}`, () => {
- expect(findProjectsFilters().exists()).toBe(featureFlag);
+ it(`shows filter ProjectsFilters`, () => {
+ expect(findProjectsFilters().exists()).toBe(true);
});
});
describe.each`
currentScope | sidebarNavShown | legacyNavShown
${'issues'} | ${false} | ${true}
- ${''} | ${false} | ${false}
+ ${'test'} | ${false} | ${true}
${'issues'} | ${true} | ${false}
- ${''} | ${true} | ${false}
- `('renders navigation', ({ currentScope, sidebarNavShown, legacyNavShown }) => {
- beforeEach(() => {
- getterSpies.currentScope = jest.fn(() => currentScope);
- createComponent({ useSidebarNavigation: sidebarNavShown });
- });
+ ${'test'} | ${true} | ${false}
+ `(
+ 'renders navigation for scope $currentScope',
+ ({ currentScope, sidebarNavShown, legacyNavShown }) => {
+ beforeEach(() => {
+ getterSpies.currentScope = jest.fn(() => currentScope);
+ createComponent({ useSidebarNavigation: sidebarNavShown });
+ });
- it(`${!legacyNavShown ? 'hides' : 'shows'} the legacy navigation`, () => {
- expect(findScopeLegacyNavigation().exists()).toBe(legacyNavShown);
- });
+ it(`renders navigation correctly with legacyNavShown ${legacyNavShown}`, () => {
+ expect(findScopeLegacyNavigation().exists()).toBe(legacyNavShown);
+ expect(findSmallScreenDrawerNavigation().exists()).toBe(legacyNavShown);
+ });
- it(`${!sidebarNavShown ? 'hides' : 'shows'} the sidebar navigation`, () => {
- expect(findScopeSidebarNavigation().exists()).toBe(sidebarNavShown);
- });
+ it(`renders navigation correctly with sidebarNavShown ${sidebarNavShown}`, () => {
+ expect(findScopeSidebarNavigation().exists()).toBe(sidebarNavShown);
+ });
+ },
+ );
+ });
+
+ describe('when useSidebarNavigation=true', () => {
+ beforeEach(() => {
+ createComponent({ useSidebarNavigation: true });
+ });
+
+ it('toggles super sidebar when button is clicked', () => {
+ const elListener = findDomElementListener();
+
+ expect(toggleSuperSidebarCollapsed).not.toHaveBeenCalled();
+
+ elListener.vm.$emit('click');
+
+ expect(toggleSuperSidebarCollapsed).toHaveBeenCalledTimes(1);
+ expect(elListener.props('selector')).toBe('#js-open-mobile-filters');
});
});
});
diff --git a/spec/frontend/search/sidebar/components/blobs_filters_spec.js b/spec/frontend/search/sidebar/components/blobs_filters_spec.js
index ff93e6f32e4..729fae44c19 100644
--- a/spec/frontend/search/sidebar/components/blobs_filters_spec.js
+++ b/spec/frontend/search/sidebar/components/blobs_filters_spec.js
@@ -1,28 +1,93 @@
import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
+import Vuex from 'vuex';
+import { MOCK_QUERY } from 'jest/search/mock_data';
import BlobsFilters from '~/search/sidebar/components/blobs_filters.vue';
import LanguageFilter from '~/search/sidebar/components/language_filter/index.vue';
-import FiltersTemplate from '~/search/sidebar/components/filters_template.vue';
+import ArchivedFilter from '~/search/sidebar/components/archived_filter/index.vue';
+import { SEARCH_TYPE_ADVANCED } from '~/search/sidebar/constants';
+
+Vue.use(Vuex);
describe('GlobalSearch BlobsFilters', () => {
let wrapper;
- const findLanguageFilter = () => wrapper.findComponent(LanguageFilter);
- const findFiltersTemplate = () => wrapper.findComponent(FiltersTemplate);
+ const defaultGetters = {
+ currentScope: () => 'blobs',
+ };
- const createComponent = () => {
- wrapper = shallowMount(BlobsFilters);
+ const createComponent = ({ initialState = {}, searchBlobsHideArchivedProjects = true } = {}) => {
+ const store = new Vuex.Store({
+ state: {
+ urlQuery: MOCK_QUERY,
+ useSidebarNavigation: false,
+ searchType: SEARCH_TYPE_ADVANCED,
+ ...initialState,
+ },
+ getters: defaultGetters,
+ });
+
+ wrapper = shallowMount(BlobsFilters, {
+ store,
+ provide: {
+ glFeatures: {
+ searchBlobsHideArchivedProjects,
+ },
+ },
+ });
};
- describe('Renders correctly', () => {
+ const findLanguageFilter = () => wrapper.findComponent(LanguageFilter);
+ const findArchivedFilter = () => wrapper.findComponent(ArchivedFilter);
+ const findDividers = () => wrapper.findAll('hr');
+
+ describe.each`
+ description | searchBlobsHideArchivedProjects
+ ${'Renders correctly with Archived Filter enabled'} | ${true}
+ ${'Renders correctly with Archived Filter disabled'} | ${false}
+ `('$description', ({ searchBlobsHideArchivedProjects }) => {
+ beforeEach(() => {
+ createComponent({
+ searchBlobsHideArchivedProjects,
+ });
+ });
+
+ it('renders LanguageFilter', () => {
+ expect(findLanguageFilter().exists()).toBe(true);
+ });
+
+ it(`renders correctly ArchivedFilter when searchBlobsHideArchivedProjects is ${searchBlobsHideArchivedProjects}`, () => {
+ expect(findArchivedFilter().exists()).toBe(searchBlobsHideArchivedProjects);
+ });
+
+ it('renders divider correctly', () => {
+ const dividersCount = searchBlobsHideArchivedProjects ? 1 : 0;
+ expect(findDividers()).toHaveLength(dividersCount);
+ });
+ });
+
+ describe('Renders correctly in new nav', () => {
beforeEach(() => {
- createComponent();
+ createComponent({
+ initialState: {
+ searchType: SEARCH_TYPE_ADVANCED,
+ useSidebarNavigation: true,
+ },
+ searchBlobsHideArchivedProjects: true,
+ });
});
- it('renders FiltersTemplate', () => {
+
+ it('renders correctly LanguageFilter', () => {
expect(findLanguageFilter().exists()).toBe(true);
});
- it('renders ConfidentialityFilter', () => {
- expect(findFiltersTemplate().exists()).toBe(true);
+ it('renders correctly ArchivedFilter', () => {
+ expect(findArchivedFilter().exists()).toBe(true);
+ });
+
+ it("doesn't render dividers", () => {
+ expect(findDividers()).toHaveLength(0);
});
});
});
diff --git a/spec/frontend/search/sidebar/components/commits_filters_spec.js b/spec/frontend/search/sidebar/components/commits_filters_spec.js
new file mode 100644
index 00000000000..cb47c6833ef
--- /dev/null
+++ b/spec/frontend/search/sidebar/components/commits_filters_spec.js
@@ -0,0 +1,28 @@
+import { shallowMount } from '@vue/test-utils';
+import CommitsFilters from '~/search/sidebar/components/commits_filters.vue';
+import ArchivedFilter from '~/search/sidebar/components/archived_filter/index.vue';
+import FiltersTemplate from '~/search/sidebar/components/filters_template.vue';
+
+describe('GlobalSearch CommitsFilters', () => {
+ let wrapper;
+
+ const findArchivedFilter = () => wrapper.findComponent(ArchivedFilter);
+ const findFiltersTemplate = () => wrapper.findComponent(FiltersTemplate);
+
+ const createComponent = () => {
+ wrapper = shallowMount(CommitsFilters);
+ };
+
+ describe('Renders correctly', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+ it('renders ArchivedFilter', () => {
+ expect(findArchivedFilter().exists()).toBe(true);
+ });
+
+ it('renders FiltersTemplate', () => {
+ expect(findFiltersTemplate().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/search/sidebar/components/issues_filters_spec.js b/spec/frontend/search/sidebar/components/issues_filters_spec.js
index 84c4258cbdb..39d10cbb8b4 100644
--- a/spec/frontend/search/sidebar/components/issues_filters_spec.js
+++ b/spec/frontend/search/sidebar/components/issues_filters_spec.js
@@ -7,6 +7,8 @@ import IssuesFilters from '~/search/sidebar/components/issues_filters.vue';
import ConfidentialityFilter from '~/search/sidebar/components/confidentiality_filter/index.vue';
import StatusFilter from '~/search/sidebar/components/status_filter/index.vue';
import LabelFilter from '~/search/sidebar/components/label_filter/index.vue';
+import ArchivedFilter from '~/search/sidebar/components/archived_filter/index.vue';
+import { SEARCH_TYPE_ADVANCED, SEARCH_TYPE_BASIC } from '~/search/sidebar/constants';
Vue.use(Vuex);
@@ -17,10 +19,16 @@ describe('GlobalSearch IssuesFilters', () => {
currentScope: () => 'issues',
};
- const createComponent = (initialState, ff = true) => {
+ const createComponent = ({
+ initialState = {},
+ searchIssueLabelAggregation = true,
+ searchIssuesHideArchivedProjects = true,
+ } = {}) => {
const store = new Vuex.Store({
state: {
urlQuery: MOCK_QUERY,
+ useSidebarNavigation: false,
+ searchType: SEARCH_TYPE_ADVANCED,
...initialState,
},
getters: defaultGetters,
@@ -30,7 +38,8 @@ describe('GlobalSearch IssuesFilters', () => {
store,
provide: {
glFeatures: {
- searchIssueLabelAggregation: ff,
+ searchIssueLabelAggregation,
+ searchIssuesHideArchivedProjects,
},
},
});
@@ -39,12 +48,23 @@ describe('GlobalSearch IssuesFilters', () => {
const findStatusFilter = () => wrapper.findComponent(StatusFilter);
const findConfidentialityFilter = () => wrapper.findComponent(ConfidentialityFilter);
const findLabelFilter = () => wrapper.findComponent(LabelFilter);
+ const findArchivedFilter = () => wrapper.findComponent(ArchivedFilter);
const findDividers = () => wrapper.findAll('hr');
- describe('Renders correctly with FF enabled', () => {
+ describe.each`
+ description | searchIssueLabelAggregation | searchIssuesHideArchivedProjects
+ ${'Renders correctly with Label Filter disabled'} | ${false} | ${true}
+ ${'Renders correctly with Archived Filter disabled'} | ${true} | ${false}
+ ${'Renders correctly with Archived Filter and Label Filter disabled'} | ${false} | ${false}
+ ${'Renders correctly with Archived Filter and Label Filter enabled'} | ${true} | ${true}
+ `('$description', ({ searchIssueLabelAggregation, searchIssuesHideArchivedProjects }) => {
beforeEach(() => {
- createComponent({ urlQuery: MOCK_QUERY });
+ createComponent({
+ searchIssueLabelAggregation,
+ searchIssuesHideArchivedProjects,
+ });
});
+
it('renders StatusFilter', () => {
expect(findStatusFilter().exists()).toBe(true);
});
@@ -53,18 +73,30 @@ describe('GlobalSearch IssuesFilters', () => {
expect(findConfidentialityFilter().exists()).toBe(true);
});
- it('renders LabelFilter', () => {
- expect(findLabelFilter().exists()).toBe(true);
+ it(`renders LabelFilter correctly when searchIssueLabelAggregation is ${searchIssueLabelAggregation}`, () => {
+ expect(findLabelFilter().exists()).toBe(searchIssueLabelAggregation);
});
- it('renders dividers correctly', () => {
- expect(findDividers()).toHaveLength(2);
+ it(`renders ArchivedFilter correctly when searchIssuesHideArchivedProjects is ${searchIssuesHideArchivedProjects}`, () => {
+ expect(findArchivedFilter().exists()).toBe(searchIssuesHideArchivedProjects);
+ });
+
+ it('renders dividers correctly', () => {
+ // one divider is always rendered, regardless of the feature flags
+ let dividersCount = 1;
+ if (searchIssueLabelAggregation) {
+ dividersCount += 1;
+ }
+ if (searchIssuesHideArchivedProjects) {
+ dividersCount += 1;
+ }
+ expect(findDividers()).toHaveLength(dividersCount);
});
});
- describe('Renders correctly with FF disabled', () => {
+ describe('Renders correctly with basic search', () => {
beforeEach(() => {
- createComponent({ urlQuery: MOCK_QUERY }, false);
+ createComponent({ initialState: { searchType: SEARCH_TYPE_BASIC } });
});
it('renders StatusFilter', () => {
expect(findStatusFilter().exists()).toBe(true);
@@ -78,15 +110,51 @@ describe('GlobalSearch IssuesFilters', () => {
expect(findLabelFilter().exists()).toBe(false);
});
- it('renders divider correctly', () => {
+ it("doesn't render ArchivedFilter", () => {
+ expect(findArchivedFilter().exists()).toBe(false);
+ });
+
+ it('renders 1 divider', () => {
expect(findDividers()).toHaveLength(1);
});
});
+ describe('Renders correctly in new nav', () => {
+ beforeEach(() => {
+ createComponent({
+ initialState: {
+ searchType: SEARCH_TYPE_ADVANCED,
+ useSidebarNavigation: true,
+ },
+ searchIssueLabelAggregation: true,
+ searchIssuesHideArchivedProjects: true,
+ });
+ });
+ it('renders StatusFilter', () => {
+ expect(findStatusFilter().exists()).toBe(true);
+ });
+
+ it('renders ConfidentialityFilter', () => {
+ expect(findConfidentialityFilter().exists()).toBe(true);
+ });
+
+ it('renders LabelFilter', () => {
+ expect(findLabelFilter().exists()).toBe(true);
+ });
+
+ it('renders ArchivedFilter', () => {
+ expect(findArchivedFilter().exists()).toBe(true);
+ });
+
+ it("doesn't render dividers", () => {
+ expect(findDividers()).toHaveLength(0);
+ });
+ });
+
describe('Renders correctly with wrong scope', () => {
beforeEach(() => {
- defaultGetters.currentScope = () => 'blobs';
- createComponent({ urlQuery: MOCK_QUERY });
+ defaultGetters.currentScope = () => 'test';
+ createComponent();
});
it("doesn't render StatusFilter", () => {
expect(findStatusFilter().exists()).toBe(false);
@@ -100,6 +168,10 @@ describe('GlobalSearch IssuesFilters', () => {
expect(findLabelFilter().exists()).toBe(false);
});
+ it("doesn't render ArchivedFilter", () => {
+ expect(findArchivedFilter().exists()).toBe(false);
+ });
+
it("doesn't render dividers", () => {
expect(findDividers()).toHaveLength(0);
});
diff --git a/spec/frontend/search/sidebar/components/merge_requests_filters_spec.js b/spec/frontend/search/sidebar/components/merge_requests_filters_spec.js
index 0932f8e47d2..b50f348be69 100644
--- a/spec/frontend/search/sidebar/components/merge_requests_filters_spec.js
+++ b/spec/frontend/search/sidebar/components/merge_requests_filters_spec.js
@@ -1,28 +1,131 @@
import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+// eslint-disable-next-line no-restricted-imports
+import Vuex from 'vuex';
+import { MOCK_QUERY } from 'jest/search/mock_data';
import MergeRequestsFilters from '~/search/sidebar/components/merge_requests_filters.vue';
import StatusFilter from '~/search/sidebar/components/status_filter/index.vue';
-import FiltersTemplate from '~/search/sidebar/components/filters_template.vue';
+import ArchivedFilter from '~/search/sidebar/components/archived_filter/index.vue';
+import { SEARCH_TYPE_ADVANCED, SEARCH_TYPE_BASIC } from '~/search/sidebar/constants';
+
+Vue.use(Vuex);
describe('GlobalSearch MergeRequestsFilters', () => {
let wrapper;
- const findStatusFilter = () => wrapper.findComponent(StatusFilter);
- const findFiltersTemplate = () => wrapper.findComponent(FiltersTemplate);
+ const defaultGetters = {
+ currentScope: () => 'merge_requests',
+ };
- const createComponent = () => {
- wrapper = shallowMount(MergeRequestsFilters);
+ const createComponent = ({
+ initialState = {},
+ searchMergeRequestsHideArchivedProjects = true,
+ } = {}) => {
+ const store = new Vuex.Store({
+ state: {
+ urlQuery: MOCK_QUERY,
+ useSidebarNavigation: false,
+ searchType: SEARCH_TYPE_ADVANCED,
+ ...initialState,
+ },
+ getters: defaultGetters,
+ });
+
+ wrapper = shallowMount(MergeRequestsFilters, {
+ store,
+ provide: {
+ glFeatures: {
+ searchMergeRequestsHideArchivedProjects,
+ },
+ },
+ });
};
- describe('Renders correctly', () => {
+ const findStatusFilter = () => wrapper.findComponent(StatusFilter);
+ const findArchivedFilter = () => wrapper.findComponent(ArchivedFilter);
+ const findDividers = () => wrapper.findAll('hr');
+
+ describe.each`
+ description | searchMergeRequestsHideArchivedProjects
+ ${'Renders correctly with Archived Filter disabled'} | ${false}
+ ${'Renders correctly with Archived Filter enabled'} | ${true}
+ `('$description', ({ searchMergeRequestsHideArchivedProjects }) => {
beforeEach(() => {
- createComponent();
+ createComponent({
+ searchMergeRequestsHideArchivedProjects,
+ });
+ });
+
+ it('renders StatusFilter', () => {
+ expect(findStatusFilter().exists()).toBe(true);
+ });
+
+ it(`renders ArchivedFilter correctly when searchMergeRequestsHideArchivedProjects is ${searchMergeRequestsHideArchivedProjects}`, () => {
+ expect(findArchivedFilter().exists()).toBe(searchMergeRequestsHideArchivedProjects);
});
- it('renders ConfidentialityFilter', () => {
+
+ it('renders divider correctly', () => {
+ const dividersCount = searchMergeRequestsHideArchivedProjects ? 1 : 0;
+ expect(findDividers()).toHaveLength(dividersCount);
+ });
+ });
+
+ describe('Renders correctly with basic search', () => {
+ beforeEach(() => {
+ createComponent({ initialState: { searchType: SEARCH_TYPE_BASIC } });
+ });
+
+ it('renders StatusFilter', () => {
expect(findStatusFilter().exists()).toBe(true);
});
- it('renders FiltersTemplate', () => {
- expect(findFiltersTemplate().exists()).toBe(true);
+ it("doesn't render ArchivedFilter", () => {
+ expect(findArchivedFilter().exists()).toBe(false);
+ });
+
+ it("doesn't render dividers", () => {
+ expect(findDividers()).toHaveLength(0);
+ });
+ });
+
+ describe('Renders correctly in new nav', () => {
+ beforeEach(() => {
+ createComponent({
+ initialState: {
+ searchType: SEARCH_TYPE_ADVANCED,
+ useSidebarNavigation: true,
+ },
+ searchMergeRequestsHideArchivedProjects: true,
+ });
+ });
+ it('renders StatusFilter', () => {
+ expect(findStatusFilter().exists()).toBe(true);
+ });
+
+ it('renders ArchivedFilter', () => {
+ expect(findArchivedFilter().exists()).toBe(true);
+ });
+
+ it("doesn't render divider", () => {
+ expect(findDividers()).toHaveLength(0);
+ });
+ });
+
+ describe('Renders correctly with wrong scope', () => {
+ beforeEach(() => {
+ defaultGetters.currentScope = () => 'test';
+ createComponent();
+ });
+ it("doesn't render StatusFilter", () => {
+ expect(findStatusFilter().exists()).toBe(false);
+ });
+
+ it("doesn't render ArchivedFilter", () => {
+ expect(findArchivedFilter().exists()).toBe(false);
+ });
+
+ it("doesn't render dividers", () => {
+ expect(findDividers()).toHaveLength(0);
});
});
});
diff --git a/spec/frontend/search/sidebar/components/notes_filters_spec.js b/spec/frontend/search/sidebar/components/notes_filters_spec.js
new file mode 100644
index 00000000000..2fb8e731ef5
--- /dev/null
+++ b/spec/frontend/search/sidebar/components/notes_filters_spec.js
@@ -0,0 +1,28 @@
+import { shallowMount } from '@vue/test-utils';
+import NotesFilters from '~/search/sidebar/components/projects_filters.vue';
+import ArchivedFilter from '~/search/sidebar/components/archived_filter/index.vue';
+import FiltersTemplate from '~/search/sidebar/components/filters_template.vue';
+
+describe('GlobalSearch NotesFilters', () => {
+ let wrapper;
+
+ const findArchivedFilter = () => wrapper.findComponent(ArchivedFilter);
+ const findFiltersTemplate = () => wrapper.findComponent(FiltersTemplate);
+
+ const createComponent = () => {
+ wrapper = shallowMount(NotesFilters);
+ };
+
+ describe('Renders correctly', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+ it('renders ArchivedFilter', () => {
+ expect(findArchivedFilter().exists()).toBe(true);
+ });
+
+ it('renders FiltersTemplate', () => {
+ expect(findFiltersTemplate().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/search/sidebar/components/projects_filters_specs.js b/spec/frontend/search/sidebar/components/projects_filters_spec.js
index 15e3254e289..930b7263ea4 100644
--- a/spec/frontend/search/sidebar/components/projects_filters_specs.js
+++ b/spec/frontend/search/sidebar/components/projects_filters_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import ProjectsFilters from '~/search/sidebar/components/projects_filters.vue';
-import ArchivedFilter from '~/search/sidebar/components/language_filter/index.vue';
+import ArchivedFilter from '~/search/sidebar/components/archived_filter/index.vue';
import FiltersTemplate from '~/search/sidebar/components/filters_template.vue';
describe('GlobalSearch ProjectsFilters', () => {
diff --git a/spec/frontend/search/sidebar/components/small_screen_drawer_navigation_spec.js b/spec/frontend/search/sidebar/components/small_screen_drawer_navigation_spec.js
new file mode 100644
index 00000000000..5ab4afba7f0
--- /dev/null
+++ b/spec/frontend/search/sidebar/components/small_screen_drawer_navigation_spec.js
@@ -0,0 +1,68 @@
+import { nextTick } from 'vue';
+import { GlDrawer } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import DomElementListener from '~/vue_shared/components/dom_element_listener.vue';
+import { DRAWER_Z_INDEX } from '~/lib/utils/constants';
+import SmallScreenDrawerNavigation from '~/search/sidebar/components/small_screen_drawer_navigation.vue';
+
+describe('SmallScreenDrawerNavigation', () => {
+ let wrapper;
+ let closeSpy;
+ let toggleSpy;
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(SmallScreenDrawerNavigation, {
+ slots: {
+ default: '<div data-testid="default-slot-content">test</div>',
+ },
+ });
+ };
+
+ const findGlDrawer = () => wrapper.findComponent(GlDrawer);
+ const findTitle = () => wrapper.find('h2');
+ const findSlot = () => wrapper.findByTestId('default-slot-content');
+ const findDomElementListener = () => wrapper.findComponent(DomElementListener);
+
+ describe('small screen navigation', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders drawer', () => {
+ expect(findGlDrawer().exists()).toBe(true);
+ expect(findGlDrawer().attributes('zindex')).toBe(DRAWER_Z_INDEX.toString());
+ expect(findGlDrawer().attributes('headerheight')).toBe('0');
+ });
+
+ it('renders title', () => {
+ expect(findTitle().exists()).toBe(true);
+ });
+
+ it('renders slots', () => {
+ expect(findSlot().exists()).toBe(true);
+ });
+ });
+
+ describe('actions', () => {
+ beforeEach(() => {
+ closeSpy = jest.spyOn(SmallScreenDrawerNavigation.methods, 'closeSmallScreenFilters');
+ toggleSpy = jest.spyOn(SmallScreenDrawerNavigation.methods, 'toggleSmallScreenFilters');
+ createComponent();
+ });
+
+ it('calls onClose', () => {
+ findGlDrawer().vm.$emit('close');
+ expect(closeSpy).toHaveBeenCalled();
+ });
+
+ it('calls toggleSmallScreenFilters', async () => {
+ expect(findGlDrawer().props('open')).toBe(false);
+
+ findDomElementListener().vm.$emit('click');
+ await nextTick();
+
+ expect(toggleSpy).toHaveBeenCalled();
+ expect(findGlDrawer().props('open')).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/search/store/actions_spec.js b/spec/frontend/search/store/actions_spec.js
index cc9c555b6c7..889260fc478 100644
--- a/spec/frontend/search/store/actions_spec.js
+++ b/spec/frontend/search/store/actions_spec.js
@@ -1,4 +1,5 @@
import MockAdapter from 'axios-mock-adapter';
+import { mapValues } from 'lodash';
import testAction from 'helpers/vuex_action_helper';
import Api from '~/api';
import { createAlert } from '~/alert';
@@ -312,6 +313,21 @@ describe('Global Search Store Actions', () => {
});
});
+ describe('fetchSidebarCount with no count_link', () => {
+ beforeEach(() => {
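+ // Strip count_link from every nav item so fetchSidebarCount has nothing to request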
+ state.navigation = mapValues(MOCK_NAVIGATION_DATA, (navItem) => ({
+ ...navItem,
+ count_link: null,
+ }));
+ });
+
+ it('should not request anything', async () => {
+ await testAction({ action: actions.fetchSidebarCount, state, expectedMutations: [] });
+
+ expect(mock.history.get.length).toBe(0);
+ });
+ });
+
describe.each`
action | axiosMock | type | expectedMutations | errorLogs
${actions.fetchAllAggregation} | ${{ method: 'onGet', code: HTTP_STATUS_OK }} | ${'success'} | ${MOCK_RECEIVE_AGGREGATIONS_SUCCESS_MUTATION} | ${0}
diff --git a/spec/frontend/security_configuration/components/continuous_vulnerability_scan_spec.js b/spec/frontend/security_configuration/components/continuous_vulnerability_scan_spec.js
new file mode 100644
index 00000000000..84a468e4dd8
--- /dev/null
+++ b/spec/frontend/security_configuration/components/continuous_vulnerability_scan_spec.js
@@ -0,0 +1,124 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlBadge, GlToggle } from '@gitlab/ui';
+import VueApollo from 'vue-apollo';
+import Vue from 'vue';
+import ProjectSetContinuousVulnerabilityScanning from '~/security_configuration/graphql/project_set_continuous_vulnerability_scanning.graphql';
+import ContinuousVulnerabilityScan from '~/security_configuration/components/continuous_vulnerability_scan.vue';
+import createMockApollo from 'helpers/mock_apollo_helper';
+
+Vue.use(VueApollo);
+
+const setCVSMockResponse = {
+ data: {
+ projectSetContinuousVulnerabilityScanning: {
+ continuousVulnerabilityScanningEnabled: true,
+ errors: [],
+ },
+ },
+};
+
+const defaultProvide = {
+ continuousVulnerabilityScansEnabled: true,
+ projectFullPath: 'project/full/path',
+};
+
+describe('ContinuousVulnerabilityScan', () => {
+ let wrapper;
+ let apolloProvider;
+ let requestHandlers;
+
+ const createComponent = (options) => {
+ requestHandlers = {
+ setCVSMutationHandler: jest.fn().mockResolvedValue(setCVSMockResponse),
+ };
+
+ apolloProvider = createMockApollo([
+ [ProjectSetContinuousVulnerabilityScanning, requestHandlers.setCVSMutationHandler],
+ ]);
+
+ wrapper = shallowMount(ContinuousVulnerabilityScan, {
+ propsData: {
+ feature: {
+ available: true,
+ configured: true,
+ },
+ },
+ provide: {
+ glFeatures: {
+ dependencyScanningOnAdvisoryIngestion: true,
+ },
+ ...defaultProvide,
+ },
+ apolloProvider,
+ ...options,
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ apolloProvider = null;
+ });
+
+ const findBadge = () => wrapper.findComponent(GlBadge);
+ const findToggle = () => wrapper.findComponent(GlToggle);
+
+ it('renders the component', () => {
+ expect(wrapper.exists()).toBe(true);
+ });
+
+ it('renders the correct title', () => {
+ expect(wrapper.text()).toContain('Continuous Vulnerability Scan');
+ });
+
+ it('renders the badge and toggle component with correct values', () => {
+ expect(findBadge().exists()).toBe(true);
+ expect(findBadge().text()).toBe('Experiment');
+
+ expect(findToggle().exists()).toBe(true);
+ expect(findToggle().props('value')).toBe(defaultProvide.continuousVulnerabilityScansEnabled);
+ });
+
+ it('should disable toggle when feature is not configured', () => {
+ createComponent({
+ propsData: {
+ feature: {
+ available: true,
+ configured: false,
+ },
+ },
+ });
+ expect(findToggle().props('disabled')).toBe(true);
+ });
+
+ it('calls mutation on toggle change with correct payload', () => {
+ findToggle().vm.$emit('change', true);
+
+ expect(requestHandlers.setCVSMutationHandler).toHaveBeenCalledWith({
+ input: {
+ projectPath: 'project/full/path',
+ enable: true,
+ },
+ });
+ });
+
+ describe('when feature flag is disabled', () => {
+ beforeEach(() => {
+ createComponent({
+ provide: {
+ glFeatures: {
+ dependencyScanningOnAdvisoryIngestion: false,
+ },
+ ...defaultProvide,
+ },
+ });
+ });
+
+ it('should not render toggle and badge', () => {
+ expect(findToggle().exists()).toBe(false);
+ expect(findBadge().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/security_configuration/components/feature_card_spec.js b/spec/frontend/security_configuration/components/feature_card_spec.js
index 983a66a7fd3..c715d01dd58 100644
--- a/spec/frontend/security_configuration/components/feature_card_spec.js
+++ b/spec/frontend/security_configuration/components/feature_card_spec.js
@@ -1,5 +1,6 @@
import { GlIcon } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
+import Vue from 'vue';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import { securityFeatures } from '~/security_configuration/components/constants';
import FeatureCard from '~/security_configuration/components/feature_card.vue';
@@ -13,6 +14,10 @@ import {
import { manageViaMRErrorMessage } from '../constants';
import { makeFeature } from './utils';
+const MockComponent = Vue.component('MockComponent', {
+ render: (createElement) => createElement('span'),
+});
+
describe('FeatureCard component', () => {
let feature;
let wrapper;
@@ -389,4 +394,17 @@ describe('FeatureCard component', () => {
});
});
});
+
+ describe('when a slot component is passed', () => {
+ beforeEach(() => {
+ feature = makeFeature({
+ slotComponent: MockComponent,
+ });
+ createComponent({ feature });
+ });
+
+ it('renders the component properly', () => {
+ expect(wrapper.findComponent(MockComponent).exists()).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/security_configuration/utils_spec.js b/spec/frontend/security_configuration/utils_spec.js
index 6e731e45da2..ea04e9e7993 100644
--- a/spec/frontend/security_configuration/utils_spec.js
+++ b/spec/frontend/security_configuration/utils_spec.js
@@ -34,6 +34,33 @@ describe('augmentFeatures', () => {
},
];
+ const mockSecurityFeaturesDast = [
+ {
+ name: 'DAST',
+ type: 'dast',
+ },
+ ];
+
+ const mockValidCustomFeatureWithOnDemandAvailableFalse = [
+ {
+ name: 'DAST',
+ type: 'dast',
+ customField: 'customvalue',
+ onDemandAvailable: false,
+ badge: {},
+ },
+ ];
+
+ const mockValidCustomFeatureWithOnDemandAvailableTrue = [
+ {
+ name: 'DAST',
+ type: 'dast',
+ customField: 'customvalue',
+ onDemandAvailable: true,
+ badge: {},
+ },
+ ];
+
const mockValidCustomFeatureSnakeCase = [
{
name: 'SAST',
@@ -54,6 +81,29 @@ describe('augmentFeatures', () => {
augmentedSecurityFeatures: mockValidCustomFeature,
};
+ const expectedOutputCustomFeatureWithOnDemandAvailableFalse = {
+ augmentedSecurityFeatures: [
+ {
+ name: 'DAST',
+ type: 'dast',
+ customField: 'customvalue',
+ onDemandAvailable: false,
+ },
+ ],
+ };
+
+ const expectedOutputCustomFeatureWithOnDemandAvailableTrue = {
+ augmentedSecurityFeatures: [
+ {
+ name: 'DAST',
+ type: 'dast',
+ customField: 'customvalue',
+ onDemandAvailable: true,
+ badge: {},
+ },
+ ],
+ };
+
describe('returns an object with augmentedSecurityFeatures when', () => {
it('given an empty array', () => {
expect(augmentFeatures(mockSecurityFeatures, [])).toEqual(expectedOutputDefault);
@@ -85,6 +135,20 @@ describe('augmentFeatures', () => {
);
});
});
+
+ describe('follows onDemandAvailable', () => {
+ it('deletes badge when false', () => {
+ expect(
+ augmentFeatures(mockSecurityFeaturesDast, mockValidCustomFeatureWithOnDemandAvailableFalse),
+ ).toEqual(expectedOutputCustomFeatureWithOnDemandAvailableFalse);
+ });
+
+ it('keeps badge when true', () => {
+ expect(
+ augmentFeatures(mockSecurityFeaturesDast, mockValidCustomFeatureWithOnDemandAvailableTrue),
+ ).toEqual(expectedOutputCustomFeatureWithOnDemandAvailableTrue);
+ });
+ });
});
describe('translateScannerNames', () => {
diff --git a/spec/frontend/sentry/index_spec.js b/spec/frontend/sentry/index_spec.js
deleted file mode 100644
index 3130e01cc9e..00000000000
--- a/spec/frontend/sentry/index_spec.js
+++ /dev/null
@@ -1,104 +0,0 @@
-import index from '~/sentry/index';
-
-import LegacySentryConfig from '~/sentry/legacy_sentry_config';
-import SentryConfig from '~/sentry/sentry_config';
-
-describe('Sentry init', () => {
- const version = '1.0.0';
- const dsn = 'https://123@sentry.gitlab.test/123';
- const environment = 'test';
- const currentUserId = '1';
- const gitlabUrl = 'gitlabUrl';
- const revision = 'revision';
- const featureCategory = 'my_feature_category';
-
- beforeEach(() => {
- window.gon = {
- version,
- sentry_dsn: dsn,
- sentry_environment: environment,
- current_user_id: currentUserId,
- gitlab_url: gitlabUrl,
- revision,
- feature_category: featureCategory,
- };
-
- jest.spyOn(LegacySentryConfig, 'init').mockImplementation();
- jest.spyOn(SentryConfig, 'init').mockImplementation();
- });
-
- it('exports new version of Sentry in the global object', () => {
- // eslint-disable-next-line no-underscore-dangle
- expect(window._Sentry.SDK_VERSION).not.toMatch(/^5\./);
- });
-
- describe('when called', () => {
- beforeEach(() => {
- index();
- });
-
- it('configures sentry', () => {
- expect(SentryConfig.init).toHaveBeenCalledTimes(1);
- expect(SentryConfig.init).toHaveBeenCalledWith({
- dsn,
- currentUserId,
- allowUrls: [gitlabUrl, 'webpack-internal://'],
- environment,
- release: version,
- tags: {
- revision,
- feature_category: featureCategory,
- },
- });
- });
-
- it('does not configure legacy sentry', () => {
- expect(LegacySentryConfig.init).not.toHaveBeenCalled();
- });
- });
-
- describe('with "data-page" attr in body', () => {
- const mockPage = 'projects:show';
-
- beforeEach(() => {
- document.body.dataset.page = mockPage;
-
- index();
- });
-
- afterEach(() => {
- delete document.body.dataset.page;
- });
-
- it('configures sentry with a "page" tag', () => {
- expect(SentryConfig.init).toHaveBeenCalledTimes(1);
- expect(SentryConfig.init).toHaveBeenCalledWith(
- expect.objectContaining({
- tags: {
- revision,
- page: mockPage,
- feature_category: featureCategory,
- },
- }),
- );
- });
- });
-
- describe('with no tags configuration', () => {
- beforeEach(() => {
- window.gon.revision = undefined;
- window.gon.feature_category = undefined;
-
- index();
- });
-
- it('configures sentry with no tags', () => {
- expect(SentryConfig.init).toHaveBeenCalledTimes(1);
- expect(SentryConfig.init).toHaveBeenCalledWith(
- expect.objectContaining({
- tags: {},
- }),
- );
- });
- });
-});
diff --git a/spec/frontend/sentry/init_sentry_spec.js b/spec/frontend/sentry/init_sentry_spec.js
new file mode 100644
index 00000000000..e31068b935b
--- /dev/null
+++ b/spec/frontend/sentry/init_sentry_spec.js
@@ -0,0 +1,177 @@
+import {
+ BrowserClient,
+ defaultStackParser,
+ makeFetchTransport,
+ defaultIntegrations,
+
+ // exports
+ captureException,
+ captureMessage,
+ withScope,
+ SDK_VERSION,
+} from 'sentrybrowser';
+import * as Sentry from 'sentrybrowser';
+
+import { initSentry } from '~/sentry/init_sentry';
+
+const mockDsn = 'https://123@sentry.gitlab.test/123';
+const mockEnvironment = 'development';
+const mockCurrentUserId = 1;
+const mockGitlabUrl = 'https://gitlab.com';
+const mockVersion = '1.0.0';
+const mockRevision = '00112233';
+const mockFeatureCategory = 'my_feature_category';
+const mockPage = 'index:page';
+const mockSentryClientsideTracesSampleRate = 0.1;
+
+jest.mock('sentrybrowser', () => {
+ return {
+ ...jest.createMockFromModule('sentrybrowser'),
+
+ // unmock actual configuration options
+ defaultStackParser: jest.requireActual('sentrybrowser').defaultStackParser,
+ makeFetchTransport: jest.requireActual('sentrybrowser').makeFetchTransport,
+ defaultIntegrations: jest.requireActual('sentrybrowser').defaultIntegrations,
+ };
+});
+
+describe('SentryConfig', () => {
+ let mockBindClient;
+ let mockSetTags;
+ let mockSetUser;
+ let mockBrowserClient;
+ let mockStartSession;
+ let mockCaptureSession;
+
+ beforeEach(() => {
+ window.gon = {
+ sentry_dsn: mockDsn,
+ sentry_environment: mockEnvironment,
+ current_user_id: mockCurrentUserId,
+ gitlab_url: mockGitlabUrl,
+ version: mockVersion,
+ revision: mockRevision,
+ feature_category: mockFeatureCategory,
+ sentry_clientside_traces_sample_rate: mockSentryClientsideTracesSampleRate,
+ };
+
+ document.body.dataset.page = mockPage;
+
+ mockBindClient = jest.fn();
+ mockSetTags = jest.fn();
+ mockSetUser = jest.fn();
+ mockStartSession = jest.fn();
+ mockCaptureSession = jest.fn();
+ mockBrowserClient = jest.spyOn(Sentry, 'BrowserClient');
+
+ jest.spyOn(Sentry, 'getCurrentHub').mockReturnValue({
+ bindClient: mockBindClient,
+ setTags: mockSetTags,
+ setUser: mockSetUser,
+ startSession: mockStartSession,
+ captureSession: mockCaptureSession,
+ });
+ });
+
+ afterEach(() => {
+ // eslint-disable-next-line no-underscore-dangle
+ window._Sentry = undefined;
+ });
+
+ describe('initSentry', () => {
+ describe('when sentry is initialized', () => {
+ beforeEach(() => {
+ initSentry();
+ });
+
+ it('creates BrowserClient with gon values and configuration', () => {
+ expect(mockBrowserClient).toHaveBeenCalledWith(
+ expect.objectContaining({
+ dsn: mockDsn,
+ release: mockVersion,
+ allowUrls: [mockGitlabUrl, 'webpack-internal://'],
+ environment: mockEnvironment,
+ tracesSampleRate: mockSentryClientsideTracesSampleRate,
+ tracePropagationTargets: [/^\//],
+
+ transport: makeFetchTransport,
+ stackParser: defaultStackParser,
+ integrations: defaultIntegrations,
+ }),
+ );
+ });
+
+ it('binds the BrowserClient to the hub', () => {
+ expect(mockBindClient).toHaveBeenCalledTimes(1);
+ expect(mockBindClient).toHaveBeenCalledWith(expect.any(BrowserClient));
+ });
+
+ it('calls Sentry.setTags with gon values', () => {
+ expect(mockSetTags).toHaveBeenCalledTimes(1);
+ expect(mockSetTags).toHaveBeenCalledWith({
+ page: mockPage,
+ revision: mockRevision,
+ feature_category: mockFeatureCategory,
+ });
+ });
+
+ it('calls Sentry.setUser with gon values', () => {
+ expect(mockSetUser).toHaveBeenCalledTimes(1);
+ expect(mockSetUser).toHaveBeenCalledWith({
+ id: mockCurrentUserId,
+ });
+ });
+
+ it('sets global sentry', () => {
+ // eslint-disable-next-line no-underscore-dangle
+ expect(window._Sentry).toEqual({
+ captureException,
+ captureMessage,
+ withScope,
+ SDK_VERSION,
+ });
+ });
+ });
+
+ describe('when user is not logged in', () => {
+ beforeEach(() => {
+ window.gon.current_user_id = undefined;
+ initSentry();
+ });
+
+ it('does not call Sentry.setUser', () => {
+ expect(mockSetUser).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when gon is not defined', () => {
+ beforeEach(() => {
+ window.gon = undefined;
+ initSentry();
+ });
+
+ it('Sentry.init is not called', () => {
+ expect(mockBrowserClient).not.toHaveBeenCalled();
+ expect(mockBindClient).not.toHaveBeenCalled();
+
+ // eslint-disable-next-line no-underscore-dangle
+ expect(window._Sentry).toBe(undefined);
+ });
+ });
+
+ describe('when dsn is not configured', () => {
+ beforeEach(() => {
+ window.gon.sentry_dsn = undefined;
+ initSentry();
+ });
+
+ it('Sentry.init is not called', () => {
+ expect(mockBrowserClient).not.toHaveBeenCalled();
+ expect(mockBindClient).not.toHaveBeenCalled();
+
+ // eslint-disable-next-line no-underscore-dangle
+ expect(window._Sentry).toBe(undefined);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/sentry/legacy_index_spec.js b/spec/frontend/sentry/legacy_index_spec.js
index 493b4dfde67..fad1760ffc5 100644
--- a/spec/frontend/sentry/legacy_index_spec.js
+++ b/spec/frontend/sentry/legacy_index_spec.js
@@ -1,7 +1,6 @@
import index from '~/sentry/legacy_index';
import LegacySentryConfig from '~/sentry/legacy_sentry_config';
-import SentryConfig from '~/sentry/sentry_config';
describe('Sentry init', () => {
const dsn = 'https://123@sentry.gitlab.test/123';
@@ -22,7 +21,6 @@ describe('Sentry init', () => {
};
jest.spyOn(LegacySentryConfig, 'init').mockImplementation();
- jest.spyOn(SentryConfig, 'init').mockImplementation();
});
it('exports legacy version of Sentry in the global object', () => {
@@ -49,9 +47,5 @@ describe('Sentry init', () => {
},
});
});
-
- it('does not configure new sentry', () => {
- expect(SentryConfig.init).not.toHaveBeenCalled();
- });
});
});
diff --git a/spec/frontend/sentry/sentry_config_spec.js b/spec/frontend/sentry/sentry_config_spec.js
deleted file mode 100644
index 34c5221ef0d..00000000000
--- a/spec/frontend/sentry/sentry_config_spec.js
+++ /dev/null
@@ -1,103 +0,0 @@
-import * as Sentry from 'sentrybrowser7';
-
-import SentryConfig from '~/sentry/sentry_config';
-
-describe('SentryConfig', () => {
- describe('init', () => {
- const options = {
- currentUserId: 1,
- };
-
- beforeEach(() => {
- jest.spyOn(SentryConfig, 'configure');
- jest.spyOn(SentryConfig, 'setUser');
-
- SentryConfig.init(options);
- });
-
- it('should set the options property', () => {
- expect(SentryConfig.options).toEqual(options);
- });
-
- it('should call the configure method', () => {
- expect(SentryConfig.configure).toHaveBeenCalled();
- });
-
- it('should call setUser', () => {
- expect(SentryConfig.setUser).toHaveBeenCalled();
- });
-
- it('should not call setUser if there is no current user ID', () => {
- SentryConfig.setUser.mockClear();
- SentryConfig.init({ currentUserId: undefined });
-
- expect(SentryConfig.setUser).not.toHaveBeenCalled();
- });
- });
-
- describe('configure', () => {
- const sentryConfig = {};
- const options = {
- dsn: 'https://123@sentry.gitlab.test/123',
- allowUrls: ['//gitlabUrl', 'webpack-internal://'],
- environment: 'test',
- release: 'revision',
- tags: {
- revision: 'revision',
- feature_category: 'my_feature_category',
- },
- };
-
- beforeEach(() => {
- jest.spyOn(Sentry, 'init').mockImplementation();
- jest.spyOn(Sentry, 'setTags').mockImplementation();
-
- sentryConfig.options = options;
-
- SentryConfig.configure.call(sentryConfig);
- });
-
- it('should call Sentry.init', () => {
- expect(Sentry.init).toHaveBeenCalledWith({
- dsn: options.dsn,
- release: options.release,
- allowUrls: options.allowUrls,
- environment: options.environment,
- });
- });
-
- it('should call Sentry.setTags', () => {
- expect(Sentry.setTags).toHaveBeenCalledWith(options.tags);
- });
-
- it('should set environment from options', () => {
- sentryConfig.options.environment = 'development';
-
- SentryConfig.configure.call(sentryConfig);
-
- expect(Sentry.init).toHaveBeenCalledWith({
- dsn: options.dsn,
- release: options.release,
- allowUrls: options.allowUrls,
- environment: 'development',
- });
- });
- });
-
- describe('setUser', () => {
- let sentryConfig;
-
- beforeEach(() => {
- sentryConfig = { options: { currentUserId: 1 } };
- jest.spyOn(Sentry, 'setUser');
-
- SentryConfig.setUser.call(sentryConfig);
- });
-
- it('should call .setUser', () => {
- expect(Sentry.setUser).toHaveBeenCalledWith({
- id: sentryConfig.options.currentUserId,
- });
- });
- });
-});
diff --git a/spec/frontend/service_desk/components/service_desk_list_app_spec.js b/spec/frontend/service_desk/components/service_desk_list_app_spec.js
deleted file mode 100644
index bdb6a48895e..00000000000
--- a/spec/frontend/service_desk/components/service_desk_list_app_spec.js
+++ /dev/null
@@ -1,376 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import Vue, { nextTick } from 'vue';
-import VueApollo from 'vue-apollo';
-import { cloneDeep } from 'lodash';
-import VueRouter from 'vue-router';
-import * as Sentry from '@sentry/browser';
-import createMockApollo from 'helpers/mock_apollo_helper';
-import setWindowLocation from 'helpers/set_window_location_helper';
-import { TEST_HOST } from 'helpers/test_constants';
-import waitForPromises from 'helpers/wait_for_promises';
-import IssuableList from '~/vue_shared/issuable/list/components/issuable_list_root.vue';
-import { issuableListTabs } from '~/vue_shared/issuable/list/constants';
-import { TYPENAME_USER } from '~/graphql_shared/constants';
-import { convertToGraphQLId } from '~/graphql_shared/utils';
-import { STATUS_CLOSED, STATUS_OPEN, STATUS_ALL } from '~/service_desk/constants';
-import getServiceDeskIssuesQuery from 'ee_else_ce/service_desk/queries/get_service_desk_issues.query.graphql';
-import getServiceDeskIssuesCountsQuery from 'ee_else_ce/service_desk/queries/get_service_desk_issues_counts.query.graphql';
-import ServiceDeskListApp from '~/service_desk/components/service_desk_list_app.vue';
-import InfoBanner from '~/service_desk/components/info_banner.vue';
-import EmptyStateWithAnyIssues from '~/service_desk/components/empty_state_with_any_issues.vue';
-import EmptyStateWithoutAnyIssues from '~/service_desk/components/empty_state_without_any_issues.vue';
-
-import {
- TOKEN_TYPE_ASSIGNEE,
- TOKEN_TYPE_AUTHOR,
- TOKEN_TYPE_CONFIDENTIAL,
- TOKEN_TYPE_LABEL,
- TOKEN_TYPE_MILESTONE,
- TOKEN_TYPE_MY_REACTION,
- TOKEN_TYPE_RELEASE,
- TOKEN_TYPE_SEARCH_WITHIN,
-} from '~/vue_shared/components/filtered_search_bar/constants';
-import {
- getServiceDeskIssuesQueryResponse,
- getServiceDeskIssuesQueryEmptyResponse,
- getServiceDeskIssuesCountsQueryResponse,
- filteredTokens,
- urlParams,
- locationSearch,
-} from '../mock_data';
-
-jest.mock('@sentry/browser');
-
-describe('CE ServiceDeskListApp', () => {
- let wrapper;
- let router;
-
- Vue.use(VueApollo);
- Vue.use(VueRouter);
-
- const defaultProvide = {
- releasesPath: 'releases/path',
- autocompleteAwardEmojisPath: 'autocomplete/award/emojis/path',
- hasIterationsFeature: true,
- hasIssueWeightsFeature: true,
- hasIssuableHealthStatusFeature: true,
- groupPath: 'group/path',
- emptyStateSvgPath: 'empty-state.svg',
- isProject: true,
- isSignedIn: true,
- fullPath: 'path/to/project',
- isServiceDeskSupported: true,
- hasAnyIssues: true,
- initialSort: '',
- issuablesLoading: false,
- };
-
- let defaultQueryResponse = getServiceDeskIssuesQueryResponse;
- if (IS_EE) {
- defaultQueryResponse = cloneDeep(getServiceDeskIssuesQueryResponse);
- defaultQueryResponse.data.project.issues.nodes[0].healthStatus = null;
- defaultQueryResponse.data.project.issues.nodes[0].weight = 5;
- }
-
- const mockServiceDeskIssuesQueryResponseHandler = jest
- .fn()
- .mockResolvedValue(defaultQueryResponse);
- const mockServiceDeskIssuesQueryEmptyResponseHandler = jest
- .fn()
- .mockResolvedValue(getServiceDeskIssuesQueryEmptyResponse);
- const mockServiceDeskIssuesCountsQueryResponseHandler = jest
- .fn()
- .mockResolvedValue(getServiceDeskIssuesCountsQueryResponse);
-
- const findIssuableList = () => wrapper.findComponent(IssuableList);
- const findInfoBanner = () => wrapper.findComponent(InfoBanner);
- const findLabelsToken = () =>
- findIssuableList()
- .props('searchTokens')
- .find((token) => token.type === TOKEN_TYPE_LABEL);
-
- const createComponent = ({
- provide = {},
- serviceDeskIssuesQueryResponseHandler = mockServiceDeskIssuesQueryResponseHandler,
- serviceDeskIssuesCountsQueryResponseHandler = mockServiceDeskIssuesCountsQueryResponseHandler,
- } = {}) => {
- const requestHandlers = [
- [getServiceDeskIssuesQuery, serviceDeskIssuesQueryResponseHandler],
- [getServiceDeskIssuesCountsQuery, serviceDeskIssuesCountsQueryResponseHandler],
- ];
-
- router = new VueRouter({ mode: 'history' });
-
- return shallowMount(ServiceDeskListApp, {
- apolloProvider: createMockApollo(
- requestHandlers,
- {},
- {
- typePolicies: {
- Query: {
- fields: {
- project: {
- merge: true,
- },
- },
- },
- },
- },
- ),
- router,
- provide: {
- ...defaultProvide,
- ...provide,
- },
- });
- };
-
- beforeEach(() => {
- setWindowLocation(TEST_HOST);
- wrapper = createComponent();
- return waitForPromises();
- });
-
- it('renders the issuable list with skeletons while fetching service desk issues', async () => {
- wrapper = createComponent();
- await nextTick();
-
- expect(findIssuableList().props('issuablesLoading')).toBe(true);
-
- await waitForPromises();
-
- expect(findIssuableList().props('issuablesLoading')).toBe(false);
- });
-
- it('fetches service desk issues and renders them in the issuable list', () => {
- expect(findIssuableList().props()).toMatchObject({
- namespace: 'service-desk',
- recentSearchesStorageKey: 'service-desk-issues',
- issuables: defaultQueryResponse.data.project.issues.nodes,
- tabs: issuableListTabs,
- currentTab: STATUS_OPEN,
- tabCounts: {
- opened: 1,
- closed: 1,
- all: 1,
- },
- });
- });
-
- describe('InfoBanner', () => {
- it('renders when Service Desk is supported and has any number of issues', () => {
- expect(findInfoBanner().exists()).toBe(true);
- });
-
- it('does not render when Service Desk is not supported and has any number of issues', () => {
- wrapper = createComponent({ provide: { isServiceDeskSupported: false } });
-
- expect(findInfoBanner().exists()).toBe(false);
- });
-
- it('does not render, when there are no issues', () => {
- wrapper = createComponent({
- serviceDeskIssuesQueryResponseHandler: mockServiceDeskIssuesQueryEmptyResponseHandler,
- });
-
- expect(findInfoBanner().exists()).toBe(false);
- });
- });
-
- describe('Empty states', () => {
- describe('when there are issues', () => {
- it('shows EmptyStateWithAnyIssues component', () => {
- setWindowLocation(locationSearch);
- wrapper = createComponent({
- serviceDeskIssuesQueryResponseHandler: mockServiceDeskIssuesQueryEmptyResponseHandler,
- });
-
- expect(wrapper.findComponent(EmptyStateWithAnyIssues).props()).toEqual({
- hasSearch: true,
- isOpenTab: true,
- });
- });
- });
-
- describe('when there are no issues', () => {
- it('shows EmptyStateWithoutAnyIssues component', () => {
- wrapper = createComponent({
- provide: { hasAnyIssues: false },
- serviceDeskIssuesQueryResponseHandler: mockServiceDeskIssuesQueryEmptyResponseHandler,
- });
-
- expect(wrapper.findComponent(EmptyStateWithoutAnyIssues).exists()).toBe(true);
- });
- });
- });
-
- describe('Initial url params', () => {
- describe('search', () => {
- it('is set from the url params', () => {
- setWindowLocation(locationSearch);
- wrapper = createComponent();
-
- expect(router.history.current.query).toMatchObject({ search: 'find issues' });
- });
- });
-
- describe('state', () => {
- it('is set from the url params', async () => {
- const initialState = STATUS_ALL;
- setWindowLocation(`?state=${initialState}`);
- wrapper = createComponent();
- await waitForPromises();
-
- expect(findIssuableList().props('currentTab')).toBe(initialState);
- });
- });
-
- describe('filter tokens', () => {
- it('are set from the url params', () => {
- setWindowLocation(locationSearch);
- wrapper = createComponent();
-
- expect(findIssuableList().props('initialFilterValue')).toEqual(filteredTokens);
- });
- });
- });
-
- describe('Tokens', () => {
- const mockCurrentUser = {
- id: 1,
- name: 'Administrator',
- username: 'root',
- avatar_url: 'avatar/url',
- };
-
- describe('when user is signed out', () => {
- beforeEach(() => {
- wrapper = createComponent({ provide: { isSignedIn: false } });
- return waitForPromises();
- });
-
- it('does not render My-Reaction or Confidential tokens', () => {
- expect(findIssuableList().props('searchTokens')).not.toMatchObject([
- { type: TOKEN_TYPE_AUTHOR, preloadedUsers: [mockCurrentUser] },
- { type: TOKEN_TYPE_ASSIGNEE, preloadedUsers: [mockCurrentUser] },
- { type: TOKEN_TYPE_MY_REACTION },
- { type: TOKEN_TYPE_CONFIDENTIAL },
- ]);
- });
- });
-
- describe('when all tokens are available', () => {
- beforeEach(() => {
- window.gon = {
- current_user_id: mockCurrentUser.id,
- current_user_fullname: mockCurrentUser.name,
- current_username: mockCurrentUser.username,
- current_user_avatar_url: mockCurrentUser.avatar_url,
- };
-
- wrapper = createComponent();
- return waitForPromises();
- });
-
- it('renders all tokens alphabetically', () => {
- const preloadedUsers = [
- { ...mockCurrentUser, id: convertToGraphQLId(TYPENAME_USER, mockCurrentUser.id) },
- ];
-
- expect(findIssuableList().props('searchTokens')).toMatchObject([
- { type: TOKEN_TYPE_ASSIGNEE, preloadedUsers },
- { type: TOKEN_TYPE_CONFIDENTIAL },
- { type: TOKEN_TYPE_LABEL },
- { type: TOKEN_TYPE_MILESTONE },
- { type: TOKEN_TYPE_MY_REACTION },
- { type: TOKEN_TYPE_RELEASE },
- { type: TOKEN_TYPE_SEARCH_WITHIN },
- ]);
- });
- });
- });
-
- describe('Events', () => {
- describe('when "click-tab" event is emitted by IssuableList', () => {
- beforeEach(async () => {
- wrapper = createComponent();
- router.push = jest.fn();
- await waitForPromises();
-
- findIssuableList().vm.$emit('click-tab', STATUS_CLOSED);
- });
-
- it('updates ui to the new tab', () => {
- expect(findIssuableList().props('currentTab')).toBe(STATUS_CLOSED);
- });
-
- it('updates url to the new tab', () => {
- expect(router.push).toHaveBeenCalledWith({
- query: expect.objectContaining({ state: STATUS_CLOSED }),
- });
- });
- });
-
- describe('when "filter" event is emitted by IssuableList', () => {
- it('updates IssuableList with url params', async () => {
- wrapper = createComponent();
- router.push = jest.fn();
- await waitForPromises();
-
- findIssuableList().vm.$emit('filter', filteredTokens);
- await nextTick();
-
- expect(router.push).toHaveBeenCalledWith({
- query: expect.objectContaining(urlParams),
- });
- });
- });
- });
-
- describe('Errors', () => {
- describe.each`
- error | responseHandler
- ${'fetching issues'} | ${'serviceDeskIssuesQueryResponseHandler'}
- ${'fetching issue counts'} | ${'serviceDeskIssuesCountsQueryResponseHandler'}
- `('when there is an error $error', ({ responseHandler }) => {
- beforeEach(() => {
- wrapper = createComponent({
- [responseHandler]: jest.fn().mockRejectedValue(new Error('ERROR')),
- });
- return waitForPromises();
- });
-
- it('shows an error message', () => {
- expect(Sentry.captureException).toHaveBeenCalledWith(new Error('ERROR'));
- });
- });
- });
-
- describe('When providing token for labels', () => {
- it('passes function to fetchLatestLabels property if frontend caching is enabled', async () => {
- wrapper = createComponent({
- provide: {
- glFeatures: {
- frontendCaching: true,
- },
- },
- });
- await waitForPromises();
-
- expect(typeof findLabelsToken().fetchLatestLabels).toBe('function');
- });
-
- it('passes null to fetchLatestLabels property if frontend caching is disabled', async () => {
- wrapper = createComponent({
- provide: {
- glFeatures: {
- frontendCaching: false,
- },
- },
- });
- await waitForPromises();
-
- expect(findLabelsToken().fetchLatestLabels).toBe(null);
- });
- });
-});
diff --git a/spec/frontend/sidebar/components/assignees/assignees_spec.js b/spec/frontend/sidebar/components/assignees/assignees_spec.js
index 65a07382ebc..2767d36ac3d 100644
--- a/spec/frontend/sidebar/components/assignees/assignees_spec.js
+++ b/spec/frontend/sidebar/components/assignees/assignees_spec.js
@@ -145,7 +145,7 @@ describe('Assignee component', () => {
});
expect(findAllAvatarLinks()).toHaveLength(users.length);
- expect(wrapper.find('.user-list-more').exists()).toBe(false);
+ expect(wrapper.find('[data-testid="user-list-more"]').exists()).toBe(false);
});
it('shows sorted assignee where "can merge" users are sorted first', () => {
diff --git a/spec/frontend/sidebar/components/assignees/sidebar_invite_members_spec.js b/spec/frontend/sidebar/components/assignees/sidebar_invite_members_spec.js
index 501048bf056..8c42e61548f 100644
--- a/spec/frontend/sidebar/components/assignees/sidebar_invite_members_spec.js
+++ b/spec/frontend/sidebar/components/assignees/sidebar_invite_members_spec.js
@@ -26,7 +26,7 @@ describe('Sidebar invite members component', () => {
});
it('has expected attributes on the trigger', () => {
- expect(findDirectInviteLink().props('triggerSource')).toBe('issue-assignee-dropdown');
+ expect(findDirectInviteLink().props('triggerSource')).toBe('issue_assignee_dropdown');
});
});
});
diff --git a/spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js b/spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js
index c74a714cca4..9e7a198d32c 100644
--- a/spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js
+++ b/spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js
@@ -24,7 +24,7 @@ describe('UncollapsedAssigneeList component', () => {
});
}
- const findMoreButton = () => wrapper.find('.user-list-more button');
+ const findMoreButton = () => wrapper.find('[data-testid="user-list-more-button"]');
describe('One assignee/user', () => {
let user;
diff --git a/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js b/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js
index 1ca20dad1c6..3588e92d515 100644
--- a/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js
+++ b/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js
@@ -4,7 +4,7 @@ import { nextTick } from 'vue';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
import SidebarConfidentialityForm from '~/sidebar/components/confidential/sidebar_confidentiality_form.vue';
-import { confidentialityQueries } from '~/sidebar/constants';
+import { confidentialityQueries } from '~/sidebar/queries/constants';
jest.mock('~/alert');
@@ -38,6 +38,23 @@ describe('Sidebar Confidentiality Form', () => {
});
};
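+ // Helper that builds the expected $apollo.mutate payload for a confidentiality toggle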
+ const confidentialityMutation = (confidential, workspacePath) => {
+ return {
+ mutation: confidentialityQueries[wrapper.vm.issuableType].mutation,
+ variables: {
+ input: {
+ confidential,
+ iid: '1',
+ ...workspacePath,
+ },
+ },
+ };
+ };
+
+ const clickConfidentialToggle = () => {
+ findConfidentialToggle().vm.$emit('click', new MouseEvent('click'));
+ };
+
it('emits a `closeForm` event when Cancel button is clicked', () => {
createComponent();
findCancelButton().vm.$emit('click');
@@ -94,17 +111,10 @@ describe('Sidebar Confidentiality Form', () => {
});
it('calls a mutation to set confidential to true on button click', () => {
- findConfidentialToggle().vm.$emit('click', new MouseEvent('click'));
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: confidentialityQueries[wrapper.vm.issuableType].mutation,
- variables: {
- input: {
- confidential: true,
- iid: '1',
- projectPath: 'group/project',
- },
- },
- });
+ clickConfidentialToggle();
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith(
+ confidentialityMutation(true, { projectPath: 'group/project' }),
+ );
});
});
@@ -150,17 +160,49 @@ describe('Sidebar Confidentiality Form', () => {
});
it('calls a mutation to set epic confidentiality with correct parameters', () => {
- findConfidentialToggle().vm.$emit('click', new MouseEvent('click'));
+ clickConfidentialToggle();
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith(
+ confidentialityMutation(false, { groupPath: 'group/project' }),
+ );
+ });
+ });
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: confidentialityQueries[wrapper.vm.issuableType].mutation,
- variables: {
- input: {
- confidential: false,
- iid: '1',
- groupPath: 'group/project',
- },
- },
+ describe('when issuable type is `test_case`', () => {
+ describe('when test case is confidential', () => {
+ beforeEach(() => {
+ createComponent({ props: { confidential: true, issuableType: 'test_case' } });
+ });
+
+ it('renders a message about making a test case non-confidential', () => {
+ expect(findWarningMessage().text()).toBe(
+ 'You are going to turn off the confidentiality. This means everyone will be able to see this test case.',
+ );
+ });
+
+ it('calls a mutation to set confidential to false on button click', () => {
+ clickConfidentialToggle();
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith(
+ confidentialityMutation(false, { projectPath: 'group/project' }),
+ );
+ });
+ });
+
+ describe('when test case is not confidential', () => {
+ beforeEach(() => {
+ createComponent({ props: { issuableType: 'test_case' } });
+ });
+
+ it('renders a message about making a test case confidential', () => {
+ expect(findWarningMessage().text()).toBe(
+ 'You are going to turn on confidentiality. Only project members with at least the Reporter role can view or be notified about this test case.',
+ );
+ });
+
+ it('calls a mutation to set confidential to true on button click', () => {
+ clickConfidentialToggle();
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith(
+ confidentialityMutation(true, { projectPath: 'group/project' }),
+ );
});
});
});
diff --git a/spec/frontend/sidebar/components/incidents/sidebar_escalation_status_spec.js b/spec/frontend/sidebar/components/incidents/sidebar_escalation_status_spec.js
index 00b57b4916e..f3d50f17e2d 100644
--- a/spec/frontend/sidebar/components/incidents/sidebar_escalation_status_spec.js
+++ b/spec/frontend/sidebar/components/incidents/sidebar_escalation_status_spec.js
@@ -11,11 +11,8 @@ import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import SidebarEscalationStatus from '~/sidebar/components/incidents/sidebar_escalation_status.vue';
import SidebarEditableItem from '~/sidebar/components/sidebar_editable_item.vue';
-import {
- escalationStatusQuery,
- escalationStatusMutation,
- STATUS_ACKNOWLEDGED,
-} from '~/sidebar/constants';
+import { STATUS_ACKNOWLEDGED } from '~/sidebar/constants';
+import { escalationStatusQuery, escalationStatusMutation } from '~/sidebar/queries/constants';
import waitForPromises from 'helpers/wait_for_promises';
import EscalationStatus from 'ee_else_ce/sidebar/components/incidents/escalation_status.vue';
import { createAlert } from '~/alert';
diff --git a/spec/frontend/sidebar/components/labels/labels_select_widget/dropdown_contents_create_view_spec.js b/spec/frontend/sidebar/components/labels/labels_select_widget/dropdown_contents_create_view_spec.js
index 9c8d9656955..5e2ff73878f 100644
--- a/spec/frontend/sidebar/components/labels/labels_select_widget/dropdown_contents_create_view_spec.js
+++ b/spec/frontend/sidebar/components/labels/labels_select_widget/dropdown_contents_create_view_spec.js
@@ -5,11 +5,14 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
-import { workspaceLabelsQueries } from '~/sidebar/constants';
+import { workspaceLabelsQueries, workspaceCreateLabelMutation } from '~/sidebar/queries/constants';
import DropdownContentsCreateView from '~/sidebar/components/labels/labels_select_widget/dropdown_contents_create_view.vue';
-import createLabelMutation from '~/sidebar/components/labels/labels_select_widget/graphql/create_label.mutation.graphql';
import { DEFAULT_LABEL_COLOR } from '~/sidebar/components/labels/labels_select_widget/constants';
import {
+ mockCreateLabelResponse as createAbuseReportLabelSuccessfulResponse,
+ mockLabelsQueryResponse as abuseReportLabelsQueryResponse,
+} from '../../../../admin/abuse_report/mock_data';
+import {
mockRegularLabel,
mockSuggestedColors,
createLabelSuccessfulResponse,
@@ -38,6 +41,9 @@ const titleTakenError = {
};
const createLabelSuccessHandler = jest.fn().mockResolvedValue(createLabelSuccessfulResponse);
+const createAbuseReportLabelSuccessHandler = jest
+ .fn()
+ .mockResolvedValue(createAbuseReportLabelSuccessfulResponse);
const createLabelUserRecoverableErrorHandler = jest.fn().mockResolvedValue(userRecoverableError);
const createLabelDuplicateErrorHandler = jest.fn().mockResolvedValue(titleTakenError);
const createLabelErrorHandler = jest.fn().mockRejectedValue('Houston, we have a problem');
@@ -66,6 +72,7 @@ describe('DropdownContentsCreateView', () => {
labelsResponse = workspaceLabelsQueryResponse,
searchTerm = '',
} = {}) => {
+ const createLabelMutation = workspaceCreateLabelMutation[workspaceType];
const mockApollo = createMockApollo([[createLabelMutation, mutationHandler]]);
mockApollo.clients.defaultClient.cache.writeQuery({
query: workspaceLabelsQueries[workspaceType].query,
@@ -203,6 +210,22 @@ describe('DropdownContentsCreateView', () => {
});
});
+ it('calls the correct mutation when workspaceType is `abuseReport`', () => {
+ createComponent({
+ mutationHandler: createAbuseReportLabelSuccessHandler,
+ labelCreateType: '',
+ workspaceType: 'abuseReport',
+ labelsResponse: abuseReportLabelsQueryResponse,
+ });
+ fillLabelAttributes();
+ findCreateButton().vm.$emit('click');
+
+ expect(createAbuseReportLabelSuccessHandler).toHaveBeenCalledWith({
+ color: '#009966',
+ title: 'Test title',
+ });
+ });
+
it('calls createAlert if mutation has a user-recoverable error', async () => {
createComponent({ mutationHandler: createLabelUserRecoverableErrorHandler });
fillLabelAttributes();
diff --git a/spec/frontend/sidebar/components/labels/labels_select_widget/dropdown_footer_spec.js b/spec/frontend/sidebar/components/labels/labels_select_widget/dropdown_footer_spec.js
index ad1edaa6671..7a1131b8cce 100644
--- a/spec/frontend/sidebar/components/labels/labels_select_widget/dropdown_footer_spec.js
+++ b/spec/frontend/sidebar/components/labels/labels_select_widget/dropdown_footer_spec.js
@@ -1,12 +1,11 @@
-import { shallowMount } from '@vue/test-utils';
-import { nextTick } from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import DropdownFooter from '~/sidebar/components/labels/labels_select_widget/dropdown_footer.vue';
describe('DropdownFooter', () => {
let wrapper;
const createComponent = ({ props = {}, injected = {} } = {}) => {
- wrapper = shallowMount(DropdownFooter, {
+ wrapper = shallowMountExtended(DropdownFooter, {
propsData: {
footerCreateLabelTitle: 'create',
footerManageLabelTitle: 'manage',
@@ -20,7 +19,8 @@ describe('DropdownFooter', () => {
});
};
- const findCreateLabelButton = () => wrapper.find('[data-testid="create-label-button"]');
+ const findCreateLabelButton = () => wrapper.findByTestId('create-label-button');
+ const findManageLabelsButton = () => wrapper.findByTestId('manage-labels-button');
describe('Labels view', () => {
beforeEach(() => {
@@ -42,12 +42,37 @@ describe('DropdownFooter', () => {
expect(findCreateLabelButton().exists()).toBe(true);
});
- it('emits `toggleDropdownContentsCreateView` event on create label button click', async () => {
+ it('emits `toggleDropdownContentsCreateView` event on create label button click', () => {
findCreateLabelButton().trigger('click');
- await nextTick();
expect(wrapper.emitted('toggleDropdownContentsCreateView')).toEqual([[]]);
});
});
+
+ describe('manage labels button', () => {
+ it('is rendered', () => {
+ expect(findManageLabelsButton().exists()).toBe(true);
+ });
+
+ describe('when footerManageLabelTitle is not given', () => {
+ beforeEach(() => {
+ createComponent({ props: { footerManageLabelTitle: undefined } });
+ });
+
+ it('does not render manage labels button', () => {
+ expect(findManageLabelsButton().exists()).toBe(false);
+ });
+ });
+
+ describe('when labelsManagePath is not provided', () => {
+ beforeEach(() => {
+ createComponent({ injected: { labelsManagePath: '' } });
+ });
+
+ it('does not render manage labels button', () => {
+ expect(findManageLabelsButton().exists()).toBe(false);
+ });
+ });
+ });
});
});
diff --git a/spec/frontend/sidebar/components/lock/__snapshots__/edit_form_spec.js.snap b/spec/frontend/sidebar/components/lock/__snapshots__/edit_form_spec.js.snap
index 18d4df297df..d5bbd3bb3c9 100644
--- a/spec/frontend/sidebar/components/lock/__snapshots__/edit_form_spec.js.snap
+++ b/spec/frontend/sidebar/components/lock/__snapshots__/edit_form_spec.js.snap
@@ -12,7 +12,6 @@ exports[`Edit Form Dropdown In issue page when locked the appropriate warning te
message="Unlock this %{issuableDisplayName}? %{strongStart}Everyone%{strongEnd} will be able to comment."
/>
</p>
-
<edit-form-buttons-stub
islocked="true"
issuabledisplayname="issue"
@@ -32,7 +31,6 @@ exports[`Edit Form Dropdown In issue page when unlocked the appropriate warning
message="Lock this %{issuableDisplayName}? Only %{strongStart}project members%{strongEnd} will be able to comment."
/>
</p>
-
<edit-form-buttons-stub
issuabledisplayname="issue"
/>
@@ -51,7 +49,6 @@ exports[`Edit Form Dropdown In merge request page when locked the appropriate wa
message="Unlock this %{issuableDisplayName}? %{strongStart}Everyone%{strongEnd} will be able to comment."
/>
</p>
-
<edit-form-buttons-stub
islocked="true"
issuabledisplayname="merge request"
@@ -71,7 +68,6 @@ exports[`Edit Form Dropdown In merge request page when unlocked the appropriate
message="Lock this %{issuableDisplayName}? Only %{strongStart}project members%{strongEnd} will be able to comment."
/>
</p>
-
<edit-form-buttons-stub
issuabledisplayname="merge request"
/>
diff --git a/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js b/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js
index f43fb17ca37..5dd54d4867e 100644
--- a/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js
+++ b/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js
@@ -120,11 +120,11 @@ describe('Issuable Time Tracker', () => {
describe('Remaining meter', () => {
it('should display the remaining meter with the correct width', () => {
- expect(findTimeRemainingProgress().attributes('value')).toBe('5');
+ expect(findTimeRemainingProgress().vm.$attrs.value).toBe(5);
});
it('should display the remaining meter with the correct background color when within estimate', () => {
- expect(findTimeRemainingProgress().attributes('variant')).toBe('primary');
+ expect(findTimeRemainingProgress().vm.$attrs.variant).toBe('primary');
});
it('should display the remaining meter with the correct background color when over estimate', () => {
@@ -138,7 +138,7 @@ describe('Issuable Time Tracker', () => {
},
});
- expect(findTimeRemainingProgress().attributes('variant')).toBe('danger');
+ expect(findTimeRemainingProgress().vm.$attrs.variant).toBe('danger');
});
});
});
diff --git a/spec/frontend/sidebar/components/todo_toggle/__snapshots__/todo_spec.js.snap b/spec/frontend/sidebar/components/todo_toggle/__snapshots__/todo_spec.js.snap
index fd525474923..b5d8d31f88f 100644
--- a/spec/frontend/sidebar/components/todo_toggle/__snapshots__/todo_spec.js.snap
+++ b/spec/frontend/sidebar/components/todo_toggle/__snapshots__/todo_spec.js.snap
@@ -3,7 +3,7 @@
exports[`SidebarTodo template renders component container element with proper data attributes 1`] = `
<button
aria-label="Mark as done"
- class="gl-button btn btn-default btn-todo issuable-header-btn float-right"
+ class="btn btn-default btn-todo float-right gl-button issuable-header-btn"
data-issuable-id="1"
data-issuable-type="epic"
type="button"
@@ -14,13 +14,11 @@ exports[`SidebarTodo template renders component container element with proper da
size="16"
style="display: none;"
/>
-
<span
class="issuable-todo-inner"
>
Mark as done
</span>
-
<gl-loading-icon-stub
color="dark"
inline="true"
diff --git a/spec/frontend/sidebar/mock_data.js b/spec/frontend/sidebar/mock_data.js
index 05a7f504fd4..9d8392ad5f0 100644
--- a/spec/frontend/sidebar/mock_data.js
+++ b/spec/frontend/sidebar/mock_data.js
@@ -414,6 +414,33 @@ export const searchQueryResponse = {
},
};
+export const searchAutocompleteQueryResponse = {
+ data: {
+ workspace: {
+ __typename: 'Project',
+ id: '',
+ users: [
+ {
+ id: '1',
+ avatarUrl: '/avatar',
+ name: 'root',
+ username: 'root',
+ webUrl: 'root',
+ status: null,
+ },
+ {
+ id: '2',
+ avatarUrl: '/avatar2',
+ name: 'rookie',
+ username: 'rookie',
+ webUrl: 'rookie',
+ status: null,
+ },
+ ],
+ },
+ },
+};
+
export const updateIssueAssigneesMutationResponse = {
data: {
issuableSetAssignees: {
@@ -545,6 +572,29 @@ export const searchResponseOnMR = {
},
};
+export const searchAutocompleteResponseOnMR = {
+ data: {
+ workspace: {
+ __typename: 'Project',
+ id: '1',
+ users: [
+ {
+ ...mockUser1,
+ mergeRequestInteraction: {
+ canMerge: true,
+ },
+ },
+ {
+ ...mockUser2,
+ mergeRequestInteraction: {
+ canMerge: false,
+ },
+ },
+ ],
+ },
+ },
+};
+
export const projectMembersResponse = {
data: {
workspace: {
@@ -585,6 +635,36 @@ export const projectMembersResponse = {
},
};
+export const projectAutocompleteMembersResponse = {
+ data: {
+ workspace: {
+ id: '1',
+ __typename: 'Project',
+ users: [
+ // Remove nulls https://gitlab.com/gitlab-org/gitlab/-/issues/329750
+ null,
+ null,
+ // Remove duplicated entry https://gitlab.com/gitlab-org/gitlab/-/issues/327822
+ mockUser1,
+ mockUser1,
+ mockUser2,
+ {
+ __typename: 'UserCore',
+ id: 'gid://gitlab/User/2',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/a95e5b71488f4b9d69ce5ff58bfd28d6?s=80\u0026d=identicon',
+ name: 'Jacki Kub',
+ username: 'francina.skiles',
+ webUrl: '/franc',
+ status: {
+ availability: 'BUSY',
+ },
+ },
+ ],
+ },
+ },
+};
+
export const groupMembersResponse = {
data: {
workspace: {
diff --git a/spec/frontend/silent_mode_settings/components/app_spec.js b/spec/frontend/silent_mode_settings/components/app_spec.js
new file mode 100644
index 00000000000..5997bfd1b5f
--- /dev/null
+++ b/spec/frontend/silent_mode_settings/components/app_spec.js
@@ -0,0 +1,133 @@
+import { GlToggle, GlBadge } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import waitForPromises from 'helpers/wait_for_promises';
+import { createAlert } from '~/alert';
+import toast from '~/vue_shared/plugins/global_toast';
+import { updateApplicationSettings } from '~/rest_api';
+import SilentModeSettingsApp from '~/silent_mode_settings/components/app.vue';
+
+jest.mock('~/rest_api.js');
+jest.mock('~/alert');
+jest.mock('~/vue_shared/plugins/global_toast');
+
+const MOCK_DEFAULT_SILENT_MODE_ENABLED = false;
+
+describe('SilentModeSettingsApp', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ const defaultProps = {
+ isSilentModeEnabled: MOCK_DEFAULT_SILENT_MODE_ENABLED,
+ };
+
+ wrapper = shallowMount(SilentModeSettingsApp, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ });
+ };
+
+ const findGlToggle = () => wrapper.findComponent(GlToggle);
+ const findGlBadge = () => wrapper.findComponent(GlBadge);
+
+ describe('template', () => {
+ describe('experiment badge', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders properly', () => {
+ expect(findGlBadge().exists()).toBe(true);
+ });
+ });
+
+ describe('when silent mode is already enabled', () => {
+ beforeEach(() => {
+ createComponent({ isSilentModeEnabled: true });
+ });
+
+ it('renders the component with the GlToggle set to true', () => {
+ expect(findGlToggle().attributes('value')).toBe('true');
+ });
+ });
+
+ describe('when silent mode is not already enabled', () => {
+ beforeEach(() => {
+ createComponent({ isSilentModeEnabled: false });
+ });
+
+ it('renders the component with the GlToggle set to undefined', () => {
+ expect(findGlToggle().attributes('value')).toBeUndefined();
+ });
+ });
+ });
+
+ describe.each`
+ enabled | message
+ ${false} | ${'Silent mode disabled'}
+ ${true} | ${'Silent mode enabled'}
+ `(`toast message`, ({ enabled, message }) => {
+ beforeEach(() => {
+ updateApplicationSettings.mockImplementation(() => Promise.resolve());
+ createComponent();
+ });
+
+ it(`when successfully toggled to ${enabled}, toast message is ${message}`, async () => {
+ await findGlToggle().vm.$emit('change', enabled);
+ await waitForPromises();
+
+ expect(toast).toHaveBeenCalledWith(message);
+ });
+ });
+
+ describe.each`
+ description | mockApi | toastMsg | error
+ ${'onSuccess'} | ${() => Promise.resolve()} | ${'Silent mode enabled'} | ${false}
+ ${'onError'} | ${() => Promise.reject()} | ${false} | ${'There was an error updating the Silent Mode Settings.'}
+ `(`when submitting the form $description`, ({ mockApi, toastMsg, error }) => {
+ beforeEach(() => {
+ updateApplicationSettings.mockImplementation(mockApi);
+
+ createComponent();
+ });
+
+ it('calls updateApplicationSettings correctly', () => {
+ findGlToggle().vm.$emit('change', !MOCK_DEFAULT_SILENT_MODE_ENABLED);
+
+ expect(updateApplicationSettings).toHaveBeenCalledWith({
+ silent_mode_enabled: !MOCK_DEFAULT_SILENT_MODE_ENABLED,
+ });
+ });
+
+ it('handles the loading icon correctly', async () => {
+ expect(findGlToggle().props('isLoading')).toBe(false);
+
+ await findGlToggle().vm.$emit('change', !MOCK_DEFAULT_SILENT_MODE_ENABLED);
+
+ expect(findGlToggle().props('isLoading')).toBe(true);
+
+ await waitForPromises();
+
+ expect(findGlToggle().props('isLoading')).toBe(false);
+ });
+
+ it(`does ${toastMsg ? '' : 'not '}render a success toast message`, async () => {
+ await findGlToggle().vm.$emit('change', !MOCK_DEFAULT_SILENT_MODE_ENABLED);
+ await waitForPromises();
+
+ return toastMsg
+ ? expect(toast).toHaveBeenCalledWith(toastMsg)
+ : expect(toast).not.toHaveBeenCalled();
+ });
+
+ it(`does ${error ? '' : 'not '}render an error message`, async () => {
+ await findGlToggle().vm.$emit('change', !MOCK_DEFAULT_SILENT_MODE_ENABLED);
+ await waitForPromises();
+
+ return error
+ ? expect(createAlert).toHaveBeenCalledWith({ message: error })
+ : expect(createAlert).not.toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
index 76e84fa183c..1c60c3af310 100644
--- a/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
@@ -8,11 +8,10 @@ exports[`Snippet Blob Edit component with loaded blob matches snapshot 1`] = `
<blob-header-edit-stub
candelete="true"
data-qa-selector="file_name_field"
- id="blob_local_7_file_path"
+ id="reference-0"
showdelete="true"
value="foo/bar/test.md"
/>
-
<source-editor-stub
debouncevalue="250"
editoroptions="[object Object]"
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
index e783927f87b..5ed3b520b70 100644
--- a/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
@@ -5,16 +5,15 @@ exports[`Snippet Description Edit component rendering matches the snapshot 1`] =
class="form-group js-description-input"
>
<label
- for="snippet-description"
+ for="reference-0"
>
Description (optional)
</label>
-
<div
class="js-collapsible-input"
>
<div
- class="js-collapsed d-none"
+ class="d-none js-collapsed"
>
<gl-form-input-stub
class="form-control"
@@ -22,9 +21,8 @@ exports[`Snippet Description Edit component rendering matches the snapshot 1`] =
placeholder="Describe what your snippet does or how to use it…"
/>
</div>
-
<div
- class="js-vue-markdown-field md-area position-relative gfm-form gl-overflow-hidden js-expanded"
+ class="gfm-form gl-overflow-hidden js-expanded js-vue-markdown-field md-area position-relative"
data-uploads-path=""
>
<markdown-header-stub
@@ -36,23 +34,22 @@ exports[`Snippet Description Edit component rendering matches the snapshot 1`] =
suggestionstartindex="0"
uploadspath=""
/>
-
<div
class="md-write-holder"
>
<div
- class="zen-backdrop div-dropzone-wrapper"
+ class="div-dropzone-wrapper zen-backdrop"
>
<div
class="div-dropzone js-invalid-dropzone"
>
<textarea
aria-label="Description"
- class="note-textarea js-gfm-input js-autosize markdown-area js-gfm-input-initialized"
+ class="js-autosize js-gfm-input js-gfm-input-initialized markdown-area note-textarea"
data-qa-selector="snippet_description_field"
data-supports-quick-actions="false"
dir="auto"
- id="snippet-description"
+ id="reference-0"
placeholder="Write a comment or drag your files here…"
style="overflow-x: hidden; word-wrap: break-word; overflow-y: hidden;"
/>
@@ -68,10 +65,9 @@ exports[`Snippet Description Edit component rendering matches the snapshot 1`] =
</svg>
</div>
</div>
-
<a
aria-label="Leave zen mode"
- class="zen-control zen-control-leave js-zen-leave gl-text-gray-500"
+ class="gl-text-gray-500 js-zen-leave zen-control zen-control-leave"
href="#"
>
<gl-icon-stub
@@ -79,7 +75,6 @@ exports[`Snippet Description Edit component rendering matches the snapshot 1`] =
size="16"
/>
</a>
-
<markdown-toolbar-stub
canattachfile="true"
markdowndocspath="help/"
@@ -87,19 +82,14 @@ exports[`Snippet Description Edit component rendering matches the snapshot 1`] =
/>
</div>
</div>
-
<div
- class="js-vue-md-preview md-preview-holder gl-px-5"
+ class="gl-px-5 js-vue-md-preview md-preview-holder"
style="display: none;"
>
<div
class="md"
/>
</div>
-
- <!---->
-
- <!---->
</div>
</div>
</div>
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_description_view_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_description_view_spec.js.snap
index 9fb43815cbc..2b2335036f6 100644
--- a/spec/frontend/snippets/components/__snapshots__/snippet_description_view_spec.js.snap
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_description_view_spec.js.snap
@@ -6,7 +6,7 @@ exports[`Snippet Description component matches the snapshot 1`] = `
data-qa-selector="snippet_description_content"
>
<div
- class="md js-snippet-description"
+ class="js-snippet-description md"
>
<h2>
The property of Thor
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_visibility_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_visibility_edit_spec.js.snap
index ed54582ca29..3274f41e4af 100644
--- a/spec/frontend/snippets/components/__snapshots__/snippet_visibility_edit_spec.js.snap
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_visibility_edit_spec.js.snap
@@ -5,9 +5,7 @@ exports[`Snippet Visibility Edit component rendering matches the snapshot 1`] =
class="form-group"
>
<label>
-
Visibility level
-
<gl-link-stub
href="/foo/bar"
target="_blank"
@@ -18,10 +16,9 @@ exports[`Snippet Visibility Edit component rendering matches the snapshot 1`] =
/>
</gl-link-stub>
</label>
-
<gl-form-group-stub
class="gl-mb-0"
- id="visibility-level-setting"
+ id="reference-0"
labeldescription=""
optionaltext="(optional)"
>
@@ -39,15 +36,14 @@ exports[`Snippet Visibility Edit component rendering matches the snapshot 1`] =
value="private"
>
<div
- class="d-flex align-items-center"
+ class="align-items-center d-flex"
>
<gl-icon-stub
name="lock"
size="16"
/>
-
<span
- class="font-weight-bold ml-1 js-visibility-option"
+ class="font-weight-bold js-visibility-option ml-1"
data-qa-selector="visibility_content"
data-qa-visibility="Private"
>
@@ -60,15 +56,14 @@ exports[`Snippet Visibility Edit component rendering matches the snapshot 1`] =
value="internal"
>
<div
- class="d-flex align-items-center"
+ class="align-items-center d-flex"
>
<gl-icon-stub
name="shield"
size="16"
/>
-
<span
- class="font-weight-bold ml-1 js-visibility-option"
+ class="font-weight-bold js-visibility-option ml-1"
data-qa-selector="visibility_content"
data-qa-visibility="Internal"
>
@@ -81,15 +76,14 @@ exports[`Snippet Visibility Edit component rendering matches the snapshot 1`] =
value="public"
>
<div
- class="d-flex align-items-center"
+ class="align-items-center d-flex"
>
<gl-icon-stub
name="earth"
size="16"
/>
-
<span
- class="font-weight-bold ml-1 js-visibility-option"
+ class="font-weight-bold js-visibility-option ml-1"
data-qa-selector="visibility_content"
data-qa-visibility="Public"
>
@@ -99,12 +93,9 @@ exports[`Snippet Visibility Edit component rendering matches the snapshot 1`] =
</gl-form-radio-stub>
</gl-form-radio-group-stub>
</gl-form-group-stub>
-
<div
class="text-muted"
data-testid="restricted-levels-info"
- >
- <!---->
- </div>
+ />
</div>
`;
diff --git a/spec/frontend/snippets/components/embed_dropdown_spec.js b/spec/frontend/snippets/components/embed_dropdown_spec.js
index d8c6ad3278a..cb9b9800bfe 100644
--- a/spec/frontend/snippets/components/embed_dropdown_spec.js
+++ b/spec/frontend/snippets/components/embed_dropdown_spec.js
@@ -1,6 +1,6 @@
import { GlFormInputGroup } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
import { escape as esc } from 'lodash';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { TEST_HOST } from 'helpers/test_constants';
import EmbedDropdown from '~/snippets/components/embed_dropdown.vue';
@@ -10,56 +10,24 @@ describe('snippets/components/embed_dropdown', () => {
let wrapper;
const createComponent = () => {
- wrapper = mount(EmbedDropdown, {
+ wrapper = shallowMountExtended(EmbedDropdown, {
propsData: {
url: TEST_URL,
},
});
};
- const findSectionsData = () => {
- const sections = [];
- let current = {};
-
- wrapper.findAll('[data-testid="header"],[data-testid="input"]').wrappers.forEach((x) => {
- const type = x.attributes('data-testid');
-
- if (type === 'header') {
- current = {
- header: x.text(),
- };
-
- sections.push(current);
- } else {
- const value = x.findComponent(GlFormInputGroup).props('value');
- const copyValue = x.find('button[title="Copy"]').attributes('data-clipboard-text');
-
- Object.assign(current, {
- value,
- copyValue,
- });
- }
- });
-
- return sections;
- };
+ const findEmbedSection = () => wrapper.findByTestId('section-Embed');
+ const findShareSection = () => wrapper.findByTestId('section-Share');
it('renders dropdown items', () => {
createComponent();
const embedValue = `<script src="${esc(TEST_URL)}.js"></script>`;
- expect(findSectionsData()).toEqual([
- {
- header: 'Embed',
- value: embedValue,
- copyValue: embedValue,
- },
- {
- header: 'Share',
- value: TEST_URL,
- copyValue: TEST_URL,
- },
- ]);
+ expect(findEmbedSection().text()).toBe('Embed');
+ expect(findShareSection().text()).toBe('Share');
+ expect(findEmbedSection().findComponent(GlFormInputGroup).attributes('value')).toBe(embedValue);
+ expect(findShareSection().findComponent(GlFormInputGroup).attributes('value')).toBe(TEST_URL);
});
});
diff --git a/spec/frontend/super_sidebar/components/context_header_spec.js b/spec/frontend/super_sidebar/components/context_header_spec.js
deleted file mode 100644
index 943b659c997..00000000000
--- a/spec/frontend/super_sidebar/components/context_header_spec.js
+++ /dev/null
@@ -1,50 +0,0 @@
-import { GlAvatar } from '@gitlab/ui';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import ContextHeader from '~/super_sidebar/components/context_header.vue';
-
-describe('ContextHeader component', () => {
- let wrapper;
-
- const context = {
- id: 1,
- title: 'Title',
- avatar: '/path/to/avatar.png',
- };
-
- const findGlAvatar = () => wrapper.getComponent(GlAvatar);
-
- const createWrapper = (props = {}) => {
- wrapper = shallowMountExtended(ContextHeader, {
- propsData: {
- context,
- expanded: false,
- ...props,
- },
- });
- };
-
- describe('with an avatar', () => {
- it('passes the correct props to GlAvatar', () => {
- createWrapper();
- const avatar = findGlAvatar();
-
- expect(avatar.props('shape')).toBe('rect');
- expect(avatar.props('entityName')).toBe(context.title);
- expect(avatar.props('entityId')).toBe(context.id);
- expect(avatar.props('src')).toBe(context.avatar);
- });
-
- it('renders the avatar with a custom shape', () => {
- const customShape = 'circle';
- createWrapper({
- context: {
- ...context,
- avatar_shape: customShape,
- },
- });
- const avatar = findGlAvatar();
-
- expect(avatar.props('shape')).toBe(customShape);
- });
- });
-});
diff --git a/spec/frontend/super_sidebar/components/context_switcher_spec.js b/spec/frontend/super_sidebar/components/context_switcher_spec.js
deleted file mode 100644
index dd8f39e7cb7..00000000000
--- a/spec/frontend/super_sidebar/components/context_switcher_spec.js
+++ /dev/null
@@ -1,302 +0,0 @@
-import Vue, { nextTick } from 'vue';
-import VueApollo from 'vue-apollo';
-import { GlDisclosureDropdown, GlSearchBoxByType, GlLoadingIcon, GlAlert } from '@gitlab/ui';
-import * as Sentry from '@sentry/browser';
-import { s__ } from '~/locale';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import ContextSwitcher from '~/super_sidebar/components/context_switcher.vue';
-import ContextSwitcherToggle from '~/super_sidebar/components/context_switcher_toggle.vue';
-import NavItem from '~/super_sidebar/components/nav_item.vue';
-import ProjectsList from '~/super_sidebar/components/projects_list.vue';
-import GroupsList from '~/super_sidebar/components/groups_list.vue';
-import createMockApollo from 'helpers/mock_apollo_helper';
-import searchUserProjectsAndGroupsQuery from '~/super_sidebar/graphql/queries/search_user_groups_and_projects.query.graphql';
-import { trackContextAccess, formatContextSwitcherItems } from '~/super_sidebar/utils';
-import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
-import waitForPromises from 'helpers/wait_for_promises';
-import { stubComponent } from 'helpers/stub_component';
-import { contextSwitcherLinks, searchUserProjectsAndGroupsResponseMock } from '../mock_data';
-
-jest.mock('~/super_sidebar/utils', () => ({
- getStorageKeyFor: jest.requireActual('~/super_sidebar/utils').getStorageKeyFor,
- getTopFrequentItems: jest.requireActual('~/super_sidebar/utils').getTopFrequentItems,
- formatContextSwitcherItems: jest.requireActual('~/super_sidebar/utils')
- .formatContextSwitcherItems,
- trackContextAccess: jest.fn(),
-}));
-const focusInputMock = jest.fn();
-
-const username = 'root';
-const projectsPath = 'projectsPath';
-const groupsPath = 'groupsPath';
-const contextHeader = { avatar_shape: 'circle' };
-
-Vue.use(VueApollo);
-
-describe('ContextSwitcher component', () => {
- let wrapper;
- let mockApollo;
-
- const findDisclosureDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
- const findContextSwitcherToggle = () => wrapper.findComponent(ContextSwitcherToggle);
- const findNavItems = () => wrapper.findAllComponents(NavItem);
- const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
- const findProjectsList = () => wrapper.findComponent(ProjectsList);
- const findGroupsList = () => wrapper.findComponent(GroupsList);
- const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
- const findAlert = () => wrapper.findComponent(GlAlert);
-
- const triggerSearchQuery = async () => {
- findSearchBox().vm.$emit('input', 'foo');
- await nextTick();
- jest.advanceTimersByTime(DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
- return waitForPromises();
- };
-
- const searchUserProjectsAndGroupsHandlerSuccess = jest
- .fn()
- .mockResolvedValue(searchUserProjectsAndGroupsResponseMock);
-
- const createWrapper = ({ props = {}, requestHandlers = {} } = {}) => {
- mockApollo = createMockApollo([
- [
- searchUserProjectsAndGroupsQuery,
- requestHandlers.searchUserProjectsAndGroupsQueryHandler ??
- searchUserProjectsAndGroupsHandlerSuccess,
- ],
- ]);
-
- wrapper = shallowMountExtended(ContextSwitcher, {
- apolloProvider: mockApollo,
- provide: {
- contextSwitcherLinks,
- },
- propsData: {
- username,
- projectsPath,
- groupsPath,
- contextHeader,
- ...props,
- },
- stubs: {
- GlDisclosureDropdown: stubComponent(GlDisclosureDropdown, {
- template: `
- <div>
- <slot name="toggle" />
- <slot />
- </div>
- `,
- }),
- GlSearchBoxByType: stubComponent(GlSearchBoxByType, {
- props: ['placeholder'],
- methods: { focusInput: focusInputMock },
- }),
- ProjectsList: stubComponent(ProjectsList, {
- props: ['username', 'viewAllLink', 'isSearch', 'searchResults'],
- }),
- GroupsList: stubComponent(GroupsList, {
- props: ['username', 'viewAllLink', 'isSearch', 'searchResults'],
- }),
- },
- });
- };
-
- describe('default', () => {
- beforeEach(() => {
- createWrapper();
- });
-
- it('renders the context switcher links', () => {
- const navItems = findNavItems();
- const firstNavItem = navItems.at(0);
-
- expect(navItems.length).toBe(contextSwitcherLinks.length);
- expect(firstNavItem.props('item')).toBe(contextSwitcherLinks[0]);
- expect(firstNavItem.props('linkClasses')).toEqual({
- [contextSwitcherLinks[0].link_classes]: contextSwitcherLinks[0].link_classes,
- });
- });
-
- it('passes the placeholder to the search box', () => {
- expect(findSearchBox().props('placeholder')).toBe(
- s__('Navigation|Search your projects or groups'),
- );
- });
-
- it('passes the correct props to the frequent projects list', () => {
- expect(findProjectsList().props()).toEqual({
- username,
- viewAllLink: projectsPath,
- isSearch: false,
- searchResults: [],
- });
- });
-
- it('passes the correct props to the frequent groups list', () => {
- expect(findGroupsList().props()).toEqual({
- username,
- viewAllLink: groupsPath,
- isSearch: false,
- searchResults: [],
- });
- });
-
- it('does not trigger the search query on mount', () => {
- expect(searchUserProjectsAndGroupsHandlerSuccess).not.toHaveBeenCalled();
- });
-
- it('shows a loading spinner when search query is typed in', async () => {
- findSearchBox().vm.$emit('input', 'foo');
- await nextTick();
-
- expect(findLoadingIcon().exists()).toBe(true);
- });
-
- it('passes the correct props to the toggle', () => {
- expect(findContextSwitcherToggle().props('context')).toEqual(contextHeader);
- expect(findContextSwitcherToggle().props('expanded')).toEqual(false);
- });
-
- it('does not emit the `toggle` event initially', () => {
- expect(wrapper.emitted('toggle')).toBe(undefined);
- });
- });
-
- describe('visibility changes', () => {
- beforeEach(() => {
- createWrapper();
- findDisclosureDropdown().vm.$emit('shown');
- });
-
- it('emits the `toggle` event, focuses the search input and puts the toggle in the expanded state when opened', () => {
- expect(wrapper.emitted('toggle')).toHaveLength(1);
- expect(wrapper.emitted('toggle')[0]).toEqual([true]);
- expect(focusInputMock).toHaveBeenCalledTimes(1);
- expect(findContextSwitcherToggle().props('expanded')).toBe(true);
- });
-
- it("emits the `toggle` event, does not attempt to focus the input, and resets the toggle's `expanded` props to `false` when closed", async () => {
- findDisclosureDropdown().vm.$emit('hidden');
- await nextTick();
-
- expect(wrapper.emitted('toggle')).toHaveLength(2);
- expect(wrapper.emitted('toggle')[1]).toEqual([false]);
- expect(focusInputMock).toHaveBeenCalledTimes(1);
- expect(findContextSwitcherToggle().props('expanded')).toBe(false);
- });
- });
-
- describe('item access tracking', () => {
- it('does not track anything if not within a trackable context', () => {
- createWrapper();
-
- expect(trackContextAccess).not.toHaveBeenCalled();
- });
-
- it('tracks item access if within a trackable context', () => {
- const currentContext = { namespace: 'groups' };
- createWrapper({
- props: {
- currentContext,
- },
- });
-
- expect(trackContextAccess).toHaveBeenCalledWith(username, currentContext);
- });
- });
-
- describe('on search', () => {
- beforeEach(() => {
- createWrapper();
- return triggerSearchQuery();
- });
-
- it('hides persistent links', () => {
- expect(findNavItems().length).toBe(0);
- });
-
- it('triggers the search query on search', () => {
- expect(searchUserProjectsAndGroupsHandlerSuccess).toHaveBeenCalled();
- });
-
- it('hides the loading spinner', () => {
- expect(findLoadingIcon().exists()).toBe(false);
- });
-
- it('passes the projects to the frequent projects list', () => {
- expect(findProjectsList().props('isSearch')).toBe(true);
- expect(findProjectsList().props('searchResults')).toEqual(
- formatContextSwitcherItems(searchUserProjectsAndGroupsResponseMock.data.projects.nodes),
- );
- });
-
- it('passes the groups to the frequent groups list', () => {
- expect(findGroupsList().props('isSearch')).toBe(true);
- expect(findGroupsList().props('searchResults')).toEqual(
- formatContextSwitcherItems(searchUserProjectsAndGroupsResponseMock.data.user.groups.nodes),
- );
- });
- });
-
- describe('when search query does not match any items', () => {
- beforeEach(() => {
- createWrapper({
- requestHandlers: {
- searchUserProjectsAndGroupsQueryHandler: jest.fn().mockResolvedValue({
- data: {
- projects: {
- nodes: [],
- },
- user: {
- id: '1',
- groups: {
- nodes: [],
- },
- },
- },
- }),
- },
- });
- return triggerSearchQuery();
- });
-
- it('passes empty results to the lists', () => {
- expect(findProjectsList().props('isSearch')).toBe(true);
- expect(findProjectsList().props('searchResults')).toEqual([]);
- expect(findGroupsList().props('isSearch')).toBe(true);
- expect(findGroupsList().props('searchResults')).toEqual([]);
- });
- });
-
- describe('when search query fails', () => {
- beforeEach(() => {
- jest.spyOn(Sentry, 'captureException');
- });
-
- it('captures exception and shows an alert if response is formatted incorrectly', async () => {
- createWrapper({
- requestHandlers: {
- searchUserProjectsAndGroupsQueryHandler: jest.fn().mockResolvedValue({
- data: {},
- }),
- },
- });
- await triggerSearchQuery();
-
- expect(Sentry.captureException).toHaveBeenCalled();
- expect(findAlert().exists()).toBe(true);
- });
-
- it('captures exception and shows an alert if query fails', async () => {
- createWrapper({
- requestHandlers: {
- searchUserProjectsAndGroupsQueryHandler: jest.fn().mockRejectedValue(),
- },
- });
- await triggerSearchQuery();
-
- expect(Sentry.captureException).toHaveBeenCalled();
- expect(findAlert().exists()).toBe(true);
- });
- });
-});
diff --git a/spec/frontend/super_sidebar/components/context_switcher_toggle_spec.js b/spec/frontend/super_sidebar/components/context_switcher_toggle_spec.js
deleted file mode 100644
index c20d3c2745f..00000000000
--- a/spec/frontend/super_sidebar/components/context_switcher_toggle_spec.js
+++ /dev/null
@@ -1,39 +0,0 @@
-import { GlIcon } from '@gitlab/ui';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import ContextSwitcherToggle from '~/super_sidebar/components/context_switcher_toggle.vue';
-
-describe('ContextSwitcherToggle component', () => {
- let wrapper;
-
- const context = {
- id: 1,
- title: 'Title',
- avatar: '/path/to/avatar.png',
- };
-
- const findGlIcon = () => wrapper.getComponent(GlIcon);
-
- const createWrapper = (props = {}) => {
- wrapper = shallowMountExtended(ContextSwitcherToggle, {
- propsData: {
- context,
- expanded: false,
- ...props,
- },
- });
- };
-
- it('renders "chevron-down" icon when not expanded', () => {
- createWrapper();
-
- expect(findGlIcon().props('name')).toBe('chevron-down');
- });
-
- it('renders "chevron-up" icon when expanded', () => {
- createWrapper({
- expanded: true,
- });
-
- expect(findGlIcon().props('name')).toBe('chevron-up');
- });
-});
diff --git a/spec/frontend/super_sidebar/components/create_menu_spec.js b/spec/frontend/super_sidebar/components/create_menu_spec.js
index 510a3f5b913..b967fb18a39 100644
--- a/spec/frontend/super_sidebar/components/create_menu_spec.js
+++ b/spec/frontend/super_sidebar/components/create_menu_spec.js
@@ -1,7 +1,6 @@
import { nextTick } from 'vue';
import {
GlDisclosureDropdown,
- GlTooltip,
GlDisclosureDropdownGroup,
GlDisclosureDropdownItem,
} from '@gitlab/ui';
@@ -9,6 +8,7 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import InviteMembersTrigger from '~/invite_members/components/invite_members_trigger.vue';
import { __ } from '~/locale';
import CreateMenu from '~/super_sidebar/components/create_menu.vue';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { createNewMenuGroups } from '../mock_data';
describe('CreateMenu component', () => {
@@ -18,7 +18,6 @@ describe('CreateMenu component', () => {
const findGlDisclosureDropdownGroups = () => wrapper.findAllComponents(GlDisclosureDropdownGroup);
const findGlDisclosureDropdownItems = () => wrapper.findAllComponents(GlDisclosureDropdownItem);
const findInviteMembersTrigger = () => wrapper.findComponent(InviteMembersTrigger);
- const findGlTooltip = () => wrapper.findComponent(GlTooltip);
const createWrapper = ({ provide = {} } = {}) => {
wrapper = shallowMountExtended(CreateMenu, {
@@ -33,6 +32,9 @@ describe('CreateMenu component', () => {
InviteMembersTrigger,
GlDisclosureDropdown,
},
+ directives: {
+ GlTooltip: createMockDirective('gl-tooltip'),
+ },
});
};
@@ -45,7 +47,7 @@ describe('CreateMenu component', () => {
createWrapper();
expect(findGlDisclosureDropdown().props('dropdownOffset')).toEqual({
- crossAxis: -147,
+ crossAxis: -179,
mainAxis: 4,
});
});
@@ -74,16 +76,12 @@ describe('CreateMenu component', () => {
expect(findInviteMembersTrigger().exists()).toBe(true);
});
- it("sets the toggle ID and tooltip's target", () => {
- expect(findGlDisclosureDropdown().props('toggleId')).toBe(wrapper.vm.$options.toggleId);
- expect(findGlTooltip().props('target')).toBe(`#${wrapper.vm.$options.toggleId}`);
- });
-
it('hides the tooltip when the dropdown is opened', async () => {
findGlDisclosureDropdown().vm.$emit('shown');
await nextTick();
- expect(findGlTooltip().exists()).toBe(false);
+ const tooltip = getBinding(findGlDisclosureDropdown().element, 'gl-tooltip');
+ expect(tooltip.value).toBe('');
});
it('shows the tooltip when the dropdown is closed', async () => {
@@ -91,7 +89,8 @@ describe('CreateMenu component', () => {
findGlDisclosureDropdown().vm.$emit('hidden');
await nextTick();
- expect(findGlTooltip().exists()).toBe(true);
+ const tooltip = getBinding(findGlDisclosureDropdown().element, 'gl-tooltip');
+ expect(tooltip.value).toBe('Create new...');
});
});
@@ -99,7 +98,7 @@ describe('CreateMenu component', () => {
createWrapper({ provide: { isImpersonating: true } });
expect(findGlDisclosureDropdown().props('dropdownOffset')).toEqual({
- crossAxis: -115,
+ crossAxis: -147,
mainAxis: 4,
});
});
diff --git a/spec/frontend/super_sidebar/components/flyout_menu_spec.js b/spec/frontend/super_sidebar/components/flyout_menu_spec.js
index b894d29c875..bf24de870d9 100644
--- a/spec/frontend/super_sidebar/components/flyout_menu_spec.js
+++ b/spec/frontend/super_sidebar/components/flyout_menu_spec.js
@@ -1,16 +1,26 @@
-import { shallowMount } from '@vue/test-utils';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import FlyoutMenu from '~/super_sidebar/components/flyout_menu.vue';
jest.mock('@floating-ui/dom');
describe('FlyoutMenu', () => {
let wrapper;
+ let dummySection;
const createComponent = () => {
- wrapper = shallowMount(FlyoutMenu, {
+ dummySection = document.createElement('section');
+ dummySection.addEventListener = jest.fn();
+
+ dummySection.getBoundingClientRect = jest.fn();
+ dummySection.getBoundingClientRect.mockReturnValue({ top: 0, bottom: 5, width: 10 });
+
+ document.querySelector = jest.fn();
+ document.querySelector.mockReturnValue(dummySection);
+
+ wrapper = mountExtended(FlyoutMenu, {
propsData: {
targetId: 'section-1',
- items: [],
+ items: [{ id: 1, title: 'item 1', link: 'https://example.com' }],
},
});
};
diff --git a/spec/frontend/super_sidebar/components/frequent_items_list_spec.js b/spec/frontend/super_sidebar/components/frequent_items_list_spec.js
deleted file mode 100644
index 63dd941974a..00000000000
--- a/spec/frontend/super_sidebar/components/frequent_items_list_spec.js
+++ /dev/null
@@ -1,85 +0,0 @@
-import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
-import { s__ } from '~/locale';
-import FrequentItemsList from '~/super_sidebar/components//frequent_items_list.vue';
-import ItemsList from '~/super_sidebar/components/items_list.vue';
-import { useLocalStorageSpy } from 'helpers/local_storage_helper';
-import { cachedFrequentProjects } from '../mock_data';
-
-const title = s__('Navigation|FREQUENT PROJECTS');
-const pristineText = s__('Navigation|Projects you visit often will appear here.');
-const storageKey = 'storageKey';
-const maxItems = 5;
-
-describe('FrequentItemsList component', () => {
- useLocalStorageSpy();
-
- let wrapper;
-
- const findListTitle = () => wrapper.findByTestId('list-title');
- const findItemsList = () => wrapper.findComponent(ItemsList);
- const findEmptyText = () => wrapper.findByTestId('empty-text');
- const findRemoveItemButton = () => wrapper.findByTestId('item-remove');
-
- const createWrapperFactory = (mountFn = shallowMountExtended) => () => {
- wrapper = mountFn(FrequentItemsList, {
- propsData: {
- title,
- pristineText,
- storageKey,
- maxItems,
- },
- });
- };
- const createWrapper = createWrapperFactory();
- const createFullWrapper = createWrapperFactory(mountExtended);
-
- describe('default', () => {
- beforeEach(() => {
- createWrapper();
- });
-
- it("renders the list's title", () => {
- expect(findListTitle().text()).toBe(title);
- });
-
- it('renders the empty text', () => {
- expect(findEmptyText().exists()).toBe(true);
- expect(findEmptyText().text()).toBe(pristineText);
- });
- });
-
- describe('when there are cached frequent items', () => {
- beforeEach(() => {
- window.localStorage.setItem(storageKey, cachedFrequentProjects);
- createWrapper();
- });
-
- it('attempts to retrieve the items from the local storage', () => {
- expect(window.localStorage.getItem).toHaveBeenCalledTimes(1);
- expect(window.localStorage.getItem).toHaveBeenCalledWith(storageKey);
- });
-
- it('renders the maximum amount of items', () => {
- expect(findItemsList().props('items').length).toBe(maxItems);
- });
-
- it('does not render the empty text slot', () => {
- expect(findEmptyText().exists()).toBe(false);
- });
- });
-
- describe('items editing', () => {
- beforeEach(() => {
- window.localStorage.setItem(storageKey, cachedFrequentProjects);
- createFullWrapper();
- });
-
- it('remove-item event emission from items-list causes list item to be removed', async () => {
- const localStorageProjects = findItemsList().props('items');
- await findRemoveItemButton().trigger('click');
-
- expect(findItemsList().props('items')).toHaveLength(maxItems - 1);
- expect(findItemsList().props('items')).not.toContain(localStorageProjects[0]);
- });
- });
-});
diff --git a/spec/frontend/super_sidebar/components/global_search/command_palette/__snapshots__/search_item_spec.js.snap b/spec/frontend/super_sidebar/components/global_search/command_palette/__snapshots__/search_item_spec.js.snap
index d16d137db2f..e6635672ccf 100644
--- a/spec/frontend/super_sidebar/components/global_search/command_palette/__snapshots__/search_item_spec.js.snap
+++ b/spec/frontend/super_sidebar/components/global_search/command_palette/__snapshots__/search_item_spec.js.snap
@@ -2,7 +2,7 @@
exports[`SearchItem should render the item 1`] = `
<div
- class="gl-display-flex gl-align-items-center"
+ class="gl-align-items-center gl-display-flex"
>
<gl-avatar-stub
alt="avatar"
@@ -14,33 +14,25 @@ exports[`SearchItem should render the item 1`] = `
size="16"
src="https://www.gravatar.com/avatar/a9638f4ec70148d51e56bf05ad41e993?s=80&d=identicon"
/>
-
- <!---->
-
<span
class="gl-display-flex gl-flex-direction-column"
>
<span
class="gl-text-gray-900"
/>
-
- <!---->
</span>
</div>
`;
exports[`SearchItem should render the item 2`] = `
<div
- class="gl-display-flex gl-align-items-center"
+ class="gl-align-items-center gl-display-flex"
>
- <!---->
-
<gl-icon-stub
class="gl-mr-3"
name="users"
size="16"
/>
-
<span
class="gl-display-flex gl-flex-direction-column"
>
@@ -49,15 +41,13 @@ exports[`SearchItem should render the item 2`] = `
>
Manage &gt; Activity
</span>
-
- <!---->
</span>
</div>
`;
exports[`SearchItem should render the item 3`] = `
<div
- class="gl-display-flex gl-align-items-center"
+ class="gl-align-items-center gl-display-flex"
>
<gl-avatar-stub
alt="avatar"
@@ -69,9 +59,6 @@ exports[`SearchItem should render the item 3`] = `
size="32"
src="/project/avatar/1/avatar.png"
/>
-
- <!---->
-
<span
class="gl-display-flex gl-flex-direction-column"
>
@@ -80,7 +67,6 @@ exports[`SearchItem should render the item 3`] = `
>
MockProject1
</span>
-
<span
class="gl-font-sm gl-text-gray-500"
>
@@ -92,7 +78,7 @@ exports[`SearchItem should render the item 3`] = `
exports[`SearchItem should render the item 4`] = `
<div
- class="gl-display-flex gl-align-items-center"
+ class="gl-align-items-center gl-display-flex"
>
<gl-avatar-stub
alt="avatar"
@@ -104,9 +90,6 @@ exports[`SearchItem should render the item 4`] = `
size="16"
src=""
/>
-
- <!---->
-
<span
class="gl-display-flex gl-flex-direction-column"
>
@@ -115,8 +98,6 @@ exports[`SearchItem should render the item 4`] = `
>
Dismiss Cipher with no integrity
</span>
-
- <!---->
</span>
</div>
`;
diff --git a/spec/frontend/super_sidebar/components/global_search/command_palette/command_palette_items_spec.js b/spec/frontend/super_sidebar/components/global_search/command_palette/command_palette_items_spec.js
index 85eb7e2e241..7d85dbcbdd3 100644
--- a/spec/frontend/super_sidebar/components/global_search/command_palette/command_palette_items_spec.js
+++ b/spec/frontend/super_sidebar/components/global_search/command_palette/command_palette_items_spec.js
@@ -9,6 +9,7 @@ import {
PATH_GROUP_TITLE,
USER_HANDLE,
PATH_HANDLE,
+ PROJECT_HANDLE,
SEARCH_SCOPE,
MAX_ROWS,
} from '~/super_sidebar/components/global_search/command_palette/constants';
@@ -20,6 +21,7 @@ import {
import { getFormattedItem } from '~/super_sidebar/components/global_search/utils';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
+import { mockTracking } from 'helpers/tracking_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { COMMANDS, LINKS, USERS, FILES } from './mock_data';
@@ -32,7 +34,7 @@ describe('CommandPaletteItems', () => {
const projectFilesPath = 'project/files/path';
const projectBlobPath = '/blob/main';
- const createComponent = (props) => {
+ const createComponent = (props, options = {}) => {
wrapper = shallowMount(CommandPaletteItems, {
propsData: {
handle: COMMAND_HANDLE,
@@ -51,6 +53,7 @@ describe('CommandPaletteItems', () => {
projectFilesPath,
projectBlobPath,
},
+ ...options,
});
};
@@ -227,4 +230,41 @@ describe('CommandPaletteItems', () => {
expect(axios.get).toHaveBeenCalledTimes(1);
});
});
+
+ describe('Tracking', () => {
+ let trackingSpy;
+ let mockAxios;
+
+ beforeEach(() => {
+ trackingSpy = mockTracking(undefined, undefined, jest.spyOn);
+ mockAxios = new MockAdapter(axios);
+ createComponent({ attachTo: document.body });
+ });
+
+ afterEach(() => {
+ mockAxios.restore();
+ });
+
+ it('tracks event immediately', () => {
+ expect(trackingSpy).toHaveBeenCalledTimes(1);
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'activate_command_palette', {
+ label: 'command',
+ });
+ });
+
+ it.each`
+ handle | label
+ ${USER_HANDLE} | ${'user'}
+ ${PROJECT_HANDLE} | ${'project'}
+ ${PATH_HANDLE} | ${'path'}
+ `('tracks changing the handle to "$handle"', async ({ handle, label }) => {
+ trackingSpy.mockClear();
+
+ await wrapper.setProps({ handle });
+ expect(trackingSpy).toHaveBeenCalledTimes(1);
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'activate_command_palette', {
+ label,
+ });
+ });
+ });
});
diff --git a/spec/frontend/super_sidebar/components/global_search/command_palette/mock_data.js b/spec/frontend/super_sidebar/components/global_search/command_palette/mock_data.js
index d01e5c85741..25a23433b1e 100644
--- a/spec/frontend/super_sidebar/components/global_search/command_palette/mock_data.js
+++ b/spec/frontend/super_sidebar/components/global_search/command_palette/mock_data.js
@@ -69,24 +69,41 @@ export const TRANSFORMED_LINKS = [
icon: 'users',
keywords: 'Manage',
text: 'Manage',
+ extraAttrs: {
+ 'data-track-action': 'click_command_palette_item',
+ 'data-track-label': 'item_without_id',
+ 'data-track-extra': '{"title":"Manage"}',
+ },
},
{
href: '/flightjs/Flight/activity',
icon: 'users',
keywords: 'Activity',
text: 'Manage > Activity',
+ extraAttrs: {
+ 'data-track-action': 'click_command_palette_item',
+ 'data-track-label': 'activity',
+ },
},
{
href: '/flightjs/Flight/-/project_members',
icon: 'users',
keywords: 'Members',
text: 'Manage > Members',
+ extraAttrs: {
+ 'data-track-action': 'click_command_palette_item',
+ 'data-track-label': 'members',
+ },
},
{
href: '/flightjs/Flight/-/labels',
icon: 'users',
keywords: 'Labels',
text: 'Manage > Labels',
+ extraAttrs: {
+ 'data-track-action': 'click_command_palette_item',
+ 'data-track-label': 'labels',
+ },
},
];
diff --git a/spec/frontend/super_sidebar/components/global_search/command_palette/utils_spec.js b/spec/frontend/super_sidebar/components/global_search/command_palette/utils_spec.js
index ebc52e2d910..76768bd8da9 100644
--- a/spec/frontend/super_sidebar/components/global_search/command_palette/utils_spec.js
+++ b/spec/frontend/super_sidebar/components/global_search/command_palette/utils_spec.js
@@ -26,6 +26,10 @@ describe('fileMapper', () => {
icon: 'doc-code',
text: file,
href: `${projectBlobPath}/${file}`,
+ extraAttrs: {
+ 'data-track-action': 'click_command_palette_item',
+ 'data-track-label': 'file',
+ },
});
});
});
diff --git a/spec/frontend/super_sidebar/components/global_search/components/global_search_default_places_spec.js b/spec/frontend/super_sidebar/components/global_search/components/global_search_default_places_spec.js
index c6126a348f5..f91c8034fe9 100644
--- a/spec/frontend/super_sidebar/components/global_search/components/global_search_default_places_spec.js
+++ b/spec/frontend/super_sidebar/components/global_search/components/global_search_default_places_spec.js
@@ -67,10 +67,26 @@ describe('GlobalSearchDefaultPlaces', () => {
{
text: 'Explore',
href: '/explore',
+ extraAttrs: {
+ 'data-track-action': 'click_command_palette_item',
+ 'data-track-extra': '{"title":"Explore"}',
+ 'data-track-label': 'item_without_id',
+ 'data-track-property': 'nav_panel_unknown',
+ 'data-testid': 'places-item-link',
+ 'data-qa-places-item': 'Explore',
+ },
},
{
text: 'Admin area',
href: '/admin',
+ extraAttrs: {
+ 'data-track-action': 'click_command_palette_item',
+ 'data-track-extra': '{"title":"Admin area"}',
+ 'data-track-label': 'item_without_id',
+ 'data-track-property': 'nav_panel_unknown',
+ 'data-testid': 'places-item-link',
+ 'data-qa-places-item': 'Admin area',
+ },
},
]);
});
diff --git a/spec/frontend/super_sidebar/components/global_search/components/global_search_spec.js b/spec/frontend/super_sidebar/components/global_search/components/global_search_spec.js
index f9a6690a391..038c7a96adc 100644
--- a/spec/frontend/super_sidebar/components/global_search/components/global_search_spec.js
+++ b/spec/frontend/super_sidebar/components/global_search/components/global_search_spec.js
@@ -23,7 +23,6 @@ import {
ICON_SUBGROUP,
SCOPE_TOKEN_MAX_LENGTH,
} from '~/super_sidebar/components/global_search/constants';
-import { SEARCH_GITLAB } from '~/vue_shared/global_search/constants';
import { truncate } from '~/lib/utils/text_utility';
import { visitUrl } from '~/lib/utils/url_utility';
import { ENTER_KEY } from '~/lib/utils/keys';
@@ -52,7 +51,7 @@ describe('GlobalSearchModal', () => {
clearAutocomplete: jest.fn(),
};
- const deafaultMockState = {
+ const defaultMockState = {
searchContext: {
project: MOCK_PROJECT,
group: MOCK_GROUP,
@@ -66,15 +65,14 @@ describe('GlobalSearchModal', () => {
};
const createComponent = ({
- initialState = deafaultMockState,
+ initialState = defaultMockState,
mockGetters = defaultMockGetters,
stubs,
- glFeatures = { commandPalette: false },
...mountOptions
} = {}) => {
const store = new Vuex.Store({
state: {
- ...deafaultMockState,
+ ...defaultMockState,
...initialState,
},
actions: actionSpies,
@@ -89,7 +87,6 @@ describe('GlobalSearchModal', () => {
wrapper = shallowMountExtended(GlobalSearchModal, {
store,
stubs,
- provide: { glFeatures },
...mountOptions,
});
};
@@ -271,49 +268,28 @@ describe('GlobalSearchModal', () => {
});
describe('Command palette', () => {
- describe('when FF `command_palette` is disabled', () => {
+ describe.each([...COMMON_HANDLES, PATH_HANDLE])('when search handle is %s', (handle) => {
beforeEach(() => {
- createComponent();
+ createComponent({
+ initialState: { search: handle },
+ });
});
- it('should not render command mode components', () => {
- expect(findCommandPaletteItems().exists()).toBe(false);
- expect(findFakeSearchInput().exists()).toBe(false);
+ it('should render command mode components', () => {
+ expect(findCommandPaletteItems().exists()).toBe(true);
+ expect(findFakeSearchInput().exists()).toBe(true);
});
- it('should provide default placeholder to the search input', () => {
- expect(findGlobalSearchInput().attributes('placeholder')).toBe(SEARCH_GITLAB);
+ it('should provide an alternative placeholder to the search input', () => {
+ expect(findGlobalSearchInput().attributes('placeholder')).toBe(
+ SEARCH_OR_COMMAND_MODE_PLACEHOLDER,
+ );
});
- });
-
- describe.each([...COMMON_HANDLES, PATH_HANDLE])(
- 'when FF `command_palette` is enabled and search handle is %s',
- (handle) => {
- beforeEach(() => {
- createComponent({
- initialState: { search: handle },
- glFeatures: {
- commandPalette: true,
- },
- });
- });
- it('should render command mode components', () => {
- expect(findCommandPaletteItems().exists()).toBe(true);
- expect(findFakeSearchInput().exists()).toBe(true);
- });
-
- it('should provide an alternative placeholder to the search input', () => {
- expect(findGlobalSearchInput().attributes('placeholder')).toBe(
- SEARCH_OR_COMMAND_MODE_PLACEHOLDER,
- );
- });
-
- it('should not render the scope token', () => {
- expect(findScopeToken().exists()).toBe(false);
- });
- },
- );
+ it('should not render the scope token', () => {
+ expect(findScopeToken().exists()).toBe(false);
+ });
+ });
});
});
@@ -373,9 +349,6 @@ describe('GlobalSearchModal', () => {
beforeEach(() => {
createComponent({
initialState: { search: '>' },
- glFeatures: {
- commandPalette: true,
- },
});
submitSearch();
});
diff --git a/spec/frontend/super_sidebar/components/global_search/mock_data.js b/spec/frontend/super_sidebar/components/global_search/mock_data.js
index dfa8b458844..61ddfb6cae1 100644
--- a/spec/frontend/super_sidebar/components/global_search/mock_data.js
+++ b/spec/frontend/super_sidebar/components/global_search/mock_data.js
@@ -109,6 +109,10 @@ export const MOCK_SCOPED_SEARCH_OPTIONS_DEF = [
scopeCategory: PROJECTS_CATEGORY,
icon: ICON_PROJECT,
href: MOCK_PROJECT.path,
+ extraAttrs: {
+ 'data-track-action': 'click_command_palette_item',
+ 'data-track-label': 'scoped_in_project',
+ },
},
{
text: 'scoped-in-group',
@@ -116,11 +120,19 @@ export const MOCK_SCOPED_SEARCH_OPTIONS_DEF = [
scopeCategory: GROUPS_CATEGORY,
icon: ICON_GROUP,
href: MOCK_GROUP.path,
+ extraAttrs: {
+ 'data-track-action': 'click_command_palette_item',
+ 'data-track-label': 'scoped_in_group',
+ },
},
{
text: 'scoped-in-all',
description: MSG_IN_ALL_GITLAB,
href: MOCK_ALL_PATH,
+ extraAttrs: {
+ 'data-track-action': 'click_command_palette_item',
+ 'data-track-label': 'scoped_in_all',
+ },
},
];
export const MOCK_SCOPED_SEARCH_OPTIONS = [
@@ -263,6 +275,10 @@ export const MOCK_GROUPED_AUTOCOMPLETE_OPTIONS = [
avatar_size: 32,
entity_id: 1,
entity_name: 'MockGroup1',
+ extraAttrs: {
+ 'data-track-action': 'click_command_palette_item',
+ 'data-track-label': 'groups',
+ },
},
],
},
@@ -281,6 +297,10 @@ export const MOCK_GROUPED_AUTOCOMPLETE_OPTIONS = [
avatar_size: 32,
entity_id: 1,
entity_name: 'MockProject1',
+ extraAttrs: {
+ 'data-track-action': 'click_command_palette_item',
+ 'data-track-label': 'projects',
+ },
},
{
category: 'Projects',
@@ -294,6 +314,10 @@ export const MOCK_GROUPED_AUTOCOMPLETE_OPTIONS = [
avatar_size: 32,
entity_id: 2,
entity_name: 'MockProject2',
+ extraAttrs: {
+ 'data-track-action': 'click_command_palette_item',
+ 'data-track-label': 'projects',
+ },
},
],
},
@@ -307,6 +331,10 @@ export const MOCK_GROUPED_AUTOCOMPLETE_OPTIONS = [
href: 'help/gitlab',
avatar_size: 16,
entity_name: 'GitLab Help',
+ extraAttrs: {
+ 'data-track-action': 'click_command_palette_item',
+ 'data-track-label': 'help',
+ },
},
],
},
@@ -325,6 +353,10 @@ export const MOCK_SORTED_AUTOCOMPLETE_OPTIONS = [
avatar_size: 32,
entity_id: 1,
entity_name: 'MockGroup1',
+ extraAttrs: {
+ 'data-track-action': 'click_command_palette_item',
+ 'data-track-label': 'groups',
+ },
},
{
avatar_size: 32,
@@ -338,6 +370,10 @@ export const MOCK_SORTED_AUTOCOMPLETE_OPTIONS = [
namespace: 'Gitlab Org / MockProject1',
text: 'MockProject1',
value: 'MockProject1',
+ extraAttrs: {
+ 'data-track-action': 'click_command_palette_item',
+ 'data-track-label': 'projects',
+ },
},
{
avatar_size: 32,
@@ -351,6 +387,10 @@ export const MOCK_SORTED_AUTOCOMPLETE_OPTIONS = [
namespace: 'Gitlab Org / MockProject2',
text: 'MockProject2',
value: 'MockProject2',
+ extraAttrs: {
+ 'data-track-action': 'click_command_palette_item',
+ 'data-track-label': 'projects',
+ },
},
{
avatar_size: 16,
@@ -359,6 +399,10 @@ export const MOCK_SORTED_AUTOCOMPLETE_OPTIONS = [
label: 'GitLab Help',
text: 'GitLab Help',
href: 'help/gitlab',
+ extraAttrs: {
+ 'data-track-action': 'click_command_palette_item',
+ 'data-track-label': 'help',
+ },
},
];
diff --git a/spec/frontend/super_sidebar/components/global_search/utils_spec.js b/spec/frontend/super_sidebar/components/global_search/utils_spec.js
index 3b12063e733..3c30445e936 100644
--- a/spec/frontend/super_sidebar/components/global_search/utils_spec.js
+++ b/spec/frontend/super_sidebar/components/global_search/utils_spec.js
@@ -13,48 +13,58 @@ import {
describe('getFormattedItem', () => {
describe.each`
- item | avatarSize | searchContext | entityId | entityName
- ${{ category: PROJECTS_CATEGORY, label: 'project1' }} | ${LARGE_AVATAR_PX} | ${{ project: { id: 29 } }} | ${29} | ${'project1'}
- ${{ category: GROUPS_CATEGORY, label: 'project1' }} | ${LARGE_AVATAR_PX} | ${{ group: { id: 12 } }} | ${12} | ${'project1'}
- ${{ category: 'Help', label: 'project1' }} | ${SMALL_AVATAR_PX} | ${null} | ${undefined} | ${'project1'}
- ${{ category: 'Settings', label: 'project1' }} | ${SMALL_AVATAR_PX} | ${null} | ${undefined} | ${'project1'}
- ${{ category: GROUPS_CATEGORY, value: 'group1', label: 'Group 1' }} | ${LARGE_AVATAR_PX} | ${{ group: { id: 1, name: 'test1' } }} | ${1} | ${'group1'}
- ${{ category: PROJECTS_CATEGORY, value: 'group2', label: 'Group2' }} | ${LARGE_AVATAR_PX} | ${{ project: { id: 2, name: 'test2' } }} | ${2} | ${'group2'}
- ${{ category: ISSUES_CATEGORY }} | ${SMALL_AVATAR_PX} | ${{ project: { id: 3, name: 'test3' } }} | ${3} | ${'test3'}
- ${{ category: MERGE_REQUEST_CATEGORY }} | ${SMALL_AVATAR_PX} | ${{ project: { id: 4, name: 'test4' } }} | ${4} | ${'test4'}
- ${{ category: RECENT_EPICS_CATEGORY }} | ${SMALL_AVATAR_PX} | ${{ group: { id: 5, name: 'test5' } }} | ${5} | ${'test5'}
- ${{ category: GROUPS_CATEGORY, group_id: 6, group_name: 'test6' }} | ${LARGE_AVATAR_PX} | ${null} | ${6} | ${'test6'}
- ${{ category: PROJECTS_CATEGORY, project_id: 7, project_name: 'test7' }} | ${LARGE_AVATAR_PX} | ${null} | ${7} | ${'test7'}
- ${{ category: ISSUES_CATEGORY, project_id: 8, project_name: 'test8' }} | ${SMALL_AVATAR_PX} | ${null} | ${8} | ${'test8'}
- ${{ category: MERGE_REQUEST_CATEGORY, project_id: 9, project_name: 'test9' }} | ${SMALL_AVATAR_PX} | ${null} | ${9} | ${'test9'}
- ${{ category: RECENT_EPICS_CATEGORY, group_id: 10, group_name: 'test10' }} | ${SMALL_AVATAR_PX} | ${null} | ${10} | ${'test10'}
- ${{ category: GROUPS_CATEGORY, group_id: 11, group_name: 'test11' }} | ${LARGE_AVATAR_PX} | ${{ group: { id: 1, name: 'test1' } }} | ${11} | ${'test11'}
- ${{ category: PROJECTS_CATEGORY, project_id: 12, project_name: 'test12' }} | ${LARGE_AVATAR_PX} | ${{ project: { id: 2, name: 'test2' } }} | ${12} | ${'test12'}
- ${{ category: ISSUES_CATEGORY, project_id: 13, project_name: 'test13' }} | ${SMALL_AVATAR_PX} | ${{ project: { id: 3, name: 'test3' } }} | ${13} | ${'test13'}
- ${{ category: MERGE_REQUEST_CATEGORY, project_id: 14, project_name: 'test14' }} | ${SMALL_AVATAR_PX} | ${{ project: { id: 4, name: 'test4' } }} | ${14} | ${'test14'}
- ${{ category: RECENT_EPICS_CATEGORY, group_id: 15, group_name: 'test15' }} | ${SMALL_AVATAR_PX} | ${{ group: { id: 5, name: 'test5' } }} | ${15} | ${'test15'}
- `('formats the item', ({ item, avatarSize, searchContext, entityId, entityName }) => {
- describe(`when item is ${JSON.stringify(item)}`, () => {
- let formattedItem;
- beforeEach(() => {
- formattedItem = getFormattedItem(item, searchContext);
- });
+ item | avatarSize | searchContext | entityId | entityName | trackingLabel
+ ${{ category: PROJECTS_CATEGORY, label: 'project1' }} | ${LARGE_AVATAR_PX} | ${{ project: { id: 29 } }} | ${29} | ${'project1'} | ${'projects'}
+ ${{ category: GROUPS_CATEGORY, label: 'project1' }} | ${LARGE_AVATAR_PX} | ${{ group: { id: 12 } }} | ${12} | ${'project1'} | ${'groups'}
+ ${{ category: 'Help', label: 'project1' }} | ${SMALL_AVATAR_PX} | ${null} | ${undefined} | ${'project1'} | ${'help'}
+ ${{ category: 'Settings', label: 'project1' }} | ${SMALL_AVATAR_PX} | ${null} | ${undefined} | ${'project1'} | ${'settings'}
+ ${{ category: GROUPS_CATEGORY, value: 'group1', label: 'Group 1' }} | ${LARGE_AVATAR_PX} | ${{ group: { id: 1, name: 'test1' } }} | ${1} | ${'group1'} | ${'groups'}
+ ${{ category: PROJECTS_CATEGORY, value: 'group2', label: 'Group2' }} | ${LARGE_AVATAR_PX} | ${{ project: { id: 2, name: 'test2' } }} | ${2} | ${'group2'} | ${'projects'}
+ ${{ category: ISSUES_CATEGORY }} | ${SMALL_AVATAR_PX} | ${{ project: { id: 3, name: 'test3' } }} | ${3} | ${'test3'} | ${'recent_issues'}
+ ${{ category: MERGE_REQUEST_CATEGORY }} | ${SMALL_AVATAR_PX} | ${{ project: { id: 4, name: 'test4' } }} | ${4} | ${'test4'} | ${'recent_merge_requests'}
+ ${{ category: RECENT_EPICS_CATEGORY }} | ${SMALL_AVATAR_PX} | ${{ group: { id: 5, name: 'test5' } }} | ${5} | ${'test5'} | ${'recent_epics'}
+ ${{ category: GROUPS_CATEGORY, group_id: 6, group_name: 'test6' }} | ${LARGE_AVATAR_PX} | ${null} | ${6} | ${'test6'} | ${'groups'}
+ ${{ category: PROJECTS_CATEGORY, project_id: 7, project_name: 'test7' }} | ${LARGE_AVATAR_PX} | ${null} | ${7} | ${'test7'} | ${'projects'}
+ ${{ category: ISSUES_CATEGORY, project_id: 8, project_name: 'test8' }} | ${SMALL_AVATAR_PX} | ${null} | ${8} | ${'test8'} | ${'recent_issues'}
+ ${{ category: MERGE_REQUEST_CATEGORY, project_id: 9, project_name: 'test9' }} | ${SMALL_AVATAR_PX} | ${null} | ${9} | ${'test9'} | ${'recent_merge_requests'}
+ ${{ category: RECENT_EPICS_CATEGORY, group_id: 10, group_name: 'test10' }} | ${SMALL_AVATAR_PX} | ${null} | ${10} | ${'test10'} | ${'recent_epics'}
+ ${{ category: GROUPS_CATEGORY, group_id: 11, group_name: 'test11' }} | ${LARGE_AVATAR_PX} | ${{ group: { id: 1, name: 'test1' } }} | ${11} | ${'test11'} | ${'groups'}
+ ${{ category: PROJECTS_CATEGORY, project_id: 12, project_name: 'test12' }} | ${LARGE_AVATAR_PX} | ${{ project: { id: 2, name: 'test2' } }} | ${12} | ${'test12'} | ${'projects'}
+ ${{ category: ISSUES_CATEGORY, project_id: 13, project_name: 'test13' }} | ${SMALL_AVATAR_PX} | ${{ project: { id: 3, name: 'test3' } }} | ${13} | ${'test13'} | ${'recent_issues'}
+ ${{ category: MERGE_REQUEST_CATEGORY, project_id: 14, project_name: 'test14' }} | ${SMALL_AVATAR_PX} | ${{ project: { id: 4, name: 'test4' } }} | ${14} | ${'test14'} | ${'recent_merge_requests'}
+ ${{ category: RECENT_EPICS_CATEGORY, group_id: 15, group_name: 'test15' }} | ${SMALL_AVATAR_PX} | ${{ group: { id: 5, name: 'test5' } }} | ${15} | ${'test15'} | ${'recent_epics'}
+ `(
+ 'formats the item',
+ ({ item, avatarSize, searchContext, entityId, entityName, trackingLabel }) => {
+ describe(`when item is ${JSON.stringify(item)}`, () => {
+ let formattedItem;
+ beforeEach(() => {
+ formattedItem = getFormattedItem(item, searchContext);
+ });
- it(`should set text to ${item.value || item.label}`, () => {
- expect(formattedItem.text).toBe(item.value || item.label);
- });
+ it(`should set text to ${item.value || item.label}`, () => {
+ expect(formattedItem.text).toBe(item.value || item.label);
+ });
- it(`should set avatarSize to ${avatarSize}`, () => {
- expect(formattedItem.avatar_size).toBe(avatarSize);
- });
+ it(`should set avatarSize to ${avatarSize}`, () => {
+ expect(formattedItem.avatar_size).toBe(avatarSize);
+ });
- it(`should set avatar entityId to ${entityId}`, () => {
- expect(formattedItem.entity_id).toBe(entityId);
- });
+ it(`should set avatar entityId to ${entityId}`, () => {
+ expect(formattedItem.entity_id).toBe(entityId);
+ });
+
+ it(`should set avatar entityName to ${entityName}`, () => {
+ expect(formattedItem.entity_name).toBe(entityName);
+ });
- it(`should set avatar entityName to ${entityName}`, () => {
- expect(formattedItem.entity_name).toBe(entityName);
+ it('should add tracking label', () => {
+ expect(formattedItem.extraAttrs).toEqual({
+ 'data-track-action': 'click_command_palette_item',
+ 'data-track-label': trackingLabel,
+ });
+ });
});
- });
- });
+ },
+ );
});
diff --git a/spec/frontend/super_sidebar/components/groups_list_spec.js b/spec/frontend/super_sidebar/components/groups_list_spec.js
deleted file mode 100644
index 4fa3303c12f..00000000000
--- a/spec/frontend/super_sidebar/components/groups_list_spec.js
+++ /dev/null
@@ -1,90 +0,0 @@
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import { s__ } from '~/locale';
-import GroupsList from '~/super_sidebar/components/groups_list.vue';
-import SearchResults from '~/super_sidebar/components/search_results.vue';
-import FrequentItemsList from '~/super_sidebar/components/frequent_items_list.vue';
-import NavItem from '~/super_sidebar/components/nav_item.vue';
-import { MAX_FREQUENT_GROUPS_COUNT } from '~/super_sidebar/constants';
-
-const username = 'root';
-const viewAllLink = '/path/to/groups';
-const storageKey = `${username}/frequent-groups`;
-
-describe('GroupsList component', () => {
- let wrapper;
-
- const findSearchResults = () => wrapper.findComponent(SearchResults);
- const findFrequentItemsList = () => wrapper.findComponent(FrequentItemsList);
- const findViewAllLink = () => wrapper.findComponent(NavItem);
-
- const itRendersViewAllItem = () => {
- it('renders the "View all..." item', () => {
- const link = findViewAllLink();
-
- expect(link.props('item')).toEqual({
- icon: 'group',
- link: viewAllLink,
- title: s__('Navigation|View all your groups'),
- });
- expect(link.props('linkClasses')).toEqual({ 'dashboard-shortcuts-groups': true });
- });
- };
-
- const createWrapper = (props = {}) => {
- wrapper = shallowMountExtended(GroupsList, {
- propsData: {
- username,
- viewAllLink,
- ...props,
- },
- });
- };
-
- describe('when displaying search results', () => {
- const searchResults = ['A search result'];
-
- beforeEach(() => {
- createWrapper({
- isSearch: true,
- searchResults,
- });
- });
-
- it('renders the search results component', () => {
- expect(findSearchResults().exists()).toBe(true);
- expect(findFrequentItemsList().exists()).toBe(false);
- });
-
- it('passes the correct props to the search results component', () => {
- expect(findSearchResults().props()).toEqual({
- title: s__('Navigation|Groups'),
- noResultsText: s__('Navigation|No group matches found'),
- searchResults,
- });
- });
-
- itRendersViewAllItem();
- });
-
- describe('when displaying frequent groups', () => {
- beforeEach(() => {
- createWrapper();
- });
-
- it('renders the frequent items list', () => {
- expect(findFrequentItemsList().exists()).toBe(true);
- expect(findSearchResults().exists()).toBe(false);
- });
-
- it('passes the correct props to the frequent items list', () => {
- expect(findFrequentItemsList().props()).toEqual({
- title: s__('Navigation|Frequently visited groups'),
- storageKey,
- maxItems: MAX_FREQUENT_GROUPS_COUNT,
- pristineText: s__('Navigation|Groups you visit often will appear here.'),
- });
- });
-
- itRendersViewAllItem();
- });
-});
diff --git a/spec/frontend/super_sidebar/components/items_list_spec.js b/spec/frontend/super_sidebar/components/items_list_spec.js
deleted file mode 100644
index 8e00984f500..00000000000
--- a/spec/frontend/super_sidebar/components/items_list_spec.js
+++ /dev/null
@@ -1,63 +0,0 @@
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import ItemsList from '~/super_sidebar/components/items_list.vue';
-import NavItem from '~/super_sidebar/components/nav_item.vue';
-import { cachedFrequentProjects } from '../mock_data';
-
-const mockItems = JSON.parse(cachedFrequentProjects);
-const [firstMockedProject] = mockItems;
-
-describe('ItemsList component', () => {
- let wrapper;
-
- const findNavItems = () => wrapper.findAllComponents(NavItem);
-
- const createWrapper = ({ props = {}, slots = {} } = {}) => {
- wrapper = shallowMountExtended(ItemsList, {
- propsData: {
- ...props,
- },
- slots,
- });
- };
-
- it('does not render nav items when there are no items', () => {
- createWrapper();
-
- expect(findNavItems().length).toBe(0);
- });
-
- it('renders one nav item per item', () => {
- createWrapper({
- props: {
- items: mockItems,
- },
- });
-
- expect(findNavItems().length).not.toBe(0);
- expect(findNavItems().length).toBe(mockItems.length);
- });
-
- it('passes the correct props to the nav items', () => {
- createWrapper({
- props: {
- items: mockItems,
- },
- });
- const firstNavItem = findNavItems().at(0);
-
- expect(firstNavItem.props('item')).toEqual(firstMockedProject);
- });
-
- it('renders the `view-all-items` slot', () => {
- const testId = 'view-all-items';
- createWrapper({
- slots: {
- 'view-all-items': {
- template: `<div data-testid="${testId}" />`,
- },
- },
- });
-
- expect(wrapper.findByTestId(testId).exists()).toBe(true);
- });
-});
diff --git a/spec/frontend/super_sidebar/components/menu_section_spec.js b/spec/frontend/super_sidebar/components/menu_section_spec.js
index 288e317d4c6..e76bb699301 100644
--- a/spec/frontend/super_sidebar/components/menu_section_spec.js
+++ b/spec/frontend/super_sidebar/components/menu_section_spec.js
@@ -79,39 +79,55 @@ describe('MenuSection component', () => {
});
describe('when hasFlyout is true', () => {
- it('is rendered', () => {
+ it('is not yet rendered', () => {
createWrapper({ title: 'Asdf' }, { 'has-flyout': true });
- expect(findFlyout().exists()).toBe(true);
+ expect(findFlyout().exists()).toBe(false);
});
describe('on mouse hover', () => {
describe('when section is expanded', () => {
- it('is not shown', async () => {
+ it('is not rendered', async () => {
createWrapper({ title: 'Asdf' }, { 'has-flyout': true, expanded: true });
await findButton().trigger('pointerover', { pointerType: 'mouse' });
- expect(findFlyout().isVisible()).toBe(false);
+ expect(findFlyout().exists()).toBe(false);
});
});
describe('when section is not expanded', () => {
- it('is shown', async () => {
- createWrapper({ title: 'Asdf' }, { 'has-flyout': true, expanded: false });
- await findButton().trigger('pointerover', { pointerType: 'mouse' });
- expect(findFlyout().isVisible()).toBe(true);
+ describe('when section has no items', () => {
+ it('is not rendered', async () => {
+ createWrapper({ title: 'Asdf' }, { 'has-flyout': true, expanded: false });
+ await findButton().trigger('pointerover', { pointerType: 'mouse' });
+ expect(findFlyout().exists()).toBe(false);
+ });
+ });
+
+ describe('when section has items', () => {
+ it('is rendered and shown', async () => {
+ createWrapper(
+ { title: 'Asdf', items: [{ title: 'Item1', href: '/item1' }] },
+ { 'has-flyout': true, expanded: false },
+ );
+ await findButton().trigger('pointerover', { pointerType: 'mouse' });
+ expect(findFlyout().isVisible()).toBe(true);
+ });
});
});
});
describe('when section gets closed', () => {
beforeEach(async () => {
- createWrapper({ title: 'Asdf' }, { expanded: true, 'has-flyout': true });
+ createWrapper(
+ { title: 'Asdf', items: [{ title: 'Item1', href: '/item1' }] },
+ { expanded: true, 'has-flyout': true },
+ );
await findButton().trigger('click');
await findButton().trigger('pointerover', { pointerType: 'mouse' });
});
it('shows the flyout only after section title gets hovered out and in again', async () => {
expect(findCollapse().props('visible')).toBe(false);
- expect(findFlyout().isVisible()).toBe(false);
+ expect(findFlyout().exists()).toBe(false);
await findButton().trigger('pointerleave');
await findButton().trigger('pointerover', { pointerType: 'mouse' });
diff --git a/spec/frontend/super_sidebar/components/nav_item_spec.js b/spec/frontend/super_sidebar/components/nav_item_spec.js
index f41f6954ed1..89d774c4b43 100644
--- a/spec/frontend/super_sidebar/components/nav_item_spec.js
+++ b/spec/frontend/super_sidebar/components/nav_item_spec.js
@@ -1,5 +1,6 @@
-import { GlBadge } from '@gitlab/ui';
+import { GlBadge, GlButton, GlAvatar } from '@gitlab/ui';
import { RouterLinkStub } from '@vue/test-utils';
+import { nextTick } from 'vue';
import { mountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
import NavItem from '~/super_sidebar/components/nav_item.vue';
import NavItemRouterLink from '~/super_sidebar/components/nav_item_router_link.vue';
@@ -13,8 +14,10 @@ import {
describe('NavItem component', () => {
let wrapper;
+ const findAvatar = () => wrapper.findComponent(GlAvatar);
const findLink = () => wrapper.findByTestId('nav-item-link');
const findPill = () => wrapper.findComponent(GlBadge);
+ const findPinButton = () => wrapper.findComponent(GlButton);
const findNavItemRouterLink = () => extendedWrapper(wrapper.findComponent(NavItemRouterLink));
const findNavItemLink = () => extendedWrapper(wrapper.findComponent(NavItemLink));
@@ -59,6 +62,66 @@ describe('NavItem component', () => {
);
});
+ describe('pins', () => {
+ describe('when pins are not supported', () => {
+ it('does not render pin button', () => {
+ createWrapper({
+ item: { title: 'Foo' },
+ provide: {
+ panelSupportsPins: false,
+ },
+ });
+
+ expect(findPinButton().exists()).toBe(false);
+ });
+ });
+
+ describe('when pins are supported', () => {
+ beforeEach(() => {
+ createWrapper({
+ item: { title: 'Foo' },
+ provide: {
+ panelSupportsPins: true,
+ },
+ });
+ });
+
+ it('renders pin button', () => {
+ expect(findPinButton().exists()).toBe(true);
+ });
+
+ it('contains an aria-label', () => {
+ expect(findPinButton().attributes('aria-label')).toBe('Pin Foo');
+ });
+
+ it('toggles pointer events on after CSS fade-in', async () => {
+ const pinButton = findPinButton();
+
+ expect(pinButton.classes()).toContain('gl-pointer-events-none');
+
+ wrapper.trigger('mouseenter');
+ pinButton.vm.$emit('transitionend');
+ await nextTick();
+
+ expect(pinButton.classes()).not.toContain('gl-pointer-events-none');
+ });
+
+ it('does not toggle pointer events if mouse leaves before CSS fade-in ends', async () => {
+ const pinButton = findPinButton();
+
+ expect(pinButton.classes()).toContain('gl-pointer-events-none');
+
+ wrapper.trigger('mouseenter');
+ wrapper.trigger('mousemove');
+ wrapper.trigger('mouseleave');
+ pinButton.vm.$emit('transitionend');
+ await nextTick();
+
+ expect(pinButton.classes()).toContain('gl-pointer-events-none');
+ });
+ });
+ });
+
it('applies custom link classes', () => {
const customClass = 'customClass';
createWrapper({
@@ -153,4 +216,36 @@ describe('NavItem component', () => {
});
});
});
+
+ describe('when `item` prop has `entity_id` attribute', () => {
+ it('renders an avatar', () => {
+ createWrapper({
+ item: { title: 'Foo', entity_id: 123, avatar: '/avatar.png', avatar_shape: 'circle' },
+ });
+
+ expect(findAvatar().props()).toMatchObject({
+ entityId: 123,
+ shape: 'circle',
+ src: '/avatar.png',
+ });
+ });
+ });
+
+ describe('when `item.is_active` is true', () => {
+ it('scrolls into view', () => {
+ createWrapper({
+ item: { is_active: true },
+ });
+ expect(wrapper.element.scrollIntoView).toHaveBeenNthCalledWith(1, false);
+ });
+ });
+
+ describe('when `item.is_active` is false', () => {
+ it('does not scroll into view', () => {
+ createWrapper({
+ item: { is_active: false },
+ });
+ expect(wrapper.element.scrollIntoView).not.toHaveBeenCalled();
+ });
+ });
});
diff --git a/spec/frontend/super_sidebar/components/pinned_section_spec.js b/spec/frontend/super_sidebar/components/pinned_section_spec.js
index 00cc7cf29c9..fe1653f1177 100644
--- a/spec/frontend/super_sidebar/components/pinned_section_spec.js
+++ b/spec/frontend/super_sidebar/components/pinned_section_spec.js
@@ -87,4 +87,33 @@ describe('PinnedSection component', () => {
});
});
});
+
+ describe('ambiguous settings names', () => {
+ it('get renamed to be unambiguous', () => {
+ createWrapper({
+ items: [
+ { title: 'CI/CD', id: 'ci_cd' },
+ { title: 'Merge requests', id: 'merge_request_settings' },
+ { title: 'Monitor', id: 'monitor' },
+ { title: 'Repository', id: 'repository' },
+ { title: 'Repository', id: 'code' },
+ { title: 'Something else', id: 'not_a_setting' },
+ ],
+ });
+
+ expect(
+ wrapper
+ .findComponent(MenuSection)
+ .props('item')
+ .items.map((i) => i.title),
+ ).toEqual([
+ 'CI/CD settings',
+ 'Merge requests settings',
+ 'Monitor settings',
+ 'Repository settings',
+ 'Repository',
+ 'Something else',
+ ]);
+ });
+ });
});
diff --git a/spec/frontend/super_sidebar/components/projects_list_spec.js b/spec/frontend/super_sidebar/components/projects_list_spec.js
deleted file mode 100644
index 93a414e1e8c..00000000000
--- a/spec/frontend/super_sidebar/components/projects_list_spec.js
+++ /dev/null
@@ -1,85 +0,0 @@
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import { s__ } from '~/locale';
-import ProjectsList from '~/super_sidebar/components/projects_list.vue';
-import SearchResults from '~/super_sidebar/components/search_results.vue';
-import FrequentItemsList from '~/super_sidebar/components/frequent_items_list.vue';
-import NavItem from '~/super_sidebar/components/nav_item.vue';
-import { MAX_FREQUENT_PROJECTS_COUNT } from '~/super_sidebar/constants';
-
-const username = 'root';
-const viewAllLink = '/path/to/projects';
-const storageKey = `${username}/frequent-projects`;
-
-describe('ProjectsList component', () => {
- let wrapper;
-
- const findSearchResults = () => wrapper.findComponent(SearchResults);
- const findFrequentItemsList = () => wrapper.findComponent(FrequentItemsList);
- const findViewAllLink = () => wrapper.findComponent(NavItem);
-
- const itRendersViewAllItem = () => {
- it('renders the "View all..." item', () => {
- const link = findViewAllLink();
-
- expect(link.props('item')).toEqual({
- icon: 'project',
- link: viewAllLink,
- title: s__('Navigation|View all your projects'),
- });
- expect(link.props('linkClasses')).toEqual({ 'dashboard-shortcuts-projects': true });
- });
- };
-
- const createWrapper = (props = {}) => {
- wrapper = shallowMountExtended(ProjectsList, {
- propsData: {
- username,
- viewAllLink,
- ...props,
- },
- });
- };
-
- describe('when displaying search results', () => {
- const searchResults = ['A search result'];
-
- beforeEach(() => {
- createWrapper({
- isSearch: true,
- searchResults,
- });
- });
-
- it('renders the search results component', () => {
- expect(findSearchResults().exists()).toBe(true);
- expect(findFrequentItemsList().exists()).toBe(false);
- });
-
- it('passes the correct props to the search results component', () => {
- expect(findSearchResults().props()).toEqual({
- title: s__('Navigation|Projects'),
- noResultsText: s__('Navigation|No project matches found'),
- searchResults,
- });
- });
-
- itRendersViewAllItem();
- });
-
- describe('when displaying frequent projects', () => {
- beforeEach(() => {
- createWrapper();
- });
-
- it('passes the correct props to the frequent items list', () => {
- expect(findFrequentItemsList().props()).toEqual({
- title: s__('Navigation|Frequently visited projects'),
- storageKey,
- maxItems: MAX_FREQUENT_PROJECTS_COUNT,
- pristineText: s__('Navigation|Projects you visit often will appear here.'),
- });
- });
-
- itRendersViewAllItem();
- });
-});
diff --git a/spec/frontend/super_sidebar/components/search_results_spec.js b/spec/frontend/super_sidebar/components/search_results_spec.js
deleted file mode 100644
index daec5c2a9b4..00000000000
--- a/spec/frontend/super_sidebar/components/search_results_spec.js
+++ /dev/null
@@ -1,69 +0,0 @@
-import { GlCollapse } from '@gitlab/ui';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import { s__ } from '~/locale';
-import SearchResults from '~/super_sidebar/components/search_results.vue';
-import ItemsList from '~/super_sidebar/components/items_list.vue';
-import { stubComponent } from 'helpers/stub_component';
-
-const title = s__('Navigation|PROJECTS');
-const noResultsText = s__('Navigation|No project matches found');
-
-describe('SearchResults component', () => {
- let wrapper;
-
- const findSearchResultsToggle = () => wrapper.findByTestId('search-results-toggle');
- const findCollapsibleSection = () => wrapper.findComponent(GlCollapse);
- const findItemsList = () => wrapper.findComponent(ItemsList);
- const findEmptyText = () => wrapper.findByTestId('empty-text');
-
- const createWrapper = ({ props = {} } = {}) => {
- wrapper = shallowMountExtended(SearchResults, {
- propsData: {
- title,
- noResultsText,
- ...props,
- },
- stubs: {
- GlCollapse: stubComponent(GlCollapse, {
- props: ['visible'],
- }),
- },
- });
- };
-
- describe('default state', () => {
- beforeEach(() => {
- createWrapper();
- });
-
- it("renders the list's title", () => {
- expect(findSearchResultsToggle().text()).toBe(title);
- });
-
- it('is expanded', () => {
- expect(findCollapsibleSection().props('visible')).toBe(true);
- });
-
- it('renders the empty text', () => {
- expect(findEmptyText().exists()).toBe(true);
- expect(findEmptyText().text()).toBe(noResultsText);
- });
- });
-
- describe('when displaying search results', () => {
- it('shows search results', () => {
- const searchResults = [{ id: 1 }];
- createWrapper({ props: { isSearch: true, searchResults } });
-
- expect(findItemsList().props('items')[0]).toEqual(searchResults[0]);
- });
-
- it('shows the no results text if search results are empty', () => {
- const searchResults = [];
- createWrapper({ props: { isSearch: true, searchResults } });
-
- expect(findItemsList().props('items').length).toEqual(0);
- expect(findEmptyText().text()).toBe(noResultsText);
- });
- });
-});
diff --git a/spec/frontend/super_sidebar/components/sidebar_hover_peek_behavior_spec.js b/spec/frontend/super_sidebar/components/sidebar_hover_peek_behavior_spec.js
new file mode 100644
index 00000000000..75b834ee7c9
--- /dev/null
+++ b/spec/frontend/super_sidebar/components/sidebar_hover_peek_behavior_spec.js
@@ -0,0 +1,213 @@
+import { mount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import {
+ SUPER_SIDEBAR_PEEK_OPEN_DELAY,
+ SUPER_SIDEBAR_PEEK_CLOSE_DELAY,
+ JS_TOGGLE_EXPAND_CLASS,
+ SUPER_SIDEBAR_PEEK_STATE_CLOSED as STATE_CLOSED,
+ SUPER_SIDEBAR_PEEK_STATE_WILL_OPEN as STATE_WILL_OPEN,
+ SUPER_SIDEBAR_PEEK_STATE_OPEN as STATE_OPEN,
+ SUPER_SIDEBAR_PEEK_STATE_WILL_CLOSE as STATE_WILL_CLOSE,
+} from '~/super_sidebar/constants';
+import SidebarHoverPeek from '~/super_sidebar/components/sidebar_hover_peek_behavior.vue';
+import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
+import { moveMouse, mouseEnter, mouseLeave, moveMouseOutOfDocument } from '../mocks';
+
+// This is measured at runtime in the browser, but statically defined here
+// since Jest does not do layout/styling.
+const X_SIDEBAR_EDGE = 10;
+
+jest.mock('~/lib/utils/css_utils', () => ({
+ getCssClassDimensions: () => ({ width: X_SIDEBAR_EDGE }),
+}));
+
+describe('SidebarHoverPeek component', () => {
+ let wrapper;
+ let toggle;
+ let trackingSpy = null;
+
+ const createComponent = (props = { isMouseOverSidebar: false }) => {
+ wrapper = mount(SidebarHoverPeek, {
+ propsData: props,
+ });
+
+ return nextTick();
+ };
+
+ const lastNChangeEvents = (n = 1) => wrapper.emitted('change').slice(-n).flat();
+
+ beforeEach(() => {
+ toggle = document.createElement('button');
+ toggle.classList.add(JS_TOGGLE_EXPAND_CLASS);
+ document.body.appendChild(toggle);
+ trackingSpy = mockTracking(undefined, undefined, jest.spyOn);
+ });
+
+ afterEach(() => {
+ unmockTracking();
+ // We destroy the wrapper ourselves as that needs to happen before the toggle is removed.
+ // eslint-disable-next-line @gitlab/vtu-no-explicit-wrapper-destroy
+ wrapper.destroy();
+ toggle?.remove();
+ });
+
+ it('begins in the closed state', async () => {
+ await createComponent();
+
+ expect(lastNChangeEvents(Infinity)).toEqual([STATE_CLOSED]);
+ });
+
+ describe('when mouse enters the toggle', () => {
+ beforeEach(async () => {
+ await createComponent();
+ mouseEnter(toggle);
+ });
+
+ it('does not emit duplicate events in a region', () => {
+ mouseEnter(toggle);
+
+ expect(lastNChangeEvents(Infinity)).toEqual([STATE_CLOSED, STATE_WILL_OPEN]);
+ });
+
+ it('transitions to will-open when hovering the toggle', () => {
+ expect(lastNChangeEvents(1)).toEqual([STATE_WILL_OPEN]);
+ });
+
+ describe('when transitioning away from the will-open state', () => {
+ beforeEach(() => {
+ jest.advanceTimersByTime(SUPER_SIDEBAR_PEEK_OPEN_DELAY - 1);
+ });
+
+ it('transitions to open after delay', () => {
+ expect(lastNChangeEvents(1)).toEqual([STATE_WILL_OPEN]);
+
+ jest.advanceTimersByTime(1);
+
+ expect(lastNChangeEvents(2)).toEqual([STATE_WILL_OPEN, STATE_OPEN]);
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'nav_hover_peek', {
+ label: 'nav_sidebar_toggle',
+ property: 'nav_sidebar',
+ });
+ });
+
+ it('cancels transition to open if mouse out of toggle', () => {
+ mouseLeave(toggle);
+ jest.runOnlyPendingTimers();
+
+ expect(lastNChangeEvents(3)).toEqual([STATE_WILL_OPEN, STATE_WILL_CLOSE, STATE_CLOSED]);
+ });
+
+ it('transitions to closed if cursor leaves document', () => {
+ moveMouseOutOfDocument();
+
+ expect(lastNChangeEvents(2)).toEqual([STATE_WILL_OPEN, STATE_CLOSED]);
+ });
+ });
+
+ describe('when transitioning away from the will-close state', () => {
+ beforeEach(() => {
+ jest.runOnlyPendingTimers();
+ moveMouse(X_SIDEBAR_EDGE);
+ jest.advanceTimersByTime(SUPER_SIDEBAR_PEEK_CLOSE_DELAY - 1);
+ });
+
+ it('transitions to closed after delay', () => {
+ expect(lastNChangeEvents(1)).toEqual([STATE_WILL_CLOSE]);
+
+ jest.advanceTimersByTime(1);
+
+ expect(lastNChangeEvents(2)).toEqual([STATE_WILL_CLOSE, STATE_CLOSED]);
+ });
+
+ it('cancels transition to close if mouse moves back to toggle', () => {
+ expect(lastNChangeEvents(1)).toEqual([STATE_WILL_CLOSE]);
+
+ mouseEnter(toggle);
+ jest.runOnlyPendingTimers();
+
+ expect(lastNChangeEvents(4)).toEqual([
+ STATE_OPEN,
+ STATE_WILL_CLOSE,
+ STATE_WILL_OPEN,
+ STATE_OPEN,
+ ]);
+ });
+ });
+
+ describe('when transitioning away from the open state', () => {
+ beforeEach(() => {
+ jest.runOnlyPendingTimers();
+ });
+
+ it('transitions to will-close if mouse out of sidebar region', () => {
+ expect(lastNChangeEvents(1)).toEqual([STATE_OPEN]);
+
+ moveMouse(X_SIDEBAR_EDGE);
+
+ expect(lastNChangeEvents(2)).toEqual([STATE_OPEN, STATE_WILL_CLOSE]);
+ });
+
+ it('transitions to will-close if cursor leaves document', () => {
+ moveMouseOutOfDocument();
+
+ expect(lastNChangeEvents(2)).toEqual([STATE_OPEN, STATE_WILL_CLOSE]);
+ });
+ });
+
+ it('cleans up its mouseleave listener before destroy', () => {
+ jest.runOnlyPendingTimers();
+
+ expect(lastNChangeEvents(1)).toEqual([STATE_OPEN]);
+
+ wrapper.destroy();
+ mouseLeave(toggle);
+
+ expect(lastNChangeEvents(1)).toEqual([STATE_OPEN]);
+ });
+
+ it('cleans up its timers before destroy', () => {
+ wrapper.destroy();
+ jest.runOnlyPendingTimers();
+
+ expect(lastNChangeEvents(1)).toEqual([STATE_WILL_OPEN]);
+ });
+
+ it('cleans up document mouseleave listener before destroy', () => {
+ mouseEnter(toggle);
+
+ wrapper.destroy();
+
+ moveMouseOutOfDocument();
+
+ expect(lastNChangeEvents(1)).not.toEqual([STATE_CLOSED]);
+ });
+ });
+
+ describe('when mouse is over sidebar child element', () => {
+ beforeEach(async () => {
+ await createComponent({ isMouseOverSidebar: true });
+ });
+
+ it('does not transition to will-close or closed when mouse is over sidebar child element', () => {
+ mouseEnter(toggle);
+ jest.runOnlyPendingTimers();
+ mouseLeave(toggle);
+
+ expect(lastNChangeEvents(1)).toEqual([STATE_OPEN]);
+ });
+ });
+
+ it('cleans up its mouseenter listener before destroy', async () => {
+ await createComponent();
+
+ mouseLeave(toggle);
+ jest.runOnlyPendingTimers();
+
+ expect(lastNChangeEvents(1)).toEqual([STATE_CLOSED]);
+
+ wrapper.destroy();
+ mouseEnter(toggle);
+
+ expect(lastNChangeEvents(1)).toEqual([STATE_CLOSED]);
+ });
+});
diff --git a/spec/frontend/super_sidebar/components/sidebar_menu_spec.js b/spec/frontend/super_sidebar/components/sidebar_menu_spec.js
index 5d9a35fbf70..c85a6609e6f 100644
--- a/spec/frontend/super_sidebar/components/sidebar_menu_spec.js
+++ b/spec/frontend/super_sidebar/components/sidebar_menu_spec.js
@@ -16,13 +16,8 @@ const menuItems = [
describe('Sidebar Menu', () => {
let wrapper;
- let flyoutFlag = false;
-
const createWrapper = (extraProps = {}) => {
wrapper = shallowMountExtended(SidebarMenu, {
- provide: {
- glFeatures: { superSidebarFlyoutMenus: flyoutFlag },
- },
propsData: {
items: sidebarData.current_menu_items,
isLoggedIn: sidebarData.is_logged_in,
@@ -125,8 +120,11 @@ describe('Sidebar Menu', () => {
});
describe('flyout menus', () => {
- describe('when feature is disabled', () => {
+ describe('when screen width is smaller than "md" breakpoint', () => {
beforeEach(() => {
+ jest.spyOn(GlBreakpointInstance, 'windowWidth').mockImplementation(() => {
+ return 767;
+ });
createWrapper({
items: menuItems,
});
@@ -140,59 +138,27 @@ describe('Sidebar Menu', () => {
});
});
- describe('when feature is enabled', () => {
+ describe('when screen width is equal or larger than "md" breakpoint', () => {
beforeEach(() => {
- flyoutFlag = true;
- });
-
- describe('when screen width is smaller than "md" breakpoint', () => {
- beforeEach(() => {
- jest.spyOn(GlBreakpointInstance, 'windowWidth').mockImplementation(() => {
- return 767;
- });
- createWrapper({
- items: menuItems,
- });
+ jest.spyOn(GlBreakpointInstance, 'windowWidth').mockImplementation(() => {
+ return 768;
});
-
- it('does not add flyout menus to sections', () => {
- expect(findNonStaticSectionItems().wrappers.map((w) => w.props('hasFlyout'))).toEqual([
- false,
- false,
- ]);
+ createWrapper({
+ items: menuItems,
});
});
- describe('when screen width is equal or larger than "md" breakpoint', () => {
- beforeEach(() => {
- jest.spyOn(GlBreakpointInstance, 'windowWidth').mockImplementation(() => {
- return 768;
- });
- createWrapper({
- items: menuItems,
- });
- });
-
- it('adds flyout menus to sections', () => {
- expect(findNonStaticSectionItems().wrappers.map((w) => w.props('hasFlyout'))).toEqual([
- true,
- true,
- ]);
- });
+ it('adds flyout menus to sections', () => {
+ expect(findNonStaticSectionItems().wrappers.map((w) => w.props('hasFlyout'))).toEqual([
+ true,
+ true,
+ ]);
});
});
});
});
describe('Separators', () => {
- it('should add the separator above pinned section', () => {
- createWrapper({
- items: menuItems,
- panelType: 'project',
- });
- expect(findPinnedSection().props('separated')).toBe(true);
- });
-
it('should add the separator above main menu items when there is a pinned section', () => {
createWrapper({
items: menuItems,
@@ -209,11 +175,4 @@ describe('Sidebar Menu', () => {
expect(findMainMenuSeparator().exists()).toBe(false);
});
});
-
- describe('ARIA attributes', () => {
- it('adds aria-label attribute to nav element', () => {
- createWrapper();
- expect(wrapper.find('nav').attributes('aria-label')).toBe('Main navigation');
- });
- });
});
diff --git a/spec/frontend/super_sidebar/components/sidebar_peek_behavior_spec.js b/spec/frontend/super_sidebar/components/sidebar_peek_behavior_spec.js
index 94ef072a951..90a950c5f35 100644
--- a/spec/frontend/super_sidebar/components/sidebar_peek_behavior_spec.js
+++ b/spec/frontend/super_sidebar/components/sidebar_peek_behavior_spec.js
@@ -2,14 +2,14 @@ import { mount } from '@vue/test-utils';
import {
SUPER_SIDEBAR_PEEK_OPEN_DELAY,
SUPER_SIDEBAR_PEEK_CLOSE_DELAY,
+ SUPER_SIDEBAR_PEEK_STATE_CLOSED as STATE_CLOSED,
+ SUPER_SIDEBAR_PEEK_STATE_WILL_OPEN as STATE_WILL_OPEN,
+ SUPER_SIDEBAR_PEEK_STATE_OPEN as STATE_OPEN,
+ SUPER_SIDEBAR_PEEK_STATE_WILL_CLOSE as STATE_WILL_CLOSE,
} from '~/super_sidebar/constants';
-import SidebarPeek, {
- STATE_CLOSED,
- STATE_WILL_OPEN,
- STATE_OPEN,
- STATE_WILL_CLOSE,
-} from '~/super_sidebar/components/sidebar_peek_behavior.vue';
+import SidebarPeek from '~/super_sidebar/components/sidebar_peek_behavior.vue';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
+import { moveMouse, moveMouseOutOfDocument } from '../mocks';
// These are measured at runtime in the browser, but statically defined here
// since Jest does not do layout/styling.
@@ -41,19 +41,6 @@ describe('SidebarPeek component', () => {
});
};
- const moveMouse = (clientX) => {
- const event = new MouseEvent('mousemove', {
- clientX,
- });
-
- document.dispatchEvent(event);
- };
-
- const moveMouseOutOfDocument = () => {
- const event = new MouseEvent('mouseleave');
- document.documentElement.dispatchEvent(event);
- };
-
const lastNChangeEvents = (n = 1) => wrapper.emitted('change').slice(-n).flat();
beforeEach(() => {
diff --git a/spec/frontend/super_sidebar/components/super_sidebar_spec.js b/spec/frontend/super_sidebar/components/super_sidebar_spec.js
index 7b7b8a7be13..1371f8f00a7 100644
--- a/spec/frontend/super_sidebar/components/super_sidebar_spec.js
+++ b/spec/frontend/super_sidebar/components/super_sidebar_spec.js
@@ -4,29 +4,32 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import SuperSidebar from '~/super_sidebar/components/super_sidebar.vue';
import HelpCenter from '~/super_sidebar/components/help_center.vue';
import UserBar from '~/super_sidebar/components/user_bar.vue';
-import SidebarPeekBehavior, {
- STATE_CLOSED,
- STATE_WILL_OPEN,
- STATE_OPEN,
- STATE_WILL_CLOSE,
-} from '~/super_sidebar/components/sidebar_peek_behavior.vue';
+import SidebarPeekBehavior from '~/super_sidebar/components/sidebar_peek_behavior.vue';
+import SidebarHoverPeekBehavior from '~/super_sidebar/components/sidebar_hover_peek_behavior.vue';
import SidebarPortalTarget from '~/super_sidebar/components/sidebar_portal_target.vue';
-import ContextHeader from '~/super_sidebar/components/context_header.vue';
-import ContextSwitcher from '~/super_sidebar/components/context_switcher.vue';
import SidebarMenu from '~/super_sidebar/components/sidebar_menu.vue';
-import { sidebarState } from '~/super_sidebar/constants';
+import {
+ sidebarState,
+ SUPER_SIDEBAR_PEEK_STATE_CLOSED as STATE_CLOSED,
+ SUPER_SIDEBAR_PEEK_STATE_WILL_OPEN as STATE_WILL_OPEN,
+ SUPER_SIDEBAR_PEEK_STATE_OPEN as STATE_OPEN,
+ SUPER_SIDEBAR_PEEK_STATE_WILL_CLOSE as STATE_WILL_CLOSE,
+} from '~/super_sidebar/constants';
import {
toggleSuperSidebarCollapsed,
isCollapsed,
} from '~/super_sidebar/super_sidebar_collapsed_state_manager';
-import { stubComponent } from 'helpers/stub_component';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
+import { trackContextAccess } from '~/super_sidebar/utils';
import { sidebarData as mockSidebarData, loggedOutSidebarData } from '../mock_data';
const initialSidebarState = { ...sidebarState };
jest.mock('~/super_sidebar/super_sidebar_collapsed_state_manager');
-const closeContextSwitcherMock = jest.fn();
+jest.mock('~/super_sidebar/utils', () => ({
+ ...jest.requireActual('~/super_sidebar/utils'),
+ trackContextAccess: jest.fn(),
+}));
const trialStatusWidgetStubTestId = 'trial-status-widget';
const TrialStatusWidgetStub = { template: `<div data-testid="${trialStatusWidgetStubTestId}" />` };
@@ -36,6 +39,7 @@ const TrialStatusPopoverStub = {
};
const peekClass = 'super-sidebar-peek';
+const hasPeekedClass = 'super-sidebar-has-peeked';
const peekHintClass = 'super-sidebar-peek-hint';
describe('SuperSidebar component', () => {
@@ -43,12 +47,11 @@ describe('SuperSidebar component', () => {
const findSidebar = () => wrapper.findByTestId('super-sidebar');
const findUserBar = () => wrapper.findComponent(UserBar);
- const findContextHeader = () => wrapper.findComponent(ContextHeader);
- const findContextSwitcher = () => wrapper.findComponent(ContextSwitcher);
const findNavContainer = () => wrapper.findByTestId('nav-container');
const findHelpCenter = () => wrapper.findComponent(HelpCenter);
const findSidebarPortalTarget = () => wrapper.findComponent(SidebarPortalTarget);
const findPeekBehavior = () => wrapper.findComponent(SidebarPeekBehavior);
+ const findHoverPeekBehavior = () => wrapper.findComponent(SidebarHoverPeekBehavior);
const findTrialStatusWidget = () => wrapper.findByTestId(trialStatusWidgetStubTestId);
const findTrialStatusPopover = () => wrapper.findByTestId(trialStatusPopoverStubTestId);
const findSidebarMenu = () => wrapper.findComponent(SidebarMenu);
@@ -70,9 +73,6 @@ describe('SuperSidebar component', () => {
sidebarData,
},
stubs: {
- ContextSwitcher: stubComponent(ContextSwitcher, {
- methods: { close: closeContextSwitcherMock },
- }),
TrialStatusWidget: TrialStatusWidgetStub,
TrialStatusPopover: TrialStatusPopoverStub,
},
@@ -128,12 +128,6 @@ describe('SuperSidebar component', () => {
expect(findSidebarPortalTarget().exists()).toBe(true);
});
- it("does not call the context switcher's close method initially", () => {
- createWrapper();
-
- expect(closeContextSwitcherMock).not.toHaveBeenCalled();
- });
-
it('renders hidden shortcut links', () => {
createWrapper();
const [linkAttrs] = mockSidebarData.shortcut_links;
@@ -181,21 +175,43 @@ describe('SuperSidebar component', () => {
expect(findTrialStatusPopover().exists()).toBe(false);
});
- it('does not have peek behavior', () => {
+ it('does not have peek behaviors', () => {
createWrapper();
expect(findPeekBehavior().exists()).toBe(false);
+ expect(findHoverPeekBehavior().exists()).toBe(false);
});
- });
- describe('on collapse', () => {
- beforeEach(() => {
+ it('renders the context header', () => {
createWrapper();
- sidebarState.isCollapsed = true;
+
+ expect(wrapper.text()).toContain('Your work');
});
- it('closes the context switcher', () => {
- expect(closeContextSwitcherMock).toHaveBeenCalled();
+ describe('item access tracking', () => {
+ it('does not track anything if logged out', () => {
+ createWrapper({ sidebarData: loggedOutSidebarData });
+
+ expect(trackContextAccess).not.toHaveBeenCalled();
+ });
+
+ it('does not track anything if logged in and not within a trackable context', () => {
+ createWrapper();
+
+ expect(trackContextAccess).not.toHaveBeenCalled();
+ });
+
+ it('tracks item access if logged in within a trackable context', () => {
+ const currentContext = { namespace: 'groups' };
+ createWrapper({
+ sidebarData: {
+ ...mockSidebarData,
+ current_context: currentContext,
+ },
+ });
+
+ expect(trackContextAccess).toHaveBeenCalledWith('root', currentContext, '/-/track_visits');
+ });
});
});
@@ -205,6 +221,7 @@ describe('SuperSidebar component', () => {
expect(findSidebar().attributes('inert')).toBe('inert');
expect(findSidebar().classes()).not.toContain(peekHintClass);
+ expect(findSidebar().classes()).not.toContain(hasPeekedClass);
expect(findSidebar().classes()).not.toContain(peekClass);
});
@@ -216,6 +233,7 @@ describe('SuperSidebar component', () => {
expect(findSidebar().attributes('inert')).toBe('inert');
expect(findSidebar().classes()).toContain(peekHintClass);
+ expect(findSidebar().classes()).toContain(hasPeekedClass);
expect(findSidebar().classes()).not.toContain(peekClass);
});
@@ -230,9 +248,23 @@ describe('SuperSidebar component', () => {
expect(findSidebar().attributes('inert')).toBe(undefined);
expect(findSidebar().classes()).toContain(peekClass);
expect(findSidebar().classes()).not.toContain(peekHintClass);
+ expect(findHoverPeekBehavior().exists()).toBe(false);
},
);
+ it(`makes sidebar interactive and visible when hover peek state is ${STATE_OPEN}`, async () => {
+ createWrapper({ sidebarState: { isCollapsed: true, isPeekable: true } });
+
+ findHoverPeekBehavior().vm.$emit('change', STATE_OPEN);
+ await nextTick();
+
+ expect(findSidebar().attributes('inert')).toBe(undefined);
+ expect(findSidebar().classes()).toContain(peekClass);
+ expect(findSidebar().classes()).toContain(hasPeekedClass);
+ expect(findSidebar().classes()).not.toContain(peekHintClass);
+ expect(findPeekBehavior().exists()).toBe(false);
+ });
+
it('keeps track of if sidebar has mouseover or not', async () => {
createWrapper({ sidebarState: { isCollapsed: false, isPeekable: true } });
expect(findPeekBehavior().props('isMouseOverSidebar')).toBe(false);
@@ -248,16 +280,9 @@ describe('SuperSidebar component', () => {
createWrapper();
});
- it('allows overflow while the context switcher is closed', () => {
+ it('allows overflow', () => {
expect(findNavContainer().classes()).toContain('gl-overflow-auto');
});
-
- it('hides overflow when context switcher is opened', async () => {
- findContextSwitcher().vm.$emit('toggle', true);
- await nextTick();
-
- expect(findNavContainer().classes()).not.toContain('gl-overflow-auto');
- });
});
describe('when a trial is active', () => {
@@ -271,14 +296,10 @@ describe('SuperSidebar component', () => {
});
});
- describe('Logged out', () => {
- beforeEach(() => {
- createWrapper({ sidebarData: loggedOutSidebarData });
- });
-
- it('renders context header instead of context switcher', () => {
- expect(findContextHeader().exists()).toBe(true);
- expect(findContextSwitcher().exists()).toBe(false);
+ describe('ARIA attributes', () => {
+ it('adds aria-label attribute to nav element', () => {
+ createWrapper();
+ expect(wrapper.find('nav').attributes('aria-label')).toBe('Primary');
});
});
});
diff --git a/spec/frontend/super_sidebar/components/super_sidebar_toggle_spec.js b/spec/frontend/super_sidebar/components/super_sidebar_toggle_spec.js
index 23b735c2773..1f2e5602d10 100644
--- a/spec/frontend/super_sidebar/components/super_sidebar_toggle_spec.js
+++ b/spec/frontend/super_sidebar/components/super_sidebar_toggle_spec.js
@@ -1,6 +1,5 @@
import { nextTick } from 'vue';
import { GlButton } from '@gitlab/ui';
-import { __ } from '~/locale';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
@@ -46,31 +45,29 @@ describe('SuperSidebarToggle component', () => {
expect(findButton().attributes('aria-expanded')).toBe('true');
});
- it('has aria-expanded as false when collapsed', () => {
- createWrapper({ sidebarState: { isCollapsed: true } });
- expect(findButton().attributes('aria-expanded')).toBe('false');
- });
+ it.each(['isCollapsed', 'isPeek', 'isHoverPeek'])(
+ 'has aria-expanded as false when %s is `true`',
+ (stateProp) => {
+ createWrapper({ sidebarState: { [stateProp]: true } });
+ expect(findButton().attributes('aria-expanded')).toBe('false');
+ },
+ );
it('has aria-label attribute', () => {
createWrapper();
- expect(findButton().attributes('aria-label')).toBe(__('Navigation sidebar'));
- });
-
- it('is disabled when isPeek is true', () => {
- createWrapper({ sidebarState: { isPeek: true } });
- expect(findButton().attributes('disabled')).toBeDefined();
+ expect(findButton().attributes('aria-label')).toBe('Primary navigation sidebar');
});
});
describe('tooltip', () => {
it('displays collapse when expanded', () => {
createWrapper();
- expect(getTooltip().title).toBe(__('Hide sidebar'));
+ expect(getTooltip().title).toBe('Hide sidebar');
});
it('displays expand when collapsed', () => {
createWrapper({ sidebarState: { isCollapsed: true } });
- expect(getTooltip().title).toBe(__('Show sidebar'));
+ expect(getTooltip().title).toBe('Keep sidebar visible');
});
});
diff --git a/spec/frontend/super_sidebar/components/user_bar_spec.js b/spec/frontend/super_sidebar/components/user_bar_spec.js
index c6dd8441094..b58b65f09f5 100644
--- a/spec/frontend/super_sidebar/components/user_bar_spec.js
+++ b/spec/frontend/super_sidebar/components/user_bar_spec.js
@@ -65,8 +65,20 @@ describe('UserBar component', () => {
createWrapper();
});
- it('passes the "Create new..." menu groups to the create-menu component', () => {
- expect(findCreateMenu().props('groups')).toBe(mockSidebarData.create_new_menu_groups);
+ describe('"Create new..." menu', () => {
+ describe('when there are no menu items for it', () => {
+ // This scenario usually happens for an "External" user.
+ it('does not render it', () => {
+ createWrapper({ sidebarData: { ...mockSidebarData, create_new_menu_groups: [] } });
+ expect(findCreateMenu().exists()).toBe(false);
+ });
+ });
+
+ describe('when there are menu items for it', () => {
+ it('passes the "Create new..." menu groups to the create-menu component', () => {
+ expect(findCreateMenu().props('groups')).toBe(mockSidebarData.create_new_menu_groups);
+ });
+ });
});
it('passes the "Merge request" menu groups to the merge_request_menu component', () => {
@@ -165,7 +177,7 @@ describe('UserBar component', () => {
it('search button should have tooltip', () => {
const tooltip = getBinding(findSearchButton().element, 'gl-tooltip');
- expect(tooltip.value).toBe(`Search GitLab <kbd>/</kbd>`);
+ expect(tooltip.value).toBe(`Type <kbd>/</kbd> to search`);
});
it('should render search modal', () => {
@@ -184,7 +196,7 @@ describe('UserBar component', () => {
findSearchModal().vm.$emit('hidden');
await nextTick();
const tooltip = getBinding(findSearchButton().element, 'gl-tooltip');
- expect(tooltip.value).toBe(`Search GitLab <kbd>/</kbd>`);
+ expect(tooltip.value).toBe(`Type <kbd>/</kbd> to search`);
});
});
});
diff --git a/spec/frontend/super_sidebar/components/user_menu_spec.js b/spec/frontend/super_sidebar/components/user_menu_spec.js
index 662677be40f..bcc3383bcd4 100644
--- a/spec/frontend/super_sidebar/components/user_menu_spec.js
+++ b/spec/frontend/super_sidebar/components/user_menu_spec.js
@@ -468,27 +468,6 @@ describe('UserMenu component', () => {
});
});
- describe('Feedback item', () => {
- let item;
-
- beforeEach(() => {
- createWrapper();
- item = wrapper.findByTestId('feedback-item');
- });
-
- it('should render feedback item with a link to a new GitLab issue', () => {
- expect(item.find('a').attributes('href')).toBe(UserMenu.feedbackUrl);
- });
-
- it('has Snowplow tracking attributes', () => {
- expect(item.find('a').attributes()).toMatchObject({
- 'data-track-property': 'nav_user_menu',
- 'data-track-action': 'click_link',
- 'data-track-label': 'provide_nav_feedback',
- });
- });
- });
-
describe('Sign out group', () => {
const findSignOutGroup = () => wrapper.findByTestId('sign-out-group');
diff --git a/spec/frontend/super_sidebar/mock_data.js b/spec/frontend/super_sidebar/mock_data.js
index 6fb9715824f..d464ce372ed 100644
--- a/spec/frontend/super_sidebar/mock_data.js
+++ b/spec/frontend/super_sidebar/mock_data.js
@@ -79,10 +79,8 @@ export const contextSwitcherLinks = [
export const sidebarData = {
is_logged_in: true,
current_menu_items: [],
- current_context_header: {
- title: 'Your Work',
- icon: 'work',
- },
+ current_context: {},
+ current_context_header: 'Your work',
name: 'Administrator',
username: 'root',
avatar_url: 'path/to/img_administrator',
@@ -124,15 +122,14 @@ export const sidebarData = {
css_class: 'shortcut-link-class',
},
],
+ track_visits_path: '/-/track_visits',
};
export const loggedOutSidebarData = {
is_logged_in: false,
current_menu_items: [],
- current_context_header: {
- title: 'Your Work',
- icon: 'work',
- },
+ current_context: {},
+ current_context_header: 'Your work',
support_path: '/support',
display_whats_new: true,
whats_new_most_recent_release_items_count: 5,
@@ -285,36 +282,3 @@ export const cachedFrequentGroups = JSON.stringify([
frequency: 3,
},
]);
-
-export const searchUserProjectsAndGroupsResponseMock = {
- data: {
- projects: {
- nodes: [
- {
- id: 'gid://gitlab/Project/2',
- name: 'Gitlab Shell',
- namespace: 'Gitlab Org / Gitlab Shell',
- webUrl: 'http://gdk.test:3000/gitlab-org/gitlab-shell',
- avatarUrl: null,
- __typename: 'Project',
- },
- ],
- },
-
- user: {
- id: 'gid://gitlab/User/1',
- groups: {
- nodes: [
- {
- id: 'gid://gitlab/Group/60',
- name: 'GitLab Instance',
- namespace: 'gitlab-instance-2e4abb29',
- webUrl: 'http://gdk.test:3000/groups/gitlab-instance-2e4abb29',
- avatarUrl: null,
- __typename: 'Group',
- },
- ],
- },
- },
- },
-};
diff --git a/spec/frontend/super_sidebar/mocks.js b/spec/frontend/super_sidebar/mocks.js
new file mode 100644
index 00000000000..d13e5f1f361
--- /dev/null
+++ b/spec/frontend/super_sidebar/mocks.js
@@ -0,0 +1,24 @@
+export const moveMouse = (clientX) => {
+ const event = new MouseEvent('mousemove', {
+ clientX,
+ });
+
+ document.dispatchEvent(event);
+};
+
+export const mouseEnter = (el) => {
+ const event = new MouseEvent('mouseenter');
+
+ el.dispatchEvent(event);
+};
+
+export const mouseLeave = (el) => {
+ const event = new MouseEvent('mouseleave');
+
+ el.dispatchEvent(event);
+};
+
+export const moveMouseOutOfDocument = () => {
+ const event = new MouseEvent('mouseleave');
+ document.documentElement.dispatchEvent(event);
+};
diff --git a/spec/frontend/super_sidebar/utils_spec.js b/spec/frontend/super_sidebar/utils_spec.js
index 536599e6c12..85f45de06ba 100644
--- a/spec/frontend/super_sidebar/utils_spec.js
+++ b/spec/frontend/super_sidebar/utils_spec.js
@@ -1,17 +1,20 @@
import * as Sentry from '@sentry/browser';
+import MockAdapter from 'axios-mock-adapter';
import {
getTopFrequentItems,
trackContextAccess,
- formatContextSwitcherItems,
getItemsFromLocalStorage,
removeItemFromLocalStorage,
ariaCurrent,
} from '~/super_sidebar/utils';
+import axios from '~/lib/utils/axios_utils';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import AccessorUtilities from '~/lib/utils/accessor';
import { FREQUENT_ITEMS, FIFTEEN_MINUTES_IN_MS } from '~/frequent_items/constants';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
+import waitForPromises from 'helpers/wait_for_promises';
import { unsortedFrequentItems, sortedFrequentItems } from '../frequent_items/mock_data';
-import { cachedFrequentProjects, searchUserProjectsAndGroupsResponseMock } from './mock_data';
+import { cachedFrequentProjects } from './mock_data';
jest.mock('@sentry/browser');
@@ -42,13 +45,29 @@ describe('Super sidebar utils spec', () => {
});
describe('trackContextAccess', () => {
+ useLocalStorageSpy();
+
+ let axiosMock;
+
const username = 'root';
+ const trackVisitsPath = '/-/track_visits';
const context = {
namespace: 'groups',
item: { id: 1 },
};
const storageKey = `${username}/frequent-${context.namespace}`;
+ beforeEach(() => {
+ gon.features = { serverSideFrecentNamespaces: true };
+ axiosMock = new MockAdapter(axios);
+ axiosMock.onPost(trackVisitsPath).reply(HTTP_STATUS_OK);
+ });
+
+ afterEach(() => {
+ gon.features = {};
+ axiosMock.restore();
+ });
+
it('returns `false` if local storage is not available', () => {
jest.spyOn(AccessorUtilities, 'canUseLocalStorage').mockReturnValue(false);
@@ -56,7 +75,7 @@ describe('Super sidebar utils spec', () => {
});
it('creates a new item if it does not exist in the local storage', () => {
- trackContextAccess(username, context);
+ trackContextAccess(username, context, trackVisitsPath);
expect(window.localStorage.setItem).toHaveBeenCalledWith(
storageKey,
@@ -70,6 +89,24 @@ describe('Super sidebar utils spec', () => {
);
});
+ it('sends a POST request to persist the visit in the DB', async () => {
+ expect(axiosMock.history.post).toHaveLength(0);
+
+ trackContextAccess(username, context, trackVisitsPath);
+ await waitForPromises();
+
+ expect(axiosMock.history.post).toHaveLength(1);
+ expect(axiosMock.history.post[0].url).toBe(trackVisitsPath);
+ });
+
+ it('does not send a POST request when the serverSideFrecentNamespaces feature flag is disabled', async () => {
+ gon.features = { serverSideFrecentNamespaces: false };
+ trackContextAccess(username, context, trackVisitsPath);
+ await waitForPromises();
+
+ expect(axiosMock.history.post).toHaveLength(0);
+ });
+
it('updates existing item frequency/access time if it was persisted to the local storage over 15 minutes ago', () => {
window.localStorage.setItem(
storageKey,
@@ -81,7 +118,7 @@ describe('Super sidebar utils spec', () => {
},
]),
);
- trackContextAccess(username, context);
+ trackContextAccess(username, context, trackVisitsPath);
expect(window.localStorage.setItem).toHaveBeenCalledWith(
storageKey,
@@ -95,7 +132,7 @@ describe('Super sidebar utils spec', () => {
);
});
- it('leaves item frequency/access time as is if it was persisted to the local storage under 15 minutes ago', () => {
+ it('leaves item frequency/access time as is if it was persisted to the local storage under 15 minutes ago, and does not send a POST request', () => {
const jsonString = JSON.stringify([
{
id: 1,
@@ -108,10 +145,12 @@ describe('Super sidebar utils spec', () => {
expect(window.localStorage.setItem).toHaveBeenCalledTimes(1);
expect(window.localStorage.setItem).toHaveBeenCalledWith(storageKey, jsonString);
- trackContextAccess(username, context);
+ trackContextAccess(username, context, trackVisitsPath);
expect(window.localStorage.setItem).toHaveBeenCalledTimes(3);
expect(window.localStorage.setItem).toHaveBeenLastCalledWith(storageKey, jsonString);
+
+ expect(axiosMock.history.post).toHaveLength(0);
});
it('always updates stored item metadata', () => {
@@ -163,10 +202,14 @@ describe('Super sidebar utils spec', () => {
const newItem = {
id: FREQUENT_ITEMS.MAX_COUNT + 1,
};
- trackContextAccess(username, {
- namespace: 'groups',
- item: newItem,
- });
+ trackContextAccess(
+ username,
+ {
+ namespace: 'groups',
+ item: newItem,
+ },
+ trackVisitsPath,
+ );
// Finally, retrieve the final data from the local storage
const finallyStoredItems = JSON.parse(window.localStorage.getItem(storageKey));
@@ -182,21 +225,6 @@ describe('Super sidebar utils spec', () => {
});
});
- describe('formatContextSwitcherItems', () => {
- it('returns the formatted items', () => {
- const projects = searchUserProjectsAndGroupsResponseMock.data.projects.nodes;
- expect(formatContextSwitcherItems(projects)).toEqual([
- {
- id: projects[0].id,
- avatar: null,
- title: projects[0].name,
- subtitle: 'Gitlab Org',
- link: projects[0].webUrl,
- },
- ]);
- });
- });
-
describe('getItemsFromLocalStorage', () => {
const storageKey = 'mockStorageKey';
const maxItems = 5;
diff --git a/spec/frontend/time_tracking/components/timelogs_app_spec.js b/spec/frontend/time_tracking/components/timelogs_app_spec.js
index ca470ce63ac..13188f3b937 100644
--- a/spec/frontend/time_tracking/components/timelogs_app_spec.js
+++ b/spec/frontend/time_tracking/components/timelogs_app_spec.js
@@ -95,12 +95,12 @@ describe('Timelogs app', () => {
mountComponent();
const username = 'johnsmith';
- const fromDate = new Date('2023-02-28');
- const toDate = new Date('2023-03-28');
+ const fromDateTime = new Date('2023-02-28');
+ const toDateTime = new Date('2023-03-28');
findUsernameInput().vm.$emit('input', username);
- findFromDatepicker().vm.$emit('input', fromDate);
- findToDatepicker().vm.$emit('input', toDate);
+ findFromDatepicker().vm.$emit('input', fromDateTime);
+ findToDatepicker().vm.$emit('input', toDateTime);
resolvedEmptyListMock.mockClear();
@@ -110,8 +110,8 @@ describe('Timelogs app', () => {
expect(resolvedEmptyListMock).toHaveBeenCalledWith({
username,
- startDate: fromDate,
- endDate: toDate,
+ startTime: fromDateTime,
+ endTime: toDateTime,
groupId: null,
projectId: null,
first: 20,
@@ -119,6 +119,15 @@ describe('Timelogs app', () => {
after: null,
before: null,
});
+
+ expect(`${wrapper.vm.queryVariables.startTime}`).toEqual(
+ 'Tue Feb 28 2023 00:00:00 GMT+0000 (Greenwich Mean Time)',
+ );
+ // should be 1 day ahead of the initial To Date value
+ expect(`${wrapper.vm.queryVariables.endTime}`).toEqual(
+ 'Wed Mar 29 2023 00:00:00 GMT+0000 (Greenwich Mean Time)',
+ );
+
expect(createAlert).not.toHaveBeenCalled();
expect(Sentry.captureException).not.toHaveBeenCalled();
});
@@ -140,8 +149,8 @@ describe('Timelogs app', () => {
expect(resolvedEmptyListMock).toHaveBeenCalledWith({
username,
- startDate: null,
- endDate: null,
+ startTime: null,
+ endTime: null,
groupId: null,
projectId: null,
first: 20,
diff --git a/spec/frontend/tracing/components/tracing_details_spec.js b/spec/frontend/tracing/components/tracing_details_spec.js
deleted file mode 100644
index c5efa2a7eb5..00000000000
--- a/spec/frontend/tracing/components/tracing_details_spec.js
+++ /dev/null
@@ -1,103 +0,0 @@
-import { GlLoadingIcon } from '@gitlab/ui';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import TracingDetails from '~/tracing/components/tracing_details.vue';
-import waitForPromises from 'helpers/wait_for_promises';
-import { createAlert } from '~/alert';
-import { visitUrl, isSafeURL } from '~/lib/utils/url_utility';
-
-jest.mock('~/alert');
-jest.mock('~/lib/utils/url_utility');
-
-describe('TracingDetails', () => {
- let wrapper;
- let observabilityClientMock;
-
- const TRACE_ID = 'test-trace-id';
- const TRACING_INDEX_URL = 'https://www.gitlab.com/flightjs/Flight/-/tracing';
-
- const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
- const findTraceDetails = () => wrapper.findComponentByTestId('trace-details');
-
- const props = {
- traceId: TRACE_ID,
- tracingIndexUrl: TRACING_INDEX_URL,
- };
-
- const mountComponent = async () => {
- wrapper = shallowMountExtended(TracingDetails, {
- propsData: {
- ...props,
- observabilityClient: observabilityClientMock,
- },
- });
- await waitForPromises();
- };
-
- beforeEach(() => {
- isSafeURL.mockReturnValue(true);
-
- observabilityClientMock = {
- isTracingEnabled: jest.fn(),
- fetchTrace: jest.fn(),
- };
- });
-
- it('renders the loading indicator while checking if tracing is enabled', () => {
- mountComponent();
-
- expect(findLoadingIcon().exists()).toBe(true);
- expect(observabilityClientMock.isTracingEnabled).toHaveBeenCalled();
- });
-
- describe('when tracing is enabled', () => {
- const mockTrace = { traceId: 'test-trace-id', foo: 'bar' };
- beforeEach(async () => {
- observabilityClientMock.isTracingEnabled.mockResolvedValueOnce(true);
- observabilityClientMock.fetchTrace.mockResolvedValueOnce(mockTrace);
-
- await mountComponent();
- });
-
- it('fetches the trace and renders the trace details', () => {
- expect(observabilityClientMock.isTracingEnabled).toHaveBeenCalled();
- expect(observabilityClientMock.fetchTrace).toHaveBeenCalled();
- expect(findLoadingIcon().exists()).toBe(false);
- expect(findTraceDetails().exists()).toBe(true);
- });
- });
-
- describe('when tracing is not enabled', () => {
- beforeEach(async () => {
- observabilityClientMock.isTracingEnabled.mockResolvedValueOnce(false);
-
- await mountComponent();
- });
-
- it('redirects to tracingIndexUrl', () => {
- expect(visitUrl).toHaveBeenCalledWith(props.tracingIndexUrl);
- });
- });
-
- describe('error handling', () => {
- it('if isTracingEnabled fails, it renders an alert and empty page', async () => {
- observabilityClientMock.isTracingEnabled.mockRejectedValueOnce('error');
-
- await mountComponent();
-
- expect(createAlert).toHaveBeenCalledWith({ message: 'Failed to load trace details.' });
- expect(findLoadingIcon().exists()).toBe(false);
- expect(findTraceDetails().exists()).toBe(false);
- });
-
- it('if fetchTrace fails, it renders an alert and empty page', async () => {
- observabilityClientMock.isTracingEnabled.mockReturnValueOnce(true);
- observabilityClientMock.fetchTrace.mockRejectedValueOnce('error');
-
- await mountComponent();
-
- expect(createAlert).toHaveBeenCalledWith({ message: 'Failed to load trace details.' });
- expect(findLoadingIcon().exists()).toBe(false);
- expect(findTraceDetails().exists()).toBe(false);
- });
- });
-});
diff --git a/spec/frontend/tracing/components/tracing_empty_state_spec.js b/spec/frontend/tracing/components/tracing_empty_state_spec.js
deleted file mode 100644
index d91c62a1dad..00000000000
--- a/spec/frontend/tracing/components/tracing_empty_state_spec.js
+++ /dev/null
@@ -1,39 +0,0 @@
-import { GlButton, GlEmptyState } from '@gitlab/ui';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import TracingEmptyState from '~/tracing/components/tracing_empty_state.vue';
-
-describe('TracingEmptyState', () => {
- let wrapper;
-
- const findEnableButton = () => wrapper.findComponent(GlButton);
-
- beforeEach(() => {
- wrapper = shallowMountExtended(TracingEmptyState);
- });
-
- it('renders the component properly', () => {
- expect(wrapper.exists()).toBe(true);
- });
-
- it('displays the correct title', () => {
- const { title } = wrapper.findComponent(GlEmptyState).props();
- expect(title).toBe('Get started with Tracing');
- });
-
- it('displays the correct description', () => {
- const description = wrapper.find('span').text();
- expect(description).toBe('Monitor your applications with GitLab Distributed Tracing.');
- });
-
- it('displays the enable button', () => {
- const enableButton = findEnableButton();
- expect(enableButton.exists()).toBe(true);
- expect(enableButton.text()).toBe('Enable');
- });
-
- it('emits enable-tracing when enable button is clicked', () => {
- findEnableButton().vm.$emit('click');
-
- expect(wrapper.emitted('enable-tracing')).toHaveLength(1);
- });
-});
diff --git a/spec/frontend/tracing/components/tracing_list_filtered_search_spec.js b/spec/frontend/tracing/components/tracing_list_filtered_search_spec.js
deleted file mode 100644
index ad15dd4a371..00000000000
--- a/spec/frontend/tracing/components/tracing_list_filtered_search_spec.js
+++ /dev/null
@@ -1,38 +0,0 @@
-import { GlFilteredSearch } from '@gitlab/ui';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-
-import TracingListFilteredSearch from '~/tracing/components/tracing_list_filtered_search.vue';
-
-describe('TracingListFilteredSearch', () => {
- let wrapper;
- const initialFilters = [
- { type: 'period', value: '1h' },
- { type: 'service_name', value: 'example-service' },
- ];
- beforeEach(() => {
- wrapper = shallowMountExtended(TracingListFilteredSearch, {
- propsData: {
- initialFilters,
- },
- });
- });
-
- it('renders the component', () => {
- expect(wrapper.exists()).toBe(true);
- });
-
- it('sets initialFilters prop correctly', () => {
- expect(wrapper.findComponent(GlFilteredSearch).props('value')).toEqual(initialFilters);
- });
-
- it('emits submit event on filtered search submit', () => {
- wrapper
- .findComponent(GlFilteredSearch)
- .vm.$emit('submit', { filters: [{ type: 'period', value: '1h' }] });
-
- expect(wrapper.emitted('submit')).toHaveLength(1);
- expect(wrapper.emitted('submit')[0][0]).toEqual({
- filters: [{ type: 'period', value: '1h' }],
- });
- });
-});
diff --git a/spec/frontend/tracing/components/tracing_list_spec.js b/spec/frontend/tracing/components/tracing_list_spec.js
deleted file mode 100644
index 9aa37ac9c9c..00000000000
--- a/spec/frontend/tracing/components/tracing_list_spec.js
+++ /dev/null
@@ -1,216 +0,0 @@
-import { GlLoadingIcon } from '@gitlab/ui';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import TracingList from '~/tracing/components/tracing_list.vue';
-import TracingEmptyState from '~/tracing/components/tracing_empty_state.vue';
-import TracingTableList from '~/tracing/components/tracing_table_list.vue';
-import waitForPromises from 'helpers/wait_for_promises';
-import { createAlert } from '~/alert';
-import * as urlUtility from '~/lib/utils/url_utility';
-import {
- queryToFilterObj,
- filterObjToQuery,
- filterObjToFilterToken,
- filterTokensToFilterObj,
-} from '~/tracing/filters';
-import FilteredSearch from '~/tracing/components/tracing_list_filtered_search.vue';
-import UrlSync from '~/vue_shared/components/url_sync.vue';
-import setWindowLocation from 'helpers/set_window_location_helper';
-
-jest.mock('~/alert');
-jest.mock('~/tracing/filters');
-
-describe('TracingList', () => {
- let wrapper;
- let observabilityClientMock;
-
- const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
- const findEmptyState = () => wrapper.findComponent(TracingEmptyState);
- const findTableList = () => wrapper.findComponent(TracingTableList);
- const findFilteredSearch = () => wrapper.findComponent(FilteredSearch);
- const findUrlSync = () => wrapper.findComponent(UrlSync);
-
- const mountComponent = async () => {
- wrapper = shallowMountExtended(TracingList, {
- propsData: {
- observabilityClient: observabilityClientMock,
- },
- });
- await waitForPromises();
- };
-
- beforeEach(() => {
- observabilityClientMock = {
- isTracingEnabled: jest.fn(),
- enableTraces: jest.fn(),
- fetchTraces: jest.fn(),
- };
- });
-
- it('renders the loading indicator while checking if tracing is enabled', () => {
- mountComponent();
- expect(findLoadingIcon().exists()).toBe(true);
- expect(findEmptyState().exists()).toBe(false);
- expect(findTableList().exists()).toBe(false);
- expect(findFilteredSearch().exists()).toBe(false);
- expect(findUrlSync().exists()).toBe(false);
- expect(observabilityClientMock.isTracingEnabled).toHaveBeenCalled();
- });
-
- describe('when tracing is enabled', () => {
- const mockTraces = ['trace1', 'trace2'];
- beforeEach(async () => {
- observabilityClientMock.isTracingEnabled.mockResolvedValueOnce(true);
- observabilityClientMock.fetchTraces.mockResolvedValueOnce(mockTraces);
-
- await mountComponent();
- });
-
- it('fetches the traces and renders the trace list with filtered search', () => {
- expect(observabilityClientMock.isTracingEnabled).toHaveBeenCalled();
- expect(observabilityClientMock.fetchTraces).toHaveBeenCalled();
- expect(findLoadingIcon().exists()).toBe(false);
- expect(findEmptyState().exists()).toBe(false);
- expect(findTableList().exists()).toBe(true);
- expect(findFilteredSearch().exists()).toBe(true);
- expect(findUrlSync().exists()).toBe(true);
- expect(findTableList().props('traces')).toBe(mockTraces);
- });
-
- it('calls fetchTraces method when TracingTableList emits reload event', () => {
- observabilityClientMock.fetchTraces.mockClear();
- observabilityClientMock.fetchTraces.mockResolvedValueOnce(['trace1']);
-
- findTableList().vm.$emit('reload');
-
- expect(observabilityClientMock.fetchTraces).toHaveBeenCalledTimes(1);
- });
-
- it('on trace selection it redirects to the details url', () => {
- setWindowLocation('base_path');
- const visitUrlMock = jest.spyOn(urlUtility, 'visitUrl').mockReturnValue({});
-
- findTableList().vm.$emit('trace-selected', { trace_id: 'test-trace-id' });
-
- expect(visitUrlMock).toHaveBeenCalledTimes(1);
- expect(visitUrlMock).toHaveBeenCalledWith('/base_path/test-trace-id');
- });
- });
-
- describe('filtered search', () => {
- let mockFilterObj;
- let mockFilterToken;
- let mockQuery;
- let mockUpdatedFilterObj;
-
- beforeEach(async () => {
- observabilityClientMock.isTracingEnabled.mockResolvedValue(true);
- observabilityClientMock.fetchTraces.mockResolvedValue([]);
-
- setWindowLocation('?trace-id=foo');
-
- mockFilterObj = { mock: 'filter-obj' };
- queryToFilterObj.mockReturnValue(mockFilterObj);
-
- mockFilterToken = ['mock-token'];
- filterObjToFilterToken.mockReturnValue(mockFilterToken);
-
- mockQuery = { mock: 'query' };
- filterObjToQuery.mockReturnValueOnce(mockQuery);
-
- mockUpdatedFilterObj = { mock: 'filter-obj-upd' };
- filterTokensToFilterObj.mockReturnValue(mockUpdatedFilterObj);
-
- await mountComponent();
- });
-
- it('renders FilteredSeach with initial filters parsed from window.location', () => {
- expect(queryToFilterObj).toHaveBeenCalledWith('?trace-id=foo');
- expect(filterObjToFilterToken).toHaveBeenCalledWith(mockFilterObj);
- expect(findFilteredSearch().props('initialFilters')).toBe(mockFilterToken);
- });
-
- it('renders UrlSync and sets query prop', () => {
- expect(filterObjToQuery).toHaveBeenCalledWith(mockFilterObj);
- expect(findUrlSync().props('query')).toBe(mockQuery);
- });
-
- it('process filters on search submit', async () => {
- const mockUpdatedQuery = { mock: 'updated-query' };
- filterObjToQuery.mockReturnValueOnce(mockUpdatedQuery);
- const mockFilters = { mock: 'some-filter' };
-
- findFilteredSearch().vm.$emit('submit', mockFilters);
- await waitForPromises();
-
- expect(filterTokensToFilterObj).toHaveBeenCalledWith(mockFilters);
- expect(filterObjToQuery).toHaveBeenCalledWith(mockUpdatedFilterObj);
- expect(findUrlSync().props('query')).toBe(mockUpdatedQuery);
- });
-
- it('fetches traces with filters', () => {
- expect(observabilityClientMock.fetchTraces).toHaveBeenCalledWith(mockFilterObj);
-
- findFilteredSearch().vm.$emit('submit', {});
-
- expect(observabilityClientMock.fetchTraces).toHaveBeenLastCalledWith(mockUpdatedFilterObj);
- });
- });
-
- describe('when tracing is not enabled', () => {
- beforeEach(async () => {
- observabilityClientMock.isTracingEnabled.mockResolvedValueOnce(false);
- observabilityClientMock.fetchTraces.mockResolvedValueOnce([]);
-
- await mountComponent();
- });
-
- it('renders TracingEmptyState', () => {
- expect(findEmptyState().exists()).toBe(true);
- });
-
- it('calls enableTracing when TracingEmptyState emits enable-tracing', () => {
- findEmptyState().vm.$emit('enable-tracing');
-
- expect(observabilityClientMock.enableTraces).toHaveBeenCalled();
- });
- });
-
- describe('error handling', () => {
- it('if isTracingEnabled fails, it renders an alert and empty page', async () => {
- observabilityClientMock.isTracingEnabled.mockRejectedValueOnce('error');
-
- await mountComponent();
-
- expect(createAlert).toHaveBeenCalledWith({ message: 'Failed to load page.' });
- expect(findLoadingIcon().exists()).toBe(false);
- expect(findEmptyState().exists()).toBe(false);
- expect(findTableList().exists()).toBe(false);
- });
-
- it('if fetchTraces fails, it renders an alert and empty list', async () => {
- observabilityClientMock.fetchTraces.mockRejectedValueOnce('error');
- observabilityClientMock.isTracingEnabled.mockReturnValueOnce(true);
-
- await mountComponent();
-
- expect(createAlert).toHaveBeenCalledWith({ message: 'Failed to load traces.' });
- expect(findTableList().exists()).toBe(true);
- expect(findTableList().props('traces')).toEqual([]);
- });
-
- it('if enableTraces fails, it renders an alert and empty-state', async () => {
- observabilityClientMock.isTracingEnabled.mockReturnValueOnce(false);
- observabilityClientMock.enableTraces.mockRejectedValueOnce('error');
-
- await mountComponent();
-
- findEmptyState().vm.$emit('enable-tracing');
- await waitForPromises();
-
- expect(createAlert).toHaveBeenCalledWith({ message: 'Failed to enable tracing.' });
- expect(findLoadingIcon().exists()).toBe(false);
- expect(findEmptyState().exists()).toBe(true);
- expect(findTableList().exists()).toBe(false);
- });
- });
-});
diff --git a/spec/frontend/tracing/components/tracing_table_list_spec.js b/spec/frontend/tracing/components/tracing_table_list_spec.js
deleted file mode 100644
index aa96b9b370f..00000000000
--- a/spec/frontend/tracing/components/tracing_table_list_spec.js
+++ /dev/null
@@ -1,77 +0,0 @@
-import { nextTick } from 'vue';
-import { mountExtended } from 'helpers/vue_test_utils_helper';
-import TracingTableList from '~/tracing/components/tracing_table_list.vue';
-
-describe('TracingTableList', () => {
- let wrapper;
- const mockTraces = [
- {
- timestamp: '2023-07-10T15:02:30.677538Z',
- service_name: 'tracegen',
- operation: 'lets-go',
- duration: 150,
- },
- {
- timestamp: '2023-07-10T15:02:30.677538Z',
- service_name: 'tracegen',
- operation: 'lets-go',
- duration: 200,
- },
- ];
-
- const mountComponent = ({ traces = mockTraces } = {}) => {
- wrapper = mountExtended(TracingTableList, {
- propsData: {
- traces,
- },
- });
- };
-
- const getRows = () => wrapper.findComponent({ name: 'GlTable' }).find('tbody').findAll('tr');
- const getRow = (idx) => getRows().at(idx);
- const getCells = (trIdx) => getRows().at(trIdx).findAll('td');
-
- const getCell = (trIdx, tdIdx) => {
- return getCells(trIdx).at(tdIdx);
- };
-
- const selectRow = async (idx) => {
- getRow(idx).trigger('click');
- await nextTick();
- };
-
- it('renders traces as table', () => {
- mountComponent();
-
- const rows = wrapper.findAll('table tbody tr');
-
- expect(rows.length).toBe(mockTraces.length);
-
- mockTraces.forEach((trace, i) => {
- expect(getCells(i).length).toBe(4);
- expect(getCell(i, 0).text()).toBe(trace.timestamp);
- expect(getCell(i, 1).text()).toBe(trace.service_name);
- expect(getCell(i, 2).text()).toBe(trace.operation);
- expect(getCell(i, 3).text()).toBe(`${trace.duration} ms`);
- });
- });
-
- it('emits trace-selected on row selection', async () => {
- mountComponent();
-
- await selectRow(0);
- expect(wrapper.emitted('trace-selected')).toHaveLength(1);
- expect(wrapper.emitted('trace-selected')[0][0]).toBe(mockTraces[0]);
- });
-
- it('renders the empty state when no traces are provided', () => {
- mountComponent({ traces: [] });
-
- expect(getCell(0, 0).text()).toContain('No traces to display');
- const link = getCell(0, 0).findComponent({ name: 'GlLink' });
- expect(link.text()).toBe('Check again');
-
- link.trigger('click');
- expect(wrapper.emitted('reload')).toHaveLength(1);
- });
-});
diff --git a/spec/frontend/tracing/details_index_spec.js b/spec/frontend/tracing/details_index_spec.js
deleted file mode 100644
index e0d368b6cb7..00000000000
--- a/spec/frontend/tracing/details_index_spec.js
+++ /dev/null
@@ -1,42 +0,0 @@
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import DetailsIndex from '~/tracing/details_index.vue';
-import TracingDetails from '~/tracing/components/tracing_details.vue';
-import ObservabilityContainer from '~/observability/components/observability_container.vue';
-
-describe('DetailsIndex', () => {
- const props = {
- traceId: 'test-trace-id',
- tracingIndexUrl: 'https://example.com/tracing/index',
- oauthUrl: 'https://example.com/oauth',
- tracingUrl: 'https://example.com/tracing',
- provisioningUrl: 'https://example.com/provisioning',
- };
-
- let wrapper;
-
- const mountComponent = () => {
- wrapper = shallowMountExtended(DetailsIndex, {
- propsData: props,
- });
- };
-
- it('renders ObservabilityContainer component', () => {
- mountComponent();
-
- const observabilityContainer = wrapper.findComponent(ObservabilityContainer);
- expect(observabilityContainer.exists()).toBe(true);
- expect(observabilityContainer.props('oauthUrl')).toBe(props.oauthUrl);
- expect(observabilityContainer.props('tracingUrl')).toBe(props.tracingUrl);
- expect(observabilityContainer.props('provisioningUrl')).toBe(props.provisioningUrl);
- });
-
- it('renders TracingList component inside ObservabilityContainer', () => {
- mountComponent();
-
- const observabilityContainer = wrapper.findComponent(ObservabilityContainer);
- const detailsCmp = observabilityContainer.findComponent(TracingDetails);
- expect(detailsCmp.exists()).toBe(true);
- expect(detailsCmp.props('traceId')).toBe(props.traceId);
- expect(detailsCmp.props('tracingIndexUrl')).toBe(props.tracingIndexUrl);
- });
-});
diff --git a/spec/frontend/tracing/filters_spec.js b/spec/frontend/tracing/filters_spec.js
deleted file mode 100644
index ee396326f45..00000000000
--- a/spec/frontend/tracing/filters_spec.js
+++ /dev/null
@@ -1,141 +0,0 @@
-import {
- filterToQueryObject,
- urlQueryToFilter,
- prepareTokens,
- processFilters,
-} from '~/vue_shared/components/filtered_search_bar/filtered_search_utils';
-import { FILTERED_SEARCH_TERM } from '~/vue_shared/components/filtered_search_bar/constants';
-
-import {
- PERIOD_FILTER_TOKEN_TYPE,
- SERVICE_NAME_FILTER_TOKEN_TYPE,
- OPERATION_FILTER_TOKEN_TYPE,
- TRACE_ID_FILTER_TOKEN_TYPE,
- DURATION_MS_FILTER_TOKEN_TYPE,
- queryToFilterObj,
- filterObjToQuery,
- filterObjToFilterToken,
- filterTokensToFilterObj,
-} from '~/tracing/filters';
-
-jest.mock('~/vue_shared/components/filtered_search_bar/filtered_search_utils');
-
-describe('utils', () => {
- describe('queryToFilterObj', () => {
- it('should build a filter obj', () => {
- const url = 'http://example.com/';
- urlQueryToFilter.mockReturnValue({
- period: '7d',
- service: 'my_service',
- operation: 'my_operation',
- trace_id: 'my_trace_id',
- durationMs: '500',
- [FILTERED_SEARCH_TERM]: 'test',
- });
-
- const filterObj = queryToFilterObj(url);
-
- expect(urlQueryToFilter).toHaveBeenCalledWith(url, {
- customOperators: [
- { operator: '>', prefix: 'gt' },
- { operator: '<', prefix: 'lt' },
- ],
- filteredSearchTermKey: 'search',
- });
- expect(filterObj).toEqual({
- period: '7d',
- service: 'my_service',
- operation: 'my_operation',
- traceId: 'my_trace_id',
- durationMs: '500',
- search: 'test',
- });
- });
- });
-
- describe('filterObjToQuery', () => {
- it('should convert filter object to URL query', () => {
- filterToQueryObject.mockReturnValue('mockquery');
-
- const query = filterObjToQuery({
- period: '7d',
- serviceName: 'my_service',
- operation: 'my_operation',
- traceId: 'my_trace_id',
- durationMs: '500',
- search: 'test',
- });
-
- expect(filterToQueryObject).toHaveBeenCalledWith(
- {
- period: '7d',
- service: 'my_service',
- operation: 'my_operation',
- trace_id: 'my_trace_id',
- durationMs: '500',
- 'filtered-search-term': 'test',
- },
- {
- customOperators: [
- { applyOnlyToKey: 'durationMs', operator: '>', prefix: 'gt' },
- { applyOnlyToKey: 'durationMs', operator: '<', prefix: 'lt' },
- ],
- filteredSearchTermKey: 'search',
- },
- );
- expect(query).toBe('mockquery');
- });
- });
-
- describe('filterObjToFilterToken', () => {
- it('should convert filter object to filter tokens', () => {
- const mockTokens = [];
- prepareTokens.mockReturnValue(mockTokens);
-
- const tokens = filterObjToFilterToken({
- period: '7d',
- serviceName: 'my_service',
- operation: 'my_operation',
- traceId: 'my_trace_id',
- durationMs: '500',
- search: 'test',
- });
-
- expect(prepareTokens).toHaveBeenCalledWith({
- [PERIOD_FILTER_TOKEN_TYPE]: '7d',
- [SERVICE_NAME_FILTER_TOKEN_TYPE]: 'my_service',
- [OPERATION_FILTER_TOKEN_TYPE]: 'my_operation',
- [TRACE_ID_FILTER_TOKEN_TYPE]: 'my_trace_id',
- [DURATION_MS_FILTER_TOKEN_TYPE]: '500',
- [FILTERED_SEARCH_TERM]: 'test',
- });
- expect(tokens).toBe(mockTokens);
- });
- });
-
- describe('filterTokensToFilterObj', () => {
- it('should convert filter tokens to filter object', () => {
- const mockTokens = [];
- processFilters.mockReturnValue({
- [SERVICE_NAME_FILTER_TOKEN_TYPE]: 'my_service',
- [PERIOD_FILTER_TOKEN_TYPE]: '7d',
- [OPERATION_FILTER_TOKEN_TYPE]: 'my_operation',
- [TRACE_ID_FILTER_TOKEN_TYPE]: 'my_trace_id',
- [DURATION_MS_FILTER_TOKEN_TYPE]: '500',
- [FILTERED_SEARCH_TERM]: 'test',
- });
-
- const filterObj = filterTokensToFilterObj(mockTokens);
-
- expect(processFilters).toHaveBeenCalledWith(mockTokens);
- expect(filterObj).toEqual({
- serviceName: 'my_service',
- period: '7d',
- operation: 'my_operation',
- traceId: 'my_trace_id',
- durationMs: '500',
- search: 'test',
- });
- });
- });
-});
diff --git a/spec/frontend/tracing/list_index_spec.js b/spec/frontend/tracing/list_index_spec.js
deleted file mode 100644
index a5759035c2f..00000000000
--- a/spec/frontend/tracing/list_index_spec.js
+++ /dev/null
@@ -1,37 +0,0 @@
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import ListIndex from '~/tracing/list_index.vue';
-import TracingList from '~/tracing/components/tracing_list.vue';
-import ObservabilityContainer from '~/observability/components/observability_container.vue';
-
-describe('ListIndex', () => {
- const props = {
- oauthUrl: 'https://example.com/oauth',
- tracingUrl: 'https://example.com/tracing',
- provisioningUrl: 'https://example.com/provisioning',
- };
-
- let wrapper;
-
- const mountComponent = () => {
- wrapper = shallowMountExtended(ListIndex, {
- propsData: props,
- });
- };
-
- it('renders ObservabilityContainer component', () => {
- mountComponent();
-
- const observabilityContainer = wrapper.findComponent(ObservabilityContainer);
- expect(observabilityContainer.exists()).toBe(true);
- expect(observabilityContainer.props('oauthUrl')).toBe(props.oauthUrl);
- expect(observabilityContainer.props('tracingUrl')).toBe(props.tracingUrl);
- expect(observabilityContainer.props('provisioningUrl')).toBe(props.provisioningUrl);
- });
-
- it('renders TracingList component inside ObservabilityContainer', () => {
- mountComponent();
-
- const observabilityContainer = wrapper.findComponent(ObservabilityContainer);
- expect(observabilityContainer.findComponent(TracingList).exists()).toBe(true);
- });
-});
diff --git a/spec/frontend/tracking/dispatch_snowplow_event_spec.js b/spec/frontend/tracking/dispatch_snowplow_event_spec.js
new file mode 100644
index 00000000000..5f4d065d504
--- /dev/null
+++ b/spec/frontend/tracking/dispatch_snowplow_event_spec.js
@@ -0,0 +1,76 @@
+import * as Sentry from '@sentry/browser';
+
+import { dispatchSnowplowEvent } from '~/tracking/dispatch_snowplow_event';
+import getStandardContext from '~/tracking/get_standard_context';
+import { extraContext, servicePingContext } from './mock_data';
+
+jest.mock('@sentry/browser');
+jest.mock('~/tracking/get_standard_context');
+
+const category = 'Incident Management';
+const action = 'view_incident_details';
+
+describe('dispatchSnowplowEvent', () => {
+ const snowplowMock = jest.fn();
+ global.window.snowplow = snowplowMock;
+
+ const mockStandardContext = { some: 'context' };
+ getStandardContext.mockReturnValue(mockStandardContext);
+
+ beforeEach(() => {
+ snowplowMock.mockClear();
+ Sentry.captureException.mockClear();
+ });
+
+ it('calls snowplow trackStructEvent with correct arguments', () => {
+ const data = {
+ label: 'Show Incident',
+ property: 'click_event',
+ value: '12',
+ context: extraContext,
+ extra: { namespace: 'GitLab' },
+ };
+
+ dispatchSnowplowEvent(category, action, data);
+
+ expect(snowplowMock).toHaveBeenCalledWith('trackStructEvent', {
+ category,
+ action,
+ label: data.label,
+ property: data.property,
+ value: Number(data.value),
+ context: [mockStandardContext, data.context],
+ });
+ });
+
+ it('throws an error if no category is provided', () => {
+ expect(() => {
+ dispatchSnowplowEvent(undefined, 'some-action', {});
+ }).toThrow('Tracking: no category provided for tracking.');
+ });
+
+ it('handles an array of contexts', () => {
+ const data = {
+ context: [extraContext, servicePingContext],
+ extra: { namespace: 'GitLab' },
+ };
+
+ dispatchSnowplowEvent(category, action, data);
+
+ expect(snowplowMock).toHaveBeenCalledWith('trackStructEvent', {
+ category,
+ action,
+ context: [mockStandardContext, ...data.context],
+ });
+ });
+
+ it('handles Sentry error capturing', () => {
+ snowplowMock.mockImplementation(() => {
+ throw new Error('some error');
+ });
+
+ dispatchSnowplowEvent(category, action, {});
+
+ expect(Sentry.captureException).toHaveBeenCalledTimes(1);
+ });
+});
diff --git a/spec/frontend/tracking/internal_events_spec.js b/spec/frontend/tracking/internal_events_spec.js
index ca244c25b06..6e773fde4db 100644
--- a/spec/frontend/tracking/internal_events_spec.js
+++ b/spec/frontend/tracking/internal_events_spec.js
@@ -6,9 +6,11 @@ import {
GITLAB_INTERNAL_EVENT_CATEGORY,
SERVICE_PING_SCHEMA,
LOAD_INTERNAL_EVENTS_SELECTOR,
+ USER_CONTEXT_SCHEMA,
} from '~/tracking/constants';
import * as utils from '~/tracking/utils';
import { Tracker } from '~/tracking/tracker';
+import { extraContext } from './mock_data';
jest.mock('~/api', () => ({
trackInternalEvent: jest.fn(),
@@ -21,11 +23,11 @@ jest.mock('~/tracking/utils', () => ({
Tracker.enabled = jest.fn();
+const event = 'TestEvent';
+
describe('InternalEvents', () => {
describe('track_event', () => {
it('track_event calls API.trackInternalEvent with correct arguments', () => {
- const event = 'TestEvent';
-
InternalEvents.track_event(event);
expect(API.trackInternalEvent).toHaveBeenCalledTimes(1);
@@ -35,42 +37,65 @@ describe('InternalEvents', () => {
it('track_event calls tracking.event functions with correct arguments', () => {
const trackingSpy = mockTracking(GITLAB_INTERNAL_EVENT_CATEGORY, undefined, jest.spyOn);
- const event = 'TestEvent';
-
- InternalEvents.track_event(event);
+ InternalEvents.track_event(event, { context: extraContext });
expect(trackingSpy).toHaveBeenCalledTimes(1);
expect(trackingSpy).toHaveBeenCalledWith(GITLAB_INTERNAL_EVENT_CATEGORY, event, {
- context: {
- schema: SERVICE_PING_SCHEMA,
- data: {
- event_name: event,
- data_source: 'redis_hll',
+ context: [
+ {
+ schema: SERVICE_PING_SCHEMA,
+ data: {
+ event_name: event,
+ data_source: 'redis_hll',
+ },
},
- },
+ extraContext,
+ ],
});
});
});
describe('mixin', () => {
let wrapper;
+ const Component = {
+ template: `
+ <div>
+ <button data-testid="button1" @click="handleButton1Click">Button 1</button>
+ <button data-testid="button2" @click="handleButton2Click">Button 2</button>
+ </div>
+ `,
+ methods: {
+ handleButton1Click() {
+ this.track_event(event);
+ },
+ handleButton2Click() {
+ this.track_event(event, extraContext);
+ },
+ },
+ mixins: [InternalEvents.mixin()],
+ };
beforeEach(() => {
- const Component = {
- render() {},
- mixins: [InternalEvents.mixin()],
- };
wrapper = shallowMountExtended(Component);
});
- it('this.track_event function calls InternalEvent`s track function with an event', () => {
- const event = 'TestEvent';
+ it("this.track_event function calls InternalEvent's track function with an event", async () => {
+ const trackEventSpy = jest.spyOn(InternalEvents, 'track_event');
+
+ await wrapper.findByTestId('button1').trigger('click');
+
+ expect(trackEventSpy).toHaveBeenCalledTimes(1);
+ expect(trackEventSpy).toHaveBeenCalledWith(event, {});
+ });
+
+ it("this.track_event function calls InternalEvent's track function with an event and data", async () => {
+ const data = extraContext;
const trackEventSpy = jest.spyOn(InternalEvents, 'track_event');
- wrapper.vm.track_event(event);
+ await wrapper.findByTestId('button2').trigger('click');
expect(trackEventSpy).toHaveBeenCalledTimes(1);
- expect(trackEventSpy).toHaveBeenCalledWith(event);
+ expect(trackEventSpy).toHaveBeenCalledWith(event, data);
});
});
@@ -145,4 +170,88 @@ describe('InternalEvents', () => {
});
});
});
+
+ describe('initBrowserSDK', () => {
+ beforeEach(() => {
+ window.glClient = {
+ setDocumentTitle: jest.fn(),
+ page: jest.fn(),
+ };
+ window.gl = {
+ environment: 'testing',
+ key: 'value',
+ };
+ window.gl.snowplowStandardContext = {
+ schema: 'iglu:com.gitlab/gitlab_standard',
+ data: {
+ environment: 'testing',
+ key: 'value',
+ google_analytics_id: '',
+ source: 'gitlab-javascript',
+ extra: {},
+ },
+ };
+ });
+
+ it('should not call setDocumentTitle or page methods when window.glClient is undefined', () => {
+ window.glClient = undefined;
+
+ InternalEvents.initBrowserSDK();
+
+ expect(window.glClient?.setDocumentTitle).toBeUndefined();
+ expect(window.glClient?.page).toBeUndefined();
+ });
+
+ it('should call setDocumentTitle and page methods on window.glClient when it is defined', () => {
+ const mockStandardContext = window.gl.snowplowStandardContext;
+ const userContext = {
+ schema: USER_CONTEXT_SCHEMA,
+ data: mockStandardContext?.data,
+ };
+
+ InternalEvents.initBrowserSDK();
+
+ expect(window.glClient.setDocumentTitle).toHaveBeenCalledWith('GitLab');
+ expect(window.glClient.page).toHaveBeenCalledWith({
+ title: 'GitLab',
+ context: [userContext],
+ });
+ });
+
+ it('should call page method with combined standard and experiment contexts', () => {
+ const mockStandardContext = window.gl.snowplowStandardContext;
+ const userContext = {
+ schema: USER_CONTEXT_SCHEMA,
+ data: mockStandardContext?.data,
+ };
+
+ InternalEvents.initBrowserSDK();
+
+ expect(window.glClient.page).toHaveBeenCalledWith({
+ title: 'GitLab',
+ context: [userContext],
+ });
+ });
+
+ it('should call setDocumentTitle and page methods with default data when window.gl is undefined', () => {
+ window.gl = undefined;
+
+ InternalEvents.initBrowserSDK();
+
+ expect(window.glClient.setDocumentTitle).toHaveBeenCalledWith('GitLab');
+ expect(window.glClient.page).toHaveBeenCalledWith({
+ title: 'GitLab',
+ context: [
+ {
+ schema: USER_CONTEXT_SCHEMA,
+ data: {
+ google_analytics_id: '',
+ source: 'gitlab-javascript',
+ extra: {},
+ },
+ },
+ ],
+ });
+ });
+ });
});
diff --git a/spec/frontend/tracking/mock_data.js b/spec/frontend/tracking/mock_data.js
new file mode 100644
index 00000000000..acde8676291
--- /dev/null
+++ b/spec/frontend/tracking/mock_data.js
@@ -0,0 +1,17 @@
+export const extraContext = {
+ schema: 'iglu:com.gitlab/design_management_context/jsonschema/1-0-0',
+ data: {
+ 'design-version-number': '1.0.0',
+ 'design-is-current-version': '1.0.0',
+ 'internal-object-referrer': 'https://gitlab.com',
+ 'design-collection-owner': 'GitLab',
+ },
+};
+
+export const servicePingContext = {
+ schema: 'iglu:com.gitlab/gitlab_service_ping/jsonschema/1-0-1',
+ data: {
+ event_name: 'track_incident_event',
+ data_source: 'redis_hll',
+ },
+};
diff --git a/spec/frontend/tracking/tracking_initialization_spec.js b/spec/frontend/tracking/tracking_initialization_spec.js
index 3c512cf73a7..2dc3c6ab41c 100644
--- a/spec/frontend/tracking/tracking_initialization_spec.js
+++ b/spec/frontend/tracking/tracking_initialization_spec.js
@@ -1,6 +1,6 @@
import { TRACKING_CONTEXT_SCHEMA } from '~/experimentation/constants';
import { getExperimentData, getAllExperimentContexts } from '~/experimentation/utils';
-import Tracking, { initUserTracking, initDefaultTrackers } from '~/tracking';
+import Tracking, { initUserTracking, initDefaultTrackers, InternalEvents } from '~/tracking';
import getStandardContext from '~/tracking/get_standard_context';
jest.mock('~/experimentation/utils', () => ({
@@ -15,6 +15,9 @@ describe('Tracking', () => {
let trackLoadEventsSpy;
let enableFormTracking;
let setAnonymousUrlsSpy;
+ let bindInternalEventDocumentSpy;
+ let trackInternalLoadEventsSpy;
+ let initBrowserSDKSpy;
beforeAll(() => {
window.gl = window.gl || {};
@@ -74,6 +77,15 @@ describe('Tracking', () => {
.spyOn(Tracking, 'enableFormTracking')
.mockImplementation(() => null);
setAnonymousUrlsSpy = jest.spyOn(Tracking, 'setAnonymousUrls').mockImplementation(() => null);
+ bindInternalEventDocumentSpy = jest
+ .spyOn(InternalEvents, 'bindInternalEventDocument')
+ .mockImplementation(() => null);
+ trackInternalLoadEventsSpy = jest
+ .spyOn(InternalEvents, 'trackInternalLoadEvents')
+ .mockImplementation(() => null);
+ initBrowserSDKSpy = jest
+ .spyOn(InternalEvents, 'initBrowserSDK')
+ .mockImplementation(() => null);
});
it('should activate features based on what has been enabled', () => {
@@ -117,6 +129,21 @@ describe('Tracking', () => {
expect(setAnonymousUrlsSpy).toHaveBeenCalled();
});
+ it('binds the document event handling for internal events', () => {
+ initDefaultTrackers();
+ expect(bindInternalEventDocumentSpy).toHaveBeenCalled();
+ });
+
+ it('tracks page loaded events for internal events', () => {
+ initDefaultTrackers();
+ expect(trackInternalLoadEventsSpy).toHaveBeenCalled();
+ });
+
+ it('calls initBrowserSDK', () => {
+ initDefaultTrackers();
+ expect(initBrowserSDKSpy).toHaveBeenCalled();
+ });
+
describe('when there are experiment contexts', () => {
const experimentContexts = [
{
diff --git a/spec/frontend/usage_quotas/storage/components/project_storage_app_spec.js b/spec/frontend/usage_quotas/storage/components/project_storage_app_spec.js
index 88ab51cf135..0ae01083a09 100644
--- a/spec/frontend/usage_quotas/storage/components/project_storage_app_spec.js
+++ b/spec/frontend/usage_quotas/storage/components/project_storage_app_spec.js
@@ -5,7 +5,7 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import ProjectStorageApp from '~/usage_quotas/storage/components/project_storage_app.vue';
-import UsageGraph from '~/usage_quotas/storage/components/usage_graph.vue';
+import SectionedPercentageBar from '~/usage_quotas/components/sectioned_percentage_bar.vue';
import {
descendingStorageUsageSort,
getStorageTypesFromProjectStatistics,
@@ -56,7 +56,7 @@ describe('ProjectStorageApp', () => {
const findAlert = () => wrapper.findComponent(GlAlert);
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findUsagePercentage = () => wrapper.findByTestId('total-usage');
- const findUsageGraph = () => wrapper.findComponent(UsageGraph);
+ const findSectionedPercentageBar = () => wrapper.findComponent(SectionedPercentageBar);
const findProjectDetailsTable = () => wrapper.findByTestId('usage-quotas-project-usage-details');
const findNamespaceDetailsTable = () =>
wrapper.findByTestId('usage-quotas-namespace-usage-details');
@@ -157,7 +157,7 @@ describe('ProjectStorageApp', () => {
});
});
- describe('rendering <usage-graph />', () => {
+ describe('rendering <sectioned-percentage-bar />', () => {
let mockApollo;
beforeEach(async () => {
@@ -168,16 +168,23 @@ describe('ProjectStorageApp', () => {
await waitForPromises();
});
- it('renders usage-graph component if project.statistics exists', () => {
- expect(findUsageGraph().exists()).toBe(true);
+ it('renders sectioned-percentage-bar component if project.statistics exists', () => {
+ expect(findSectionedPercentageBar().exists()).toBe(true);
});
- it('passes project.statistics to usage-graph component', () => {
- const {
- __typename,
- ...statistics
- } = mockGetProjectStorageStatisticsGraphQLResponse.data.project.statistics;
- expect(findUsageGraph().props('rootStorageStatistics')).toMatchObject(statistics);
+ it('passes processed project statistics to sectioned-percentage-bar component', () => {
+ expect(findSectionedPercentageBar().props('sections')).toMatchObject([
+ { formattedValue: '4.58 MiB', id: 'lfsObjects', label: 'LFS', value: 4800000 },
+ { formattedValue: '3.72 MiB', id: 'repository', label: 'Repository', value: 3900000 },
+ { formattedValue: '3.62 MiB', id: 'packages', label: 'Packages', value: 3800000 },
+ {
+ formattedValue: '390.63 KiB',
+ id: 'buildArtifacts',
+ label: 'Job artifacts',
+ value: 400000,
+ },
+ { formattedValue: '292.97 KiB', id: 'wiki', label: 'Wiki', value: 300000 },
+ ]);
});
});
});
diff --git a/spec/frontend/usage_quotas/storage/components/usage_graph_spec.js b/spec/frontend/usage_quotas/storage/components/usage_graph_spec.js
deleted file mode 100644
index fc116211bf0..00000000000
--- a/spec/frontend/usage_quotas/storage/components/usage_graph_spec.js
+++ /dev/null
@@ -1,125 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { numberToHumanSize } from '~/lib/utils/number_utils';
-import UsageGraph from '~/usage_quotas/storage/components/usage_graph.vue';
-
-let data;
-let wrapper;
-
-function mountComponent({ rootStorageStatistics, limit }) {
- wrapper = shallowMount(UsageGraph, {
- propsData: {
- rootStorageStatistics,
- limit,
- },
- });
-}
-function findStorageTypeUsagesSerialized() {
- return wrapper
- .findAll('[data-testid="storage-type-usage"]')
- .wrappers.map((wp) => wp.element.style.flex);
-}
-
-describe('UsageGraph', () => {
- beforeEach(() => {
- data = {
- rootStorageStatistics: {
- wikiSize: 5000,
- repositorySize: 4000,
- packagesSize: 3000,
- containerRegistrySize: 2500,
- lfsObjectsSize: 2000,
- buildArtifactsSize: 700,
- snippetsSize: 2000,
- storageSize: 17000,
- },
- limit: 2000,
- };
- mountComponent(data);
- });
-
- it('renders the legend in order', () => {
- const types = wrapper.findAll('[data-testid="storage-type-legend"]');
-
- const {
- buildArtifactsSize,
- lfsObjectsSize,
- packagesSize,
- repositorySize,
- wikiSize,
- snippetsSize,
- } = data.rootStorageStatistics;
-
- expect(types.at(0).text()).toMatchInterpolatedText(`Wiki ${numberToHumanSize(wikiSize)}`);
- expect(types.at(1).text()).toMatchInterpolatedText(
- `Repository ${numberToHumanSize(repositorySize)}`,
- );
- expect(types.at(2).text()).toMatchInterpolatedText(
- `Packages ${numberToHumanSize(packagesSize)}`,
- );
- expect(types.at(3).text()).toMatchInterpolatedText(`LFS ${numberToHumanSize(lfsObjectsSize)}`);
- expect(types.at(4).text()).toMatchInterpolatedText(
- `Snippets ${numberToHumanSize(snippetsSize)}`,
- );
- expect(types.at(5).text()).toMatchInterpolatedText(
- `Job artifacts ${numberToHumanSize(buildArtifactsSize)}`,
- );
- });
-
- describe('when storage type is not used', () => {
- beforeEach(() => {
- data.rootStorageStatistics.wikiSize = 0;
- mountComponent(data);
- });
-
- it('filters the storage type', () => {
- expect(wrapper.text()).not.toContain('Wikis');
- });
- });
-
- describe('when there is no storage usage', () => {
- beforeEach(() => {
- data.rootStorageStatistics.storageSize = 0;
- mountComponent(data);
- });
-
- it('does not render', () => {
- expect(wrapper.html()).toEqual('');
- });
- });
-
- describe('when limit is 0', () => {
- beforeEach(() => {
- data.limit = 0;
- mountComponent(data);
- });
-
- it('sets correct flex values', () => {
- expect(findStorageTypeUsagesSerialized()).toStrictEqual([
- '0.29411764705882354',
- '0.23529411764705882',
- '0.17647058823529413',
- '0.11764705882352941',
- '0.11764705882352941',
- '0.041176470588235294',
- ]);
- });
- });
-
- describe('when storage exceeds limit', () => {
- beforeEach(() => {
- data.limit = data.rootStorageStatistics.storageSize - 1;
- mountComponent(data);
- });
-
- it('does render correclty', () => {
- expect(findStorageTypeUsagesSerialized()).toStrictEqual([
- '0.29411764705882354',
- '0.23529411764705882',
- '0.17647058823529413',
- '0.11764705882352941',
- '0.11764705882352941',
- '0.041176470588235294',
- ]);
- });
- });
-});
diff --git a/spec/frontend/user_lists/components/user_list_spec.js b/spec/frontend/user_lists/components/user_list_spec.js
index 286fb9fef5f..0ed21114778 100644
--- a/spec/frontend/user_lists/components/user_list_spec.js
+++ b/spec/frontend/user_lists/components/user_list_spec.js
@@ -169,7 +169,7 @@ describe('User List', () => {
it('displays the alert message', () => {
const alert = findAlert();
- expect(alert.text()).toBe('Something went wrong on our end. Please try again!');
+ expect(alert.text()).toBe('Unable to load user list. Reload the page and try again.');
});
it('can dismiss the alert', async () => {
diff --git a/spec/frontend/users_select/test_helper.js b/spec/frontend/users_select/test_helper.js
index b38400446a9..5aae922fec2 100644
--- a/spec/frontend/users_select/test_helper.js
+++ b/spec/frontend/users_select/test_helper.js
@@ -70,7 +70,8 @@ export const findDropdownItemsModel = () =>
return {
type: 'divider',
};
- } else if (el.classList.contains('dropdown-header')) {
+ }
+ if (el.classList.contains('dropdown-header')) {
return {
type: 'dropdown-header',
text: el.textContent,
diff --git a/spec/frontend/vue_merge_request_widget/components/action_buttons.js b/spec/frontend/vue_merge_request_widget/components/action_buttons_spec.js
index 7334f061dc9..02e23b81413 100644
--- a/spec/frontend/vue_merge_request_widget/components/action_buttons.js
+++ b/spec/frontend/vue_merge_request_widget/components/action_buttons_spec.js
@@ -1,4 +1,4 @@
-import { GlButton, GlDropdownItem } from '@gitlab/ui';
+import { GlButton, GlDisclosureDropdown } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Actions from '~/vue_merge_request_widget/components/action_buttons.vue';
@@ -6,7 +6,7 @@ let wrapper;
function factory(propsData = {}) {
wrapper = shallowMount(Actions, {
- propsData: { ...propsData, widget: 'test' },
+ propsData,
});
}
@@ -33,11 +33,29 @@ describe('MR widget extension actions', () => {
});
it('renders tertiary actions in dropdown', () => {
+ const action = { text: 'hello world', href: 'https://gitlab.com', target: '_blank' };
factory({
- tertiaryButtons: [{ text: 'hello world', href: 'https://gitlab.com', target: '_blank' }],
+ tertiaryButtons: [action, action],
});
- expect(wrapper.findAllComponents(GlDropdownItem)).toHaveLength(1);
+ const component = wrapper.findComponent(GlDisclosureDropdown);
+ expect(component.exists()).toBe(true);
+ expect(component.props('items')).toMatchObject([
+ {
+ text: action.text,
+ href: action.href,
+ extraAttrs: {
+ target: action.target,
+ },
+ },
+ {
+ text: action.text,
+ href: action.href,
+ extraAttrs: {
+ target: action.target,
+ },
+ },
+ ]);
});
});
});
diff --git a/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_spec.js
index a0064224b46..35b4e222e01 100644
--- a/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_spec.js
@@ -6,7 +6,7 @@ import { trimText } from 'helpers/text_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import MRWidgetPipelineComponent from '~/vue_merge_request_widget/components/mr_widget_pipeline.vue';
-import LegacyPipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/legacy_pipeline_mini_graph.vue';
+import LegacyPipelineMiniGraph from '~/ci/pipeline_mini_graph/legacy_pipeline_mini_graph.vue';
import { SUCCESS } from '~/vue_merge_request_widget/constants';
import mockData from '../mock_data';
diff --git a/spec/frontend/vue_merge_request_widget/components/states/__snapshots__/new_ready_to_merge_spec.js.snap b/spec/frontend/vue_merge_request_widget/components/states/__snapshots__/new_ready_to_merge_spec.js.snap
index f9936f22ea3..ecf4040cbda 100644
--- a/spec/frontend/vue_merge_request_widget/components/states/__snapshots__/new_ready_to_merge_spec.js.snap
+++ b/spec/frontend/vue_merge_request_widget/components/states/__snapshots__/new_ready_to_merge_spec.js.snap
@@ -2,36 +2,30 @@
exports[`New ready to merge state component renders permission text if canMerge (false) is false 1`] = `
<div
- class="mr-widget-body media"
+ class="media mr-widget-body"
>
<status-icon-stub
status="success"
/>
-
<p
- class="media-body gl-m-0! gl-font-weight-bold gl-text-gray-900!"
+ class="gl-font-weight-bold gl-m-0! gl-text-gray-900! media-body"
>
-
- Ready to merge by members who can write to the target branch.
-
+ Ready to merge by members who can write to the target branch.
</p>
</div>
`;
exports[`New ready to merge state component renders permission text if canMerge (true) is false 1`] = `
<div
- class="mr-widget-body media"
+ class="media mr-widget-body"
>
<status-icon-stub
status="success"
/>
-
<p
- class="media-body gl-m-0! gl-font-weight-bold gl-text-gray-900!"
+ class="gl-font-weight-bold gl-m-0! gl-text-gray-900! media-body"
>
-
- Ready to merge!
-
+ Ready to merge!
</p>
</div>
`;
diff --git a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_merged_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_merged_spec.js
index e44e2834a0e..5efb1dcce42 100644
--- a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_merged_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_merged_spec.js
@@ -1,4 +1,4 @@
-import { getByRole } from '@testing-library/dom';
+import { getAllByRole } from '@testing-library/dom';
import { nextTick } from 'vue';
import { mount } from '@vue/test-utils';
import waitForPromises from 'helpers/wait_for_promises';
@@ -132,7 +132,7 @@ describe('MRWidgetMerged', () => {
createComponent();
const eventHubSpy = jest.spyOn(modalEventHub, '$emit');
- getByRole(wrapper.element, 'button', { name: /Revert/i }).click();
+ getAllByRole(wrapper.element, 'button', { name: /Revert/i })[0].click();
expect(eventHubSpy).toHaveBeenCalledWith(OPEN_REVERT_MODAL);
});
@@ -141,7 +141,7 @@ describe('MRWidgetMerged', () => {
createComponent();
const eventHubSpy = jest.spyOn(modalEventHub, '$emit');
- getByRole(wrapper.element, 'button', { name: /Cherry-pick/i }).click();
+ getAllByRole(wrapper.element, 'button', { name: /Cherry-pick/i })[0].click();
expect(eventHubSpy).toHaveBeenCalledWith(OPEN_CHERRY_PICK_MODAL);
});
diff --git a/spec/frontend/vue_merge_request_widget/components/widget/__snapshots__/dynamic_content_spec.js.snap b/spec/frontend/vue_merge_request_widget/components/widget/__snapshots__/dynamic_content_spec.js.snap
index ce4bf11f16b..d5d3f56e451 100644
--- a/spec/frontend/vue_merge_request_widget/components/widget/__snapshots__/dynamic_content_spec.js.snap
+++ b/spec/frontend/vue_merge_request_widget/components/widget/__snapshots__/dynamic_content_spec.js.snap
@@ -1,63 +1,133 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`~/vue_merge_request_widget/components/widget/dynamic_content.vue renders given data 1`] = `
-"<div class=\\"gl-display-flex gl-border-t gl-py-3 gl-pl-7 gl-align-items-baseline\\">
- <!---->
- <div class=\\"gl-w-full gl-min-w-0\\">
- <div class=\\"gl-display-flex\\">
- <div class=\\"gl-mb-2\\"><strong class=\\"gl-display-block\\">This is a header</strong><span class=\\"gl-display-block\\">This is a subheader</span></div>
- <div class=\\"gl-ml-auto gl-display-flex gl-align-items-baseline\\">
- <help-popover-stub options=\\"[object Object]\\" icon=\\"information-o\\" triggerclass=\\"\\" class=\\"\\">
- <p class=\\"gl-mb-0\\">Widget help popover content</p>
- <!---->
+<div
+ class="gl-align-items-baseline gl-border-t gl-display-flex gl-pl-7 gl-py-3"
+>
+ <div
+ class="gl-min-w-0 gl-w-full"
+ >
+ <div
+ class="gl-display-flex"
+ >
+ <div
+ class="gl-mb-2"
+ >
+ <strong
+ class="gl-display-block"
+ >
+ This is a header
+ </strong>
+ <span
+ class="gl-display-block"
+ >
+ This is a subheader
+ </span>
+ </div>
+ <div
+ class="gl-align-items-baseline gl-display-flex gl-ml-auto"
+ >
+ <help-popover-stub
+ icon="information-o"
+ options="[object Object]"
+ triggerclass=""
+ >
+ <p
+ class="gl-mb-0"
+ >
+ Widget help popover content
+ </p>
</help-popover-stub>
- <!---->
</div>
</div>
- <div class=\\"gl-display-flex gl-align-items-baseline\\">
- <status-icon-stub level=\\"2\\" name=\\"MyWidget\\" iconname=\\"success\\"></status-icon-stub>
- <div class=\\"gl-w-full gl-display-flex\\">
- <div class=\\"gl-display-flex gl-flex-grow-1\\">
- <div class=\\"gl-display-flex gl-flex-grow-1 gl-align-items-baseline\\">
+ <div
+ class="gl-align-items-baseline gl-display-flex"
+ >
+ <status-icon-stub
+ iconname="success"
+ level="2"
+ name="MyWidget"
+ />
+ <div
+ class="gl-display-flex gl-w-full"
+ >
+ <div
+ class="gl-display-flex gl-flex-grow-1"
+ >
+ <div
+ class="gl-align-items-baseline gl-display-flex gl-flex-grow-1"
+ >
<div>
- <p class=\\"gl-mb-0 gl-mr-1\\">Main text for the row</p>
- <gl-link-stub href=\\"https://gitlab.com\\">Optional link to display after text</gl-link-stub>
- <!---->
+ <p
+ class="gl-mb-0 gl-mr-1"
+ >
+ Main text for the row
+ </p>
+ <gl-link-stub
+ href="https://gitlab.com"
+ >
+ Optional link to display after text
+ </gl-link-stub>
</div>
- <gl-badge-stub size=\\"md\\" variant=\\"info\\" iconsize=\\"md\\">
+ <gl-badge-stub
+ iconsize="md"
+ size="md"
+ variant="info"
+ >
Badge is optional. Text to be displayed inside badge
</gl-badge-stub>
</div>
- <!---->
- <p class=\\"gl-m-0 gl-font-sm\\">Optional: Smaller sub-text to be displayed below the main text</p>
+ <p
+ class="gl-font-sm gl-m-0"
+ >
+ Optional: Smaller sub-text to be displayed below the main text
+ </p>
</div>
- <ul class=\\"gl-m-0 gl-p-0 gl-list-style-none\\">
+ <ul
+ class="gl-list-style-none gl-m-0 gl-p-0"
+ >
<li>
- <div class=\\"gl-display-flex gl-align-items-center\\" data-qa-selector=\\"child_content\\">
- <!---->
- <div class=\\"gl-w-full gl-min-w-0\\">
- <div class=\\"gl-display-flex\\">
- <div class=\\"gl-mb-2\\"><strong class=\\"gl-display-block\\">Child row header</strong>
- <!---->
+ <div
+ class="gl-align-items-center gl-display-flex"
+ data-qa-selector="child_content"
+ >
+ <div
+ class="gl-min-w-0 gl-w-full"
+ >
+ <div
+ class="gl-display-flex"
+ >
+ <div
+ class="gl-mb-2"
+ >
+ <strong
+ class="gl-display-block"
+ >
+ Child row header
+ </strong>
</div>
- <!---->
</div>
- <div class=\\"gl-display-flex gl-align-items-baseline\\">
- <!---->
- <div class=\\"gl-w-full gl-display-flex\\">
- <div class=\\"gl-display-flex gl-flex-grow-1\\">
- <div class=\\"gl-display-flex gl-flex-grow-1 gl-align-items-baseline\\">
+ <div
+ class="gl-align-items-baseline gl-display-flex"
+ >
+ <div
+ class="gl-display-flex gl-w-full"
+ >
+ <div
+ class="gl-display-flex gl-flex-grow-1"
+ >
+ <div
+ class="gl-align-items-baseline gl-display-flex gl-flex-grow-1"
+ >
<div>
- <p class=\\"gl-mb-0 gl-mr-1\\">This is recursive. It will be listed in level 3.</p>
- <!---->
- <!---->
+ <p
+ class="gl-mb-0 gl-mr-1"
+ >
+ This is recursive. It will be listed in level 3.
+ </p>
</div>
- <!---->
</div>
- <!---->
- <!---->
</div>
- <!---->
</div>
</div>
</div>
@@ -67,5 +137,5 @@ exports[`~/vue_merge_request_widget/components/widget/dynamic_content.vue render
</div>
</div>
</div>
-</div>"
+</div>
`;
diff --git a/spec/frontend/vue_merge_request_widget/components/widget/app_spec.js b/spec/frontend/vue_merge_request_widget/components/widget/app_spec.js
index bf318cd6b88..205824c3edd 100644
--- a/spec/frontend/vue_merge_request_widget/components/widget/app_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/widget/app_spec.js
@@ -1,13 +1,23 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
import App from '~/vue_merge_request_widget/components/widget/app.vue';
+import MrSecurityWidgetCE from '~/vue_merge_request_widget/extensions/security_reports/mr_widget_security_reports.vue';
+import MrTestReportWidget from '~/vue_merge_request_widget/extensions/test_report/index.vue';
+import MrTerraformWidget from '~/vue_merge_request_widget/extensions/terraform/index.vue';
+import MrCodeQualityWidget from '~/vue_merge_request_widget/extensions/code_quality/index.vue';
describe('MR Widget App', () => {
let wrapper;
- const createComponent = () => {
+ const createComponent = ({ mr = {} } = {}) => {
wrapper = shallowMountExtended(App, {
propsData: {
- mr: {},
+ mr: {
+ pipeline: {
+ path: '/path/to/pipeline',
+ },
+ ...mr,
+ },
},
});
};
@@ -16,4 +26,35 @@ describe('MR Widget App', () => {
createComponent();
expect(wrapper.findByTestId('mr-widget-app').exists()).toBe(true);
});
+
+ describe('MRSecurityWidget', () => {
+ it('mounts MrSecurityWidgetCE', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ expect(wrapper.findComponent(MrSecurityWidgetCE).exists()).toBe(true);
+ });
+ });
+
+ describe.each`
+ widgetName | widget | endpoint
+ ${'testReportWidget'} | ${MrTestReportWidget} | ${'testResultsPath'}
+ ${'terraformPlansWidget'} | ${MrTerraformWidget} | ${'terraformReportsPath'}
+ ${'codeQualityWidget'} | ${MrCodeQualityWidget} | ${'codequalityReportsPath'}
+ `('$widgetName', ({ widget, endpoint }) => {
+ it(`is mounted when ${endpoint} is defined`, async () => {
+ createComponent({ mr: { [endpoint]: `path/to/${endpoint}` } });
+ await waitForPromises();
+
+ expect(wrapper.findComponent(widget).exists()).toBe(true);
+ });
+
+ it(`is not mounted when ${endpoint} is not defined`, async () => {
+ createComponent();
+ await waitForPromises();
+
+ expect(wrapper.findComponent(widget).exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/vue_merge_request_widget/deployment/deployment_actions_spec.js b/spec/frontend/vue_merge_request_widget/deployment/deployment_actions_spec.js
index b901b80e8bf..6f5e08a0829 100644
--- a/spec/frontend/vue_merge_request_widget/deployment/deployment_actions_spec.js
+++ b/spec/frontend/vue_merge_request_widget/deployment/deployment_actions_spec.js
@@ -2,7 +2,6 @@ import { mount } from '@vue/test-utils';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
-import { visitUrl } from '~/lib/utils/url_utility';
import {
CREATED,
MANUAL_DEPLOY,
@@ -20,6 +19,7 @@ import {
deploymentMockData,
playDetails,
retryDetails,
+ mockRedeployProps,
} from './deployment_mock_data';
jest.mock('~/alert');
@@ -36,7 +36,6 @@ describe('DeploymentAction component', () => {
const findStopButton = () => wrapper.find('.js-stop-env');
const findDeployButton = () => wrapper.find('.js-manual-deploy-action');
- const findManualRedeployButton = () => wrapper.find('.js-manual-redeploy-action');
const findRedeployButton = () => wrapper.find('.js-redeploy-action');
beforeEach(() => {
@@ -78,23 +77,25 @@ describe('DeploymentAction component', () => {
expect(findDeployButton().exists()).toBe(false);
});
});
-
- describe('when there is no retry_path in details', () => {
- it('the manual redeploy button does not appear', () => {
- expect(findManualRedeployButton().exists()).toBe(false);
- });
- });
});
describe('when conditions are met', () => {
describe.each`
- configConst | computedDeploymentStatus | displayConditionChanges | finderFn | endpoint
- ${STOPPING} | ${CREATED} | ${{}} | ${findStopButton} | ${deploymentMockData.stop_url}
- ${DEPLOYING} | ${MANUAL_DEPLOY} | ${playDetails} | ${findDeployButton} | ${playDetails.playable_build.play_path}
- ${REDEPLOYING} | ${FAILED} | ${retryDetails} | ${findManualRedeployButton} | ${retryDetails.playable_build.retry_path}
+ configConst | computedDeploymentStatus | displayConditionChanges | finderFn | endpoint | props
+ ${STOPPING} | ${CREATED} | ${{}} | ${findStopButton} | ${deploymentMockData.stop_url} | ${{}}
+ ${DEPLOYING} | ${MANUAL_DEPLOY} | ${playDetails} | ${findDeployButton} | ${playDetails.playable_build.play_path} | ${{}}
+ ${REDEPLOYING} | ${FAILED} | ${{}} | ${findRedeployButton} | ${retryDetails.playable_build.retry_path} | ${mockRedeployProps}
+ ${REDEPLOYING} | ${SUCCESS} | ${{}} | ${findRedeployButton} | ${retryDetails.playable_build.retry_path} | ${mockRedeployProps}
`(
'$configConst action',
- ({ configConst, computedDeploymentStatus, displayConditionChanges, finderFn, endpoint }) => {
+ ({
+ configConst,
+ computedDeploymentStatus,
+ displayConditionChanges,
+ finderFn,
+ endpoint,
+ props,
+ }) => {
describe(`${configConst} action`, () => {
beforeEach(() => {
factory({
@@ -103,6 +104,7 @@ describe('DeploymentAction component', () => {
deployment: {
...deploymentMockData,
details: displayConditionChanges,
+ ...props,
},
},
});
@@ -163,25 +165,6 @@ describe('DeploymentAction component', () => {
expect(createAlert).not.toHaveBeenCalled();
});
- describe('response includes redirect_url', () => {
- const url = '/root/example';
- beforeEach(async () => {
- executeActionSpy.mockResolvedValueOnce({
- data: { redirect_url: url },
- });
-
- await waitForPromises();
-
- confirmAction.mockResolvedValueOnce(true);
- finderFn().trigger('click');
- });
-
- it('calls visit url with the redirect_url', () => {
- expect(visitUrl).toHaveBeenCalled();
- expect(visitUrl).toHaveBeenCalledWith(url);
- });
- });
-
describe('it should call the executeAction method', () => {
beforeEach(async () => {
jest.spyOn(wrapper.vm, 'executeAction').mockImplementation();
@@ -234,7 +217,7 @@ describe('DeploymentAction component', () => {
);
});
- describe('with the reviewAppsRedeployMrWidget feature flag turned on', () => {
+ describe('redeploy action', () => {
beforeEach(() => {
factory({
propsData: {
@@ -246,11 +229,6 @@ describe('DeploymentAction component', () => {
environment_available: false,
},
},
- provide: {
- glFeatures: {
- reviewAppsRedeployMrWidget: true,
- },
- },
});
});
@@ -304,24 +282,6 @@ describe('DeploymentAction component', () => {
expect(createAlert).not.toHaveBeenCalled();
});
- describe('response includes redirect_url', () => {
- const url = '/root/example';
- beforeEach(async () => {
- executeActionSpy.mockResolvedValueOnce({
- data: { redirect_url: url },
- });
-
- await waitForPromises();
-
- confirmAction.mockResolvedValueOnce(true);
- findRedeployButton().trigger('click');
- });
-
- it('does not call visit url', () => {
- expect(visitUrl).not.toHaveBeenCalled();
- });
- });
-
describe('it should call the executeAction method', () => {
beforeEach(async () => {
jest.spyOn(wrapper.vm, 'executeAction').mockImplementation();
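
The deployment actions spec above exercises a confirm-then-execute flow: clicking the action opens `confirmAction`, and the underlying request only fires once the modal resolves to `true`. The sketch below reproduces that flow with a stand-in button component; the selector and method names are placeholders, not the real deployment component's API.

```javascript
import { mount } from '@vue/test-utils';
import waitForPromises from 'helpers/wait_for_promises';
import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';

jest.mock('~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal');

// Stand-in component: one redeploy button guarded by a confirmation modal.
const RedeployButton = {
  methods: {
    executeAction() {},
    async onClick() {
      if (await confirmAction('Redeploy to this environment?')) {
        this.executeAction();
      }
    },
  },
  render(h) {
    return h('button', { class: 'js-redeploy-action', on: { click: this.onClick } }, 'Re-deploy');
  },
};

describe('redeploy confirmation (sketch)', () => {
  it('executes the action only after the modal resolves to true', async () => {
    const wrapper = mount(RedeployButton);
    const executeActionSpy = jest.spyOn(wrapper.vm, 'executeAction').mockImplementation();

    confirmAction.mockResolvedValueOnce(true);
    wrapper.find('.js-redeploy-action').trigger('click');
    await waitForPromises();

    expect(confirmAction).toHaveBeenCalled();
    expect(executeActionSpy).toHaveBeenCalled();
  });

  it('does nothing when the modal is dismissed', async () => {
    const wrapper = mount(RedeployButton);
    const executeActionSpy = jest.spyOn(wrapper.vm, 'executeAction').mockImplementation();

    confirmAction.mockResolvedValueOnce(false);
    wrapper.find('.js-redeploy-action').trigger('click');
    await waitForPromises();

    expect(executeActionSpy).not.toHaveBeenCalled();
  });
});
```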
diff --git a/spec/frontend/vue_merge_request_widget/deployment/deployment_mock_data.js b/spec/frontend/vue_merge_request_widget/deployment/deployment_mock_data.js
index 374fe4e1b95..2c6a40c6e16 100644
--- a/spec/frontend/vue_merge_request_widget/deployment/deployment_mock_data.js
+++ b/spec/frontend/vue_merge_request_widget/deployment/deployment_mock_data.js
@@ -74,4 +74,9 @@ const retryDetails = {
},
};
-export { actionButtonMocks, deploymentMockData, playDetails, retryDetails };
+const mockRedeployProps = {
+ retry_url: retryDetails.playable_build.retry_path,
+ environment_available: false,
+};
+
+export { actionButtonMocks, deploymentMockData, playDetails, retryDetails, mockRedeployProps };
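
The new `mockRedeployProps` export is meant to be spread over `deploymentMockData` by the consuming specs, rather than each spec re-declaring the retry URL and availability flag. A short hedged usage sketch:

```javascript
import { deploymentMockData, mockRedeployProps } from './deployment_mock_data';

it('builds a redeployable deployment from the shared fixtures', () => {
  // Spreading the shared props keeps every spec aligned with one fixture source.
  const deployment = { ...deploymentMockData, ...mockRedeployProps };

  expect(deployment.retry_url).toBe(mockRedeployProps.retry_url);
  expect(deployment.environment_available).toBe(false);
});
```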
diff --git a/spec/frontend/vue_merge_request_widget/deployment/deployment_spec.js b/spec/frontend/vue_merge_request_widget/deployment/deployment_spec.js
index 234491c531a..0a96feb184f 100644
--- a/spec/frontend/vue_merge_request_widget/deployment/deployment_spec.js
+++ b/spec/frontend/vue_merge_request_widget/deployment/deployment_spec.js
@@ -10,7 +10,12 @@ import {
import DeploymentComponent from '~/vue_merge_request_widget/components/deployment/deployment.vue';
import DeploymentInfo from '~/vue_merge_request_widget/components/deployment/deployment_info.vue';
import DeploymentViewButton from '~/vue_merge_request_widget/components/deployment/deployment_view_button.vue';
-import { deploymentMockData, playDetails, retryDetails } from './deployment_mock_data';
+import {
+ deploymentMockData,
+ playDetails,
+ retryDetails,
+ mockRedeployProps,
+} from './deployment_mock_data';
describe('Deployment component', () => {
let wrapper;
@@ -46,7 +51,6 @@ describe('Deployment component', () => {
};
const defaultGroup = ['.js-deploy-url', '.js-stop-env'];
const manualDeployGroup = ['.js-manual-deploy-action', ...defaultGroup];
- const manualRedeployGroup = ['.js-manual-redeploy-action', ...defaultGroup];
describe.each`
status | previous | deploymentDetails | text | actionButtons
@@ -62,7 +66,7 @@ describe('Deployment component', () => {
${SUCCESS} | ${true} | ${noDetails} | ${'Deployed to'} | ${defaultGroup}
${SUCCESS} | ${false} | ${deployDetail} | ${'Deployed to'} | ${defaultGroup}
${SUCCESS} | ${false} | ${noDetails} | ${'Deployed to'} | ${defaultGroup}
- ${FAILED} | ${true} | ${retryDetail} | ${'Failed to deploy to'} | ${manualRedeployGroup}
+ ${FAILED} | ${true} | ${retryDetail} | ${'Failed to deploy to'} | ${defaultGroup}
${FAILED} | ${true} | ${noDetails} | ${'Failed to deploy to'} | ${defaultGroup}
${FAILED} | ${false} | ${retryDetail} | ${'Failed to deploy to'} | ${noActions}
${FAILED} | ${false} | ${noDetails} | ${'Failed to deploy to'} | ${noActions}
@@ -139,6 +143,27 @@ describe('Deployment component', () => {
}
},
);
+
+ describe('redeploy action', () => {
+ beforeEach(() => {
+ factory({
+ propsData: {
+ showMetrics: false,
+ deployment: {
+ ...deploymentMockData,
+ ...mockRedeployProps,
+ },
+ },
+ });
+ });
+
+ it('shows only the redeploy button', () => {
+ expect(wrapper.find('.js-redeploy-action').exists()).toBe(true);
+ expect(wrapper.find('.js-deploy-url').exists()).toBe(false);
+ expect(wrapper.find('.js-stop-env').exists()).toBe(false);
+ expect(wrapper.find('.js-manual-deploy-action').exists()).toBe(false);
+ });
+ });
});
describe('hasExternalUrls', () => {
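
The new "redeploy action" block above asserts both the presence of the redeploy button and the absence of every other action selector. One way to consolidate those four `exists()` checks is a small helper that walks the full selector list; the stand-in component and helper below are illustrative only, not part of the spec.

```javascript
import { mount } from '@vue/test-utils';

// All of the action selectors asserted in the spec above.
const actionSelectors = [
  '.js-deploy-url',
  '.js-stop-env',
  '.js-manual-deploy-action',
  '.js-redeploy-action',
];

// Assert that exactly the expected subset of action buttons is rendered.
const expectOnlyActions = (wrapper, expected) => {
  actionSelectors.forEach((selector) => {
    expect(wrapper.find(selector).exists()).toBe(expected.includes(selector));
  });
};

// Stand-in component that renders only the redeploy button, mimicking the
// "redeploy action" scenario above.
const RedeployOnly = {
  render(h) {
    return h('div', [h('button', { class: 'js-redeploy-action' })]);
  },
};

it('shows only the redeploy button', () => {
  expectOnlyActions(mount(RedeployOnly), ['.js-redeploy-action']);
});
```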
diff --git a/spec/frontend/vue_merge_request_widget/extensions/test_report/index_spec.js b/spec/frontend/vue_merge_request_widget/extensions/test_report/index_spec.js
index d2d622d0534..88c348629cb 100644
--- a/spec/frontend/vue_merge_request_widget/extensions/test_report/index_spec.js
+++ b/spec/frontend/vue_merge_request_widget/extensions/test_report/index_spec.js
@@ -1,19 +1,17 @@
import { nextTick } from 'vue';
import MockAdapter from 'axios-mock-adapter';
-import testReportExtension from '~/vue_merge_request_widget/extensions/test_report';
+import testReportExtension from '~/vue_merge_request_widget/extensions/test_report/index.vue';
import { i18n } from '~/vue_merge_request_widget/extensions/test_report/constants';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import { trimText } from 'helpers/text_helper';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
-import extensionsContainer from '~/vue_merge_request_widget/components/extensions/container';
-import { registerExtension } from '~/vue_merge_request_widget/components/extensions';
import {
HTTP_STATUS_INTERNAL_SERVER_ERROR,
HTTP_STATUS_NO_CONTENT,
HTTP_STATUS_OK,
} from '~/lib/utils/http_status';
-import TestCaseDetails from '~/pipelines/components/test_reports/test_case_details.vue';
+import TestCaseDetails from '~/ci/pipeline_details/test_reports/test_case_details.vue';
import { failedReport } from 'jest/ci/reports/mock_data/mock_data';
import mixedResultsTestReports from 'jest/ci/reports/mock_data/new_and_fixed_failures_report.json';
@@ -34,12 +32,10 @@ describe('Test report extension', () => {
let wrapper;
let mock;
- registerExtension(testReportExtension);
-
const endpoint = '/root/repo/-/merge_requests/4/test_reports.json';
const mockApi = (statusCode, data = mixedResultsTestReports) => {
- mock.onGet(endpoint).reply(statusCode, data);
+ mock.onGet(endpoint).reply(statusCode, data, {});
};
const findToggleCollapsedButton = () => wrapper.findByTestId('toggle-button');
@@ -49,7 +45,7 @@ describe('Test report extension', () => {
const findModal = () => wrapper.findComponent(TestCaseDetails);
const createComponent = () => {
- wrapper = mountExtended(extensionsContainer, {
+ wrapper = mountExtended(testReportExtension, {
propsData: {
mr: {
testResultsPath: endpoint,
@@ -84,7 +80,7 @@ describe('Test report extension', () => {
expect(wrapper.text()).toContain(i18n.loading);
});
- it('with a 204 response, continues to display loading state', async () => {
+ it('with a "no content" response, continues to display loading state', async () => {
mockApi(HTTP_STATUS_NO_CONTENT, '');
createComponent();
@@ -269,7 +265,7 @@ describe('Test report extension', () => {
beforeEach(async () => {
await createExpandedWidgetWithData();
- wrapper.findByTestId('modal-link').trigger('click');
+ wrapper.findByTestId('extension-actions-button').trigger('click');
});
it('opens a modal to display test case details', () => {
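
The test report extension spec stubs its endpoint with `axios-mock-adapter` against GitLab's shared axios instance and treats a 204 "no content" response as "report still being collected" while a 200 carries the payload. A minimal sketch of that stubbing shape, with a placeholder endpoint and payload:

```javascript
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK, HTTP_STATUS_NO_CONTENT } from '~/lib/utils/http_status';

// Placeholder endpoint; the real spec uses the merge request's test_reports.json path.
const endpoint = '/group/project/-/merge_requests/1/test_reports.json';

describe('report polling (sketch)', () => {
  let mock;

  beforeEach(() => {
    mock = new MockAdapter(axios);
  });

  afterEach(() => {
    mock.restore();
  });

  it('distinguishes "still collecting" (204) from a finished report (200)', async () => {
    mock
      .onGet(endpoint)
      .replyOnce(HTTP_STATUS_NO_CONTENT, '')
      .onGet(endpoint)
      .reply(HTTP_STATUS_OK, { suites: [] });

    const first = await axios.get(endpoint);
    const second = await axios.get(endpoint);

    expect(first.status).toBe(HTTP_STATUS_NO_CONTENT);
    expect(second.status).toBe(HTTP_STATUS_OK);
  });
});
```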
diff --git a/spec/frontend/vue_merge_request_widget/mock_data.js b/spec/frontend/vue_merge_request_widget/mock_data.js
index 5b3f533f34e..34f147307fc 100644
--- a/spec/frontend/vue_merge_request_widget/mock_data.js
+++ b/spec/frontend/vue_merge_request_widget/mock_data.js
@@ -352,7 +352,7 @@ export default {
merge_request_widget_path: '/root/acets-app/-/merge_requests/22/widget.json',
merge_request_cached_widget_path: '/cached.json',
merge_check_path: '/root/acets-app/-/merge_requests/22/merge_check',
- ci_environments_status_url: '/root/acets-app/-/merge_requests/22/ci_environments_status',
+ ci_environments_status_path: '/root/acets-app/-/merge_requests/22/ci_environments_status',
project_archived: false,
default_merge_commit_message_with_description:
"Merge branch 'daaaa' into 'main'\n\nUpdate README.md\n\nSee merge request !22",
@@ -452,3 +452,169 @@ export const mockStore = {
hasCI: true,
exposedArtifactsPath: 'exposed_artifacts.json',
};
+
+export const mockMergePipeline = {
+ id: 127,
+ user: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url: null,
+ web_url: 'http://localhost:3000/root',
+ status_tooltip_html: null,
+ path: '/root',
+ },
+ active: true,
+ coverage: null,
+ source: 'push',
+ created_at: '2018-10-22T11:41:35.186Z',
+ updated_at: '2018-10-22T11:41:35.433Z',
+ path: '/root/ci-web-terminal/pipelines/127',
+ flags: {
+ latest: true,
+ stuck: true,
+ auto_devops: false,
+ yaml_errors: false,
+ retryable: false,
+ cancelable: true,
+ failure_reason: false,
+ },
+ details: {
+ status: {
+ icon: 'status_pending',
+ text: 'pending',
+ label: 'pending',
+ group: 'pending',
+ tooltip: 'pending',
+ has_details: true,
+ details_path: '/root/ci-web-terminal/pipelines/127',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_pending-5bdf338420e5221ca24353b6bff1c9367189588750632e9a871b7af09ff6a2ae.png',
+ },
+ duration: null,
+ finished_at: null,
+ stages: [
+ {
+ name: 'test',
+ title: 'test: pending',
+ status: {
+ icon: 'status_pending',
+ text: 'pending',
+ label: 'pending',
+ group: 'pending',
+ tooltip: 'pending',
+ has_details: true,
+ details_path: '/root/ci-web-terminal/pipelines/127#test',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_pending-5bdf338420e5221ca24353b6bff1c9367189588750632e9a871b7af09ff6a2ae.png',
+ },
+ path: '/root/ci-web-terminal/pipelines/127#test',
+ dropdown_path: '/root/ci-web-terminal/pipelines/127/stage.json?stage=test',
+ },
+ ],
+ artifacts: [],
+ manual_actions: [],
+ scheduled_actions: [],
+ },
+ ref: {
+ name: 'main',
+ path: '/root/ci-web-terminal/commits/main',
+ tag: false,
+ branch: true,
+ },
+ commit: {
+ id: 'aa1939133d373c94879becb79d91828a892ee319',
+ short_id: 'aa193913',
+ title: "Merge branch 'main-test' into 'main'",
+ created_at: '2018-10-22T11:41:33.000Z',
+ parent_ids: [
+ '4622f4dd792468993003caf2e3be978798cbe096',
+ '76598df914cdfe87132d0c3c40f80db9fa9396a4',
+ ],
+ message:
+ "Merge branch 'main-test' into 'main'\n\nUpdate .gitlab-ci.yml\n\nSee merge request root/ci-web-terminal!1",
+ author_name: 'Administrator',
+ author_email: 'admin@example.com',
+ authored_date: '2018-10-22T11:41:33.000Z',
+ committer_name: 'Administrator',
+ committer_email: 'admin@example.com',
+ committed_date: '2018-10-22T11:41:33.000Z',
+ author: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url: null,
+ web_url: 'http://localhost:3000/root',
+ status_tooltip_html: null,
+ path: '/root',
+ },
+ author_gravatar_url: null,
+ commit_url:
+ 'http://localhost:3000/root/ci-web-terminal/commit/aa1939133d373c94879becb79d91828a892ee319',
+ commit_path: '/root/ci-web-terminal/commit/aa1939133d373c94879becb79d91828a892ee319',
+ },
+ cancel_path: '/root/ci-web-terminal/pipelines/127/cancel',
+};
+
+export const mockPostMergeDeployments = [
+ {
+ id: 15,
+ name: 'review/diplo',
+ url: '/root/acets-review-apps/environments/15',
+ stop_url: '/root/acets-review-apps/environments/15/stop',
+ metrics_url: '/root/acets-review-apps/environments/15/deployments/1/metrics',
+ metrics_monitoring_url: '/root/acets-review-apps/environments/15/metrics',
+ external_url: 'http://diplo.',
+ external_url_formatted: 'diplo.',
+ deployed_at: '2017-03-22T22:44:42.258Z',
+ deployed_at_formatted: 'Mar 22, 2017 10:44pm',
+ changes: [
+ {
+ path: 'index.html',
+ external_url: 'http://root-main-patch-91341.volatile-watch.surge.sh/index.html',
+ },
+ {
+ path: 'imgs/gallery.html',
+ external_url: 'http://root-main-patch-91341.volatile-watch.surge.sh/imgs/gallery.html',
+ },
+ {
+ path: 'about/',
+ external_url: 'http://root-main-patch-91341.volatile-watch.surge.sh/about/',
+ },
+ ],
+ status: 'success',
+ },
+];
+
+export const mockDeployment = {
+ id: 15,
+ name: 'review/diplo',
+ url: '/root/acets-review-apps/environments/15',
+ stop_url: '/root/acets-review-apps/environments/15/stop',
+ metrics_url: '/root/acets-review-apps/environments/15/deployments/1/metrics',
+ metrics_monitoring_url: '/root/acets-review-apps/environments/15/metrics',
+ external_url: 'http://diplo.',
+ external_url_formatted: 'diplo.',
+ deployed_at: '2017-03-22T22:44:42.258Z',
+ deployed_at_formatted: 'Mar 22, 2017 10:44pm',
+ changes: [
+ {
+ path: 'index.html',
+ external_url: 'http://root-main-patch-91341.volatile-watch.surge.sh/index.html',
+ },
+ {
+ path: 'imgs/gallery.html',
+ external_url: 'http://root-main-patch-91341.volatile-watch.surge.sh/imgs/gallery.html',
+ },
+ {
+ path: 'about/',
+ external_url: 'http://root-main-patch-91341.volatile-watch.surge.sh/about/',
+ },
+ ],
+ status: SUCCESS,
+ environment_available: true,
+};
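
These new exports are intended to flow into `mr_widget_options_spec.js` through `updatedMrData` instead of being re-declared inline in each test. A short hedged usage sketch:

```javascript
import { mockDeployment, mockMergePipeline, mockPostMergeDeployments } from './mock_data';

it('assembles merged-state widget data from the shared fixtures', () => {
  const updatedMrData = {
    state: 'merged',
    merge_pipeline: mockMergePipeline,
    post_merge_deployments: mockPostMergeDeployments,
    // Two deployments differing only by id, as in the "renders multiple deployments" test.
    deployments: [mockDeployment, { ...mockDeployment, id: mockDeployment.id + 1 }],
  };

  expect(updatedMrData.deployments.map(({ id }) => id)).toEqual([15, 16]);
});
```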
diff --git a/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js b/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js
index ecb5a8448f9..09f58f17fd9 100644
--- a/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js
+++ b/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js
@@ -21,12 +21,15 @@ import {
registerExtension,
registeredExtensions,
} from '~/vue_merge_request_widget/components/extensions';
+import { STATUS_CLOSED, STATUS_OPEN } from '~/issues/constants';
import { STATE_QUERY_POLLING_INTERVAL_BACKOFF } from '~/vue_merge_request_widget/constants';
import { SUCCESS } from '~/vue_merge_request_widget/components/deployment/constants';
import eventHub from '~/vue_merge_request_widget/event_hub';
import MrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options.vue';
import Approvals from '~/vue_merge_request_widget/components/approvals/approvals.vue';
+import ConflictsState from '~/vue_merge_request_widget/components/states/mr_widget_conflicts.vue';
import Preparing from '~/vue_merge_request_widget/components/states/mr_widget_preparing.vue';
+import ShaMismatch from '~/vue_merge_request_widget/components/states/sha_mismatch.vue';
import WidgetContainer from '~/vue_merge_request_widget/components/widget/app.vue';
import WidgetSuggestPipeline from '~/vue_merge_request_widget/components/mr_widget_suggest_pipeline.vue';
import MrWidgetAlertMessage from '~/vue_merge_request_widget/components/mr_widget_alert_message.vue';
@@ -40,7 +43,7 @@ import approvedBySubscription from 'ee_else_ce/vue_merge_request_widget/componen
import userPermissionsQuery from '~/vue_merge_request_widget/queries/permissions.query.graphql';
import conflictsStateQuery from '~/vue_merge_request_widget/queries/states/conflicts.query.graphql';
import { faviconDataUrl, overlayDataUrl } from '../lib/utils/mock_data';
-import mockData from './mock_data';
+import mockData, { mockDeployment, mockMergePipeline, mockPostMergeDeployments } from './mock_data';
import {
workingExtension,
collapsedDataErrorExtension,
@@ -60,9 +63,7 @@ jest.mock('~/smart_interval');
jest.mock('~/lib/utils/favicon');
jest.mock('@sentry/browser', () => ({
- setExtra: jest.fn(),
- setExtras: jest.fn(),
- captureMessage: jest.fn(),
+ ...jest.requireActual('@sentry/browser'),
captureException: jest.fn(),
}));
@@ -76,36 +77,25 @@ describe('MrWidgetOptions', () => {
let stateSubscription;
const COLLABORATION_MESSAGE = 'Members who can merge are allowed to add commits';
- const findApprovalsWidget = () => wrapper.findComponent(Approvals);
- const findPreparingWidget = () => wrapper.findComponent(Preparing);
- const findMergedPipelineContainer = () => wrapper.findByTestId('merged-pipeline-container');
- const findPipelineContainer = () => wrapper.findByTestId('pipeline-container');
- const findAlertMessage = () => wrapper.findComponent(MrWidgetAlertMessage);
-
- beforeEach(() => {
- gl.mrWidgetData = { ...mockData };
- gon.features = { asyncMrWidget: true };
- mock = new MockAdapter(axios);
- mock.onGet(mockData.merge_request_widget_path).reply(() => [HTTP_STATUS_OK, { ...mockData }]);
+ const setInitialData = (data) => {
+ gl.mrWidgetData = { ...mockData, ...data };
+ mock
+ .onGet(mockData.merge_request_widget_path)
+ .reply(() => [HTTP_STATUS_OK, { ...mockData, ...data }]);
mock
.onGet(mockData.merge_request_cached_widget_path)
- .reply(() => [HTTP_STATUS_OK, { ...mockData }]);
- });
-
- afterEach(() => {
- mock.restore();
- // eslint-disable-next-line @gitlab/vtu-no-explicit-wrapper-destroy
- wrapper.destroy();
- gl.mrWidgetData = {};
- });
+ .reply(() => [HTTP_STATUS_OK, { ...mockData, ...data }]);
+ };
const createComponent = ({
- mrData = mockData,
+ updatedMrData = {},
options = {},
data = {},
mountFn = shallowMountExtended,
} = {}) => {
+ setInitialData(updatedMrData);
+ const mrData = { ...mockData, ...updatedMrData };
const mockedApprovalsSubscription = createMockApolloSubscription();
queryResponse = {
data: {
@@ -153,9 +143,7 @@ describe('MrWidgetOptions', () => {
});
wrapper = mountFn(MrWidgetOptions, {
- propsData: {
- mrData: { ...mrData },
- },
+ propsData: { mrData },
data() {
return {
loading: false,
@@ -170,6 +158,12 @@ describe('MrWidgetOptions', () => {
return axios.waitForAll();
};
+ const findApprovalsWidget = () => wrapper.findComponent(Approvals);
+ const findPreparingWidget = () => wrapper.findComponent(Preparing);
+ const findMergedPipelineContainer = () => wrapper.findByTestId('merged-pipeline-container');
+ const findPipelineContainer = () => wrapper.findByTestId('pipeline-container');
+ const findAlertMessage = () => wrapper.findComponent(MrWidgetAlertMessage);
+ const findMergePipelineForkAlert = () => wrapper.findByTestId('merge-pipeline-fork-warning');
const findExtensionToggleButton = () =>
wrapper.find('[data-testid="widget-extension"] [data-testid="toggle-button"]');
const findExtensionLink = (linkHref) =>
@@ -177,23 +171,25 @@ describe('MrWidgetOptions', () => {
const findSuggestPipeline = () => wrapper.findComponent(WidgetSuggestPipeline);
const findWidgetContainer = () => wrapper.findComponent(WidgetContainer);
- describe('default', () => {
- beforeEach(() => {
- jest.spyOn(document, 'dispatchEvent');
- return createComponent();
- });
+ beforeEach(() => {
+ gon.features = { asyncMrWidget: true };
+ mock = new MockAdapter(axios);
+ });
- // quarantine: https://gitlab.com/gitlab-org/gitlab/-/issues/385238
- // eslint-disable-next-line jest/no-disabled-tests
- describe.skip('data', () => {
- it('should instantiate Store and Service', () => {
- expect(wrapper.vm.mr).toBeDefined();
- expect(wrapper.vm.service).toBeDefined();
- });
- });
+ afterEach(() => {
+ mock.restore();
+ // eslint-disable-next-line @gitlab/vtu-no-explicit-wrapper-destroy
+ wrapper.destroy();
+ gl.mrWidgetData = {};
+ });
+ describe('default', () => {
describe('computed', () => {
describe('componentName', () => {
+ beforeEach(async () => {
+ await createComponent();
+ });
+
// quarantine: https://gitlab.com/gitlab-org/gitlab/-/issues/409365
// eslint-disable-next-line jest/no-disabled-tests
it.skip.each`
@@ -205,137 +201,134 @@ describe('MrWidgetOptions', () => {
});
it.each`
- state | componentName
- ${'conflicts'} | ${'mr-widget-conflicts'}
- ${'shaMismatch'} | ${'sha-mismatch'}
- `('should translate $state into $componentName', ({ state, componentName }) => {
- wrapper.vm.mr.state = state;
-
- expect(wrapper.vm.componentName).toEqual(componentName);
+ state | componentName | component
+ ${'conflicts'} | ${'ConflictsState'} | ${ConflictsState}
+ ${'shaMismatch'} | ${'ShaMismatch'} | ${ShaMismatch}
+ `('should translate $state into $componentName component', async ({ state, component }) => {
+ Vue.set(wrapper.vm.mr, 'state', state);
+ await nextTick();
+ expect(wrapper.findComponent(component).exists()).toBe(true);
});
});
describe('MrWidgetPipelineContainer', () => {
- it('should return true when hasCI is true', async () => {
- wrapper.vm.mr.hasCI = true;
- await nextTick();
+ it('renders the pipeline container when it has CI', () => {
+ createComponent({ updatedMrData: { has_ci: true } });
expect(findPipelineContainer().exists()).toBe(true);
});
- it('should return false when hasCI is false', async () => {
- wrapper.vm.mr.hasCI = false;
- await nextTick();
-
+ it('does not render the pipeline container when it does not have CI', () => {
+ createComponent({ updatedMrData: { has_ci: false } });
expect(findPipelineContainer().exists()).toBe(false);
});
});
describe('shouldRenderCollaborationStatus', () => {
- describe('when collaboration is allowed', () => {
- beforeEach(() => {
- wrapper.vm.mr.allowCollaboration = true;
- });
-
- describe('when merge request is opened', () => {
- beforeEach(() => {
- wrapper.vm.mr.isOpen = true;
- return nextTick();
- });
-
- it('should render collaboration status', () => {
- expect(wrapper.text()).toContain(COLLABORATION_MESSAGE);
- });
+ it('renders collaboration message when collaboration is allowed and the MR is open', () => {
+ createComponent({
+ updatedMrData: { allow_collaboration: true, state: STATUS_OPEN, not: false },
});
-
- describe('when merge request is not opened', () => {
- beforeEach(() => {
- wrapper.vm.mr.isOpen = false;
- return nextTick();
- });
-
- it('should not render collaboration status', () => {
- expect(wrapper.text()).not.toContain(COLLABORATION_MESSAGE);
- });
+ expect(findPipelineContainer().props('mr')).toMatchObject({
+ allowCollaboration: true,
+ isOpen: true,
});
+ expect(wrapper.text()).toContain(COLLABORATION_MESSAGE);
});
- describe('when collaboration is not allowed', () => {
- beforeEach(() => {
- wrapper.vm.mr.allowCollaboration = false;
+ it('does not render collaboration message when collaboration is allowed and the MR is closed', () => {
+ createComponent({
+ updatedMrData: { allow_collaboration: true, state: STATUS_CLOSED, not: true },
});
-
- describe('when merge request is opened', () => {
- beforeEach(() => {
- wrapper.vm.mr.isOpen = true;
- return nextTick();
- });
-
- it('should not render collaboration status', () => {
- expect(wrapper.text()).not.toContain(COLLABORATION_MESSAGE);
- });
+ expect(findPipelineContainer().props('mr')).toMatchObject({
+ allowCollaboration: true,
+ isOpen: false,
});
+ expect(wrapper.text()).not.toContain(COLLABORATION_MESSAGE);
});
- });
- describe('showMergePipelineForkWarning', () => {
- describe('when the source project and target project are the same', () => {
- beforeEach(() => {
- Vue.set(wrapper.vm.mr, 'mergePipelinesEnabled', true);
- Vue.set(wrapper.vm.mr, 'sourceProjectId', 1);
- Vue.set(wrapper.vm.mr, 'targetProjectId', 1);
- return nextTick();
+ it('does not render collaboration message when collaboration is not allowed and the MR is closed', () => {
+ createComponent({
+ updatedMrData: { allow_collaboration: undefined, state: STATUS_CLOSED, not: true },
});
-
- it('should be false', () => {
- expect(findAlertMessage().exists()).toBe(false);
+ expect(findPipelineContainer().props('mr')).toMatchObject({
+ allowCollaboration: undefined,
+ isOpen: false,
});
+ expect(wrapper.text()).not.toContain(COLLABORATION_MESSAGE);
});
- describe('when merge pipelines are not enabled', () => {
- beforeEach(() => {
- Vue.set(wrapper.vm.mr, 'mergePipelinesEnabled', false);
- Vue.set(wrapper.vm.mr, 'sourceProjectId', 1);
- Vue.set(wrapper.vm.mr, 'targetProjectId', 2);
- return nextTick();
+ it('does not render collaboration message when collaboration is not allowed and the MR is open', () => {
+ createComponent({
+ updatedMrData: { allow_collaboration: undefined, state: STATUS_OPEN, not: true },
+ });
+ expect(findPipelineContainer().props('mr')).toMatchObject({
+ allowCollaboration: undefined,
+ isOpen: true,
});
+ expect(wrapper.text()).not.toContain(COLLABORATION_MESSAGE);
+ });
+ });
- it('should be false', () => {
- expect(findAlertMessage().exists()).toBe(false);
+ describe('showMergePipelineForkWarning', () => {
+ it('hides the alert when the source project and target project are the same', async () => {
+ createComponent({
+ updatedMrData: {
+ source_project_id: 1,
+ target_project_id: 1,
+ },
});
+ await nextTick();
+ Vue.set(wrapper.vm.mr, 'mergePipelinesEnabled', true);
+ await nextTick();
+ expect(findMergePipelineForkAlert().exists()).toBe(false);
});
- describe('when merge pipelines are enabled _and_ the source project and target project are different', () => {
- beforeEach(() => {
- Vue.set(wrapper.vm.mr, 'mergePipelinesEnabled', true);
- Vue.set(wrapper.vm.mr, 'sourceProjectId', 1);
- Vue.set(wrapper.vm.mr, 'targetProjectId', 2);
- return nextTick();
+ it('hides the alert when merge pipelines are not enabled', async () => {
+ createComponent({
+ updatedMrData: {
+ source_project_id: 1,
+ target_project_id: 2,
+ },
});
+ await nextTick();
+ expect(findMergePipelineForkAlert().exists()).toBe(false);
+ });
- it('should be true', () => {
- expect(findAlertMessage().exists()).toBe(true);
+ it('shows the alert when merge pipelines are enabled and the source project and target project are different', async () => {
+ createComponent({
+ updatedMrData: {
+ source_project_id: 1,
+ target_project_id: 2,
+ },
});
+ await nextTick();
+ Vue.set(wrapper.vm.mr, 'mergePipelinesEnabled', true);
+ await nextTick();
+ expect(findMergePipelineForkAlert().exists()).toBe(true);
});
});
describe('formattedHumanAccess', () => {
- it('when user is a tool admin but not a member of project', async () => {
- wrapper.vm.mr.humanAccess = null;
- wrapper.vm.mr.mergeRequestAddCiConfigPath = 'test';
- wrapper.vm.mr.hasCI = false;
- wrapper.vm.mr.isDismissedSuggestPipeline = false;
- await nextTick();
-
+ it('renders empty string when user is a tool admin but not a member of project', () => {
+ createComponent({
+ updatedMrData: {
+ human_access: null,
+ merge_request_add_ci_config_path: 'test',
+ has_ci: false,
+ is_dismissed_suggest_pipeline: false,
+ },
+ });
expect(findSuggestPipeline().props('humanAccess')).toBe('');
});
-
- it('when user a member of the project', async () => {
- wrapper.vm.mr.humanAccess = 'Owner';
- wrapper.vm.mr.mergeRequestAddCiConfigPath = 'test';
- wrapper.vm.mr.hasCI = false;
- wrapper.vm.mr.isDismissedSuggestPipeline = false;
- await nextTick();
-
+ it('renders human access when user is a member of the project', () => {
+ createComponent({
+ updatedMrData: {
+ human_access: 'Owner',
+ merge_request_add_ci_config_path: 'test',
+ has_ci: false,
+ is_dismissed_suggest_pipeline: false,
+ },
+ });
expect(findSuggestPipeline().props('humanAccess')).toBe('owner');
});
});
@@ -343,33 +336,50 @@ describe('MrWidgetOptions', () => {
describe('methods', () => {
describe('checkStatus', () => {
- let cb;
- let isCbExecuted;
-
- beforeEach(() => {
- jest.spyOn(wrapper.vm.service, 'checkStatus').mockResolvedValue({ data: mockData });
- jest.spyOn(wrapper.vm.mr, 'setData').mockImplementation(() => {});
- jest.spyOn(wrapper.vm, 'handleNotification').mockImplementation(() => {});
-
- isCbExecuted = false;
- cb = () => {
- isCbExecuted = true;
- };
+ it('checks the status of the pipelines', async () => {
+ const callback = jest.fn();
+ await createComponent({ updatedMrData: { foo: 1 } });
+ await waitForPromises();
+ eventHub.$emit('MRWidgetUpdateRequested', callback);
+ await waitForPromises();
+ expect(callback).toHaveBeenCalledWith(expect.objectContaining({ foo: 1 }));
});
- it('should tell service to check status if document is visible', () => {
- wrapper.vm.checkStatus(cb);
+ it('notifies the user of the pipeline status', async () => {
+ jest.spyOn(notify, 'notifyMe').mockImplementation(() => {});
+ const logoFilename = 'logo.png';
+ await createComponent({
+ updatedMrData: { gitlabLogo: logoFilename },
+ });
+ eventHub.$emit('MRWidgetUpdateRequested');
+ await waitForPromises();
+ expect(notify.notifyMe).toHaveBeenCalledWith(
+ `Pipeline passed`,
+ `Pipeline passed for "${mockData.title}"`,
+ logoFilename,
+ );
+ });
- return nextTick().then(() => {
- expect(wrapper.vm.service.checkStatus).toHaveBeenCalled();
- expect(wrapper.vm.mr.setData).toHaveBeenCalled();
- expect(wrapper.vm.handleNotification).toHaveBeenCalledWith(mockData);
- expect(isCbExecuted).toBe(true);
+ it("updates the store's data", async () => {
+ const mockSetData = jest.fn();
+ await createComponent({
+ data: {
+ mr: {
+ setData: mockSetData,
+ setGraphqlData: jest.fn(),
+ },
+ },
});
+ eventHub.$emit('MRWidgetUpdateRequested');
+ expect(mockSetData).toHaveBeenCalled();
});
});
describe('initDeploymentsPolling', () => {
+ beforeEach(async () => {
+ await createComponent();
+ });
+
it('should call SmartInterval', () => {
wrapper.vm.initDeploymentsPolling();
@@ -382,83 +392,98 @@ describe('MrWidgetOptions', () => {
});
describe('fetchDeployments', () => {
- it('should fetch deployments', () => {
- jest
- .spyOn(wrapper.vm.service, 'fetchDeployments')
- .mockResolvedValue({ data: [{ id: 1, status: SUCCESS }] });
-
- wrapper.vm.fetchPreMergeDeployments();
+ beforeEach(async () => {
+ mock
+ .onGet(mockData.ci_environments_status_path)
+ .reply(() => [HTTP_STATUS_OK, [{ id: 1, status: SUCCESS }]]);
+ await createComponent();
+ });
- return nextTick().then(() => {
- expect(wrapper.vm.service.fetchDeployments).toHaveBeenCalled();
- expect(wrapper.vm.mr.deployments.length).toEqual(1);
- expect(wrapper.vm.mr.deployments[0].id).toBe(1);
- });
+ it('should fetch deployments', async () => {
+ eventHub.$emit('FetchDeployments', {});
+ await waitForPromises();
+ expect(wrapper.vm.mr.deployments.length).toEqual(1);
+ expect(wrapper.vm.mr.deployments[0].id).toBe(1);
});
});
describe('fetchActionsContent', () => {
- it('should fetch content of Cherry Pick and Revert modals', () => {
- jest
- .spyOn(wrapper.vm.service, 'fetchMergeActionsContent')
- .mockResolvedValue({ data: 'hello world' });
-
- wrapper.vm.fetchActionsContent();
-
- return nextTick().then(() => {
- expect(wrapper.vm.service.fetchMergeActionsContent).toHaveBeenCalled();
- expect(document.body.textContent).toContain('hello world');
- expect(document.dispatchEvent).toHaveBeenCalledWith(
- new CustomEvent('merged:UpdateActions'),
- );
- });
+ const innerHTML = 'hello world';
+ beforeEach(async () => {
+ jest.spyOn(document, 'dispatchEvent');
+ mock.onGet(mockData.commit_change_content_path).reply(() => [HTTP_STATUS_OK, innerHTML]);
+ await createComponent();
+ });
+
+ it('should fetch content of Cherry Pick and Revert modals', async () => {
+ eventHub.$emit('FetchActionsContent');
+ await waitForPromises();
+ expect(document.body.textContent).toContain(innerHTML);
+ expect(document.dispatchEvent).toHaveBeenCalledWith(
+ new CustomEvent('merged:UpdateActions'),
+ );
});
});
describe('bindEventHubListeners', () => {
- it.each`
- event | method | methodArgs
- ${'MRWidgetUpdateRequested'} | ${'checkStatus'} | ${(x) => [x]}
- ${'MRWidgetRebaseSuccess'} | ${'checkStatus'} | ${(x) => [x, true]}
- ${'FetchActionsContent'} | ${'fetchActionsContent'} | ${() => []}
- ${'EnablePolling'} | ${'resumePolling'} | ${() => []}
- ${'DisablePolling'} | ${'stopPolling'} | ${() => []}
- ${'FetchDeployments'} | ${'fetchPreMergeDeployments'} | ${() => []}
- `('should bind to $event', ({ event, method, methodArgs }) => {
- jest.spyOn(wrapper.vm, method).mockImplementation();
-
- const eventArg = {};
- eventHub.$emit(event, eventArg);
-
- expect(wrapper.vm[method]).toHaveBeenCalledWith(...methodArgs(eventArg));
+ const mockSetData = jest.fn();
+ beforeEach(async () => {
+ await createComponent({
+ data: {
+ mr: {
+ setData: mockSetData,
+ setGraphqlData: jest.fn(),
+ },
+ },
+ });
});
- it('should bind to SetBranchRemoveFlag', () => {
- expect(wrapper.vm.mr.isRemovingSourceBranch).toBe(false);
-
- eventHub.$emit('SetBranchRemoveFlag', [true]);
+ it('refetches when "MRWidgetUpdateRequested" event is emitted', async () => {
+ expect(stateQueryHandler).toHaveBeenCalledTimes(1);
+ eventHub.$emit('MRWidgetUpdateRequested', () => {});
+ await waitForPromises();
+ expect(stateQueryHandler).toHaveBeenCalledTimes(2);
+ });
- expect(wrapper.vm.mr.isRemovingSourceBranch).toBe(true);
+ it('refetches when "MRWidgetRebaseSuccess" event is emitted', async () => {
+ expect(stateQueryHandler).toHaveBeenCalledTimes(1);
+ eventHub.$emit('MRWidgetRebaseSuccess', () => {});
+ await waitForPromises();
+ expect(stateQueryHandler).toHaveBeenCalledTimes(2);
});
- it('should bind to FailedToMerge', () => {
- wrapper.vm.mr.state = '';
- wrapper.vm.mr.mergeError = '';
+ it('should bind to SetBranchRemoveFlag', () => {
+ expect(findPipelineContainer().props('mr')).toMatchObject({
+ isRemovingSourceBranch: false,
+ });
+ eventHub.$emit('SetBranchRemoveFlag', [true]);
+ expect(findPipelineContainer().props('mr')).toMatchObject({
+ isRemovingSourceBranch: true,
+ });
+ });
+ it('should bind to FailedToMerge', async () => {
+ expect(findAlertMessage().exists()).toBe(false);
+ expect(findPipelineContainer().props('mr')).toMatchObject({
+ mergeError: undefined,
+ state: 'merged',
+ });
const mergeError = 'Something bad happened!';
- eventHub.$emit('FailedToMerge', mergeError);
+ await eventHub.$emit('FailedToMerge', mergeError);
- expect(wrapper.vm.mr.state).toBe('failedToMerge');
- expect(wrapper.vm.mr.mergeError).toBe(mergeError);
+ expect(findAlertMessage().exists()).toBe(true);
+ expect(findAlertMessage().text()).toBe(`${mergeError}. Try again.`);
+ expect(findPipelineContainer().props('mr')).toMatchObject({
+ mergeError,
+ state: 'failedToMerge',
+ });
});
it('should bind to UpdateWidgetData', () => {
- jest.spyOn(wrapper.vm.mr, 'setData').mockImplementation();
-
const data = { ...mockData };
eventHub.$emit('UpdateWidgetData', data);
- expect(wrapper.vm.mr.setData).toHaveBeenCalledWith(data);
+ expect(mockSetData).toHaveBeenCalledWith(data);
});
});
@@ -479,58 +504,39 @@ describe('MrWidgetOptions', () => {
});
it('should call setFavicon method', async () => {
- wrapper.vm.mr.faviconOverlayPath = overlayDataUrl;
-
- await wrapper.vm.setFaviconHelper();
-
+ await createComponent({ updatedMrData: { favicon_overlay_path: overlayDataUrl } });
expect(setFaviconOverlay).toHaveBeenCalledWith(overlayDataUrl);
});
it('should not call setFavicon when there is no faviconOverlayPath', async () => {
- wrapper.vm.mr.faviconOverlayPath = null;
- await wrapper.vm.setFaviconHelper();
+ await createComponent({ updatedMrData: { favicon_overlay_path: null } });
expect(faviconElement.getAttribute('href')).toEqual(null);
});
});
describe('handleNotification', () => {
- const data = {
- ci_status: 'running',
- title: 'title',
- pipeline: { details: { status: { label: 'running-label' } } },
- };
-
beforeEach(() => {
jest.spyOn(notify, 'notifyMe').mockImplementation(() => {});
-
- wrapper.vm.mr.ciStatus = 'failed';
- wrapper.vm.mr.gitlabLogo = 'logo.png';
});
- it('should call notifyMe', () => {
- wrapper.vm.handleNotification(data);
-
+ it('should call notifyMe', async () => {
+ const logoFilename = 'logo.png';
+ await createComponent({ updatedMrData: { gitlabLogo: logoFilename } });
expect(notify.notifyMe).toHaveBeenCalledWith(
- 'Pipeline running-label',
- 'Pipeline running-label for "title"',
- 'logo.png',
+ `Pipeline passed`,
+ `Pipeline passed for "${mockData.title}"`,
+ logoFilename,
);
});
- it('should not call notifyMe if the status has not changed', () => {
- wrapper.vm.mr.ciStatus = data.ci_status;
-
- wrapper.vm.handleNotification(data);
-
+ it('should not call notifyMe if the status has not changed', async () => {
+ await createComponent({ updatedMrData: { ci_status: undefined } });
+ await eventHub.$emit('MRWidgetUpdateRequested');
expect(notify.notifyMe).not.toHaveBeenCalled();
});
- it('should not notify if no pipeline provided', () => {
- wrapper.vm.handleNotification({
- ...data,
- pipeline: undefined,
- });
-
+ it('should not notify if no pipeline provided', async () => {
+ await createComponent({ updatedMrData: { pipeline: undefined } });
expect(notify.notifyMe).not.toHaveBeenCalled();
});
});
@@ -546,7 +552,6 @@ describe('MrWidgetOptions', () => {
wrapper.destroy();
return createComponent({
- mrData: mockData,
options: {},
data: {
pollInterval: interval,
@@ -597,47 +602,18 @@ describe('MrWidgetOptions', () => {
});
describe('rendering deployments', () => {
- const changes = [
- {
- path: 'index.html',
- external_url: 'http://root-main-patch-91341.volatile-watch.surge.sh/index.html',
- },
- {
- path: 'imgs/gallery.html',
- external_url: 'http://root-main-patch-91341.volatile-watch.surge.sh/imgs/gallery.html',
- },
- {
- path: 'about/',
- external_url: 'http://root-main-patch-91341.volatile-watch.surge.sh/about/',
- },
- ];
- const deploymentMockData = {
- id: 15,
- name: 'review/diplo',
- url: '/root/acets-review-apps/environments/15',
- stop_url: '/root/acets-review-apps/environments/15/stop',
- metrics_url: '/root/acets-review-apps/environments/15/deployments/1/metrics',
- metrics_monitoring_url: '/root/acets-review-apps/environments/15/metrics',
- external_url: 'http://diplo.',
- external_url_formatted: 'diplo.',
- deployed_at: '2017-03-22T22:44:42.258Z',
- deployed_at_formatted: 'Mar 22, 2017 10:44pm',
- changes,
- status: SUCCESS,
- environment_available: true,
- };
-
it('renders multiple deployments', async () => {
- wrapper.vm.mr.deployments.push(
- {
- ...deploymentMockData,
- },
- {
- ...deploymentMockData,
- id: deploymentMockData.id + 1,
+ await createComponent({
+ updatedMrData: {
+ deployments: [
+ mockDeployment,
+ {
+ ...mockDeployment,
+ id: mockDeployment.id + 1,
+ },
+ ],
},
- );
- await nextTick();
+ });
expect(findPipelineContainer().props('isPostMerge')).toBe(false);
expect(findPipelineContainer().props('mr').deployments).toHaveLength(2);
expect(findPipelineContainer().props('mr').postMergeDeployments).toHaveLength(0);
@@ -646,189 +622,44 @@ describe('MrWidgetOptions', () => {
describe('pipeline for target branch after merge', () => {
describe('with information for target branch pipeline', () => {
- beforeEach(() => {
- wrapper.vm.mr.state = 'merged';
- wrapper.vm.mr.mergePipeline = {
- id: 127,
- user: {
- id: 1,
- name: 'Administrator',
- username: 'root',
- state: 'active',
- avatar_url: null,
- web_url: 'http://localhost:3000/root',
- status_tooltip_html: null,
- path: '/root',
- },
- active: true,
- coverage: null,
- source: 'push',
- created_at: '2018-10-22T11:41:35.186Z',
- updated_at: '2018-10-22T11:41:35.433Z',
- path: '/root/ci-web-terminal/pipelines/127',
- flags: {
- latest: true,
- stuck: true,
- auto_devops: false,
- yaml_errors: false,
- retryable: false,
- cancelable: true,
- failure_reason: false,
- },
- details: {
- status: {
- icon: 'status_pending',
- text: 'pending',
- label: 'pending',
- group: 'pending',
- tooltip: 'pending',
- has_details: true,
- details_path: '/root/ci-web-terminal/pipelines/127',
- illustration: null,
- favicon:
- '/assets/ci_favicons/favicon_status_pending-5bdf338420e5221ca24353b6bff1c9367189588750632e9a871b7af09ff6a2ae.png',
- },
- duration: null,
- finished_at: null,
- stages: [
- {
- name: 'test',
- title: 'test: pending',
- status: {
- icon: 'status_pending',
- text: 'pending',
- label: 'pending',
- group: 'pending',
- tooltip: 'pending',
- has_details: true,
- details_path: '/root/ci-web-terminal/pipelines/127#test',
- illustration: null,
- favicon:
- '/assets/ci_favicons/favicon_status_pending-5bdf338420e5221ca24353b6bff1c9367189588750632e9a871b7af09ff6a2ae.png',
- },
- path: '/root/ci-web-terminal/pipelines/127#test',
- dropdown_path: '/root/ci-web-terminal/pipelines/127/stage.json?stage=test',
- },
- ],
- artifacts: [],
- manual_actions: [],
- scheduled_actions: [],
- },
- ref: {
- name: 'main',
- path: '/root/ci-web-terminal/commits/main',
- tag: false,
- branch: true,
- },
- commit: {
- id: 'aa1939133d373c94879becb79d91828a892ee319',
- short_id: 'aa193913',
- title: "Merge branch 'main-test' into 'main'",
- created_at: '2018-10-22T11:41:33.000Z',
- parent_ids: [
- '4622f4dd792468993003caf2e3be978798cbe096',
- '76598df914cdfe87132d0c3c40f80db9fa9396a4',
- ],
- message:
- "Merge branch 'main-test' into 'main'\n\nUpdate .gitlab-ci.yml\n\nSee merge request root/ci-web-terminal!1",
- author_name: 'Administrator',
- author_email: 'admin@example.com',
- authored_date: '2018-10-22T11:41:33.000Z',
- committer_name: 'Administrator',
- committer_email: 'admin@example.com',
- committed_date: '2018-10-22T11:41:33.000Z',
- author: {
- id: 1,
- name: 'Administrator',
- username: 'root',
- state: 'active',
- avatar_url: null,
- web_url: 'http://localhost:3000/root',
- status_tooltip_html: null,
- path: '/root',
- },
- author_gravatar_url: null,
- commit_url:
- 'http://localhost:3000/root/ci-web-terminal/commit/aa1939133d373c94879becb79d91828a892ee319',
- commit_path: '/root/ci-web-terminal/commit/aa1939133d373c94879becb79d91828a892ee319',
- },
- cancel_path: '/root/ci-web-terminal/pipelines/127/cancel',
- };
- return nextTick();
- });
+ const state = 'merged';
- it('renders pipeline block', () => {
+ it('renders pipeline block', async () => {
+ await createComponent({ updatedMrData: { state, merge_pipeline: mockMergePipeline } });
expect(findMergedPipelineContainer().exists()).toBe(true);
});
describe('with post merge deployments', () => {
- beforeEach(() => {
- wrapper.vm.mr.postMergeDeployments = [
- {
- id: 15,
- name: 'review/diplo',
- url: '/root/acets-review-apps/environments/15',
- stop_url: '/root/acets-review-apps/environments/15/stop',
- metrics_url: '/root/acets-review-apps/environments/15/deployments/1/metrics',
- metrics_monitoring_url: '/root/acets-review-apps/environments/15/metrics',
- external_url: 'http://diplo.',
- external_url_formatted: 'diplo.',
- deployed_at: '2017-03-22T22:44:42.258Z',
- deployed_at_formatted: 'Mar 22, 2017 10:44pm',
- changes: [
- {
- path: 'index.html',
- external_url: 'http://root-main-patch-91341.volatile-watch.surge.sh/index.html',
- },
- {
- path: 'imgs/gallery.html',
- external_url:
- 'http://root-main-patch-91341.volatile-watch.surge.sh/imgs/gallery.html',
- },
- {
- path: 'about/',
- external_url: 'http://root-main-patch-91341.volatile-watch.surge.sh/about/',
- },
- ],
- status: 'success',
+ it('renders post deployment information', async () => {
+ await createComponent({
+ updatedMrData: {
+ state,
+ merge_pipeline: mockMergePipeline,
+ post_merge_deployments: mockPostMergeDeployments,
},
- ];
-
- return nextTick();
- });
-
- it('renders post deployment information', () => {
+ });
expect(findMergedPipelineContainer().exists()).toBe(true);
});
});
});
describe('without information for target branch pipeline', () => {
- beforeEach(() => {
- wrapper.vm.mr.state = 'merged';
-
- return nextTick();
- });
-
- it('does not render pipeline block', () => {
+ it('does not render pipeline block', async () => {
+ await createComponent({ updatedMrData: { merge_pipeline: undefined } });
expect(findMergedPipelineContainer().exists()).toBe(false);
});
});
describe('when state is not merged', () => {
- beforeEach(() => {
- wrapper.vm.mr.state = 'archived';
-
- return nextTick();
- });
-
- it('does not render pipeline block', () => {
+ it('does not render pipeline block', async () => {
+ await createComponent({ updatedMrData: { state: 'archived' } });
expect(findMergedPipelineContainer().exists()).toBe(false);
});
});
});
it('should not suggest pipelines when feature flag is not present', () => {
+ createComponent();
expect(findSuggestPipeline().exists()).toBe(false);
});
});
@@ -839,28 +670,23 @@ describe('MrWidgetOptions', () => {
});
describe('given feature flag is enabled', () => {
- beforeEach(async () => {
- await createComponent();
- wrapper.vm.mr.hasCI = false;
- });
-
- it('should suggest pipelines when none exist', () => {
+ it('should suggest pipelines when none exist', async () => {
+ await createComponent({ updatedMrData: { has_ci: false } });
expect(findSuggestPipeline().exists()).toBe(true);
});
it.each([
- { isDismissedSuggestPipeline: true },
- { mergeRequestAddCiConfigPath: null },
- { hasCI: true },
+ { is_dismissed_suggest_pipeline: true },
+ { merge_request_add_ci_config_path: null },
+ { has_ci: true },
])('with %s, should not suggest pipeline', async (obj) => {
- Object.assign(wrapper.vm.mr, obj);
-
- await nextTick();
+ await createComponent({ updatedMrData: { has_ci: false, ...obj } });
expect(findSuggestPipeline().exists()).toBe(false);
});
it('should allow dismiss of the suggest pipeline message', async () => {
+ await createComponent({ updatedMrData: { has_ci: false } });
await findSuggestPipeline().vm.$emit('dismiss');
expect(findSuggestPipeline().exists()).toBe(false);
@@ -875,11 +701,11 @@ describe('MrWidgetOptions', () => {
${'merged'} | ${true} | ${'shows'}
${'open'} | ${true} | ${'shows'}
`('$showText merge error when state is $state', async ({ state, show }) => {
- createComponent({ mrData: { ...mockData, state, mergeError: 'Error!' } });
+ createComponent({ updatedMrData: { state, mergeError: 'Error!' } });
await waitForPromises();
- expect(wrapper.find('[data-testid="merge_error"]').exists()).toBe(show);
+ expect(wrapper.findByTestId('merge-error').exists()).toBe(show);
});
});
@@ -1111,6 +937,67 @@ describe('MrWidgetOptions', () => {
registeredExtensions.extensions = [];
});
+ describe('component name tier suffixes', () => {
+ let extension;
+
+ beforeEach(() => {
+ extension = workingExtension();
+ });
+
+ it('reports events without a CE suffix', () => {
+ extension.name = `${extension.name}CE`;
+
+ registerExtension(extension);
+ createComponent({ mountFn: mountExtended });
+
+ expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith(
+ 'i_code_review_merge_request_widget_test_extension_view',
+ );
+ expect(api.trackRedisHllUserEvent).not.toHaveBeenCalledWith(
+ 'i_code_review_merge_request_widget_test_extension_c_e_view',
+ );
+ });
+
+ it('reports events without an EE suffix', () => {
+ extension.name = `${extension.name}EE`;
+
+ registerExtension(extension);
+ createComponent({ mountFn: mountExtended });
+
+ expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith(
+ 'i_code_review_merge_request_widget_test_extension_view',
+ );
+ expect(api.trackRedisHllUserEvent).not.toHaveBeenCalledWith(
+ 'i_code_review_merge_request_widget_test_extension_e_e_view',
+ );
+ });
+
+ it('leaves non-CE & non-EE all caps suffixes intact', () => {
+ extension.name = `${extension.name}HI`;
+
+ registerExtension(extension);
+ createComponent({ mountFn: mountExtended });
+
+ expect(api.trackRedisHllUserEvent).not.toHaveBeenCalledWith(
+ 'i_code_review_merge_request_widget_test_extension_view',
+ );
+ expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith(
+ 'i_code_review_merge_request_widget_test_extension_h_i_view',
+ );
+ });
+
+ it("doesn't remove CE or EE from the middle of a widget name", () => {
+ extension.name = 'TestCEExtensionEETest';
+
+ registerExtension(extension);
+ createComponent({ mountFn: mountExtended });
+
+ expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith(
+ 'i_code_review_merge_request_widget_test_c_e_extension_e_e_test_view',
+ );
+ });
+ });
+
it('triggers view events when mounted', () => {
registerExtension(workingExtension());
createComponent({ mountFn: mountExtended });
@@ -1217,11 +1104,7 @@ describe('MrWidgetOptions', () => {
it('renders the Preparing state component when the MR state is initially "preparing"', async () => {
await createComponent({
- mrData: {
- ...mockData,
- state: 'opened',
- detailedMergeStatus: 'PREPARING',
- },
+ updatedMrData: { state: 'opened', detailedMergeStatus: 'PREPARING' },
});
expect(findApprovalsWidget().exists()).toBe(false);
@@ -1235,11 +1118,7 @@ describe('MrWidgetOptions', () => {
it("shows the Preparing widget when the MR reports it's not ready yet", async () => {
await createComponent({
- mrData: {
- ...mockData,
- state: 'opened',
- detailedMergeStatus: 'PREPARING',
- },
+ updatedMrData: { state: 'opened', detailedMergeStatus: 'PREPARING' },
options: {},
data: {},
});
@@ -1249,11 +1128,7 @@ describe('MrWidgetOptions', () => {
it('removes the Preparing widget when the MR indicates it has been prepared', async () => {
await createComponent({
- mrData: {
- ...mockData,
- state: 'opened',
- detailedMergeStatus: 'PREPARING',
- },
+ updatedMrData: { state: 'opened', detailedMergeStatus: 'PREPARING' },
options: {},
data: {},
});
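
The rework of `mr_widget_options_spec.js` replaces post-mount mutation of `wrapper.vm.mr` with a `setInitialData` helper that stubs both widget endpoints, so the component hydrates itself from "network" data. The sketch below isolates that seeding pattern; the paths and payload fields are placeholders rather than the spec's real `mockData` values.

```javascript
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';

// Placeholder widget paths and payload.
const baseData = {
  merge_request_widget_path: '/widget.json',
  merge_request_cached_widget_path: '/cached.json',
};

// Stub both widget endpoints with the same merged payload, mirroring setInitialData above.
const setInitialData = (mock, data) => {
  const payload = { ...baseData, ...data };
  [baseData.merge_request_widget_path, baseData.merge_request_cached_widget_path].forEach((path) =>
    mock.onGet(path).reply(() => [HTTP_STATUS_OK, payload]),
  );
  return payload;
};

describe('widget data seeding (sketch)', () => {
  let mock;

  beforeEach(() => {
    mock = new MockAdapter(axios);
  });

  afterEach(() => {
    mock.restore();
  });

  it('serves overridden data from both endpoints', async () => {
    setInitialData(mock, { has_ci: true });

    const { data: widget } = await axios.get(baseData.merge_request_widget_path);
    const { data: cached } = await axios.get(baseData.merge_request_cached_widget_path);

    expect(widget.has_ci).toBe(true);
    expect(cached.has_ci).toBe(true);
  });
});
```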
diff --git a/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap
index b93c64efbcb..da1a15b1b2b 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap
@@ -4,12 +4,10 @@ exports[`Expand button on click when short text is provided renders button after
<span>
<button
aria-label="Click to expand text"
- class="btn js-text-expander-prepend text-expander btn-blank btn-default btn-md gl-button btn-icon button-ellipsis-horizontal"
+ class="btn btn-blank btn-default btn-icon btn-md button-ellipsis-horizontal gl-button js-text-expander-prepend text-expander"
style="display: none;"
type="button"
>
- <!---->
-
<svg
aria-hidden="true"
class="gl-button-icon gl-icon s16"
@@ -20,26 +18,18 @@ exports[`Expand button on click when short text is provided renders button after
href="file-mock#ellipsis_h"
/>
</svg>
-
- <!---->
</button>
-
- <!---->
-
<span>
<p>
Expanded!
</p>
</span>
-
<button
aria-label="Click to expand text"
- class="btn js-text-expander-append text-expander btn-blank btn-default btn-md gl-button btn-icon button-ellipsis-horizontal"
+ class="btn btn-blank btn-default btn-icon btn-md button-ellipsis-horizontal gl-button js-text-expander-append text-expander"
style=""
type="button"
>
- <!---->
-
<svg
aria-hidden="true"
class="gl-button-icon gl-icon s16"
@@ -50,8 +40,6 @@ exports[`Expand button on click when short text is provided renders button after
href="file-mock#ellipsis_h"
/>
</svg>
-
- <!---->
</button>
</span>
`;
@@ -60,11 +48,9 @@ exports[`Expand button when short text is provided renders button before text 1`
<span>
<button
aria-label="Click to expand text"
- class="btn js-text-expander-prepend text-expander btn-blank btn-default btn-md gl-button btn-icon button-ellipsis-horizontal"
+ class="btn btn-blank btn-default btn-icon btn-md button-ellipsis-horizontal gl-button js-text-expander-prepend text-expander"
type="button"
>
- <!---->
-
<svg
aria-hidden="true"
class="gl-button-icon gl-icon s16"
@@ -75,26 +61,18 @@ exports[`Expand button when short text is provided renders button before text 1`
href="file-mock#ellipsis_h"
/>
</svg>
-
- <!---->
</button>
-
<span>
<p>
Short
</p>
</span>
-
- <!---->
-
<button
aria-label="Click to expand text"
- class="btn js-text-expander-append text-expander btn-blank btn-default btn-md gl-button btn-icon button-ellipsis-horizontal"
+ class="btn btn-blank btn-default btn-icon btn-md button-ellipsis-horizontal gl-button js-text-expander-append text-expander"
style="display: none;"
type="button"
>
- <!---->
-
<svg
aria-hidden="true"
class="gl-button-icon gl-icon s16"
@@ -105,8 +83,6 @@ exports[`Expand button when short text is provided renders button before text 1`
href="file-mock#ellipsis_h"
/>
</svg>
-
- <!---->
</button>
</span>
`;
diff --git a/spec/frontend/vue_shared/components/__snapshots__/integration_help_text_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/integration_help_text_spec.js.snap
index df0fcf5da1c..d630d23873f 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/integration_help_text_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/integration_help_text_spec.js.snap
@@ -8,14 +8,12 @@ exports[`IntegrationHelpText component should not render the link when start and
exports[`IntegrationHelpText component should render the help text 1`] = `
<span>
- Click
+ Click
<gl-link-stub
href="http://bar.com"
target="_blank"
>
-
- Bar
-
+ Bar
<gl-icon-stub
class="gl-vertical-align-middle"
name="external-link"
diff --git a/spec/frontend/vue_shared/components/__snapshots__/source_editor_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/source_editor_spec.js.snap
index f414359fef2..76deb4d0b36 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/source_editor_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/source_editor_spec.js.snap
@@ -3,8 +3,8 @@
exports[`Source Editor component rendering matches the snapshot 1`] = `
<div
data-editor-loading=""
- data-qa-selector="source_editor_container"
- id="source-editor-snippet_777"
+ data-testid="source-editor-container"
+ id="reference-0"
>
<pre
class="editor-loading-content"
diff --git a/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap
deleted file mode 100644
index 62d75fbdc5f..00000000000
--- a/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap
+++ /dev/null
@@ -1,59 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`SplitButton renders actionItems 1`] = `
-<gl-dropdown-stub
- category="primary"
- clearalltext="Clear all"
- clearalltextclass="gl-px-5"
- headertext=""
- hideheaderborder="true"
- highlighteditemstitle="Selected"
- highlighteditemstitleclass="gl-px-5"
- menu-class=""
- size="medium"
- split="true"
- splithref=""
- text="professor"
- variant="default"
->
- <gl-dropdown-item-stub
- avatarurl=""
- iconcolor=""
- iconname=""
- iconrightarialabel=""
- iconrightname=""
- ischecked="true"
- ischeckitem="true"
- secondarytext=""
- >
- <strong>
- professor
- </strong>
-
- <div>
- very symphonic
- </div>
- </gl-dropdown-item-stub>
-
- <gl-dropdown-divider-stub />
- <gl-dropdown-item-stub
- avatarurl=""
- iconcolor=""
- iconname=""
- iconrightarialabel=""
- iconrightname=""
- ischeckitem="true"
- secondarytext=""
- >
- <strong>
- captain
- </strong>
-
- <div>
- warp drive
- </div>
- </gl-dropdown-item-stub>
-
- <!---->
-</gl-dropdown-stub>
-`;
diff --git a/spec/frontend/vue_shared/components/badges/__snapshots__/beta_badge_spec.js.snap b/spec/frontend/vue_shared/components/badges/__snapshots__/beta_badge_spec.js.snap
index 24b2c54f20b..359aaacde0b 100644
--- a/spec/frontend/vue_shared/components/badges/__snapshots__/beta_badge_spec.js.snap
+++ b/spec/frontend/vue_shared/components/badges/__snapshots__/beta_badge_spec.js.snap
@@ -11,7 +11,6 @@ exports[`Beta badge component renders the badge 1`] = `
>
Beta
</gl-badge-stub>
-
<gl-popover-stub
cssclasses=""
data-testid="beta-badge"
@@ -23,28 +22,23 @@ exports[`Beta badge component renders the badge 1`] = `
<p>
A Beta feature is not production-ready, but is unlikely to change drastically before it's released. We encourage users to try Beta features and provide feedback.
</p>
-
<p
class="gl-mb-0"
>
A Beta feature:
</p>
-
<ul
class="gl-pl-4"
>
<li>
May be unstable.
</li>
-
<li>
Should not cause data loss.
</li>
-
<li>
Is supported by a commercially reasonable effort.
</li>
-
<li>
Is complete or near completion.
</li>
diff --git a/spec/frontend/vue_shared/components/blob_viewers/__snapshots__/simple_viewer_spec.js.snap b/spec/frontend/vue_shared/components/blob_viewers/__snapshots__/simple_viewer_spec.js.snap
index fbf3d17fd64..1f3f1fef365 100644
--- a/spec/frontend/vue_shared/components/blob_viewers/__snapshots__/simple_viewer_spec.js.snap
+++ b/spec/frontend/vue_shared/components/blob_viewers/__snapshots__/simple_viewer_spec.js.snap
@@ -3,80 +3,69 @@
exports[`Blob Simple Viewer component rendering matches the snapshot 1`] = `
<div>
<div
- class="file-content code js-syntax-highlight"
+ class="code file-content js-syntax-highlight"
>
<div
- class="line-numbers gl-pt-0!"
+ class="gl-pt-0! line-numbers"
>
<a
class="diff-line-num js-line-number"
data-line-number="1"
href="#LC1"
- id="L1"
+ id="reference-0"
>
<gl-icon-stub
name="link"
size="12"
/>
-
1
-
</a>
<a
class="diff-line-num js-line-number"
data-line-number="2"
href="#LC2"
- id="L2"
+ id="reference-1"
>
<gl-icon-stub
name="link"
size="12"
/>
-
2
-
</a>
<a
class="diff-line-num js-line-number"
data-line-number="3"
href="#LC3"
- id="L3"
+ id="reference-2"
>
<gl-icon-stub
name="link"
size="12"
/>
-
3
-
</a>
</div>
-
<div
class="blob-content"
>
<pre
- class="code highlight gl-p-0! gl-display-flex"
+ class="code gl-display-flex gl-p-0! highlight"
>
<code
data-blob-hash="foo-bar"
>
<span
- id="LC1"
+ id="reference-3"
>
First
</span>
-
-
<span
- id="LC2"
+ id="reference-4"
>
Second
</span>
-
-
<span
- id="LC3"
+ id="reference-5"
>
Third
</span>
diff --git a/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js b/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js
index fc8155bd381..eadcd452929 100644
--- a/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js
@@ -3,6 +3,10 @@ import { shallowMount } from '@vue/test-utils';
import { handleBlobRichViewer } from '~/blob/viewer';
import RichViewer from '~/vue_shared/components/blob_viewers/rich_viewer.vue';
import MarkdownFieldView from '~/vue_shared/components/markdown/field_view.vue';
+import {
+ MARKUP_FILE_TYPE,
+ CONTENT_LOADED_EVENT,
+} from '~/vue_shared/components/blob_viewers/constants';
import { handleLocationHash } from '~/lib/utils/common_utils';
jest.mock('~/blob/viewer');
@@ -10,10 +14,10 @@ jest.mock('~/lib/utils/common_utils');
describe('Blob Rich Viewer component', () => {
let wrapper;
- const content = '<h1 id="markdown">Foo Bar</h1>';
+ const dummyContent = '<h1 id="markdown">Foo Bar</h1>';
const defaultType = 'markdown';
- function createComponent(type = defaultType, richViewer) {
+ function createComponent(type = defaultType, richViewer, content = dummyContent) {
wrapper = shallowMount(RichViewer, {
propsData: {
richViewer,
@@ -23,26 +27,75 @@ describe('Blob Rich Viewer component', () => {
});
}
- beforeEach(() => {
- const execImmediately = (callback) => callback();
- jest.spyOn(window, 'requestIdleCallback').mockImplementation(execImmediately);
+ beforeEach(() => createComponent());
- createComponent();
- });
+ const findMarkdownFieldView = () => wrapper.findComponent(MarkdownFieldView);
+
+ describe('Markdown content', () => {
+ const generateDummyContent = (contentLength) => {
+ let generatedContent = '';
+ for (let i = 0; i < contentLength; i += 1) {
+ generatedContent += `<span>Line: ${i + 1}</span>\n`;
+ }
+
+ generatedContent += '<img src="x" onerror="alert(`XSS`)">'; // for testing against XSS
+ return `<div class="js-markup-content">${generatedContent}</div>`;
+ };
+
+ describe('Large file', () => {
+ const content = generateDummyContent(50);
+ beforeEach(() => createComponent(MARKUP_FILE_TYPE, null, content));
+
+ it('renders the top of the file immediately and does not emit a content loaded event', () => {
+ expect(wrapper.text()).toContain('Line: 10');
+ expect(wrapper.text()).not.toContain('Line: 50');
+ expect(wrapper.emitted(CONTENT_LOADED_EVENT)).toBeUndefined();
+ expect(findMarkdownFieldView().props('isLoading')).toBe(true);
+ });
+
+ it('renders the rest of the file later and emits a content loaded event', async () => {
+ jest.runAllTimers();
+ await nextTick();
+
+ expect(wrapper.text()).toContain('Line: 10');
+ expect(wrapper.text()).toContain('Line: 50');
+ expect(wrapper.emitted(CONTENT_LOADED_EVENT)).toHaveLength(1);
+ expect(findMarkdownFieldView().props('isLoading')).toBe(false);
+ });
- it('listens to requestIdleCallback', () => {
- expect(window.requestIdleCallback).toHaveBeenCalled();
+ it('sanitizes the content', () => {
+ jest.runAllTimers();
+
+ expect(wrapper.html()).toContain('<img src="x">');
+ });
+ });
+
+ describe('Small file', () => {
+ const content = generateDummyContent(5);
+ beforeEach(() => createComponent(MARKUP_FILE_TYPE, null, content));
+
+ it('renders the entire file immediately and emits a content loaded event', () => {
+ expect(wrapper.text()).toContain('Line: 5');
+ expect(wrapper.emitted(CONTENT_LOADED_EVENT)).toHaveLength(1);
+ expect(findMarkdownFieldView().props('isLoading')).toBe(false);
+ });
+
+ it('sanitizes the content', () => {
+ expect(wrapper.html()).toContain('<img src="x">');
+ });
+ });
});
it('renders the passed content without transformations', () => {
- expect(wrapper.html()).toContain(content);
+ expect(wrapper.html()).toContain(dummyContent);
});
- it('renders the richViewer if one is present', async () => {
+ it('renders the richViewer if one is present and emits a content loaded event', async () => {
const richViewer = '<div class="js-pdf-viewer"></div>';
createComponent('pdf', richViewer);
await nextTick();
expect(wrapper.html()).toContain(richViewer);
+ expect(wrapper.emitted(CONTENT_LOADED_EVENT)).toHaveLength(1);
});
it('queries for advanced viewer', () => {
@@ -50,7 +103,7 @@ describe('Blob Rich Viewer component', () => {
});
it('is using Markdown View Field', () => {
- expect(wrapper.findComponent(MarkdownFieldView).exists()).toBe(true);
+ expect(findMarkdownFieldView().exists()).toBe(true);
});
it('scrolls to the hash location', () => {
diff --git a/spec/frontend/vue_shared/components/ci_badge_link_spec.js b/spec/frontend/vue_shared/components/ci_badge_link_spec.js
index 8c860c9b06f..c74964c13f5 100644
--- a/spec/frontend/vue_shared/components/ci_badge_link_spec.js
+++ b/spec/frontend/vue_shared/components/ci_badge_link_spec.js
@@ -145,7 +145,7 @@ describe('CI Badge Link Component', () => {
});
it('should render dynamic badge size', () => {
- createComponent({ status: statuses.success, badgeSize: 'lg' });
+ createComponent({ status: statuses.success, size: 'lg' });
expect(findBadge().props('size')).toBe('lg');
});
diff --git a/spec/frontend/vue_shared/components/code_block_highlighted_spec.js b/spec/frontend/vue_shared/components/code_block_highlighted_spec.js
index 5720f45f4dd..a8436af33f2 100644
--- a/spec/frontend/vue_shared/components/code_block_highlighted_spec.js
+++ b/spec/frontend/vue_shared/components/code_block_highlighted_spec.js
@@ -28,7 +28,7 @@ describe('Code Block Highlighted', () => {
>
const
</span>
- foo =
+ foo =
<span
class="hljs-number"
>
diff --git a/spec/frontend/vue_shared/components/code_block_spec.js b/spec/frontend/vue_shared/components/code_block_spec.js
index 0fdfb96cb23..1bcf0c3938c 100644
--- a/spec/frontend/vue_shared/components/code_block_spec.js
+++ b/spec/frontend/vue_shared/components/code_block_spec.js
@@ -17,12 +17,12 @@ describe('Code Block', () => {
createComponent({}, { default: 'DEFAULT SLOT' });
expect(wrapper.element).toMatchInlineSnapshot(`
- <pre
- class="code-block rounded code"
- >
- DEFAULT SLOT
- </pre>
- `);
+ <pre
+ class="code code-block rounded"
+ >
+ DEFAULT SLOT
+ </pre>
+ `);
});
it('renders with empty code prop', () => {
@@ -30,13 +30,11 @@ describe('Code Block', () => {
expect(wrapper.element).toMatchInlineSnapshot(`
<pre
- class="code-block rounded code"
+ class="code code-block rounded"
>
<code
class="d-block"
- >
-
- </code>
+ />
</pre>
`);
});
@@ -45,32 +43,32 @@ describe('Code Block', () => {
createComponent({ code });
expect(wrapper.element).toMatchInlineSnapshot(`
- <pre
- class="code-block rounded code"
+ <pre
+ class="code code-block rounded"
+ >
+ <code
+ class="d-block"
>
- <code
- class="d-block"
- >
- test-code
- </code>
- </pre>
- `);
+ test-code
+ </code>
+ </pre>
+ `);
});
it('sets maxHeight properly when provided', () => {
createComponent({ code, maxHeight: '200px' });
expect(wrapper.element).toMatchInlineSnapshot(`
- <pre
- class="code-block rounded code"
- style="max-height: 200px; overflow-y: auto;"
+ <pre
+ class="code code-block rounded"
+ style="max-height: 200px; overflow-y: auto;"
+ >
+ <code
+ class="d-block"
>
- <code
- class="d-block"
- >
- test-code
- </code>
- </pre>
- `);
+ test-code
+ </code>
+ </pre>
+ `);
});
});
diff --git a/spec/frontend/vue_shared/components/confidentiality_badge_spec.js b/spec/frontend/vue_shared/components/confidentiality_badge_spec.js
index 92cd7597637..7f6d97e8e68 100644
--- a/spec/frontend/vue_shared/components/confidentiality_badge_spec.js
+++ b/spec/frontend/vue_shared/components/confidentiality_badge_spec.js
@@ -1,15 +1,20 @@
-import { GlBadge } from '@gitlab/ui';
+import { GlBadge, GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { TYPE_ISSUE, TYPE_EPIC, WORKSPACE_GROUP, WORKSPACE_PROJECT } from '~/issues/constants';
import ConfidentialityBadge from '~/vue_shared/components/confidentiality_badge.vue';
-const createComponent = ({ workspaceType = WORKSPACE_PROJECT, issuableType = TYPE_ISSUE } = {}) =>
+const createComponent = ({
+ workspaceType = WORKSPACE_PROJECT,
+ issuableType = TYPE_ISSUE,
+ hideTextInSmallScreens = false,
+} = {}) =>
shallowMount(ConfidentialityBadge, {
propsData: {
workspaceType,
issuableType,
+ hideTextInSmallScreens,
},
});
@@ -20,6 +25,11 @@ describe('ConfidentialityBadge', () => {
wrapper = createComponent();
});
+ const findConfidentialityBadgeText = () =>
+ wrapper.find('[data-testid="confidential-badge-text"]');
+ const findBadge = () => wrapper.findComponent(GlBadge);
+ const findBadgeIcon = () => wrapper.findComponent(GlIcon);
+
it.each`
workspaceType | issuableType | expectedTooltip
${WORKSPACE_PROJECT} | ${TYPE_ISSUE} | ${'Only project members with at least the Reporter role, the author, and assignees can view or be notified about this issue.'}
@@ -32,14 +42,30 @@ describe('ConfidentialityBadge', () => {
issuableType,
});
- const badgeEl = wrapper.findComponent(GlBadge);
-
- expect(badgeEl.props()).toMatchObject({
- icon: 'eye-slash',
+ expect(findBadgeIcon().props('name')).toBe('eye-slash');
+ expect(findBadge().props()).toMatchObject({
variant: 'warning',
});
- expect(badgeEl.attributes('title')).toBe(expectedTooltip);
- expect(badgeEl.text()).toBe('Confidential');
+ expect(findBadge().attributes('title')).toBe(expectedTooltip);
+ expect(findBadge().text()).toBe('Confidential');
},
);
+
+ it('does not have `gl-sm-display-block` and `gl-display-none` when `hideTextInSmallScreens` is false', () => {
+ wrapper = createComponent({ hideTextInSmallScreens: false });
+
+ expect(findConfidentialityBadgeText().classes()).not.toContain(
+ 'gl-display-none',
+ 'gl-sm-display-block',
+ );
+ });
+
+ it('has `gl-sm-display-block` and `gl-display-none` when `hideTextInSmallScreens` is true', () => {
+ wrapper = createComponent({ hideTextInSmallScreens: true });
+
+ expect(findConfidentialityBadgeText().classes()).toContain(
+ 'gl-display-none',
+ 'gl-sm-display-block',
+ );
+ });
});
diff --git a/spec/frontend/vue_shared/components/confirm_danger/confirm_danger_modal_spec.js b/spec/frontend/vue_shared/components/confirm_danger/confirm_danger_modal_spec.js
index 0b5c8d9afc3..53218d794c7 100644
--- a/spec/frontend/vue_shared/components/confirm_danger/confirm_danger_modal_spec.js
+++ b/spec/frontend/vue_shared/components/confirm_danger/confirm_danger_modal_spec.js
@@ -31,6 +31,7 @@ describe('Confirm Danger Modal', () => {
propsData: {
modalId,
phrase,
+ visible: false,
},
provide,
stubs: { GlSprintf },
@@ -103,4 +104,16 @@ describe('Confirm Danger Modal', () => {
expect(wrapper.emitted('confirm')).not.toBeUndefined();
});
});
+
+ describe('v-model', () => {
+ it('emits `change` event', () => {
+ findModal().vm.$emit('change', true);
+
+ expect(wrapper.emitted('change')).toEqual([[true]]);
+ });
+
+ it('sets `visible` prop', () => {
+ expect(findModal().props('visible')).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_input_spec.js b/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_input_spec.js
deleted file mode 100644
index a3e5f187f9b..00000000000
--- a/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_input_spec.js
+++ /dev/null
@@ -1,62 +0,0 @@
-import { mount } from '@vue/test-utils';
-import DateTimePickerInput from '~/vue_shared/components/date_time_picker/date_time_picker_input.vue';
-
-const inputLabel = 'This is a label';
-const inputValue = 'something';
-
-describe('DateTimePickerInput', () => {
- let wrapper;
-
- const createComponent = (propsData = {}) => {
- wrapper = mount(DateTimePickerInput, {
- propsData: {
- state: null,
- value: '',
- label: '',
- ...propsData,
- },
- });
- };
-
- it('renders label above the input', () => {
- createComponent({
- label: inputLabel,
- });
-
- expect(wrapper.find('.gl-form-group label').text()).toBe(inputLabel);
- });
-
- it('renders the same `ID` for input and `for` for label', () => {
- createComponent({ label: inputLabel });
-
- expect(wrapper.find('.gl-form-group label').attributes('for')).toBe(
- wrapper.find('input').attributes('id'),
- );
- });
-
- it('renders valid input in gray color instead of green', () => {
- createComponent({
- state: true,
- });
-
- expect(wrapper.find('input').classes('is-valid')).toBe(false);
- });
-
- it('renders invalid input in red color', () => {
- createComponent({
- state: false,
- });
-
- expect(wrapper.find('input').classes('is-invalid')).toBe(true);
- });
-
- it('input event is emitted when focus is lost', () => {
- createComponent();
-
- const input = wrapper.find('input');
- input.setValue(inputValue);
- input.trigger('blur');
-
- expect(wrapper.emitted('input')[0][0]).toEqual(inputValue);
- });
-});
diff --git a/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_lib_spec.js b/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_lib_spec.js
deleted file mode 100644
index 7a8f94b3746..00000000000
--- a/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_lib_spec.js
+++ /dev/null
@@ -1,190 +0,0 @@
-import timezoneMock from 'timezone-mock';
-
-import {
- isValidInputString,
- inputStringToIsoDate,
- isoDateToInputString,
-} from '~/vue_shared/components/date_time_picker/date_time_picker_lib';
-
-describe('date time picker lib', () => {
- describe('isValidInputString', () => {
- [
- {
- input: '2019-09-09T00:00:00.000Z',
- output: true,
- },
- {
- input: '2019-09-09T000:00.000Z',
- output: false,
- },
- {
- input: 'a2019-09-09T000:00.000Z',
- output: false,
- },
- {
- input: '2019-09-09T',
- output: false,
- },
- {
- input: '2019-09-09',
- output: true,
- },
- {
- input: '2019-9-9',
- output: true,
- },
- {
- input: '2019-9-',
- output: true,
- },
- {
- input: '2019--',
- output: false,
- },
- {
- input: '2019',
- output: true,
- },
- {
- input: '',
- output: false,
- },
- {
- input: null,
- output: false,
- },
- ].forEach(({ input, output }) => {
- it(`isValidInputString return ${output} for ${input}`, () => {
- expect(isValidInputString(input)).toBe(output);
- });
- });
- });
-
- describe('inputStringToIsoDate', () => {
- [
- '',
- 'null',
- undefined,
- 'abc',
- 'xxxx-xx-xx',
- '9999-99-19',
- '2019-19-23',
- '2019-09-23 x',
- '2019-09-29 24:24:24',
- ].forEach((input) => {
- it(`throws error for invalid input like ${input}`, () => {
- expect(() => inputStringToIsoDate(input)).toThrow();
- });
- });
-
- [
- {
- input: '2019-09-08 01:01:01',
- output: '2019-09-08T01:01:01Z',
- },
- {
- input: '2019-09-08 00:00:00',
- output: '2019-09-08T00:00:00Z',
- },
- {
- input: '2019-09-08 23:59:59',
- output: '2019-09-08T23:59:59Z',
- },
- {
- input: '2019-09-08',
- output: '2019-09-08T00:00:00Z',
- },
- {
- input: '2019-09-08',
- output: '2019-09-08T00:00:00Z',
- },
- {
- input: '2019-09-08 00:00:00',
- output: '2019-09-08T00:00:00Z',
- },
- {
- input: '2019-09-08 23:24:24',
- output: '2019-09-08T23:24:24Z',
- },
- {
- input: '2019-09-08 0:0:0',
- output: '2019-09-08T00:00:00Z',
- },
- ].forEach(({ input, output }) => {
- it(`returns ${output} from ${input}`, () => {
- expect(inputStringToIsoDate(input)).toBe(output);
- });
- });
-
- describe('timezone formatting', () => {
- const value = '2019-09-08 01:01:01';
- const utcResult = '2019-09-08T01:01:01Z';
- const localResult = '2019-09-08T08:01:01Z';
-
- it.each`
- val | locatTimezone | utc | result
- ${value} | ${'UTC'} | ${undefined} | ${utcResult}
- ${value} | ${'UTC'} | ${false} | ${utcResult}
- ${value} | ${'UTC'} | ${true} | ${utcResult}
- ${value} | ${'US/Pacific'} | ${undefined} | ${localResult}
- ${value} | ${'US/Pacific'} | ${false} | ${localResult}
- ${value} | ${'US/Pacific'} | ${true} | ${utcResult}
- `(
- 'when timezone is $locatTimezone, formats $result for utc = $utc',
- ({ val, locatTimezone, utc, result }) => {
- timezoneMock.register(locatTimezone);
-
- expect(inputStringToIsoDate(val, utc)).toBe(result);
-
- timezoneMock.unregister();
- },
- );
- });
- });
-
- describe('isoDateToInputString', () => {
- [
- {
- input: '2019-09-08T01:01:01Z',
- output: '2019-09-08 01:01:01',
- },
- {
- input: '2019-09-08T01:01:01.999Z',
- output: '2019-09-08 01:01:01',
- },
- {
- input: '2019-09-08T00:00:00Z',
- output: '2019-09-08 00:00:00',
- },
- ].forEach(({ input, output }) => {
- it(`returns ${output} for ${input}`, () => {
- expect(isoDateToInputString(input)).toBe(output);
- });
- });
-
- describe('timezone formatting', () => {
- const value = '2019-09-08T08:01:01Z';
- const utcResult = '2019-09-08 08:01:01';
- const localResult = '2019-09-08 01:01:01';
-
- it.each`
- val | locatTimezone | utc | result
- ${value} | ${'UTC'} | ${undefined} | ${utcResult}
- ${value} | ${'UTC'} | ${false} | ${utcResult}
- ${value} | ${'UTC'} | ${true} | ${utcResult}
- ${value} | ${'US/Pacific'} | ${undefined} | ${localResult}
- ${value} | ${'US/Pacific'} | ${false} | ${localResult}
- ${value} | ${'US/Pacific'} | ${true} | ${utcResult}
- `(
- 'when timezone is $locatTimezone, formats $result for utc = $utc',
- ({ val, locatTimezone, utc, result }) => {
- timezoneMock.register(locatTimezone);
-
- expect(isoDateToInputString(val, utc)).toBe(result);
-
- timezoneMock.unregister();
- },
- );
- });
- });
-});
diff --git a/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_spec.js b/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_spec.js
deleted file mode 100644
index 5620b569409..00000000000
--- a/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_spec.js
+++ /dev/null
@@ -1,326 +0,0 @@
-import { mount } from '@vue/test-utils';
-import timezoneMock from 'timezone-mock';
-import { nextTick } from 'vue';
-import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
-import {
- defaultTimeRanges,
- defaultTimeRange,
-} from '~/vue_shared/components/date_time_picker/date_time_picker_lib';
-
-const optionsCount = defaultTimeRanges.length;
-
-describe('DateTimePicker', () => {
- let wrapper;
-
- const dropdownToggle = () => wrapper.find('.dropdown-toggle');
- const dropdownMenu = () => wrapper.find('.dropdown-menu');
- const cancelButton = () => wrapper.find('[data-testid="cancelButton"]');
- const applyButtonElement = () => wrapper.find('button.btn-confirm').element;
- const findQuickRangeItems = () => wrapper.findAll('.dropdown-item');
-
- const createComponent = (props) => {
- wrapper = mount(DateTimePicker, {
- propsData: {
- ...props,
- },
- });
- };
-
- it('renders dropdown toggle button with selected text', async () => {
- createComponent();
- await nextTick();
- expect(dropdownToggle().text()).toBe(defaultTimeRange.label);
- });
-
- it('renders dropdown toggle button with selected text and utc label', async () => {
- createComponent({ utc: true });
- await nextTick();
- expect(dropdownToggle().text()).toContain(defaultTimeRange.label);
- expect(dropdownToggle().text()).toContain('UTC');
- });
-
- it('renders dropdown with 2 custom time range inputs', async () => {
- createComponent();
- await nextTick();
- expect(wrapper.findAll('input').length).toBe(2);
- });
-
- describe('renders label with h/m/s truncated if possible', () => {
- [
- {
- start: '2019-10-10T00:00:00.000Z',
- end: '2019-10-10T00:00:00.000Z',
- label: '2019-10-10 to 2019-10-10',
- },
- {
- start: '2019-10-10T00:00:00.000Z',
- end: '2019-10-14T00:10:00.000Z',
- label: '2019-10-10 to 2019-10-14 00:10:00',
- },
- {
- start: '2019-10-10T00:00:00.000Z',
- end: '2019-10-10T00:00:01.000Z',
- label: '2019-10-10 to 2019-10-10 00:00:01',
- },
- {
- start: '2019-10-10T00:00:01.000Z',
- end: '2019-10-10T00:00:01.000Z',
- label: '2019-10-10 00:00:01 to 2019-10-10 00:00:01',
- },
- {
- start: '2019-10-10T00:00:01.000Z',
- end: '2019-10-10T00:00:01.000Z',
- utc: true,
- label: '2019-10-10 00:00:01 to 2019-10-10 00:00:01 UTC',
- },
- ].forEach(({ start, end, utc, label }) => {
- it(`for start ${start}, end ${end}, and utc ${utc}, label is ${label}`, async () => {
- createComponent({
- value: { start, end },
- utc,
- });
- await nextTick();
- expect(dropdownToggle().text()).toBe(label);
- });
- });
- });
-
- it(`renders dropdown with ${optionsCount} (default) items in quick range`, async () => {
- createComponent();
- dropdownToggle().trigger('click');
- await nextTick();
- expect(findQuickRangeItems().length).toBe(optionsCount);
- });
-
- it('renders dropdown with a default quick range item selected', async () => {
- createComponent();
- dropdownToggle().trigger('click');
- await nextTick();
- expect(wrapper.find('.dropdown-item.active').exists()).toBe(true);
- expect(wrapper.find('.dropdown-item.active').text()).toBe(defaultTimeRange.label);
- });
-
- it('renders a disabled apply button on wrong input', () => {
- createComponent({
- start: 'invalid-input-date',
- });
-
- expect(applyButtonElement().getAttribute('disabled')).toBe('disabled');
- });
-
- describe('user input', () => {
- const fillInputAndBlur = async (input, val) => {
- wrapper.find(input).setValue(val);
- await nextTick();
- wrapper.find(input).trigger('blur');
- await nextTick();
- };
-
- beforeEach(async () => {
- createComponent();
- await nextTick();
- });
-
- it('displays inline error message if custom time range inputs are invalid', async () => {
- await fillInputAndBlur('#custom-time-from', '2019-10-01abc');
- await fillInputAndBlur('#custom-time-to', '2019-10-10abc');
- expect(wrapper.findAll('.invalid-feedback').length).toBe(2);
- });
-
- it('keeps apply button disabled with invalid custom time range inputs', async () => {
- await fillInputAndBlur('#custom-time-from', '2019-10-01abc');
- await fillInputAndBlur('#custom-time-to', '2019-09-19');
- expect(applyButtonElement().getAttribute('disabled')).toBe('disabled');
- });
-
- it('enables apply button with valid custom time range inputs', async () => {
- await fillInputAndBlur('#custom-time-from', '2019-10-01');
- await fillInputAndBlur('#custom-time-to', '2019-10-19');
- expect(applyButtonElement().getAttribute('disabled')).toBeNull();
- });
-
- describe('when "apply" is clicked', () => {
- it('emits iso dates', async () => {
- await fillInputAndBlur('#custom-time-from', '2019-10-01 00:00:00');
- await fillInputAndBlur('#custom-time-to', '2019-10-19 00:00:00');
- applyButtonElement().click();
-
- expect(wrapper.emitted().input).toHaveLength(1);
- expect(wrapper.emitted().input[0]).toEqual([
- {
- end: '2019-10-19T00:00:00Z',
- start: '2019-10-01T00:00:00Z',
- },
- ]);
- });
-
- it('emits iso dates, for dates without time of day', async () => {
- await fillInputAndBlur('#custom-time-from', '2019-10-01');
- await fillInputAndBlur('#custom-time-to', '2019-10-19');
- applyButtonElement().click();
-
- expect(wrapper.emitted().input).toHaveLength(1);
- expect(wrapper.emitted().input[0]).toEqual([
- {
- end: '2019-10-19T00:00:00Z',
- start: '2019-10-01T00:00:00Z',
- },
- ]);
- });
-
- describe('when timezone is different', () => {
- beforeAll(() => {
- timezoneMock.register('US/Pacific');
- });
- afterAll(() => {
- timezoneMock.unregister();
- });
-
- it('emits iso dates', async () => {
- await fillInputAndBlur('#custom-time-from', '2019-10-01 00:00:00');
- await fillInputAndBlur('#custom-time-to', '2019-10-19 12:00:00');
- applyButtonElement().click();
-
- expect(wrapper.emitted().input).toHaveLength(1);
- expect(wrapper.emitted().input[0]).toEqual([
- {
- start: '2019-10-01T07:00:00Z',
- end: '2019-10-19T19:00:00Z',
- },
- ]);
- });
-
- it('emits iso dates with utc format', async () => {
- wrapper.setProps({ utc: true });
- await nextTick();
- await fillInputAndBlur('#custom-time-from', '2019-10-01 00:00:00');
- await fillInputAndBlur('#custom-time-to', '2019-10-19 12:00:00');
- applyButtonElement().click();
-
- expect(wrapper.emitted().input).toHaveLength(1);
- expect(wrapper.emitted().input[0]).toEqual([
- {
- start: '2019-10-01T00:00:00Z',
- end: '2019-10-19T12:00:00Z',
- },
- ]);
- });
- });
- });
-
- it('unchecks quick range when text is input is clicked', async () => {
- const findActiveItems = () =>
- findQuickRangeItems().filter((w) => w.classes().includes('active'));
-
- expect(findActiveItems().length).toBe(1);
-
- await fillInputAndBlur('#custom-time-from', '2019-10-01');
- expect(findActiveItems().length).toBe(0);
- });
-
- it('emits dates in an object when a is clicked', () => {
- findQuickRangeItems()
- .at(3) // any item
- .trigger('click');
-
- expect(wrapper.emitted().input).toHaveLength(1);
- expect(wrapper.emitted().input[0][0]).toMatchObject({
- duration: {
- seconds: expect.any(Number),
- },
- });
- });
-
- it('hides the popover with cancel button', async () => {
- dropdownToggle().trigger('click');
-
- await nextTick();
- cancelButton().trigger('click');
-
- await nextTick();
- expect(dropdownMenu().classes('show')).toBe(false);
- });
- });
-
- describe('when using non-default time windows', () => {
- const MOCK_NOW = Date.UTC(2020, 0, 23, 20);
-
- const otherTimeRanges = [
- {
- label: '1 minute',
- duration: { seconds: 60 },
- },
- {
- label: '2 minutes',
- duration: { seconds: 60 * 2 },
- default: true,
- },
- {
- label: '5 minutes',
- duration: { seconds: 60 * 5 },
- },
- ];
-
- beforeEach(() => {
- jest.spyOn(Date, 'now').mockImplementation(() => MOCK_NOW);
- });
-
- it('renders dropdown with a label in the quick range', async () => {
- createComponent({
- value: {
- duration: { seconds: 60 * 5 },
- },
- options: otherTimeRanges,
- });
- dropdownToggle().trigger('click');
- await nextTick();
- expect(dropdownToggle().text()).toBe('5 minutes');
- });
-
- it('renders dropdown with a label in the quick range and utc label', async () => {
- createComponent({
- value: {
- duration: { seconds: 60 * 5 },
- },
- utc: true,
- options: otherTimeRanges,
- });
- dropdownToggle().trigger('click');
- await nextTick();
- expect(dropdownToggle().text()).toBe('5 minutes UTC');
- });
-
- it('renders dropdown with quick range items', async () => {
- createComponent({
- value: {
- duration: { seconds: 60 * 2 },
- },
- options: otherTimeRanges,
- });
- dropdownToggle().trigger('click');
- await nextTick();
- const items = findQuickRangeItems();
-
- expect(items.length).toBe(Object.keys(otherTimeRanges).length);
- expect(items.at(0).text()).toBe('1 minute');
- expect(items.at(0).classes()).not.toContain('active');
-
- expect(items.at(1).text()).toBe('2 minutes');
- expect(items.at(1).classes()).toContain('active');
-
- expect(items.at(2).text()).toBe('5 minutes');
- expect(items.at(2).classes()).not.toContain('active');
- });
-
- it('renders dropdown with a label not in the quick range', async () => {
- createComponent({
- value: {
- duration: { seconds: 60 * 4 },
- },
- });
- dropdownToggle().trigger('click');
- await nextTick();
- expect(dropdownToggle().text()).toBe('2020-01-23 19:56:00 to 2020-01-23 20:00:00');
- });
- });
-});
diff --git a/spec/frontend/vue_shared/components/design_management/__snapshots__/design_note_pin_spec.js.snap b/spec/frontend/vue_shared/components/design_management/__snapshots__/design_note_pin_spec.js.snap
index eb0adb0bebd..a0b1bb7df09 100644
--- a/spec/frontend/vue_shared/components/design_management/__snapshots__/design_note_pin_spec.js.snap
+++ b/spec/frontend/vue_shared/components/design_management/__snapshots__/design_note_pin_spec.js.snap
@@ -3,20 +3,18 @@
exports[`Design note pin component should match the snapshot of note with index 1`] = `
<button
aria-label="Comment '1' position"
- class="gl-display-flex gl-align-items-center gl-justify-content-center gl-font-sm js-image-badge design-note-pin gl-absolute"
+ class="design-note-pin gl-absolute gl-align-items-center gl-display-flex gl-font-sm gl-justify-content-center js-image-badge"
style="left: 10px; top: 10px;"
type="button"
>
-
- 1
-
+ 1
</button>
`;
exports[`Design note pin component should match the snapshot of note without index 1`] = `
<button
aria-label="Comment form position"
- class="gl-display-flex gl-align-items-center gl-justify-content-center gl-font-sm btn-transparent comment-indicator gl-absolute"
+ class="btn-transparent comment-indicator gl-absolute gl-align-items-center gl-display-flex gl-font-sm gl-justify-content-center"
style="left: 10px; top: 10px;"
type="button"
>
@@ -30,7 +28,7 @@ exports[`Design note pin component should match the snapshot of note without ind
exports[`Design note pin component should match the snapshot when pin is resolved 1`] = `
<button
aria-label="Comment form position"
- class="gl-display-flex gl-align-items-center gl-justify-content-center gl-font-sm btn-transparent comment-indicator resolved gl-absolute"
+ class="btn-transparent comment-indicator gl-absolute gl-align-items-center gl-display-flex gl-font-sm gl-justify-content-center resolved"
style="left: 10px; top: 10px;"
type="button"
>
@@ -44,7 +42,7 @@ exports[`Design note pin component should match the snapshot when pin is resolve
exports[`Design note pin component should match the snapshot when position is absent 1`] = `
<button
aria-label="Comment form position"
- class="gl-display-flex gl-align-items-center gl-justify-content-center gl-font-sm btn-transparent comment-indicator"
+ class="btn-transparent comment-indicator gl-align-items-center gl-display-flex gl-font-sm gl-justify-content-center"
type="button"
>
<gl-icon-stub
diff --git a/spec/frontend/vue_shared/components/entity_select/utils_spec.js b/spec/frontend/vue_shared/components/entity_select/utils_spec.js
index 9aa1baf204e..1d73924aa58 100644
--- a/spec/frontend/vue_shared/components/entity_select/utils_spec.js
+++ b/spec/frontend/vue_shared/components/entity_select/utils_spec.js
@@ -2,12 +2,16 @@ import { groupsPath } from '~/vue_shared/components/entity_select/utils';
describe('entity_select utils', () => {
describe('groupsPath', () => {
+ beforeEach(() => {
+ window.gon = { api_version: 'v4' };
+ });
+
it.each`
groupsFilter | parentGroupID | expectedPath
- ${undefined} | ${undefined} | ${'/api/:version/groups.json'}
- ${undefined} | ${1} | ${'/api/:version/groups.json'}
- ${'descendant_groups'} | ${1} | ${'/api/:version/groups/1/descendant_groups'}
- ${'subgroups'} | ${1} | ${'/api/:version/groups/1/subgroups'}
+ ${undefined} | ${undefined} | ${'/api/v4/groups.json'}
+ ${undefined} | ${1} | ${'/api/v4/groups.json'}
+ ${'descendant_groups'} | ${1} | ${'/api/v4/groups/1/descendant_groups'}
+ ${'subgroups'} | ${1} | ${'/api/v4/groups/1/subgroups'}
`(
'returns $expectedPath with groupsFilter = $groupsFilter and parentGroupID = $parentGroupID',
({ groupsFilter, parentGroupID, expectedPath }) => {
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
index b2f4c780f51..a22ad4c450e 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
@@ -84,6 +84,19 @@ export const mockMilestones = [
mockEscapedMilestone,
];
+export const projectMilestonesResponse = {
+ data: {
+ project: {
+ id: 'gid://gitlab/Project/1',
+ attributes: {
+ nodes: mockMilestones,
+ __typename: 'MilestoneConnection',
+ },
+ __typename: 'Project',
+ },
+ },
+};
+
export const mockCrmContacts = [
{
__typename: 'CustomerRelationsContact',
@@ -257,7 +270,8 @@ export const mockMilestoneToken = {
symbol: '%',
token: MilestoneToken,
operators: OPERATORS_IS,
- fetchMilestones: () => Promise.resolve({ data: mockMilestones }),
+ fullPath: 'gitlab-org',
+ isProject: true,
};
export const mockReleaseToken = {
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
index db51b4a05b1..36e82b39df4 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
@@ -6,17 +6,27 @@ import {
} from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
-import { nextTick } from 'vue';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
import axios from '~/lib/utils/axios_utils';
import { sortMilestonesByDueDate } from '~/milestones/utils';
+import searchMilestonesQuery from '~/issues/list/queries/search_milestones.query.graphql';
import { DEFAULT_MILESTONES } from '~/vue_shared/components/filtered_search_bar/constants';
import MilestoneToken from '~/vue_shared/components/filtered_search_bar/tokens/milestone_token.vue';
import BaseToken from '~/vue_shared/components/filtered_search_bar/tokens/base_token.vue';
-import { mockMilestoneToken, mockMilestones, mockRegularMilestone } from '../mock_data';
+import {
+ mockMilestoneToken,
+ mockMilestones,
+ mockRegularMilestone,
+ projectMilestonesResponse,
+} from '../mock_data';
+
+Vue.use(VueApollo);
jest.mock('~/alert');
jest.mock('~/milestones/utils');
@@ -31,6 +41,9 @@ const defaultStubs = {
},
};
+const milestonesQueryHandler = jest.fn().mockResolvedValue(projectMilestonesResponse);
+const mockApollo = createMockApollo([[searchMilestonesQuery, milestonesQueryHandler]]);
+
function createComponent(options = {}) {
const {
config = { ...mockMilestoneToken, shouldSkipSort: true },
@@ -39,6 +52,7 @@ function createComponent(options = {}) {
stubs = defaultStubs,
} = options;
return mount(MilestoneToken, {
+ apolloProvider: mockApollo,
propsData: {
config,
value,
@@ -102,6 +116,33 @@ describe('MilestoneToken', () => {
});
});
+ describe('default - when fetchMilestones function is not provided in config', () => {
+ beforeEach(() => {
+ wrapper = createComponent({});
+ return triggerFetchMilestones();
+ });
+
+ it('calls searchMilestonesQuery to fetch milestones', () => {
+ expect(milestonesQueryHandler).toHaveBeenCalledWith({
+ fullPath: mockMilestoneToken.fullPath,
+ isProject: mockMilestoneToken.isProject,
+ search: null,
+ });
+ });
+
+ it('calls searchMilestonesQuery with search parameter when provided', async () => {
+ const searchTerm = 'foo';
+
+ await triggerFetchMilestones(searchTerm);
+
+ expect(milestonesQueryHandler).toHaveBeenCalledWith({
+ fullPath: mockMilestoneToken.fullPath,
+ isProject: mockMilestoneToken.isProject,
+ search: searchTerm,
+ });
+ });
+ });
+
describe('when request is successful', () => {
const searchTerm = 'foo';
diff --git a/spec/frontend/vue_shared/components/form/__snapshots__/form_footer_actions_spec.js.snap b/spec/frontend/vue_shared/components/form/__snapshots__/form_footer_actions_spec.js.snap
index 6f98a74a82f..52684cf4259 100644
--- a/spec/frontend/vue_shared/components/form/__snapshots__/form_footer_actions_spec.js.snap
+++ b/spec/frontend/vue_shared/components/form/__snapshots__/form_footer_actions_spec.js.snap
@@ -2,7 +2,7 @@
exports[`Form Footer Actions renders content properly 1`] = `
<footer
- class="gl-mt-5 footer-block"
+ class="footer-block gl-mt-5"
>
Bar Foo Abrakadabra
</footer>
diff --git a/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js b/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js
index 271214907fc..36efcb9efa8 100644
--- a/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js
+++ b/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js
@@ -1,5 +1,5 @@
import { GlModal } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { shallowMount, createWrapper } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
@@ -123,24 +123,24 @@ describe('GlModalVuex', () => {
state.isVisible = false;
factory();
- const rootEmit = jest.spyOn(wrapper.vm.$root, '$emit');
+ const rootWrapper = createWrapper(wrapper.vm.$root);
state.isVisible = true;
await nextTick();
- expect(rootEmit).toHaveBeenCalledWith(BV_SHOW_MODAL, TEST_MODAL_ID);
+ expect(rootWrapper.emitted(BV_SHOW_MODAL)[0]).toContain(TEST_MODAL_ID);
});
it('calls bootstrap hide when isVisible changes', async () => {
state.isVisible = true;
factory();
- const rootEmit = jest.spyOn(wrapper.vm.$root, '$emit');
+ const rootWrapper = createWrapper(wrapper.vm.$root);
state.isVisible = false;
await nextTick();
- expect(rootEmit).toHaveBeenCalledWith(BV_HIDE_MODAL, TEST_MODAL_ID);
+ expect(rootWrapper.emitted(BV_HIDE_MODAL)[0]).toContain(TEST_MODAL_ID);
});
it.each(['ok', 'cancel'])(
diff --git a/spec/frontend/vue_shared/components/groups_list/groups_list_item_spec.js b/spec/frontend/vue_shared/components/groups_list/groups_list_item_spec.js
index 877de4f4695..cba9f78790d 100644
--- a/spec/frontend/vue_shared/components/groups_list/groups_list_item_spec.js
+++ b/spec/frontend/vue_shared/components/groups_list/groups_list_item_spec.js
@@ -9,6 +9,9 @@ import {
} from '~/visibility_level/constants';
import UserAccessRoleBadge from '~/vue_shared/components/user_access_role_badge.vue';
import { ACCESS_LEVEL_LABELS } from '~/access_level/constants';
+import ListActions from '~/vue_shared/components/list_actions/list_actions.vue';
+import { ACTION_EDIT, ACTION_DELETE } from '~/vue_shared/components/list_actions/constants';
+import DangerConfirmModal from '~/vue_shared/components/confirm_danger/confirm_danger_modal.vue';
import { groups } from './mock_data';
describe('GroupsListItem', () => {
@@ -30,6 +33,8 @@ describe('GroupsListItem', () => {
const findAvatarLabeled = () => wrapper.findComponent(GlAvatarLabeled);
const findGroupDescription = () => wrapper.findByTestId('group-description');
const findVisibilityIcon = () => findAvatarLabeled().findComponent(GlIcon);
+ const findListActions = () => wrapper.findComponent(ListActions);
+ const findConfirmationModal = () => wrapper.findComponent(DangerConfirmModal);
it('renders group avatar', () => {
createComponent();
@@ -179,4 +184,68 @@ describe('GroupsListItem', () => {
expect(wrapper.findByTestId('group-icon').exists()).toBe(false);
});
});
+
+ describe('when group has actions', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('displays actions dropdown', () => {
+ expect(findListActions().props()).toMatchObject({
+ actions: {
+ [ACTION_EDIT]: {
+ href: group.editPath,
+ },
+ [ACTION_DELETE]: {
+ action: expect.any(Function),
+ },
+ },
+ availableActions: [ACTION_EDIT, ACTION_DELETE],
+ });
+ });
+
+ describe('when delete action is fired', () => {
+ beforeEach(() => {
+ findListActions().props('actions')[ACTION_DELETE].action();
+ });
+
+ it('displays confirmation modal with correct props', () => {
+ expect(findConfirmationModal().props()).toMatchObject({
+ visible: true,
+ phrase: group.fullName,
+ });
+ });
+
+ describe('when deletion is confirmed', () => {
+ beforeEach(() => {
+ findConfirmationModal().vm.$emit('confirm');
+ });
+
+ it('emits `delete` event', () => {
+ expect(wrapper.emitted('delete')).toMatchObject([[group]]);
+ });
+ });
+ });
+ });
+
+ describe('when group does not have actions', () => {
+ beforeEach(() => {
+ createComponent({
+ propsData: {
+ group: {
+ ...group,
+ availableActions: [],
+ },
+ },
+ });
+ });
+
+ it('does not display actions dropdown', () => {
+ expect(findListActions().exists()).toBe(false);
+ });
+
+ it('does not display confirmation modal', () => {
+ expect(findConfirmationModal().exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/groups_list/groups_list_spec.js b/spec/frontend/vue_shared/components/groups_list/groups_list_spec.js
index c65aa347bcf..ec6a1dc9576 100644
--- a/spec/frontend/vue_shared/components/groups_list/groups_list_spec.js
+++ b/spec/frontend/vue_shared/components/groups_list/groups_list_spec.js
@@ -31,4 +31,18 @@ describe('GroupsList', () => {
})),
);
});
+
+ describe('when `GroupsListItem` emits `delete` event', () => {
+ const [firstGroup] = defaultPropsData.groups;
+
+ beforeEach(() => {
+ createComponent();
+
+ wrapper.findComponent(GroupsListItem).vm.$emit('delete', firstGroup);
+ });
+
+ it('emits `delete` event', () => {
+ expect(wrapper.emitted('delete')).toEqual([[firstGroup]]);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/groups_list/mock_data.js b/spec/frontend/vue_shared/components/groups_list/mock_data.js
index 0dad27f8311..08ee962892c 100644
--- a/spec/frontend/vue_shared/components/groups_list/mock_data.js
+++ b/spec/frontend/vue_shared/components/groups_list/mock_data.js
@@ -1,3 +1,5 @@
+import { ACTION_EDIT, ACTION_DELETE } from '~/vue_shared/components/list_actions/constants';
+
export const groups = [
{
id: 1,
@@ -14,6 +16,8 @@ export const groups = [
accessLevel: {
integerValue: 10,
},
+ editPath: 'http://127.0.0.1:3000/groups/gitlab-org/-/edit',
+ availableActions: [ACTION_EDIT, ACTION_DELETE],
},
{
id: 2,
@@ -31,5 +35,7 @@ export const groups = [
accessLevel: {
integerValue: 20,
},
+ editPath: 'http://127.0.0.1:3000/groups/gitlab-org/test-subgroup/-/edit',
+ availableActions: [ACTION_EDIT, ACTION_DELETE],
},
];
diff --git a/spec/frontend/vue_shared/components/list_actions/list_actions_spec.js b/spec/frontend/vue_shared/components/list_actions/list_actions_spec.js
new file mode 100644
index 00000000000..ae70cf091a5
--- /dev/null
+++ b/spec/frontend/vue_shared/components/list_actions/list_actions_spec.js
@@ -0,0 +1,135 @@
+import { GlDisclosureDropdown } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import ListActions from '~/vue_shared/components/list_actions/list_actions.vue';
+import { ACTION_EDIT, ACTION_DELETE } from '~/vue_shared/components/list_actions/constants';
+
+describe('ListActions', () => {
+ let wrapper;
+
+ const defaultPropsData = {
+ actions: {
+ [ACTION_EDIT]: {
+ href: '/-/edit',
+ },
+ [ACTION_DELETE]: {
+ action: () => {},
+ },
+ },
+ availableActions: [ACTION_EDIT, ACTION_DELETE],
+ };
+
+ const createComponent = ({ propsData = {} } = {}) => {
+ wrapper = shallowMountExtended(ListActions, {
+ propsData: {
+ ...defaultPropsData,
+ ...propsData,
+ },
+ });
+ };
+
+ const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
+ const getDropdownItemsProp = () => findDropdown().props('items');
+
+ it('allows extending of base actions', () => {
+ createComponent();
+
+ expect(getDropdownItemsProp()).toEqual([
+ {
+ text: 'Edit',
+ href: '/-/edit',
+ },
+ {
+ text: 'Delete',
+ extraAttrs: {
+ class: 'gl-text-red-500!',
+ },
+ action: expect.any(Function),
+ },
+ ]);
+ });
+
+ it('allows adding custom actions', () => {
+ const ACTION_LEAVE = 'leave';
+
+ createComponent({
+ propsData: {
+ actions: {
+ ...defaultPropsData.actions,
+ [ACTION_LEAVE]: {
+ text: 'Leave project',
+ action: () => {},
+ },
+ },
+ availableActions: [ACTION_EDIT, ACTION_LEAVE, ACTION_DELETE],
+ },
+ });
+
+ expect(getDropdownItemsProp()).toEqual([
+ {
+ text: 'Edit',
+ href: '/-/edit',
+ },
+ {
+ text: 'Leave project',
+ action: expect.any(Function),
+ },
+ {
+ text: 'Delete',
+ extraAttrs: {
+ class: 'gl-text-red-500!',
+ },
+ action: expect.any(Function),
+ },
+ ]);
+ });
+
+ it('only shows available actions', () => {
+ createComponent({
+ propsData: {
+ availableActions: [ACTION_EDIT],
+ },
+ });
+
+ expect(getDropdownItemsProp()).toEqual([
+ {
+ text: 'Edit',
+ href: '/-/edit',
+ },
+ ]);
+ });
+
+ it('displays actions in the order set in `availableActions` prop', () => {
+ createComponent({
+ propsData: {
+ availableActions: [ACTION_DELETE, ACTION_EDIT],
+ },
+ });
+
+ expect(getDropdownItemsProp()).toEqual([
+ {
+ text: 'Delete',
+ extraAttrs: {
+ class: 'gl-text-red-500!',
+ },
+ action: expect.any(Function),
+ },
+ {
+ text: 'Edit',
+ href: '/-/edit',
+ },
+ ]);
+ });
+
+ it('renders `GlDisclosureDropdown` with expected appearance related props', () => {
+ createComponent();
+
+ expect(findDropdown().props()).toMatchObject({
+ icon: 'ellipsis_v',
+ noCaret: true,
+ toggleText: 'Actions',
+ textSrOnly: true,
+ placement: 'right',
+ category: 'tertiary',
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap b/spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap
index 3b49536799c..baf40115e7a 100644
--- a/spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap
+++ b/spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap
@@ -13,9 +13,8 @@ exports[`Suggestion Diff component matches snapshot 1`] = `
isbatched="true"
suggestionscount="0"
/>
-
<table
- class="mb-3 md-suggestion-diff js-syntax-highlight code"
+ class="code js-syntax-highlight mb-3 md-suggestion-diff"
>
<tbody>
<suggestion-diff-row-stub
diff --git a/spec/frontend/vue_shared/components/markdown/apply_suggestion_spec.js b/spec/frontend/vue_shared/components/markdown/apply_suggestion_spec.js
index 8aab867f32a..cdbdbfab9d1 100644
--- a/spec/frontend/vue_shared/components/markdown/apply_suggestion_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/apply_suggestion_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown, GlFormTextarea, GlButton, GlAlert } from '@gitlab/ui';
+import { GlDisclosureDropdown, GlFormTextarea, GlButton, GlAlert } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import ApplySuggestionComponent from '~/vue_shared/components/markdown/apply_suggestion.vue';
@@ -10,10 +10,11 @@ describe('Apply Suggestion component', () => {
wrapper = shallowMount(ApplySuggestionComponent, { propsData: { ...propsData, ...props } });
};
- const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
const findTextArea = () => wrapper.findComponent(GlFormTextarea);
const findApplyButton = () => wrapper.findComponent(GlButton);
const findAlert = () => wrapper.findComponent(GlAlert);
+ const findHelpText = () => wrapper.find('span');
beforeEach(() => createWrapper());
@@ -22,7 +23,7 @@ describe('Apply Suggestion component', () => {
const dropdown = findDropdown();
expect(dropdown.exists()).toBe(true);
- expect(dropdown.props('text')).toBe('Apply suggestion');
+ expect(dropdown.props('toggleText')).toBe('Apply suggestion');
expect(dropdown.props('disabled')).toBe(false);
});
@@ -41,6 +42,22 @@ describe('Apply Suggestion component', () => {
});
});
+ describe('help text', () => {
+ describe('when applying a single suggestion', () => {
+ it('renders the correct help text', () => {
+ expect(findHelpText().text()).toEqual('This also resolves this thread');
+ });
+ });
+
+ describe('when applying in batch', () => {
+ it('renders the correct help text', () => {
+ createWrapper({ batchSuggestionsCount: 3 });
+
+ expect(findHelpText().text()).toEqual('This also resolves all related threads');
+ });
+ });
+ });
+
describe('disabled', () => {
it('disables the dropdown', () => {
createWrapper({ disabled: true });
diff --git a/spec/frontend/vue_shared/components/markdown/comment_templates_dropdown_spec.js b/spec/frontend/vue_shared/components/markdown/comment_templates_dropdown_spec.js
index cd9f27dccbd..11c57fc5768 100644
--- a/spec/frontend/vue_shared/components/markdown/comment_templates_dropdown_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/comment_templates_dropdown_spec.js
@@ -12,6 +12,7 @@ import savedRepliesQuery from '~/vue_shared/components/markdown/saved_replies.qu
import {
TRACKING_SAVED_REPLIES_USE,
TRACKING_SAVED_REPLIES_USE_IN_MR,
+ TRACKING_SAVED_REPLIES_USE_IN_OTHER,
} from '~/vue_shared/components/markdown/constants';
let wrapper;
@@ -87,6 +88,12 @@ describe('Comment templates dropdown', () => {
});
describe('tracking', () => {
+ it('always sends two tracking events', async () => {
+ await selectSavedReply();
+
+ expect(trackingSpy).toHaveBeenCalledTimes(2);
+ });
+
it('tracks overall usage', async () => {
await selectSavedReply();
@@ -108,7 +115,6 @@ describe('Comment templates dropdown', () => {
TRACKING_SAVED_REPLIES_USE_IN_MR,
expect.any(Object),
);
- expect(trackingSpy).toHaveBeenCalledTimes(2);
});
it('is not sent when not in an MR', async () => {
@@ -121,7 +127,32 @@ describe('Comment templates dropdown', () => {
TRACKING_SAVED_REPLIES_USE_IN_MR,
expect.any(Object),
);
- expect(trackingSpy).toHaveBeenCalledTimes(1);
+ });
+ });
+
+ describe('non-MR usage event', () => {
+ it('is sent when not in an MR', async () => {
+ window.location.toString.mockReturnValue('this/looks/like/a/-/issues/1');
+
+ await selectSavedReply();
+
+ expect(trackingSpy).toHaveBeenCalledWith(
+ expect.any(String),
+ TRACKING_SAVED_REPLIES_USE_IN_OTHER,
+ expect.any(Object),
+ );
+ });
+
+ it('is not sent when in an MR', async () => {
+ window.location.toString.mockReturnValue('this/looks/like/a/-/merge_requests/1');
+
+ await selectSavedReply();
+
+ expect(trackingSpy).not.toHaveBeenCalledWith(
+ expect.any(String),
+ TRACKING_SAVED_REPLIES_USE_IN_OTHER,
+ expect.any(Object),
+ );
});
});
});
diff --git a/spec/frontend/vue_shared/components/markdown/field_view_spec.js b/spec/frontend/vue_shared/components/markdown/field_view_spec.js
index 1bbbe0896f2..f61c67c4f9b 100644
--- a/spec/frontend/vue_shared/components/markdown/field_view_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/field_view_spec.js
@@ -6,15 +6,27 @@ import { renderGFM } from '~/behaviors/markdown/render_gfm';
jest.mock('~/behaviors/markdown/render_gfm');
describe('Markdown Field View component', () => {
- function createComponent() {
- shallowMount(MarkdownFieldView);
+ function createComponent(isLoading = false) {
+ shallowMount(MarkdownFieldView, { propsData: { isLoading } });
}
- beforeEach(() => {
+ it('processes rendering with GFM', () => {
createComponent();
- });
- it('processes rendering with GFM', () => {
expect(renderGFM).toHaveBeenCalledTimes(1);
});
+
+ describe('watchers', () => {
+ it('does not process rendering with GFM if isLoading is true', () => {
+ createComponent(true);
+
+ expect(renderGFM).not.toHaveBeenCalled();
+ });
+
+ it('processes rendering with GFM when isLoading is updated to `false`', () => {
+ createComponent(false);
+
+ expect(renderGFM).toHaveBeenCalledTimes(1);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js b/spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js
index 31c0fa6f699..c69b18bca88 100644
--- a/spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js
@@ -489,6 +489,11 @@ describe('vue_shared/component/markdown/markdown_editor', () => {
expect(localStorage.getItem('autosave/issue/1234')).toBe(newValue);
});
+ it('does not autofocus the content editor', () => {
+ buildWrapper({ propsData: { autosaveKey: 'issue/1234' } });
+ expect(findContentEditor().props().autofocus).toBe(false);
+ });
+
it('bubbles up keydown event', () => {
const event = new Event('keydown');
diff --git a/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js b/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
index 9768bc7a6dd..bc82357cb81 100644
--- a/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
@@ -219,12 +219,11 @@ describe('Suggestion Diff component', () => {
describe('tooltip message for apply button', () => {
const findTooltip = () => getBinding(findApplyButton().element, 'gl-tooltip');
- it('renders correct tooltip message when button is applicable', () => {
- createComponent({ batchSuggestionsCount: 0 });
+ it('renders no tooltip message when button is applicable', () => {
+ createComponent({ batchSuggestionsCount: 1, isBatched: true });
const tooltip = findTooltip();
- expect(tooltip.modifiers.viewport).toBe(true);
- expect(tooltip.value).toBe('This also resolves this thread');
+ expect(tooltip.value).toBe(false);
});
it('renders the inapplicable reason in the tooltip when button is not applicable', () => {
diff --git a/spec/frontend/vue_shared/components/metric_images/__snapshots__/metric_images_table_spec.js.snap b/spec/frontend/vue_shared/components/metric_images/__snapshots__/metric_images_table_spec.js.snap
index 015049795a1..2dd7149069f 100644
--- a/spec/frontend/vue_shared/components/metric_images/__snapshots__/metric_images_table_spec.js.snap
+++ b/spec/frontend/vue_shared/components/metric_images/__snapshots__/metric_images_table_spec.js.snap
@@ -3,7 +3,7 @@
exports[`Metrics upload item render the metrics image component 1`] = `
<gl-card-stub
bodyclass="gl-border-1,gl-border-t-solid,gl-border-gray-100,[object Object]"
- class="collapsible-card border gl-p-0 gl-mb-5"
+ class="border collapsible-card gl-mb-5 gl-p-0"
footerclass=""
headerclass="gl-display-flex gl-align-items-center gl-border-b-0 gl-py-3"
>
@@ -18,12 +18,10 @@ exports[`Metrics upload item render the metrics image component 1`] = `
size="sm"
titletag="h4"
>
-
<p>
Are you sure you wish to delete this image?
</p>
</gl-modal-stub>
-
<gl-modal-stub
actioncancel="[object Object]"
actionprimary="[object Object]"
@@ -35,7 +33,6 @@ exports[`Metrics upload item render the metrics image component 1`] = `
size="sm"
titletag="h4"
>
-
<gl-form-group-stub
label="Text (optional)"
label-for="upload-text-input"
@@ -44,10 +41,9 @@ exports[`Metrics upload item render the metrics image component 1`] = `
>
<gl-form-input-stub
data-testid="metric-image-text-field"
- id="upload-text-input"
+ id="reference-0"
/>
</gl-form-group-stub>
-
<gl-form-group-stub
description="Must start with http or https"
label="Link (optional)"
@@ -57,17 +53,16 @@ exports[`Metrics upload item render the metrics image component 1`] = `
>
<gl-form-input-stub
data-testid="metric-image-url-field"
- id="upload-url-input"
+ id="reference-1"
/>
</gl-form-group-stub>
</gl-modal-stub>
-
<div
class="gl-display-flex gl-flex-direction-column"
data-testid="metric-image-body"
>
<img
- class="gl-max-w-full gl-align-self-center"
+ class="gl-align-self-center gl-max-w-full"
src="test_file_path"
/>
</div>
diff --git a/spec/frontend/vue_shared/components/notes/__snapshots__/noteable_warning_spec.js.snap b/spec/frontend/vue_shared/components/notes/__snapshots__/noteable_warning_spec.js.snap
index 8a187f3cb1f..891b0c95f0e 100644
--- a/spec/frontend/vue_shared/components/notes/__snapshots__/noteable_warning_spec.js.snap
+++ b/spec/frontend/vue_shared/components/notes/__snapshots__/noteable_warning_spec.js.snap
@@ -2,10 +2,7 @@
exports[`Issue Warning Component when issue is locked but not confidential renders information about locked issue 1`] = `
<span>
-
- This issue is locked.
- Only project members can comment.
-
+ This issue is locked. Only project members can comment.
<gl-link-stub
href="locked-path"
target="_blank"
@@ -17,10 +14,7 @@ exports[`Issue Warning Component when issue is locked but not confidential rende
exports[`Issue Warning Component when noteable is confidential but not locked renders information about confidential issue 1`] = `
<span>
-
- This is a confidential issue.
- People without permission will never get a notification.
-
+ This is a confidential issue. People without permission will never get a notification.
<gl-link-stub
href="confidential-path"
target="_blank"
@@ -33,14 +27,14 @@ exports[`Issue Warning Component when noteable is confidential but not locked re
exports[`Issue Warning Component when noteable is locked and confidential renders information about locked and confidential noteable 1`] = `
<span>
<span>
- This issue is
+ This issue is
<gl-link-stub
href=""
target="_blank"
>
confidential
</gl-link-stub>
- and
+ and
<gl-link-stub
href=""
target="_blank"
@@ -49,8 +43,6 @@ exports[`Issue Warning Component when noteable is locked and confidential render
</gl-link-stub>
.
</span>
-
- People without permission will never get a notification and won't be able to comment.
-
+ People without permission will never get a notification and won't be able to comment.
</span>
`;
diff --git a/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap b/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap
index de53caa66c7..c489fb08be5 100644
--- a/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap
+++ b/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap
@@ -2,10 +2,10 @@
exports[`Issue placeholder note component matches snapshot 1`] = `
<timeline-entry-item-stub
- class="note note-wrapper note-comment being-posted fade-in-half"
+ class="being-posted fade-in-half note note-comment note-wrapper"
>
<div
- class="timeline-avatar gl-float-left"
+ class="gl-float-left timeline-avatar"
>
<gl-avatar-link-stub
href="/root"
@@ -20,9 +20,8 @@ exports[`Issue placeholder note component matches snapshot 1`] = `
/>
</gl-avatar-link-stub>
</div>
-
<div
- class="timeline-content discussion"
+ class="discussion timeline-content"
>
<div
class="note-header"
@@ -34,11 +33,10 @@ exports[`Issue placeholder note component matches snapshot 1`] = `
href="/root"
>
<span
- class="d-none d-sm-inline-block bold"
+ class="bold d-none d-sm-inline-block"
>
Root
</span>
-
<span
class="note-headline-light"
>
@@ -47,7 +45,6 @@ exports[`Issue placeholder note component matches snapshot 1`] = `
</a>
</div>
</div>
-
<div
class="timeline-discussion-body"
>
@@ -55,15 +52,13 @@ exports[`Issue placeholder note component matches snapshot 1`] = `
class="note-body"
>
<div
- class="note-text md"
+ class="md note-text"
>
<p
dir="auto"
>
Foo
</p>
-
-
</div>
</div>
</div>
diff --git a/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_system_note_spec.js.snap b/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_system_note_spec.js.snap
index 10c33269107..a609df5e775 100644
--- a/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_system_note_spec.js.snap
+++ b/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_system_note_spec.js.snap
@@ -2,7 +2,7 @@
exports[`Placeholder system note component matches snapshot 1`] = `
<timeline-entry-item-stub
- class="note system-note being-posted fade-in-half"
+ class="being-posted fade-in-half note system-note"
>
<div
class="timeline-content"
diff --git a/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
index e5b641c61fd..335c5bdfc46 100644
--- a/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
+++ b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
@@ -291,7 +291,7 @@ describe('AlertManagementEmptyState', () => {
it('renders the search component for incidents', () => {
const filteredSearchBar = findFilteredSearchBar();
- expect(filteredSearchBar.props('searchInputPlaceholder')).toBe('Search or filter results…');
+
expect(filteredSearchBar.props('tokens')).toEqual([
{
type: TOKEN_TYPE_AUTHOR,
diff --git a/spec/frontend/vue_shared/components/pagination_bar/pagination_bar_spec.js b/spec/frontend/vue_shared/components/pagination_bar/pagination_bar_spec.js
index 2a1a6342c38..4cb1c1f3616 100644
--- a/spec/frontend/vue_shared/components/pagination_bar/pagination_bar_spec.js
+++ b/spec/frontend/vue_shared/components/pagination_bar/pagination_bar_spec.js
@@ -1,4 +1,4 @@
-import { GlPagination, GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { GlPagination, GlDisclosureDropdown, GlDisclosureDropdownItem } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import PaginationBar from '~/vue_shared/components/pagination_bar/pagination_bar.vue';
import PaginationLinks from '~/vue_shared/components/pagination_links.vue';
@@ -42,8 +42,8 @@ describe('Pagination bar', () => {
});
it('emits set-page-size event when page size is selected', () => {
- const firstItemInPageSizeDropdown = wrapper.findComponent(GlDropdownItem);
- firstItemInPageSizeDropdown.vm.$emit('click');
+ const firstItemInPageSizeDropdown = wrapper.findComponent(GlDisclosureDropdownItem);
+ firstItemInPageSizeDropdown.vm.$emit('action');
const [emittedPageSizeChange] = wrapper.emitted('set-page-size')[0];
expect(firstItemInPageSizeDropdown.text()).toMatchInterpolatedText(
@@ -62,9 +62,9 @@ describe('Pagination bar', () => {
},
});
- expect(wrapper.findComponent(GlDropdown).find('button').text()).toMatchInterpolatedText(
- `${CURRENT_PAGE_SIZE} items per page`,
- );
+ expect(
+ wrapper.findComponent(GlDisclosureDropdown).find('button').text(),
+ ).toMatchInterpolatedText(`${CURRENT_PAGE_SIZE} items per page`);
});
it('renders current page information', () => {
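Side note on the pagination bar hunks above: GlDropdown/GlDropdownItem are swapped for their disclosure counterparts, whose items emit `action` rather than `click`. A minimal sketch of that interaction pattern, assuming a hypothetical `PageSizeSelector` component that re-emits `set-page-size` (the real spec mounts PaginationBar):

import { GlDisclosureDropdownItem } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
// Hypothetical component, used only for illustration.
import PageSizeSelector from '~/components/page_size_selector.vue';

it('emits set-page-size when a dropdown item fires `action`', () => {
  const wrapper = mount(PageSizeSelector);

  // GlDisclosureDropdownItem emits `action` (not `click`) when selected.
  wrapper.findComponent(GlDisclosureDropdownItem).vm.$emit('action');

  expect(wrapper.emitted('set-page-size')).toHaveLength(1);
});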
diff --git a/spec/frontend/vue_shared/components/projects_list/projects_list_item_spec.js b/spec/frontend/vue_shared/components/projects_list/projects_list_item_spec.js
index 2490422e4e8..7cf560745b6 100644
--- a/spec/frontend/vue_shared/components/projects_list/projects_list_item_spec.js
+++ b/spec/frontend/vue_shared/components/projects_list/projects_list_item_spec.js
@@ -1,10 +1,10 @@
-import { GlAvatarLabeled, GlBadge, GlIcon, GlPopover, GlDisclosureDropdown } from '@gitlab/ui';
+import { GlAvatarLabeled, GlBadge, GlIcon, GlPopover } from '@gitlab/ui';
import uniqueId from 'lodash/uniqueId';
import projects from 'test_fixtures/api/users/projects/get.json';
import { mountExtended } from 'helpers/vue_test_utils_helper';
-import { __ } from '~/locale';
import ProjectsListItem from '~/vue_shared/components/projects_list/projects_list_item.vue';
-import { ACTION_EDIT, ACTION_DELETE } from '~/vue_shared/components/projects_list/constants';
+import ListActions from '~/vue_shared/components/list_actions/list_actions.vue';
+import { ACTION_EDIT, ACTION_DELETE } from '~/vue_shared/components/list_actions/constants';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import {
@@ -43,6 +43,7 @@ describe('ProjectsListItem', () => {
const findPopover = () => findProjectTopics().findComponent(GlPopover);
const findProjectDescription = () => wrapper.findByTestId('project-description');
const findVisibilityIcon = () => findAvatarLabeled().findComponent(GlIcon);
+ const findListActions = () => wrapper.findComponent(ListActions);
beforeEach(() => {
uniqueId.mockImplementation(jest.requireActual('lodash/uniqueId'));
@@ -327,7 +328,7 @@ describe('ProjectsListItem', () => {
propsData: {
project: {
...project,
- actions: [ACTION_EDIT, ACTION_DELETE],
+ availableActions: [ACTION_EDIT, ACTION_DELETE],
isForked: true,
editPath,
},
@@ -336,32 +337,22 @@ describe('ProjectsListItem', () => {
});
it('displays actions dropdown', () => {
- expect(wrapper.findComponent(GlDisclosureDropdown).props()).toMatchObject({
- items: [
- {
- id: ACTION_EDIT,
- text: __('Edit'),
+ expect(findListActions().props()).toMatchObject({
+ actions: {
+ [ACTION_EDIT]: {
href: editPath,
},
- {
- id: ACTION_DELETE,
- text: __('Delete'),
- extraAttrs: {
- class: 'gl-text-red-500!',
- },
+ [ACTION_DELETE]: {
action: expect.any(Function),
},
- ],
+ },
+ availableActions: [ACTION_EDIT, ACTION_DELETE],
});
});
describe('when delete action is fired', () => {
beforeEach(() => {
- wrapper
- .findComponent(GlDisclosureDropdown)
- .props('items')
- .find((item) => item.id === ACTION_DELETE)
- .action();
+ findListActions().props('actions')[ACTION_DELETE].action();
});
it('displays confirmation modal with correct props', () => {
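For context on the projects list hunks above: the inline GlDisclosureDropdown assertions give way to the shared ListActions component, which takes an `actions` map plus an `availableActions` array. A rough sketch of asserting that contract, assuming a hypothetical `SomeListItem` wrapper that forwards both props (the real spec uses ProjectsListItem):

import { mountExtended } from 'helpers/vue_test_utils_helper';
import ListActions from '~/vue_shared/components/list_actions/list_actions.vue';
import { ACTION_EDIT, ACTION_DELETE } from '~/vue_shared/components/list_actions/constants';
// Hypothetical component, for illustration only.
import SomeListItem from '~/vue_shared/components/some_list_item.vue';

it('configures edit and delete actions on ListActions', () => {
  const wrapper = mountExtended(SomeListItem, {
    propsData: { item: { availableActions: [ACTION_EDIT, ACTION_DELETE], editPath: '/edit' } },
  });

  expect(wrapper.findComponent(ListActions).props()).toMatchObject({
    actions: {
      [ACTION_EDIT]: { href: '/edit' },
      [ACTION_DELETE]: { action: expect.any(Function) },
    },
    availableActions: [ACTION_EDIT, ACTION_DELETE],
  });
});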
diff --git a/spec/frontend/vue_shared/components/registry/__snapshots__/code_instruction_spec.js.snap b/spec/frontend/vue_shared/components/registry/__snapshots__/code_instruction_spec.js.snap
index eadcb6ceeb7..64bab2de3b7 100644
--- a/spec/frontend/vue_shared/components/registry/__snapshots__/code_instruction_spec.js.snap
+++ b/spec/frontend/vue_shared/components/registry/__snapshots__/code_instruction_spec.js.snap
@@ -7,14 +7,12 @@ exports[`Package code instruction multiline to match the snapshot 1`] = `
>
foo_label
</label>
-
<div>
<pre
class="gl-font-monospace"
data-testid="multiline-instruction"
>
- this is some
-multiline text
+ this is somemultiline text
</pre>
</div>
</div>
@@ -23,25 +21,23 @@ multiline text
exports[`Package code instruction single line to match the default snapshot 1`] = `
<div>
<label
- for="instruction-input_1"
+ for="reference-0"
>
foo_label
</label>
-
<div
class="gl-mb-3"
>
<div
- class="input-group gl-mb-3"
+ class="gl-mb-3 input-group"
>
<input
class="form-control gl-font-monospace"
data-testid="instruction-input"
- id="instruction-input_1"
- readonly="readonly"
+ id="reference-0"
+ readonly=""
type="text"
/>
-
<span
class="input-group-append"
data-testid="instruction-button"
diff --git a/spec/frontend/vue_shared/components/registry/__snapshots__/history_item_spec.js.snap b/spec/frontend/vue_shared/components/registry/__snapshots__/history_item_spec.js.snap
index 5c487754b87..8eb0e08908b 100644
--- a/spec/frontend/vue_shared/components/registry/__snapshots__/history_item_spec.js.snap
+++ b/spec/frontend/vue_shared/components/registry/__snapshots__/history_item_spec.js.snap
@@ -2,20 +2,19 @@
exports[`History Item renders the correct markup 1`] = `
<li
- class="timeline-entry system-note note-wrapper"
+ class="note-wrapper system-note timeline-entry"
>
<div
class="timeline-entry-inner"
>
<div
- class="gl--flex-center gl-rounded-full gl-mt-n1 gl-ml-2 gl-w-6 gl-h-6 gl-bg-gray-50 gl-text-gray-600 gl-float-left"
+ class="gl--flex-center gl-bg-gray-50 gl-float-left gl-h-6 gl-ml-2 gl-mt-n1 gl-rounded-full gl-text-gray-600 gl-w-6"
>
<gl-icon-stub
name="pencil"
size="16"
/>
</div>
-
<div
class="timeline-content"
>
@@ -30,7 +29,6 @@ exports[`History Item renders the correct markup 1`] = `
/>
</div>
</div>
-
<div
class="note-body"
>
diff --git a/spec/frontend/vue_shared/components/resizable_chart/__snapshots__/skeleton_loader_spec.js.snap b/spec/frontend/vue_shared/components/resizable_chart/__snapshots__/skeleton_loader_spec.js.snap
index 65427374e1b..22cfe8a5fc7 100644
--- a/spec/frontend/vue_shared/components/resizable_chart/__snapshots__/skeleton_loader_spec.js.snap
+++ b/spec/frontend/vue_shared/components/resizable_chart/__snapshots__/skeleton_loader_spec.js.snap
@@ -23,7 +23,7 @@ exports[`Resizable Skeleton Loader default setup renders the bars, labels, and g
/>
<defs>
<clippath
- id="null-idClip"
+ id="reference-0"
>
<rect
data-testid="skeleton-chart-grid"
@@ -46,7 +46,6 @@ exports[`Resizable Skeleton Loader default setup renders the bars, labels, and g
x="0"
y="90%"
/>
-
<rect
data-testid="skeleton-chart-bar"
height="5%"
@@ -111,7 +110,6 @@ exports[`Resizable Skeleton Loader default setup renders the bars, labels, and g
x="90%"
y="10%"
/>
-
<rect
data-testid="skeleton-chart-label"
height="3%"
@@ -178,7 +176,7 @@ exports[`Resizable Skeleton Loader default setup renders the bars, labels, and g
/>
</clippath>
<lineargradient
- id="null-idGradient"
+ id="reference-1"
>
<stop
class="primary-stop"
@@ -242,7 +240,7 @@ exports[`Resizable Skeleton Loader with custom settings renders the correct posi
/>
<defs>
<clippath
- id="-idClip"
+ id="reference-0"
>
<rect
data-testid="skeleton-chart-grid"
@@ -265,7 +263,6 @@ exports[`Resizable Skeleton Loader with custom settings renders the correct posi
x="0"
y="90%"
/>
-
<rect
data-testid="skeleton-chart-bar"
height="5%"
@@ -330,7 +327,6 @@ exports[`Resizable Skeleton Loader with custom settings renders the correct posi
x="90.9375%"
y="10%"
/>
-
<rect
data-testid="skeleton-chart-label"
height="2%"
@@ -397,7 +393,7 @@ exports[`Resizable Skeleton Loader with custom settings renders the correct posi
/>
</clippath>
<lineargradient
- id="-idGradient"
+ id="reference-1"
>
<stop
class="primary-stop"
diff --git a/spec/frontend/vue_shared/components/settings/__snapshots__/settings_block_spec.js.snap b/spec/frontend/vue_shared/components/settings/__snapshots__/settings_block_spec.js.snap
index a0f46f07d6a..91d1b0accf1 100644
--- a/spec/frontend/vue_shared/components/settings/__snapshots__/settings_block_spec.js.snap
+++ b/spec/frontend/vue_shared/components/settings/__snapshots__/settings_block_spec.js.snap
@@ -9,11 +9,11 @@ exports[`Settings Block renders the correct markup 1`] = `
>
<h4>
<span
- aria-controls="settings_content_3"
+ aria-controls="reference-1"
aria-expanded="false"
class="gl-cursor-pointer"
data-testid="section-title-button"
- id="settings_label_2"
+ id="reference-0"
role="button"
tabindex="0"
>
@@ -22,9 +22,8 @@ exports[`Settings Block renders the correct markup 1`] = `
/>
</span>
</h4>
-
<gl-button-stub
- aria-controls="settings_content_3"
+ aria-controls="reference-1"
aria-expanded="false"
aria-label="Expand settings section"
buttontextclasses=""
@@ -33,22 +32,18 @@ exports[`Settings Block renders the correct markup 1`] = `
size="medium"
variant="default"
>
-
Expand
-
</gl-button-stub>
-
<p>
<div
data-testid="description-slot"
/>
</p>
</div>
-
<div
- aria-labelledby="settings_label_2"
+ aria-labelledby="reference-0"
class="settings-content"
- id="settings_content_3"
+ id="reference-1"
role="region"
style="display: none;"
tabindex="-1"
diff --git a/spec/frontend/vue_shared/components/source_viewer/components/__snapshots__/chunk_new_spec.js.snap b/spec/frontend/vue_shared/components/source_viewer/components/__snapshots__/chunk_new_spec.js.snap
index 26c9a6f8d5a..bab1920fd3a 100644
--- a/spec/frontend/vue_shared/components/source_viewer/components/__snapshots__/chunk_new_spec.js.snap
+++ b/spec/frontend/vue_shared/components/source_viewer/components/__snapshots__/chunk_new_spec.js.snap
@@ -2,23 +2,20 @@
exports[`Chunk component rendering isHighlighted is true renders line numbers 1`] = `
<div
- class="gl-p-0! gl-z-index-3 diff-line-num gl-border-r gl-display-flex line-links line-numbers"
+ class="diff-line-num gl-border-r gl-display-flex gl-p-0! gl-z-index-3 line-links line-numbers"
data-testid="line-numbers"
>
<a
- class="gl-user-select-none gl-shadow-none! file-line-blame"
+ class="file-line-blame gl-shadow-none! gl-user-select-none"
href="some/blame/path.js#L71"
/>
-
<a
- class="gl-user-select-none gl-shadow-none! file-line-num"
+ class="file-line-num gl-shadow-none! gl-user-select-none"
data-line-number="71"
href="#L71"
- id="L71"
+ id="reference-0"
>
-
- 71
-
+ 71
</a>
</div>
`;
diff --git a/spec/frontend/vue_shared/components/source_viewer/components/chunk_new_spec.js b/spec/frontend/vue_shared/components/source_viewer/components/chunk_new_spec.js
index 1154c930e5d..852598b13dc 100644
--- a/spec/frontend/vue_shared/components/source_viewer/components/chunk_new_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/components/chunk_new_spec.js
@@ -35,6 +35,7 @@ describe('Chunk component', () => {
await nextTick();
expect(findContent().exists()).toBe(true);
+ expect(wrapper.emitted('appear')).toHaveLength(1);
});
});
diff --git a/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js b/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js
index 431ede17954..1a498d0c5b1 100644
--- a/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js
@@ -58,7 +58,8 @@ describe('Source Viewer component', () => {
describe('hash highlighting', () => {
it('calls highlightHash with expected parameter', () => {
- expect(lineHighlighter.highlightHash).toHaveBeenCalledWith(hash);
+ const scrollEnabled = false;
+ expect(lineHighlighter.highlightHash).toHaveBeenCalledWith(hash, scrollEnabled);
});
});
});
diff --git a/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js b/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
index a486d13a856..2043f36443d 100644
--- a/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
@@ -1,10 +1,9 @@
import hljs from 'highlight.js/lib/core';
-import Vue from 'vue';
-import VueRouter from 'vue-router';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import SourceViewer from '~/vue_shared/components/source_viewer/source_viewer.vue';
+import CodeownersValidation from 'ee_component/blob/components/codeowners_validation.vue';
import { registerPlugins } from '~/vue_shared/components/source_viewer/plugins/index';
import Chunk from '~/vue_shared/components/source_viewer/components/chunk.vue';
import {
@@ -24,11 +23,10 @@ import LineHighlighter from '~/blob/line_highlighter';
import eventHub from '~/notes/event_hub';
import Tracking from '~/tracking';
-jest.mock('~/blob/line_highlighter');
+const lineHighlighter = new LineHighlighter();
+jest.mock('~/blob/line_highlighter', () => jest.fn().mockReturnValue({ highlightHash: jest.fn() }));
jest.mock('highlight.js/lib/core');
jest.mock('~/vue_shared/components/source_viewer/plugins/index');
-Vue.use(VueRouter);
-const router = new VueRouter();
const mockAxios = new MockAdapter(axios);
const generateContent = (content, totalLines = 1, delimiter = '\n') => {
@@ -44,6 +42,7 @@ const execImmediately = (callback) => callback();
describe('Source Viewer component', () => {
let wrapper;
const language = 'docker';
+ const selectedRangeHash = '#L1-2';
const mappedLanguage = ROUGE_TO_HLJS_LANGUAGE_MAP[language];
const chunk1 = generateContent('// Some source code 1', 70);
const chunk2 = generateContent('// Some source code 2', 70);
@@ -55,11 +54,13 @@ describe('Source Viewer component', () => {
const fileType = 'javascript';
const DEFAULT_BLOB_DATA = { language, rawTextBlob: content, path, blamePath, fileType };
const highlightedContent = `<span data-testid='test-highlighted' id='LC1'>${content}</span><span id='LC2'></span>`;
+ const currentRef = 'main';
+ const projectPath = 'test/project';
const createComponent = async (blob = {}) => {
wrapper = shallowMountExtended(SourceViewer, {
- router,
- propsData: { blob: { ...DEFAULT_BLOB_DATA, ...blob } },
+ propsData: { blob: { ...DEFAULT_BLOB_DATA, ...blob }, currentRef, projectPath },
+ mocks: { $route: { hash: selectedRangeHash } },
});
await waitForPromises();
};
@@ -268,5 +269,25 @@ describe('Source Viewer component', () => {
it('instantiates the lineHighlighter class', () => {
expect(LineHighlighter).toHaveBeenCalledWith({ scrollBehavior: 'auto' });
});
+
+ it('highlights the range when chunk appears', () => {
+ findChunks().at(0).vm.$emit('appear');
+ const scrollEnabled = false;
+ expect(lineHighlighter.highlightHash).toHaveBeenCalledWith(selectedRangeHash, scrollEnabled);
+ });
+ });
+
+ describe('Codeowners validation', () => {
+ const findCodeownersValidation = () => wrapper.findComponent(CodeownersValidation);
+
+ it('does not render codeowners validation when file is not CODEOWNERS', async () => {
+ await createComponent();
+ expect(findCodeownersValidation().exists()).toBe(false);
+ });
+
+ it('renders codeowners validation when file is CODEOWNERS', async () => {
+ await createComponent({ name: CODEOWNERS_FILE_NAME });
+ expect(findCodeownersValidation().exists()).toBe(true);
+ });
});
});
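A note on the source viewer hunks above: the bare `jest.mock('~/blob/line_highlighter')` becomes a factory whose constructor mock always returns the same object, so the spec and the component under test share a single `highlightHash` spy. A small sketch of that pattern, using a hypothetical `~/some/dependency` module:

// Hypothetical module path, for illustration only.
import Dependency from '~/some/dependency';

// jest.mock is hoisted above the import; the factory runs once, so every
// `new Dependency()` (here and inside the component under test) returns the
// same object and therefore the same `doWork` spy.
jest.mock('~/some/dependency', () => jest.fn().mockReturnValue({ doWork: jest.fn() }));

const dependency = new Dependency();

it('shares one mock instance between the spec and the code under test', () => {
  expect(new Dependency()).toBe(dependency);
  expect(dependency.doWork).not.toHaveBeenCalled();
});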
diff --git a/spec/frontend/vue_shared/components/split_button_spec.js b/spec/frontend/vue_shared/components/split_button_spec.js
deleted file mode 100644
index ffa25ae8448..00000000000
--- a/spec/frontend/vue_shared/components/split_button_spec.js
+++ /dev/null
@@ -1,117 +0,0 @@
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-
-import { nextTick } from 'vue';
-import { assertProps } from 'helpers/assert_props';
-import SplitButton from '~/vue_shared/components/split_button.vue';
-
-const mockActionItems = [
- {
- eventName: 'concert',
- title: 'professor',
- description: 'very symphonic',
- },
- {
- eventName: 'apocalypse',
- title: 'captain',
- description: 'warp drive',
- },
-];
-
-describe('SplitButton', () => {
- let wrapper;
-
- const createComponent = (propsData) => {
- wrapper = shallowMount(SplitButton, {
- propsData,
- });
- };
-
- const findDropdown = () => wrapper.findComponent(GlDropdown);
- const findDropdownItem = (index = 0) =>
- findDropdown().findAllComponents(GlDropdownItem).at(index);
- const selectItem = async (index) => {
- findDropdownItem(index).vm.$emit('click');
-
- await nextTick();
- };
- const clickToggleButton = async () => {
- findDropdown().vm.$emit('click');
-
- await nextTick();
- };
-
- it('fails for empty actionItems', () => {
- const actionItems = [];
- expect(() => assertProps(SplitButton, { actionItems })).toThrow();
- });
-
- it('fails for single actionItems', () => {
- const actionItems = [mockActionItems[0]];
- expect(() => assertProps(SplitButton, { actionItems })).toThrow();
- });
-
- it('renders actionItems', () => {
- createComponent({ actionItems: mockActionItems });
-
- expect(wrapper.element).toMatchSnapshot();
- });
-
- describe('toggle button text', () => {
- beforeEach(() => {
- createComponent({ actionItems: mockActionItems });
- });
-
- it('defaults to first actionItems title', () => {
- expect(findDropdown().props().text).toBe(mockActionItems[0].title);
- });
-
- it('changes to selected actionItems title', () =>
- selectItem(1).then(() => {
- expect(findDropdown().props().text).toBe(mockActionItems[1].title);
- }));
- });
-
- describe('emitted event', () => {
- let eventHandler;
- let changeEventHandler;
-
- beforeEach(() => {
- createComponent({ actionItems: mockActionItems });
- });
-
- const addEventHandler = ({ eventName }) => {
- eventHandler = jest.fn();
- wrapper.vm.$once(eventName, () => eventHandler());
- };
-
- const addChangeEventHandler = () => {
- changeEventHandler = jest.fn();
- wrapper.vm.$once('change', (item) => changeEventHandler(item));
- };
-
- it('defaults to first actionItems event', () => {
- addEventHandler(mockActionItems[0]);
-
- return clickToggleButton().then(() => {
- expect(eventHandler).toHaveBeenCalled();
- });
- });
-
- it('changes to selected actionItems event', () =>
- selectItem(1)
- .then(() => addEventHandler(mockActionItems[1]))
- .then(clickToggleButton)
- .then(() => {
- expect(eventHandler).toHaveBeenCalled();
- }));
-
- it('change to selected actionItem emits change event', () => {
- addChangeEventHandler();
-
- return selectItem(1).then(() => {
- expect(changeEventHandler).toHaveBeenCalledWith(mockActionItems[1]);
- });
- });
- });
-});
diff --git a/spec/frontend/vue_shared/components/upload_dropzone/__snapshots__/upload_dropzone_spec.js.snap b/spec/frontend/vue_shared/components/upload_dropzone/__snapshots__/upload_dropzone_spec.js.snap
index c816fe790a8..cffe26f8175 100644
--- a/spec/frontend/vue_shared/components/upload_dropzone/__snapshots__/upload_dropzone_spec.js.snap
+++ b/spec/frontend/vue_shared/components/upload_dropzone/__snapshots__/upload_dropzone_spec.js.snap
@@ -2,14 +2,14 @@
exports[`Upload dropzone component correctly overrides description and drop messages 1`] = `
<div
- class="gl-w-full gl-relative"
+ class="gl-relative gl-w-full"
>
<button
- class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-px-5 gl-py-4 gl-mb-0"
+ class="card gl-align-items-center gl-h-full gl-justify-content-center gl-mb-0 gl-px-5 gl-py-4 gl-w-full upload-dropzone-border upload-dropzone-card"
type="button"
>
<div
- class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
+ class="gl-align-items-center gl-display-flex gl-flex-direction-column gl-justify-content-center gl-text-center"
data-testid="dropzone-area"
>
<gl-icon-stub
@@ -17,7 +17,6 @@ exports[`Upload dropzone component correctly overrides description and drop mess
name="upload"
size="24"
/>
-
<p
class="gl-mb-0"
data-testid="upload-text"
@@ -28,50 +27,37 @@ exports[`Upload dropzone component correctly overrides description and drop mess
</p>
</div>
</button>
-
<input
accept="image/jpg,image/jpeg"
class="hide"
- multiple="multiple"
+ multiple=""
name="upload_file"
type="file"
/>
-
<transition-stub
name="upload-dropzone-fade"
>
<div
- class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-4"
+ class="card gl-absolute gl-align-items-center gl-display-flex gl-h-full gl-justify-content-center gl-p-4 gl-w-full upload-dropzone-border upload-dropzone-overlay"
style="display: none;"
>
<div
- class="mw-50 gl-text-center"
+ class="gl-text-center mw-50"
style="display: none;"
>
- <h3
- class=""
- >
-
- Oh no!
-
+ <h3>
+ Oh no!
</h3>
-
<span>
You are trying to upload something other than an image. Please upload a .png, .jpg, .jpeg, .gif, .bmp, .tiff or .ico.
</span>
</div>
-
<div
- class="mw-50 gl-text-center"
+ class="gl-text-center mw-50"
>
- <h3
- class=""
- >
-
- Incoming!
-
+ <h3>
+ Incoming!
</h3>
-
<span>
Test drop-to-start message.
</span>
@@ -83,14 +69,14 @@ exports[`Upload dropzone component correctly overrides description and drop mess
exports[`Upload dropzone component when dragging renders correct template when drag event contains files 1`] = `
<div
- class="gl-w-full gl-relative"
+ class="gl-relative gl-w-full"
>
<button
- class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-px-5 gl-py-4 gl-mb-0"
+ class="card gl-align-items-center gl-h-full gl-justify-content-center gl-mb-0 gl-px-5 gl-py-4 gl-w-full upload-dropzone-border upload-dropzone-card"
type="button"
>
<div
- class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
+ class="gl-align-items-center gl-display-flex gl-flex-direction-column gl-justify-content-center gl-text-center"
data-testid="dropzone-area"
>
<gl-icon-stub
@@ -98,65 +84,49 @@ exports[`Upload dropzone component when dragging renders correct template when d
name="upload"
size="24"
/>
-
<p
class="gl-mb-0"
data-testid="upload-text"
>
- Drop or
+ Drop or
<gl-link-stub>
-
- upload
-
+ upload
</gl-link-stub>
- files to attach
+ files to attach
</p>
</div>
</button>
-
<input
accept="image/*"
class="hide"
- multiple="multiple"
+ multiple=""
name="upload_file"
type="file"
/>
-
<transition-stub
name="upload-dropzone-fade"
>
<div
- class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-4"
+ class="card gl-absolute gl-align-items-center gl-display-flex gl-h-full gl-justify-content-center gl-p-4 gl-w-full upload-dropzone-border upload-dropzone-overlay"
style=""
>
<div
- class="mw-50 gl-text-center"
+ class="gl-text-center mw-50"
style="display: none;"
>
- <h3
- class=""
- >
-
- Oh no!
-
+ <h3>
+ Oh no!
</h3>
-
<span>
You are trying to upload something other than an image. Please upload a .png, .jpg, .jpeg, .gif, .bmp, .tiff or .ico.
</span>
</div>
-
<div
- class="mw-50 gl-text-center"
+ class="gl-text-center mw-50"
>
- <h3
- class=""
- >
-
- Incoming!
-
+ <h3>
+ Incoming!
</h3>
-
<span>
Drop your files to start your upload.
</span>
@@ -168,14 +138,14 @@ exports[`Upload dropzone component when dragging renders correct template when d
exports[`Upload dropzone component when dragging renders correct template when drag event contains files and text 1`] = `
<div
- class="gl-w-full gl-relative"
+ class="gl-relative gl-w-full"
>
<button
- class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-px-5 gl-py-4 gl-mb-0"
+ class="card gl-align-items-center gl-h-full gl-justify-content-center gl-mb-0 gl-px-5 gl-py-4 gl-w-full upload-dropzone-border upload-dropzone-card"
type="button"
>
<div
- class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
+ class="gl-align-items-center gl-display-flex gl-flex-direction-column gl-justify-content-center gl-text-center"
data-testid="dropzone-area"
>
<gl-icon-stub
@@ -183,65 +153,49 @@ exports[`Upload dropzone component when dragging renders correct template when d
name="upload"
size="24"
/>
-
<p
class="gl-mb-0"
data-testid="upload-text"
>
- Drop or
+ Drop or
<gl-link-stub>
-
- upload
-
+ upload
</gl-link-stub>
- files to attach
+ files to attach
</p>
</div>
</button>
-
<input
accept="image/*"
class="hide"
- multiple="multiple"
+ multiple=""
name="upload_file"
type="file"
/>
-
<transition-stub
name="upload-dropzone-fade"
>
<div
- class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-4"
+ class="card gl-absolute gl-align-items-center gl-display-flex gl-h-full gl-justify-content-center gl-p-4 gl-w-full upload-dropzone-border upload-dropzone-overlay"
style=""
>
<div
- class="mw-50 gl-text-center"
+ class="gl-text-center mw-50"
style="display: none;"
>
- <h3
- class=""
- >
-
- Oh no!
-
+ <h3>
+ Oh no!
</h3>
-
<span>
You are trying to upload something other than an image. Please upload a .png, .jpg, .jpeg, .gif, .bmp, .tiff or .ico.
</span>
</div>
-
<div
- class="mw-50 gl-text-center"
+ class="gl-text-center mw-50"
>
- <h3
- class=""
- >
-
- Incoming!
-
+ <h3>
+ Incoming!
</h3>
-
<span>
Drop your files to start your upload.
</span>
@@ -253,14 +207,14 @@ exports[`Upload dropzone component when dragging renders correct template when d
exports[`Upload dropzone component when dragging renders correct template when drag event contains text 1`] = `
<div
- class="gl-w-full gl-relative"
+ class="gl-relative gl-w-full"
>
<button
- class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-px-5 gl-py-4 gl-mb-0"
+ class="card gl-align-items-center gl-h-full gl-justify-content-center gl-mb-0 gl-px-5 gl-py-4 gl-w-full upload-dropzone-border upload-dropzone-card"
type="button"
>
<div
- class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
+ class="gl-align-items-center gl-display-flex gl-flex-direction-column gl-justify-content-center gl-text-center"
data-testid="dropzone-area"
>
<gl-icon-stub
@@ -268,66 +222,50 @@ exports[`Upload dropzone component when dragging renders correct template when d
name="upload"
size="24"
/>
-
<p
class="gl-mb-0"
data-testid="upload-text"
>
- Drop or
+ Drop or
<gl-link-stub>
-
- upload
-
+ upload
</gl-link-stub>
- files to attach
+ files to attach
</p>
</div>
</button>
-
<input
accept="image/*"
class="hide"
- multiple="multiple"
+ multiple=""
name="upload_file"
type="file"
/>
-
<transition-stub
name="upload-dropzone-fade"
>
<div
- class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-4"
+ class="card gl-absolute gl-align-items-center gl-display-flex gl-h-full gl-justify-content-center gl-p-4 gl-w-full upload-dropzone-border upload-dropzone-overlay"
style=""
>
<div
- class="mw-50 gl-text-center"
+ class="gl-text-center mw-50"
style=""
>
- <h3
- class=""
- >
-
- Oh no!
-
+ <h3>
+ Oh no!
</h3>
-
<span>
You are trying to upload something other than an image. Please upload a .png, .jpg, .jpeg, .gif, .bmp, .tiff or .ico.
</span>
</div>
-
<div
- class="mw-50 gl-text-center"
+ class="gl-text-center mw-50"
style="display: none;"
>
- <h3
- class=""
- >
-
- Incoming!
-
+ <h3>
+ Incoming!
</h3>
-
<span>
Drop your files to start your upload.
</span>
@@ -339,14 +277,14 @@ exports[`Upload dropzone component when dragging renders correct template when d
exports[`Upload dropzone component when dragging renders correct template when drag event is empty 1`] = `
<div
- class="gl-w-full gl-relative"
+ class="gl-relative gl-w-full"
>
<button
- class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-px-5 gl-py-4 gl-mb-0"
+ class="card gl-align-items-center gl-h-full gl-justify-content-center gl-mb-0 gl-px-5 gl-py-4 gl-w-full upload-dropzone-border upload-dropzone-card"
type="button"
>
<div
- class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
+ class="gl-align-items-center gl-display-flex gl-flex-direction-column gl-justify-content-center gl-text-center"
data-testid="dropzone-area"
>
<gl-icon-stub
@@ -354,66 +292,50 @@ exports[`Upload dropzone component when dragging renders correct template when d
name="upload"
size="24"
/>
-
<p
class="gl-mb-0"
data-testid="upload-text"
>
- Drop or
+ Drop or
<gl-link-stub>
-
- upload
-
+ upload
</gl-link-stub>
- files to attach
+ files to attach
</p>
</div>
</button>
-
<input
accept="image/*"
class="hide"
- multiple="multiple"
+ multiple=""
name="upload_file"
type="file"
/>
-
<transition-stub
name="upload-dropzone-fade"
>
<div
- class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-4"
+ class="card gl-absolute gl-align-items-center gl-display-flex gl-h-full gl-justify-content-center gl-p-4 gl-w-full upload-dropzone-border upload-dropzone-overlay"
style=""
>
<div
- class="mw-50 gl-text-center"
+ class="gl-text-center mw-50"
style=""
>
- <h3
- class=""
- >
-
- Oh no!
-
+ <h3>
+ Oh no!
</h3>
-
<span>
You are trying to upload something other than an image. Please upload a .png, .jpg, .jpeg, .gif, .bmp, .tiff or .ico.
</span>
</div>
-
<div
- class="mw-50 gl-text-center"
+ class="gl-text-center mw-50"
style="display: none;"
>
- <h3
- class=""
- >
-
- Incoming!
-
+ <h3>
+ Incoming!
</h3>
-
<span>
Drop your files to start your upload.
</span>
@@ -425,14 +347,14 @@ exports[`Upload dropzone component when dragging renders correct template when d
exports[`Upload dropzone component when dragging renders correct template when dragging stops 1`] = `
<div
- class="gl-w-full gl-relative"
+ class="gl-relative gl-w-full"
>
<button
- class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-px-5 gl-py-4 gl-mb-0"
+ class="card gl-align-items-center gl-h-full gl-justify-content-center gl-mb-0 gl-px-5 gl-py-4 gl-w-full upload-dropzone-border upload-dropzone-card"
type="button"
>
<div
- class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
+ class="gl-align-items-center gl-display-flex gl-flex-direction-column gl-justify-content-center gl-text-center"
data-testid="dropzone-area"
>
<gl-icon-stub
@@ -440,66 +362,50 @@ exports[`Upload dropzone component when dragging renders correct template when d
name="upload"
size="24"
/>
-
<p
class="gl-mb-0"
data-testid="upload-text"
>
- Drop or
+ Drop or
<gl-link-stub>
-
- upload
-
+ upload
</gl-link-stub>
- files to attach
+ files to attach
</p>
</div>
</button>
-
<input
accept="image/*"
class="hide"
- multiple="multiple"
+ multiple=""
name="upload_file"
type="file"
/>
-
<transition-stub
name="upload-dropzone-fade"
>
<div
- class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-4"
+ class="card gl-absolute gl-align-items-center gl-display-flex gl-h-full gl-justify-content-center gl-p-4 gl-w-full upload-dropzone-border upload-dropzone-overlay"
style="display: none;"
>
<div
- class="mw-50 gl-text-center"
+ class="gl-text-center mw-50"
style=""
>
- <h3
- class=""
- >
-
- Oh no!
-
+ <h3>
+ Oh no!
</h3>
-
<span>
You are trying to upload something other than an image. Please upload a .png, .jpg, .jpeg, .gif, .bmp, .tiff or .ico.
</span>
</div>
-
<div
- class="mw-50 gl-text-center"
+ class="gl-text-center mw-50"
style="display: none;"
>
- <h3
- class=""
- >
-
- Incoming!
-
+ <h3>
+ Incoming!
</h3>
-
<span>
Drop your files to start your upload.
</span>
@@ -511,14 +417,14 @@ exports[`Upload dropzone component when dragging renders correct template when d
exports[`Upload dropzone component when no slot provided renders default dropzone card 1`] = `
<div
- class="gl-w-full gl-relative"
+ class="gl-relative gl-w-full"
>
<button
- class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-px-5 gl-py-4 gl-mb-0"
+ class="card gl-align-items-center gl-h-full gl-justify-content-center gl-mb-0 gl-px-5 gl-py-4 gl-w-full upload-dropzone-border upload-dropzone-card"
type="button"
>
<div
- class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
+ class="gl-align-items-center gl-display-flex gl-flex-direction-column gl-justify-content-center gl-text-center"
data-testid="dropzone-area"
>
<gl-icon-stub
@@ -526,65 +432,49 @@ exports[`Upload dropzone component when no slot provided renders default dropzon
name="upload"
size="24"
/>
-
<p
class="gl-mb-0"
data-testid="upload-text"
>
- Drop or
+ Drop or
<gl-link-stub>
-
- upload
-
+ upload
</gl-link-stub>
- files to attach
+ files to attach
</p>
</div>
</button>
-
<input
accept="image/*"
class="hide"
- multiple="multiple"
+ multiple=""
name="upload_file"
type="file"
/>
-
<transition-stub
name="upload-dropzone-fade"
>
<div
- class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-4"
+ class="card gl-absolute gl-align-items-center gl-display-flex gl-h-full gl-justify-content-center gl-p-4 gl-w-full upload-dropzone-border upload-dropzone-overlay"
style="display: none;"
>
<div
- class="mw-50 gl-text-center"
+ class="gl-text-center mw-50"
style="display: none;"
>
- <h3
- class=""
- >
-
- Oh no!
-
+ <h3>
+ Oh no!
</h3>
-
<span>
You are trying to upload something other than an image. Please upload a .png, .jpg, .jpeg, .gif, .bmp, .tiff or .ico.
</span>
</div>
-
<div
- class="mw-50 gl-text-center"
+ class="gl-text-center mw-50"
>
- <h3
- class=""
- >
-
- Incoming!
-
+ <h3>
+ Incoming!
</h3>
-
<span>
Drop your files to start your upload.
</span>
@@ -596,47 +486,35 @@ exports[`Upload dropzone component when no slot provided renders default dropzon
exports[`Upload dropzone component when slot provided renders dropzone with slot content 1`] = `
<div
- class="gl-w-full gl-relative"
+ class="gl-relative gl-w-full"
>
<div>
dropzone slot
</div>
-
<transition-stub
name="upload-dropzone-fade"
>
<div
- class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-4"
+ class="card gl-absolute gl-align-items-center gl-display-flex gl-h-full gl-justify-content-center gl-p-4 gl-w-full upload-dropzone-border upload-dropzone-overlay"
style="display: none;"
>
<div
- class="mw-50 gl-text-center"
+ class="gl-text-center mw-50"
style="display: none;"
>
- <h3
- class=""
- >
-
- Oh no!
-
+ <h3>
+ Oh no!
</h3>
-
<span>
You are trying to upload something other than an image. Please upload a .png, .jpg, .jpeg, .gif, .bmp, .tiff or .ico.
</span>
</div>
-
<div
- class="mw-50 gl-text-center"
+ class="gl-text-center mw-50"
>
- <h3
- class=""
- >
-
- Incoming!
-
+ <h3>
+ Incoming!
</h3>
-
<span>
Drop your files to start your upload.
</span>
diff --git a/spec/frontend/vue_shared/components/user_select_spec.js b/spec/frontend/vue_shared/components/user_select_spec.js
index 8c7657da8bc..119b892392f 100644
--- a/spec/frontend/vue_shared/components/user_select_spec.js
+++ b/spec/frontend/vue_shared/components/user_select_spec.js
@@ -5,17 +5,17 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import searchUsersQuery from '~/graphql_shared/queries/users_search.query.graphql';
-import searchUsersQueryOnMR from '~/graphql_shared/queries/users_search_with_mr_permissions.graphql';
+import searchUsersQuery from '~/graphql_shared/queries/project_autocomplete_users.query.graphql';
+import searchUsersQueryOnMR from '~/graphql_shared/queries/project_autocomplete_users_with_mr_permissions.query.graphql';
import { TYPE_MERGE_REQUEST } from '~/issues/constants';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import SidebarParticipant from '~/sidebar/components/assignees/sidebar_participant.vue';
import getIssueParticipantsQuery from '~/sidebar/queries/get_issue_participants.query.graphql';
import UserSelect from '~/vue_shared/components/user_select/user_select.vue';
import {
- searchResponse,
- searchResponseOnMR,
- projectMembersResponse,
+ projectAutocompleteMembersResponse,
+ searchAutocompleteQueryResponse,
+ searchAutocompleteResponseOnMR,
participantsQueryResponse,
mockUser1,
mockUser2,
@@ -59,7 +59,7 @@ describe('User select dropdown', () => {
const findUnassignLink = () => wrapper.findByTestId('unassign');
const findEmptySearchResults = () => wrapper.findAllByTestId('empty-results');
- const searchQueryHandlerSuccess = jest.fn().mockResolvedValue(projectMembersResponse);
+ const searchQueryHandlerSuccess = jest.fn().mockResolvedValue(projectAutocompleteMembersResponse);
const participantsQueryHandlerSuccess = jest.fn().mockResolvedValue(participantsQueryResponse);
const createComponent = ({
@@ -69,7 +69,7 @@ describe('User select dropdown', () => {
} = {}) => {
fakeApollo = createMockApollo([
[searchUsersQuery, searchQueryHandler],
- [searchUsersQueryOnMR, jest.fn().mockResolvedValue(searchResponseOnMR)],
+ [searchUsersQueryOnMR, jest.fn().mockResolvedValue(searchAutocompleteResponseOnMR)],
[getIssueParticipantsQuery, participantsQueryHandler],
]);
wrapper = shallowMountExtended(UserSelect, {
@@ -200,7 +200,7 @@ describe('User select dropdown', () => {
});
await waitForPromises();
- expect(findUnselectedParticipantByIndex(0).props('user')).toEqual(mockUser2);
+ expect(findUnselectedParticipantByIndex(0).props('user')).toMatchObject(mockUser2);
});
it('moves issuable author on top of unassigned list after current user, if author and current user are unassigned project members', async () => {
@@ -372,7 +372,9 @@ describe('User select dropdown', () => {
});
it('renders a list of found users and external participants matching search term', async () => {
- createComponent({ searchQueryHandler: jest.fn().mockResolvedValue(searchResponse) });
+ createComponent({
+ searchQueryHandler: jest.fn().mockResolvedValue(searchAutocompleteQueryResponse),
+ });
await waitForPromises();
findSearchField().vm.$emit('input', 'ro');
@@ -382,7 +384,9 @@ describe('User select dropdown', () => {
});
it('renders a list of found users only if no external participants match search term', async () => {
- createComponent({ searchQueryHandler: jest.fn().mockResolvedValue(searchResponse) });
+ createComponent({
+ searchQueryHandler: jest.fn().mockResolvedValue(searchAutocompleteQueryResponse),
+ });
await waitForPromises();
findSearchField().vm.$emit('input', 'roo');
@@ -392,8 +396,8 @@ describe('User select dropdown', () => {
});
it('shows a message about no matches if search returned an empty list', async () => {
- const responseCopy = cloneDeep(searchResponse);
- responseCopy.data.workspace.users.nodes = [];
+ const responseCopy = cloneDeep(searchAutocompleteQueryResponse);
+ responseCopy.data.workspace.users = [];
createComponent({
searchQueryHandler: jest.fn().mockResolvedValue(responseCopy),
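For context on the user select hunks above: the spec now targets the renamed project autocomplete queries, each wired to a resolved-value handler through createMockApollo. A condensed sketch of that wiring, with a hypothetical minimal fixture (the real spec imports projectAutocompleteMembersResponse from its mock_data module):

import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import searchUsersQuery from '~/graphql_shared/queries/project_autocomplete_users.query.graphql';

Vue.use(VueApollo);

// Hypothetical minimal fixture shape, for illustration only.
const projectAutocompleteMembersResponse = {
  data: { workspace: { id: 'gid://gitlab/Project/1', users: [] } },
};

const searchQueryHandler = jest.fn().mockResolvedValue(projectAutocompleteMembersResponse);
const fakeApollo = createMockApollo([[searchUsersQuery, searchQueryHandler]]);
// `fakeApollo` is then presumably passed to shallowMountExtended as `apolloProvider`.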
diff --git a/spec/frontend/vue_shared/issuable/__snapshots__/issuable_blocked_icon_spec.js.snap b/spec/frontend/vue_shared/issuable/__snapshots__/issuable_blocked_icon_spec.js.snap
deleted file mode 100644
index 1d4aa1afeaf..00000000000
--- a/spec/frontend/vue_shared/issuable/__snapshots__/issuable_blocked_icon_spec.js.snap
+++ /dev/null
@@ -1,30 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`IssuableBlockedIcon on mouseenter on blocked icon with more than three blocking issues matches the snapshot 1`] = `
-"<div class=\\"gl-display-inline\\"><svg data-testid=\\"issuable-blocked-icon\\" role=\\"img\\" aria-hidden=\\"true\\" class=\\"issuable-blocked-icon gl-mr-2 gl-cursor-pointer gl-text-red-500 gl-icon s16\\" id=\\"blocked-icon-uniqueId\\">
- <use href=\\"file-mock#issue-block\\"></use>
- </svg>
- <div class=\\"gl-popover\\">
- <ul class=\\"gl-list-style-none gl-p-0 gl-mb-0\\">
- <li><a href=\\"http://gdk.test:3000/gitlab-org/my-project-1/-/issues/6\\" class=\\"gl-link gl-text-blue-500! gl-font-sm\\">my-project-1#6</a>
- <p data-testid=\\"issuable-title\\" class=\\"gl-display-block! gl-mb-3\\">
- blocking issue title 1
- </p>
- </li>
- <li><a href=\\"http://gdk.test:3000/gitlab-org/my-project-1/-/issues/5\\" class=\\"gl-link gl-text-blue-500! gl-font-sm\\">my-project-1#5</a>
- <p data-testid=\\"issuable-title\\" class=\\"gl-display-block! gl-mb-3\\">
- blocking issue title 2 + blocking issue title 2 + blocking issue title 2 + bloc…
- </p>
- </li>
- <li><a href=\\"http://gdk.test:3000/gitlab-org/my-project-1/-/issues/4\\" class=\\"gl-link gl-text-blue-500! gl-font-sm\\">my-project-1#4</a>
- <p data-testid=\\"issuable-title\\" class=\\"gl-display-block! gl-mb-0\\">
- blocking issue title 3
- </p>
- </li>
- </ul>
- <div class=\\"gl-mt-4\\">
- <p data-testid=\\"hidden-blocking-count\\" class=\\"gl-mb-3\\">+ 1 more issue</p> <a data-testid=\\"view-all-issues\\" href=\\"http://gdk.test:3000/gitlab-org/my-project-1/-/issues/0#related-issues\\" class=\\"gl-link gl-text-blue-500! gl-font-sm\\">View all blocking issues</a>
- </div><span data-testid=\\"popover-title\\">Blocked by 4 issues</span>
- </div>
-</div>"
-`;
diff --git a/spec/frontend/vue_shared/issuable/create/components/issuable_form_spec.js b/spec/frontend/vue_shared/issuable/create/components/issuable_form_spec.js
index 338dc80b43e..62361705843 100644
--- a/spec/frontend/vue_shared/issuable/create/components/issuable_form_spec.js
+++ b/spec/frontend/vue_shared/issuable/create/components/issuable_form_spec.js
@@ -81,7 +81,7 @@ describe('IssuableForm', () => {
ariaLabel: __('Description'),
class: 'rspec-issuable-form-description',
placeholder: __('Write a comment or drag your files here…'),
- dataQaSelector: 'issuable_form_description_field',
+ dataTestid: 'issuable-form-description-field',
id: 'issuable-description',
name: 'issuable-description',
},
diff --git a/spec/frontend/vue_shared/issuable/issuable_blocked_icon_spec.js b/spec/frontend/vue_shared/issuable/issuable_blocked_icon_spec.js
index d5603d4ba4b..6512da07125 100644
--- a/spec/frontend/vue_shared/issuable/issuable_blocked_icon_spec.js
+++ b/spec/frontend/vue_shared/issuable/issuable_blocked_icon_spec.js
@@ -237,10 +237,6 @@ describe('IssuableBlockedIcon', () => {
await mouseenter();
});
- it('matches the snapshot', () => {
- expect(wrapper.html()).toMatchSnapshot();
- });
-
it('should render popover title with correct blocking issuable count', () => {
expect(findPopoverTitle().text()).toBe('Blocked by 4 issues');
});
diff --git a/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js b/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
index 77333a878d1..9f7254ba0e6 100644
--- a/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
+++ b/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
@@ -1,5 +1,6 @@
import { GlLink, GlLabel, GlIcon, GlFormCheckbox, GlSprintf } from '@gitlab/ui';
import { nextTick } from 'vue';
+import { escape } from 'lodash';
import { useFakeDate } from 'helpers/fake_date';
import { shallowMountExtended as shallowMount } from 'helpers/vue_test_utils_helper';
import IssuableItem from '~/vue_shared/issuable/list/components/issuable_item.vue';
@@ -63,6 +64,14 @@ describe('IssuableItem', () => {
});
});
+ describe('externalAuthor', () => {
+ it('returns `externalAuthor` reference', () => {
+ wrapper = createComponent();
+
+ expect(wrapper.vm.externalAuthor).toEqual(mockIssuable.externalAuthor);
+ });
+ });
+
describe('authorId', () => {
it.each`
authorId | returnValue
@@ -279,10 +288,23 @@ describe('IssuableItem', () => {
expect(titleEl.exists()).toBe(true);
expect(titleEl.findComponent(GlLink).attributes('href')).toBe(expectedHref);
expect(titleEl.findComponent(GlLink).attributes('target')).toBe(expectedTarget);
- expect(titleEl.findComponent(GlLink).text()).toBe(mockIssuable.title);
+ expect(titleEl.findComponent(GlLink).html()).toContain(mockIssuable.titleHtml);
},
);
+ it('renders issuable title with escaped markup when issue tracker is external', () => {
+ const mockTitle = '<script>foobar</script>';
+ wrapper = createComponent({
+ issuable: {
+ ...mockIssuable,
+ title: mockTitle,
+ externalTracker: 'jira',
+ },
+ });
+
+ expect(wrapper.findByTestId('issuable-title').html()).toContain(escape(mockTitle));
+ });
+
it('renders checkbox when `showCheckbox` prop is true', async () => {
wrapper = createComponent({
showCheckbox: true,
@@ -437,6 +459,15 @@ describe('IssuableItem', () => {
expect(authorEl.text()).toBe(mockAuthor.name);
});
+ it('renders issuable external author info via author slot', () => {
+ wrapper = createComponent({
+ issuableSymbol: '#',
+ issuable: { ...mockIssuable, externalAuthor: 'client@example.com' },
+ });
+
+ expect(wrapper.findByTestId('external-author').text()).toBe('client@example.com via');
+ });
+
it('renders timeframe via slot', () => {
wrapper = createComponent({
issuableSymbol: '#',
diff --git a/spec/frontend/vue_shared/issuable/list/mock_data.js b/spec/frontend/vue_shared/issuable/list/mock_data.js
index f8cf3ba5271..b39d177f292 100644
--- a/spec/frontend/vue_shared/issuable/list/mock_data.js
+++ b/spec/frontend/vue_shared/issuable/list/mock_data.js
@@ -42,7 +42,7 @@ export const mockCurrentUserTodo = {
export const mockIssuable = {
iid: '30',
title: 'Dismiss Cipher with no integrity',
- titleHtml: 'Dismiss Cipher with no integrity',
+ titleHtml: '<gl-emoji title="party-parrot"></gl-emoji>Dismiss Cipher with no integrity',
description: 'fortitudinis _fomentis_ dolor mitigari solet.',
descriptionHtml: 'fortitudinis <i>fomentis</i> dolor mitigari solet.',
state: 'opened',
diff --git a/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js
index 4d08ad54e58..3b6f06d835b 100644
--- a/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js
+++ b/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js
@@ -2,13 +2,7 @@ import { GlBadge, GlButton, GlIcon, GlLink, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { resetHTMLFixture, setHTMLFixture } from 'helpers/fixtures';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
-import {
- STATUS_CLOSED,
- STATUS_OPEN,
- STATUS_REOPENED,
- TYPE_ISSUE,
- WORKSPACE_PROJECT,
-} from '~/issues/constants';
+import { STATUS_CLOSED, STATUS_OPEN, STATUS_REOPENED, TYPE_ISSUE } from '~/issues/constants';
import { __ } from '~/locale';
import ConfidentialityBadge from '~/vue_shared/components/confidentiality_badge.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
@@ -45,7 +39,6 @@ describe('IssuableHeader component', () => {
...mockIssuableShowProps,
issuableState: STATUS_OPEN,
issuableType: TYPE_ISSUE,
- workspaceType: WORKSPACE_PROJECT,
...props,
},
slots: {
@@ -107,6 +100,7 @@ describe('IssuableHeader component', () => {
expect(findConfidentialityBadge().props()).toEqual({
issuableType: 'issue',
workspaceType: 'project',
+ hideTextInSmallScreens: false,
});
});
@@ -169,7 +163,7 @@ describe('IssuableHeader component', () => {
expect(findWorkItemTypeIcon().props()).toMatchObject({
showText: true,
- workItemType: 'ISSUE',
+ workItemType: 'issue',
});
});
diff --git a/spec/frontend/vue_shared/issuable/show/components/issuable_title_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_title_spec.js
index 39316dfa249..eefc9142064 100644
--- a/spec/frontend/vue_shared/issuable/show/components/issuable_title_spec.js
+++ b/spec/frontend/vue_shared/issuable/show/components/issuable_title_spec.js
@@ -66,11 +66,11 @@ describe('IssuableTitle', () => {
});
await nextTick();
- const titleEl = wrapperWithTitle.find('[data-testid="title"]');
+ const titleEl = wrapperWithTitle.find('[data-testid="issuable-title"]');
expect(titleEl.exists()).toBe(true);
expect(titleEl.html()).toBe(
- '<h1 dir="auto" data-qa-selector="title_content" data-testid="title" class="title gl-font-size-h-display"><b>Sample</b> title</h1>',
+ '<h1 dir="auto" data-testid="issuable-title" class="title gl-font-size-h-display"><b>Sample</b> title</h1>',
);
wrapperWithTitle.destroy();
diff --git a/spec/frontend/vue_shared/issuable/show/mock_data.js b/spec/frontend/vue_shared/issuable/show/mock_data.js
index 5ec205a2d5c..946ad33555d 100644
--- a/spec/frontend/vue_shared/issuable/show/mock_data.js
+++ b/spec/frontend/vue_shared/issuable/show/mock_data.js
@@ -38,6 +38,7 @@ export const mockIssuableShowProps = {
showFieldTitle: false,
statusIcon: 'issues',
statusIconClass: 'gl-sm-display-none',
+ workspaceType: 'project',
taskCompletionStatus: {
completedCount: 0,
count: 5,
diff --git a/spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js b/spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js
index a7ddcbdd8bc..109b7732539 100644
--- a/spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js
+++ b/spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js
@@ -1,6 +1,6 @@
import { GlBreadcrumb } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import LegacyContainer from '~/vue_shared/new_namespace/components/legacy_container.vue';
import WelcomePage from '~/vue_shared/new_namespace/components/welcome.vue';
import NewNamespacePage from '~/vue_shared/new_namespace/new_namespace_page.vue';
@@ -14,6 +14,7 @@ describe('Experimental new namespace creation app', () => {
const findWelcomePage = () => wrapper.findComponent(WelcomePage);
const findLegacyContainer = () => wrapper.findComponent(LegacyContainer);
+ const findTopBar = () => wrapper.findByTestId('top-bar');
const findBreadcrumb = () => wrapper.findComponent(GlBreadcrumb);
const findImage = () => wrapper.find('img');
const findNewTopLevelGroupAlert = () => wrapper.findComponent(NewTopLevelGroupAlert);
@@ -30,7 +31,7 @@ describe('Experimental new namespace creation app', () => {
};
const createComponent = ({ slots, propsData } = {}) => {
- wrapper = shallowMount(NewNamespacePage, {
+ wrapper = shallowMountExtended(NewNamespacePage, {
slots,
propsData: {
...DEFAULT_PROPS,
@@ -167,4 +168,19 @@ describe('Experimental new namespace creation app', () => {
});
});
});
+
+ describe('top bar', () => {
+ it('adds "top-bar-fixed" and "container-fluid" classes when new navigation enabled', () => {
+ gon.use_new_navigation = true;
+ createComponent();
+
+ expect(findTopBar().classes()).toEqual(['top-bar-fixed', 'container-fluid']);
+ });
+
+ it('does not add classes when new navigation is not enabled', () => {
+ createComponent();
+
+ expect(findTopBar().classes()).toEqual([]);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/security_reports/components/security_report_download_dropdown_spec.js b/spec/frontend/vue_shared/security_reports/components/security_report_download_dropdown_spec.js
index 299a3d62421..f5bc23a91fd 100644
--- a/spec/frontend/vue_shared/security_reports/components/security_report_download_dropdown_spec.js
+++ b/spec/frontend/vue_shared/security_reports/components/security_report_download_dropdown_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { GlDisclosureDropdown } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import SecurityReportDownloadDropdown from '~/vue_shared/security_reports/components/security_report_download_dropdown.vue';
@@ -12,8 +12,7 @@ describe('SecurityReportDownloadDropdown component', () => {
});
};
- const findDropdown = () => wrapper.findComponent(GlDropdown);
- const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
+ const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
describe('given report artifacts', () => {
beforeEach(() => {
@@ -28,21 +27,36 @@ describe('SecurityReportDownloadDropdown component', () => {
},
];
- createComponent({ artifacts });
+ createComponent({ artifacts, text: 'test' });
});
it('renders a dropdown', () => {
expect(findDropdown().props('loading')).toBe(false);
+ expect(findDropdown().props('toggleText')).toBe('test');
+ expect(findDropdown().attributes()).toMatchObject({
+ placement: 'right',
+ size: 'small',
+ icon: 'download',
+ });
});
- it('renders a dropdown item for each artifact', () => {
- artifacts.forEach((artifact, i) => {
- const item = findDropdownItems().at(i);
- expect(item.text()).toContain(artifact.name);
-
- expect(item.element.getAttribute('href')).toBe(artifact.path);
- expect(item.element.getAttribute('download')).toBeDefined();
- });
+ it('passes artifacts as items', () => {
+ expect(findDropdown().props('items')).toMatchObject([
+ {
+ text: 'Download foo',
+ href: '/foo.json',
+ extraAttrs: {
+ download: '',
+ },
+ },
+ {
+ text: 'Download bar',
+ href: '/bar.json',
+ extraAttrs: {
+ download: '',
+ },
+ },
+ ]);
});
});
@@ -56,31 +70,13 @@ describe('SecurityReportDownloadDropdown component', () => {
});
});
- describe('given title props', () => {
+ describe('given it is not loading and no artifacts', () => {
beforeEach(() => {
- createComponent({ artifacts: [], loading: true, title: 'test title' });
- });
-
- it('should render title', () => {
- expect(findDropdown().attributes('title')).toBe('test title');
- });
-
- it('should not render text', () => {
- expect(findDropdown().text().trim()).toBe('');
- });
- });
-
- describe('given text props', () => {
- beforeEach(() => {
- createComponent({ artifacts: [], loading: true, text: 'test text' });
- });
-
- it('should not render title', () => {
- expect(findDropdown().props().title).not.toBeDefined();
+ createComponent({ artifacts: [], loading: false });
});
- it('should render text', () => {
- expect(findDropdown().props().text).toContain('test text');
+ it('does not render dropdown', () => {
+ expect(findDropdown().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/webhooks/components/__snapshots__/push_events_spec.js.snap b/spec/frontend/webhooks/components/__snapshots__/push_events_spec.js.snap
index aec0f84cb82..4150bd75c16 100644
--- a/spec/frontend/webhooks/components/__snapshots__/push_events_spec.js.snap
+++ b/spec/frontend/webhooks/components/__snapshots__/push_events_spec.js.snap
@@ -11,7 +11,7 @@ exports[`Webhook push events form editor component Different push events rules w
valuefield="value"
>
<gl-form-radio-stub
- class="gl-mt-2 branch-filter-strategy-radio"
+ class="branch-filter-strategy-radio gl-mt-2"
data-testid="rule_all_branches"
value="all_branches"
>
@@ -21,50 +21,34 @@ exports[`Webhook push events form editor component Different push events rules w
All branches
</div>
</gl-form-radio-stub>
-
<gl-form-radio-stub
- class="gl-mt-2 branch-filter-strategy-radio"
+ class="branch-filter-strategy-radio gl-mt-2"
data-testid="rule_wildcard"
value="wildcard"
>
<div
data-qa-selector="strategy_radio_wildcard"
>
-
- Wildcard pattern
-
+ Wildcard pattern
</div>
</gl-form-radio-stub>
-
<div
class="gl-ml-6"
- >
- <!---->
- </div>
-
- <!---->
-
+ />
<gl-form-radio-stub
- class="gl-mt-2 branch-filter-strategy-radio"
+ class="branch-filter-strategy-radio gl-mt-2"
data-testid="rule_regex"
value="regex"
>
<div
data-qa-selector="strategy_radio_regex"
>
-
- Regular expression
-
+ Regular expression
</div>
</gl-form-radio-stub>
-
<div
class="gl-ml-6"
- >
- <!---->
- </div>
-
- <!---->
+ />
</gl-form-radio-group-stub>
`;
@@ -79,7 +63,7 @@ exports[`Webhook push events form editor component Different push events rules w
valuefield="value"
>
<gl-form-radio-stub
- class="gl-mt-2 branch-filter-strategy-radio"
+ class="branch-filter-strategy-radio gl-mt-2"
data-testid="rule_all_branches"
value="all_branches"
>
@@ -89,43 +73,31 @@ exports[`Webhook push events form editor component Different push events rules w
All branches
</div>
</gl-form-radio-stub>
-
<gl-form-radio-stub
- class="gl-mt-2 branch-filter-strategy-radio"
+ class="branch-filter-strategy-radio gl-mt-2"
data-testid="rule_wildcard"
value="wildcard"
>
<div
data-qa-selector="strategy_radio_wildcard"
>
-
- Wildcard pattern
-
+ Wildcard pattern
</div>
</gl-form-radio-stub>
-
<div
class="gl-ml-6"
- >
- <!---->
- </div>
-
- <!---->
-
+ />
<gl-form-radio-stub
- class="gl-mt-2 branch-filter-strategy-radio"
+ class="branch-filter-strategy-radio gl-mt-2"
data-testid="rule_regex"
value="regex"
>
<div
data-qa-selector="strategy_radio_regex"
>
-
- Regular expression
-
+ Regular expression
</div>
</gl-form-radio-stub>
-
<div
class="gl-ml-6"
>
@@ -136,9 +108,8 @@ exports[`Webhook push events form editor component Different push events rules w
value="foo"
/>
</div>
-
<p
- class="form-text text-muted custom-control"
+ class="custom-control form-text text-muted"
>
<gl-sprintf-stub
message="Regular expressions such as %{REGEX_CODE} are supported."
@@ -158,7 +129,7 @@ exports[`Webhook push events form editor component Different push events rules w
valuefield="value"
>
<gl-form-radio-stub
- class="gl-mt-2 branch-filter-strategy-radio"
+ class="branch-filter-strategy-radio gl-mt-2"
data-testid="rule_all_branches"
value="all_branches"
>
@@ -168,21 +139,17 @@ exports[`Webhook push events form editor component Different push events rules w
All branches
</div>
</gl-form-radio-stub>
-
<gl-form-radio-stub
- class="gl-mt-2 branch-filter-strategy-radio"
+ class="branch-filter-strategy-radio gl-mt-2"
data-testid="rule_wildcard"
value="wildcard"
>
<div
data-qa-selector="strategy_radio_wildcard"
>
-
- Wildcard pattern
-
+ Wildcard pattern
</div>
</gl-form-radio-stub>
-
<div
class="gl-ml-6"
>
@@ -193,36 +160,27 @@ exports[`Webhook push events form editor component Different push events rules w
value="foo"
/>
</div>
-
<p
- class="form-text text-muted custom-control"
+ class="custom-control form-text text-muted"
>
<gl-sprintf-stub
message="Wildcards such as %{WILDCARD_CODE_STABLE} or %{WILDCARD_CODE_PRODUCTION} are supported."
/>
</p>
-
<gl-form-radio-stub
- class="gl-mt-2 branch-filter-strategy-radio"
+ class="branch-filter-strategy-radio gl-mt-2"
data-testid="rule_regex"
value="regex"
>
<div
data-qa-selector="strategy_radio_regex"
>
-
- Regular expression
-
+ Regular expression
</div>
</gl-form-radio-stub>
-
<div
class="gl-ml-6"
- >
- <!---->
- </div>
-
- <!---->
+ />
</gl-form-radio-group-stub>
`;
@@ -237,7 +195,7 @@ exports[`Webhook push events form editor component Different push events rules w
valuefield="value"
>
<gl-form-radio-stub
- class="gl-mt-2 branch-filter-strategy-radio"
+ class="branch-filter-strategy-radio gl-mt-2"
data-testid="rule_all_branches"
value="all_branches"
>
@@ -247,50 +205,34 @@ exports[`Webhook push events form editor component Different push events rules w
All branches
</div>
</gl-form-radio-stub>
-
<gl-form-radio-stub
- class="gl-mt-2 branch-filter-strategy-radio"
+ class="branch-filter-strategy-radio gl-mt-2"
data-testid="rule_wildcard"
value="wildcard"
>
<div
data-qa-selector="strategy_radio_wildcard"
>
-
- Wildcard pattern
-
+ Wildcard pattern
</div>
</gl-form-radio-stub>
-
<div
class="gl-ml-6"
- >
- <!---->
- </div>
-
- <!---->
-
+ />
<gl-form-radio-stub
- class="gl-mt-2 branch-filter-strategy-radio"
+ class="branch-filter-strategy-radio gl-mt-2"
data-testid="rule_regex"
value="regex"
>
<div
data-qa-selector="strategy_radio_regex"
>
-
- Regular expression
-
+ Regular expression
</div>
</gl-form-radio-stub>
-
<div
class="gl-ml-6"
- >
- <!---->
- </div>
-
- <!---->
+ />
</gl-form-radio-group-stub>
`;
@@ -305,7 +247,7 @@ exports[`Webhook push events form editor component Different push events rules w
valuefield="value"
>
<gl-form-radio-stub
- class="gl-mt-2 branch-filter-strategy-radio"
+ class="branch-filter-strategy-radio gl-mt-2"
data-testid="rule_all_branches"
value="all_branches"
>
@@ -315,43 +257,31 @@ exports[`Webhook push events form editor component Different push events rules w
All branches
</div>
</gl-form-radio-stub>
-
<gl-form-radio-stub
- class="gl-mt-2 branch-filter-strategy-radio"
+ class="branch-filter-strategy-radio gl-mt-2"
data-testid="rule_wildcard"
value="wildcard"
>
<div
data-qa-selector="strategy_radio_wildcard"
>
-
- Wildcard pattern
-
+ Wildcard pattern
</div>
</gl-form-radio-stub>
-
<div
class="gl-ml-6"
- >
- <!---->
- </div>
-
- <!---->
-
+ />
<gl-form-radio-stub
- class="gl-mt-2 branch-filter-strategy-radio"
+ class="branch-filter-strategy-radio gl-mt-2"
data-testid="rule_regex"
value="regex"
>
<div
data-qa-selector="strategy_radio_regex"
>
-
- Regular expression
-
+ Regular expression
</div>
</gl-form-radio-stub>
-
<div
class="gl-ml-6"
>
@@ -362,9 +292,8 @@ exports[`Webhook push events form editor component Different push events rules w
value=""
/>
</div>
-
<p
- class="form-text text-muted custom-control"
+ class="custom-control form-text text-muted"
>
<gl-sprintf-stub
message="Regular expressions such as %{REGEX_CODE} are supported."
@@ -384,7 +313,7 @@ exports[`Webhook push events form editor component Different push events rules w
valuefield="value"
>
<gl-form-radio-stub
- class="gl-mt-2 branch-filter-strategy-radio"
+ class="branch-filter-strategy-radio gl-mt-2"
data-testid="rule_all_branches"
value="all_branches"
>
@@ -394,21 +323,17 @@ exports[`Webhook push events form editor component Different push events rules w
All branches
</div>
</gl-form-radio-stub>
-
<gl-form-radio-stub
- class="gl-mt-2 branch-filter-strategy-radio"
+ class="branch-filter-strategy-radio gl-mt-2"
data-testid="rule_wildcard"
value="wildcard"
>
<div
data-qa-selector="strategy_radio_wildcard"
>
-
- Wildcard pattern
-
+ Wildcard pattern
</div>
</gl-form-radio-stub>
-
<div
class="gl-ml-6"
>
@@ -419,35 +344,26 @@ exports[`Webhook push events form editor component Different push events rules w
value=""
/>
</div>
-
<p
- class="form-text text-muted custom-control"
+ class="custom-control form-text text-muted"
>
<gl-sprintf-stub
message="Wildcards such as %{WILDCARD_CODE_STABLE} or %{WILDCARD_CODE_PRODUCTION} are supported."
/>
</p>
-
<gl-form-radio-stub
- class="gl-mt-2 branch-filter-strategy-radio"
+ class="branch-filter-strategy-radio gl-mt-2"
data-testid="rule_regex"
value="regex"
>
<div
data-qa-selector="strategy_radio_regex"
>
-
- Regular expression
-
+ Regular expression
</div>
</gl-form-radio-stub>
-
<div
class="gl-ml-6"
- >
- <!---->
- </div>
-
- <!---->
+ />
</gl-form-radio-group-stub>
`;
diff --git a/spec/frontend/work_items/components/notes/__snapshots__/work_item_note_body_spec.js.snap b/spec/frontend/work_items/components/notes/__snapshots__/work_item_note_body_spec.js.snap
index 52838dcd0bc..841b8b57f88 100644
--- a/spec/frontend/work_items/components/notes/__snapshots__/work_item_note_body_spec.js.snap
+++ b/spec/frontend/work_items/components/notes/__snapshots__/work_item_note_body_spec.js.snap
@@ -1,9 +1,41 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Work Item Note Body should have the wrapper to show the note body 1`] = `
-"<div data-testid=\\"work-item-note-body\\" class=\\"note-text md\\">
- <p dir=\\"auto\\" data-sourcepos=\\"1:1-1:76\\">
- <gl-emoji data-unicode-version=\\"6.0\\" data-name=\\"wave\\" title=\\"waving hand sign\\">👋</gl-emoji> Hi <a title=\\"Sherie Nitzsche\\" class=\\"gfm gfm-project_member js-user-link\\" data-placement=\\"top\\" data-container=\\"body\\" data-user=\\"3\\" data-reference-type=\\"user\\" href=\\"/fredda.brekke\\">@fredda.brekke</a> How are you ? what do you think about this ? <gl-emoji data-unicode-version=\\"6.0\\" data-name=\\"pray\\" title=\\"person with folded hands\\">🙏</gl-emoji>
+<div
+ class="md note-text"
+ data-testid="work-item-note-body"
+>
+ <p
+ data-sourcepos="1:1-1:76"
+ dir="auto"
+ >
+ <gl-emoji
+ data-name="wave"
+ data-unicode-version="6.0"
+ title="waving hand sign"
+ >
+ 👋
+ </gl-emoji>
+ Hi
+ <a
+ class="gfm gfm-project_member js-user-link"
+ data-container="body"
+ data-placement="top"
+ data-reference-type="user"
+ data-user="3"
+ href="/fredda.brekke"
+ title="Sherie Nitzsche"
+ >
+ @fredda.brekke
+ </a>
+ How are you ? what do you think about this ?
+ <gl-emoji
+ data-name="pray"
+ data-unicode-version="6.0"
+ title="person with folded hands"
+ >
+ 🙏
+ </gl-emoji>
</p>
-</div>"
+</div>
`;
diff --git a/spec/frontend/work_items/components/notes/__snapshots__/work_item_note_replying_spec.js.snap b/spec/frontend/work_items/components/notes/__snapshots__/work_item_note_replying_spec.js.snap
index 30577dc60cf..af930f56509 100644
--- a/spec/frontend/work_items/components/notes/__snapshots__/work_item_note_replying_spec.js.snap
+++ b/spec/frontend/work_items/components/notes/__snapshots__/work_item_note_replying_spec.js.snap
@@ -1,3 +1,13 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`Work Item Note Replying should have the note body and header 1`] = `"<note-header-stub author=\\"[object Object]\\" actiontext=\\"\\" noteabletype=\\"\\" expanded=\\"true\\" showspinner=\\"true\\" noteurl=\\"\\" emailparticipant=\\"\\"></note-header-stub>"`;
+exports[`Work Item Note Replying should have the note body and header 1`] = `
+<note-header-stub
+ actiontext=""
+ author="[object Object]"
+ emailparticipant=""
+ expanded="true"
+ noteabletype=""
+ noteurl=""
+ showspinner="true"
+/>
+`;
diff --git a/spec/frontend/work_items/components/notes/work_item_activity_sort_filter_spec.js b/spec/frontend/work_items/components/notes/work_item_activity_sort_filter_spec.js
index 5ed9d581446..0d0235f4b20 100644
--- a/spec/frontend/work_items/components/notes/work_item_activity_sort_filter_spec.js
+++ b/spec/frontend/work_items/components/notes/work_item_activity_sort_filter_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { GlCollapsibleListbox } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import WorkItemActivitySortFilter from '~/work_items/components/notes/work_item_activity_sort_filter.vue';
import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
@@ -19,15 +19,13 @@ describe('Work Item Activity/Discussions Filtering', () => {
let wrapper;
const findLocalStorageSync = () => wrapper.findComponent(LocalStorageSync);
- const findDropdown = () => wrapper.findComponent(GlDropdown);
- const findAllDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
- const findByDataTestId = (dataTestId) => wrapper.findByTestId(dataTestId);
+ const findListbox = () => wrapper.findComponent(GlCollapsibleListbox);
const createComponent = ({
loading = false,
workItemType = 'Task',
sortFilterProp = ASC,
- filterOptions = WORK_ITEM_ACTIVITY_SORT_OPTIONS,
+ items = WORK_ITEM_ACTIVITY_SORT_OPTIONS,
trackingLabel = 'item_track_notes_sorting',
trackingAction = 'work_item_notes_sort_order_changed',
filterEvent = 'changeSort',
@@ -39,7 +37,7 @@ describe('Work Item Activity/Discussions Filtering', () => {
loading,
workItemType,
sortFilterProp,
- filterOptions,
+ items,
trackingLabel,
trackingAction,
filterEvent,
@@ -50,13 +48,13 @@ describe('Work Item Activity/Discussions Filtering', () => {
};
describe.each`
- usedFor | filterOptions | storageKey | filterEvent | newInputOption | trackingLabel | trackingAction | defaultSortFilterProp | sortFilterProp | nonDefaultDataTestId
- ${'Sorting'} | ${WORK_ITEM_ACTIVITY_SORT_OPTIONS} | ${WORK_ITEM_NOTES_SORT_ORDER_KEY} | ${'changeSort'} | ${DESC} | ${'item_track_notes_sorting'} | ${'work_item_notes_sort_order_changed'} | ${ASC} | ${ASC} | ${'newest-first'}
- ${'Filtering'} | ${WORK_ITEM_ACTIVITY_FILTER_OPTIONS} | ${WORK_ITEM_NOTES_FILTER_KEY} | ${'changeFilter'} | ${WORK_ITEM_NOTES_FILTER_ONLY_COMMENTS} | ${'item_track_notes_sorting'} | ${'work_item_notes_filter_changed'} | ${WORK_ITEM_NOTES_FILTER_ALL_NOTES} | ${WORK_ITEM_NOTES_FILTER_ALL_NOTES} | ${'comments-activity'}
+ usedFor | items | storageKey | filterEvent | newInputOption | trackingLabel | trackingAction | defaultSortFilterProp | sortFilterProp | nonDefaultValue
+ ${'Sorting'} | ${WORK_ITEM_ACTIVITY_SORT_OPTIONS} | ${WORK_ITEM_NOTES_SORT_ORDER_KEY} | ${'changeSort'} | ${DESC} | ${'item_track_notes_sorting'} | ${'work_item_notes_sort_order_changed'} | ${ASC} | ${ASC} | ${DESC}
+ ${'Filtering'} | ${WORK_ITEM_ACTIVITY_FILTER_OPTIONS} | ${WORK_ITEM_NOTES_FILTER_KEY} | ${'changeFilter'} | ${WORK_ITEM_NOTES_FILTER_ONLY_COMMENTS} | ${'item_track_notes_sorting'} | ${'work_item_notes_filter_changed'} | ${WORK_ITEM_NOTES_FILTER_ALL_NOTES} | ${WORK_ITEM_NOTES_FILTER_ALL_NOTES} | ${WORK_ITEM_NOTES_FILTER_ONLY_COMMENTS}
`(
'When used for $usedFor',
({
- filterOptions,
+ items,
storageKey,
filterEvent,
trackingLabel,
@@ -64,12 +62,12 @@ describe('Work Item Activity/Discussions Filtering', () => {
newInputOption,
defaultSortFilterProp,
sortFilterProp,
- nonDefaultDataTestId,
+ nonDefaultValue,
}) => {
beforeEach(() => {
createComponent({
sortFilterProp,
- filterOptions,
+ items,
trackingLabel,
trackingAction,
filterEvent,
@@ -79,8 +77,7 @@ describe('Work Item Activity/Discussions Filtering', () => {
});
it('has a dropdown with options equal to the length of `filterOptions`', () => {
- expect(findDropdown().exists()).toBe(true);
- expect(findAllDropdownItems()).toHaveLength(filterOptions.length);
+ expect(findListbox().props('items')).toEqual(items);
});
it('has local storage sync with the correct props', () => {
@@ -96,7 +93,7 @@ describe('Work Item Activity/Discussions Filtering', () => {
it('emits tracking event when the a non default dropdown item is clicked', () => {
const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
- findByDataTestId(nonDefaultDataTestId).vm.$emit('click');
+ findListbox().vm.$emit('select', nonDefaultValue);
expect(trackingSpy).toHaveBeenCalledWith(TRACKING_CATEGORY_SHOW, trackingAction, {
category: TRACKING_CATEGORY_SHOW,
diff --git a/spec/frontend/work_items/components/notes/work_item_add_note_spec.js b/spec/frontend/work_items/components/notes/work_item_add_note_spec.js
index 4b1b7b27ad9..826fc2b2230 100644
--- a/spec/frontend/work_items/components/notes/work_item_add_note_spec.js
+++ b/spec/frontend/work_items/components/notes/work_item_add_note_spec.js
@@ -255,6 +255,20 @@ describe('Work item add note', () => {
expect(wrapper.emitted('error')).toEqual([[error]]);
});
+
+ it('sends confidential prop to work item comment form', async () => {
+ await createComponent({ isEditing: true, signedIn: true });
+
+ const {
+ data: {
+ workspace: {
+ workItems: { nodes },
+ },
+ },
+ } = workItemByIidResponseFactory({ canUpdate: true, canCreateNote: true });
+
+ expect(findCommentForm().props('isWorkItemConfidential')).toBe(nodes[0].confidential);
+ });
});
});
diff --git a/spec/frontend/work_items/components/notes/work_item_note_spec.js b/spec/frontend/work_items/components/notes/work_item_note_spec.js
index c5d1decfb42..9049a69656a 100644
--- a/spec/frontend/work_items/components/notes/work_item_note_spec.js
+++ b/spec/frontend/work_items/components/notes/work_item_note_spec.js
@@ -388,6 +388,13 @@ describe('Work Item Note', () => {
});
});
+ it('confidential information on note', async () => {
+ createComponent();
+ await findNoteActions().vm.$emit('startEditing');
+ const { confidential } = workItemByIidResponseFactory().data.workspace.workItems.nodes[0];
+ expect(findCommentForm().props('isWorkItemConfidential')).toBe(confidential);
+ });
+
describe('author and user role badges', () => {
describe('author badge props', () => {
it.each`
diff --git a/spec/frontend/work_items/components/shared/work_item_link_child_contents_spec.js b/spec/frontend/work_items/components/shared/work_item_link_child_contents_spec.js
index 9a20e2ec98f..b86f9ff34ae 100644
--- a/spec/frontend/work_items/components/shared/work_item_link_child_contents_spec.js
+++ b/spec/frontend/work_items/components/shared/work_item_link_child_contents_spec.js
@@ -10,7 +10,7 @@ import RichTimestampTooltip from '~/vue_shared/components/rich_timestamp_tooltip
import WorkItemLinkChildContents from '~/work_items/components/shared/work_item_link_child_contents.vue';
import WorkItemLinksMenu from '~/work_items/components/shared/work_item_links_menu.vue';
-import { TASK_TYPE_NAME, WORK_ITEM_TYPE_VALUE_OBJECTIVE } from '~/work_items/constants';
+import { WORK_ITEM_TYPE_VALUE_OBJECTIVE } from '~/work_items/constants';
import {
workItemTask,
@@ -26,11 +26,9 @@ jest.mock('~/alert');
describe('WorkItemLinkChildContents', () => {
Vue.use(VueApollo);
- const WORK_ITEM_ID = 'gid://gitlab/WorkItem/2';
let wrapper;
const { LABELS } = workItemObjectiveMetadataWidgets;
const mockLabels = LABELS.labels.nodes;
- const mockFullPath = 'gitlab-org/gitlab-test';
const findStatusIconComponent = () =>
wrapper.findByTestId('item-status-icon').findComponent(GlIcon);
@@ -43,19 +41,11 @@ describe('WorkItemLinkChildContents', () => {
const findScopedLabel = () => findAllLabels().at(1);
const findLinksMenuComponent = () => wrapper.findComponent(WorkItemLinksMenu);
- const createComponent = ({
- canUpdate = true,
- parentWorkItemId = WORK_ITEM_ID,
- childItem = workItemTask,
- workItemType = TASK_TYPE_NAME,
- } = {}) => {
+ const createComponent = ({ canUpdate = true, childItem = workItemTask } = {}) => {
wrapper = shallowMountExtended(WorkItemLinkChildContents, {
propsData: {
canUpdate,
- parentWorkItemId,
childItem,
- workItemType,
- fullPath: mockFullPath,
childPath: '/gitlab-org/gitlab-test/-/work_items/4',
},
});
diff --git a/spec/frontend/work_items/components/shared/work_item_links_menu_spec.js b/spec/frontend/work_items/components/shared/work_item_links_menu_spec.js
index 721db6c3315..338a70feae4 100644
--- a/spec/frontend/work_items/components/shared/work_item_links_menu_spec.js
+++ b/spec/frontend/work_items/components/shared/work_item_links_menu_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { GlDisclosureDropdown, GlDisclosureDropdownItem } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import WorkItemLinksMenu from '~/work_items/components/shared/work_item_links_menu.vue';
@@ -10,8 +10,8 @@ describe('WorkItemLinksMenu', () => {
wrapper = shallowMountExtended(WorkItemLinksMenu);
};
- const findDropdown = () => wrapper.findComponent(GlDropdown);
- const findRemoveDropdownItem = () => wrapper.findComponent(GlDropdownItem);
+ const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
+ const findRemoveDropdownItem = () => wrapper.findComponent(GlDisclosureDropdownItem);
beforeEach(() => {
createComponent();
@@ -23,7 +23,7 @@ describe('WorkItemLinksMenu', () => {
});
it('emits removeChild event on click Remove', () => {
- findRemoveDropdownItem().vm.$emit('click');
+ findRemoveDropdownItem().vm.$emit('action');
expect(wrapper.emitted('removeChild')).toHaveLength(1);
});
diff --git a/spec/frontend/work_items/components/shared/work_item_token_input_spec.js b/spec/frontend/work_items/components/shared/work_item_token_input_spec.js
new file mode 100644
index 00000000000..075b69415cf
--- /dev/null
+++ b/spec/frontend/work_items/components/shared/work_item_token_input_spec.js
@@ -0,0 +1,81 @@
+import Vue from 'vue';
+import { GlTokenSelector } from '@gitlab/ui';
+import VueApollo from 'vue-apollo';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import WorkItemTokenInput from '~/work_items/components/shared/work_item_token_input.vue';
+import { WORK_ITEM_TYPE_ENUM_TASK } from '~/work_items/constants';
+import projectWorkItemsQuery from '~/work_items/graphql/project_work_items.query.graphql';
+import { availableWorkItemsResponse, searchedWorkItemsResponse } from '../../mock_data';
+
+Vue.use(VueApollo);
+
+describe('WorkItemTokenInput', () => {
+ let wrapper;
+
+ const availableWorkItemsResolver = jest.fn().mockResolvedValue(availableWorkItemsResponse);
+ const searchedWorkItemResolver = jest.fn().mockResolvedValue(searchedWorkItemsResponse);
+
+ const createComponent = async ({
+ workItemsToAdd = [],
+ parentConfidential = false,
+ childrenType = WORK_ITEM_TYPE_ENUM_TASK,
+ areWorkItemsToAddValid = true,
+ workItemsResolver = searchedWorkItemResolver,
+ } = {}) => {
+ wrapper = shallowMountExtended(WorkItemTokenInput, {
+ apolloProvider: createMockApollo([[projectWorkItemsQuery, workItemsResolver]]),
+ propsData: {
+ value: workItemsToAdd,
+ childrenType,
+ childrenIds: [],
+ fullPath: 'test-project-path',
+ parentWorkItemId: 'gid://gitlab/WorkItem/1',
+ parentConfidential,
+ areWorkItemsToAddValid,
+ },
+ });
+
+ await waitForPromises();
+ };
+
+ const findTokenSelector = () => wrapper.findComponent(GlTokenSelector);
+
+ it('searches for available work items on focus', async () => {
+ createComponent({ workItemsResolver: availableWorkItemsResolver });
+ findTokenSelector().vm.$emit('focus');
+ await waitForPromises();
+
+ expect(availableWorkItemsResolver).toHaveBeenCalledWith({
+ fullPath: 'test-project-path',
+ searchTerm: '',
+ types: [WORK_ITEM_TYPE_ENUM_TASK],
+ in: undefined,
+ });
+ expect(findTokenSelector().props('dropdownItems')).toHaveLength(3);
+ });
+
+ it('searches for available work items when typing in input', async () => {
+ createComponent({ workItemsResolver: searchedWorkItemResolver });
+ findTokenSelector().vm.$emit('focus');
+ findTokenSelector().vm.$emit('text-input', 'Task 2');
+ await waitForPromises();
+
+ expect(searchedWorkItemResolver).toHaveBeenCalledWith({
+ fullPath: 'test-project-path',
+ searchTerm: 'Task 2',
+ types: [WORK_ITEM_TYPE_ENUM_TASK],
+ in: 'TITLE',
+ });
+ expect(findTokenSelector().props('dropdownItems')).toHaveLength(1);
+ });
+
+ it('renders red border around token selector input when work item is not valid', () => {
+ createComponent({
+ areWorkItemsToAddValid: false,
+ });
+
+ expect(findTokenSelector().props('containerClass')).toBe('gl-inset-border-1-red-500!');
+ });
+});
diff --git a/spec/frontend/work_items/components/work_item_actions_spec.js b/spec/frontend/work_items/components/work_item_actions_spec.js
index 0fe517d7d74..0098a2e0864 100644
--- a/spec/frontend/work_items/components/work_item_actions_spec.js
+++ b/spec/frontend/work_items/components/work_item_actions_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdownDivider, GlModal, GlToggle } from '@gitlab/ui';
+import { GlDisclosureDropdown, GlDropdownDivider, GlModal, GlToggle } from '@gitlab/ui';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
@@ -140,7 +140,12 @@ describe('WorkItemActions component', () => {
stubs: {
GlModal: stubComponent(GlModal, {
methods: {
- show: modalShowSpy,
+ show: jest.fn(),
+ },
+ }),
+ GlDisclosureDropdown: stubComponent(GlDisclosureDropdown, {
+ methods: {
+ close: modalShowSpy,
},
}),
},
@@ -208,7 +213,7 @@ describe('WorkItemActions component', () => {
it('emits `toggleWorkItemConfidentiality` event when clicked', () => {
createComponent();
- findConfidentialityToggleButton().vm.$emit('click');
+ findConfidentialityToggleButton().vm.$emit('action');
expect(wrapper.emitted('toggleWorkItemConfidentiality')[0]).toEqual([true]);
});
@@ -228,7 +233,7 @@ describe('WorkItemActions component', () => {
it('shows confirm modal when clicked', () => {
createComponent();
- findDeleteButton().vm.$emit('click');
+ findDeleteButton().vm.$emit('action');
expect(modalShowSpy).toHaveBeenCalled();
});
@@ -359,7 +364,7 @@ describe('WorkItemActions component', () => {
await waitForPromises();
expect(findPromoteButton().exists()).toBe(true);
- findPromoteButton().vm.$emit('click');
+ findPromoteButton().vm.$emit('action');
await waitForPromises();
@@ -378,7 +383,7 @@ describe('WorkItemActions component', () => {
await waitForPromises();
expect(findPromoteButton().exists()).toBe(true);
- findPromoteButton().vm.$emit('click');
+ findPromoteButton().vm.$emit('action');
await waitForPromises();
@@ -394,7 +399,7 @@ describe('WorkItemActions component', () => {
createComponent();
expect(findCopyReferenceButton().exists()).toBe(true);
- findCopyReferenceButton().vm.$emit('click');
+ findCopyReferenceButton().vm.$emit('action');
expect(toast).toHaveBeenCalledWith('Reference copied');
});
@@ -416,7 +421,7 @@ describe('WorkItemActions component', () => {
createComponent();
expect(findCopyCreateNoteEmailButton().exists()).toBe(true);
- findCopyCreateNoteEmailButton().vm.$emit('click');
+ findCopyCreateNoteEmailButton().vm.$emit('action');
expect(toast).toHaveBeenCalledWith('Email address copied');
});
diff --git a/spec/frontend/work_items/components/work_item_detail_spec.js b/spec/frontend/work_items/components/work_item_detail_spec.js
index d3c7c9e2074..fec6d0673c6 100644
--- a/spec/frontend/work_items/components/work_item_detail_spec.js
+++ b/spec/frontend/work_items/components/work_item_detail_spec.js
@@ -20,6 +20,7 @@ import WorkItemCreatedUpdated from '~/work_items/components/work_item_created_up
import WorkItemAttributesWrapper from '~/work_items/components/work_item_attributes_wrapper.vue';
import WorkItemTitle from '~/work_items/components/work_item_title.vue';
import WorkItemTree from '~/work_items/components/work_item_links/work_item_tree.vue';
+import WorkItemRelationships from '~/work_items/components/work_item_relationships/work_item_relationships.vue';
import WorkItemNotes from '~/work_items/components/work_item_notes.vue';
import WorkItemDetailModal from '~/work_items/components/work_item_detail_modal.vue';
import WorkItemTypeIcon from '~/work_items/components/work_item_type_icon.vue';
@@ -37,6 +38,7 @@ import {
workItemByIidResponseFactory,
objectiveType,
mockWorkItemCommentNote,
+ mockBlockingLinkedItem,
} from '../mock_data';
jest.mock('~/lib/utils/common_utils');
@@ -76,6 +78,7 @@ describe('WorkItemDetail component', () => {
const findCloseButton = () => wrapper.findByTestId('work-item-close');
const findWorkItemType = () => wrapper.findByTestId('work-item-type');
const findHierarchyTree = () => wrapper.findComponent(WorkItemTree);
+ const findWorkItemRelationships = () => wrapper.findComponent(WorkItemRelationships);
const findNotesWidget = () => wrapper.findComponent(WorkItemNotes);
const findModal = () => wrapper.findComponent(WorkItemDetailModal);
const findAbuseCategorySelector = () => wrapper.findComponent(AbuseCategorySelector);
@@ -96,6 +99,7 @@ describe('WorkItemDetail component', () => {
confidentialityMock = [updateWorkItemMutation, jest.fn()],
error = undefined,
workItemsMvc2Enabled = false,
+ linkedWorkItemsEnabled = false,
} = {}) => {
const handlers = [
[workItemByIidQuery, handler],
@@ -119,6 +123,7 @@ describe('WorkItemDetail component', () => {
provide: {
glFeatures: {
workItemsMvc2: workItemsMvc2Enabled,
+ linkedWorkItems: linkedWorkItemsEnabled,
},
hasIssueWeightsFeature: true,
hasIterationsFeature: true,
@@ -581,12 +586,91 @@ describe('WorkItemDetail component', () => {
});
});
+ describe('relationship widget', () => {
+ it('does not render linked items by default', async () => {
+ createComponent();
+ await waitForPromises();
+
+ expect(findWorkItemRelationships().exists()).toBe(false);
+ });
+
+ describe('work item has children', () => {
+ const mockWorkItemLinkedItem = workItemByIidResponseFactory({
+ linkedItems: mockBlockingLinkedItem,
+ });
+ const handler = jest.fn().mockResolvedValue(mockWorkItemLinkedItem);
+
+ it('renders relationship widget when work item has linked items', async () => {
+ createComponent({ handler, linkedWorkItemsEnabled: true });
+ await waitForPromises();
+
+ expect(findWorkItemRelationships().exists()).toBe(true);
+ });
+
+ it('opens the modal with the linked item when `showModal` is emitted', async () => {
+ createComponent({
+ handler,
+ linkedWorkItemsEnabled: true,
+ workItemsMvc2Enabled: true,
+ });
+ await waitForPromises();
+
+ const event = {
+ preventDefault: jest.fn(),
+ };
+
+ findWorkItemRelationships().vm.$emit('showModal', {
+ event,
+ modalWorkItem: { id: 'childWorkItemId' },
+ });
+ await waitForPromises();
+
+ expect(findModal().props().workItemId).toBe('childWorkItemId');
+ expect(showModalHandler).toHaveBeenCalled();
+ });
+
+ describe('linked work item is rendered in a modal and has linked items', () => {
+ beforeEach(async () => {
+ createComponent({
+ isModal: true,
+ handler,
+ workItemsMvc2Enabled: true,
+ linkedWorkItemsEnabled: true,
+ });
+
+ await waitForPromises();
+ });
+
+ it('does not render a new modal', () => {
+ expect(findModal().exists()).toBe(false);
+ });
+
+ it('emits `update-modal` when `show-modal` is emitted', async () => {
+ const event = {
+ preventDefault: jest.fn(),
+ };
+
+ findWorkItemRelationships().vm.$emit('showModal', {
+ event,
+ modalWorkItem: { id: 'childWorkItemId' },
+ });
+ await waitForPromises();
+
+ expect(wrapper.emitted('update-modal')).toBeDefined();
+ });
+ });
+ });
+ });
+
describe('notes widget', () => {
it('renders notes by default', async () => {
createComponent();
await waitForPromises();
+ const { confidential } = workItemQueryResponse.data.workspace.workItems.nodes[0];
+
expect(findNotesWidget().exists()).toBe(true);
+ expect(findNotesWidget().props('isWorkItemConfidential')).toBe(confidential);
});
});
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js
index 803ff950cbe..a624bbe8567 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js
@@ -93,8 +93,6 @@ describe('WorkItemLinkChild', () => {
expect(findWorkItemLinkChildContents().props()).toEqual({
childItem: workItemObjectiveWithChild,
canUpdate: true,
- parentWorkItemId: 'gid://gitlab/WorkItem/2',
- workItemType: 'Objective',
childPath: '/gitlab-org/gitlab-test/-/work_items/12',
});
});
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js
index 8caacc2dc97..aaab22fd18d 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js
@@ -1,11 +1,12 @@
import Vue from 'vue';
-import { GlForm, GlFormInput, GlFormCheckbox, GlTooltip, GlTokenSelector } from '@gitlab/ui';
+import { GlForm, GlFormInput, GlFormCheckbox, GlTooltip } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
import { sprintf, s__ } from '~/locale';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import WorkItemLinksForm from '~/work_items/components/work_item_links/work_item_links_form.vue';
+import WorkItemTokenInput from '~/work_items/components/shared/work_item_token_input.vue';
import {
FORM_TYPES,
WORK_ITEM_TYPE_ENUM_TASK,
@@ -70,10 +71,12 @@ describe('WorkItemLinksForm', () => {
};
const findForm = () => wrapper.findComponent(GlForm);
- const findTokenSelector = () => wrapper.findComponent(GlTokenSelector);
+ const findWorkItemTokenInput = () => wrapper.findComponent(WorkItemTokenInput);
const findInput = () => wrapper.findComponent(GlFormInput);
const findConfidentialCheckbox = () => wrapper.findComponent(GlFormCheckbox);
+ const findTooltip = () => wrapper.findComponent(GlTooltip);
const findAddChildButton = () => wrapper.findByTestId('add-child-button');
+ const findValidationElement = () => wrapper.findByTestId('work-items-invalid');
describe('creating a new work item', () => {
beforeEach(async () => {
@@ -84,7 +87,7 @@ describe('WorkItemLinksForm', () => {
expect(findForm().exists()).toBe(true);
expect(findInput().exists()).toBe(true);
expect(findAddChildButton().text()).toBe('Create task');
- expect(findTokenSelector().exists()).toBe(false);
+ expect(findWorkItemTokenInput().exists()).toBe(false);
});
it('creates child task in non confidential parent', async () => {
@@ -137,7 +140,7 @@ describe('WorkItemLinksForm', () => {
const confidentialCheckbox = findConfidentialCheckbox();
expect(confidentialCheckbox.exists()).toBe(true);
- expect(wrapper.findComponent(GlTooltip).exists()).toBe(false);
+ expect(findTooltip().exists()).toBe(false);
expect(confidentialCheckbox.text()).toBe(
sprintf(I18N_WORK_ITEM_CONFIDENTIALITY_CHECKBOX_LABEL, {
workItemType: WORK_ITEM_TYPE_ENUM_TASK.toLocaleLowerCase(),
@@ -149,12 +152,11 @@ describe('WorkItemLinksForm', () => {
createComponent({ parentConfidential: true });
const confidentialCheckbox = findConfidentialCheckbox();
- const confidentialTooltip = wrapper.findComponent(GlTooltip);
expect(confidentialCheckbox.attributes('disabled')).toBeDefined();
expect(confidentialCheckbox.attributes('checked')).toBe('true');
- expect(confidentialTooltip.exists()).toBe(true);
- expect(confidentialTooltip.text()).toBe(
+ expect(findTooltip().exists()).toBe(true);
+ expect(findTooltip().text()).toBe(
sprintf(I18N_WORK_ITEM_CONFIDENTIALITY_CHECKBOX_TOOLTIP, {
workItemType: WORK_ITEM_TYPE_ENUM_TASK.toLocaleLowerCase(),
parentWorkItemType: WORK_ITEM_TYPE_VALUE_ISSUE.toLocaleLowerCase(),
@@ -165,14 +167,11 @@ describe('WorkItemLinksForm', () => {
});
describe('adding an existing work item', () => {
- const selectAvailableWorkItemTokens = async () => {
- findTokenSelector().vm.$emit(
+ const selectAvailableWorkItemTokens = () => {
+ findWorkItemTokenInput().vm.$emit(
'input',
availableWorkItemsResponse.data.workspace.workItems.nodes,
);
- findTokenSelector().vm.$emit('blur', new FocusEvent({ relatedTarget: null }));
-
- await waitForPromises();
};
beforeEach(async () => {
@@ -181,24 +180,31 @@ describe('WorkItemLinksForm', () => {
it('renders add form', () => {
expect(findForm().exists()).toBe(true);
- expect(findTokenSelector().exists()).toBe(true);
+ expect(findWorkItemTokenInput().exists()).toBe(true);
expect(findAddChildButton().text()).toBe('Add task');
expect(findInput().exists()).toBe(false);
expect(findConfidentialCheckbox().exists()).toBe(false);
});
- it('searches for available work items as prop when typing in input', async () => {
- findTokenSelector().vm.$emit('focus');
- findTokenSelector().vm.$emit('text-input', 'Task');
- await waitForPromises();
-
- expect(availableWorkItemsResolver).toHaveBeenCalled();
+ it('renders work item token input with default props', () => {
+ expect(findWorkItemTokenInput().props()).toMatchObject({
+ value: [],
+ fullPath: 'project/path',
+ childrenType: WORK_ITEM_TYPE_ENUM_TASK,
+ childrenIds: [],
+ parentWorkItemId: 'gid://gitlab/WorkItem/1',
+ areWorkItemsToAddValid: true,
+ });
});
it('selects and adds children', async () => {
await selectAvailableWorkItemTokens();
expect(findAddChildButton().text()).toBe('Add tasks');
+ expect(findWorkItemTokenInput().props('areWorkItemsToAddValid')).toBe(true);
+ expect(findWorkItemTokenInput().props('value')).toBe(
+ availableWorkItemsResponse.data.workspace.workItems.nodes,
+ );
findForm().vm.$emit('submit', {
preventDefault: jest.fn(),
});
@@ -211,9 +217,9 @@ describe('WorkItemLinksForm', () => {
await selectAvailableWorkItemTokens();
- const validationEl = wrapper.findByTestId('work-items-invalid');
- expect(validationEl.exists()).toBe(true);
- expect(validationEl.text().trim()).toBe(
+ expect(findWorkItemTokenInput().props('areWorkItemsToAddValid')).toBe(false);
+ expect(findValidationElement().exists()).toBe(true);
+ expect(findValidationElement().text().trim()).toBe(
sprintf(
s__(
'WorkItem|%{invalidWorkItemsList} cannot be added: Cannot assign a non-confidential %{childWorkItemType} to a confidential parent %{parentWorkItemType}. Make the selected %{childWorkItemType} confidential and try again.',
diff --git a/spec/frontend/work_items/components/work_item_notes_spec.js b/spec/frontend/work_items/components/work_item_notes_spec.js
index c2821cc99f9..35f01c85ec8 100644
--- a/spec/frontend/work_items/components/work_item_notes_spec.js
+++ b/spec/frontend/work_items/components/work_item_notes_spec.js
@@ -88,6 +88,7 @@ describe('WorkItemNotes component', () => {
defaultWorkItemNotesQueryHandler = workItemNotesQueryHandler,
deleteWINoteMutationHandler = deleteWorkItemNoteMutationSuccessHandler,
isModal = false,
+ isWorkItemConfidential = false,
} = {}) => {
wrapper = shallowMount(WorkItemNotes, {
apolloProvider: createMockApollo([
@@ -106,6 +107,7 @@ describe('WorkItemNotes component', () => {
workItemType: 'task',
reportAbusePath: '/report/abuse/path',
isModal,
+ isWorkItemConfidential,
},
stubs: {
GlModal: stubComponent(GlModal, { methods: { show: showModal } }),
@@ -344,4 +346,14 @@ describe('WorkItemNotes component', () => {
});
});
});
+
+ it('passes confidential props when the work item is confidential', async () => {
+ createComponent({
+ isWorkItemConfidential: true,
+ defaultWorkItemNotesQueryHandler: workItemNotesWithCommentsQueryHandler,
+ });
+ await waitForPromises();
+
+ expect(findWorkItemCommentNoteAtIndex(0).props('isWorkItemConfidential')).toBe(true);
+ });
});
diff --git a/spec/frontend/work_items/components/work_item_relationships/__snapshots__/work_item_relationship_list_spec.js.snap b/spec/frontend/work_items/components/work_item_relationships/__snapshots__/work_item_relationship_list_spec.js.snap
new file mode 100644
index 00000000000..9105e4de5e0
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_relationships/__snapshots__/work_item_relationship_list_spec.js.snap
@@ -0,0 +1,29 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`WorkItemRelationshipList renders linked item list 1`] = `
+<div>
+ <h4
+ class="gl-font-sm gl-font-weight-semibold gl-mb-2 gl-mt-3 gl-mx-2 gl-text-gray-700"
+ data-testid="work-items-list-heading"
+ >
+ Blocking
+ </h4>
+ <div
+ class="work-items-list-body"
+ >
+ <ul
+ class="content-list work-items-list"
+ >
+ <li
+ class="gl-border-b-0! gl-pb-0! gl-pt-0!"
+ >
+ <work-item-link-child-contents-stub
+ canupdate="true"
+ childitem="[object Object]"
+ childpath="/test-project-path/-/work_items/83"
+ />
+ </li>
+ </ul>
+ </div>
+</div>
+`;
diff --git a/spec/frontend/work_items/components/work_item_relationships/work_item_relationship_list_spec.js b/spec/frontend/work_items/components/work_item_relationships/work_item_relationship_list_spec.js
new file mode 100644
index 00000000000..759ab7e14da
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_relationships/work_item_relationship_list_spec.js
@@ -0,0 +1,41 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import WorkItemRelationshipList from '~/work_items/components/work_item_relationships/work_item_relationship_list.vue';
+import WorkItemLinkChildContents from '~/work_items/components/shared/work_item_link_child_contents.vue';
+
+import { mockBlockingLinkedItem } from '../../mock_data';
+
+describe('WorkItemRelationshipList', () => {
+ let wrapper;
+ const mockLinkedItems = mockBlockingLinkedItem.linkedItems.nodes;
+
+ const createComponent = ({ linkedItems = [], heading = 'Blocking', canUpdate = true } = {}) => {
+ wrapper = shallowMountExtended(WorkItemRelationshipList, {
+ propsData: {
+ linkedItems,
+ heading,
+ canUpdate,
+ workItemFullPath: 'test-project-path',
+ },
+ });
+ };
+
+ const findHeading = () => wrapper.findByTestId('work-items-list-heading');
+ const findWorkItemLinkChildContents = () => wrapper.findComponent(WorkItemLinkChildContents);
+
+ beforeEach(() => {
+ createComponent({ linkedItems: mockLinkedItems });
+ });
+
+ it('renders linked item list', () => {
+ expect(findHeading().text()).toBe('Blocking');
+ expect(wrapper.html()).toMatchSnapshot();
+ });
+
+ it('renders work item link child contents with correct props', () => {
+ expect(findWorkItemLinkChildContents().props()).toMatchObject({
+ childItem: mockLinkedItems[0].workItem,
+ canUpdate: true,
+ childPath: '/test-project-path/-/work_items/83',
+ });
+ });
+});
diff --git a/spec/frontend/work_items/components/work_item_relationships/work_item_relationships_spec.js b/spec/frontend/work_items/components/work_item_relationships/work_item_relationships_spec.js
new file mode 100644
index 00000000000..c9a2499b127
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_relationships/work_item_relationships_spec.js
@@ -0,0 +1,93 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlLoadingIcon } from '@gitlab/ui';
+
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+
+import WidgetWrapper from '~/work_items/components/widget_wrapper.vue';
+import WorkItemRelationships from '~/work_items/components/work_item_relationships/work_item_relationships.vue';
+import WorkItemRelationshipList from '~/work_items/components/work_item_relationships/work_item_relationship_list.vue';
+import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
+
+import {
+ workItemByIidResponseFactory,
+ mockLinkedItems,
+ mockBlockingLinkedItem,
+} from '../../mock_data';
+
+describe('WorkItemRelationships', () => {
+ Vue.use(VueApollo);
+
+ let wrapper;
+ const emptyLinkedWorkItemsQueryHandler = jest
+ .fn()
+ .mockResolvedValue(workItemByIidResponseFactory());
+ const linkedWorkItemsQueryHandler = jest
+ .fn()
+ .mockResolvedValue(workItemByIidResponseFactory({ linkedItems: mockLinkedItems }));
+ const blockingLinkedWorkItemQueryHandler = jest
+ .fn()
+ .mockResolvedValue(workItemByIidResponseFactory({ linkedItems: mockBlockingLinkedItem }));
+
+ const createComponent = async ({
+ workItemQueryHandler = emptyLinkedWorkItemsQueryHandler,
+ } = {}) => {
+ const mockApollo = createMockApollo([[workItemByIidQuery, workItemQueryHandler]]);
+
+ wrapper = shallowMountExtended(WorkItemRelationships, {
+ apolloProvider: mockApollo,
+ propsData: {
+ workItemIid: '1',
+ workItemFullPath: 'test-project-path',
+ },
+ });
+
+ await waitForPromises();
+ };
+
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findWidgetWrapper = () => wrapper.findComponent(WidgetWrapper);
+ const findEmptyRelatedMessageContainer = () => wrapper.findByTestId('links-empty');
+ const findLinkedItemsCountContainer = () => wrapper.findByTestId('linked-items-count');
+ const findAllWorkItemRelationshipListComponents = () =>
+ wrapper.findAllComponents(WorkItemRelationshipList);
+
+ it('shows loading icon when query is not processed', () => {
+ createComponent();
+
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+
+ it('renders the component with empty message when there are no items', async () => {
+ await createComponent();
+
+ expect(wrapper.find('.work-item-relationships').exists()).toBe(true);
+ expect(findEmptyRelatedMessageContainer().exists()).toBe(true);
+ });
+
+ it('renders blocking linked item lists', async () => {
+ await createComponent({ workItemQueryHandler: blockingLinkedWorkItemQueryHandler });
+
+ expect(findAllWorkItemRelationshipListComponents().length).toBe(1);
+ expect(findLinkedItemsCountContainer().text()).toBe('1');
+ });
+
+ it('renders blocking, blocked by and related to linked item lists with proper count', async () => {
+ await createComponent({ workItemQueryHandler: linkedWorkItemsQueryHandler });
+
+ // renders all 3 lists: blocking, blocked by and related to
+ expect(findAllWorkItemRelationshipListComponents().length).toBe(3);
+ expect(findLinkedItemsCountContainer().text()).toBe('3');
+ });
+
+ it('shows an alert when list loading fails', async () => {
+ const errorMessage = 'Some error';
+ await createComponent({
+ workItemQueryHandler: jest.fn().mockRejectedValue(new Error(errorMessage)),
+ });
+
+ expect(findWidgetWrapper().props('error')).toBe(errorMessage);
+ });
+});
diff --git a/spec/frontend/work_items/components/work_item_state_badge_spec.js b/spec/frontend/work_items/components/work_item_state_badge_spec.js
index 888d712cc5a..248f16a4081 100644
--- a/spec/frontend/work_items/components/work_item_state_badge_spec.js
+++ b/spec/frontend/work_items/components/work_item_state_badge_spec.js
@@ -1,4 +1,4 @@
-import { GlBadge } from '@gitlab/ui';
+import { GlBadge, GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { STATE_OPEN, STATE_CLOSED } from '~/work_items/constants';
import WorkItemStateBadge from '~/work_items/components/work_item_state_badge.vue';
@@ -14,6 +14,7 @@ describe('WorkItemStateBadge', () => {
});
};
const findStatusBadge = () => wrapper.findComponent(GlBadge);
+ const findStatusBadgeIcon = () => wrapper.findComponent(GlIcon);
it.each`
state | icon | stateText | variant
@@ -24,7 +25,7 @@ describe('WorkItemStateBadge', () => {
({ state, icon, stateText, variant }) => {
createComponent({ workItemState: state });
- expect(findStatusBadge().props('icon')).toBe(icon);
+ expect(findStatusBadgeIcon().props('name')).toBe(icon);
expect(findStatusBadge().props('variant')).toBe(variant);
expect(findStatusBadge().text()).toBe(stateText);
},
diff --git a/spec/frontend/work_items/list/components/work_items_list_app_spec.js b/spec/frontend/work_items/list/components/work_items_list_app_spec.js
index c92d092eb43..96083478e77 100644
--- a/spec/frontend/work_items/list/components/work_items_list_app_spec.js
+++ b/spec/frontend/work_items/list/components/work_items_list_app_spec.js
@@ -2,6 +2,8 @@ import * as Sentry from '@sentry/browser';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
+import IssueCardStatistics from 'ee_else_ce/issues/list/components/issue_card_statistics.vue';
+import IssueCardTimeInfo from 'ee_else_ce/issues/list/components/issue_card_time_info.vue';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { STATUS_OPEN } from '~/issues/constants';
@@ -20,6 +22,8 @@ describe('WorkItemsListApp component', () => {
const defaultQueryHandler = jest.fn().mockResolvedValue(groupWorkItemsQueryResponse);
const findIssuableList = () => wrapper.findComponent(IssuableList);
+ const findIssueCardStatistics = () => wrapper.findComponent(IssueCardStatistics);
+ const findIssueCardTimeInfo = () => wrapper.findComponent(IssueCardTimeInfo);
const mountComponent = ({ queryHandler = defaultQueryHandler } = {}) => {
wrapper = shallowMount(WorkItemsListApp, {
@@ -37,9 +41,9 @@ describe('WorkItemsListApp component', () => {
currentTab: STATUS_OPEN,
error: '',
issuables: [],
+ issuablesLoading: true,
namespace: 'work-items',
recentSearchesStorageKey: 'issues',
- searchInputPlaceholder: 'Search or filter results...',
searchTokens: [],
showWorkItemTypeIcon: true,
sortOptions: [],
@@ -47,6 +51,18 @@ describe('WorkItemsListApp component', () => {
});
});
+ it('renders IssueCardStatistics component', () => {
+ mountComponent();
+
+ expect(findIssueCardStatistics().exists()).toBe(true);
+ });
+
+ it('renders IssueCardTimeInfo component', () => {
+ mountComponent();
+
+ expect(findIssueCardTimeInfo().exists()).toBe(true);
+ });
+
it('renders work items', async () => {
mountComponent();
await waitForPromises();
diff --git a/spec/frontend/work_items/mock_data.js b/spec/frontend/work_items/mock_data.js
index 05e83c0df3d..ba244b19eb5 100644
--- a/spec/frontend/work_items/mock_data.js
+++ b/spec/frontend/work_items/mock_data.js
@@ -1,3 +1,5 @@
+import { WIDGET_TYPE_LINKED_ITEMS } from '~/work_items/constants';
+
export const mockAssignees = [
{
__typename: 'UserCore',
@@ -451,6 +453,126 @@ export const objectiveType = {
iconName: 'issue-type-objective',
};
+export const mockEmptyLinkedItems = {
+ type: WIDGET_TYPE_LINKED_ITEMS,
+ blocked: false,
+ blockedByCount: 0,
+ blockingCount: 0,
+ linkedItems: {
+ nodes: [],
+ __typename: 'LinkedWorkItemTypeConnection',
+ },
+ __typename: 'WorkItemWidgetLinkedItems',
+};
+
+export const mockBlockingLinkedItem = {
+ type: WIDGET_TYPE_LINKED_ITEMS,
+ linkedItems: {
+ nodes: [
+ {
+ linkId: 'gid://gitlab/WorkItems::RelatedWorkItemLink/8',
+ linkType: 'blocks',
+ workItem: {
+ id: 'gid://gitlab/WorkItem/675',
+ iid: '83',
+ confidential: true,
+ workItemType: {
+ id: 'gid://gitlab/WorkItems::Type/5',
+ name: 'Task',
+ iconName: 'issue-type-task',
+ __typename: 'WorkItemType',
+ },
+ title: 'Task 1201',
+ state: 'OPEN',
+ createdAt: '2023-03-28T10:50:16Z',
+ closedAt: null,
+ widgets: [],
+ __typename: 'WorkItem',
+ },
+ __typename: 'LinkedWorkItemType',
+ },
+ ],
+ __typename: 'LinkedWorkItemTypeConnection',
+ },
+ __typename: 'WorkItemWidgetLinkedItems',
+};
+
+export const mockLinkedItems = {
+ type: WIDGET_TYPE_LINKED_ITEMS,
+ linkedItems: {
+ nodes: [
+ {
+ linkId: 'gid://gitlab/WorkItems::RelatedWorkItemLink/8',
+ linkType: 'relates_to',
+ workItem: {
+ id: 'gid://gitlab/WorkItem/675',
+ iid: '83',
+ confidential: true,
+ workItemType: {
+ id: 'gid://gitlab/WorkItems::Type/5',
+ name: 'Task',
+ iconName: 'issue-type-task',
+ __typename: 'WorkItemType',
+ },
+ title: 'Task 1201',
+ state: 'OPEN',
+ createdAt: '2023-03-28T10:50:16Z',
+ closedAt: null,
+ widgets: [],
+ __typename: 'WorkItem',
+ },
+ __typename: 'LinkedWorkItemType',
+ },
+ {
+ linkId: 'gid://gitlab/WorkItems::RelatedWorkItemLink/9',
+ linkType: 'is_blocked_by',
+ workItem: {
+ id: 'gid://gitlab/WorkItem/646',
+ iid: '55',
+ confidential: true,
+ workItemType: {
+ id: 'gid://gitlab/WorkItems::Type/6',
+ name: 'Objective',
+ iconName: 'issue-type-objective',
+ __typename: 'WorkItemType',
+ },
+ title: 'Multilevel Objective 1',
+ state: 'OPEN',
+ createdAt: '2023-03-28T10:50:16Z',
+ closedAt: null,
+ widgets: [],
+ __typename: 'WorkItem',
+ },
+ __typename: 'LinkedWorkItemType',
+ },
+ {
+ linkId: 'gid://gitlab/WorkItems::RelatedWorkItemLink/10',
+ linkType: 'blocks',
+ workItem: {
+ id: 'gid://gitlab/WorkItem/647',
+ iid: '56',
+ confidential: true,
+ workItemType: {
+ id: 'gid://gitlab/WorkItems::Type/6',
+ name: 'Objective',
+ iconName: 'issue-type-objective',
+ __typename: 'WorkItemType',
+ },
+ title: 'Multilevel Objective 2',
+ state: 'OPEN',
+ createdAt: '2023-03-28T10:50:16Z',
+ closedAt: null,
+ widgets: [],
+ __typename: 'WorkItem',
+ },
+ __typename: 'LinkedWorkItemType',
+ },
+ ],
+ __typename: 'LinkedWorkItemTypeConnection',
+ },
+ __typename: 'WorkItemWidgetLinkedItems',
+};
+
export const workItemResponseFactory = ({
iid = '1',
canUpdate = false,
@@ -473,6 +595,7 @@ export const workItemResponseFactory = ({
confidential = false,
canInviteMembers = false,
labelsWidgetPresent = true,
+ linkedItemsWidgetPresent = true,
labels = mockLabels,
allowsScopedLabels = false,
lastEditedAt = null,
@@ -485,6 +608,7 @@ export const workItemResponseFactory = ({
updatedAt = '2022-08-08T12:32:54Z',
awardEmoji = mockAwardsWidget,
state = 'OPEN',
+ linkedItems = mockEmptyLinkedItems,
} = {}) => ({
data: {
workItem: {
@@ -683,6 +807,7 @@ export const workItemResponseFactory = ({
awardEmoji,
}
: { type: 'MOCK TYPE' },
+ linkedItemsWidgetPresent ? linkedItems : { type: 'MOCK TYPE' },
],
},
},
@@ -1471,6 +1596,27 @@ export const availableWorkItemsResponse = {
},
};
+export const searchedWorkItemsResponse = {
+ data: {
+ workspace: {
+ __typename: 'Project',
+ id: 'gid://gitlab/Project/2',
+ workItems: {
+ nodes: [
+ {
+ id: 'gid://gitlab/WorkItem/459',
+ title: 'Task 2',
+ state: 'OPEN',
+ createdAt: '2022-08-03T12:41:54Z',
+ confidential: false,
+ __typename: 'WorkItem',
+ },
+ ],
+ },
+ },
+ },
+};
+
export const projectMembersResponseWithCurrentUser = {
data: {
workspace: {
@@ -1883,8 +2029,7 @@ export const mockWorkItemNotesResponse = {
systemNoteIconName: 'link',
createdAt: '2022-11-14T04:18:59Z',
lastEditedAt: null,
- url:
- 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37?iid_path=true#note_199',
+ url: 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37#note_199',
lastEditedBy: null,
system: true,
internal: false,
@@ -1934,8 +2079,7 @@ export const mockWorkItemNotesResponse = {
systemNoteIconName: 'clock',
createdAt: '2022-11-14T04:18:59Z',
lastEditedAt: null,
- url:
- 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37?iid_path=true#note_201',
+ url: 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37#note_201',
lastEditedBy: null,
system: true,
internal: false,
@@ -1984,8 +2128,7 @@ export const mockWorkItemNotesResponse = {
systemNoteIconName: 'weight',
createdAt: '2022-11-25T07:16:20Z',
lastEditedAt: null,
- url:
- 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37?iid_path=true#note_202',
+ url: 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37#note_202',
lastEditedBy: null,
system: true,
internal: false,
@@ -2097,8 +2240,7 @@ export const mockWorkItemNotesByIidResponse = {
systemNoteIconName: 'link',
createdAt: '2022-11-14T04:18:59Z',
lastEditedAt: null,
- url:
- 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37?iid_path=true#note_191',
+ url: 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37#note_191',
lastEditedBy: null,
system: true,
internal: false,
@@ -2153,8 +2295,7 @@ export const mockWorkItemNotesByIidResponse = {
systemNoteIconName: 'clock',
createdAt: '2022-11-14T04:18:59Z',
lastEditedAt: null,
- url:
- 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37?iid_path=true#note_191',
+ url: 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37#note_191',
lastEditedBy: null,
system: true,
internal: false,
@@ -2210,8 +2351,7 @@ export const mockWorkItemNotesByIidResponse = {
systemNoteIconName: 'iteration',
createdAt: '2022-11-14T04:19:00Z',
lastEditedAt: null,
- url:
- 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37?iid_path=true#note_191',
+ url: 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37#note_191',
lastEditedBy: null,
system: true,
internal: false,
@@ -2325,8 +2465,7 @@ export const mockMoreWorkItemNotesResponse = {
systemNoteIconName: 'link',
createdAt: '2022-11-14T04:18:59Z',
lastEditedAt: null,
- url:
- 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37?iid_path=true#note_191',
+ url: 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37#note_191',
lastEditedBy: null,
system: true,
internal: false,
@@ -2381,8 +2520,7 @@ export const mockMoreWorkItemNotesResponse = {
systemNoteIconName: 'clock',
createdAt: '2022-11-14T04:18:59Z',
lastEditedAt: null,
- url:
- 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37?iid_path=true#note_191',
+ url: 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37#note_191',
lastEditedBy: null,
system: true,
internal: false,
@@ -2435,8 +2573,7 @@ export const mockMoreWorkItemNotesResponse = {
systemNoteIconName: 'weight',
createdAt: '2022-11-25T07:16:20Z',
lastEditedAt: null,
- url:
- 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37?iid_path=true#note_191',
+ url: 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37#note_191',
lastEditedBy: null,
system: true,
internal: false,
@@ -2511,7 +2648,7 @@ export const createWorkItemNoteResponse = {
systemNoteIconName: null,
createdAt: '2023-01-25T04:49:46Z',
lastEditedAt: null,
- url: 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37?iid_path=true#note_191',
+ url: 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37#note_191',
lastEditedBy: null,
maxAccessLevelOfAuthor: 'Owner',
authorIsContributor: false,
@@ -2565,7 +2702,7 @@ export const mockWorkItemCommentNote = {
systemNoteIconName: false,
createdAt: '2022-11-25T07:16:20Z',
lastEditedAt: null,
- url: 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37?iid_path=true#note_191',
+ url: 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37#note_191',
lastEditedBy: null,
system: false,
internal: false,
@@ -2665,8 +2802,7 @@ export const mockWorkItemNotesResponseWithComments = {
systemNoteIconName: null,
createdAt: '2023-01-12T07:47:40Z',
lastEditedAt: null,
- url:
- 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37?iid_path=true#note_191',
+ url: 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37#note_191',
lastEditedBy: null,
maxAccessLevelOfAuthor: 'Owner',
authorIsContributor: false,
@@ -2708,8 +2844,7 @@ export const mockWorkItemNotesResponseWithComments = {
systemNoteIconName: null,
createdAt: '2023-01-18T09:09:54Z',
lastEditedAt: null,
- url:
- 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37?iid_path=true#note_191',
+ url: 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37#note_191',
lastEditedBy: null,
maxAccessLevelOfAuthor: 'Owner',
authorIsContributor: false,
@@ -2758,8 +2893,7 @@ export const mockWorkItemNotesResponseWithComments = {
systemNoteIconName: 'weight',
createdAt: '2022-11-25T07:16:20Z',
lastEditedAt: null,
- url:
- 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37?iid_path=true#note_191',
+ url: 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37#note_191',
lastEditedBy: null,
system: false,
internal: false,
@@ -2821,7 +2955,7 @@ export const workItemNotesCreateSubscriptionResponse = {
systemNoteIconName: 'weight',
createdAt: '2022-11-25T07:16:20Z',
lastEditedAt: null,
- url: 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37?iid_path=true#note_191',
+ url: 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37#note_191',
lastEditedBy: null,
system: true,
internal: false,
@@ -2836,7 +2970,7 @@ export const workItemNotesCreateSubscriptionResponse = {
systemNoteIconName: 'weight',
createdAt: '2022-11-25T07:16:20Z',
lastEditedAt: null,
- url: 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37?iid_path=true#note_191',
+ url: 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37#note_191',
lastEditedBy: null,
system: true,
internal: false,
@@ -2914,7 +3048,7 @@ export const workItemNotesUpdateSubscriptionResponse = {
systemNoteIconName: 'pencil',
createdAt: '2022-11-25T07:16:20Z',
lastEditedAt: null,
- url: 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37?iid_path=true#note_191',
+ url: 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37#note_191',
lastEditedBy: null,
system: true,
internal: false,
diff --git a/spec/frontend/work_items/utils_spec.js b/spec/frontend/work_items/utils_spec.js
index aa24b80cf08..8a49140119d 100644
--- a/spec/frontend/work_items/utils_spec.js
+++ b/spec/frontend/work_items/utils_spec.js
@@ -1,4 +1,4 @@
-import { autocompleteDataSources, markdownPreviewPath } from '~/work_items/utils';
+import { autocompleteDataSources, markdownPreviewPath, workItemPath } from '~/work_items/utils';
describe('autocompleteDataSources', () => {
beforeEach(() => {
@@ -25,3 +25,14 @@ describe('markdownPreviewPath', () => {
);
});
});
+
+describe('workItemPath', () => {
+ it('returns correct work item path', () => {
+ expect(workItemPath('project/group', '2')).toEqual('/project/group/-/work_items/2');
+ });
+
+ it('returns correct work item path with relative url root', () => {
+ gon.relative_url_root = '/foobar';
+ expect(workItemPath('project/group', '2')).toEqual('/foobar/project/group/-/work_items/2');
+ });
+});
diff --git a/spec/graphql/mutations/base_mutation_spec.rb b/spec/graphql/mutations/base_mutation_spec.rb
index 6b366b0c234..a73d914f48f 100644
--- a/spec/graphql/mutations/base_mutation_spec.rb
+++ b/spec/graphql/mutations/base_mutation_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Mutations::BaseMutation do
+RSpec.describe ::Mutations::BaseMutation, feature_category: :api do
include GraphqlHelpers
describe 'argument nullability' do
diff --git a/spec/graphql/mutations/design_management/delete_spec.rb b/spec/graphql/mutations/design_management/delete_spec.rb
index a76943b9ff8..9a2efb61e55 100644
--- a/spec/graphql/mutations/design_management/delete_spec.rb
+++ b/spec/graphql/mutations/design_management/delete_spec.rb
@@ -90,12 +90,12 @@ RSpec.describe Mutations::DesignManagement::Delete do
allow(Gitlab::Tracking).to receive(:event) # rubocop:disable RSpec/ExpectGitlabTracking
filenames.each(&:present?) # ignore setup
- # Queries: as of 2022-06-15
+ # Queries: as of 2022-08-30
# -------------
# 01. routing query
- # 02. find project by id
- # 03. project.project_features
- # 04. find namespace by id and type
+ # 02. policy query: find namespace by type and id
+ # 03. policy query: find namespace by id
+ # 04. policy query: project.project_feature
# 05,06. project.authorizations for user (same query twice)
# 07. find issue by iid
# 08. find project by id
@@ -109,21 +109,22 @@ RSpec.describe Mutations::DesignManagement::Delete do
# 16, 17 project.authorizations for user (same query as 5)
# 18. find design_management_repository for project
# 19. find route by id and source_type
- # 20. find plan for standard context
# ------------- our queries are below:
- # 21. start transaction 1
- # 22. start transaction 2
- # 23. find version by sha and issue
- # 24. exists version with sha and issue?
- # 25. leave transaction 2
- # 26. create version with sha and issue
- # 27. create design-version links
- # 28. validate version.actions.present?
- # 29. validate version.issue.present?
- # 30. validate version.sha is unique
- # 31. leave transaction 1
+ # 20. start transaction
+ # 21. create version with sha and issue
+ # 22. create design-version links
+ # 23. validate version.actions.present?
+ # 24. validate version.sha is unique
+ # 25. validate version.issue.present?
+ # 26. leave transaction
+ # 27. find project by id (same query as 8)
+ # 28. find namespace by id (same query as 9)
+ # 29. find project by id (same query as 8)
+ # 30. find project by id (same query as 8)
+ # 31. create event
+ # 32. find plan for standard context
#
- expect { run_mutation }.not_to exceed_query_limit(31)
+ expect { run_mutation }.not_to exceed_query_limit(32)
end
end
diff --git a/spec/graphql/mutations/work_items/linked_items/base_spec.rb b/spec/graphql/mutations/work_items/linked_items/base_spec.rb
index 7061c37abd3..bc52aee443e 100644
--- a/spec/graphql/mutations/work_items/linked_items/base_spec.rb
+++ b/spec/graphql/mutations/work_items/linked_items/base_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Mutations::WorkItems::LinkedItems::Base, feature_category: :group
it 'raises a NotImplementedError error if the update_links method is called on the base class' do
mutation = described_class.new(context: { current_user: user }, object: nil, field: nil)
- expect { mutation.resolve(id: work_item.to_gid) }.to raise_error(NotImplementedError)
+ expect { mutation.resolve(id: work_item.to_gid) }
+ .to raise_error(NotImplementedError, "#{described_class} does not implement update_links")
end
end
diff --git a/spec/graphql/resolvers/base_resolver_spec.rb b/spec/graphql/resolvers/base_resolver_spec.rb
index d80a61fd318..27c62da31c3 100644
--- a/spec/graphql/resolvers/base_resolver_spec.rb
+++ b/spec/graphql/resolvers/base_resolver_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Resolvers::BaseResolver do
+RSpec.describe Resolvers::BaseResolver, feature_category: :api do
include GraphqlHelpers
let(:resolver) do
diff --git a/spec/graphql/resolvers/blame_resolver_spec.rb b/spec/graphql/resolvers/blame_resolver_spec.rb
new file mode 100644
index 00000000000..a3344132928
--- /dev/null
+++ b/spec/graphql/resolvers/blame_resolver_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::BlameResolver, feature_category: :source_code_management do
+ include GraphqlHelpers
+
+ describe '#resolve' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:path) { 'files/ruby/popen.rb' }
+ let(:commit) { project.commit('master') }
+ let(:blob) { project.repository.blob_at(commit.id, path) }
+ let(:args) { { from_line: 1, to_line: 2 } }
+
+ subject(:resolve_blame) { resolve(described_class, obj: blob, args: args, ctx: { current_user: user }) }
+
+ context 'when unauthorized' do
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ resolve_blame
+ end
+ end
+ end
+
+ context 'when authorized' do
+ before_all do
+ project.add_developer(user)
+ end
+
+ context 'when feature is disabled' do
+ before do
+ stub_feature_flags(graphql_git_blame: false)
+ end
+
+ it 'returns nothing' do
+ expect(subject).to be_nil
+ end
+ end
+
+ shared_examples 'argument error' do
+ it 'generates an argument error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError,
+ '`from_line` and `to_line` must be greater than or equal to 1') do
+ resolve_blame
+ end
+ end
+ end
+
+ context 'when feature is enabled' do
+ context 'when from_line is below 1' do
+ let(:args) { { from_line: 0, to_line: 2 } }
+
+ it_behaves_like 'argument error'
+ end
+
+ context 'when to_line is below 1' do
+ let(:args) { { from_line: 1, to_line: 0 } }
+
+ it_behaves_like 'argument error'
+ end
+
+ context 'when to_line is less than from_line' do
+ let(:args) { { from_line: 3, to_line: 1 } }
+
+ it 'generates an argument error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError,
+ '`to_line` must be greater than or equal to `from_line`') do
+ resolve_blame
+ end
+ end
+ end
+
+ it 'returns blame object' do
+ expect(resolve_blame).to be_an_instance_of(Gitlab::Blame)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/branch_commit_resolver_spec.rb b/spec/graphql/resolvers/branch_commit_resolver_spec.rb
index 3d5702539fa..f901306a355 100644
--- a/spec/graphql/resolvers/branch_commit_resolver_spec.rb
+++ b/spec/graphql/resolvers/branch_commit_resolver_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe Resolvers::BranchCommitResolver do
commit_a = repository.commits('master', limit: 1).last
commit_b = repository.commits('spooky-stuff', limit: 1).last
- commits = batch_sync(max_queries: 1) do
+ commits = batch_sync(max_queries: 2) do
[
resolve(described_class, obj: branch),
resolve(described_class, obj: repository.find_branch('spooky-stuff'))
diff --git a/spec/graphql/resolvers/ci/all_jobs_resolver_spec.rb b/spec/graphql/resolvers/ci/all_jobs_resolver_spec.rb
index fddc73fadfe..6b9e3a484b1 100644
--- a/spec/graphql/resolvers/ci/all_jobs_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/all_jobs_resolver_spec.rb
@@ -5,37 +5,114 @@ require 'spec_helper'
RSpec.describe Resolvers::Ci::AllJobsResolver, feature_category: :continuous_integration do
include GraphqlHelpers
- let_it_be(:successful_job) { create(:ci_build, :success, name: 'Job One') }
- let_it_be(:successful_job_two) { create(:ci_build, :success, name: 'Job Two') }
- let_it_be(:failed_job) { create(:ci_build, :failed, name: 'Job Three') }
- let_it_be(:pending_job) { create(:ci_build, :pending, name: 'Job Three') }
+ let_it_be(:instance_runner) { create(:ci_runner, :instance) }
+ let_it_be(:successful_job) { create(:ci_build, :success, name: 'successful_job') }
+ let_it_be(:successful_job_two) { create(:ci_build, :success, name: 'successful_job_two') }
+ let_it_be(:failed_job) { create(:ci_build, :failed, name: 'failed_job') }
+ let_it_be(:pending_job) { create(:ci_build, :pending, name: 'pending_job') }
let(:args) { {} }
- subject { resolve_jobs(args) }
-
describe '#resolve' do
- context 'with admin' do
- let(:current_user) { create(:admin) }
+ subject(:request) { resolve_jobs(args) }
+
+ context 'when current user is an admin' do
+ let_it_be(:current_user) { create(:admin) }
shared_examples 'executes as admin' do
- context 'with statuses argument' do
- let(:args) { { statuses: [Types::Ci::JobStatusEnum.coerce_isolated_input('SUCCESS')] } }
+ context "with argument `statuses`" do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:statuses, :expected_jobs) do
+ nil | lazy { [successful_job, successful_job_two, failed_job, pending_job] }
+ %w[SUCCESS] | lazy { [successful_job, successful_job_two] }
+ %w[SUCCESS FAILED] | lazy { [successful_job, successful_job_two, failed_job] }
+ %w[CANCELED] | lazy { [] }
+ end
- it { is_expected.to contain_exactly(successful_job, successful_job_two) }
+ with_them do
+ let(:args) do
+ { statuses: statuses&.map { |status| Types::Ci::JobStatusEnum.coerce_isolated_input(status) } }
+ end
+
+ it { is_expected.to contain_exactly(*expected_jobs) }
+ end
end
- context 'with multiple statuses' do
- let(:args) do
- { statuses: [Types::Ci::JobStatusEnum.coerce_isolated_input('SUCCESS'),
- Types::Ci::JobStatusEnum.coerce_isolated_input('FAILED')] }
+ context "with argument `runner_types`" do
+ let_it_be(:successful_job_with_instance_runner) do
+ create(:ci_build, :success, name: 'successful_job_with_instance_runner', runner: instance_runner)
end
- it { is_expected.to contain_exactly(successful_job, successful_job_two, failed_job) }
+ context 'with feature flag :admin_jobs_filter_runner_type enabled' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:runner_types, :expected_jobs) do
+ nil | lazy do
+ [
+ successful_job,
+ successful_job_two,
+ failed_job,
+ pending_job,
+ successful_job_with_instance_runner
+ ]
+ end
+ %w[INSTANCE_TYPE] | lazy { [successful_job_with_instance_runner] }
+ %w[INSTANCE_TYPE GROUP_TYPE] | lazy { [successful_job_with_instance_runner] }
+ %w[PROJECT_TYPE] | lazy { [] }
+ end
+
+ with_them do
+ let(:args) do
+ {
+ runner_types: runner_types&.map { |type| Types::Ci::RunnerTypeEnum.coerce_isolated_input(type) }
+ }
+ end
+
+ it { is_expected.to match_array(expected_jobs) }
+ end
+ end
end
- context 'without statuses argument' do
- it { is_expected.to contain_exactly(successful_job, successful_job_two, failed_job, pending_job) }
+ context "with argument combination" do
+ let_it_be(:successful_job_with_instance_runner) do
+ create(
+ :ci_build,
+ :success,
+ name: 'successful_job_with_instance_runner',
+ runner: instance_runner
+ )
+ end
+
+ let_it_be(:running_job_with_group_runner) do
+ create(:ci_build, :running, name: 'running_job_with_group_runner', runner: create(:ci_runner, :group))
+ end
+
+ context 'with feature flag :admin_jobs_filter_runner_type enabled' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:statuses, :runner_types, :expected_jobs) do
+ %w[SUCCESS] | %w[INSTANCE_TYPE] | lazy { [successful_job_with_instance_runner] }
+ %w[CANCELED] | %w[INSTANCE_TYPE] | lazy { [] }
+ %w[SUCCESS RUNNING] | %w[INSTANCE_TYPE GROUP_TYPE] | lazy do
+ [
+ successful_job_with_instance_runner,
+ running_job_with_group_runner
+ ]
+ end
+ end
+
+ with_them do
+ let(:args) do
+ {
+ statuses: statuses&.map { |status| Types::Ci::JobStatusEnum.coerce_isolated_input(status) },
+ runner_types: runner_types&.map { |type| Types::Ci::RunnerTypeEnum.coerce_isolated_input(type) }
+ }
+ end
+
+ it { is_expected.to contain_exactly(*expected_jobs) }
+ end
+ end
end
end
@@ -55,7 +132,9 @@ RSpec.describe Resolvers::Ci::AllJobsResolver, feature_category: :continuous_int
end
context 'with unauthorized user' do
- let(:current_user) { nil }
+ let_it_be(:unauth_user) { create(:user) }
+
+ let(:current_user) { unauth_user }
it { is_expected.to be_empty }
end
diff --git a/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb b/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb
index ff343f3f43d..fedae5c86a8 100644
--- a/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Resolvers::Ci::GroupRunnersResolver, feature_category: :runner_fl
include GraphqlHelpers
describe '#resolve' do
- subject do
+ subject(:resolve_scope) do
resolve(described_class, obj: obj, ctx: { current_user: user }, args: args,
arg_style: :internal)
end
@@ -18,8 +18,10 @@ RSpec.describe Resolvers::Ci::GroupRunnersResolver, feature_category: :runner_fl
# First, we can do a couple of basic real tests to verify common cases. That ensures that the code works.
context 'when user cannot see runners' do
- it 'returns no runners' do
- expect(subject.items.to_a).to eq([])
+ it 'returns Gitlab::Graphql::Errors::ResourceNotAvailable' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ resolve_scope
+ end
end
end
@@ -29,14 +31,16 @@ RSpec.describe Resolvers::Ci::GroupRunnersResolver, feature_category: :runner_fl
end
it 'returns all the runners' do
- expect(subject.items.to_a).to contain_exactly(inactive_project_runner, offline_project_runner, group_runner, subgroup_runner)
+ expect(resolve_scope.items.to_a).to contain_exactly(
+ inactive_project_runner, offline_project_runner, group_runner, subgroup_runner
+ )
end
context 'with membership direct' do
let(:args) { { membership: :direct } }
it 'returns only direct runners' do
- expect(subject.items.to_a).to contain_exactly(group_runner)
+ expect(resolve_scope.items.to_a).to contain_exactly(group_runner)
end
end
end
@@ -46,7 +50,7 @@ RSpec.describe Resolvers::Ci::GroupRunnersResolver, feature_category: :runner_fl
let(:obj) { nil }
it 'raises an error' do
- expect { subject }.to raise_error('Expected group missing')
+ expect { resolve_scope }.to raise_error('Expected group missing')
end
end
@@ -54,7 +58,7 @@ RSpec.describe Resolvers::Ci::GroupRunnersResolver, feature_category: :runner_fl
let(:obj) { build(:project) }
it 'raises an error' do
- expect { subject }.to raise_error('Expected group missing')
+ expect { resolve_scope }.to raise_error('Expected group missing')
end
end
@@ -90,7 +94,7 @@ RSpec.describe Resolvers::Ci::GroupRunnersResolver, feature_category: :runner_fl
allow(::Ci::RunnersFinder).to receive(:new).with(current_user: user, params: expected_params).once.and_return(finder)
allow(finder).to receive(:execute).once.and_return([:execute_return_value])
- expect(subject.items.to_a).to eq([:execute_return_value])
+ expect(resolve_scope.items.to_a).to eq([:execute_return_value])
end
end
end
diff --git a/spec/graphql/resolvers/ci/project_runners_resolver_spec.rb b/spec/graphql/resolvers/ci/project_runners_resolver_spec.rb
index 83435db2ea7..55a98106baf 100644
--- a/spec/graphql/resolvers/ci/project_runners_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/project_runners_resolver_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Resolvers::Ci::ProjectRunnersResolver, feature_category: :runner_
include GraphqlHelpers
describe '#resolve' do
- subject do
+ subject(:resolve_scope) do
resolve(described_class, obj: obj, ctx: { current_user: user }, args: args,
arg_style: :internal)
end
@@ -17,8 +17,10 @@ RSpec.describe Resolvers::Ci::ProjectRunnersResolver, feature_category: :runner_
let(:args) { {} }
context 'when user cannot see runners' do
- it 'returns no runners' do
- expect(subject.items.to_a).to eq([])
+ it 'returns Gitlab::Graphql::Errors::ResourceNotAvailable' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ resolve_scope
+ end
end
end
@@ -30,7 +32,7 @@ RSpec.describe Resolvers::Ci::ProjectRunnersResolver, feature_category: :runner_
let(:available_runners) { [inactive_project_runner, offline_project_runner, group_runner, instance_runner] }
it 'returns all runners available to the project' do
- expect(subject.items.to_a).to match_array(available_runners)
+ expect(resolve_scope.items.to_a).to match_array(available_runners)
end
end
@@ -38,7 +40,7 @@ RSpec.describe Resolvers::Ci::ProjectRunnersResolver, feature_category: :runner_
let(:obj) { nil }
it 'raises an error' do
- expect { subject }.to raise_error('Expected project missing')
+ expect { resolve_scope }.to raise_error('Expected project missing')
end
end
@@ -46,7 +48,7 @@ RSpec.describe Resolvers::Ci::ProjectRunnersResolver, feature_category: :runner_
let(:obj) { build(:group) }
it 'raises an error' do
- expect { subject }.to raise_error('Expected project missing')
+ expect { resolve_scope }.to raise_error('Expected project missing')
end
end
@@ -79,7 +81,7 @@ RSpec.describe Resolvers::Ci::ProjectRunnersResolver, feature_category: :runner_
params: expected_params).once.and_return(finder)
allow(finder).to receive(:execute).once.and_return([:execute_return_value])
- expect(subject.items.to_a).to eq([:execute_return_value])
+ expect(resolve_scope.items.to_a).to contain_exactly(:execute_return_value)
end
end
end
diff --git a/spec/graphql/resolvers/ci/runners_resolver_spec.rb b/spec/graphql/resolvers/ci/runners_resolver_spec.rb
index 02fc7d28255..35831579799 100644
--- a/spec/graphql/resolvers/ci/runners_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/runners_resolver_spec.rb
@@ -20,8 +20,10 @@ RSpec.describe Resolvers::Ci::RunnersResolver, feature_category: :runner_fleet d
context 'when user cannot see runners' do
let(:user) { build(:user) }
- it 'returns no runners' do
- expect(subject.items.to_a).to eq([])
+ it 'returns Gitlab::Graphql::Errors::ResourceNotAvailable' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ resolve_scope
+ end
end
end
@@ -30,20 +32,26 @@ RSpec.describe Resolvers::Ci::RunnersResolver, feature_category: :runner_fleet d
context 'when admin mode setting is disabled', :do_not_mock_admin_mode_setting do
it 'returns all the runners' do
- expect(subject.items.to_a).to contain_exactly(inactive_project_runner, offline_project_runner, group_runner, subgroup_runner, instance_runner)
+ expect(resolve_scope.items.to_a).to contain_exactly(
+ inactive_project_runner, offline_project_runner, group_runner, subgroup_runner, instance_runner
+ )
end
end
context 'when admin mode setting is enabled' do
context 'when in admin mode', :enable_admin_mode do
it 'returns all the runners' do
- expect(subject.items.to_a).to contain_exactly(inactive_project_runner, offline_project_runner, group_runner, subgroup_runner, instance_runner)
+ expect(resolve_scope.items.to_a).to contain_exactly(
+ inactive_project_runner, offline_project_runner, group_runner, subgroup_runner, instance_runner
+ )
end
end
context 'when not in admin mode' do
- it 'returns no runners' do
- expect(subject.items.to_a).to eq([])
+ it 'returns Gitlab::Graphql::Errors::ResourceNotAvailable' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ resolve_scope
+ end
end
end
end
@@ -54,7 +62,7 @@ RSpec.describe Resolvers::Ci::RunnersResolver, feature_category: :runner_fleet d
let(:obj) { build(:project) }
it 'raises an error' do
- expect { subject }.to raise_error(a_string_including('Unexpected parent type'))
+ expect { resolve_scope }.to raise_error(a_string_including('Unexpected parent type'))
end
end
@@ -93,7 +101,7 @@ RSpec.describe Resolvers::Ci::RunnersResolver, feature_category: :runner_fleet d
expect(::Ci::RunnersFinder).to receive(:new).with(current_user: user, params: expected_params).once.and_return(finder)
allow(finder).to receive(:execute).once.and_return([:execute_return_value])
- expect(subject.items.to_a).to eq([:execute_return_value])
+ expect(resolve_scope.items.to_a).to contain_exactly :execute_return_value
end
end
@@ -116,7 +124,7 @@ RSpec.describe Resolvers::Ci::RunnersResolver, feature_category: :runner_fleet d
expect(::Ci::RunnersFinder).to receive(:new).with(current_user: user, params: expected_params).once.and_return(finder)
allow(finder).to receive(:execute).once.and_return([:execute_return_value])
- expect(subject.items.to_a).to eq([:execute_return_value])
+ expect(resolve_scope.items.to_a).to contain_exactly :execute_return_value
end
end
@@ -136,7 +144,7 @@ RSpec.describe Resolvers::Ci::RunnersResolver, feature_category: :runner_fleet d
expect(::Ci::RunnersFinder).to receive(:new).with(current_user: user, params: expected_params).once.and_return(finder)
allow(finder).to receive(:execute).once.and_return([:execute_return_value])
- expect(subject.items.to_a).to eq([:execute_return_value])
+ expect(resolve_scope.items.to_a).to contain_exactly :execute_return_value
end
end
@@ -153,7 +161,7 @@ RSpec.describe Resolvers::Ci::RunnersResolver, feature_category: :runner_fleet d
expect(::Ci::RunnersFinder).to receive(:new).with(current_user: user, params: expected_params).once.and_return(finder)
allow(finder).to receive(:execute).once.and_return([:execute_return_value])
- expect(subject.items.to_a).to eq([:execute_return_value])
+ expect(resolve_scope.items.to_a).to contain_exactly :execute_return_value
end
end
end
diff --git a/spec/graphql/resolvers/metrics/dashboards/annotation_resolver_spec.rb b/spec/graphql/resolvers/metrics/dashboards/annotation_resolver_spec.rb
index 75e0a816086..a4b957ef8e9 100644
--- a/spec/graphql/resolvers/metrics/dashboards/annotation_resolver_spec.rb
+++ b/spec/graphql/resolvers/metrics/dashboards/annotation_resolver_spec.rb
@@ -24,22 +24,8 @@ RSpec.describe Resolvers::Metrics::Dashboards::AnnotationResolver, feature_categ
environment.project.add_developer(current_user)
end
- before do
- stub_feature_flags(remove_monitor_metrics: false)
- end
-
context 'with annotation records' do
- context 'when metrics dashboard feature is unavailable' do
- before do
- stub_feature_flags(remove_monitor_metrics: true)
- end
-
- it 'returns nothing' do
- expect(resolve_annotations).to be_nil
- end
- end
-
- it 'returns [] all the time' do
+ it 'always returns an empty collection' do
expect(resolve_annotations).to be_empty
end
end
diff --git a/spec/graphql/resolvers/work_items_resolver_spec.rb b/spec/graphql/resolvers/work_items_resolver_spec.rb
index 6da62e3adb7..c856f990e7a 100644
--- a/spec/graphql/resolvers/work_items_resolver_spec.rb
+++ b/spec/graphql/resolvers/work_items_resolver_spec.rb
@@ -46,11 +46,6 @@ RSpec.describe Resolvers::WorkItemsResolver do
expect(resolve_items).to contain_exactly(item1, item2)
end
- it 'filters by state' do
- expect(resolve_items(state: 'opened')).to contain_exactly(item1)
- expect(resolve_items(state: 'closed')).to contain_exactly(item2)
- end
-
context 'when searching items' do
it_behaves_like 'graphql query for searching issuables' do
let_it_be(:parent) { project }
diff --git a/spec/graphql/types/base_argument_spec.rb b/spec/graphql/types/base_argument_spec.rb
index 8f5f2e08799..0ce6aa3667d 100644
--- a/spec/graphql/types/base_argument_spec.rb
+++ b/spec/graphql/types/base_argument_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Types::BaseArgument do
+RSpec.describe Types::BaseArgument, feature_category: :api do
let_it_be(:field) do
Types::BaseField.new(name: 'field', type: String, null: true)
end
diff --git a/spec/graphql/types/base_edge_spec.rb b/spec/graphql/types/base_edge_spec.rb
index b02ccbaffef..0cc0c838fac 100644
--- a/spec/graphql/types/base_edge_spec.rb
+++ b/spec/graphql/types/base_edge_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Types::BaseEdge do
+RSpec.describe Types::BaseEdge, feature_category: :api do
include GraphqlHelpers
let_it_be(:test_schema) do
diff --git a/spec/graphql/types/base_enum_spec.rb b/spec/graphql/types/base_enum_spec.rb
index 65a345052c7..db8fb877390 100644
--- a/spec/graphql/types/base_enum_spec.rb
+++ b/spec/graphql/types/base_enum_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Types::BaseEnum do
+RSpec.describe Types::BaseEnum, feature_category: :api do
describe '.from_rails_enum' do
let(:enum_type) { Class.new(described_class) }
let(:template) { "The name is '%{name}', James %{name}." }
diff --git a/spec/graphql/types/base_field_spec.rb b/spec/graphql/types/base_field_spec.rb
index 9f8a8717efb..831d36950db 100644
--- a/spec/graphql/types/base_field_spec.rb
+++ b/spec/graphql/types/base_field_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Types::BaseField do
+RSpec.describe Types::BaseField, feature_category: :api do
describe 'authorized?' do
let(:object) { double }
let(:current_user) { nil }
diff --git a/spec/graphql/types/base_object_spec.rb b/spec/graphql/types/base_object_spec.rb
index 3c42c708187..af0639e84d3 100644
--- a/spec/graphql/types/base_object_spec.rb
+++ b/spec/graphql/types/base_object_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Types::BaseObject do
+RSpec.describe Types::BaseObject, feature_category: :api do
include GraphqlHelpers
describe 'scoping items' do
diff --git a/spec/graphql/types/blame/blame_type_spec.rb b/spec/graphql/types/blame/blame_type_spec.rb
new file mode 100644
index 00000000000..15846130edb
--- /dev/null
+++ b/spec/graphql/types/blame/blame_type_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Blame::BlameType, feature_category: :source_code_management do
+ include GraphqlHelpers
+
+ specify { expect(described_class.graphql_name).to eq('Blame') }
+
+ specify do
+ expect(described_class).to have_graphql_fields(
+ :first_line,
+ :groups
+ ).at_least
+ end
+end
diff --git a/spec/graphql/types/blame/commit_data_type_spec.rb b/spec/graphql/types/blame/commit_data_type_spec.rb
new file mode 100644
index 00000000000..432f09bf8f7
--- /dev/null
+++ b/spec/graphql/types/blame/commit_data_type_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Blame::CommitDataType,
+ feature_category: :source_code_management do
+ include GraphqlHelpers
+
+ specify { expect(described_class.graphql_name).to eq('CommitData') }
+
+ specify do
+ expect(described_class).to have_graphql_fields(
+ :age_map_class,
+ :author_avatar,
+ :commit_author_link,
+ :commit_link,
+ :project_blame_link,
+ :time_ago_tooltip
+ ).at_least
+ end
+end
diff --git a/spec/graphql/types/blame/groups_type_spec.rb b/spec/graphql/types/blame/groups_type_spec.rb
new file mode 100644
index 00000000000..9e870e1cdc0
--- /dev/null
+++ b/spec/graphql/types/blame/groups_type_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Blame::GroupsType, feature_category: :source_code_management do
+ include GraphqlHelpers
+
+ specify { expect(described_class.graphql_name).to eq('Groups') }
+
+ specify do
+ expect(described_class).to have_graphql_fields(
+ :commit,
+ :commit_data,
+ :lineno,
+ :lines,
+ :span
+ ).at_least
+ end
+end
diff --git a/spec/graphql/types/ci/job_base_field_spec.rb b/spec/graphql/types/ci/job_base_field_spec.rb
new file mode 100644
index 00000000000..2d283ce854d
--- /dev/null
+++ b/spec/graphql/types/ci/job_base_field_spec.rb
@@ -0,0 +1,143 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::JobBaseField, feature_category: :runner_fleet do
+ describe 'authorized?' do
+ let_it_be(:current_user) { create(:user) }
+
+ let(:object) { double }
+ let(:ctx) { { current_user: current_user, current_field: current_field } }
+ let(:current_field) { instance_double(described_class, original_name: current_field_name.to_sym) }
+ let(:args) { {} }
+
+ subject(:field) do
+ described_class.new(name: current_field_name, type: GraphQL::Types::String, null: true, **args)
+ end
+
+ context 'when :job_field_authorization is specified' do
+ let(:ctx) { { current_user: current_user, current_field: current_field, job_field_authorization: :foo } }
+
+ context 'with public field' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:current_field_name) do
+ %i[allow_failure duration id kind status created_at finished_at queued_at queued_duration updated_at runner]
+ end
+
+ with_them do
+ it 'returns true without authorizing' do
+ is_expected.to be_authorized(object, nil, ctx)
+ end
+ end
+ end
+
+ context 'with private field' do
+ let(:current_field_name) { 'short_sha' }
+
+ context 'when permission is not allowed' do
+ it 'returns false' do
+ expect(Ability).to receive(:allowed?).with(current_user, :foo, object).and_return(false)
+
+ is_expected.not_to be_authorized(object, nil, ctx)
+ end
+ end
+
+ context 'when permission is allowed' do
+ it 'returns true' do
+ expect(Ability).to receive(:allowed?).with(current_user, :foo, object).and_return(true)
+
+ is_expected.to be_authorized(object, nil, ctx)
+ end
+ end
+ end
+ end
+
+ context 'when :job_field_authorization is not specified' do
+ let(:current_field_name) { 'status' }
+
+ it 'defaults to true' do
+ is_expected.to be_authorized(object, nil, ctx)
+ end
+
+ context 'when field is authorized' do
+ let(:args) { { authorize: :foo } }
+
+ it 'tests the field authorization' do
+ expect(Ability).to receive(:allowed?).with(current_user, :foo, object).and_return(false)
+
+ expect(field).not_to be_authorized(object, nil, ctx)
+ end
+
+ it 'tests the field authorization, if provided, when it succeeds' do
+ expect(Ability).to receive(:allowed?).with(current_user, :foo, object).and_return(true)
+
+ expect(field).to be_authorized(object, nil, ctx)
+ end
+ end
+
+ context 'with field resolver' do
+ let(:resolver) { Class.new }
+ let(:args) { { resolver_class: resolver } }
+
+ it 'only tests the resolver authorization if it authorizes_object?' do
+ is_expected.to be_authorized(object, nil, ctx)
+ end
+
+ context 'when resolver authorizes object' do
+ let(:resolver) do
+ Class.new do
+ include Gitlab::Graphql::Authorize::AuthorizeResource
+
+ authorizes_object!
+ end
+ end
+
+ it 'tests the resolver authorization, if provided' do
+ expect(resolver).to receive(:authorized?).with(object, ctx).and_return(false)
+
+ expect(field).not_to be_authorized(object, nil, ctx)
+ end
+
+ context 'when field is authorized' do
+ let(:args) { { authorize: :foo, resolver_class: resolver } }
+
+ it 'tests field authorization before resolver authorization, when field auth fails' do
+ expect(Ability).to receive(:allowed?).with(current_user, :foo, object).and_return(false)
+ expect(resolver).not_to receive(:authorized?)
+
+ expect(field).not_to be_authorized(object, nil, ctx)
+ end
+
+ it 'tests field authorization before resolver authorization, when field auth succeeds' do
+ expect(Ability).to receive(:allowed?).with(current_user, :foo, object).and_return(true)
+ expect(resolver).to receive(:authorized?).with(object, ctx).and_return(false)
+
+ expect(field).not_to be_authorized(object, nil, ctx)
+ end
+ end
+ end
+ end
+ end
+ end
+
+ describe '#resolve' do
+ context 'when late_extensions is given' do
+ it 'registers the late extensions after the regular extensions' do
+ extension_class = Class.new(GraphQL::Schema::Field::ConnectionExtension)
+ field = described_class.new(name: 'private_field', type: GraphQL::Types::String.connection_type,
+ null: true, late_extensions: [extension_class])
+
+ expect(field.extensions.last.class).to be(extension_class)
+ end
+ end
+ end
+
+ include_examples 'Gitlab-style deprecations' do
+ def subject(args = {})
+ base_args = { name: 'private_field', type: GraphQL::Types::String, null: true }
+
+ described_class.new(**base_args.merge(args))
+ end
+ end
+end
diff --git a/spec/graphql/types/ci/job_kind_enum_spec.rb b/spec/graphql/types/ci/job_kind_enum_spec.rb
index b48d20b71e2..a09cd89ec8b 100644
--- a/spec/graphql/types/ci/job_kind_enum_spec.rb
+++ b/spec/graphql/types/ci/job_kind_enum_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe GitlabSchema.types['CiJobKind'] do
it 'exposes some job type values' do
expect(described_class.values.keys).to match_array(
- (%w[BRIDGE BUILD])
+ %w[BRIDGE BUILD]
)
end
end
diff --git a/spec/graphql/types/ci/job_trace_type_spec.rb b/spec/graphql/types/ci/job_trace_type_spec.rb
index 71803aa9ece..69123445b8b 100644
--- a/spec/graphql/types/ci/job_trace_type_spec.rb
+++ b/spec/graphql/types/ci/job_trace_type_spec.rb
@@ -13,15 +13,187 @@ RSpec.describe GitlabSchema.types['CiJobTrace'], feature_category: :continuous_i
expect(described_class).to have_graphql_fields(*expected_fields)
end
- it 'shows the correct trace contents' do
- job.trace.set('BUILD TRACE')
+ describe 'htmlSummary' do
+ subject(:resolved_field) { resolve_field(:html_summary, job.trace, args: args) }
- expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
- expect(trace).to receive(:html).with(last_lines: 10).and_call_original
+ context 'when trace contains few lines' do
+ before do
+ job.trace.set('BUILD TRACE')
+ end
+
+ context 'when last_lines is set to 10' do
+ let(:args) { { last_lines: 10 } }
+
+ it 'shows the correct trace contents' do
+ expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
+ expect(trace).to receive(:html).with(last_lines: 10, max_size: 16384).and_call_original
+ end
+
+ is_expected.to eq('<span>BUILD TRACE</span>')
+ end
+ end
+ end
+
+ context 'when trace contains many lines' do
+ before do
+ job.trace.set((1..200).map { |i| "Line #{i}" }.join("\n"))
+ end
+
+ def expected_html_trace_contents(line_count)
+ "<span>#{((200 - (line_count - 1))..200).map { |i| "Line #{i}" }.join('<br/>')}</span>"
+ end
+
+ context 'when last_lines is not set' do
+ let(:args) { {} }
+
+ it 'shows the last 10 lines of trace contents' do
+ expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
+ expect(trace).to receive(:html).with(last_lines: 10, max_size: 16384).and_call_original
+ end
+
+ is_expected.to eq expected_html_trace_contents(10)
+ end
+ end
+
+ context 'when last_lines is set to a negative number' do
+ let(:args) { { last_lines: -10 } }
+
+ it 'shows the last line of trace contents' do
+ expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
+ expect(trace).to receive(:html).with(last_lines: 1, max_size: 16384).and_call_original
+ end
+
+ is_expected.to eq expected_html_trace_contents(1)
+ end
+ end
+
+ context 'when last_lines is set to 10' do
+ let(:args) { { last_lines: 10 } }
+
+ it 'shows the correct trace contents' do
+ expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
+ expect(trace).to receive(:html).with(last_lines: 10, max_size: 16384).and_call_original
+ end
+
+ is_expected.to eq expected_html_trace_contents(10)
+ end
+ end
+
+ context 'when last_lines is set to 150' do
+ let(:args) { { last_lines: 150 } }
+
+ it 'shows the last 100 lines of trace contents' do
+ expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
+ expect(trace).to receive(:html).with(last_lines: 100, max_size: 16384).and_call_original
+ end
+
+ is_expected.to eq expected_html_trace_contents(100)
+ end
+ end
+ end
+
+ context 'when trace contains long lines' do
+ before do
+ # Creates 20 lines of 1024 "a" characters each
+ job.trace.set((1..20).map { (1..1024).map { "a" }.join("") }.join("\n"))
+ end
+
+ context 'when the requested lines total less than 16KB' do
+ let(:args) { {} }
+
+ it 'shows the requested lines in full' do
+ expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
+ expect(trace).to receive(:html).with(last_lines: 10, max_size: 16384).and_call_original
+ end
+
+ is_expected.to eq "<span>#{(1..10).map { (1..1024).map { 'a' }.join('') }.join('<br/>')}</span>"
+ end
+ end
+
+ context 'when the requested lines total more than 16KB' do
+ let(:args) { { last_lines: 20 } }
+
+ it 'shows only the latest bytes' do
+ expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
+ expect(trace).to receive(:html).with(last_lines: 20, max_size: 16384).and_call_original
+ end
+
+ is_expected.to eq "<span>#{(1..1009).map { 'a' }.join('')}<br/>" \
+ "#{(1..15).map { (1..1024).map { 'a' }.join('') }.join('<br/>')}</span>"
+ end
+ end
+
+ context 'when FF graphql_job_trace_html_summary_max_size is disabled' do
+ before do
+ stub_feature_flags(graphql_job_trace_html_summary_max_size: false)
+ end
+
+ let(:args) { { last_lines: 20 } }
+
+ it 'does not limit the read size from the raw trace' do
+ expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
+ expect(trace).to receive(:html).with(last_lines: 20, max_size: nil).and_call_original
+ end
+
+ is_expected.to eq "<span>#{(1..20).map { (1..1024).map { 'a' }.join('') }.join('<br/>')}</span>"
+ end
+ end
+
+ context 'when trace is cut in middle of a line' do
+ let(:args) { {} }
+
+ before do
+ stub_const('Types::Ci::JobTraceType::MAX_SIZE_B', 1536)
+ end
+
+ it 'shows only the latest bytes' do
+ expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
+ expect(trace).to receive(:html).with(last_lines: 10, max_size: 1536).and_call_original
+ end
+
+ is_expected.to eq "<span>#{(1..511).map { 'a' }.join('')}<br/>#{(1..1024).map { 'a' }.join('')}</span>"
+ end
+ end
+
+ context 'when trace is cut at end of a line' do
+ let(:args) { {} }
+
+ before do
+ stub_const('Types::Ci::JobTraceType::MAX_SIZE_B', 2050)
+ end
+
+ it 'shows only the latest bytes' do
+ expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
+ expect(trace).to receive(:html).with(last_lines: 10, max_size: 2050).and_call_original
+ end
+
+ is_expected.to eq "<span><br/>#{(1..2).map { (1..1024).map { 'a' }.join('') }.join('<br/>')}</span>"
+ end
+ end
end
- resolved_field = resolve_field(:html_summary, job.trace)
+ context 'when trace contains multi-bytes UTF-8' do
+ before do
+ # Creates 20 lines of four pound symbols ("£"); a pound symbol is 2 bytes in UTF-8,
+ # so a byte-based max_size cutoff can land in the middle of a codepoint
+ job.trace.set((1..20).map { (1..4).map { "£" }.join("") }.join("\n"))
+ end
+
+ context 'when cut in the middle of a codepoint' do
+ before do
+ stub_const('Types::Ci::JobTraceType::MAX_SIZE_B', 5)
+ end
+
+ let(:args) { {} }
- expect(resolved_field).to eq("<span>BUILD TRACE</span>")
+ it 'shows a single "invalid utf-8" symbol' do
+ expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
+ expect(trace).to receive(:html).with(last_lines: 10, max_size: 5).and_call_original
+ end
+
+ is_expected.to eq "<span>�££</span>"
+ end
+ end
+ end
end
end
diff --git a/spec/graphql/types/ci/job_type_spec.rb b/spec/graphql/types/ci/job_type_spec.rb
index f31c0d5255c..a69c6f37ee1 100644
--- a/spec/graphql/types/ci/job_type_spec.rb
+++ b/spec/graphql/types/ci/job_type_spec.rb
@@ -32,6 +32,7 @@ RSpec.describe Types::Ci::JobType, feature_category: :continuous_integration do
needs
pipeline
playable
+ previousStageJobs
previousStageJobsOrNeeds
project
queued_at
diff --git a/spec/graphql/types/issue_type_spec.rb b/spec/graphql/types/issue_type_spec.rb
index 6c4e68fba6b..d4d0eff9adb 100644
--- a/spec/graphql/types/issue_type_spec.rb
+++ b/spec/graphql/types/issue_type_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe GitlabSchema.types['Issue'] do
confidential hidden discussion_locked upvotes downvotes merge_requests_count user_notes_count user_discussions_count web_path web_url relative_position
emails_disabled emails_enabled subscribed time_estimate total_time_spent human_time_estimate human_total_time_spent closed_at created_at updated_at task_completion_status
design_collection alert_management_alert alert_management_alerts severity current_user_todos moved moved_to
- closed_as_duplicate_of create_note_email timelogs project_id customer_relations_contacts escalation_status]
+ closed_as_duplicate_of create_note_email timelogs project_id customer_relations_contacts escalation_status external_author]
fields.each do |field_name|
expect(described_class).to have_graphql_field(field_name)
diff --git a/spec/graphql/types/label_type_spec.rb b/spec/graphql/types/label_type_spec.rb
index 427b5d2dcef..d68945b01e6 100644
--- a/spec/graphql/types/label_type_spec.rb
+++ b/spec/graphql/types/label_type_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe GitlabSchema.types['Label'] do
:description_html,
:title,
:color,
+ :lock_on_merge,
:text_color,
:created_at,
:updated_at
diff --git a/spec/graphql/types/merge_request_type_spec.rb b/spec/graphql/types/merge_request_type_spec.rb
index bd271da55a9..9742908edf9 100644
--- a/spec/graphql/types/merge_request_type_spec.rb
+++ b/spec/graphql/types/merge_request_type_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe GitlabSchema.types['MergeRequest'], feature_category: :code_revie
commit_count current_user_todos conflicts auto_merge_enabled approved_by source_branch_protected
squash_on_merge available_auto_merge_strategies
has_ci mergeable commits committers commits_without_merge_commits squash security_auto_fix default_squash_commit_message
- auto_merge_strategy merge_user award_emoji prepared_at
+ auto_merge_strategy merge_user award_emoji prepared_at codequality_reports_comparer supports_lock_on_merge
]
expect(described_class).to have_graphql_fields(*expected_fields).at_least
diff --git a/spec/graphql/types/organizations/group_sort_enum_spec.rb b/spec/graphql/types/organizations/group_sort_enum_spec.rb
new file mode 100644
index 00000000000..57915d95c45
--- /dev/null
+++ b/spec/graphql/types/organizations/group_sort_enum_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['OrganizationGroupSort'], feature_category: :cell do
+ let(:sort_values) do
+ %w[
+ ID_ASC
+ ID_DESC
+ NAME_ASC
+ NAME_DESC
+ PATH_ASC
+ PATH_DESC
+ UPDATED_AT_ASC
+ UPDATED_AT_DESC
+ CREATED_AT_ASC
+ CREATED_AT_DESC
+ ]
+ end
+
+ specify { expect(described_class.graphql_name).to eq('OrganizationGroupSort') }
+
+ it 'exposes all the organization groups sort values' do
+ expect(described_class.values.keys).to include(*sort_values)
+ end
+end
diff --git a/spec/graphql/types/organizations/organization_type_spec.rb b/spec/graphql/types/organizations/organization_type_spec.rb
new file mode 100644
index 00000000000..26d7c10a715
--- /dev/null
+++ b/spec/graphql/types/organizations/organization_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['Organization'], feature_category: :cell do
+ let(:expected_fields) { %w[groups id name organization_users path] }
+
+ specify { expect(described_class.graphql_name).to eq('Organization') }
+ specify { expect(described_class).to require_graphql_authorizations(:read_organization) }
+ specify { expect(described_class).to have_graphql_fields(*expected_fields) }
+end
diff --git a/spec/graphql/types/organizations/organization_user_type_spec.rb b/spec/graphql/types/organizations/organization_user_type_spec.rb
new file mode 100644
index 00000000000..876080b0f15
--- /dev/null
+++ b/spec/graphql/types/organizations/organization_user_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['OrganizationUser'], feature_category: :cell do
+ let(:expected_fields) { %w[badges id user] }
+
+ specify { expect(described_class.graphql_name).to eq('OrganizationUser') }
+ specify { expect(described_class).to require_graphql_authorizations(:read_organization_user) }
+ specify { expect(described_class).to have_graphql_fields(*expected_fields) }
+end
diff --git a/spec/graphql/types/permission_types/work_item_spec.rb b/spec/graphql/types/permission_types/work_item_spec.rb
index 3ee42e2e3ad..cdbf94304de 100644
--- a/spec/graphql/types/permission_types/work_item_spec.rb
+++ b/spec/graphql/types/permission_types/work_item_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Types::PermissionTypes::WorkItem do
it do
expected_permissions = [
:read_work_item, :update_work_item, :delete_work_item, :admin_work_item,
- :admin_parent_link, :set_work_item_metadata, :create_note
+ :admin_parent_link, :set_work_item_metadata, :create_note, :admin_work_item_link
]
expected_permissions.each do |permission|
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index cd9a0642ae6..a20a4767bb5 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -892,22 +892,25 @@ RSpec.describe GitlabSchema.types['Project'] do
subject { GitlabSchema.execute(query, context: { current_user: user }).as_json }
- before do
+ before_all do
fork_reporter.add_reporter(user)
fork_developer.add_developer(user)
fork_group_developer.group.add_developer(user)
+ fork_private.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
end
it 'contains all forks' do
- expect(forks.count).to eq(5)
+ expect(forks.count).to eq(4)
end
context 'with minimum_access_level DEVELOPER' do
let(:minimum_access_level) { '(minimumAccessLevel: DEVELOPER)' }
it 'contains forks with developer access' do
- expect(forks).to contain_exactly(a_hash_including('fullPath' => fork_developer.full_path),
-a_hash_including('fullPath' => fork_group_developer.full_path))
+ expect(forks).to contain_exactly(
+ a_hash_including('fullPath' => fork_developer.full_path),
+ a_hash_including('fullPath' => fork_group_developer.full_path)
+ )
end
context 'when current user is not set' do
diff --git a/spec/graphql/types/query_type_spec.rb b/spec/graphql/types/query_type_spec.rb
index 100ecc94f35..8bda738751d 100644
--- a/spec/graphql/types/query_type_spec.rb
+++ b/spec/graphql/types/query_type_spec.rb
@@ -23,6 +23,16 @@ RSpec.describe GitlabSchema.types['Query'], feature_category: :shared do
end
end
+ describe 'organization field' do
+ subject { described_class.fields['organization'] }
+
+ it 'finds organization by path' do
+ is_expected.to have_graphql_arguments(:id)
+ is_expected.to have_graphql_type(Types::Organizations::OrganizationType)
+ is_expected.to have_graphql_resolver(Resolvers::Organizations::OrganizationResolver)
+ end
+ end
+
describe 'project field' do
subject { described_class.fields['project'] }
diff --git a/spec/graphql/types/repository/blob_type_spec.rb b/spec/graphql/types/repository/blob_type_spec.rb
index 9537fca7322..104093bd909 100644
--- a/spec/graphql/types/repository/blob_type_spec.rb
+++ b/spec/graphql/types/repository/blob_type_spec.rb
@@ -30,6 +30,7 @@ RSpec.describe Types::Repository::BlobType, feature_category: :source_code_manag
:gitpod_blob_url,
:find_file_path,
:blame_path,
+ :blame,
:history_path,
:permalink_path,
:environment_formatted_external_url,
diff --git a/spec/graphql/types/security/codequality_reports_comparer/degradation_type_spec.rb b/spec/graphql/types/security/codequality_reports_comparer/degradation_type_spec.rb
new file mode 100644
index 00000000000..9ae1b3fcf94
--- /dev/null
+++ b/spec/graphql/types/security/codequality_reports_comparer/degradation_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['CodequalityReportsComparerReportDegradation'], feature_category: :code_quality do
+ specify { expect(described_class.graphql_name).to eq('CodequalityReportsComparerReportDegradation') }
+
+ it 'has expected fields' do
+ expected_fields = %i[description fingerprint severity file_path line web_url engine_name]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/security/codequality_reports_comparer/report_type_spec.rb b/spec/graphql/types/security/codequality_reports_comparer/report_type_spec.rb
new file mode 100644
index 00000000000..a1788126f1b
--- /dev/null
+++ b/spec/graphql/types/security/codequality_reports_comparer/report_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['CodequalityReportsComparerReport'], feature_category: :code_quality do
+ specify { expect(described_class.graphql_name).to eq('CodequalityReportsComparerReport') }
+
+ it 'has expected fields' do
+ expected_fields = %i[status new_errors resolved_errors existing_errors summary]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/security/codequality_reports_comparer/status_enum_spec.rb b/spec/graphql/types/security/codequality_reports_comparer/status_enum_spec.rb
new file mode 100644
index 00000000000..6e5bdd1e91d
--- /dev/null
+++ b/spec/graphql/types/security/codequality_reports_comparer/status_enum_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['CodequalityReportsComparerReportStatus'], feature_category: :code_quality do
+ specify { expect(described_class.graphql_name).to eq('CodequalityReportsComparerReportStatus') }
+
+ it 'exposes all codequality report status values' do
+ expect(described_class.values.keys).to contain_exactly('SUCCESS', 'FAILED', 'NOT_FOUND')
+ end
+end
diff --git a/spec/graphql/types/security/codequality_reports_comparer/summary_type_spec.rb b/spec/graphql/types/security/codequality_reports_comparer/summary_type_spec.rb
new file mode 100644
index 00000000000..41de93b27ad
--- /dev/null
+++ b/spec/graphql/types/security/codequality_reports_comparer/summary_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['CodequalityReportsComparerReportSummary'], feature_category: :code_quality do
+ specify { expect(described_class.graphql_name).to eq('CodequalityReportsComparerReportSummary') }
+
+ it 'has expected fields' do
+ expected_fields = %i[total resolved errored]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/security/codequality_reports_comparer_type_spec.rb b/spec/graphql/types/security/codequality_reports_comparer_type_spec.rb
new file mode 100644
index 00000000000..02f7a9d6925
--- /dev/null
+++ b/spec/graphql/types/security/codequality_reports_comparer_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['CodequalityReportsComparer'], feature_category: :code_quality do
+ specify { expect(described_class.graphql_name).to eq('CodequalityReportsComparer') }
+
+ it 'has expected fields' do
+ expect(described_class).to have_graphql_fields(:report)
+ end
+end
diff --git a/spec/helpers/admin/abuse_reports_helper_spec.rb b/spec/helpers/admin/abuse_reports_helper_spec.rb
index 6a7630dc76a..d6f102682ba 100644
--- a/spec/helpers/admin/abuse_reports_helper_spec.rb
+++ b/spec/helpers/admin/abuse_reports_helper_spec.rb
@@ -25,10 +25,14 @@ RSpec.describe Admin::AbuseReportsHelper, feature_category: :insider_threat do
describe '#abuse_report_data' do
let(:report) { build_stubbed(:abuse_report) }
- subject(:data) { helper.abuse_report_data(report)[:abuse_report_data] }
+ subject(:data) { helper.abuse_report_data(report) }
it 'has the expected attributes' do
- expect(data).to include('user', 'reporter', 'report')
+ expect(data[:abuse_report_data]).to include('user', 'reporter', 'report')
+ end
+
+ it 'includes path to abuse reports list page' do
+ expect(data[:abuse_reports_list_path]).to eq admin_abuse_reports_path
end
end
end
diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb
index ad81c125055..757f832faa4 100644
--- a/spec/helpers/application_helper_spec.rb
+++ b/spec/helpers/application_helper_spec.rb
@@ -679,15 +679,75 @@ RSpec.describe ApplicationHelper do
end
describe '#page_class' do
+ let_it_be(:user) { build(:user) }
+
subject(:page_class) do
helper.page_class.flatten
end
- before do
- allow(helper).to receive(:current_user).and_return(nil)
+ describe 'with-header' do
+ using RSpec::Parameterized::TableSyntax
+
+ before do
+ allow(helper).to receive(:show_super_sidebar?).and_return(show_super_sidebar)
+ allow(helper).to receive(:current_user).and_return(current_user)
+ end
+
+ where(:show_super_sidebar, :current_user) do
+ true | nil
+ false | ref(:user)
+ false | nil
+ end
+
+ with_them do
+ it { is_expected.to include('with-header') }
+ end
+
+ context 'when with-header should not be shown' do
+ let(:show_super_sidebar) { true }
+ let(:current_user) { user }
+
+ it { is_expected.not_to include('with-header') }
+ end
end
- it { is_expected.not_to include('logged-out-marketing-header') }
+ describe 'with-top-bar' do
+ context 'when show_super_sidebar? is true' do
+ context 'when @hide_top_bar_padding is false' do
+ before do
+ allow(helper).to receive(:show_super_sidebar?).and_return(true)
+ helper.instance_variable_set(:@hide_top_bar_padding, false)
+ end
+
+ it { is_expected.to include('with-top-bar') }
+ end
+
+ context 'when @hide_top_bar_padding is true' do
+ before do
+ allow(helper).to receive(:show_super_sidebar?).and_return(true)
+ helper.instance_variable_set(:@hide_top_bar_padding, true)
+ end
+
+ it { is_expected.not_to include('with-top-bar') }
+ end
+ end
+
+ context 'when show_super_sidebar? is false' do
+ before do
+ allow(helper).to receive(:show_super_sidebar?).and_return(false)
+ end
+
+ it { is_expected.not_to include('with-top-bar') }
+ end
+ end
+
+ describe 'logged-out-marketing-header' do
+ before do
+ allow(helper).to receive(:current_user).and_return(nil)
+ end
+
+ it { is_expected.not_to include('logged-out-marketing-header') }
+ end
end
describe '#dispensable_render' do
@@ -891,4 +951,38 @@ RSpec.describe ApplicationHelper do
end
end
end
+
+ describe '#controller_full_path' do
+ let(:path) { 'some_path' }
+ let(:action) { 'show' }
+
+ before do
+ allow(helper.controller).to receive(:controller_path).and_return(path)
+ allow(helper.controller).to receive(:action_name).and_return(action)
+ end
+
+ context 'when the action is create' do
+ let(:action) { 'create' }
+
+ it 'transforms to "new" path' do
+ expect(helper.controller_full_path).to eq("#{path}/new")
+ end
+ end
+
+ context 'when the action is update' do
+ let(:action) { 'update' }
+
+ it 'transforms to "edit" path' do
+ expect(helper.controller_full_path).to eq("#{path}/edit")
+ end
+ end
+
+ context 'when the action is show' do
+ let(:action) { 'show' }
+
+ it 'passes through' do
+ expect(helper.controller_full_path).to eq("#{path}/#{action}")
+ end
+ end
+ end
end
diff --git a/spec/helpers/artifacts_helper_spec.rb b/spec/helpers/artifacts_helper_spec.rb
index 7c577cbf11c..30f9421954e 100644
--- a/spec/helpers/artifacts_helper_spec.rb
+++ b/spec/helpers/artifacts_helper_spec.rb
@@ -17,8 +17,7 @@ RSpec.describe ArtifactsHelper, feature_category: :build_artifacts do
it 'returns expected data' do
expect(subject).to include({
project_path: project.full_path,
- project_id: project.id,
- artifacts_management_feedback_image_path: match_asset_path('illustrations/chat-bubble-sm.svg')
+ project_id: project.id
})
end
diff --git a/spec/helpers/button_helper_spec.rb b/spec/helpers/button_helper_spec.rb
index a59f172061e..a7153ca4006 100644
--- a/spec/helpers/button_helper_spec.rb
+++ b/spec/helpers/button_helper_spec.rb
@@ -149,7 +149,7 @@ RSpec.describe ButtonHelper do
describe 'clipboard_button' do
include IconsHelper
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
let(:project) { build_stubbed(:project) }
def element(data = {})
@@ -164,7 +164,9 @@ RSpec.describe ButtonHelper do
context 'with default options' do
context 'when no `text` attribute is provided' do
it 'shows copy to clipboard button with default configuration and no text set to copy' do
- expect(element.attr('class')).to eq('btn btn-clipboard gl-button btn-default-tertiary btn-icon btn-sm')
+ expect(element.attr('class')).to match('btn-sm')
+ expect(element.attr('class')).to match('btn-default')
+ expect(element.attr('class')).to match('btn-default-tertiary')
expect(element.attr('title')).to eq('Copy')
expect(element.attr('type')).to eq('button')
expect(element.attr('aria-label')).to eq('Copy')
@@ -174,9 +176,9 @@ RSpec.describe ButtonHelper do
expect(element.attr('data-container')).to eq('body')
expect(element.attr('data-clipboard-text')).to eq(nil)
expect(element.attr('itemprop')).to eq(nil)
- expect(element.inner_text).to eq("")
+ expect(element.inner_text.strip).to eq('')
- expect(element.to_html).to include sprite_icon('copy-to-clipboard', css_class: 'gl-icon')
+ expect(element.to_html).to match('svg#copy-to-clipboard')
end
end
@@ -195,7 +197,7 @@ RSpec.describe ButtonHelper do
context 'with `button_text` attribute provided' do
it 'shows copy to clipboard button with provided `button_text` as button label' do
- expect(element(button_text: 'Copy text').inner_text).to eq('Copy text')
+ expect(element(button_text: 'Copy text').inner_text.strip).to eq('Copy text')
end
it 'adds `gl-button-icon` class to icon' do
@@ -213,6 +215,92 @@ RSpec.describe ButtonHelper do
context 'with `hide_button_icon` attribute provided' do
it 'shows copy to clipboard button without the button icon' do
+ expect(element(hide_button_icon: true).to_html).not_to match('svg#copy-to-clipboard')
+ end
+ end
+
+ context 'with `itemprop` attribute provided' do
+ it 'shows copy to clipboard button with `itemprop` attribute' do
+ expect(element(itemprop: 'identifier').attr('itemprop')).to eq('identifier')
+ end
+ end
+
+ context 'when variant option is provided' do
+ it 'inherits the correct ButtonComponent class' do
+ expect(element(variant: :confirm).attr('class')).to match('btn-confirm-tertiary')
+ end
+ end
+
+ context 'when category option is provided' do
+ it 'inherits the correct ButtonComponent class' do
+ expect(element(category: :secondary).attr('class')).to match('btn-default-secondary')
+ end
+ end
+
+ context 'when size option is provided' do
+ it 'inherits the correct ButtonComponent class' do
+ expect(element(size: :medium).attr('class')).to match('btn-md')
+ end
+ end
+ end
+
+ describe 'deprecated_clipboard_button' do
+ include IconsHelper
+
+ let(:user) { create(:user) }
+ let(:project) { build_stubbed(:project) }
+
+ def element(data = {})
+ element = helper.deprecated_clipboard_button(data)
+ Nokogiri::HTML::DocumentFragment.parse(element).first_element_child
+ end
+
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ end
+
+ context 'with default options' do
+ context 'when no `text` attribute is provided' do
+ it 'shows copy to clipboard button with default configuration and no text set to copy' do
+ expect(element.attr('class')).to eq('btn btn-clipboard gl-button btn-default-tertiary btn-icon btn-sm')
+ expect(element.attr('title')).to eq('Copy')
+ expect(element.attr('type')).to eq('button')
+ expect(element.attr('aria-label')).to eq('Copy')
+ expect(element.attr('aria-live')).to eq('polite')
+ expect(element.attr('data-toggle')).to eq('tooltip')
+ expect(element.attr('data-placement')).to eq('bottom')
+ expect(element.attr('data-container')).to eq('body')
+ expect(element.attr('data-clipboard-text')).to eq(nil)
+ expect(element.attr('itemprop')).to eq(nil)
+ expect(element.inner_text).to eq("")
+
+ expect(element.to_html).to include sprite_icon('copy-to-clipboard', css_class: 'gl-icon')
+ end
+ end
+
+ context 'when `text` attribute is provided' do
+ it 'shows copy to clipboard button with provided `text` to copy' do
+ expect(element(text: 'Hello World!').attr('data-clipboard-text')).to eq('Hello World!')
+ end
+ end
+
+ context 'when `title` attribute is provided' do
+ it 'shows copy to clipboard button with provided `title` as tooltip' do
+ expect(element(title: 'Copy to my clipboard!').attr('aria-label')).to eq('Copy to my clipboard!')
+ end
+ end
+ end
+
+ context 'with `hide_tooltip` attribute provided' do
+ it 'shows copy to clipboard button without tooltip support' do
+ expect(element(hide_tooltip: true).attr('data-placement')).to eq(nil)
+ expect(element(hide_tooltip: true).attr('data-toggle')).to eq(nil)
+ expect(element(hide_tooltip: true).attr('data-container')).to eq(nil)
+ end
+ end
+
+ context 'with `hide_button_icon` attribute provided' do
+ it 'shows copy to clipboard button without the button icon' do
expect(element(hide_button_icon: true).to_html).not_to include sprite_icon('duplicate')
end
end
diff --git a/spec/helpers/ci/status_helper_spec.rb b/spec/helpers/ci/status_helper_spec.rb
index 0af396149ef..66c821df8f1 100644
--- a/spec/helpers/ci/status_helper_spec.rb
+++ b/spec/helpers/ci/status_helper_spec.rb
@@ -20,29 +20,6 @@ RSpec.describe Ci::StatusHelper do
end
end
- describe '#ci_text_for_status' do
- context 'when status is manual' do
- it 'changes the status to blocked' do
- expect(helper.ci_text_for_status('manual'))
- .to eq 'blocked'
- end
- end
-
- context 'when status is success' do
- it 'changes the status to passed' do
- expect(helper.ci_text_for_status('success'))
- .to eq 'passed'
- end
- end
-
- context 'when status is something else' do
- it 'returns status unchanged' do
- expect(helper.ci_text_for_status('some-status'))
- .to eq 'some-status'
- end
- end
- end
-
describe "#pipeline_status_cache_key" do
it "builds a cache key for pipeline status" do
pipeline_status = Gitlab::Cache::Ci::ProjectPipelineStatus.new(
diff --git a/spec/helpers/environment_helper_spec.rb b/spec/helpers/environment_helper_spec.rb
index b9316e46d9d..1383bf34881 100644
--- a/spec/helpers/environment_helper_spec.rb
+++ b/spec/helpers/environment_helper_spec.rb
@@ -22,6 +22,15 @@ RSpec.describe EnvironmentHelper, feature_category: :environment_management do
end
end
+ context 'when deploying from a bridge' do
+ it 'renders a span tag' do
+ deploy = build(:deployment, deployable: create(:ci_bridge), status: :success)
+ html = helper.render_deployment_status(deploy)
+
+ expect(html).to have_css('span.ci-status.ci-success')
+ end
+ end
+
context 'for a blocked deployment' do
subject { helper.render_deployment_status(deployment) }
diff --git a/spec/helpers/environments_helper_spec.rb b/spec/helpers/environments_helper_spec.rb
index c0c729f2b67..6624404bc49 100644
--- a/spec/helpers/environments_helper_spec.rb
+++ b/spec/helpers/environments_helper_spec.rb
@@ -34,12 +34,10 @@ RSpec.describe EnvironmentsHelper, feature_category: :environment_management do
'project_path' => project_path(project),
'tags_path' => project_tags_path(project),
'has_metrics' => environment.has_metrics?.to_s,
- 'external_dashboard_url' => nil,
'environment_state' => environment.state,
'custom_metrics_path' => project_prometheus_metrics_path(project),
'validate_query_path' => validate_query_project_prometheus_metrics_path(project),
'custom_metrics_available' => 'true',
- 'custom_dashboard_base_path' => Gitlab::Metrics::Dashboard::RepoDashboardFinder::DASHBOARD_ROOT,
'operations_settings_path' => project_settings_operations_path(project),
'can_access_operations_settings' => 'true'
)
@@ -59,16 +57,6 @@ RSpec.describe EnvironmentsHelper, feature_category: :environment_management do
end
end
- context 'with metrics_setting' do
- before do
- create(:project_metrics_setting, project: project, external_dashboard_url: 'http://gitlab.com')
- end
-
- it 'adds external_dashboard_url' do
- expect(metrics_data['external_dashboard_url']).to eq('http://gitlab.com')
- end
- end
-
context 'when the environment is not available' do
before do
environment.stop
diff --git a/spec/helpers/icons_helper_spec.rb b/spec/helpers/icons_helper_spec.rb
index 2f1682e9194..106184b5e4a 100644
--- a/spec/helpers/icons_helper_spec.rb
+++ b/spec/helpers/icons_helper_spec.rb
@@ -53,6 +53,13 @@ RSpec.describe IconsHelper do
.to eq "<svg class=\"s72 icon-danger\" data-testid=\"#{icon_name}-icon\"><use href=\"#{icons_path}##{icon_name}\"></use></svg>"
end
+ it 'returns a file icon' do
+ file_icons_path = ActionController::Base.helpers.image_path("file_icons/file_icons.svg")
+
+ expect(sprite_icon('coffee', file_icon: true).to_s)
+ .to eq "<svg class=\"s#{IconsHelper::DEFAULT_ICON_SIZE}\" data-testid=\"coffee-icon\"><use href=\"#{file_icons_path}#coffee\"></use></svg>"
+ end
+
describe 'non existing icon' do
non_existing = 'non_existing_icon_sprite'
@@ -60,20 +67,24 @@ RSpec.describe IconsHelper do
stub_rails_env('development')
expect { sprite_icon(non_existing) }.to raise_error(ArgumentError, /is not a known icon/)
+ expect { sprite_icon(non_existing, file_icon: true) }.to raise_error(ArgumentError, /is not a known icon/)
end
it 'raises in test mode' do
stub_rails_env('test')
expect { sprite_icon(non_existing) }.to raise_error(ArgumentError, /is not a known icon/)
+ expect { sprite_icon(non_existing, file_icon: true) }.to raise_error(ArgumentError, /is not a known icon/)
end
it 'does not raise in production mode' do
stub_rails_env('production')
expect_file_not_to_read(Rails.root.join('node_modules/@gitlab/svgs/dist/icons.json'))
+ expect_file_not_to_read(Rails.root.join('node_modules/@gitlab/svgs/dist/file_icons/file_icons.json'))
expect { sprite_icon(non_existing) }.not_to raise_error
+ expect { sprite_icon(non_existing, file_icon: true) }.not_to raise_error
end
end
end
diff --git a/spec/helpers/integrations_helper_spec.rb b/spec/helpers/integrations_helper_spec.rb
index 6c5a489e664..7c626743f3a 100644
--- a/spec/helpers/integrations_helper_spec.rb
+++ b/spec/helpers/integrations_helper_spec.rb
@@ -172,6 +172,26 @@ RSpec.describe IntegrationsHelper, feature_category: :integrations do
it { is_expected.to include(*fields) }
end
+ describe '#serialize_integration' do
+ subject { helper.send(:serialize_integration, integration) }
+
+ let(:integration) { build(:jenkins_integration) }
+
+ it 'serializes the integration' do
+ is_expected.to match(a_hash_including(
+ id: nil,
+ active: true,
+ configured: false,
+ title: 'Jenkins',
+ description: _('Run CI/CD pipelines with Jenkins.'),
+ updated_at: nil,
+ edit_path: '/admin/application_settings/integrations/jenkins/edit',
+ name: 'jenkins',
+ icon: nil
+ ))
+ end
+ end
+
describe '#scoped_reset_integration_path' do
let(:integration) { build_stubbed(:jira_integration) }
let(:group) { nil }
diff --git a/spec/helpers/invite_members_helper_spec.rb b/spec/helpers/invite_members_helper_spec.rb
index abf8b65dc1e..7cf9ffa0621 100644
--- a/spec/helpers/invite_members_helper_spec.rb
+++ b/spec/helpers/invite_members_helper_spec.rb
@@ -65,62 +65,6 @@ RSpec.describe InviteMembersHelper do
expect(helper.common_invite_modal_dataset(project)).to include(attributes)
end
-
- context 'with tasks_to_be_done' do
- using RSpec::Parameterized::TableSyntax
-
- subject(:output) { helper.common_invite_modal_dataset(source) }
-
- shared_examples_for 'including the tasks to be done attributes' do
- it 'includes the tasks to be done attributes when expected' do
- if expected?
- expect(output[:tasks_to_be_done_options]).to eq(
- [
- { value: :code, text: 'Create/import code into a project (repository)' },
- { value: :ci, text: 'Set up CI/CD pipelines to build, test, deploy, and monitor code' },
- { value: :issues, text: 'Create/import issues (tickets) to collaborate on ideas and plan work' }
- ].to_json
- )
- expect(output[:projects]).to eq([{ id: project.id, title: project.title }].to_json)
- expect(output[:new_project_path]).to eq(
- source.is_a?(Project) ? '' : new_project_path(namespace_id: group.id)
- )
- else
- expect(output[:tasks_to_be_done_options]).to be_nil
- expect(output[:projects]).to be_nil
- expect(output[:new_project_path]).to be_nil
- end
- end
- end
-
- context 'when inviting members for tasks' do
- where(:open_modal_param?, :logged_in?, :expected?) do
- true | true | true
- true | false | false
- false | true | false
- false | false | false
- end
-
- with_them do
- before do
- allow(helper).to receive(:current_user).and_return(developer) if logged_in?
- allow(helper).to receive(:params).and_return({ open_modal: 'invite_members_for_task' }) if open_modal_param?
- end
-
- context 'when the source is a project' do
- let_it_be(:source) { project }
-
- it_behaves_like 'including the tasks to be done attributes'
- end
-
- context 'when the source is a group' do
- let_it_be(:source) { group }
-
- it_behaves_like 'including the tasks to be done attributes'
- end
- end
- end
- end
end
context 'with project' do
diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb
index 7b5537c54cc..9fe820ccae9 100644
--- a/spec/helpers/issuables_helper_spec.rb
+++ b/spec/helpers/issuables_helper_spec.rb
@@ -134,109 +134,6 @@ RSpec.describe IssuablesHelper, feature_category: :team_planning do
end
end
- describe '#issuable_meta', time_travel_to: '2022-08-05 00:00:00 +0000' do
- let(:user) { create(:user) }
-
- let_it_be(:project) { create(:project) }
-
- describe 'Issuable created status text' do
- subject { helper.issuable_meta(issuable, project) }
-
- context 'when issuable is a work item and flag is off' do
- using RSpec::Parameterized::TableSyntax
-
- before do
- stub_feature_flags(work_items: false)
- end
-
- where(:issuable_type, :text) do
- :issue | 'Issue created Aug 05, 2022 by'
- :incident | 'Incident created Aug 05, 2022 by'
- end
-
- let(:issuable) { build_stubbed(:work_item, issuable_type, created_at: Date.current) }
-
- with_them do
- it { is_expected.to have_content(text) }
- end
- end
-
- context 'when issuable is a work item and flag is on' do
- using RSpec::Parameterized::TableSyntax
-
- where(:issuable_type, :text) do
- :issue | 'Issue created Aug 05, 2022 by'
- :incident | 'Incident created Aug 05, 2022 by'
- end
-
- let(:issuable) { build_stubbed(:work_item, issuable_type, created_at: Date.current) }
-
- with_them do
- it { is_expected.to have_content(text) }
- end
- end
-
- context 'when issuable is not a work item' do
- let(:issuable) { build_stubbed(:merge_request, created_at: Date.current) }
-
- it { is_expected.to have_content('Created Aug 05, 2022') }
- end
- end
-
- describe 'author status' do
- let(:issuable) { build(:merge_request, source_project: project, author: user, created_at: '2020-01-30') }
-
- it 'displays an emoji if the user status is set' do
- user.status = UserStatus.new(message: 'lol')
- content = helper.issuable_meta(issuable, project)
- expect(content).to match('<span class="user-status-emoji has-tooltip" title="lol" data-html="true" data-placement="top">')
- expect(content).to match('<gl-emoji title="speech balloon" data-name="speech_balloon" data-unicode-version="6.0">')
- end
-
- it 'does not displays an emoji if the user status is not set' do
- user.status = UserStatus.new
- content = helper.issuable_meta(issuable, project)
- expect(content).not_to match('class="user-status-emoji has-tooltip"')
- expect(content).not_to match('gl-emoji')
- end
- end
-
- describe 'service desk reply to email address' do
- let(:email) { 'user@example.com' }
- let(:obfuscated_email) { 'us*****@e*****.c**' }
- let(:service_desk_issue) { build_stubbed(:issue, project: project, author: User.support_bot, service_desk_reply_to: email) }
-
- subject { helper.issuable_meta(service_desk_issue, project) }
-
- context 'with anonymous user' do
- before do
- allow(helper).to receive(:current_user).and_return(nil)
- end
-
- it { is_expected.to have_content(obfuscated_email) }
- end
-
- context 'with signed in user' do
- context 'when user has no role in project' do
- before do
- allow(helper).to receive(:current_user).and_return(user)
- end
-
- it { is_expected.to have_content(obfuscated_email) }
- end
-
- context 'when user has reporter role in project' do
- before do
- project.add_reporter(user)
- allow(helper).to receive(:current_user).and_return(user)
- end
-
- it { is_expected.to have_content(email) }
- end
- end
- end
- end
-
describe '#issuables_state_counter_text' do
let_it_be(:user) { create(:user) }
@@ -348,80 +245,74 @@ RSpec.describe IssuablesHelper, feature_category: :team_planning do
end
end
- describe '#updated_at_by' do
+ describe '#issuable_initial_data' do
let(:user) { create(:user) }
- let(:unedited_issuable) { create(:issue) }
- let(:edited_issuable) { create(:issue, last_edited_by: user, created_at: 3.days.ago, updated_at: 1.day.ago, last_edited_at: 2.days.ago) }
- let(:edited_updated_at_by) do
- {
- updatedAt: edited_issuable.last_edited_at.to_time.iso8601,
- updatedBy: {
- name: user.name,
- path: user_path(user)
- }
- }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:can?).and_return(true)
+ stub_commonmark_sourcepos_disabled
end
- it { expect(helper.updated_at_by(unedited_issuable)).to eq({}) }
- it { expect(helper.updated_at_by(edited_issuable)).to eq(edited_updated_at_by) }
+ context 'when issue' do
+ it 'returns the correct data for an issue' do
+ issue = create(:issue, author: user, description: 'issue text')
+ @project = issue.project
+
+ base_data = {
+ endpoint: "/#{@project.full_path}/-/issues/#{issue.iid}",
+ updateEndpoint: "/#{@project.full_path}/-/issues/#{issue.iid}.json",
+ canUpdate: true,
+ canDestroy: true,
+ issuableRef: "##{issue.iid}",
+ markdownPreviewPath: "/#{@project.full_path}/preview_markdown?target_id=#{issue.iid}&target_type=Issue",
+ markdownDocsPath: '/help/user/markdown',
+ lockVersion: issue.lock_version,
+ state: issue.state,
+ issuableTemplateNamesPath: template_names_path(@project, issue),
+ initialTitleHtml: issue.title,
+ initialTitleText: issue.title,
+ initialDescriptionHtml: '<p dir="auto">issue text</p>',
+ initialDescriptionText: 'issue text',
+ initialTaskCompletionStatus: { completed_count: 0, count: 0 }
+ }
- context 'when updated by a deleted user' do
- let(:edited_updated_at_by) do
- {
- updatedAt: edited_issuable.last_edited_at.to_time.iso8601,
- updatedBy: {
- name: User.ghost.name,
- path: user_path(User.ghost)
- }
+ issue_only_data = {
+ canCreateIncident: true,
+ fullPath: issue.project.full_path,
+ iid: issue.iid,
+ issuableId: issue.id,
+ issueType: 'issue',
+ isHidden: false,
+ sentryIssueIdentifier: nil,
+ zoomMeetingUrl: nil
}
- end
- before do
- user.destroy!
- end
+ issue_header_data = {
+ authorId: issue.author.id,
+ authorName: issue.author.name,
+ authorUsername: issue.author.username,
+ authorWebUrl: url_for(user_path(issue.author)),
+ createdAt: issue.created_at.to_time.iso8601,
+ isFirstContribution: issue.first_contribution?,
+ serviceDeskReplyTo: nil
+ }
- it 'returns "Ghost user" as edited_by' do
- expect(helper.updated_at_by(edited_issuable.reload)).to eq(edited_updated_at_by)
- end
- end
- end
+ work_items_data = {
+ registerPath: '/users/sign_up?redirect_to_referer=yes',
+ signInPath: '/users/sign_in?redirect_to_referer=yes'
+ }
- describe '#issuable_initial_data' do
- let(:user) { create(:user) }
+ path_data = {
+ projectPath: @project.path,
+ projectId: @project.id,
+ projectNamespace: @project.namespace.path
+ }
- before do
- allow(helper).to receive(:current_user).and_return(user)
- allow(helper).to receive(:can?).and_return(true)
- stub_commonmark_sourcepos_disabled
- end
+ expected = base_data.merge(issue_only_data, issue_header_data, work_items_data, path_data)
- it 'returns the correct data for an issue' do
- issue = create(:issue, author: user, description: 'issue text')
- @project = issue.project
-
- expected_data = {
- endpoint: "/#{@project.full_path}/-/issues/#{issue.iid}",
- updateEndpoint: "/#{@project.full_path}/-/issues/#{issue.iid}.json",
- canUpdate: true,
- canDestroy: true,
- issuableRef: "##{issue.iid}",
- markdownPreviewPath: "/#{@project.full_path}/preview_markdown?target_id=#{issue.iid}&target_type=Issue",
- markdownDocsPath: '/help/user/markdown',
- lockVersion: issue.lock_version,
- projectPath: @project.path,
- projectId: @project.id,
- projectNamespace: @project.namespace.path,
- state: issue.state,
- initialTitleHtml: issue.title,
- initialTitleText: issue.title,
- initialDescriptionHtml: '<p dir="auto">issue text</p>',
- initialDescriptionText: 'issue text',
- initialTaskCompletionStatus: { completed_count: 0, count: 0 },
- issueType: 'issue',
- iid: issue.iid.to_s,
- isHidden: false
- }
- expect(helper.issuable_initial_data(issue)).to match(hash_including(expected_data))
+ expect(helper.issuable_initial_data(issue)).to include(expected)
+ end
end
context 'for incident tab' do
@@ -453,6 +344,46 @@ RSpec.describe IssuablesHelper, feature_category: :team_planning do
end
end
+ context 'when edited' do
+ it 'contains edited metadata' do
+ edited_issuable = create(:issue, author: user, description: 'issue text', last_edited_by: user, created_at: 3.days.ago, updated_at: 1.day.ago, last_edited_at: 2.days.ago)
+ @project = edited_issuable.project
+
+ expected = {
+ updatedAt: edited_issuable.last_edited_at.to_time.iso8601,
+ updatedBy: {
+ name: user.name,
+ path: user_path(user)
+ }
+ }
+
+ expect(helper.issuable_initial_data(edited_issuable)).to include(expected)
+ end
+
+ context 'when updated by a deleted user' do
+ let(:destroyed_user) { create(:user) }
+
+ before do
+ destroyed_user.destroy!
+ end
+
+ it 'returns "Ghost user" for updated by data' do
+ edited_issuable = create(:issue, author: user, description: 'issue text', last_edited_by: destroyed_user, created_at: 3.days.ago, updated_at: 1.day.ago, last_edited_at: 2.days.ago)
+ @project = edited_issuable.project
+
+ expected = {
+ updatedAt: edited_issuable.last_edited_at.to_time.iso8601,
+ updatedBy: {
+ name: Users::Internal.ghost.name,
+ path: user_path(Users::Internal.ghost)
+ }
+ }
+
+ expect(helper.issuable_initial_data(edited_issuable.reload)).to include(expected)
+ end
+ end
+ end
+
describe '#sentryIssueIdentifier' do
let(:issue) { create(:issue, author: user) }
@@ -613,38 +544,6 @@ RSpec.describe IssuablesHelper, feature_category: :team_planning do
end
end
- describe '#reviewer_sidebar_data' do
- let(:user) { create(:user) }
-
- subject { helper.reviewer_sidebar_data(user, merge_request: merge_request) }
-
- context 'without merge_request' do
- let(:merge_request) { nil }
-
- it 'returns hash of reviewer data' do
- is_expected.to eql({
- avatar_url: user.avatar_url,
- name: user.name,
- username: user.username
- })
- end
- end
-
- context 'with merge_request' do
- let(:merge_request) { build(:merge_request) }
-
- where(can_merge: [true, false])
-
- with_them do
- before do
- allow(merge_request).to receive(:can_be_merged_by?).and_return(can_merge)
- end
-
- it { is_expected.to include({ can_merge: can_merge }) }
- end
- end
- end
-
describe '#issuable_squash_option?' do
using RSpec::Parameterized::TableSyntax
@@ -704,34 +603,6 @@ RSpec.describe IssuablesHelper, feature_category: :team_planning do
end
end
- describe '#issuable_display_type' do
- using RSpec::Parameterized::TableSyntax
-
- where(:issuable_type, :issuable_display_type) do
- :issue | 'issue'
- :incident | 'incident'
- :merge_request | 'merge request'
- end
-
- with_them do
- let(:issuable) { build_stubbed(issuable_type) }
-
- subject { helper.issuable_display_type(issuable) }
-
- it { is_expected.to eq(issuable_display_type) }
- end
- end
-
- describe '#sidebar_milestone_tooltip_label' do
- it 'escapes HTML in the milestone title' do
- milestone = build(:milestone, title: '&lt;img onerror=alert(1)&gt;', due_date: Date.new(2022, 6, 26))
-
- expect(helper.sidebar_milestone_tooltip_label(milestone)).to eq(
- '&lt;img onerror=alert(1)&gt;<br/>Jun 26, 2022 (<strong>Past due</strong>)'
- )
- end
- end
-
describe '#issuable_type_selector_data' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb
index 0cde9aeac8d..72fa264698d 100644
--- a/spec/helpers/issues_helper_spec.rb
+++ b/spec/helpers/issues_helper_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe IssuesHelper do
+RSpec.describe IssuesHelper, feature_category: :team_planning do
include Features::MergeRequestHelpers
let_it_be(:project) { create(:project) }
@@ -136,86 +136,11 @@ RSpec.describe IssuesHelper do
end
end
- describe '#issue_closed_link' do
- let(:new_issue) { create(:issue, project: project) }
- let(:guest) { create(:user) }
-
- before do
- allow(helper).to receive(:can?) do |*args|
- Ability.allowed?(*args)
- end
- end
-
- shared_examples 'successfully displays link to issue and with css class' do |action|
- it 'returns link' do
- link = "<a class=\"#{css_class}\" href=\"/#{new_issue.project.full_path}/-/issues/#{new_issue.iid}\">(#{action})</a>"
-
- expect(helper.issue_closed_link(issue, user, css_class: css_class)).to match(link)
- end
- end
-
- shared_examples 'does not display link' do
- it 'returns nil' do
- expect(helper.issue_closed_link(issue, user)).to be_nil
- end
- end
-
- context 'with linked issue' do
- context 'with moved issue' do
- before do
- issue.update!(moved_to: new_issue)
- end
-
- context 'when user has permission to see new issue' do
- let(:user) { project.owner }
- let(:css_class) { 'text-white text-underline' }
-
- it_behaves_like 'successfully displays link to issue and with css class', 'moved'
- end
-
- context 'when user has no permission to see new issue' do
- let(:user) { guest }
-
- it_behaves_like 'does not display link'
- end
- end
-
- context 'with duplicated issue' do
- before do
- issue.update!(duplicated_to: new_issue)
- end
-
- context 'when user has permission to see new issue' do
- let(:user) { project.owner }
- let(:css_class) { 'text-white text-underline' }
-
- it_behaves_like 'successfully displays link to issue and with css class', 'duplicated'
- end
-
- context 'when user has no permission to see new issue' do
- let(:user) { guest }
-
- it_behaves_like 'does not display link'
- end
- end
- end
-
- context 'without linked issue' do
- let(:user) { project.owner }
-
- before do
- issue.update!(moved_to: nil, duplicated_to: nil)
- end
-
- it_behaves_like 'does not display link'
- end
- end
-
describe '#show_moved_service_desk_issue_warning?' do
let(:project1) { create(:project, service_desk_enabled: true) }
let(:project2) { create(:project, service_desk_enabled: true) }
- let!(:old_issue) { create(:issue, author: User.support_bot, project: project1) }
- let!(:new_issue) { create(:issue, author: User.support_bot, project: project2) }
+ let!(:old_issue) { create(:issue, author: Users::Internal.support_bot, project: project1) }
+ let!(:new_issue) { create(:issue, author: Users::Internal.support_bot, project: project2) }
before do
allow(Gitlab::Email::IncomingEmail).to receive(:enabled?) { true }
@@ -249,14 +174,13 @@ RSpec.describe IssuesHelper do
it 'returns expected result' do
expected = {
can_create_issue: 'true',
+ can_create_incident: 'true',
can_destroy_issue: 'true',
can_reopen_issue: 'true',
can_report_spam: 'false',
can_update_issue: 'true',
- iid: issue.iid,
is_issue_author: 'false',
issue_path: issue_path(issue),
- issue_type: 'issue',
new_issue_path: new_project_issue_path(project, { add_related_issue: issue.iid }),
project_path: project.full_path,
report_abuse_path: add_category_abuse_reports_path,
diff --git a/spec/helpers/members_helper_spec.rb b/spec/helpers/members_helper_spec.rb
index 005fce1730f..68a12d8dbf7 100644
--- a/spec/helpers/members_helper_spec.rb
+++ b/spec/helpers/members_helper_spec.rb
@@ -69,12 +69,6 @@ RSpec.describe MembersHelper do
it { expect(leave_confirmation_message(group)).to eq "Are you sure you want to leave the \"#{group.name}\" group?" }
end
- describe '#localized_tasks_to_be_done_choices' do
- it 'has a translation for all `TASKS_TO_BE_DONE` keys' do
- expect(localized_tasks_to_be_done_choices).to include(*MemberTask::TASKS.keys)
- end
- end
-
describe '#member_request_access_link' do
let(:project) { create(:project) }
let(:group) { create(:group) }
diff --git a/spec/helpers/nav/new_dropdown_helper_spec.rb b/spec/helpers/nav/new_dropdown_helper_spec.rb
index 4ec120d152b..47f23c4fa21 100644
--- a/spec/helpers/nav/new_dropdown_helper_spec.rb
+++ b/spec/helpers/nav/new_dropdown_helper_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Nav::NewDropdownHelper, feature_category: :navigation do
partial: partial,
component: 'invite_members',
data: {
- trigger_source: 'top-nav',
+ trigger_source: 'top_nav',
trigger_element: 'text-emoji'
}
)
diff --git a/spec/helpers/nav_helper_spec.rb b/spec/helpers/nav_helper_spec.rb
index 4b83561b265..950d8b77d01 100644
--- a/spec/helpers/nav_helper_spec.rb
+++ b/spec/helpers/nav_helper_spec.rb
@@ -144,15 +144,14 @@ RSpec.describe NavHelper, feature_category: :navigation do
context 'when user has not interacted with the new nav toggle yet' do
let(:user_preference) { nil }
- specify { expect(subject).to eq false }
+ specify { expect(subject).to eq true }
- context 'when the user was enrolled into the new nav via a special feature flag' do
+ context 'when the user was not enrolled into the new nav via a special feature flag' do
before do
- # this ff is disabled in globally to keep tests of the old nav working
- stub_feature_flags(super_sidebar_nav_enrolled: true)
+ stub_feature_flags(super_sidebar_nav_enrolled: false)
end
- specify { expect(subject).to eq true }
+ specify { expect(subject).to eq false }
end
end
diff --git a/spec/helpers/organizations/organization_helper_spec.rb b/spec/helpers/organizations/organization_helper_spec.rb
new file mode 100644
index 00000000000..ec99d928059
--- /dev/null
+++ b/spec/helpers/organizations/organization_helper_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Organizations::OrganizationHelper, feature_category: :cell do
+ let_it_be(:organization) { build_stubbed(:organization) }
+ let_it_be(:new_group_path) { '/groups/new' }
+ let_it_be(:new_project_path) { '/projects/new' }
+ let_it_be(:groups_empty_state_svg_path) { 'illustrations/empty-state/empty-groups-md.svg' }
+ let_it_be(:projects_empty_state_svg_path) { 'illustrations/empty-state/empty-projects-md.svg' }
+
+ before do
+ allow(helper).to receive(:new_group_path).and_return(new_group_path)
+ allow(helper).to receive(:new_project_path).and_return(new_project_path)
+ allow(helper).to receive(:image_path).with(groups_empty_state_svg_path).and_return(groups_empty_state_svg_path)
+ allow(helper).to receive(:image_path).with(projects_empty_state_svg_path).and_return(projects_empty_state_svg_path)
+ end
+
+ describe '#organization_show_app_data' do
+ before do
+ allow(helper).to receive(:groups_and_projects_organization_path)
+ .with(organization)
+ .and_return('/-/organizations/default/groups_and_projects')
+ end
+
+ it 'returns expected json' do
+ expect(
+ Gitlab::Json.parse(
+ helper.organization_show_app_data(organization)
+ )
+ ).to eq(
+ {
+ 'organization' => { 'id' => organization.id, 'name' => organization.name },
+ 'groups_and_projects_organization_path' => '/-/organizations/default/groups_and_projects',
+ 'new_group_path' => new_group_path,
+ 'new_project_path' => new_project_path,
+ 'groups_empty_state_svg_path' => groups_empty_state_svg_path,
+ 'projects_empty_state_svg_path' => projects_empty_state_svg_path,
+ 'association_counts' => {
+ 'groups' => 10,
+ 'projects' => 5,
+ 'users' => 1050
+ }
+ }
+ )
+ end
+ end
+
+ describe '#organization_groups_and_projects_app_data' do
+ it 'returns expected json' do
+ expect(
+ Gitlab::Json.parse(
+ helper.organization_groups_and_projects_app_data
+ )
+ ).to eq(
+ {
+ 'new_group_path' => new_group_path,
+ 'new_project_path' => new_project_path,
+ 'groups_empty_state_svg_path' => groups_empty_state_svg_path,
+ 'projects_empty_state_svg_path' => projects_empty_state_svg_path
+ }
+ )
+ end
+ end
+end
diff --git a/spec/helpers/projects/observability_helper_spec.rb b/spec/helpers/projects/observability_helper_spec.rb
deleted file mode 100644
index 0f47cdb8be2..00000000000
--- a/spec/helpers/projects/observability_helper_spec.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require 'json'
-
-RSpec.describe Projects::ObservabilityHelper, type: :helper, feature_category: :tracing do
- include Gitlab::Routing.url_helpers
-
- let_it_be(:group) { build_stubbed(:group) }
- let_it_be(:project) { build_stubbed(:project, group: group) }
-
- describe '#observability_tracing_view_model' do
- it 'generates the correct JSON' do
- expected_json = {
- tracingUrl: Gitlab::Observability.tracing_url(project),
- provisioningUrl: Gitlab::Observability.provisioning_url(project),
- oauthUrl: Gitlab::Observability.oauth_url
- }.to_json
-
- expect(helper.observability_tracing_view_model(project)).to eq(expected_json)
- end
- end
-
- describe '#observability_tracing_details_model' do
- it 'generates the correct JSON' do
- expected_json = {
- tracingIndexUrl: namespace_project_tracing_index_path(project.group, project),
- traceId: "trace-id",
- tracingUrl: Gitlab::Observability.tracing_url(project),
- provisioningUrl: Gitlab::Observability.provisioning_url(project),
- oauthUrl: Gitlab::Observability.oauth_url
- }.to_json
-
- expect(helper.observability_tracing_details_model(project, "trace-id")).to eq(expected_json)
- end
- end
-end
diff --git a/spec/helpers/projects/pipeline_helper_spec.rb b/spec/helpers/projects/pipeline_helper_spec.rb
index baeafe6b7e7..16c9b8a85ec 100644
--- a/spec/helpers/projects/pipeline_helper_spec.rb
+++ b/spec/helpers/projects/pipeline_helper_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe Projects::PipelineHelper do
blob_path: project_blob_path(project, pipeline.sha),
has_test_report: pipeline.complete_and_has_reports?(Ci::JobArtifact.of_report_type(:test)),
empty_dag_svg_path: match_asset_path('illustrations/empty-state/empty-dag-md.svg'),
- empty_state_image_path: match_asset_path('illustrations/empty-state/empty-test-cases-lg.svg'),
+ empty_state_image_path: match_asset_path('illustrations/empty-todos-md.svg'),
artifacts_expired_image_path: match_asset_path('illustrations/pipeline.svg'),
tests_count: pipeline.test_report_summary.total[:count]
})
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index e1537c7b287..9f9372f94cc 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -809,22 +809,6 @@ RSpec.describe ProjectsHelper, feature_category: :source_code_management do
end
end
- describe '#metrics_external_dashboard_url' do
- context 'metrics_setting exists' do
- it 'returns external_dashboard_url' do
- metrics_setting = create(:project_metrics_setting, project: project)
-
- expect(helper.metrics_external_dashboard_url).to eq(metrics_setting.external_dashboard_url)
- end
- end
-
- context 'metrics_setting does not exist' do
- it 'returns nil' do
- expect(helper.metrics_external_dashboard_url).to eq(nil)
- end
- end
- end
-
describe '#grafana_integration_url' do
subject { helper.grafana_integration_url }
diff --git a/spec/helpers/registrations_helper_spec.rb b/spec/helpers/registrations_helper_spec.rb
index 85cedd4aace..74d46245cc2 100644
--- a/spec/helpers/registrations_helper_spec.rb
+++ b/spec/helpers/registrations_helper_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe RegistrationsHelper, feature_category: :user_management do
describe '#signup_username_data_attributes' do
it 'has expected attributes' do
- expect(helper.signup_username_data_attributes.keys).to include(:min_length, :min_length_message, :max_length, :max_length_message, :qa_selector)
+ expect(helper.signup_username_data_attributes.keys).to include(:min_length, :min_length_message, :max_length, :max_length_message, :testid)
end
end
diff --git a/spec/helpers/sidebars_helper_spec.rb b/spec/helpers/sidebars_helper_spec.rb
index 4109eb01caa..3e5ee714b32 100644
--- a/spec/helpers/sidebars_helper_spec.rb
+++ b/spec/helpers/sidebars_helper_spec.rb
@@ -71,6 +71,36 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do
let(:project) { nil }
let(:current_user_mode) { Gitlab::Auth::CurrentUserMode.new(user) }
+ let(:global_shortcut_links) do
+ [
+ {
+ title: _('Milestones'),
+ href: dashboard_milestones_path,
+ css_class: 'dashboard-shortcuts-milestones'
+ },
+ {
+ title: _('Snippets'),
+ href: dashboard_snippets_path,
+ css_class: 'dashboard-shortcuts-snippets'
+ },
+ {
+ title: _('Activity'),
+ href: activity_dashboard_path,
+ css_class: 'dashboard-shortcuts-activity'
+ },
+ {
+ title: _('Groups'),
+ href: dashboard_groups_path,
+ css_class: 'dashboard-shortcuts-groups'
+ },
+ {
+ title: _('Projects'),
+ href: dashboard_projects_path,
+ css_class: 'dashboard-shortcuts-projects'
+ }
+ ]
+ end
+
subject do
helper.super_sidebar_context(user, group: group, project: project, panel: panel, panel_type: panel_type)
end
@@ -139,65 +169,65 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do
canary_toggle_com_url: Gitlab::Saas.canary_toggle_com_url,
pinned_items: %w[foo bar],
update_pins_url: pins_path,
- shortcut_links: [
- {
- title: _('Milestones'),
- href: dashboard_milestones_path,
- css_class: 'dashboard-shortcuts-milestones'
- },
- {
- title: _('Snippets'),
- href: dashboard_snippets_path,
- css_class: 'dashboard-shortcuts-snippets'
- },
- {
- title: _('Activity'),
- href: activity_dashboard_path,
- css_class: 'dashboard-shortcuts-activity'
- }
- ]
+ shortcut_links: global_shortcut_links,
+ track_visits_path: track_namespace_visits_path
})
end
describe "shortcut links" do
- let(:global_shortcut_links) do
- [
- {
- title: _('Milestones'),
- href: dashboard_milestones_path,
- css_class: 'dashboard-shortcuts-milestones'
- },
- {
- title: _('Snippets'),
- href: dashboard_snippets_path,
- css_class: 'dashboard-shortcuts-snippets'
- },
- {
- title: _('Activity'),
- href: activity_dashboard_path,
- css_class: 'dashboard-shortcuts-activity'
- }
- ]
- end
+ describe "as the anonymous user" do
+ let_it_be(:user) { nil }
+ let(:global_shortcut_links) do
+ [
+ {
+ title: _('Snippets'),
+ href: explore_snippets_path,
+ css_class: 'dashboard-shortcuts-snippets'
+ },
+ {
+ title: _('Groups'),
+ href: explore_groups_path,
+ css_class: 'dashboard-shortcuts-groups'
+ },
+ {
+ title: _('Projects'),
+ href: explore_projects_path,
+ css_class: 'dashboard-shortcuts-projects'
+ }
+ ]
+ end
+
+ it 'returns global shortcut links' do
+ expect(subject[:shortcut_links]).to eq(global_shortcut_links)
+ end
+
+ context 'in a project' do
+ let_it_be(:project) { build_stubbed(:project) }
- it 'returns global shortcut links' do
- expect(subject[:shortcut_links]).to eq(global_shortcut_links)
+ it 'returns project-specific shortcut links' do
+ expect(subject[:shortcut_links]).to eq(global_shortcut_links)
+ end
+ end
end
- context 'in a project' do
- # rubocop: disable RSpec/FactoryBot/AvoidCreate
- let_it_be(:project) { create(:project) }
- # rubocop: enable RSpec/FactoryBot/AvoidCreate
+ describe "as logged-in user" do
+ it 'returns global shortcut links' do
+ expect(subject[:shortcut_links]).to eq(global_shortcut_links)
+ end
- it 'returns project-specific shortcut links' do
- expect(subject[:shortcut_links]).to eq([
- *global_shortcut_links,
- {
- title: _('Create a new issue'),
- href: new_project_issue_path(project),
- css_class: 'shortcuts-new-issue'
- }
- ])
+ context 'in a project' do
+ let_it_be(:project) { build_stubbed(:project) }
+
+ it 'returns project-specific shortcut links' do
+ expect(subject[:shortcut_links]).to eq([
+ *global_shortcut_links,
+ {
+ title: _('Create a new issue'),
+ href: new_project_issue_path(project),
+ css_class: 'shortcuts-new-issue'
+ }
+ ])
+ end
end
end
end
@@ -488,7 +518,6 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do
end
describe '#super_sidebar_nav_panel' do
- let(:user) { build(:user) }
let(:group) { build(:group) }
let(:project) { build(:project) }
let(:organization) { build(:organization) }
@@ -500,48 +529,84 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do
{ current_user: nil, container: group, show_discover_group_security: false })
allow(group).to receive(:to_global_id).and_return(5)
- Rails.cache.write(['users', user.id, 'assigned_open_issues_count'], 1)
- Rails.cache.write(['users', user.id, 'assigned_open_merge_requests_count'], 4)
- Rails.cache.write(['users', user.id, 'review_requested_open_merge_requests_count'], 0)
- Rails.cache.write(['users', user.id, 'todos_pending_count'], 3)
end
- it 'returns Project Panel for project nav' do
- expect(helper.super_sidebar_nav_panel(nav: 'project')).to be_a(Sidebars::Projects::SuperSidebarPanel)
- end
+ shared_examples 'nav panels available to logged-out users' do
+ it 'returns Project Panel for project nav' do
+ expect(helper.super_sidebar_nav_panel(nav: 'project',
+ user: user)).to be_a(Sidebars::Projects::SuperSidebarPanel)
+ end
- it 'returns Group Panel for group nav' do
- expect(helper.super_sidebar_nav_panel(nav: 'group')).to be_a(Sidebars::Groups::SuperSidebarPanel)
- end
+ it 'returns Group Panel for group nav' do
+ expect(helper.super_sidebar_nav_panel(nav: 'group', user: user)).to be_a(Sidebars::Groups::SuperSidebarPanel)
+ end
- it 'returns User Settings Panel for profile nav' do
- expect(helper.super_sidebar_nav_panel(nav: 'profile')).to be_a(Sidebars::UserSettings::Panel)
- end
+ it 'returns User profile Panel for user profile nav' do
+ viewed_user = build(:user)
+ expect(helper.super_sidebar_nav_panel(nav: 'user_profile', user: user,
+ viewed_user: viewed_user)).to be_a(Sidebars::UserProfile::Panel)
+ end
- it 'returns User profile Panel for user profile nav' do
- expect(helper.super_sidebar_nav_panel(nav: 'user_profile')).to be_a(Sidebars::UserProfile::Panel)
- end
+ it 'returns Explore Panel for explore nav' do
+ expect(helper.super_sidebar_nav_panel(nav: 'explore', user: user)).to be_a(Sidebars::Explore::Panel)
+ end
- it 'returns Admin Panel for admin nav' do
- expect(helper.super_sidebar_nav_panel(nav: 'admin')).to be_a(Sidebars::Admin::Panel)
- end
+ it 'returns Organization Panel for organization nav' do
+ expect(
+ helper.super_sidebar_nav_panel(nav: 'organization', organization: organization, user: user)
+ ).to be_a(Sidebars::Organizations::SuperSidebarPanel)
+ end
- it 'returns Organization Panel for organization nav' do
- expect(
- helper.super_sidebar_nav_panel(nav: 'organization', organization: organization)
- ).to be_a(Sidebars::Organizations::SuperSidebarPanel)
+ it 'returns Search Panel for search nav' do
+ expect(helper.super_sidebar_nav_panel(nav: 'search', user: user)).to be_a(Sidebars::Search::Panel)
+ end
end
- it 'returns "Your Work" Panel for your_work nav', :use_clean_rails_memory_store_caching do
- expect(helper.super_sidebar_nav_panel(nav: 'your_work', user: user)).to be_a(Sidebars::YourWork::Panel)
- end
+ describe 'when logged-in' do
+ let(:user) { build(:user) }
+
+ before do
+ Rails.cache.write(['users', user.id, 'assigned_open_issues_count'], 1)
+ Rails.cache.write(['users', user.id, 'assigned_open_merge_requests_count'], 4)
+ Rails.cache.write(['users', user.id, 'review_requested_open_merge_requests_count'], 0)
+ Rails.cache.write(['users', user.id, 'todos_pending_count'], 3)
+ end
+
+ it 'returns User Settings Panel for profile nav' do
+ expect(helper.super_sidebar_nav_panel(nav: 'profile', user: user)).to be_a(Sidebars::UserSettings::Panel)
+ end
+
+ describe 'admin user' do
+ it 'returns Admin Panel for admin nav', :aggregate_failures do
+ allow(user).to receive(:can_admin_all_resources?).and_return(true)
+
+ expect(helper.super_sidebar_nav_panel(nav: 'admin', user: user)).to be_a(Sidebars::Admin::Panel)
+ end
+ end
+
+ it 'returns Your Work Panel for admin nav when the user is not an admin' do
+ expect(helper.super_sidebar_nav_panel(nav: 'admin', user: user)).to be_a(Sidebars::YourWork::Panel)
+ end
+
+ it 'returns "Your Work" Panel for your_work nav', :use_clean_rails_memory_store_caching do
+ expect(helper.super_sidebar_nav_panel(nav: 'your_work', user: user)).to be_a(Sidebars::YourWork::Panel)
+ end
+
+ it 'returns "Your Work" Panel as a fallback', :use_clean_rails_memory_store_caching do
+ expect(helper.super_sidebar_nav_panel(user: user)).to be_a(Sidebars::YourWork::Panel)
+ end
- it 'returns Search Panel for search nav' do
- expect(helper.super_sidebar_nav_panel(nav: 'search', user: user)).to be_a(Sidebars::Search::Panel)
+ it_behaves_like 'nav panels available to logged-out users'
end
- it 'returns "Your Work" Panel as a fallback', :use_clean_rails_memory_store_caching do
- expect(helper.super_sidebar_nav_panel(user: user)).to be_a(Sidebars::YourWork::Panel)
+ describe 'when logged-out' do
+ let(:user) { nil }
+
+ it_behaves_like 'nav panels available to logged-out users'
+
+ it 'returns "Explore" Panel as a fallback' do
+ expect(helper.super_sidebar_nav_panel(user: user)).to be_a(Sidebars::Explore::Panel)
+ end
end
end
diff --git a/spec/helpers/sidekiq_helper_spec.rb b/spec/helpers/sidekiq_helper_spec.rb
index 6a0a92bafd8..594996bac95 100644
--- a/spec/helpers/sidekiq_helper_spec.rb
+++ b/spec/helpers/sidekiq_helper_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe SidekiqHelper do
+RSpec.describe SidekiqHelper, feature_category: :shared do
describe 'parse_sidekiq_ps' do
it 'parses line with time' do
line = '55137 10,0 2,1 S+ 2:30pm sidekiq 4.1.4 gitlab [0 of 25 busy] '
diff --git a/spec/helpers/vite_helper_spec.rb b/spec/helpers/vite_helper_spec.rb
new file mode 100644
index 00000000000..edb5650ab1a
--- /dev/null
+++ b/spec/helpers/vite_helper_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ViteHelper, feature_category: :tooling do
+ let(:source) { 'foo.js' }
+ let(:vite_source) { 'vite/foo.js' }
+ let(:vite_tag) { '<tag src="vite/foo"></tag>' }
+ let(:webpack_source) { 'webpack/foo.js' }
+ let(:webpack_tag) { '<tag src="webpack/foo"></tag>' }
+
+ context 'when vite enabled' do
+ before do
+ stub_rails_env('development')
+ stub_feature_flags(vite: true)
+
+ allow(helper).to receive(:vite_javascript_tag).and_return(vite_tag)
+ allow(helper).to receive(:vite_asset_path).and_return(vite_source)
+ allow(helper).to receive(:vite_stylesheet_tag).and_return(vite_tag)
+ allow(helper).to receive(:vite_asset_url).and_return(vite_source)
+ allow(helper).to receive(:vite_running).and_return(true)
+ end
+
+ describe '#universal_javascript_include_tag' do
+ it 'returns vite javascript tag' do
+ expect(helper.universal_javascript_include_tag(source)).to eq(vite_tag)
+ end
+ end
+
+ describe '#universal_asset_path' do
+ it 'returns vite asset path' do
+ expect(helper.universal_asset_path(source)).to eq(vite_source)
+ end
+ end
+ end
+
+ context 'when vite disabled' do
+ before do
+ stub_feature_flags(vite: false)
+
+ allow(helper).to receive(:javascript_include_tag).and_return(webpack_tag)
+ allow(helper).to receive(:asset_path).and_return(webpack_source)
+ allow(helper).to receive(:stylesheet_link_tag).and_return(webpack_tag)
+ allow(helper).to receive(:path_to_stylesheet).and_return(webpack_source)
+ end
+
+ describe '#universal_javascript_include_tag' do
+ it 'returns webpack javascript tag' do
+ expect(helper.universal_javascript_include_tag(source)).to eq(webpack_tag)
+ end
+ end
+
+ describe '#universal_asset_path' do
+ it 'returns ActionView asset path' do
+ expect(helper.universal_asset_path(source)).to eq(webpack_source)
+ end
+ end
+ end
+end
diff --git a/spec/helpers/webpack_helper_spec.rb b/spec/helpers/webpack_helper_spec.rb
index f9e2d265153..23585c47239 100644
--- a/spec/helpers/webpack_helper_spec.rb
+++ b/spec/helpers/webpack_helper_spec.rb
@@ -34,4 +34,22 @@ RSpec.describe WebpackHelper do
expect(output).to eq("<link rel=\"prefetch\" href=\"#{asset_path}\">")
end
end
+
+ context 'when vite enabled' do
+ let(:bundle) { 'bundle.js' }
+
+ before do
+ stub_rails_env('development')
+ stub_feature_flags(vite: true)
+
+ allow(helper).to receive(:vite_javascript_tag).and_return('vite')
+ allow(helper).to receive(:vite_running).and_return(true)
+ end
+
+ describe '#webpack_bundle_tag' do
+ it 'returns vite javascript tag' do
+ expect(helper.webpack_bundle_tag(bundle)).to eq('vite')
+ end
+ end
+ end
end
diff --git a/spec/helpers/work_items_helper_spec.rb b/spec/helpers/work_items_helper_spec.rb
index 4e1eca3d411..b790f21d412 100644
--- a/spec/helpers/work_items_helper_spec.rb
+++ b/spec/helpers/work_items_helper_spec.rb
@@ -21,4 +21,18 @@ RSpec.describe WorkItemsHelper, feature_category: :team_planning do
)
end
end
+
+ describe '#work_items_list_data' do
+ let_it_be(:group) { build(:group) }
+
+ subject(:work_items_list_data) { helper.work_items_list_data(group) }
+
+ it 'returns expected data' do
+ expect(work_items_list_data).to include(
+ {
+ full_path: group.full_path
+ }
+ )
+ end
+ end
end
diff --git a/spec/initializers/action_cable_subscription_adapter_identifier_spec.rb b/spec/initializers/action_cable_subscription_adapter_identifier_spec.rb
index cf82fd751dd..dd2bf298611 100644
--- a/spec/initializers/action_cable_subscription_adapter_identifier_spec.rb
+++ b/spec/initializers/action_cable_subscription_adapter_identifier_spec.rb
@@ -27,7 +27,8 @@ RSpec.describe 'ActionCableSubscriptionAdapterIdentifier override' do
sub = ActionCable.server.pubsub.send(:redis_connection)
- expect(sub.connection[:id]).to eq('unix:///home/localuser/redis/redis.socket/0')
+ expect(sub.is_a?(::Gitlab::Redis::MultiStore)).to eq(true)
+ expect(sub.secondary_store.connection[:id]).to eq('unix:///home/localuser/redis/redis.socket/0')
expect(ActionCable.server.config.cable[:id]).to be_nil
end
end
diff --git a/spec/initializers/mail_starttls_patch_spec.rb b/spec/initializers/mail_starttls_patch_spec.rb
index 99c8edddd12..0ceeb78a5b8 100644
--- a/spec/initializers/mail_starttls_patch_spec.rb
+++ b/spec/initializers/mail_starttls_patch_spec.rb
@@ -17,6 +17,37 @@ RSpec.describe 'Mail STARTTLS patch', feature_category: :shared do
end
end
+ # As long as this monkey patch exists and overrides the constructor,
+ # we should test that the defaults of Mail::SMTP are not overridden.
+ #
+ # @see issue https://gitlab.com/gitlab-org/gitlab/-/issues/423268
+ # @see incident https://gitlab.com/gitlab-com/gl-infra/production/-/issues/16223
+ it 'does not override default constant values' do
+ expected_settings = Mail::SMTP.new({}).settings.dup
+
+ Mail.new.delivery_method(Mail::SMTP, { user_name: 'user@example.com' })
+
+ expect(Mail::SMTP.new({}).settings).to eq(expected_settings)
+ end
+
+ describe 'enable_starttls_auto setting' do
+ let(:settings) { {} }
+
+ subject(:smtp) { Mail::SMTP.new(settings) }
+
+ it 'uses default for enable_starttls_auto' do
+ expect(smtp.settings).to include(enable_starttls_auto: nil)
+ end
+
+ context 'when set to false' do
+ let(:settings) { { enable_starttls_auto: false } }
+
+ it 'overrides default and sets value' do
+ expect(smtp.settings).to include(enable_starttls_auto: false)
+ end
+ end
+ end
+
# Taken from https://github.com/mikel/mail/pull/1536#issue-1490438378
where(:ssl, :tls, :enable_starttls, :enable_starttls_auto, :smtp_tls, :smtp_starttls_mode) do
true | nil | nil | nil | true | false
diff --git a/spec/initializers/sidekiq_spec.rb b/spec/initializers/sidekiq_spec.rb
index 063dddd8c46..a034e628d25 100644
--- a/spec/initializers/sidekiq_spec.rb
+++ b/spec/initializers/sidekiq_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'sidekiq' do
+RSpec.describe 'sidekiq', feature_category: :build do
describe 'enable_reliable_fetch?' do
subject { enable_reliable_fetch? }
diff --git a/spec/lib/api/ci/helpers/runner_spec.rb b/spec/lib/api/ci/helpers/runner_spec.rb
index 62b79c77b4a..70504a58af3 100644
--- a/spec/lib/api/ci/helpers/runner_spec.rb
+++ b/spec/lib/api/ci/helpers/runner_spec.rb
@@ -3,10 +3,18 @@
require 'spec_helper'
RSpec.describe API::Ci::Helpers::Runner do
- let(:helper) { Class.new { include API::Ci::Helpers::Runner }.new }
+ let(:helper) do
+ Class.new do
+ include API::Ci::Helpers::Runner
+ include Gitlab::RackLoadBalancingHelpers
+ end.new
+ end
+
+ let(:env_hash) { {} }
+ let(:request) { instance_double(Rack::Request, env: env_hash) }
before do
- allow(helper).to receive(:env).and_return({})
+ allow(helper).to receive(:request).and_return(request)
end
describe '#current_job', feature_category: :continuous_integration do
@@ -16,17 +24,22 @@ RSpec.describe API::Ci::Helpers::Runner do
allow(helper).to receive(:params).and_return(id: build.id)
expect(Ci::Build.sticking)
- .to receive(:stick_or_unstick_request)
- .with({}, :build, build.id)
+ .to receive(:find_caught_up_replica)
+ .with(:build, build.id)
helper.current_job
+
+ stick_object = env_hash[::Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].first
+ expect(stick_object[0]).to eq(Ci::Build.sticking)
+ expect(stick_object[1]).to eq(:build)
+ expect(stick_object[2]).to eq(build.id)
end
it 'does not handle sticking if no build ID was specified' do
allow(helper).to receive(:params).and_return({})
expect(Ci::Build.sticking)
- .not_to receive(:stick_or_unstick_request)
+ .not_to receive(:find_caught_up_replica)
helper.current_job
end
@@ -45,17 +58,22 @@ RSpec.describe API::Ci::Helpers::Runner do
allow(helper).to receive(:params).and_return(token: runner.token)
expect(Ci::Runner.sticking)
- .to receive(:stick_or_unstick_request)
- .with({}, :runner, runner.token)
+ .to receive(:find_caught_up_replica)
+ .with(:runner, runner.token)
helper.current_runner
+
+ stick_object = env_hash[::Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].first
+ expect(stick_object[0]).to eq(Ci::Runner.sticking)
+ expect(stick_object[1]).to eq(:runner)
+ expect(stick_object[2]).to eq(runner.token)
end
it 'does not handle sticking if no token was specified' do
allow(helper).to receive(:params).and_return({})
expect(Ci::Runner.sticking)
- .not_to receive(:stick_or_unstick_request)
+ .not_to receive(:find_caught_up_replica)
helper.current_runner
end
diff --git a/spec/lib/api/entities/merge_request_basic_spec.rb b/spec/lib/api/entities/merge_request_basic_spec.rb
index 89e19f8529e..0cf0a57fa87 100644
--- a/spec/lib/api/entities/merge_request_basic_spec.rb
+++ b/spec/lib/api/entities/merge_request_basic_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::API::Entities::MergeRequestBasic do
+RSpec.describe ::API::Entities::MergeRequestBasic, feature_category: :code_review_workflow do
let_it_be(:user) { create(:user) }
let_it_be(:merge_request) { create(:merge_request) }
let_it_be(:labels) { create_list(:label, 3) }
diff --git a/spec/lib/api/entities/merge_request_diff_spec.rb b/spec/lib/api/entities/merge_request_diff_spec.rb
new file mode 100644
index 00000000000..a6927914316
--- /dev/null
+++ b/spec/lib/api/entities/merge_request_diff_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::API::Entities::MergeRequestDiff, feature_category: :code_review_workflow do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:merge_request) { create(:merge_request) }
+ let_it_be(:project) { merge_request.target_project }
+ let_it_be(:entity) { described_class.new(merge_request.merge_request_diffs.first) }
+
+ before do
+ merge_request.merge_request_diffs.create!(head_commit_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9')
+ merge_request.merge_request_diffs.create!(head_commit_sha: '5937ac0a7beb003549fc5fd26fc247adbce4a52e')
+ end
+
+ subject(:json) { entity.as_json }
+
+ it "includes expected fields" do
+ expected_fields = %i[
+ id head_commit_sha base_commit_sha start_commit_sha created_at
+ merge_request_id state real_size patch_id_sha
+ ]
+
+ is_expected.to include(*expected_fields)
+ end
+
+ it "returns expected data" do
+ merge_request_diff = merge_request.merge_request_diffs.first
+
+ expect(entity.as_json).to eq(
+ {
+ id: merge_request_diff.id,
+ head_commit_sha: merge_request_diff.head_commit_sha,
+ base_commit_sha: merge_request_diff.base_commit_sha,
+ start_commit_sha: merge_request_diff.start_commit_sha,
+ created_at: merge_request_diff.created_at,
+ merge_request_id: merge_request.id,
+ state: merge_request_diff.state,
+ real_size: merge_request_diff.real_size,
+ patch_id_sha: merge_request_diff.patch_id_sha
+ }
+ )
+ end
+end
diff --git a/spec/lib/api/entities/ml/mlflow/get_run_spec.rb b/spec/lib/api/entities/ml/mlflow/get_run_spec.rb
new file mode 100644
index 00000000000..513ecdeee3c
--- /dev/null
+++ b/spec/lib/api/entities/ml/mlflow/get_run_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::Ml::Mlflow::GetRun, feature_category: :mlops do
+ let_it_be(:candidate) { build(:ml_candidates, :with_metrics_and_params) }
+
+ subject { described_class.new(candidate).as_json }
+
+ it 'has run key' do
+ expect(subject).to have_key(:run)
+ end
+
+ it 'has the id' do
+ expect(subject.dig(:run, :info, :run_id)).to eq(candidate.eid.to_s)
+ end
+
+ it 'presents the metrics' do
+ expect(subject.dig(:run, :data, :metrics).size).to eq(candidate.metrics.size)
+ end
+
+ it 'presents metrics correctly' do
+ presented_metric = subject.dig(:run, :data, :metrics)[0]
+ metric = candidate.metrics[0]
+
+ expect(presented_metric[:key]).to eq(metric.name)
+ expect(presented_metric[:value]).to eq(metric.value)
+ expect(presented_metric[:timestamp]).to eq(metric.tracked_at)
+ expect(presented_metric[:step]).to eq(metric.step)
+ end
+
+ it 'presents the params' do
+ expect(subject.dig(:run, :data, :params).size).to eq(candidate.params.size)
+ end
+
+ it 'presents params correctly' do
+ presented_param = subject.dig(:run, :data, :params)[0]
+ param = candidate.params[0]
+
+ expect(presented_param[:key]).to eq(param.name)
+ expect(presented_param[:value]).to eq(param.value)
+ end
+
+ context 'when candidate has no metrics' do
+ before do
+ allow(candidate).to receive(:metrics).and_return([])
+ end
+
+ it 'returns empty data' do
+ expect(subject.dig(:run, :data, :metrics)).to be_empty
+ end
+ end
+
+ context 'when candidate has no params' do
+ before do
+ allow(candidate).to receive(:params).and_return([])
+ end
+
+ it 'returns empty data' do
+ expect(subject.dig(:run, :data, :params)).to be_empty
+ end
+ end
+end
diff --git a/spec/lib/api/entities/ml/mlflow/run_info_spec.rb b/spec/lib/api/entities/ml/mlflow/run_info_spec.rb
index 28fef16a532..1664d9f18d2 100644
--- a/spec/lib/api/entities/ml/mlflow/run_info_spec.rb
+++ b/spec/lib/api/entities/ml/mlflow/run_info_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe API::Entities::Ml::Mlflow::RunInfo, feature_category: :mlops do
- let_it_be(:candidate) { create(:ml_candidates) }
+ let_it_be(:candidate) { build(:ml_candidates) }
subject { described_class.new(candidate, packages_url: 'http://example.com').as_json }
diff --git a/spec/lib/api/entities/ml/mlflow/run_spec.rb b/spec/lib/api/entities/ml/mlflow/run_spec.rb
index a57f70f788b..58148212a7b 100644
--- a/spec/lib/api/entities/ml/mlflow/run_spec.rb
+++ b/spec/lib/api/entities/ml/mlflow/run_spec.rb
@@ -3,24 +3,20 @@
require 'spec_helper'
RSpec.describe API::Entities::Ml::Mlflow::Run do
- let_it_be(:candidate) { create(:ml_candidates, :with_metrics_and_params) }
+ let_it_be(:candidate) { build(:ml_candidates, :with_metrics_and_params) }
subject { described_class.new(candidate).as_json }
- it 'has run key' do
- expect(subject).to have_key(:run)
- end
-
it 'has the id' do
- expect(subject.dig(:run, :info, :run_id)).to eq(candidate.eid.to_s)
+ expect(subject.dig(:info, :run_id)).to eq(candidate.eid.to_s)
end
it 'presents the metrics' do
- expect(subject.dig(:run, :data, :metrics).size).to eq(candidate.metrics.size)
+ expect(subject.dig(:data, :metrics).size).to eq(candidate.metrics.size)
end
it 'presents metrics correctly' do
- presented_metric = subject.dig(:run, :data, :metrics)[0]
+ presented_metric = subject.dig(:data, :metrics)[0]
metric = candidate.metrics[0]
expect(presented_metric[:key]).to eq(metric.name)
@@ -30,11 +26,11 @@ RSpec.describe API::Entities::Ml::Mlflow::Run do
end
it 'presents the params' do
- expect(subject.dig(:run, :data, :params).size).to eq(candidate.params.size)
+ expect(subject.dig(:data, :params).size).to eq(candidate.params.size)
end
it 'presents params correctly' do
- presented_param = subject.dig(:run, :data, :params)[0]
+ presented_param = subject.dig(:data, :params)[0]
param = candidate.params[0]
expect(presented_param[:key]).to eq(param.name)
@@ -47,7 +43,7 @@ RSpec.describe API::Entities::Ml::Mlflow::Run do
end
it 'returns empty data' do
- expect(subject.dig(:run, :data, :metrics)).to be_empty
+ expect(subject.dig(:data, :metrics)).to be_empty
end
end
@@ -57,7 +53,7 @@ RSpec.describe API::Entities::Ml::Mlflow::Run do
end
it 'data is empty' do
- expect(subject.dig(:run, :data, :params)).to be_empty
+ expect(subject.dig(:data, :params)).to be_empty
end
end
end
diff --git a/spec/lib/api/entities/ml/mlflow/search_runs_spec.rb b/spec/lib/api/entities/ml/mlflow/search_runs_spec.rb
new file mode 100644
index 00000000000..6ed59d454fa
--- /dev/null
+++ b/spec/lib/api/entities/ml/mlflow/search_runs_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::Ml::Mlflow::SearchRuns, feature_category: :mlops do
+ let_it_be(:candidates) { [build_stubbed(:ml_candidates, :with_metrics_and_params), build_stubbed(:ml_candidates)] }
+
+ let(:next_page_token) { 'abcdef' }
+
+ subject { described_class.new({ candidates: candidates, next_page_token: next_page_token }).as_json }
+
+ it 'presents the candidates', :aggregate_failures do
+ expect(subject[:runs].size).to eq(2)
+ expect(subject.dig(:runs, 0, :info, :run_id)).to eq(candidates[0].eid.to_s)
+ expect(subject.dig(:runs, 1, :info, :run_id)).to eq(candidates[1].eid.to_s)
+ end
+
+ it 'presents metrics', :aggregate_failures do
+ expect(subject.dig(:runs, 0, :data, :metrics).size).to eq(candidates[0].metrics.size)
+ expect(subject.dig(:runs, 1, :data, :metrics).size).to eq(0)
+
+ presented_metric = subject.dig(:runs, 0, :data, :metrics, 0, :key)
+ metric = candidates[0].metrics[0].name
+
+ expect(presented_metric).to eq(metric)
+ end
+
+ it 'presents params', :aggregate_failures do
+ expect(subject.dig(:runs, 0, :data, :params).size).to eq(candidates[0].params.size)
+ expect(subject.dig(:runs, 1, :data, :params).size).to eq(0)
+
+ presented_param = subject.dig(:runs, 0, :data, :params, 0, :key)
+ param = candidates[0].params[0].name
+
+ expect(presented_param).to eq(param)
+ end
+end
diff --git a/spec/lib/api/entities/project_spec.rb b/spec/lib/api/entities/project_spec.rb
index 5d18b93228f..2c2cabba5e9 100644
--- a/spec/lib/api/entities/project_spec.rb
+++ b/spec/lib/api/entities/project_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe ::API::Entities::Project do
end
end
- describe '.service_desk_address' do
+ describe '.service_desk_address', feature_category: :service_desk do
before do
allow(project).to receive(:service_desk_enabled?).and_return(true)
end
diff --git a/spec/lib/api/helpers/packages_helpers_spec.rb b/spec/lib/api/helpers/packages_helpers_spec.rb
index 6ba4396c396..bb7b9d688ea 100644
--- a/spec/lib/api/helpers/packages_helpers_spec.rb
+++ b/spec/lib/api/helpers/packages_helpers_spec.rb
@@ -292,7 +292,7 @@ RSpec.describe API::Helpers::PackagesHelpers, feature_category: :package_registr
let(:label) { 'counts.package_events_i_package_push_package_by_deploy_token' }
let(:property) { 'i_package_push_package_by_deploy_token' }
let(:service_ping_context) do
- [Gitlab::Tracking::ServicePingContext.new(data_source: :redis, key_path: 'counts.package_events_i_package_push_package_by_deploy_token').to_h]
+ [Gitlab::Usage::MetricDefinition.context_for('counts.package_events_i_package_push_package_by_deploy_token').to_h]
end
it 'logs a snowplow event' do
@@ -320,7 +320,7 @@ RSpec.describe API::Helpers::PackagesHelpers, feature_category: :package_registr
let(:label) { 'counts.package_events_i_package_pull_package_by_guest' }
let(:property) { 'i_package_pull_package_by_guest' }
let(:service_ping_context) do
- [Gitlab::Tracking::ServicePingContext.new(data_source: :redis, key_path: 'counts.package_events_i_package_pull_package_by_guest').to_h]
+ [Gitlab::Usage::MetricDefinition.context_for('counts.package_events_i_package_pull_package_by_guest').to_h]
end
it 'logs a snowplow event' do
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
index 667ee72f821..dd62343890e 100644
--- a/spec/lib/api/helpers_spec.rb
+++ b/spec/lib/api/helpers_spec.rb
@@ -11,7 +11,6 @@ RSpec.describe API::Helpers, feature_category: :shared do
include Rack::Test::Methods
let(:user) { build(:user, id: 42) }
- let(:request) { instance_double(Rack::Request) }
let(:helper) do
Class.new(Grape::API::Instance) do
helpers API::APIGuard::HelperMethods
@@ -36,18 +35,23 @@ RSpec.describe API::Helpers, feature_category: :shared do
allow_any_instance_of(described_class).to receive(:initial_current_user).and_return(user)
expect(ApplicationRecord.sticking)
- .to receive(:stick_or_unstick_request).with(any_args, :user, 42)
+ .to receive(:find_caught_up_replica).with(:user, 42)
get 'user'
expect(Gitlab::Json.parse(last_response.body)).to eq({ 'id' => user.id })
+
+ stick_object = last_request.env[::Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].first
+ expect(stick_object[0]).to eq(User.sticking)
+ expect(stick_object[1]).to eq(:user)
+ expect(stick_object[2]).to eq(42)
end
it 'does not handle sticking if no user could be found' do
allow_any_instance_of(described_class).to receive(:initial_current_user).and_return(nil)
expect(ApplicationRecord.sticking)
- .not_to receive(:stick_or_unstick_request)
+ .not_to receive(:find_caught_up_replica)
get 'user'
@@ -243,6 +247,165 @@ RSpec.describe API::Helpers, feature_category: :shared do
end
end
+ describe '#find_pipeline' do
+ let(:pipeline) { create(:ci_pipeline) }
+
+ shared_examples 'pipeline finder' do
+ context 'when pipeline exists' do
+ it 'returns requested pipeline' do
+ expect(helper.find_pipeline(existing_id)).to eq(pipeline)
+ end
+ end
+
+ context 'when pipeline does not exist' do
+ it 'returns nil' do
+ expect(helper.find_pipeline(non_existing_id)).to be_nil
+ end
+ end
+
+ context 'when pipeline id is not provided' do
+ it 'returns nil' do
+ expect(helper.find_pipeline(nil)).to be_nil
+ end
+ end
+ end
+
+ context 'when ID is used as an argument' do
+ let(:existing_id) { pipeline.id }
+ let(:non_existing_id) { non_existing_record_id }
+
+ it_behaves_like 'pipeline finder'
+ end
+
+ context 'when string ID is used as an argument' do
+ let(:existing_id) { pipeline.id.to_s }
+ let(:non_existing_id) { non_existing_record_id }
+
+ it_behaves_like 'pipeline finder'
+ end
+
+ context 'when ID is a negative number' do
+ let(:existing_id) { pipeline.id }
+ let(:non_existing_id) { -1 }
+
+ it_behaves_like 'pipeline finder'
+ end
+ end
+
+ describe '#find_pipeline!' do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:user) { create(:user) }
+
+ shared_examples 'private project without access' do
+ before do
+ project.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value('private'))
+ allow(helper).to receive(:authenticate_non_public?).and_return(false)
+ end
+
+ it 'returns not found' do
+ expect(helper).to receive(:not_found!)
+
+ helper.find_pipeline!(pipeline.id)
+ end
+ end
+
+ context 'when user is authenticated' do
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:initial_current_user).and_return(user)
+ end
+
+ context 'public project' do
+ it 'returns requested pipeline' do
+ expect(helper.find_pipeline!(pipeline.id)).to eq(pipeline)
+ end
+ end
+
+ context 'private project' do
+ it_behaves_like 'private project without access'
+
+ context 'without read pipeline permission' do
+ before do
+ allow(helper).to receive(:can?).with(user, :read_pipeline, pipeline).and_return(false)
+ end
+
+ it_behaves_like 'private project without access'
+ end
+ end
+
+ context 'with read pipeline permission' do
+ before do
+ allow(helper).to receive(:can?).with(user, :read_pipeline, pipeline).and_return(true)
+ end
+
+ it 'returns requested pipeline' do
+ expect(helper.find_pipeline!(pipeline.id)).to eq(pipeline)
+ end
+ end
+ end
+
+ context 'when user is not authenticated' do
+ before do
+ allow(helper).to receive(:current_user).and_return(nil)
+ allow(helper).to receive(:initial_current_user).and_return(nil)
+ end
+
+ context 'public project' do
+ it 'returns requested pipeline' do
+ expect(helper.find_pipeline!(pipeline.id)).to eq(pipeline)
+ end
+ end
+
+ context 'private project' do
+ it_behaves_like 'private project without access'
+ end
+ end
+
+ context 'support for IDs and paths as arguments' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+
+ let(:user) { project.first_owner }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:authorized_project_scope?).and_return(true)
+ allow(helper).to receive(:job_token_authentication?).and_return(false)
+ allow(helper).to receive(:authenticate_non_public?).and_return(false)
+ end
+
+ shared_examples 'pipeline finder' do
+ context 'when pipeline exists' do
+ it 'returns requested pipeline' do
+ expect(helper.find_pipeline!(existing_id)).to eq(pipeline)
+ end
+
+ it 'returns nil' do
+ expect(helper).to receive(:render_api_error!).with('404 Pipeline Not Found', 404)
+ expect(helper.find_pipeline!(non_existing_id)).to be_nil
+ end
+ end
+ end
+
+ context 'when ID is used as an argument' do
+ context 'when pipeline id is an integer' do
+ let(:existing_id) { pipeline.id }
+ let(:non_existing_id) { non_existing_record_id }
+
+ it_behaves_like 'pipeline finder'
+ end
+
+ context 'when pipeline id is a string' do
+ let(:existing_id) { pipeline.id.to_s }
+ let(:non_existing_id) { "non_existing_record_id" }
+
+ it_behaves_like 'pipeline finder'
+ end
+ end
+ end
+ end
+
describe '#find_group!' do
let_it_be(:group) { create(:group, :public) }
let_it_be(:user) { create(:user) }
@@ -628,10 +791,12 @@ RSpec.describe API::Helpers, feature_category: :shared do
end
it 'logs an exception for unknown event' do
- expect(Gitlab::AppLogger).to receive(:warn).with(
- "Internal Event tracking event failed for event: #{unknown_event}, message: Unknown event: #{unknown_event}"
- )
-
+ expect(Gitlab::InternalEvents).to receive(:track_event).and_raise(Gitlab::InternalEvents::UnknownEventError, "Unknown event: #{unknown_event}")
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+ .with(
+ instance_of(Gitlab::InternalEvents::UnknownEventError),
+ event_name: unknown_event
+ )
helper.track_event(unknown_event, user_id: user_id, namespace_id: namespace_id, project_id: project_id)
end
@@ -1072,4 +1237,47 @@ RSpec.describe API::Helpers, feature_category: :shared do
it_behaves_like 'authorized'
end
end
+
+ describe "attributes_for_keys" do
+ let(:hash) do
+ {
+ existing_key_with_present_value: 'actual value',
+ existing_key_with_nil_value: nil,
+ existing_key_with_false_value: false
+ }
+ end
+
+ let(:parameters) { ::ActionController::Parameters.new(hash) }
+ let(:symbol_keys) do
+ %i[
+ existing_key_with_present_value
+ existing_key_with_nil_value
+ existing_key_with_false_value
+ non_existing_key
+ ]
+ end
+
+ let(:string_keys) { symbol_keys.map(&:to_s) }
+ let(:filtered_attrs) do
+ {
+ 'existing_key_with_present_value' => 'actual value',
+ 'existing_key_with_false_value' => false
+ }
+ end
+
+ let(:empty_attrs) { {} }
+
+ where(:params, :keys, :attrs_result) do
+ ref(:hash) | ref(:symbol_keys) | ref(:filtered_attrs)
+ ref(:hash) | ref(:string_keys) | ref(:empty_attrs)
+ ref(:parameters) | ref(:symbol_keys) | ref(:filtered_attrs)
+ ref(:parameters) | ref(:string_keys) | ref(:filtered_attrs)
+ end
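+
+ # Note (explanatory, not part of the upstream change): the plain Hash with
+ # string keys is expected to yield empty attrs because a Hash keyed by symbols
+ # has no matching string keys, whereas ActionController::Parameters provides
+ # indifferent access.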
+
+ with_them do
+ it 'returns the values for given keys' do
+ expect(helper.attributes_for_keys(keys, params)).to eq(attrs_result)
+ end
+ end
+ end
end
diff --git a/spec/lib/api/ml/mlflow/api_helpers_spec.rb b/spec/lib/api/ml/mlflow/api_helpers_spec.rb
index 4f6a37c66c4..757a73ed612 100644
--- a/spec/lib/api/ml/mlflow/api_helpers_spec.rb
+++ b/spec/lib/api/ml/mlflow/api_helpers_spec.rb
@@ -37,4 +37,28 @@ RSpec.describe API::Ml::Mlflow::ApiHelpers, feature_category: :mlops do
it { is_expected.to eql("http://localhost/gitlab/root/api/v4/projects/#{user_project.id}/packages/generic") }
end
end
+
+ describe '#candidates_order_params' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { candidates_order_params(params) }
+
+ where(:input, :order_by, :order_by_type, :sort) do
+ '' | nil | nil | nil
+ 'created_at' | 'created_at' | 'column' | nil
+ 'created_at ASC' | 'created_at' | 'column' | 'ASC'
+ 'metrics.something' | 'something' | 'metric' | nil
+ 'metrics.something asc' | 'something' | 'metric' | 'asc'
+ 'metrics.something.blah asc' | 'something' | 'metric' | 'asc'
+ 'params.something ASC' | nil | nil | 'ASC'
+ 'metadata.something ASC' | nil | nil | 'ASC'
+ end
+ with_them do
+ let(:params) { { order_by: input } }
+
+ it 'returns the expected order parameters' do
+ is_expected.to include({ order_by: order_by, order_by_type: order_by_type, sort: sort })
+ end
+ end
+ end
end
diff --git a/spec/lib/backup/database_model_spec.rb b/spec/lib/backup/database_model_spec.rb
new file mode 100644
index 00000000000..5758ad2c1aa
--- /dev/null
+++ b/spec/lib/backup/database_model_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature_category: :backup_restore do
+ let(:gitlab_database_name) { 'main' }
+
+ describe '#connection' do
+ subject { described_class.new(gitlab_database_name).connection }
+
+ it 'is an instance of the ActiveRecord::Base connection class' do
+ expect(subject).to be_a(ActiveRecord::Base.connection.class) # rubocop:disable Database/MultipleDatabases
+ end
+ end
+
+ describe '#config' do
+ let(:application_config) do
+ {
+ adapter: 'postgresql',
+ host: 'some_host',
+ port: '5432'
+ }
+ end
+
+ subject { described_class.new(gitlab_database_name).config }
+
+ before do
+ allow(
+ Gitlab::Database.database_base_models_with_gitlab_shared[gitlab_database_name].connection_db_config
+ ).to receive(:configuration_hash).and_return(application_config)
+ end
+
+ context 'when no GITLAB_BACKUP_PG* variables are set' do
+ it 'ActiveRecord backup configuration equals the application configuration' do
+ expect(subject[:activerecord]).to eq(application_config)
+ end
+
+ it 'PostgreSQL ENV is derived from the application configuration' do
+ expect(subject[:pg_env]).to eq(
+ {
+ 'PGHOST' => application_config[:host],
+ 'PGPORT' => application_config[:port]
+ }
+ )
+ end
+ end
+
+ context 'when GITLAB_BACKUP_PG* variables are set' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:env_variable, :overridden_value) do
+ 'GITLAB_BACKUP_PGHOST' | 'test.invalid.'
+ 'GITLAB_BACKUP_PGUSER' | 'some_user'
+ 'GITLAB_BACKUP_PGPORT' | '1543'
+ 'GITLAB_BACKUP_PGPASSWORD' | 'secret'
+ 'GITLAB_BACKUP_PGSSLMODE' | 'allow'
+ 'GITLAB_BACKUP_PGSSLKEY' | 'some_key'
+ 'GITLAB_BACKUP_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_BACKUP_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_BACKUP_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_BACKUP_PGSSLCOMPRESSION' | '1'
+ end
+
+ with_them do
+ let(:pg_env) { env_variable[/GITLAB_BACKUP_(\w+)/, 1] }
+ let(:active_record_key) { described_class::SUPPORTED_OVERRIDES.invert[pg_env] }
+
+ before do
+ stub_env(env_variable, overridden_value)
+ end
+
+ it 'ActiveRecord backup configuration overrides application configuration' do
+ expect(subject[:activerecord]).to eq(application_config.merge(active_record_key => overridden_value))
+ end
+
+ it 'PostgreSQL ENV overrides application configuration' do
+ expect(subject[:pg_env]).to include({ pg_env => overridden_value })
+ end
+ end
+ end
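+
+ # Illustrative mapping (an assumption drawn from the expectations above, not
+ # part of the upstream change): with GITLAB_BACKUP_PGHOST set to 'test.invalid.',
+ #   config[:activerecord][:host] # => 'test.invalid.'
+ #   config[:pg_env]['PGHOST']    # => 'test.invalid.'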
+ end
+end
diff --git a/spec/lib/backup/database_spec.rb b/spec/lib/backup/database_spec.rb
index 61e6c59a1a5..2f14b403576 100644
--- a/spec/lib/backup/database_spec.rb
+++ b/spec/lib/backup/database_spec.rb
@@ -2,13 +2,7 @@
require 'spec_helper'
-RSpec.configure do |rspec|
- rspec.expect_with :rspec do |c|
- c.max_formatted_output_length = nil
- end
-end
-
-RSpec.describe Backup::Database, feature_category: :backup_restore do
+RSpec.describe Backup::Database, :reestablished_active_record_base, feature_category: :backup_restore do
let(:progress) { StringIO.new }
let(:output) { progress.string }
let(:one_database_configured?) { base_models_for_backup.one? }
@@ -37,13 +31,6 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do
subject { described_class.new(progress, force: force) }
- before do
- base_models_for_backup.each do |_, base_model|
- base_model.connection.rollback_transaction unless base_model.connection.open_transactions.zero?
- allow(base_model.connection).to receive(:execute).and_call_original
- end
- end
-
it 'creates gzipped database dumps' do
Dir.mktmpdir do |dir|
subject.dump(dir, backup_id)
@@ -62,14 +49,15 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do
it 'uses snapshots' do
Dir.mktmpdir do |dir|
- base_model = Gitlab::Database.database_base_models['main']
- expect(base_model.connection).to receive(:begin_transaction).with(
- isolation: :repeatable_read
- ).and_call_original
- expect(base_model.connection).to receive(:select_value).with(
- "SELECT pg_export_snapshot()"
- ).and_call_original
- expect(base_model.connection).to receive(:rollback_transaction).and_call_original
+ expect_next_instances_of(Backup::DatabaseModel, 2) do |adapter|
+ expect(adapter.connection).to receive(:begin_transaction).with(
+ isolation: :repeatable_read
+ ).and_call_original
+ expect(adapter.connection).to receive(:select_value).with(
+ "SELECT pg_export_snapshot()"
+ ).and_call_original
+ expect(adapter.connection).to receive(:rollback_transaction).and_call_original
+ end
subject.dump(dir, backup_id)
end
@@ -95,7 +83,7 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do
it 'does not use snapshots' do
Dir.mktmpdir do |dir|
- base_model = Gitlab::Database.database_base_models['main']
+ base_model = Backup::DatabaseModel.new('main')
expect(base_model.connection).not_to receive(:begin_transaction).with(
isolation: :repeatable_read
).and_call_original
@@ -111,7 +99,7 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do
describe 'pg_dump arguments' do
let(:snapshot_id) { 'fake_id' }
- let(:pg_args) do
+ let(:default_pg_args) do
args = [
'--clean',
'--if-exists'
@@ -130,24 +118,35 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do
before do
allow(Backup::Dump::Postgres).to receive(:new).and_return(dumper)
allow(dumper).to receive(:dump).with(any_args).and_return(true)
+ end
- base_models_for_backup.each do |_, base_model|
- allow(base_model.connection).to receive(:select_value).with(
- "SELECT pg_export_snapshot()"
- ).and_return(snapshot_id)
+ shared_examples 'pg_dump arguments' do
+ it 'calls Backup::Dump::Postgres with correct pg_dump arguments' do
+ number_of_databases = base_models_for_backup.count
+ if number_of_databases > 1
+ expect_next_instances_of(Backup::DatabaseModel, number_of_databases) do |model|
+ expect(model.connection).to receive(:select_value).with(
+ "SELECT pg_export_snapshot()"
+ ).and_return(snapshot_id)
+ end
+ end
+
+ expect(dumper).to receive(:dump).with(anything, anything, expected_pg_args)
+
+ subject.dump(destination_dir, backup_id)
end
end
- it 'calls Backup::Dump::Postgres with correct pg_dump arguments' do
- expect(dumper).to receive(:dump).with(anything, anything, pg_args)
+ context 'when no PostgreSQL schemas are specified' do
+ let(:expected_pg_args) { default_pg_args }
- subject.dump(destination_dir, backup_id)
+ include_examples 'pg_dump arguments'
end
context 'when a PostgreSQL schema is used' do
let(:schema) { 'gitlab' }
- let(:additional_args) do
- pg_args + ['-n', schema] + Gitlab::Database::EXTRA_SCHEMAS.flat_map do |schema|
+ let(:expected_pg_args) do
+ default_pg_args + ['-n', schema] + Gitlab::Database::EXTRA_SCHEMAS.flat_map do |schema|
['-n', schema.to_s]
end
end
@@ -156,11 +155,7 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do
allow(Gitlab.config.backup).to receive(:pg_schema).and_return(schema)
end
- it 'calls Backup::Dump::Postgres with correct pg_dump arguments' do
- expect(dumper).to receive(:dump).with(anything, anything, additional_args)
-
- subject.dump(destination_dir, backup_id)
- end
+ include_examples 'pg_dump arguments'
end
end
@@ -180,6 +175,25 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do
end
end
end
+
+ context 'when using GITLAB_BACKUP_* environment variables' do
+ before do
+ stub_env('GITLAB_BACKUP_PGHOST', 'test.invalid.')
+ end
+
+ it 'overrides the database.yml configuration' do
+ # Expect an error because we can't connect to test.invalid.
+ expect do
+ Dir.mktmpdir { |dir| subject.dump(dir, backup_id) }
+ end.to raise_error(Backup::DatabaseBackupError)
+
+ expect do
+ ApplicationRecord.connection.select_value('select 1')
+ end.not_to raise_error
+
+ expect(ENV['PGHOST']).to be_nil
+ end
+ end
end
describe '#restore' do
@@ -288,7 +302,7 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do
expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke)
end
- expect(ENV).to receive(:[]=).with('PGHOST', 'test.example.com')
+ expect(ENV).to receive(:merge!).with(hash_including { 'PGHOST' => 'test.example.com' })
expect(ENV).not_to receive(:[]=).with('PGPASSWORD', anything)
subject.restore(backup_dir)
diff --git a/spec/lib/backup/gitaly_backup_spec.rb b/spec/lib/backup/gitaly_backup_spec.rb
index 1105f39124b..6c2656b1c48 100644
--- a/spec/lib/backup/gitaly_backup_spec.rb
+++ b/spec/lib/backup/gitaly_backup_spec.rb
@@ -45,9 +45,9 @@ RSpec.describe Backup::GitalyBackup, feature_category: :backup_restore do
context 'create' do
RSpec.shared_examples 'creates a repository backup' do
it 'creates repository bundles', :aggregate_failures do
- # Add data to the wiki, design repositories, and snippets, so they will be included in the dump.
+ # Add data to the wiki and snippets so they will be included in the dump.
+ # Design repositories already have data through the :project_with_design factory.
create(:wiki_page, container: project)
- create(:design, :with_file, issue: create(:issue, project: project))
project_snippet = create(:project_snippet, :repository, project: project)
personal_snippet = create(:personal_snippet, :repository, author: project.first_owner)
@@ -56,7 +56,7 @@ RSpec.describe Backup::GitalyBackup, feature_category: :backup_restore do
subject.start(:create, destination, backup_id: backup_id)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI)
- subject.enqueue(project, Gitlab::GlRepository::DESIGN)
+ subject.enqueue(project.design_management_repository, Gitlab::GlRepository::DESIGN)
subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET)
subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET)
subject.finish!
@@ -126,13 +126,13 @@ RSpec.describe Backup::GitalyBackup, feature_category: :backup_restore do
end
context 'hashed storage' do
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project_with_design, :repository) }
it_behaves_like 'creates a repository backup'
end
context 'legacy storage' do
- let_it_be(:project) { create(:project, :repository, :legacy_storage) }
+ let_it_be(:project) { create(:project_with_design, :repository, :legacy_storage) }
it_behaves_like 'creates a repository backup'
end
@@ -162,7 +162,7 @@ RSpec.describe Backup::GitalyBackup, feature_category: :backup_restore do
end
context 'restore' do
- let_it_be(:project) { create(:project, :repository, :design_repo) }
+ let_it_be(:project) { create(:project_with_design, :repository) }
let_it_be(:personal_snippet) { create(:personal_snippet, author: project.first_owner) }
let_it_be(:project_snippet) { create(:project_snippet, project: project, author: project.first_owner) }
@@ -189,7 +189,7 @@ RSpec.describe Backup::GitalyBackup, feature_category: :backup_restore do
subject.start(:restore, destination, backup_id: backup_id)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI)
- subject.enqueue(project, Gitlab::GlRepository::DESIGN)
+ subject.enqueue(project.design_management_repository, Gitlab::GlRepository::DESIGN)
subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET)
subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET)
subject.finish!
diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/repositories_spec.rb
index d8794ba68a0..1f3818de4a0 100644
--- a/spec/lib/backup/repositories_spec.rb
+++ b/spec/lib/backup/repositories_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
end
describe '#dump' do
- let_it_be(:projects) { create_list(:project, 5, :repository) }
+ let_it_be(:projects) { create_list(:project_with_design, 5, :repository) }
RSpec.shared_examples 'creates repository bundles' do
it 'calls enqueue for each repository type', :aggregate_failures do
@@ -34,7 +34,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
expect(strategy).to have_received(:start).with(:create, destination, backup_id: backup_id)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:enqueue).with(project_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:finish!)
@@ -42,13 +42,13 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
end
context 'hashed storage' do
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project_with_design, :repository) }
it_behaves_like 'creates repository bundles'
end
context 'legacy storage' do
- let_it_be(:project) { create(:project, :repository, :legacy_storage) }
+ let_it_be(:project) { create(:project_with_design, :repository, :legacy_storage) }
it_behaves_like 'creates repository bundles'
end
@@ -75,15 +75,19 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
create_list(:project, 2, :repository)
create_list(:snippet, 2, :repository)
+ # The number of expected queries is 2 more than control_count
+ # to account for the project.design_management_repository query
+ # issued for each of the 2 projects created here.
expect do
subject.dump(destination, backup_id)
- end.not_to exceed_query_limit(control_count)
+ end.not_to exceed_query_limit(control_count + 2)
end
describe 'storages' do
let(:storages) { %w{default} }
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project_with_design, :repository) }
before do
stub_storage_settings('test_second_storage' => {
@@ -93,7 +97,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
end
it 'calls enqueue for all repositories on the specified storage', :aggregate_failures do
- excluded_project = create(:project, :repository, repository_storage: 'test_second_storage')
+ excluded_project = create(:project_with_design, :repository, repository_storage: 'test_second_storage')
excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
excluded_project_snippet.track_snippet_repository('test_second_storage')
excluded_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
@@ -107,13 +111,13 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:finish!)
end
end
describe 'paths' do
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project_with_design, :repository) }
context 'project path' do
let(:paths) { [project.full_path] }
@@ -131,7 +135,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:finish!)
end
end
@@ -152,14 +156,14 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:finish!)
end
end
end
describe 'skip_paths' do
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project_with_design, :repository) }
let_it_be(:excluded_project) { create(:project, :repository) }
context 'project path' do
@@ -177,7 +181,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
expect(strategy).to have_received(:enqueue).with(included_personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:finish!)
end
end
@@ -197,7 +201,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
expect(strategy).to have_received(:enqueue).with(included_personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:finish!)
end
end
@@ -205,7 +209,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
end
describe '#restore' do
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project_with_design, :repository) }
let_it_be(:personal_snippet) { create(:personal_snippet, :repository, author: project.first_owner) }
let_it_be(:project_snippet) { create(:project_snippet, :repository, project: project, author: project.first_owner) }
@@ -216,7 +220,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: %w[default])
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:enqueue).with(project_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:finish!)
@@ -300,7 +304,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:finish!)
end
end
@@ -322,7 +326,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:finish!)
end
end
@@ -343,7 +347,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:finish!)
end
end
@@ -367,7 +371,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
expect(strategy).to have_received(:enqueue).with(included_personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:finish!)
end
end
@@ -387,7 +391,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
expect(strategy).to have_received(:enqueue).with(included_personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:finish!)
end
end
diff --git a/spec/lib/banzai/filter/code_language_filter_spec.rb b/spec/lib/banzai/filter/code_language_filter_spec.rb
index 25f844ee575..d6be088eaff 100644
--- a/spec/lib/banzai/filter/code_language_filter_spec.rb
+++ b/spec/lib/banzai/filter/code_language_filter_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe Banzai::Filter::CodeLanguageFilter, feature_category: :team_plann
end
end
- context 'when lang is specified' do
+ context 'when lang is specified on `pre`' do
it 'adds data-canonical-lang and removes lang attribute' do
result = filter('<pre lang="ruby"><code>def fun end</code></pre>')
@@ -36,19 +36,39 @@ RSpec.describe Banzai::Filter::CodeLanguageFilter, feature_category: :team_plann
end
end
- context 'when lang has extra params' do
- let(:lang_params) { 'foo-bar-kux' }
- let(:xss_lang) { %(ruby data-meta="foo-bar-kux"&lt;script&gt;alert(1)&lt;/script&gt;) }
+ context 'when lang is specified on `code`' do
+ it 'adds data-canonical-lang to `pre` and removes lang attribute' do
+ result = filter('<pre><code lang="ruby">def fun end</code></pre>')
+
+ expect(result.to_html.delete("\n"))
+ .to eq('<pre data-canonical-lang="ruby"><code>def fun end</code></pre>')
+ end
+ end
- it 'includes data-lang-params tag with extra information and removes data-meta' do
- expected_result = <<~HTML
+ context 'when lang has extra params' do
+ let_it_be(:lang_params) { 'foo-bar-kux' }
+ let_it_be(:xss_lang) { %(ruby data-meta="foo-bar-kux"&lt;script&gt;alert(1)&lt;/script&gt;) }
+ let_it_be(:expected_result) do
+ <<~HTML
<pre data-canonical-lang="ruby" data-lang-params="#{lang_params}">
<code>This is a test</code></pre>
HTML
+ end
+
+ context 'when lang is specified on `pre`' do
+ it 'includes data-lang-params tag with extra information and removes data-meta' do
+ result = filter(%(<pre lang="ruby" data-meta="#{lang_params}"><code>This is a test</code></pre>))
+
+ expect(result.to_html.delete("\n")).to eq(expected_result.delete("\n"))
+ end
+ end
- result = filter(%(<pre lang="ruby" data-meta="#{lang_params}"><code>This is a test</code></pre>))
+ context 'when lang is specified on `code`' do
+ it 'includes data-lang-params tag with extra information and removes data-meta' do
+ result = filter(%(<pre><code lang="ruby" data-meta="#{lang_params}">This is a test</code></pre>))
- expect(result.to_html.delete("\n")).to eq(expected_result.delete("\n"))
+ expect(result.to_html.delete("\n")).to eq(expected_result.delete("\n"))
+ end
end
include_examples 'XSS prevention', 'ruby'
diff --git a/spec/lib/banzai/filter/inline_diff_filter_spec.rb b/spec/lib/banzai/filter/inline_diff_filter_spec.rb
index 1388a9053d9..89ee17837e0 100644
--- a/spec/lib/banzai/filter/inline_diff_filter_spec.rb
+++ b/spec/lib/banzai/filter/inline_diff_filter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Banzai::Filter::InlineDiffFilter do
+RSpec.describe Banzai::Filter::InlineDiffFilter, feature_category: :source_code_management do
include FilterSpecHelper
it 'adds inline diff span tags for deletions when using square brackets' do
diff --git a/spec/lib/bitbucket/representation/pull_request_spec.rb b/spec/lib/bitbucket/representation/pull_request_spec.rb
index f39222805d0..9ebf59ecf82 100644
--- a/spec/lib/bitbucket/representation/pull_request_spec.rb
+++ b/spec/lib/bitbucket/representation/pull_request_spec.rb
@@ -56,4 +56,55 @@ RSpec.describe Bitbucket::Representation::PullRequest, feature_category: :import
describe '#updated_at' do
it { expect(described_class.new('updated_on' => '2023-01-01').updated_at).to eq('2023-01-01') }
end
+
+ describe '#merge_commit_sha' do
+ it { expect(described_class.new('merge_commit' => { 'hash' => 'SHA' }).merge_commit_sha).to eq('SHA') }
+ it { expect(described_class.new({}).merge_commit_sha).to be_nil }
+ end
+
+ describe '#to_hash' do
+ it do
+ raw = {
+ 'id' => 11,
+ 'description' => 'description',
+ 'author' => { 'nickname' => 'user-1' },
+ 'state' => 'MERGED',
+ 'created_on' => 'created-at',
+ 'updated_on' => 'updated-at',
+ 'title' => 'title',
+ 'source' => {
+ 'branch' => { 'name' => 'source-branch-name' },
+ 'commit' => { 'hash' => 'source-commit-hash' }
+ },
+ 'destination' => {
+ 'branch' => { 'name' => 'destination-branch-name' },
+ 'commit' => { 'hash' => 'destination-commit-hash' }
+ },
+ 'merge_commit' => { 'hash' => 'merge-commit-hash' },
+ 'reviewers' => [
+ {
+ 'username' => 'user-2'
+ }
+ ]
+ }
+
+ expected_hash = {
+ author: 'user-1',
+ created_at: 'created-at',
+ description: 'description',
+ iid: 11,
+ source_branch_name: 'source-branch-name',
+ source_branch_sha: 'source-commit-hash',
+ merge_commit_sha: 'merge-commit-hash',
+ state: 'merged',
+ target_branch_name: 'destination-branch-name',
+ target_branch_sha: 'destination-commit-hash',
+ title: 'title',
+ updated_at: 'updated-at',
+ reviewers: ['user-2']
+ }
+
+ expect(described_class.new(raw).to_hash).to eq(expected_hash)
+ end
+ end
end
diff --git a/spec/lib/bulk_imports/common/graphql/get_members_query_spec.rb b/spec/lib/bulk_imports/common/graphql/get_members_query_spec.rb
index bcc2d6fd5ed..4a97e092141 100644
--- a/spec/lib/bulk_imports/common/graphql/get_members_query_spec.rb
+++ b/spec/lib/bulk_imports/common/graphql/get_members_query_spec.rb
@@ -41,7 +41,17 @@ RSpec.describe BulkImports::Common::Graphql::GetMembersQuery, feature_category:
it 'queries group & group members' do
expect(query.to_s).to include('group')
expect(query.to_s).to include('groupMembers')
- expect(query.to_s).to include('SHARED_FROM_GROUPS')
+ expect(query.to_s).to include('DIRECT INHERITED')
+ end
+
+ context "when source version is past 14.7.0" do
+ before do
+ entity.bulk_import.update!(source_version: "14.8.0")
+ end
+
+ it 'includes SHARED_FROM_GROUPS' do
+ expect(query.to_s).to include('DIRECT INHERITED SHARED_FROM_GROUPS')
+ end
end
end
@@ -51,7 +61,17 @@ RSpec.describe BulkImports::Common::Graphql::GetMembersQuery, feature_category:
it 'queries project & project members' do
expect(query.to_s).to include('project')
expect(query.to_s).to include('projectMembers')
- expect(query.to_s).to include('INVITED_GROUPS SHARED_INTO_ANCESTORS')
+ expect(query.to_s).to include('DIRECT INHERITED INVITED_GROUPS')
+ end
+
+ context "when source version is at least 16.0.0" do
+ before do
+ entity.bulk_import.update!(source_version: "16.0.0")
+ end
+
+ it 'includes SHARED_INTO_ANCESTORS' do
+ expect(query.to_s).to include('DIRECT INHERITED INVITED_GROUPS SHARED_INTO_ANCESTORS')
+ end
end
end
end
diff --git a/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb b/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb
index dc17dc594a8..8ca74565788 100644
--- a/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb
@@ -2,9 +2,9 @@
require 'spec_helper'
-RSpec.describe BulkImports::Common::Pipelines::EntityFinisher do
+RSpec.describe BulkImports::Common::Pipelines::EntityFinisher, feature_category: :importers do
it 'updates the entity status to finished' do
- entity = create(:bulk_import_entity, :started)
+ entity = create(:bulk_import_entity, :project_entity, :started)
pipeline_tracker = create(:bulk_import_tracker, entity: entity)
context = BulkImports::Pipeline::Context.new(pipeline_tracker)
subject = described_class.new(context)
@@ -24,7 +24,7 @@ RSpec.describe BulkImports::Common::Pipelines::EntityFinisher do
)
end
- expect(context.portable).to receive(:try).with(:after_import)
+ expect(BulkImports::FinishProjectImportWorker).to receive(:perform_async).with(entity.project_id)
expect { subject.run }
.to change(entity, :status_name).to(:finished)
diff --git a/spec/lib/bulk_imports/common/transformers/member_attributes_transformer_spec.rb b/spec/lib/bulk_imports/common/transformers/member_attributes_transformer_spec.rb
index 1c9ed4f0f97..4565de32c70 100644
--- a/spec/lib/bulk_imports/common/transformers/member_attributes_transformer_spec.rb
+++ b/spec/lib/bulk_imports/common/transformers/member_attributes_transformer_spec.rb
@@ -85,14 +85,15 @@ RSpec.describe BulkImports::Common::Transformers::MemberAttributesTransformer, f
end
end
- describe 'source user id caching' do
+ describe 'source user id and username caching' do
context 'when user gid is present' do
- it 'caches source user id' do
+ it 'caches source user id and username' do
gid = 'gid://gitlab/User/7'
data = member_data(email: user.email, gid: gid)
expect_next_instance_of(BulkImports::UsersMapper) do |mapper|
expect(mapper).to receive(:cache_source_user_id).with('7', user.id)
+ expect(mapper).to receive(:cache_source_username).with('source_username', user.username)
end
subject.transform(context, data)
@@ -108,6 +109,35 @@ RSpec.describe BulkImports::Common::Transformers::MemberAttributesTransformer, f
subject.transform(context, data)
end
end
+
+ context 'when username is nil' do
+ it 'caches source user id only' do
+ gid = 'gid://gitlab/User/7'
+ data = nil_username_member_data(email: user.email, gid: gid)
+
+ expect_next_instance_of(BulkImports::UsersMapper) do |mapper|
+ expect(mapper).to receive(:cache_source_user_id).with('7', user.id)
+ expect(mapper).not_to receive(:cache_source_username)
+ end
+
+ subject.transform(context, data)
+ end
+ end
+
+ context 'when source username matches destination username' do
+ it 'caches source user id only' do
+ gid = 'gid://gitlab/User/7'
+ data = member_data(email: user.email, gid: gid)
+ data["user"]["username"] = user.username
+
+ expect_next_instance_of(BulkImports::UsersMapper) do |mapper|
+ expect(mapper).to receive(:cache_source_user_id).with('7', user.id)
+ expect(mapper).not_to receive(:cache_source_username)
+ end
+
+ subject.transform(context, data)
+ end
+ end
end
end
end
@@ -136,7 +166,24 @@ RSpec.describe BulkImports::Common::Transformers::MemberAttributesTransformer, f
},
'user' => {
'user_gid' => gid,
- 'public_email' => email
+ 'public_email' => email,
+ 'username' => 'source_username'
+ }
+ }
+ end
+
+ def nil_username_member_data(email: '', gid: nil, access_level: 30)
+ {
+ 'created_at' => '2020-01-01T00:00:00Z',
+ 'updated_at' => '2020-01-01T00:00:00Z',
+ 'expires_at' => nil,
+ 'access_level' => {
+ 'integer_value' => access_level
+ },
+ 'user' => {
+ 'user_gid' => gid,
+ 'public_email' => email,
+ 'username' => nil
}
}
end
diff --git a/spec/lib/bulk_imports/file_downloads/validations_spec.rb b/spec/lib/bulk_imports/file_downloads/validations_spec.rb
index 85f45c2a8f0..95f3f78310f 100644
--- a/spec/lib/bulk_imports/file_downloads/validations_spec.rb
+++ b/spec/lib/bulk_imports/file_downloads/validations_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::FileDownloads::Validations do
+RSpec.describe BulkImports::FileDownloads::Validations, feature_category: :importers do
let(:dummy_instance) { dummy_class.new }
let(:dummy_class) do
Class.new do
diff --git a/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb b/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb
index 7d1f9ae5da0..87b64ef198e 100644
--- a/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb
+++ b/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb
@@ -84,6 +84,33 @@ RSpec.describe BulkImports::Groups::Loaders::GroupLoader, feature_category: :imp
include_examples 'calls Group Create Service to create a new group'
end
+
+ context 'when user does not have 2FA enabled' do
+ before do
+ allow(user).to receive(:two_factor_enabled?).and_return(false)
+ end
+
+ context 'when require_two_factor_authentication is not passed' do
+ include_examples 'calls Group Create Service to create a new group'
+ end
+
+ context 'when require_two_factor_authentication is false' do
+ let(:data) { { 'require_two_factor_authentication' => false, 'path' => 'test' } }
+
+ include_examples 'calls Group Create Service to create a new group'
+ end
+
+ context 'when require_two_factor_authentication is true' do
+ let(:data) { { 'require_two_factor_authentication' => true, 'path' => 'test' } }
+
+ it 'does not create new group' do
+ expect(::Groups::CreateService).not_to receive(:new)
+
+ expect { subject.load(context, data) }
+ .to raise_error(described_class::GroupCreationError, 'User requires Two-Factor Authentication')
+ end
+ end
+ end
end
context 'when user cannot create group' do
diff --git a/spec/lib/bulk_imports/network_error_spec.rb b/spec/lib/bulk_imports/network_error_spec.rb
index 54d6554df96..d5e2b739c8f 100644
--- a/spec/lib/bulk_imports/network_error_spec.rb
+++ b/spec/lib/bulk_imports/network_error_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::NetworkError, :clean_gitlab_redis_cache do
+RSpec.describe BulkImports::NetworkError, :clean_gitlab_redis_cache, feature_category: :importers do
let(:tracker) { double(id: 1, stage: 2, entity: double(id: 3)) }
describe '.new' do
@@ -65,10 +65,32 @@ RSpec.describe BulkImports::NetworkError, :clean_gitlab_redis_cache do
end
describe '#retry_delay' do
- it 'returns the default value when there is not a rate limit error' do
- exception = described_class.new('foo')
+ context 'when the exception is not a rate limit error' do
+ let(:exception) { described_class.new('Error!') }
- expect(exception.retry_delay).to eq(described_class::DEFAULT_RETRY_DELAY_SECONDS.seconds)
+ it 'returns the default value' do
+ expect(exception.retry_delay).to eq(described_class::DEFAULT_RETRY_DELAY_SECONDS.seconds)
+ end
+
+ context 'when the exception is a decompression error' do
+ before do
+ allow(exception).to receive(:cause).and_return(Zlib::Error.new('Error!'))
+ end
+
+ it 'returns the exception delay value' do
+ expect(exception.retry_delay).to eq(60.seconds)
+ end
+ end
+
+ context 'when the exception is a no space left error' do
+ before do
+ allow(exception).to receive(:cause).and_return(Errno::ENOSPC.new('Error!'))
+ end
+
+ it 'returns the exception delay value' do
+ expect(exception.retry_delay).to eq(120.seconds)
+ end
+ end
end
context 'when the exception is a rate limit error' do
diff --git a/spec/lib/bulk_imports/pipeline/runner_spec.rb b/spec/lib/bulk_imports/pipeline/runner_spec.rb
index e66f2d26911..2f54ab111c8 100644
--- a/spec/lib/bulk_imports/pipeline/runner_spec.rb
+++ b/spec/lib/bulk_imports/pipeline/runner_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::Pipeline::Runner do
+RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
let(:extractor) do
Class.new do
def initialize(options = {}); end
diff --git a/spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb
index a0789522ea8..fd13c10d61e 100644
--- a/spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb
@@ -164,6 +164,40 @@ RSpec.describe BulkImports::Projects::Pipelines::IssuesPipeline do
expect(note.award_emoji.first.name).to eq('clapper')
end
end
+
+ context "when importing an issue with one award emoji and other relations with one item" do
+ let(:issue_attributes) do
+ {
+ "notes" => [
+ {
+ 'note' => 'Description changed',
+ 'author_id' => 22,
+ 'author' => {
+ 'name' => 'User 22'
+ },
+ 'updated_at' => '2016-06-14T15:02:47.770Z'
+ }
+ ],
+ 'award_emoji' => [
+ {
+ 'name' => 'thumbsup',
+ 'user_id' => 22
+ }
+ ]
+ }
+ end
+
+ it 'saves the note and award emoji' do
+ issue = project.issues.last
+ notes = issue.notes
+
+ aggregate_failures do
+ expect(notes.count).to eq 1
+ expect(notes[0].note).to include("Description changed")
+ expect(issue.award_emoji.first.name).to eq "thumbsup"
+ end
+ end
+ end
end
end
end
diff --git a/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb
index 3a808851f81..af8bce47c3d 100644
--- a/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat
create(
:merge_request,
source_project: project,
- description: 'https://my.gitlab.com/source/full/path/-/merge_requests/1'
+ description: 'https://my.gitlab.com/source/full/path/-/merge_requests/1 @source_username? @bob, @alice!'
)
end
@@ -33,7 +33,7 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat
:note,
project: project,
noteable: issue,
- note: 'https://my.gitlab.com/source/full/path/-/issues/1'
+ note: 'https://my.gitlab.com/source/full/path/-/issues/1 @older_username, not_a@username, and @old_username.'
)
end
@@ -42,7 +42,16 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat
:note,
project: project,
noteable: mr,
- note: 'https://my.gitlab.com/source/full/path/-/merge_requests/1'
+ note: 'https://my.gitlab.com/source/full/path/-/merge_requests/1 @same_username'
+ )
+ end
+
+ let(:interchanged_usernames) do
+ create(
+ :note,
+ project: project,
+ noteable: mr,
+ note: '@manuelgrabowski-admin, @boaty-mc-boatface'
)
end
@@ -53,19 +62,48 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat
project: project,
system: true,
noteable: issue,
- note: "mentioned in merge request !#{mr.iid}",
+ note: "mentioned in merge request !#{mr.iid} created by @old_username",
note_html: old_note_html
)
end
+ let(:username_system_note) do
+ create(
+ :note,
+ project: project,
+ system: true,
+ noteable: issue,
+ note: "mentioned in merge request created by @source_username.",
+ note_html: 'empty'
+ )
+ end
+
subject(:pipeline) { described_class.new(context) }
before do
project.add_owner(user)
+
+ allow(Gitlab::Cache::Import::Caching)
+ .to receive(:values_from_hash)
+ .and_return({
+ 'old_username' => 'new_username',
+ 'older_username' => 'newer_username',
+ 'source_username' => 'destination_username',
+ 'bob' => 'alice-gdk',
+ 'alice' => 'bob-gdk',
+ 'manuelgrabowski' => 'manuelgrabowski-admin',
+ 'manuelgrabowski-admin' => 'manuelgrabowski',
+ 'boaty-mc-boatface' => 'boatymcboatface',
+ 'boatymcboatface' => 'boaty-mc-boatface'
+ })
end
def create_project_data
- [issue, mr, issue_note, mr_note, system_note]
+ [issue, mr, issue_note, mr_note, system_note, username_system_note]
+ end
+
+ def create_username_project_data
+ [username_system_note]
end
describe '#extract' do
@@ -75,11 +113,14 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat
extracted_data = subject.extract(context)
expect(extracted_data).to be_instance_of(BulkImports::Pipeline::ExtractedData)
- expect(extracted_data.data).to contain_exactly(issue_note, mr, issue, mr_note)
+ expect(extracted_data.data).to contain_exactly(issue, mr, issue_note, system_note, username_system_note, mr_note)
expect(system_note.note_html).not_to eq(old_note_html)
expect(system_note.note_html)
- .to include("class=\"gfm gfm-merge_request\">!#{mr.iid}</a></p>")
+ .to include("class=\"gfm gfm-merge_request\">!#{mr.iid}</a>")
.and include(project.full_path.to_s)
+ .and include("@old_username")
+ expect(username_system_note.note_html)
+ .to include("@source_username")
end
context 'when object body is nil' do
@@ -94,9 +135,13 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat
end
describe '#transform' do
- it 'updates matching urls with new ones' do
+ it 'updates matching urls and usernames with new ones' do
transformed_mr = subject.transform(context, mr)
transformed_note = subject.transform(context, mr_note)
+ transformed_issue = subject.transform(context, issue)
+ transformed_issue_note = subject.transform(context, issue_note)
+ transformed_system_note = subject.transform(context, system_note)
+ transformed_username_system_note = subject.transform(context, username_system_note)
expected_url = URI('')
expected_url.scheme = ::Gitlab.config.gitlab.https ? 'https' : 'http'
@@ -104,11 +149,44 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat
expected_url.port = ::Gitlab.config.gitlab.port
expected_url.path = "/#{project.full_path}/-/merge_requests/#{mr.iid}"
- expect(transformed_mr.description).to eq(expected_url.to_s)
- expect(transformed_note.note).to eq(expected_url.to_s)
+ expect(transformed_issue_note.note).not_to include("@older_username")
+ expect(transformed_mr.description).not_to include("@source_username")
+ expect(transformed_system_note.note).not_to include("@old_username")
+ expect(transformed_username_system_note.note).not_to include("@source_username")
+
+ expect(transformed_issue.description).to eq('http://localhost:80/namespace1/project-1/-/issues/1')
+ expect(transformed_mr.description).to eq("#{expected_url} @destination_username? @alice-gdk, @bob-gdk!")
+ expect(transformed_note.note).to eq("#{expected_url} @same_username")
+ expect(transformed_issue_note.note).to include("@newer_username, not_a@username, and @new_username.")
+ expect(transformed_system_note.note).to eq("mentioned in merge request !#{mr.iid} created by @new_username")
+ expect(transformed_username_system_note.note).to include("@destination_username.")
end
- context 'when object does not have reference' do
+ it 'handles situations where old usernames are substrings of new usernames' do
+ transformed_mr = subject.transform(context, mr)
+
+ expect(transformed_mr.description).to include("@alice-gdk")
+ expect(transformed_mr.description).not_to include("@bob-gdk-gdk")
+ end
+
+ it 'handles situations where old and new usernames are interchanged' do
+ # e.g.
+ # |------------------------|-------------------------|
+ # | old_username | new_username |
+ # |------------------------|-------------------------|
+ # | @manuelgrabowski-admin | @manuelgrabowski |
+ # | @manuelgrabowski | @manuelgrabowski-admin |
+ # |------------------------|-------------------------|
+
+ transformed_interchanged_usernames = subject.transform(context, interchanged_usernames)
+
+ expect(transformed_interchanged_usernames.note).to include("@manuelgrabowski")
+ expect(transformed_interchanged_usernames.note).to include("@boatymcboatface")
+ expect(transformed_interchanged_usernames.note).not_to include("@manuelgrabowski-admin")
+ expect(transformed_interchanged_usernames.note).not_to include("@boaty-mc-boatface")
+ end
+
+ context 'when object does not have reference or username' do
it 'returns object unchanged' do
issue.update!(description: 'foo')
@@ -118,35 +196,35 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat
end
end
- context 'when there are not matched urls' do
- let(:url) { 'https://my.gitlab.com/another/project/path/-/issues/1' }
+ context 'when there are no matching urls or usernames' do
+ let(:description) { 'https://my.gitlab.com/another/project/path/-/issues/1 @random_username' }
shared_examples 'returns object unchanged' do
it 'returns object unchanged' do
- issue.update!(description: url)
+ issue.update!(description: description)
transformed_issue = subject.transform(context, issue)
- expect(transformed_issue.description).to eq(url)
+ expect(transformed_issue.description).to eq(description)
end
end
include_examples 'returns object unchanged'
context 'when url path does not start with source full path' do
- let(:url) { 'https://my.gitlab.com/another/source/full/path/-/issues/1' }
+ let(:description) { 'https://my.gitlab.com/another/source/full/path/-/issues/1' }
include_examples 'returns object unchanged'
end
context 'when host does not match and url path starts with source full path' do
- let(:url) { 'https://another.gitlab.com/source/full/path/-/issues/1' }
+ let(:description) { 'https://another.gitlab.com/source/full/path/-/issues/1' }
include_examples 'returns object unchanged'
end
context 'when url does not match at all' do
- let(:url) { 'https://website.example/foo/bar' }
+ let(:description) { 'https://website.example/foo/bar' }
include_examples 'returns object unchanged'
end
@@ -156,13 +234,22 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat
describe '#load' do
it 'saves the object when object body changed' do
transformed_issue = subject.transform(context, issue)
- transformed_note = subject.transform(context, issue_note)
+ transformed_note = subject.transform(context, mr_note)
+ transformed_mr = subject.transform(context, mr)
+ transformed_issue_note = subject.transform(context, issue_note)
+ transformed_system_note = subject.transform(context, system_note)
expect(transformed_issue).to receive(:save!)
expect(transformed_note).to receive(:save!)
+ expect(transformed_mr).to receive(:save!)
+ expect(transformed_issue_note).to receive(:save!)
+ expect(transformed_system_note).to receive(:save!)
subject.load(context, transformed_issue)
subject.load(context, transformed_note)
+ subject.load(context, transformed_mr)
+ subject.load(context, transformed_issue_note)
+ subject.load(context, transformed_system_note)
end
context 'when object body is not changed' do
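The substring and interchanged-username examples are the subtle part of this pipeline change. Both are handled naturally by substituting over the original text in a single pass, along these lines (an assumed sketch, not the pipeline's code; the mapping mirrors the stubbed cache above):

# Single-pass replacement: longer names are tried before their prefixes, and
# already-substituted values are never rewritten again, so interchanged pairs
# and substring collisions both come out right.
def swap_usernames(text, mapping)
  pattern = Regexp.union(mapping.keys.sort_by(&:length).reverse.map { |name| "@#{name}" })

  text.gsub(pattern) { |mention| "@#{mapping[mention.delete_prefix('@')]}" }
end

swap_usernames('@manuelgrabowski-admin, @boaty-mc-boatface',
               'manuelgrabowski-admin' => 'manuelgrabowski',
               'boaty-mc-boatface' => 'boatymcboatface')
# => "@manuelgrabowski, @boatymcboatface"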
diff --git a/spec/lib/bulk_imports/users_mapper_spec.rb b/spec/lib/bulk_imports/users_mapper_spec.rb
index e6357319d05..dc2beb42080 100644
--- a/spec/lib/bulk_imports/users_mapper_spec.rb
+++ b/spec/lib/bulk_imports/users_mapper_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::UsersMapper do
+RSpec.describe BulkImports::UsersMapper, feature_category: :importers do
let_it_be(:user) { create(:user) }
let_it_be(:import) { create(:bulk_import, user: user) }
let_it_be(:entity) { create(:bulk_import_entity, bulk_import: import) }
@@ -34,6 +34,22 @@ RSpec.describe BulkImports::UsersMapper do
end
end
+ describe '#map_usernames' do
+ context 'when value for specified key exists' do
+ it 'returns a map of source & destination usernames from redis' do
+ allow(Gitlab::Cache::Import::Caching).to receive(:values_from_hash).and_return({ "source_username" => "destination_username" })
+
+ expect(subject.map_usernames).to eq({ "source_username" => "destination_username" })
+ end
+ end
+
+ context 'when value for specified key does not exist' do
+ it 'returns nil' do
+ expect(subject.map_usernames[:non_existent_key]).to be_nil
+ end
+ end
+ end
+
describe '#default_user_id' do
it 'returns current user id' do
expect(subject.default_user_id).to eq(user.id)
@@ -65,4 +81,12 @@ RSpec.describe BulkImports::UsersMapper do
subject.cache_source_user_id(1, 2)
end
end
+
+ describe '#cache_source_username' do
+ it 'caches provided source & destination usernames in redis' do
+ expect(Gitlab::Cache::Import::Caching).to receive(:hash_add).with("bulk_imports/#{import.id}/#{entity.id}/source_usernames", 'source', 'destination')
+
+ subject.cache_source_username('source', 'destination')
+ end
+ end
end
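Taken together, the new examples describe a thin wrapper over the import cache, roughly as below (assumed shape only; the cache key for #map_usernames is presumed to match the one asserted for #cache_source_username):

# Hypothetical sketch of the two methods exercised above.
def cache_source_username(source, destination)
  Gitlab::Cache::Import::Caching.hash_add(source_usernames_cache_key, source, destination)
end

def map_usernames
  Gitlab::Cache::Import::Caching.values_from_hash(source_usernames_cache_key)
end

def source_usernames_cache_key
  "bulk_imports/#{import.id}/#{entity.id}/source_usernames"
end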
diff --git a/spec/lib/click_house/bind_index_manager_spec.rb b/spec/lib/click_house/bind_index_manager_spec.rb
deleted file mode 100644
index 1c659017c63..00000000000
--- a/spec/lib/click_house/bind_index_manager_spec.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ClickHouse::BindIndexManager, feature_category: :database do
- describe '#next_bind_str' do
- context 'when initialized without a start index' do
- let(:bind_manager) { described_class.new }
-
- it 'starts from index 1 by default' do
- expect(bind_manager.next_bind_str).to eq('$1')
- end
-
- it 'increments the bind string on subsequent calls' do
- bind_manager.next_bind_str
- expect(bind_manager.next_bind_str).to eq('$2')
- end
- end
-
- context 'when initialized with a start index' do
- let(:bind_manager) { described_class.new(2) }
-
- it 'starts from the given index' do
- expect(bind_manager.next_bind_str).to eq('$2')
- end
-
- it 'increments the bind string on subsequent calls' do
- bind_manager.next_bind_str
- expect(bind_manager.next_bind_str).to eq('$3')
- end
- end
- end
-end
diff --git a/spec/lib/click_house/query_builder_spec.rb b/spec/lib/click_house/query_builder_spec.rb
index 9e3f1118eeb..f5e1d53e7c1 100644
--- a/spec/lib/click_house/query_builder_spec.rb
+++ b/spec/lib/click_house/query_builder_spec.rb
@@ -288,7 +288,8 @@ RSpec.describe ClickHouse::QueryBuilder, feature_category: :database do
describe '#to_redacted_sql' do
it 'calls ::ClickHouse::Redactor correctly' do
- expect(::ClickHouse::Redactor).to receive(:redact).with(builder)
+ expect(::ClickHouse::Redactor).to receive(:redact).with(builder,
+ an_instance_of(ClickHouse::Client::BindIndexManager))
builder.to_redacted_sql
end
@@ -331,4 +332,27 @@ RSpec.describe ClickHouse::QueryBuilder, feature_category: :database do
expect(sql).to eq(expected_sql)
end
end
+
+ context 'when combining with a raw query' do
+ it 'correctly generates the SQL query' do
+ raw_query = 'SELECT * FROM issues WHERE title = {title:String} AND id IN ({query:Subquery})'
+ placeholders = {
+ title: "'test'",
+ query: builder.select(:id).where(column1: 'value1', column2: 'value2')
+ }
+
+ query = ClickHouse::Client::Query.new(raw_query: raw_query, placeholders: placeholders)
+ expected_sql = "SELECT * FROM isues WHERE title = {title:String} AND id IN (SELECT \"test_table\".\"id\" " \
+ "FROM \"test_table\" WHERE \"test_table\".\"column1\" = 'value1' AND " \
+ "\"test_table\".\"column2\" = 'value2')"
+
+ expect(query.to_sql).to eq(expected_sql)
+
+ expected_redacted_sql = "SELECT * FROM isues WHERE title = $1 AND id IN (SELECT \"test_table\".\"id\" " \
+ "FROM \"test_table\" WHERE \"test_table\".\"column1\" = $2 AND " \
+ "\"test_table\".\"column2\" = $3)"
+
+ expect(query.to_redacted_sql).to eq(expected_redacted_sql)
+ end
+ end
end
diff --git a/spec/lib/click_house/record_sync_context_spec.rb b/spec/lib/click_house/record_sync_context_spec.rb
new file mode 100644
index 00000000000..7873796cd9c
--- /dev/null
+++ b/spec/lib/click_house/record_sync_context_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClickHouse::RecordSyncContext, feature_category: :value_stream_management do
+ let(:records) { [Issue.new(id: 1), Issue.new(id: 2), Issue.new(id: 3), Issue.new(id: 4)] }
+
+ subject(:sync_context) { described_class.new(last_record_id: 0, max_records_per_batch: 3) }
+
+ it 'allows processing 3 records per batch' do
+ records.take(3).each do |record|
+ sync_context.last_processed_id = record.id
+ end
+
+ expect(sync_context).to be_record_limit_reached
+ expect(sync_context.last_processed_id).to eq(3)
+
+ expect { sync_context.new_batch! }.to change { sync_context.record_count_in_current_batch }.from(3).to(0)
+
+ expect(sync_context).not_to be_record_limit_reached
+
+ records.take(3).each do |record|
+ sync_context.last_processed_id = record.id
+ end
+
+ expect(sync_context).to be_record_limit_reached
+ end
+
+ it 'sets the no more records flag' do
+ expect { sync_context.no_more_records! }.to change { sync_context.no_more_records? }.from(false).to(true)
+ end
+end
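For reference, the behaviour asserted in this new spec can be satisfied by a small value object along these lines (a hypothetical stand-in written against the spec, not the shipped class):

# Minimal stand-in that passes the expectations above.
class RecordSyncContext
  attr_reader :last_processed_id, :record_count_in_current_batch

  def initialize(last_record_id:, max_records_per_batch:)
    @last_processed_id = last_record_id
    @max_records_per_batch = max_records_per_batch
    @record_count_in_current_batch = 0
    @no_more_records = false
  end

  def last_processed_id=(id)
    @last_processed_id = id
    @record_count_in_current_batch += 1
  end

  def record_limit_reached?
    @record_count_in_current_batch >= @max_records_per_batch
  end

  def new_batch!
    @record_count_in_current_batch = 0
  end

  def no_more_records!
    @no_more_records = true
  end

  def no_more_records?
    @no_more_records
  end
end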
diff --git a/spec/lib/click_house/sync_cursor_spec.rb b/spec/lib/click_house/sync_cursor_spec.rb
new file mode 100644
index 00000000000..43ffaa76e1d
--- /dev/null
+++ b/spec/lib/click_house/sync_cursor_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClickHouse::SyncCursor, feature_category: :value_stream_management, click_house: {} do
+ def value
+ ClickHouse::SyncCursor.cursor_for(:my_table)
+ end
+
+ context 'when cursor is empty' do
+ it 'returns the default value: 0' do
+ expect(value).to eq(0)
+ end
+ end
+
+ context 'when cursor is present' do
+ it 'updates and returns the current cursor value' do
+ described_class.update_cursor_for(:my_table, 1111)
+
+ expect(value).to eq(1111)
+
+ described_class.update_cursor_for(:my_table, 2222)
+
+ expect(value).to eq(2222)
+ end
+ end
+
+ context 'when updating a different cursor' do
+ it 'does not affect the other cursors' do
+ described_class.update_cursor_for(:other_table, 1111)
+
+ expect(value).to eq(0)
+ end
+ end
+end
diff --git a/spec/lib/constraints/activity_pub_constrainer_spec.rb b/spec/lib/constraints/activity_pub_constrainer_spec.rb
new file mode 100644
index 00000000000..2a3d23501a9
--- /dev/null
+++ b/spec/lib/constraints/activity_pub_constrainer_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Constraints::ActivityPubConstrainer, feature_category: :groups_and_projects do
+ subject(:constraint) { described_class.new }
+
+ describe '#matches?' do
+ subject { constraint.matches?(request) }
+
+ let(:request) { ActionDispatch::Request.new(headers) }
+
+ ['application/ld+json; profile="https://www.w3.org/ns/activitystreams"', 'application/activity+json'].each do |mime|
+ context "when Accept header is #{mime}" do
+ let(:headers) { { 'HTTP_ACCEPT' => mime } }
+
+ it 'matches the header' do
+ is_expected.to be_truthy
+ end
+ end
+
+ context "when Content-Type header is #{mime}" do
+ let(:headers) { { 'CONTENT_TYPE' => mime } }
+
+ it 'matches the header' do
+ is_expected.to be_truthy
+ end
+ end
+ end
+
+ context 'when Accept and Content-Type headers are missing' do
+ let(:headers) { {} }
+
+ it 'does not match' do
+ is_expected.to be_falsey
+ end
+ end
+ end
+end
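The matcher only needs to inspect two headers; a sketch consistent with these examples (assumed implementation, since the real constrainer may parse mime types rather than compare strings):

# Matches when either the Accept or the Content-Type header carries an ActivityPub mime type.
class ActivityPubConstrainer
  MIME_TYPES = [
    'application/ld+json; profile="https://www.w3.org/ns/activitystreams"',
    'application/activity+json'
  ].freeze

  def matches?(request)
    MIME_TYPES.include?(request.headers['Accept'] || request.headers['Content-Type'])
  end
end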
diff --git a/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_spec_matcher.txt b/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_spec_matcher.txt
index 2728d65d54b..185f6deeade 100644
--- a/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_spec_matcher.txt
+++ b/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_spec_matcher.txt
@@ -2,6 +2,6 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::MyBatchedMigration, schema: [0-9]+, feature_category: :database do # rubocop:disable Layout/LineLength
+RSpec.describe Gitlab::BackgroundMigration::MyBatchedMigration, feature_category: :database do # rubocop:disable Layout/LineLength
# Tests go here
end
diff --git a/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb b/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb
index e67e48d83a3..b75d75107ee 100644
--- a/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb
+++ b/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb
@@ -141,8 +141,10 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
.to receive(:known_event?).with(event).and_return(true)
end
- it 'raises error' do
- expect { described_class.new([], options).invoke_all }.to raise_error(RuntimeError)
+ it 'does not create event definition' do
+ described_class.new([], options).invoke_all
+
+ expect(event_definition_path).to eq(nil)
end
end
diff --git a/spec/lib/generators/gitlab/partitioning/foreign_keys_generator_spec.rb b/spec/lib/generators/gitlab/partitioning/foreign_keys_generator_spec.rb
index 7c7ca8207ff..229100186be 100644
--- a/spec/lib/generators/gitlab/partitioning/foreign_keys_generator_spec.rb
+++ b/spec/lib/generators/gitlab/partitioning/foreign_keys_generator_spec.rb
@@ -33,8 +33,6 @@ feature_category: :continuous_integration do
SQL
end
- let_it_be(:destination_root) { File.expand_path("../tmp", __dir__) }
-
let(:generator_config) { { destination_root: destination_root } }
let(:generator_args) { ['--source', '_test_tmp_metadata', '--target', '_test_tmp_builds', '--database', 'main'] }
@@ -124,4 +122,8 @@ feature_category: :continuous_integration do
def schema_migrate_down!
# no-op
end
+
+ def destination_root
+ File.expand_path("../tmp", __dir__)
+ end
end
diff --git a/spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb b/spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb
index 62a52ee5fb9..740cfa767e4 100644
--- a/spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb
+++ b/spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb
@@ -6,7 +6,10 @@ RSpec.describe Gitlab::SnowplowEventDefinitionGenerator, :silence_stdout, featur
let(:ce_temp_dir) { Dir.mktmpdir }
let(:ee_temp_dir) { Dir.mktmpdir }
let(:timestamp) { Time.now.utc.strftime('%Y%m%d%H%M%S') }
- let(:generator_options) { { 'category' => 'Groups::EmailCampaignsController', 'action' => 'click' } }
+
+ let(:generator_options) do
+ { 'category' => 'Projects::Pipelines::EmailCampaignsController', 'action' => 'click' }
+ end
before do
stub_const("#{described_class}::CE_DIR", ce_temp_dir)
diff --git a/spec/lib/gitlab/auth/o_auth/provider_spec.rb b/spec/lib/gitlab/auth/o_auth/provider_spec.rb
index 226669bab33..291cfb951c3 100644
--- a/spec/lib/gitlab/auth/o_auth/provider_spec.rb
+++ b/spec/lib/gitlab/auth/o_auth/provider_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Auth::OAuth::Provider do
+RSpec.describe Gitlab::Auth::OAuth::Provider, feature_category: :system_access do
describe '.enabled?' do
before do
allow(described_class).to receive(:providers).and_return([:ldapmain, :google_oauth2])
@@ -62,25 +62,27 @@ RSpec.describe Gitlab::Auth::OAuth::Provider do
context 'for an OmniAuth provider' do
before do
- provider = ActiveSupport::InheritableOptions.new(
+ provider = GitlabSettings::Options.new(
name: 'google_oauth2',
app_id: 'asd123',
app_secret: 'asd123'
)
- allow(Gitlab.config.omniauth).to receive(:providers).and_return([provider])
+ openid_connect = GitlabSettings::Options.new(name: 'openid_connect')
+
+ stub_omniauth_setting(providers: [provider, openid_connect])
end
context 'when the provider exists' do
- subject { described_class.config_for('google_oauth2') }
+ subject(:config) { described_class.config_for('google_oauth2') }
it 'returns the config' do
- expect(subject).to be_a(ActiveSupport::InheritableOptions)
+ expect(config).to be_a(GitlabSettings::Options)
end
it 'merges defaults with the given configuration' do
defaults = Gitlab::OmniauthInitializer.default_arguments_for('google_oauth2').deep_stringify_keys
- expect(subject['args']).to include(defaults)
+ expect(config['args']).to include(defaults)
end
end
diff --git a/spec/lib/gitlab/auth/user_access_denied_reason_spec.rb b/spec/lib/gitlab/auth/user_access_denied_reason_spec.rb
index e5bc51edc2d..f12ed5a0e9c 100644
--- a/spec/lib/gitlab/auth/user_access_denied_reason_spec.rb
+++ b/spec/lib/gitlab/auth/user_access_denied_reason_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe Gitlab::Auth::UserAccessDeniedReason do
end
context 'when the user is internal' do
- let(:user) { User.ghost }
+ let(:user) { Users::Internal.ghost }
it { is_expected.to match /This action cannot be performed by internal users/ }
end
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index c4fa8513618..8da617175ca 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
describe 'constants' do
it 'API_SCOPES contains all scopes for API access' do
- expect(subject::API_SCOPES).to match_array %i[api read_user read_api create_runner]
+ expect(subject::API_SCOPES).to match_array %i[api read_user read_api create_runner k8s_proxy]
end
it 'ADMIN_SCOPES contains all scopes for ADMIN access' do
@@ -40,29 +40,29 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
end
it 'contains all non-default scopes' do
- expect(subject.all_available_scopes).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode read_observability write_observability create_runner]
+ expect(subject.all_available_scopes).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode read_observability write_observability create_runner k8s_proxy ai_features]
end
it 'contains for non-admin user all non-default scopes without ADMIN access and without observability scopes' do
user = build_stubbed(:user, admin: false)
- expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry create_runner]
+ expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features]
end
it 'contains for admin user all non-default scopes with ADMIN access and without observability scopes' do
user = build_stubbed(:user, admin: true)
- expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner]
+ expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner k8s_proxy ai_features]
end
it 'contains for project all resource bot scopes without observability scopes' do
- expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner]
+ expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features]
end
it 'contains for group all resource bot scopes' do
group = build_stubbed(:group)
- expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner]
+ expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy ai_features]
end
it 'contains for unsupported type no scopes' do
@@ -70,7 +70,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
end
it 'optional_scopes contains all non-default scopes' do
- expect(subject.optional_scopes).to match_array %i[read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode openid profile email read_observability write_observability create_runner]
+ expect(subject.optional_scopes).to match_array %i[read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode openid profile email read_observability write_observability create_runner k8s_proxy ai_features]
end
context 'with observability_group_tab feature flag' do
@@ -82,7 +82,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
it 'contains for group all resource bot scopes without observability scopes' do
group = build_stubbed(:group)
- expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner]
+ expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features]
end
end
@@ -94,23 +94,23 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
end
it 'contains for other group all resource bot scopes including observability scopes' do
- expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner]
+ expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy ai_features]
end
it 'contains for admin user all non-default scopes with ADMIN access and without observability scopes' do
user = build_stubbed(:user, admin: true)
- expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner]
+ expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner k8s_proxy ai_features]
end
it 'contains for project all resource bot scopes without observability scopes' do
- expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner]
+ expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features]
end
it 'contains for other group all resource bot scopes without observability scopes' do
other_group = build_stubbed(:group)
- expect(subject.available_scopes_for(other_group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner]
+ expect(subject.available_scopes_for(other_group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features]
end
end
end
diff --git a/spec/lib/gitlab/background_migration/backfill_has_merge_request_of_vulnerability_reads_spec.rb b/spec/lib/gitlab/background_migration/backfill_has_merge_request_of_vulnerability_reads_spec.rb
new file mode 100644
index 00000000000..fc4597fbb96
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_has_merge_request_of_vulnerability_reads_spec.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillHasMergeRequestOfVulnerabilityReads, schema: 20230907155247, feature_category: :database do # rubocop:disable Layout/LineLength
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:users) { table(:users) }
+ let(:scanners) { table(:vulnerability_scanners) }
+ let(:vulnerabilities) { table(:vulnerabilities) }
+ let(:vulnerability_reads) { table(:vulnerability_reads) }
+ let(:merge_requests) { table(:merge_requests) }
+ let(:merge_request_links) { table(:vulnerability_merge_request_links) }
+
+ let(:namespace) { namespaces.create!(name: 'user', path: 'user') }
+ let(:project) { projects.create!(namespace_id: namespace.id, project_namespace_id: namespace.id) }
+ let(:user) { users.create!(username: 'john_doe', email: 'johndoe@gitlab.com', projects_limit: 10) }
+ let(:scanner) { scanners.create!(project_id: project.id, external_id: 'external_id', name: 'Test Scanner') }
+
+ let(:vulnerability) do
+ vulnerabilities.create!(
+ project_id: project.id,
+ author_id: user.id,
+ title: 'test',
+ severity: 1,
+ confidence: 1,
+ report_type: 1
+ )
+ end
+
+ let(:merge_request) do
+ merge_requests.create!(
+ target_project_id: project.id,
+ source_branch: "other",
+ target_branch: "main",
+ author_id: user.id,
+ title: 'Feedback Merge Request'
+ )
+ end
+
+ let!(:vulnerability_read) do
+ vulnerability_reads.create!(
+ project_id: project.id,
+ vulnerability_id: vulnerability.id,
+ scanner_id: scanner.id,
+ severity: 1,
+ report_type: 1,
+ state: 1,
+ uuid: SecureRandom.uuid
+ )
+ end
+
+ let!(:merge_request_link) do
+ merge_request_links.create!(
+ vulnerability_id: vulnerability.id, merge_request_id: merge_request.id)
+ end
+
+ subject(:perform_migration) do
+ described_class.new(
+ start_id: vulnerability_reads.first.vulnerability_id,
+ end_id: vulnerability_reads.last.vulnerability_id,
+ batch_table: :vulnerability_reads,
+ batch_column: :vulnerability_id,
+ sub_batch_size: vulnerability_reads.count,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection
+ ).perform
+ end
+
+ before do
+ # Unset since the trigger already sets it during merge_request_link creation.
+ vulnerability_reads.update_all(has_merge_request: false)
+ end
+
+ it 'sets has_merge_request on the existing record' do
+ expect { perform_migration }.to change { vulnerability_read.reload.has_merge_request }.from(false).to(true)
+ end
+
+ it 'does not modify has_merge_request of other vulnerabilities that do not have a merge request' do
+ vulnerability_2 = vulnerabilities.create!(
+ project_id: project.id,
+ author_id: user.id,
+ title: 'test 2',
+ severity: 1,
+ confidence: 1,
+ report_type: 1
+ )
+
+ vulnerability_read_2 = vulnerability_reads.create!(
+ project_id: project.id,
+ vulnerability_id: vulnerability_2.id,
+ scanner_id: scanner.id,
+ severity: 1,
+ report_type: 1,
+ state: 1,
+ uuid: SecureRandom.uuid
+ )
+
+ expect { perform_migration }.not_to change { vulnerability_read_2.reload.has_merge_request }.from(false)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_nuget_normalized_version_spec.rb b/spec/lib/gitlab/background_migration/backfill_nuget_normalized_version_spec.rb
new file mode 100644
index 00000000000..3f0bd417955
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_nuget_normalized_version_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillNugetNormalizedVersion, schema: 20230811103457,
+ feature_category: :package_registry do
+ let(:packages_nuget_metadata) { table(:packages_nuget_metadata) }
+ let(:versions) do
+ {
+ '1' => '1.0.0',
+ '1.0' => '1.0.0',
+ '1.0.0' => '1.0.0',
+ '1.00' => '1.0.0',
+ '1.00.01' => '1.0.1',
+ '1.01.1' => '1.1.1',
+ '1.0.0.0' => '1.0.0',
+ '1.0.01.0' => '1.0.1',
+ '1.0.7+r3456' => '1.0.7',
+ '1.0.0-Alpha' => '1.0.0-alpha',
+ '1.00.05-alpha.0' => '1.0.5-alpha.0'
+ }
+ end
+
+ let!(:migration_attrs) do
+ {
+ start_id: packages_nuget_metadata.minimum(:package_id),
+ end_id: packages_nuget_metadata.maximum(:package_id),
+ batch_table: :packages_nuget_metadata,
+ batch_column: :package_id,
+ sub_batch_size: 1000,
+ pause_ms: 0,
+ connection: ApplicationRecord.connection
+ }
+ end
+
+ let(:migration) { described_class.new(**migration_attrs) }
+ let(:packages) { table(:packages_packages) }
+
+ let(:namespace) { table(:namespaces).create!(name: 'project', path: 'project', type: 'Project') }
+ let(:project) do
+ table(:projects).create!(name: 'project', path: 'project', project_namespace_id: namespace.id,
+ namespace_id: namespace.id)
+ end
+
+ let(:package_ids) { [] }
+
+ subject(:perform_migration) { migration.perform }
+
+ before do
+ versions.each_key do |version|
+ packages.create!(name: 'test', version: version, package_type: 4, project_id: project.id).tap do |package|
+ package_ids << package.id
+ packages_nuget_metadata.create!(package_id: package.id)
+ end
+ end
+ end
+
+ it 'executes 5 queries and updates the normalized_version column' do
+ queries = ActiveRecord::QueryRecorder.new do
+ perform_migration
+ end
+
+ # each_batch lower bound query
+ # each_batch upper bound query
+ # SELECT packages_nuget_metadata.package_id FROM packages_nuget_metadata....
+ # SELECT packages_packages.id, packages_packages.version FROM packages_packages....
+ # UPDATE packages_nuget_metadata SET normalized_version =....
+ expect(queries.count).to eq(5)
+
+ expect(
+ packages_nuget_metadata.where(package_id: package_ids).pluck(:normalized_version)
+ ).to match_array(versions.values)
+ end
+end
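The version pairs above fully determine the normalization rules: numeric segments drop leading zeros, versions are padded to three segments, a zero fourth segment is dropped, build metadata after '+' is stripped, and the pre-release label is downcased. An illustrative helper reproducing that mapping (not the migration's actual code, which presumably reuses the package model's normalization):

# Reproduces the version mapping used in the spec above; for illustration only.
def normalize_nuget_version(version)
  version = version.split('+').first               # drop build metadata
  release, _, prerelease = version.partition('-')  # split off the pre-release label

  parts = release.split('.').map(&:to_i)           # '01' -> 1, '00' -> 0
  parts += [0] * (3 - parts.size) if parts.size < 3
  parts = parts.first(3) if parts.size == 4 && parts.last.zero?

  normalized = parts.join('.')
  prerelease.empty? ? normalized : "#{normalized}-#{prerelease.downcase}"
end

normalize_nuget_version('1.00.05-alpha.0') # => "1.0.5-alpha.0"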
diff --git a/spec/lib/gitlab/background_migration/backfill_project_statistics_storage_size_with_recent_size_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_statistics_storage_size_with_recent_size_spec.rb
new file mode 100644
index 00000000000..2884fb9b10b
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_project_statistics_storage_size_with_recent_size_spec.rb
@@ -0,0 +1,165 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillProjectStatisticsStorageSizeWithRecentSize,
+ schema: 20230823090001,
+ feature_category: :consumables_cost_management do
+ include MigrationHelpers::ProjectStatisticsHelper
+
+ include_context 'when backfilling project statistics'
+
+ let(:recent_size_enabled_at) { described_class::RECENT_OBJECTS_SIZE_ENABLED_AT }
+ let(:default_stats) do
+ {
+ repository_size: 1,
+ wiki_size: 1,
+ lfs_objects_size: 1,
+ build_artifacts_size: 1,
+ packages_size: 1,
+ snippets_size: 1,
+ uploads_size: 1,
+ storage_size: default_storage_size,
+ updated_at: recent_size_enabled_at - 1.month
+ }
+ end
+
+ describe '#filter_batch' do
+ let!(:project_statistics) { generate_records(default_projects, project_statistics_table, default_stats) }
+ let!(:expected) { project_statistics.map(&:id) }
+
+ it 'filters out project_statistics with no repository_size' do
+ project_statistics_table.create!(
+ project_id: proj5.id,
+ namespace_id: proj5.namespace_id,
+ repository_size: 0,
+ wiki_size: 1,
+ lfs_objects_size: 1,
+ build_artifacts_size: 1,
+ packages_size: 1,
+ snippets_size: 1,
+ uploads_size: 1,
+ storage_size: 6,
+ updated_at: recent_size_enabled_at - 1.month
+ )
+
+ actual = migration.filter_batch(project_statistics_table).pluck(:id)
+
+ expect(actual).to match_array(expected)
+ end
+
+ shared_examples 'filters out project_statistics updated since recent objects went live' do
+ it 'filters out project_statistics updated since recent objects went live' do
+ project_statistics_table.create!(
+ project_id: proj5.id,
+ namespace_id: proj5.namespace_id,
+ repository_size: 10,
+ wiki_size: 1,
+ lfs_objects_size: 1,
+ build_artifacts_size: 1,
+ packages_size: 1,
+ snippets_size: 1,
+ uploads_size: 1,
+ storage_size: 6,
+ updated_at: recent_size_enabled_at + 1.month
+ )
+
+ actual = migration.filter_batch(project_statistics_table).pluck(:id)
+
+ expect(actual).to match_array(expected)
+ end
+ end
+
+ context 'when on GitLab.com' do
+ before do
+ allow(Gitlab).to receive(:org_or_com?).and_return(true)
+ end
+
+ it_behaves_like 'filters out project_statistics updated since recent objects went live'
+ end
+
+ context 'when Gitlab.dev_or_test_env? is true' do
+ before do
+ allow(Gitlab).to receive(:dev_or_test_env?).and_return(true)
+ end
+
+ it_behaves_like 'filters out project_statistics updated since recent objects went live'
+ end
+
+ context 'when on self-managed' do
+ before do
+ allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
+ allow(Gitlab).to receive(:org_or_com?).and_return(false)
+ end
+
+ it 'does not filter out project_statistics updated since recent objects went live' do
+ latest = project_statistics_table.create!(
+ project_id: proj5.id,
+ namespace_id: proj5.namespace_id,
+ repository_size: 10,
+ wiki_size: 1,
+ lfs_objects_size: 1,
+ build_artifacts_size: 1,
+ packages_size: 1,
+ snippets_size: 1,
+ uploads_size: 1,
+ storage_size: 6,
+ updated_at: recent_size_enabled_at + 1.month
+ )
+
+ actual = migration.filter_batch(project_statistics_table).pluck(:id)
+
+ expect(actual).to match_array(expected.push(latest.id))
+ end
+ end
+ end
+
+ describe '#perform' do
+ subject(:perform_migration) { migration.perform }
+
+ before do
+ allow_next_instance_of(Repository) do |repo|
+ allow(repo).to receive(:recent_objects_size).and_return(10)
+ end
+ end
+
+ context 'when project_statistics backfill runs' do
+ before do
+ generate_records(default_projects, project_statistics_table, default_stats)
+ allow(::Namespaces::ScheduleAggregationWorker).to receive(:perform_async)
+ end
+
+ it 'uses repository#recent_objects_size for repository_size' do
+ project_statistics = create_project_stats(projects, namespaces, default_stats)
+ migration = create_migration(end_id: project_statistics.project_id)
+
+ migration.perform
+
+ project_statistics.reload
+ expect(project_statistics.storage_size).to eq(6 + 10.megabytes)
+ end
+ end
+
+ it 'coerces a null wiki_size to 0' do
+ project_statistics = create_project_stats(projects, namespaces, default_stats, { wiki_size: nil })
+ allow(::Namespaces::ScheduleAggregationWorker).to receive(:perform_async)
+ migration = create_migration(end_id: project_statistics.project_id)
+
+ migration.perform
+
+ project_statistics.reload
+ expect(project_statistics.storage_size).to eq(5 + 10.megabytes)
+ end
+
+ it 'coerces a null snippets_size to 0' do
+ project_statistics = create_project_stats(projects, namespaces, default_stats, { snippets_size: nil })
+ allow(::Namespaces::ScheduleAggregationWorker).to receive(:perform_async)
+ migration = create_migration(end_id: project_statistics.project_id)
+
+ migration.perform
+
+ project_statistics.reload
+ expect(project_statistics.storage_size).to eq(5 + 10.megabytes)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
index 9f76e4131b2..06b66b599ab 100644
--- a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
@@ -250,7 +250,7 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migrat
end
context 'when user name is invalid' do
- let(:user_name) { '.' }
+ let(:user_name) { ',' }
let!(:snippet) { snippets.create!(id: 4, type: 'PersonalSnippet', author_id: user.id, file_name: file_name, content: content) }
let(:ids) { [4, 4] }
@@ -262,7 +262,7 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migrat
end
context 'when both user name and snippet file_name are invalid' do
- let(:user_name) { '.' }
+ let(:user_name) { ',' }
let!(:other_user) do
users.create!(
id: 2,
diff --git a/spec/lib/gitlab/background_migration/backfill_user_preferences_with_defaults_spec.rb b/spec/lib/gitlab/background_migration/backfill_user_preferences_with_defaults_spec.rb
new file mode 100644
index 00000000000..b66b930b7ac
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_user_preferences_with_defaults_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillUserPreferencesWithDefaults,
+ schema: 20230818085219,
+ feature_category: :user_profile do
+ let(:user_preferences) { table(:user_preferences) }
+ let(:users) { table(:users) }
+ let(:columns) { [:tab_width, :time_display_relative, :render_whitespace_in_code] }
+ let(:initial_column_values) do
+ [
+ [nil, nil, nil],
+ [10, nil, nil],
+ [nil, false, nil],
+ [nil, nil, true]
+ ]
+ .map { |row| columns.zip(row).to_h }
+ end
+
+ let(:final_column_values) do
+ [
+ [8, true, false],
+ [10, true, false],
+ [8, false, false],
+ [8, true, true]
+ ]
+ .map { |row| columns.zip(row).to_h }
+ end
+
+ subject(:perform_migration) do
+ described_class
+ .new(
+ start_id: user_preferences.minimum(:id),
+ end_id: user_preferences.maximum(:id),
+ batch_table: :user_preferences,
+ batch_column: :id,
+ sub_batch_size: 2,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection
+ )
+ .perform
+ end
+
+ before do
+ initial_column_values.each_with_index do |attributes, index|
+ user = users.create!(projects_limit: 1, email: "user#{index}@gitlab.com")
+ user_preference = user_preferences.create!(attributes.merge(user_id: user.id))
+ final_column_values[index].merge!(id: user_preference.id)
+ end
+ end
+
+ it 'backfills the null values with the default values' do
+ perform_migration
+
+ final_column_values.each { |attributes| match_attributes(attributes) }
+ end
+
+ def match_attributes(attributes)
+ migrated_user_preference = user_preferences.find(attributes[:id])
+
+ expect(migrated_user_preference.tab_width).to eq(attributes[:tab_width])
+ expect(migrated_user_preference.time_display_relative).to eq(attributes[:time_display_relative])
+ expect(migrated_user_preference.render_whitespace_in_code).to eq(attributes[:render_whitespace_in_code])
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_users_with_defaults_spec.rb b/spec/lib/gitlab/background_migration/backfill_users_with_defaults_spec.rb
new file mode 100644
index 00000000000..78f36933435
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_users_with_defaults_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillUsersWithDefaults,
+ schema: 20230818083610,
+ feature_category: :user_profile do
+ let(:users) { table(:users) }
+ let(:columns) { [:project_view, :hide_no_ssh_key, :hide_no_password, :notified_of_own_activity] }
+ let(:initial_column_values) do
+ [
+ [nil, nil, nil, nil],
+ [0, nil, nil, nil],
+ [nil, true, nil, nil],
+ [nil, nil, true, nil],
+ [nil, nil, nil, true]
+ ]
+ .map { |row| columns.zip(row).to_h }
+ end
+
+ let(:final_column_values) do
+ [
+ [2, false, false, false],
+ [0, false, false, false],
+ [2, true, false, false],
+ [2, false, true, false],
+ [2, false, false, true]
+ ]
+ .map { |row| columns.zip(row).to_h }
+ end
+
+ subject(:perform_migration) do
+ described_class
+ .new(
+ start_id: users.minimum(:id),
+ end_id: users.maximum(:id),
+ batch_table: :users,
+ batch_column: :id,
+ sub_batch_size: 2,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection
+ )
+ .perform
+ end
+
+ before do
+ initial_column_values.each_with_index do |attributes, index|
+ user = users.create!(**attributes.merge(projects_limit: 1, email: "user#{index}@gitlab.com"))
+ final_column_values[index].merge!(id: user.id)
+ end
+ end
+
+ it 'backfills the null values with the default values' do
+ perform_migration
+
+ final_column_values.each { |attributes| match_attributes(attributes) }
+ end
+
+ private
+
+ def match_attributes(attributes)
+ migrated_user = users.find(attributes[:id])
+ expect(migrated_user.project_view).to eq(attributes[:project_view])
+ expect(migrated_user.hide_no_ssh_key).to eq(attributes[:hide_no_ssh_key])
+ expect(migrated_user.hide_no_password).to eq(attributes[:hide_no_password])
+ expect(migrated_user.notified_of_own_activity).to eq(attributes[:notified_of_own_activity])
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/convert_credit_card_validation_data_to_hashes_spec.rb b/spec/lib/gitlab/background_migration/convert_credit_card_validation_data_to_hashes_spec.rb
new file mode 100644
index 00000000000..97f69afca55
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/convert_credit_card_validation_data_to_hashes_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::ConvertCreditCardValidationDataToHashes, schema: 20230821081603, feature_category: :user_profile do # rubocop:disable Layout/LineLength
+ let(:users_table) { table(:users) }
+ let(:credit_card_validations_table) { table(:user_credit_card_validations) }
+ let(:rows) { 5 }
+
+ describe '#perform' do
+ let(:network) { 'Visa' }
+ let(:holder_name) { 'John Smith' }
+ let(:last_digits) { 1111 }
+ let(:expiration_date) { 1.year.from_now.to_date }
+
+ subject(:perform_migration) do
+ described_class.new(
+ start_id: 1,
+ end_id: rows,
+ batch_table: :user_credit_card_validations,
+ batch_column: :user_id,
+ sub_batch_size: 2,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection
+ ).perform
+ end
+
+ before do
+ (1..rows).each do |i|
+ users_table.create!(id: i, username: "John #{i}", email: "johndoe_#{i}@gitlab.com", projects_limit: 10)
+
+ credit_card_validations_table.create!(
+ id: i,
+ user_id: i,
+ network: network,
+ holder_name: holder_name,
+ last_digits: last_digits,
+ expiration_date: expiration_date,
+ credit_card_validated_at: Date.today
+ )
+ end
+ end
+
+ it 'updates the hash columns for records in the specified batch', :aggregate_failures do
+ perform_migration
+
+ (1..rows).each do |i|
+ credit_card = credit_card_validations_table.find_by(user_id: i)
+
+ expect(credit_card.last_digits_hash).to eq(hashed_value(last_digits))
+ expect(credit_card.holder_name_hash).to eq(hashed_value(holder_name.downcase))
+ expect(credit_card.network_hash).to eq(hashed_value(network.downcase))
+ expect(credit_card.expiration_date_hash).to eq(hashed_value(expiration_date.to_s))
+ end
+ end
+
+ context 'with NULL columns' do
+ let(:network) { nil }
+ let(:holder_name) { nil }
+ let(:last_digits) { nil }
+ let(:expiration_date) { nil }
+
+ it 'does not update values for records in the specified batch', :aggregate_failures do
+ perform_migration
+
+ (1..rows).each do |i|
+ credit_card = credit_card_validations_table.find_by(user_id: i)
+
+ expect(credit_card.last_digits_hash).to eq(nil)
+ expect(credit_card.holder_name_hash).to eq(nil)
+ expect(credit_card.network_hash).to eq(nil)
+ expect(credit_card.expiration_date_hash).to eq(nil)
+ end
+ end
+ end
+ end
+
+ def hashed_value(value)
+ Gitlab::CryptoHelper.sha256(value)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/rebalance_partition_id_spec.rb b/spec/lib/gitlab/background_migration/rebalance_partition_id_spec.rb
deleted file mode 100644
index 195e57e4e59..00000000000
--- a/spec/lib/gitlab/background_migration/rebalance_partition_id_spec.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::RebalancePartitionId,
- :migration,
- schema: 20230125093723,
- feature_category: :continuous_integration do
- let(:ci_builds_table) { table(:ci_builds, database: :ci) }
- let(:ci_pipelines_table) { table(:ci_pipelines, database: :ci) }
-
- let!(:valid_ci_pipeline) { ci_pipelines_table.create!(id: 1, partition_id: 100) }
- let!(:invalid_ci_pipeline) { ci_pipelines_table.create!(id: 2, partition_id: 101) }
-
- describe '#perform' do
- using RSpec::Parameterized::TableSyntax
-
- where(:table_name, :invalid_record, :valid_record) do
- :ci_pipelines | invalid_ci_pipeline | valid_ci_pipeline
- end
-
- subject(:perform) do
- described_class.new(
- start_id: 1,
- end_id: 2,
- batch_table: table_name,
- batch_column: :id,
- sub_batch_size: 1,
- pause_ms: 0,
- connection: Ci::ApplicationRecord.connection
- ).perform
- end
-
- shared_examples 'fix invalid records' do
- it 'rebalances partition_id to 100 when partition_id is 101' do
- expect { perform }
- .to change { invalid_record.reload.partition_id }.from(101).to(100)
- .and not_change { valid_record.reload.partition_id }
- end
- end
-
- with_them do
- it_behaves_like 'fix invalid records'
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/update_users_set_external_if_service_account_spec.rb b/spec/lib/gitlab/background_migration/update_users_set_external_if_service_account_spec.rb
new file mode 100644
index 00000000000..19ad70337dc
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/update_users_set_external_if_service_account_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::UpdateUsersSetExternalIfServiceAccount, feature_category: :system_access do
+ describe "#perform" do
+ let(:users_table) { table(:users) }
+ let(:service_account_user) do
+ users_table.create!(username: 'john_doe', email: 'johndoe@gitlab.com',
+ user_type: HasUserType::USER_TYPES[:service_account], projects_limit: 5)
+ end
+
+ let(:service_user) do
+ users_table.create!(username: 'john_doe2', email: 'johndoe2@gitlab.com',
+ user_type: HasUserType::USER_TYPES[:service_user], projects_limit: 5)
+ end
+
+ let(:table_name) { :users }
+ let(:batch_column) { :id }
+ let(:sub_batch_size) { 2 }
+ let(:pause_ms) { 0 }
+ let(:migration) do
+ described_class.new(
+ start_id: service_account_user.id, end_id: service_user.id,
+ batch_table: table_name, batch_column: batch_column,
+ sub_batch_size: sub_batch_size, pause_ms: pause_ms,
+ connection: ApplicationRecord.connection
+ )
+ end
+
+ subject(:perform_migration) do
+ migration.perform
+ end
+
+ it "changes external field for service_account user" do
+ perform_migration
+
+ expect(service_account_user.reload.external).to eq(true)
+ expect(service_user.reload.external).to eq(false)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
index 4c94ecfe745..9786e7a364e 100644
--- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
@@ -92,6 +92,7 @@ RSpec.describe Gitlab::BitbucketImport::Importer, :clean_gitlab_redis_cache, fea
describe '#import_pull_requests' do
let(:source_branch_sha) { sample.commits.last }
+ let(:merge_commit_sha) { sample.commits.second }
let(:target_branch_sha) { sample.commits.first }
let(:pull_request) do
instance_double(
@@ -101,6 +102,7 @@ RSpec.describe Gitlab::BitbucketImport::Importer, :clean_gitlab_redis_cache, fea
source_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.source_branch,
target_branch_sha: target_branch_sha,
target_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.target_branch,
+ merge_commit_sha: merge_commit_sha,
title: 'This is a title',
description: 'This is a test pull request',
state: 'merged',
@@ -217,17 +219,29 @@ RSpec.describe Gitlab::BitbucketImport::Importer, :clean_gitlab_redis_cache, fea
end
end
- context "when branches' sha is not found in the repository" do
+ context 'when source SHA is not found in the repository' do
let(:source_branch_sha) { 'a' * Commit::MIN_SHA_LENGTH }
- let(:target_branch_sha) { 'b' * Commit::MIN_SHA_LENGTH }
+ let(:target_branch_sha) { 'c' * Commit::MIN_SHA_LENGTH }
- it 'uses the pull request sha references' do
+ it 'uses the merge commit SHA for the source' do
expect { subject.execute }.to change { MergeRequest.count }.by(1)
merge_request_diff = MergeRequest.first.merge_request_diff
- expect(merge_request_diff.head_commit_sha).to eq source_branch_sha
+ expect(merge_request_diff.head_commit_sha).to eq merge_commit_sha
expect(merge_request_diff.start_commit_sha).to eq target_branch_sha
end
+
+ context 'when the merge commit SHA is also not found' do
+ let(:merge_commit_sha) { 'b' * Commit::MIN_SHA_LENGTH }
+
+ it 'uses the pull request sha references' do
+ expect { subject.execute }.to change { MergeRequest.count }.by(1)
+
+ merge_request_diff = MergeRequest.first.merge_request_diff
+ expect(merge_request_diff.head_commit_sha).to eq source_branch_sha
+ expect(merge_request_diff.start_commit_sha).to eq target_branch_sha
+ end
+ end
end
context "when target_branch_sha is blank" do
diff --git a/spec/lib/gitlab/bitbucket_import/importers/pull_request_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/pull_request_importer_spec.rb
new file mode 100644
index 00000000000..2eca6bb47d6
--- /dev/null
+++ b/spec/lib/gitlab/bitbucket_import/importers/pull_request_importer_spec.rb
@@ -0,0 +1,166 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::Importers::PullRequestImporter, :clean_gitlab_redis_cache, feature_category: :importers do
+ include AfterNextHelpers
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:bitbucket_user) { create(:user) }
+ let_it_be(:user_2) { create(:user) }
+ let_it_be(:user_3) { create(:user) }
+ let_it_be(:identity) { create(:identity, user: bitbucket_user, extern_uid: 'bitbucket_user', provider: :bitbucket) }
+ let_it_be(:identity_2) { create(:identity, user: user_2, extern_uid: 'user_2', provider: :bitbucket) }
+ let(:source_branch_sha) { project.repository.commit.sha }
+ let(:target_branch_sha) { project.repository.commit('refs/heads/master').sha }
+
+ let(:hash) do
+ {
+ author: 'bitbucket_user',
+ created_at: Date.today,
+ description: 'description',
+ iid: 11,
+ source_branch_name: 'source-branch-name',
+ source_branch_sha: source_branch_sha,
+ state: 'merged',
+ target_branch_name: 'destination-branch-name',
+ target_branch_sha: target_branch_sha,
+ title: 'title',
+ updated_at: Date.today,
+ reviewers: %w[user_2 user_3]
+ }
+ end
+
+ subject(:importer) { described_class.new(project, hash) }
+
+ describe '#execute' do
+ it 'calls MergeRequestCreator' do
+ expect(Gitlab::Import::MergeRequestCreator).to receive_message_chain(:new, :execute)
+
+ importer.execute
+ end
+
+ it 'creates a merge request with the correct attributes' do
+ expect { importer.execute }.to change { project.merge_requests.count }.from(0).to(1)
+
+ merge_request = project.merge_requests.first
+
+ expect(merge_request.iid).to eq(11)
+ expect(merge_request.author).to eq(bitbucket_user)
+ expect(merge_request.title).to eq('title')
+ expect(merge_request.merged?).to be_truthy
+ expect(merge_request.created_at).to eq(Date.today)
+ expect(merge_request.description).to eq('description')
+ expect(merge_request.source_project_id).to eq(project.id)
+ expect(merge_request.target_project_id).to eq(project.id)
+ expect(merge_request.source_branch).to eq('source-branch-name')
+ expect(merge_request.target_branch).to eq('destination-branch-name')
+ expect(merge_request.assignee_ids).to eq([bitbucket_user.id])
+ expect(merge_request.reviewer_ids).to eq([user_2.id])
+ expect(merge_request.merge_request_diffs.first.base_commit_sha).to eq(source_branch_sha)
+ expect(merge_request.merge_request_diffs.first.head_commit_sha).to eq(target_branch_sha)
+ end
+
+ context 'when the state is closed' do
+ it 'marks merge request as closed' do
+ described_class.new(project, hash.merge(state: 'closed')).execute
+
+ expect(project.merge_requests.first.closed?).to be_truthy
+ end
+ end
+
+ context 'when the state is opened' do
+ it 'marks merge request as opened' do
+ described_class.new(project, hash.merge(state: 'opened')).execute
+
+ expect(project.merge_requests.first.opened?).to be_truthy
+ end
+ end
+
+ context 'when the author does not have a bitbucket identity' do
+ before do
+ identity.update!(provider: :github)
+ end
+
+ it 'sets the author and assignee to the project creator and adds the author to the description' do
+ importer.execute
+
+ merge_request = project.merge_requests.first
+
+ expect(merge_request.author).to eq(project.creator)
+ expect(merge_request.assignee).to eq(project.creator)
+ expect(merge_request.description).to eq("*Created by: bitbucket_user*\n\ndescription")
+ end
+ end
+
+ context 'when none of the reviewers have an identity' do
+ before do
+ identity_2.destroy!
+ end
+
+ it 'does not set reviewer_ids' do
+ importer.execute
+
+ merge_request = project.merge_requests.first
+
+ expect(merge_request.reviewer_ids).to be_empty
+ end
+ end
+
+ describe 'head_commit_sha for merge request diff' do
+ let(:diff) { project.merge_requests.first.merge_request_diffs.first }
+ let(:min_length) { Commit::MIN_SHA_LENGTH }
+
+ context 'when the source commit hash from Bitbucket is found on the repo' do
+ it 'is set to the source commit hash' do
+ described_class.new(project, hash.merge(source_branch_sha: source_branch_sha)).execute
+
+ expect(diff.head_commit_sha).to eq(source_branch_sha)
+ end
+ end
+
+ context 'when the source commit hash is not found but the merge commit hash is found' do
+ it 'is set to the merge commit hash' do
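+ # The source SHA below is not present in the repository, while merge_commit_sha points at a real commit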
+ attrs = { source_branch_sha: 'x' * min_length, merge_commit_sha: source_branch_sha }
+
+ described_class.new(project, hash.merge(attrs)).execute
+
+ expect(diff.head_commit_sha).to eq(source_branch_sha)
+ end
+ end
+
+ context 'when both the source commit and merge commit hash are not found' do
+ it 'is nil' do
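+ # Neither SHA below exists in the repository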
+ attrs = { source_branch_sha: 'x' * min_length, merge_commit_sha: 'y' * min_length }
+
+ described_class.new(project, hash.merge(attrs)).execute
+
+ expect(diff.head_commit_sha).to be_nil
+ end
+ end
+ end
+
+ context 'when an error is raised' do
+ before do
+ allow(Gitlab::Import::MergeRequestCreator).to receive(:new).and_raise(StandardError)
+ end
+
+ it 'tracks the failure and does not fail' do
+ expect(Gitlab::Import::ImportFailureService).to receive(:track).once
+
+ importer.execute
+ end
+ end
+
+ it 'logs its progress' do
+ allow(Gitlab::Import::MergeRequestCreator).to receive_message_chain(:new, :execute)
+
+ expect(Gitlab::BitbucketImport::Logger)
+ .to receive(:info).with(include(message: 'starting', iid: anything)).and_call_original
+ expect(Gitlab::BitbucketImport::Logger)
+ .to receive(:info).with(include(message: 'finished', iid: anything)).and_call_original
+
+ importer.execute
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_import/importers/pull_requests_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/pull_requests_importer_spec.rb
new file mode 100644
index 00000000000..46bf099de0c
--- /dev/null
+++ b/spec/lib/gitlab/bitbucket_import/importers/pull_requests_importer_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::Importers::PullRequestsImporter, feature_category: :importers do
+ let_it_be(:project) do
+ create(:project, :import_started,
+ import_data_attributes: {
+ data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
+ credentials: { 'base_uri' => 'http://bitbucket.org/', 'user' => 'bitbucket', 'password' => 'password' }
+ }
+ )
+ end
+
+ subject(:importer) { described_class.new(project) }
+
+ describe '#execute', :clean_gitlab_redis_cache do
+ before do
+ allow_next_instance_of(Bitbucket::Client) do |client|
+ allow(client).to receive(:pull_requests).and_return(
+ [
+ Bitbucket::Representation::PullRequest.new({ 'id' => 1, 'state' => 'OPENED' }),
+ Bitbucket::Representation::PullRequest.new({ 'id' => 2, 'state' => 'DECLINED' }),
+ Bitbucket::Representation::PullRequest.new({ 'id' => 3, 'state' => 'MERGED' })
+ ],
+ []
+ )
+ end
+ end
+
+ it 'imports each pull request in parallel', :aggregate_failures do
+ expect(Gitlab::BitbucketImport::ImportPullRequestWorker).to receive(:perform_in).exactly(3).times
+
+ waiter = importer.execute
+
+ expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
+ expect(waiter.jobs_remaining).to eq(3)
+ expect(Gitlab::Cache::Import::Caching.values_from_set(importer.already_enqueued_cache_key))
+ .to match_array(%w[1 2 3])
+ end
+
+ context 'when the client raises an error' do
+ before do
+ allow_next_instance_of(Bitbucket::Client) do |client|
+ allow(client).to receive(:pull_requests).and_raise(StandardError)
+ end
+ end
+
+ it 'tracks the failure and does not fail' do
+ expect(Gitlab::Import::ImportFailureService).to receive(:track).once
+
+ importer.execute
+ end
+ end
+
+ context 'when a pull request was already enqueued' do
+ before do
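+ # Mark pull request with ID 1 as already enqueued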
+ Gitlab::Cache::Import::Caching.set_add(importer.already_enqueued_cache_key, 1)
+ end
+
+ it 'does not schedule jobs for already enqueued pull requests', :aggregate_failures do
+ expect(Gitlab::BitbucketImport::ImportPullRequestWorker).to receive(:perform_in).twice
+
+ waiter = importer.execute
+
+ expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
+ expect(waiter.jobs_remaining).to eq(3)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_import/importers/repository_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/repository_importer_spec.rb
new file mode 100644
index 00000000000..1caf0b884c2
--- /dev/null
+++ b/spec/lib/gitlab/bitbucket_import/importers/repository_importer_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::Importers::RepositoryImporter, feature_category: :importers do
+ let_it_be(:project) { create(:project, import_url: 'https://bitbucket.org/vim/vim.git') }
+
+ subject(:importer) { described_class.new(project) }
+
+ describe '#execute' do
+ context 'when repository is empty' do
+ it 'imports the repository' do
+ expect(project.repository).to receive(:import_repository).with(project.import_url)
+ expect(project.repository).to receive(:fetch_as_mirror).with(project.import_url,
+ refmap: ['+refs/pull-requests/*/to:refs/merge-requests/*/head'])
+ expect(project.last_repository_updated_at).to be_present
+
+ importer.execute
+ end
+ end
+
+ context 'when repository is not empty' do
+ before do
+ allow(project).to receive(:empty_repo?).and_return(false)
+
+ project.last_repository_updated_at = 1.day.ago
+ end
+
+ it 'does not import the repository' do
+ expect(project.repository).not_to receive(:import_repository)
+
+ expect { importer.execute }.not_to change { project.last_repository_updated_at }
+ end
+ end
+
+ context 'when a Git CommandError is raised and the repository exists' do
+ before do
+ allow(project.repository).to receive(:import_repository).and_raise(::Gitlab::Git::CommandError)
+ allow(project).to receive(:repository_exists?).and_return(true)
+ end
+
+ it 'expires repository caches' do
+ expect(project.repository).to receive(:expire_content_cache)
+
+ expect { importer.execute }.to raise_error(::Gitlab::Git::CommandError)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_import/parallel_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/parallel_importer_spec.rb
new file mode 100644
index 00000000000..29919c43d23
--- /dev/null
+++ b/spec/lib/gitlab/bitbucket_import/parallel_importer_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::ParallelImporter, feature_category: :importers do
+ subject { described_class }
+
+ it { is_expected.to be_async }
+
+ describe '.track_start_import' do
+ it 'tracks the start of import' do
+ project = build_stubbed(:project)
+
+ expect_next_instance_of(Gitlab::Import::Metrics, :bitbucket_importer, project) do |metric|
+ expect(metric).to receive(:track_start_import)
+ end
+
+ subject.track_start_import(project)
+ end
+ end
+
+ describe '#execute', :clean_gitlab_redis_shared_state do
+ let_it_be(:project) { create(:project) }
+ let(:importer) { subject.new(project) }
+
+ before do
+ create(:import_state, :started, project: project)
+ end
+
+ it 'schedules the importing of the repository' do
+ expect(Gitlab::BitbucketImport::Stage::ImportRepositoryWorker)
+ .to receive_message_chain(:with_status, :perform_async).with(project.id)
+
+ expect(importer.execute).to eq(true)
+ end
+
+ it 'sets the JID in Redis' do
+ expect(Gitlab::Import::SetAsyncJid).to receive(:set_jid).with(project.import_state).and_call_original
+
+ importer.execute
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_import/user_finder_spec.rb b/spec/lib/gitlab/bitbucket_import/user_finder_spec.rb
new file mode 100644
index 00000000000..4ac4c2e4813
--- /dev/null
+++ b/spec/lib/gitlab/bitbucket_import/user_finder_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::UserFinder, :clean_gitlab_redis_cache, feature_category: :importers do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:identity) { create(:identity, user: user, extern_uid: 'uid', provider: :bitbucket) }
+ let(:creator_id) { 1 }
+ let(:project) { instance_double(Project, creator_id: creator_id, id: 1) }
+ let(:author) { 'uid' }
+ let(:cache_key) { format(described_class::USER_ID_FOR_AUTHOR_CACHE_KEY, project_id: project.id, author: author) }
+
+ subject(:user_finder) { described_class.new(project) }
+
+ describe '#find_user_id' do
+ it 'returns the user id' do
+ expect(User).to receive(:by_provider_and_extern_uid).and_call_original.once
+
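+ # The second call is served from the cache, so the User lookup runs only once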
+ expect(user_finder.find_user_id(author)).to eq(user.id)
+ expect(user_finder.find_user_id(author)).to eq(user.id)
+ end
+
+ context 'when the id is cached' do
+ before do
+ Gitlab::Cache::Import::Caching.write(cache_key, user.id)
+ end
+
+ it 'does not attempt to find the user' do
+ expect(User).not_to receive(:by_provider_and_extern_uid)
+
+ expect(user_finder.find_user_id(author)).to eq(user.id)
+ end
+ end
+
+ context 'when -1 is cached' do
+ before do
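+ # Cache the sentinel value that marks the author as having no matching user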
+ Gitlab::Cache::Import::Caching.write(cache_key, -1)
+ end
+
+ it 'does not attempt to find the user and returns nil' do
+ expect(User).not_to receive(:by_provider_and_extern_uid)
+
+ expect(user_finder.find_user_id(author)).to be_nil
+ end
+ end
+
+ context 'when the user does not have a matching bitbucket identity' do
+ before do
+ identity.update!(provider: :github)
+ end
+
+ it 'returns nil' do
+ expect(user_finder.find_user_id(author)).to be_nil
+ end
+ end
+ end
+
+ describe '#gitlab_user_id' do
+ context 'when find_user_id returns a user' do
+ it 'returns the user id' do
+ expect(user_finder.gitlab_user_id(project, author)).to eq(user.id)
+ end
+ end
+
+ context 'when find_user_id does not return a user' do
+ before do
+ allow(user_finder).to receive(:find_user_id).and_return(nil)
+ end
+
+ it 'returns the project creator' do
+ expect(user_finder.gitlab_user_id(project, author)).to eq(creator_id)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
deleted file mode 100644
index 4ff61bf329c..00000000000
--- a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
+++ /dev/null
@@ -1,653 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BitbucketServerImport::Importer, feature_category: :importers do
- include ImportSpecHelper
-
- let(:import_url) { 'http://my-bitbucket' }
- let(:bitbucket_user) { 'bitbucket' }
- let(:project_creator) { create(:user, username: 'project_creator', email: 'project_creator@example.org') }
- let(:password) { 'test' }
- let(:project) { create(:project, :repository, import_url: import_url, creator: project_creator) }
- let(:now) { Time.now.utc.change(usec: 0) }
- let(:project_key) { 'TEST' }
- let(:repo_slug) { 'rouge-repo' }
- let(:sample) { RepoHelpers.sample_compare }
-
- subject { described_class.new(project, recover_missing_commits: true) }
-
- before do
- data = project.create_or_update_import_data(
- data: { project_key: project_key, repo_slug: repo_slug },
- credentials: { base_uri: import_url, user: bitbucket_user, password: password }
- )
- data.save!
- project.save!
- end
-
- describe '#import_repository' do
- let(:repo_url) { 'http://bitbucket:test@my-bitbucket' }
-
- before do
- expect(project.repository).to receive(:import_repository).with(repo_url)
- end
-
- it 'adds a remote' do
- expect(subject).to receive(:import_pull_requests)
- expect(subject).to receive(:delete_temp_branches)
- expect(project.repository).to receive(:fetch_as_mirror)
- .with(repo_url,
- refmap: ['+refs/pull-requests/*/to:refs/merge-requests/*/head'])
-
- subject.execute
- end
-
- it 'raises a Gitlab::Git::CommandError in the fetch' do
- expect(project.repository).to receive(:fetch_as_mirror).and_raise(::Gitlab::Git::CommandError)
-
- expect { subject.execute }.to raise_error(::Gitlab::Git::CommandError)
- end
-
- it 'raises an unhandled exception in the fetch' do
- expect(project.repository).to receive(:fetch_as_mirror).and_raise(RuntimeError)
-
- expect { subject.execute }.to raise_error(RuntimeError)
- end
- end
-
- describe '#import_pull_requests' do
- let(:pull_request_author) { create(:user, username: 'pull_request_author', email: 'pull_request_author@example.org') }
- let(:note_author) { create(:user, username: 'note_author', email: 'note_author@example.org') }
-
- let(:pull_request) do
- instance_double(
- BitbucketServer::Representation::PullRequest,
- iid: 10,
- source_branch_sha: sample.commits.last,
- source_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.source_branch,
- target_branch_sha: sample.commits.first,
- target_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.target_branch,
- title: 'This is a title',
- description: 'This is a test pull request',
- reviewers: [],
- state: 'merged',
- author: 'Test Author',
- author_email: pull_request_author.email,
- author_username: pull_request_author.username,
- created_at: Time.now,
- updated_at: Time.now,
- raw: {},
- merged?: true)
- end
-
- let(:merge_event) do
- instance_double(
- BitbucketServer::Representation::Activity,
- comment?: false,
- merge_event?: true,
- committer_email: pull_request_author.email,
- merge_timestamp: now,
- merge_commit: '12345678'
- )
- end
-
- let(:pr_note) do
- instance_double(
- BitbucketServer::Representation::Comment,
- note: 'Hello world',
- author_email: note_author.email,
- author_username: note_author.username,
- comments: [],
- created_at: now,
- updated_at: now,
- parent_comment: nil)
- end
-
- let(:pr_comment) do
- instance_double(
- BitbucketServer::Representation::Activity,
- comment?: true,
- inline_comment?: false,
- merge_event?: false,
- comment: pr_note)
- end
-
- before do
- allow(subject).to receive(:import_repository)
- allow(subject).to receive(:delete_temp_branches)
- allow(subject).to receive(:restore_branches)
-
- allow(subject.client).to receive(:pull_requests).and_return([pull_request], [])
- end
-
- # As we are using Caching with redis, it is best to clean the cache after each test run, else we need to wait for
- # the expiration by the importer
- after do
- Gitlab::Cache::Import::Caching.expire(subject.already_imported_cache_key, 0)
- end
-
- it 'imports merge event' do
- expect(subject.client).to receive(:activities).and_return([merge_event])
-
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- merge_request = MergeRequest.first
- expect(merge_request.metrics.merged_by).to eq(pull_request_author)
- expect(merge_request.metrics.merged_at).to eq(merge_event.merge_timestamp)
- expect(merge_request.merge_commit_sha).to eq('12345678')
- expect(merge_request.state_id).to eq(3)
- end
-
- describe 'pull request author user mapping' do
- before do
- allow(subject.client).to receive(:activities).and_return([merge_event])
- end
-
- shared_examples 'imports pull requests' do
- it 'maps user' do
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- merge_request = MergeRequest.first
- expect(merge_request.author).to eq(expected_author)
- end
- end
-
- context 'when bitbucket_server_user_mapping_by_username feature flag is disabled' do
- before do
- stub_feature_flags(bitbucket_server_user_mapping_by_username: false)
- end
-
- context 'when email is not present' do
- before do
- allow(pull_request).to receive(:author_email).and_return(nil)
- end
-
- let(:expected_author) { project_creator }
-
- include_examples 'imports pull requests'
- end
-
- context 'when email is present' do
- before do
- allow(pull_request).to receive(:author_email).and_return(pull_request_author.email)
- end
-
- let(:expected_author) { pull_request_author }
-
- include_examples 'imports pull requests'
- end
- end
-
- context 'when bitbucket_server_user_mapping_by_username feature flag is enabled' do
- before do
- stub_feature_flags(bitbucket_server_user_mapping_by_username: true)
- end
-
- context 'when username is not present' do
- before do
- allow(pull_request).to receive(:author_username).and_return(nil)
- end
-
- let(:expected_author) { project_creator }
-
- include_examples 'imports pull requests'
- end
-
- context 'when username is present' do
- before do
- allow(pull_request).to receive(:author_username).and_return(pull_request_author.username)
- end
-
- let(:expected_author) { pull_request_author }
-
- include_examples 'imports pull requests'
- end
- end
-
- context 'when user is not found' do
- before do
- allow(pull_request).to receive(:author_username).and_return(nil)
- allow(pull_request).to receive(:author_email).and_return(nil)
- end
-
- it 'maps importer user' do
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- merge_request = MergeRequest.first
- expect(merge_request.author).to eq(project_creator)
- end
- end
- end
-
- describe 'comments' do
- shared_examples 'imports comments' do
- it 'imports comments' do
- expect(subject.client).to receive(:activities).and_return([pr_comment])
-
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- merge_request = MergeRequest.first
- expect(merge_request.notes.count).to eq(1)
- note = merge_request.notes.first
- expect(note.note).to end_with(pr_note.note)
- expect(note.author).to eq(note_author)
- expect(note.created_at).to eq(pr_note.created_at)
- expect(note.updated_at).to eq(pr_note.created_at)
- end
- end
-
- context 'when bitbucket_server_user_mapping_by_username feature flag is disabled' do
- before do
- stub_feature_flags(bitbucket_server_user_mapping_by_username: false)
- end
-
- include_examples 'imports comments'
- end
-
- context 'when bitbucket_server_user_mapping_by_username feature flag is enabled' do
- before do
- stub_feature_flags(bitbucket_server_user_mapping_by_username: true)
- end
-
- include_examples 'imports comments'
-
- context 'when username is not present' do
- before do
- allow(pr_note).to receive(:author_username).and_return(nil)
- allow(subject.client).to receive(:activities).and_return([pr_comment])
- end
-
- it 'defaults to import user' do
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- merge_request = MergeRequest.first
- expect(merge_request.notes.count).to eq(1)
- note = merge_request.notes.first
- expect(note.author).to eq(project_creator)
- end
- end
-
- context 'when username is present' do
- before do
- allow(pr_note).to receive(:author_username).and_return(note_author.username)
- allow(subject.client).to receive(:activities).and_return([pr_comment])
- end
-
- it 'maps by username' do
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- merge_request = MergeRequest.first
- expect(merge_request.notes.count).to eq(1)
- note = merge_request.notes.first
- expect(note.author).to eq(note_author)
- end
- end
- end
- end
-
- context 'metrics' do
- let(:histogram) { double(:histogram).as_null_object }
- let(:counter) { double('counter', increment: true) }
-
- before do
- allow(Gitlab::Metrics).to receive(:counter) { counter }
- allow(Gitlab::Metrics).to receive(:histogram) { histogram }
- allow(subject.client).to receive(:activities).and_return([merge_event])
- end
-
- it 'counts and measures duration of imported projects' do
- expect(Gitlab::Metrics).to receive(:counter).with(
- :bitbucket_server_importer_imported_projects_total,
- 'The number of imported projects'
- )
-
- expect(Gitlab::Metrics).to receive(:histogram).with(
- :bitbucket_server_importer_total_duration_seconds,
- 'Total time spent importing projects, in seconds',
- {},
- Gitlab::Import::Metrics::IMPORT_DURATION_BUCKETS
- )
-
- expect(counter).to receive(:increment)
- expect(histogram).to receive(:observe).with({ importer: :bitbucket_server_importer }, anything)
-
- subject.execute
- end
-
- it 'counts imported pull requests' do
- expect(Gitlab::Metrics).to receive(:counter).with(
- :bitbucket_server_importer_imported_merge_requests_total,
- 'The number of imported merge (pull) requests'
- )
-
- expect(counter).to receive(:increment)
-
- subject.execute
- end
- end
-
- describe 'threaded discussions' do
- let(:reply_author) { create(:user, username: 'reply_author', email: 'reply_author@example.org') }
- let(:inline_note_author) { create(:user, username: 'inline_note_author', email: 'inline_note_author@example.org') }
-
- let(:reply) do
- instance_double(
- BitbucketServer::Representation::PullRequestComment,
- author_email: reply_author.email,
- author_username: reply_author.username,
- note: 'I agree',
- created_at: now,
- updated_at: now)
- end
-
- # https://gitlab.com/gitlab-org/gitlab-test/compare/c1acaa58bbcbc3eafe538cb8274ba387047b69f8...5937ac0a7beb003549fc5fd26fc247ad
- let(:inline_note) do
- instance_double(
- BitbucketServer::Representation::PullRequestComment,
- file_type: 'ADDED',
- from_sha: sample.commits.first,
- to_sha: sample.commits.last,
- file_path: '.gitmodules',
- old_pos: nil,
- new_pos: 4,
- note: 'Hello world',
- author_email: inline_note_author.email,
- author_username: inline_note_author.username,
- comments: [reply],
- created_at: now,
- updated_at: now,
- parent_comment: nil)
- end
-
- let(:inline_comment) do
- instance_double(
- BitbucketServer::Representation::Activity,
- comment?: true,
- inline_comment?: true,
- merge_event?: false,
- comment: inline_note)
- end
-
- before do
- allow(reply).to receive(:parent_comment).and_return(inline_note)
- allow(subject.client).to receive(:activities).and_return([inline_comment])
- end
-
- shared_examples 'imports threaded discussions' do
- it 'imports threaded discussions' do
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- merge_request = MergeRequest.first
- expect(merge_request.notes.count).to eq(2)
- expect(merge_request.notes.map(&:discussion_id).uniq.count).to eq(1)
-
- notes = merge_request.notes.order(:id).to_a
- start_note = notes.first
- expect(start_note.type).to eq('DiffNote')
- expect(start_note.note).to end_with(inline_note.note)
- expect(start_note.created_at).to eq(inline_note.created_at)
- expect(start_note.updated_at).to eq(inline_note.updated_at)
- expect(start_note.position.base_sha).to eq(inline_note.from_sha)
- expect(start_note.position.start_sha).to eq(inline_note.from_sha)
- expect(start_note.position.head_sha).to eq(inline_note.to_sha)
- expect(start_note.position.old_line).to be_nil
- expect(start_note.position.new_line).to eq(inline_note.new_pos)
- expect(start_note.author).to eq(inline_note_author)
-
- reply_note = notes.last
- # Make sure author and reply context is included
- expect(reply_note.note).to start_with("> #{inline_note.note}\n\n#{reply.note}")
- expect(reply_note.author).to eq(reply_author)
- expect(reply_note.created_at).to eq(reply.created_at)
- expect(reply_note.updated_at).to eq(reply.created_at)
- expect(reply_note.position.base_sha).to eq(inline_note.from_sha)
- expect(reply_note.position.start_sha).to eq(inline_note.from_sha)
- expect(reply_note.position.head_sha).to eq(inline_note.to_sha)
- expect(reply_note.position.old_line).to be_nil
- expect(reply_note.position.new_line).to eq(inline_note.new_pos)
- end
- end
-
- context 'when bitbucket_server_user_mapping_by_username feature flag is disabled' do
- before do
- stub_feature_flags(bitbucket_server_user_mapping_by_username: false)
- end
-
- include_examples 'imports threaded discussions'
- end
-
- context 'when bitbucket_server_user_mapping_by_username feature flag is enabled' do
- before do
- stub_feature_flags(bitbucket_server_user_mapping_by_username: true)
- end
-
- include_examples 'imports threaded discussions' do
- context 'when username is not present' do
- before do
- allow(reply).to receive(:author_username).and_return(nil)
- allow(inline_note).to receive(:author_username).and_return(nil)
- end
-
- it 'defaults to import user' do
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- notes = MergeRequest.first.notes.order(:id).to_a
-
- expect(notes.first.author).to eq(project_creator)
- expect(notes.last.author).to eq(project_creator)
- end
- end
- end
- end
-
- context 'when user is not found' do
- before do
- allow(reply).to receive(:author_username).and_return(nil)
- allow(reply).to receive(:author_email).and_return(nil)
- allow(inline_note).to receive(:author_username).and_return(nil)
- allow(inline_note).to receive(:author_email).and_return(nil)
- end
-
- it 'maps importer user' do
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- notes = MergeRequest.first.notes.order(:id).to_a
-
- expect(notes.first.author).to eq(project_creator)
- expect(notes.last.author).to eq(project_creator)
- end
- end
- end
-
- it 'falls back to comments if diff comments fail to validate' do
- reply = instance_double(
- BitbucketServer::Representation::Comment,
- author_email: 'someuser@gitlab.com',
- author_username: 'Aquaman',
- note: 'I agree',
- created_at: now,
- updated_at: now)
-
- # https://gitlab.com/gitlab-org/gitlab-test/compare/c1acaa58bbcbc3eafe538cb8274ba387047b69f8...5937ac0a7beb003549fc5fd26fc247ad
- inline_note = instance_double(
- BitbucketServer::Representation::PullRequestComment,
- file_type: 'REMOVED',
- from_sha: sample.commits.first,
- to_sha: sample.commits.last,
- file_path: '.gitmodules',
- old_pos: 8,
- new_pos: 9,
- note: 'This is a note with an invalid line position.',
- author_email: project.owner.email,
- author_username: 'Owner',
- comments: [reply],
- created_at: now,
- updated_at: now,
- parent_comment: nil)
-
- inline_comment = instance_double(
- BitbucketServer::Representation::Activity,
- comment?: true,
- inline_comment?: true,
- merge_event?: false,
- comment: inline_note)
-
- allow(reply).to receive(:parent_comment).and_return(inline_note)
-
- expect(subject.client).to receive(:activities).and_return([inline_comment])
-
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- merge_request = MergeRequest.first
- expect(merge_request.notes.count).to eq(2)
- notes = merge_request.notes
-
- expect(notes.first.note).to start_with('*Comment on .gitmodules')
- expect(notes.second.note).to start_with('*Comment on .gitmodules')
- end
-
- it 'reports an error if an exception is raised' do
- allow(subject).to receive(:import_bitbucket_pull_request).and_raise(RuntimeError)
- expect(Gitlab::ErrorTracking).to receive(:log_exception)
-
- subject.execute
- end
-
- describe 'import pull requests with caching' do
- let(:pull_request_already_imported) do
- instance_double(
- BitbucketServer::Representation::PullRequest,
- iid: 11)
- end
-
- let(:pull_request_to_be_imported) do
- instance_double(
- BitbucketServer::Representation::PullRequest,
- iid: 12,
- source_branch_sha: sample.commits.last,
- source_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.source_branch,
- target_branch_sha: sample.commits.first,
- target_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.target_branch,
- title: 'This is a title',
- description: 'This is a test pull request',
- reviewers: sample.reviewers,
- state: 'merged',
- author: 'Test Author',
- author_email: pull_request_author.email,
- author_username: pull_request_author.username,
- created_at: Time.now,
- updated_at: Time.now,
- raw: {},
- merged?: true)
- end
-
- before do
- Gitlab::Cache::Import::Caching.set_add(subject.already_imported_cache_key, pull_request_already_imported.iid)
- allow(subject.client).to receive(:pull_requests).and_return([pull_request_to_be_imported, pull_request_already_imported], [])
- end
-
- it 'only imports one Merge Request, as the other one is in the cache' do
- expect(subject.client).to receive(:activities).and_return([merge_event])
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- expect(Gitlab::Cache::Import::Caching.set_includes?(subject.already_imported_cache_key, pull_request_already_imported.iid)).to eq(true)
- expect(Gitlab::Cache::Import::Caching.set_includes?(subject.already_imported_cache_key, pull_request_to_be_imported.iid)).to eq(true)
- end
- end
- end
-
- describe 'inaccessible branches' do
- let(:id) { 10 }
- let(:temp_branch_from) { "gitlab/import/pull-request/#{id}/from" }
- let(:temp_branch_to) { "gitlab/import/pull-request/#{id}/to" }
-
- before do
- pull_request = instance_double(
- BitbucketServer::Representation::PullRequest,
- iid: id,
- source_branch_sha: '12345678',
- source_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.source_branch,
- target_branch_sha: '98765432',
- target_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.target_branch,
- title: 'This is a title',
- description: 'This is a test pull request',
- reviewers: [],
- state: 'merged',
- author: 'Test Author',
- author_email: project.owner.email,
- author_username: 'author',
- created_at: Time.now,
- updated_at: Time.now,
- merged?: true)
-
- expect(subject.client).to receive(:pull_requests).and_return([pull_request], [])
- expect(subject.client).to receive(:activities).and_return([])
- expect(subject).to receive(:import_repository).twice
- end
-
- it '#restore_branches' do
- expect(subject).to receive(:restore_branches).and_call_original
- expect(subject).to receive(:delete_temp_branches)
- expect(subject.client).to receive(:create_branch)
- .with(project_key, repo_slug,
- temp_branch_from,
- '12345678')
- expect(subject.client).to receive(:create_branch)
- .with(project_key, repo_slug,
- temp_branch_to,
- '98765432')
-
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
- end
-
- it '#delete_temp_branches' do
- expect(subject.client).to receive(:create_branch).twice
- expect(subject).to receive(:delete_temp_branches).and_call_original
- expect(subject.client).to receive(:delete_branch)
- .with(project_key, repo_slug,
- temp_branch_from,
- '12345678')
- expect(subject.client).to receive(:delete_branch)
- .with(project_key, repo_slug,
- temp_branch_to,
- '98765432')
- expect(project.repository).to receive(:delete_branch).with(temp_branch_from)
- expect(project.repository).to receive(:delete_branch).with(temp_branch_to)
-
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
- end
- end
-
- context "lfs files" do
- before do
- allow(project).to receive(:lfs_enabled?).and_return(true)
- allow(subject).to receive(:import_repository)
- allow(subject).to receive(:import_pull_requests)
- end
-
- it "downloads lfs objects if lfs_enabled is enabled for project" do
- expect_next_instance_of(Projects::LfsPointers::LfsImportService) do |lfs_import_service|
- expect(lfs_import_service).to receive(:execute).and_return(status: :success)
- end
-
- subject.execute
- end
-
- it "adds the error message when the lfs download fails" do
- allow_next_instance_of(Projects::LfsPointers::LfsImportService) do |lfs_import_service|
- expect(lfs_import_service).to receive(:execute).and_return(status: :error, message: "LFS server not reachable")
- end
-
- subject.execute
-
- expect(project.import_state.reload.last_error).to eq(Gitlab::Json.dump({
- message: "The remote data could not be fully imported.",
- errors: [{
- type: "lfs_objects",
- errors: "The Lfs import process failed. LFS server not reachable"
- }]
- }))
- end
- end
-end
diff --git a/spec/lib/gitlab/checks/matching_merge_request_spec.rb b/spec/lib/gitlab/checks/matching_merge_request_spec.rb
index c65a1e4d656..5397aea90a9 100644
--- a/spec/lib/gitlab/checks/matching_merge_request_spec.rb
+++ b/spec/lib/gitlab/checks/matching_merge_request_spec.rb
@@ -31,33 +31,40 @@ RSpec.describe Gitlab::Checks::MatchingMergeRequest do
expect(matcher.match?).to be false
end
- context 'with load balancing enabled' do
+ context 'with load balancing enabled', :redis do
let(:session) { ::Gitlab::Database::LoadBalancing::Session.current }
- let(:all_caught_up) { true }
before do
+ # Mock the load balancer as though replicas are actually configured
+ allow(::ApplicationRecord.load_balancer)
+ .to receive(:primary_only?)
+ .and_return(false)
+
+ # Write a sticking position for the primary to Redis
+ ::ApplicationRecord.sticking.stick(:project, project.id)
+
Gitlab::Database::LoadBalancing::Session.clear_session
- allow(::ApplicationRecord.sticking)
- .to receive(:all_caught_up?)
- .and_return(all_caught_up)
+ # Mock the load balancer result since we don't actually have real replicas to match against
+ expect(::ApplicationRecord.load_balancer)
+ .to receive(:select_up_to_date_host)
+ .and_return(load_balancer_result)
+ # Expect sticking to be called with the correct arguments, but don't stub it so that we can also test
+ # the internal behaviour of updating Session.use_primary?
expect(::ApplicationRecord.sticking)
- .to receive(:select_valid_host)
- .with(:project, project.id)
+ .to receive(:find_caught_up_replica)
+ .with(:project, project.id, use_primary_on_empty_location: true)
.and_call_original
-
- allow(::ApplicationRecord.sticking)
- .to receive(:select_caught_up_replicas)
- .with(:project, project.id)
- .and_return(all_caught_up)
end
after do
Gitlab::Database::LoadBalancing::Session.clear_session
end
- shared_examples 'secondary that has caught up to a primary' do
+ context 'when any secondary is caught up' do
+ let(:load_balancer_result) { ::Gitlab::Database::LoadBalancing::LoadBalancer::ANY_CAUGHT_UP }
+
it 'continues to use the secondary' do
expect(session.use_primary?).to be false
expect(subject.match?).to be true
@@ -70,7 +77,9 @@ RSpec.describe Gitlab::Checks::MatchingMergeRequest do
end
end
- shared_examples 'secondary that is lagging primary' do
+ context 'when all secondaries are lagging behind' do
+ let(:load_balancer_result) { ::Gitlab::Database::LoadBalancing::LoadBalancer::NONE_CAUGHT_UP }
+
it 'sticks to the primary' do
expect(subject.match?).to be true
expect(session.use_primary?).to be true
@@ -82,14 +91,6 @@ RSpec.describe Gitlab::Checks::MatchingMergeRequest do
.and change { stale_counter.get }.by(1)
end
end
-
- it_behaves_like 'secondary that has caught up to a primary'
-
- context 'on secondary behind primary' do
- let(:all_caught_up) { false }
-
- it_behaves_like 'secondary that is lagging primary'
- end
end
end
end
diff --git a/spec/lib/gitlab/ci/build/artifacts/metadata_spec.rb b/spec/lib/gitlab/ci/build/artifacts/metadata_spec.rb
index efe99cd276c..1f3ba0ef76e 100644
--- a/spec/lib/gitlab/ci/build/artifacts/metadata_spec.rb
+++ b/spec/lib/gitlab/ci/build/artifacts/metadata_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Build::Artifacts::Metadata do
+RSpec.describe Gitlab::Ci::Build::Artifacts::Metadata, feature_category: :build_artifacts do
def metadata(path = '', **opts)
described_class.new(metadata_file_stream, path, **opts)
end
@@ -19,132 +19,158 @@ RSpec.describe Gitlab::Ci::Build::Artifacts::Metadata do
metadata_file_stream&.close
end
- context 'metadata file exists' do
- describe '#find_entries! empty string' do
- subject { metadata('').find_entries! }
+ describe '#to_entry' do
+ subject(:entry) { metadata.to_entry }
- it 'matches correct paths' do
- expect(subject.keys).to contain_exactly 'ci_artifacts.txt',
- 'other_artifacts_0.1.2/',
- 'rails_sample.jpg',
- 'tests_encoding/'
- end
-
- it 'matches metadata for every path' do
- expect(subject.keys.count).to eq 4
- end
+ it { is_expected.to be_an_instance_of(Gitlab::Ci::Build::Artifacts::Metadata::Entry) }
- it 'return Hashes for each metadata' do
- expect(subject.values).to all(be_kind_of(Hash))
+ context 'when given path starts with a ./ prefix' do
+ it 'strips the ./ prefix from the path when instantiating the entry' do
+ meta = metadata("./some/path")
+ expect(Gitlab::Ci::Build::Artifacts::Metadata::Entry).to receive(:new).with("some/path", {})
+ meta.to_entry
end
end
+ end
- describe '#find_entries! other_artifacts_0.1.2/' do
- subject { metadata('other_artifacts_0.1.2/').find_entries! }
+ describe '#full_version' do
+ subject { metadata.full_version }
- it 'matches correct paths' do
- expect(subject.keys)
- .to contain_exactly 'other_artifacts_0.1.2/',
- 'other_artifacts_0.1.2/doc_sample.txt',
- 'other_artifacts_0.1.2/another-subdirectory/'
- end
- end
+ it { is_expected.to eq 'GitLab Build Artifacts Metadata 0.0.1' }
+ end
- describe '#find_entries! other_artifacts_0.1.2/another-subdirectory/' do
- subject { metadata('other_artifacts_0.1.2/another-subdirectory/').find_entries! }
+ describe '#version' do
+ subject { metadata.version }
- it 'matches correct paths' do
- expect(subject.keys)
- .to contain_exactly 'other_artifacts_0.1.2/another-subdirectory/',
- 'other_artifacts_0.1.2/another-subdirectory/empty_directory/',
- 'other_artifacts_0.1.2/another-subdirectory/banana_sample.gif'
- end
- end
+ it { is_expected.to eq '0.0.1' }
+ end
- describe '#find_entries! recursively for other_artifacts_0.1.2/' do
- subject { metadata('other_artifacts_0.1.2/', recursive: true).find_entries! }
+ describe '#errors' do
+ subject { metadata.errors }
- it 'matches correct paths' do
- expect(subject.keys)
- .to contain_exactly 'other_artifacts_0.1.2/',
- 'other_artifacts_0.1.2/doc_sample.txt',
- 'other_artifacts_0.1.2/another-subdirectory/',
- 'other_artifacts_0.1.2/another-subdirectory/empty_directory/',
- 'other_artifacts_0.1.2/another-subdirectory/banana_sample.gif'
- end
- end
+ it { is_expected.to eq({}) }
+ end
- describe '#to_entry' do
- subject { metadata('').to_entry }
+ describe '#find_entries!' do
+ let(:recursive) { false }
- it { is_expected.to be_an_instance_of(Gitlab::Ci::Build::Artifacts::Metadata::Entry) }
- end
+ subject(:find_entries) { metadata(path, recursive: recursive).find_entries! }
- describe '#full_version' do
- subject { metadata('').full_version }
+ context 'when metadata file exists' do
+ context 'and given path is an empty string' do
+ let(:path) { '' }
- it { is_expected.to eq 'GitLab Build Artifacts Metadata 0.0.1' }
- end
+ it 'returns paths to all files and directories at the root level' do
+ expect(find_entries.keys).to contain_exactly(
+ 'ci_artifacts.txt',
+ 'other_artifacts_0.1.2/',
+ 'rails_sample.jpg',
+ 'tests_encoding/'
+ )
+ end
- describe '#version' do
- subject { metadata('').version }
+ it 'returns a Hash of metadata for each path' do
+ expect(find_entries.values).to all(be_kind_of(Hash))
+ end
+ end
- it { is_expected.to eq '0.0.1' }
- end
+ shared_examples 'finding entries for a given path' do |options|
+ let(:path) { "#{options[:path_prefix]}#{target_path}" }
+
+ context 'when given path targets a directory at the root level' do
+ let(:target_path) { 'other_artifacts_0.1.2/' }
+
+ it 'returns paths to all files and directories at the first level of the directory' do
+ expect(find_entries.keys).to contain_exactly(
+ 'other_artifacts_0.1.2/',
+ 'other_artifacts_0.1.2/doc_sample.txt',
+ 'other_artifacts_0.1.2/another-subdirectory/'
+ )
+ end
+ end
+
+ context 'when given path targets a sub-directory' do
+ let(:target_path) { 'other_artifacts_0.1.2/another-subdirectory/' }
+
+ it 'returns paths to all files and directories at the first level of the sub-directory' do
+ expect(find_entries.keys).to contain_exactly(
+ 'other_artifacts_0.1.2/another-subdirectory/',
+ 'other_artifacts_0.1.2/another-subdirectory/empty_directory/',
+ 'other_artifacts_0.1.2/another-subdirectory/banana_sample.gif'
+ )
+ end
+ end
+
+ context 'when given path targets a directory recursively' do
+ let(:target_path) { 'other_artifacts_0.1.2/' }
+ let(:recursive) { true }
+
+ it 'returns all paths recursively within the target directory' do
+ expect(find_entries.keys).to contain_exactly(
+ 'other_artifacts_0.1.2/',
+ 'other_artifacts_0.1.2/doc_sample.txt',
+ 'other_artifacts_0.1.2/another-subdirectory/',
+ 'other_artifacts_0.1.2/another-subdirectory/empty_directory/',
+ 'other_artifacts_0.1.2/another-subdirectory/banana_sample.gif'
+ )
+ end
+ end
+ end
- describe '#errors' do
- subject { metadata('').errors }
+ context 'and given path does not start with a ./ prefix' do
+ it_behaves_like 'finding entries for a given path', path_prefix: ''
+ end
- it { is_expected.to eq({}) }
+ context 'and given path starts with a ./ prefix' do
+ it_behaves_like 'finding entries for a given path', path_prefix: './'
+ end
end
- end
- context 'metadata file does not exist' do
- let(:metadata_file_path) { nil }
+ context 'when metadata file stream is nil' do
+ let(:path) { '' }
+ let(:metadata_file_stream) { nil }
- describe '#find_entries!' do
it 'raises error' do
- expect { metadata.find_entries! }.to raise_error(described_class::InvalidStreamError, /Invalid stream/)
+ expect { find_entries }.to raise_error(described_class::InvalidStreamError, /Invalid stream/)
end
end
- end
- context 'metadata file is invalid' do
- let(:metadata_file_path) { Rails.root + 'spec/fixtures/ci_build_artifacts.zip' }
+ context 'when metadata file is invalid' do
+ let(:path) { '' }
+ let(:metadata_file_path) { Rails.root + 'spec/fixtures/ci_build_artifacts.zip' }
- describe '#find_entries!' do
it 'raises error' do
- expect { metadata.find_entries! }.to raise_error(described_class::InvalidStreamError, /not in gzip format/)
+ expect { find_entries }.to raise_error(described_class::InvalidStreamError, /not in gzip format/)
end
end
- end
- context 'generated metadata' do
- let(:tmpfile) { Tempfile.new('test-metadata') }
- let(:generator) { CiArtifactMetadataGenerator.new(tmpfile) }
- let(:entry_count) { 5 }
+ context 'with generated metadata' do
+ let(:tmpfile) { Tempfile.new('test-metadata') }
+ let(:generator) { CiArtifactMetadataGenerator.new(tmpfile) }
+ let(:entry_count) { 5 }
- before do
- tmpfile.binmode
+ before do
+ tmpfile.binmode
- (1..entry_count).each do |index|
- generator.add_entry("public/test-#{index}.txt")
- end
+ (1..entry_count).each do |index|
+ generator.add_entry("public/test-#{index}.txt")
+ end
- generator.write
- end
+ generator.write
+ end
- after do
- File.unlink(tmpfile.path)
- end
+ after do
+ File.unlink(tmpfile.path)
+ end
- describe '#find_entries!' do
- it 'reads expected number of entries' do
- stream = File.open(tmpfile.path)
+ describe '#find_entries!' do
+ it 'reads expected number of entries' do
+ stream = File.open(tmpfile.path)
- metadata = described_class.new(stream, 'public', recursive: true)
+ metadata = described_class.new(stream, 'public', recursive: true)
- expect(metadata.find_entries!.count).to eq entry_count
+ expect(metadata.find_entries!.count).to eq entry_count
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/build/duration_parser_spec.rb b/spec/lib/gitlab/ci/build/duration_parser_spec.rb
index 7f5ff1eb0ee..bc905aa0a35 100644
--- a/spec/lib/gitlab/ci/build/duration_parser_spec.rb
+++ b/spec/lib/gitlab/ci/build/duration_parser_spec.rb
@@ -25,8 +25,8 @@ RSpec.describe Gitlab::Ci::Build::DurationParser do
it { is_expected.to be_truthy }
it 'caches data' do
- expect(ChronicDuration).to receive(:parse).with(value).once.and_call_original
- expect(ChronicDuration).to receive(:parse).with(other_value).once.and_call_original
+ expect(ChronicDuration).to receive(:parse).with(value, use_complete_matcher: true).once.and_call_original
+ expect(ChronicDuration).to receive(:parse).with(other_value, use_complete_matcher: true).once.and_call_original
2.times do
expect(described_class.validate_duration(value)).to eq(86400)
@@ -41,7 +41,7 @@ RSpec.describe Gitlab::Ci::Build::DurationParser do
it { is_expected.to be_falsy }
it 'caches data' do
- expect(ChronicDuration).to receive(:parse).with(value).once.and_call_original
+ expect(ChronicDuration).to receive(:parse).with(value, use_complete_matcher: true).once.and_call_original
2.times do
expect(described_class.validate_duration(value)).to be_falsey
diff --git a/spec/lib/gitlab/ci/components/instance_path_spec.rb b/spec/lib/gitlab/ci/components/instance_path_spec.rb
index f4bc706f9b4..97843781891 100644
--- a/spec/lib/gitlab/ci/components/instance_path_spec.rb
+++ b/spec/lib/gitlab/ci/components/instance_path_spec.rb
@@ -14,125 +14,214 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline
end
describe 'FQDN path' do
- let_it_be(:existing_project) { create(:project, :repository) }
-
- let(:project_path) { existing_project.full_path }
- let(:address) { "acme.com/#{project_path}/component@#{version}" }
let(:version) { 'master' }
+ let(:project_path) { project.full_path }
+ let(:address) { "acme.com/#{project_path}/secret-detection@#{version}" }
+
+ context 'when the project repository contains a templates directory' do
+ let_it_be(:project) do
+ create(
+ :project, :custom_repo,
+ files: {
+ 'templates/secret-detection.yml' => 'image: alpine_1',
+ 'templates/dast/template.yml' => 'image: alpine_2',
+ 'templates/dast/another-template.yml' => 'image: alpine_3',
+ 'templates/dast/another-folder/template.yml' => 'image: alpine_4'
+ }
+ )
+ end
- context 'when project exists' do
- it 'provides the expected attributes', :aggregate_failures do
- expect(path.project).to eq(existing_project)
- expect(path.host).to eq(current_host)
- expect(path.sha).to eq(existing_project.commit('master').id)
- expect(path.project_file_path).to eq('component/template.yml')
+ before do
+ project.add_developer(user)
end
- context 'when content exists' do
- let(:content) { 'image: alpine' }
+ context 'when user does not have permissions' do
+ it 'raises an error when fetching the content' do
+ expect { path.fetch_content!(current_user: build(:user)) }
+ .to raise_error(Gitlab::Access::AccessDeniedError)
+ end
+ end
- before do
- allow_next_instance_of(Repository) do |instance|
- allow(instance)
- .to receive(:blob_data_at)
- .with(existing_project.commit('master').id, 'component/template.yml')
- .and_return(content)
- end
+ context 'when the component is simple (single file template)' do
+ it 'fetches the component content', :aggregate_failures do
+ expect(path.fetch_content!(current_user: user)).to eq('image: alpine_1')
+ expect(path.host).to eq(current_host)
+ expect(path.project_file_path).to eq('templates/secret-detection.yml')
+ expect(path.project).to eq(project)
+ expect(path.sha).to eq(project.commit('master').id)
end
+ end
- context 'when user has permissions to read code' do
- before do
- existing_project.add_developer(user)
- end
+ context 'when the component is complex (directory-based template)' do
+ let(:address) { "acme.com/#{project_path}/dast@#{version}" }
+
+ it 'fetches the component content', :aggregate_failures do
+ expect(path.fetch_content!(current_user: user)).to eq('image: alpine_2')
+ expect(path.host).to eq(current_host)
+ expect(path.project_file_path).to eq('templates/dast/template.yml')
+ expect(path.project).to eq(project)
+ expect(path.sha).to eq(project.commit('master').id)
+ end
- it 'fetches the content' do
- expect(path.fetch_content!(current_user: user)).to eq(content)
+ context 'when there is an invalid nested component folder' do
+ let(:address) { "acme.com/#{project_path}/dast/another-folder@#{version}" }
+
+ it 'returns nil' do
+ expect(path.fetch_content!(current_user: user)).to be_nil
end
end
- context 'when user does not have permissions to download code' do
- it 'raises an error when fetching the content' do
- expect { path.fetch_content!(current_user: user) }
- .to raise_error(Gitlab::Access::AccessDeniedError)
+ context 'when there is an invalid nested component path' do
+ let(:address) { "acme.com/#{project_path}/dast/another-template@#{version}" }
+
+ it 'returns nil' do
+ expect(path.fetch_content!(current_user: user)).to be_nil
end
end
end
- end
- context 'when project path is nested under a subgroup' do
- let(:existing_group) { create(:group, :nested) }
- let(:existing_project) { create(:project, :repository, group: existing_group) }
+ context 'when fetching the latest version of a component' do
+ let_it_be(:project) do
+ create(
+ :project, :custom_repo,
+ files: {
+ 'templates/secret-detection.yml' => 'image: alpine_1'
+ }
+ )
+ end
- it 'provides the expected attributes', :aggregate_failures do
- expect(path.project).to eq(existing_project)
- expect(path.host).to eq(current_host)
- expect(path.sha).to eq(existing_project.commit('master').id)
- expect(path.project_file_path).to eq('component/template.yml')
- end
- end
+ let(:version) { '~latest' }
- context 'when current GitLab instance is installed on a relative URL' do
- let(:address) { "acme.com/gitlab/#{project_path}/component@#{version}" }
- let(:current_host) { 'acme.com/gitlab/' }
+ let(:latest_sha) do
+ project.repository.commit('master').id
+ end
- it 'provides the expected attributes', :aggregate_failures do
- expect(path.project).to eq(existing_project)
- expect(path.host).to eq(current_host)
- expect(path.sha).to eq(existing_project.commit('master').id)
- expect(path.project_file_path).to eq('component/template.yml')
+ before do
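+ # Create an older release, update the template, then cut a newer release pointing at the latest SHA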
+ create(:release, project: project, sha: project.repository.root_ref_sha,
+ released_at: Time.zone.now - 1.day)
+
+ project.repository.update_file(
+ user, 'templates/secret-detection.yml', 'image: alpine_2',
+ message: 'Updates image', branch_name: project.default_branch
+ )
+
+ create(:release, project: project, sha: latest_sha,
+ released_at: Time.zone.now)
+ end
+
+ it 'fetches the component content', :aggregate_failures do
+ expect(path.fetch_content!(current_user: user)).to eq('image: alpine_2')
+ expect(path.host).to eq(current_host)
+ expect(path.project_file_path).to eq('templates/secret-detection.yml')
+ expect(path.project).to eq(project)
+ expect(path.sha).to eq(latest_sha)
+ end
end
- end
- context 'when version does not exist' do
- let(:version) { 'non-existent' }
+ context 'when version does not exist' do
+ let(:version) { 'non-existent' }
- it 'provides the expected attributes', :aggregate_failures do
- expect(path.project).to eq(existing_project)
- expect(path.host).to eq(current_host)
- expect(path.sha).to be_nil
- expect(path.project_file_path).to eq('component/template.yml')
+ it 'returns nil', :aggregate_failures do
+ expect(path.fetch_content!(current_user: user)).to be_nil
+ expect(path.host).to eq(current_host)
+ expect(path.project_file_path).to be_nil
+ expect(path.project).to eq(project)
+ expect(path.sha).to be_nil
+ end
end
- it 'returns nil when fetching the content' do
- expect(path.fetch_content!(current_user: user)).to be_nil
+ context 'when current GitLab instance is installed on a relative URL' do
+ let(:address) { "acme.com/gitlab/#{project_path}/secret-detection@#{version}" }
+ let(:current_host) { 'acme.com/gitlab/' }
+
+ it 'fetches the component content', :aggregate_failures do
+ expect(path.fetch_content!(current_user: user)).to eq('image: alpine_1')
+ expect(path.host).to eq(current_host)
+ expect(path.project_file_path).to eq('templates/secret-detection.yml')
+ expect(path.project).to eq(project)
+ expect(path.sha).to eq(project.commit('master').id)
+ end
end
end
- context 'when version is `~latest`' do
- let(:version) { '~latest' }
+ # All the following tests are for deprecated code and will be removed
+ # in https://gitlab.com/gitlab-org/gitlab/-/issues/415855
+ context 'when the project does not contain a templates directory' do
+ let(:project_path) { project.full_path }
+ let(:address) { "acme.com/#{project_path}/component@#{version}" }
+
+ let_it_be(:project) do
+ create(
+ :project, :custom_repo,
+ files: {
+ 'component/template.yml' => 'image: alpine'
+ }
+ )
+ end
+
+ before do
+ project.add_developer(user)
+ end
- context 'when project has releases' do
- let_it_be(:latest_release) do
- create(:release, project: existing_project, sha: 'sha-1', released_at: Time.zone.now)
- end
+ it 'fetches the component content', :aggregate_failures do
+ expect(path.fetch_content!(current_user: user)).to eq('image: alpine')
+ expect(path.host).to eq(current_host)
+ expect(path.project_file_path).to eq('component/template.yml')
+ expect(path.project).to eq(project)
+ expect(path.sha).to eq(project.commit('master').id)
+ end
- before_all do
- # Previous release
- create(:release, project: existing_project, sha: 'sha-2', released_at: Time.zone.now - 1.day)
+ context 'when project path is nested under a subgroup' do
+ let_it_be(:group) { create(:group, :nested) }
+ let_it_be(:project) do
+ create(
+ :project, :custom_repo,
+ files: {
+ 'component/template.yml' => 'image: alpine'
+ },
+ group: group
+ )
end
- it 'returns the sha of the latest release' do
- expect(path.sha).to eq(latest_release.sha)
+ it 'fetches the component content', :aggregate_failures do
+ expect(path.fetch_content!(current_user: user)).to eq('image: alpine')
+ expect(path.host).to eq(current_host)
+ expect(path.project_file_path).to eq('component/template.yml')
+ expect(path.project).to eq(project)
+ expect(path.sha).to eq(project.commit('master').id)
end
end
- context 'when project does not have releases' do
- it { expect(path.sha).to be_nil }
+ context 'when current GitLab instance is installed on a relative URL' do
+ let(:address) { "acme.com/gitlab/#{project_path}/component@#{version}" }
+ let(:current_host) { 'acme.com/gitlab/' }
+
+ it 'fetches the component content', :aggregate_failures do
+ expect(path.fetch_content!(current_user: user)).to eq('image: alpine')
+ expect(path.host).to eq(current_host)
+ expect(path.project_file_path).to eq('component/template.yml')
+ expect(path.project).to eq(project)
+ expect(path.sha).to eq(project.commit('master').id)
+ end
end
- end
- context 'when project does not exist' do
- let(:project_path) { 'non-existent/project' }
+ context 'when version does not exist' do
+ let(:version) { 'non-existent' }
- it 'provides the expected attributes', :aggregate_failures do
- expect(path.project).to be_nil
- expect(path.host).to eq(current_host)
- expect(path.sha).to be_nil
- expect(path.project_file_path).to be_nil
+ it 'returns nil', :aggregate_failures do
+ expect(path.fetch_content!(current_user: user)).to be_nil
+ expect(path.host).to eq(current_host)
+ expect(path.project_file_path).to be_nil
+ expect(path.project).to eq(project)
+ expect(path.sha).to be_nil
+ end
end
- it 'returns nil when fetching the content' do
- expect(path.fetch_content!(current_user: user)).to be_nil
+ context 'when user does not have permissions' do
+ it 'raises an error when fetching the content' do
+ expect { path.fetch_content!(current_user: build(:user)) }
+ .to raise_error(Gitlab::Access::AccessDeniedError)
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
index 736c184a289..567ffa68836 100644
--- a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Bridge do
# as they do not have sense in context of Bridge
let(:ignored_inheritable_columns) do
%i[before_script after_script hooks image services cache interruptible timeout
- retry tags artifacts]
+ retry tags artifacts id_tokens]
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/default_spec.rb b/spec/lib/gitlab/ci/config/entry/default_spec.rb
index 46e96843ee3..17e716629cd 100644
--- a/spec/lib/gitlab/ci/config/entry/default_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/default_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Default do
it 'contains the expected node names' do
expect(described_class.nodes.keys)
.to match_array(%i[before_script after_script hooks cache image services
- interruptible timeout retry tags artifacts])
+ interruptible timeout retry tags artifacts id_tokens])
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb b/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
index dd15b049b9b..cd8e35ede61 100644
--- a/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper' # Change this to fast spec helper when FF `ci_refactor_external_rules` is removed
+require 'fast_spec_helper'
require_dependency 'active_model'
RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule, feature_category: :pipeline_composition do
@@ -14,21 +14,11 @@ RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule, feature_category
entry.compose!
end
- shared_examples 'a valid config' do
+ shared_examples 'a valid config' do |expected_value = nil|
it { is_expected.to be_valid }
it 'returns the expected value' do
- expect(entry.value).to eq(config.compact)
- end
-
- context 'when FF `ci_refactor_external_rules` is disabled' do
- before do
- stub_feature_flags(ci_refactor_external_rules: false)
- end
-
- it 'returns the expected value' do
- expect(entry.value).to eq(config)
- end
+ expect(entry.value).to eq(expected_value || config.compact)
end
end
@@ -99,19 +89,37 @@ RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule, feature_category
it_behaves_like 'a valid config'
- context 'when array' do
+ context 'when exists: clause is an array' do
let(:config) { { exists: ['./this.md', './that.md'] } }
it_behaves_like 'a valid config'
end
- context 'when null' do
+ context 'when exists: clause is null' do
let(:config) { { exists: nil } }
it_behaves_like 'a valid config'
end
end
+ context 'when specifying a changes: clause' do
+ let(:config) { { changes: %w[Dockerfile lib/* paths/**/*.rb] } }
+
+ it_behaves_like 'a valid config', { changes: { paths: %w[Dockerfile lib/* paths/**/*.rb] } }
+
+ context 'with paths:' do
+ let(:config) { { changes: { paths: %w[Dockerfile lib/* paths/**/*.rb] } } }
+
+ it_behaves_like 'a valid config'
+ end
+
+ context 'with paths: and compare_to:' do
+ let(:config) { { changes: { paths: ['Dockerfile'], compare_to: 'branch1' } } }
+
+ it_behaves_like 'a valid config'
+ end
+ end
+
context 'when specifying an unknown keyword' do
let(:config) { { invalid: :something } }
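As a quick reference, the `changes:` shapes the spec above accepts for include rules — values are illustrative, and the normalization noted is taken from the shared-example parameters in this hunk:

    { changes: %w[Dockerfile lib/* paths/**/*.rb] }                 # short form, normalized to { paths: [...] }
    { changes: { paths: %w[Dockerfile lib/*] } }                    # explicit paths:
    { changes: { paths: ['Dockerfile'], compare_to: 'branch1' } }   # paths: with compare_to: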
diff --git a/spec/lib/gitlab/ci/config/entry/include/rules_spec.rb b/spec/lib/gitlab/ci/config/entry/include/rules_spec.rb
index 05db81abfc1..503020e2202 100644
--- a/spec/lib/gitlab/ci/config/entry/include/rules_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/include/rules_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper' # Change this to fast spec helper when FF `ci_refactor_external_rules` is removed
+require 'fast_spec_helper'
require_dependency 'active_model'
RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules, feature_category: :pipeline_composition do
@@ -50,7 +50,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules, feature_category: :pip
entry.compose!
end
- it_behaves_like 'an invalid config', /contains unknown keys: changes/
+ it_behaves_like 'a valid config'
end
end
@@ -80,7 +80,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules, feature_category: :pip
let(:config) do
[
{ if: '$THIS == "that"' },
- { if: '$SKIP', when: 'never' }
+ { if: '$SKIP', when: 'never' },
+ { changes: ['Dockerfile'] }
]
end
@@ -96,7 +97,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules, feature_category: :pip
is_expected.to eq(
[
{ if: '$THIS == "that"' },
- { if: '$SKIP', when: 'never' }
+ { if: '$SKIP', when: 'never' },
+ { changes: { paths: ['Dockerfile'] } }
]
)
end
@@ -115,30 +117,5 @@ RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules, feature_category: :pip
end
end
end
-
- context 'when FF `ci_refactor_external_rules` is disabled' do
- before do
- stub_feature_flags(ci_refactor_external_rules: false)
- end
-
- context 'with an "if"' do
- let(:config) do
- [{ if: '$THIS == "that"' }]
- end
-
- it { is_expected.to eq(config) }
- end
-
- context 'with a list of two rules' do
- let(:config) do
- [
- { if: '$THIS == "that"' },
- { if: '$SKIP' }
- ]
- end
-
- it { is_expected.to eq(config) }
- end
- end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index 4be7c11fab0..1a78d929871 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::Entry::Job, feature_category: :pipeline_composition do
+ using RSpec::Parameterized::TableSyntax
+
let(:entry) { described_class.new(config, name: :rspec) }
it_behaves_like 'with inheritable CI config' do
@@ -29,7 +31,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job, feature_category: :pipeline_compo
let(:result) do
%i[before_script script after_script hooks stage cache
image services only except rules needs variables artifacts
- environment coverage retry interruptible timeout release tags
+ coverage retry interruptible timeout release tags
inherit parallel]
end
@@ -696,8 +698,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job, feature_category: :pipeline_compo
end
context 'with workflow rules' do
- using RSpec::Parameterized::TableSyntax
-
where(:name, :has_workflow_rules?, :only, :rules, :result) do
"uses default only" | false | nil | nil | { refs: %w[branches tags] }
"uses user only" | false | %w[branches] | nil | { refs: %w[branches] }
@@ -739,6 +739,20 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job, feature_category: :pipeline_compo
end
end
+ describe '#pages_job?', :aggregate_failures, feature_category: :pages do
+ where(:name, :result) do
+ :pages | true
+ :'pages:staging' | false
+ :'something:pages:else' | false
+ end
+
+ with_them do
+ subject { described_class.new({}, name: name).pages_job? }
+
+ it { is_expected.to eq(result) }
+ end
+ end
+
context 'when composed' do
before do
entry.compose!
diff --git a/spec/lib/gitlab/ci/config/entry/processable_spec.rb b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
index 4f13940d7e2..132e75a808b 100644
--- a/spec/lib/gitlab/ci/config/entry/processable_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
@@ -371,6 +371,39 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable, feature_category: :pipeli
end
end
+ context 'with environment' do
+ context 'when environment name is specified' do
+ let(:config) { { script: 'ls', environment: 'prod' }.compact }
+
+ it 'sets environment name and action to the entry value' do
+ entry.compose!(deps)
+
+ expect(entry.value[:environment]).to eq({ action: 'start', name: 'prod' })
+ expect(entry.value[:environment_name]).to eq('prod')
+ end
+ end
+
+ context 'when environment name, url and action are specified' do
+ let(:config) do
+ {
+ script: 'ls',
+ environment: {
+ name: 'staging',
+ url: 'https://gitlab.com',
+ action: 'prepare'
+ }
+ }.compact
+ end
+
+ it 'sets environment name, action and url to the entry value' do
+ entry.compose!(deps)
+
+ expect(entry.value[:environment]).to eq({ action: 'prepare', name: 'staging', url: 'https://gitlab.com' })
+ expect(entry.value[:environment_name]).to eq('staging')
+ end
+ end
+ end
+
context 'with inheritance' do
context 'of default:tags' do
using RSpec::Parameterized::TableSyntax
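For orientation, the normalized value asserted in the environment contexts above — a sketch based only on those expectations:

    entry.value[:environment]       # => { action: 'prepare', name: 'staging', url: 'https://gitlab.com' }
    entry.value[:environment_name]  # => 'staging'
    # with a bare string (environment: 'prod'), action defaults to 'start' and no url is set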
diff --git a/spec/lib/gitlab/ci/config/external/context_spec.rb b/spec/lib/gitlab/ci/config/external/context_spec.rb
index d8bd578be94..9ac72ebbac8 100644
--- a/spec/lib/gitlab/ci/config/external/context_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/context_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::External::Context, feature_category: :pipeline_composition do
let(:project) { build(:project) }
+ let(:pipeline) { double('Pipeline') }
let(:user) { double('User') }
let(:sha) { '12345' }
let(:variables) { Gitlab::Ci::Variables::Collection.new([{ 'key' => 'a', 'value' => 'b' }]) }
@@ -11,6 +12,7 @@ RSpec.describe Gitlab::Ci::Config::External::Context, feature_category: :pipelin
let(:attributes) do
{
project: project,
+ pipeline: pipeline,
user: user,
sha: sha,
variables: variables,
@@ -32,7 +34,7 @@ RSpec.describe Gitlab::Ci::Config::External::Context, feature_category: :pipelin
end
context 'without values' do
- let(:attributes) { { project: nil, user: nil, sha: nil } }
+ let(:attributes) { { project: nil, pipeline: nil, user: nil, sha: nil } }
it { is_expected.to have_attributes(**attributes) }
it { expect(subject.expandset).to eq([]) }
@@ -148,6 +150,7 @@ RSpec.describe Gitlab::Ci::Config::External::Context, feature_category: :pipelin
let(:attributes) do
{
project: project,
+ pipeline: pipeline,
user: user,
sha: sha,
logger: double('logger')
@@ -165,6 +168,7 @@ RSpec.describe Gitlab::Ci::Config::External::Context, feature_category: :pipelin
it { expect(mutated).not_to eq(subject) }
it { expect(mutated).to be_a(described_class) }
it { expect(mutated).to have_attributes(new_attributes) }
+ it { expect(mutated.pipeline).to eq(subject.pipeline) }
it { expect(mutated.expandset).to eq(subject.expandset) }
it { expect(mutated.execution_deadline).to eq(mutated.execution_deadline) }
it { expect(mutated.logger).to eq(mutated.logger) }
diff --git a/spec/lib/gitlab/ci/config/external/file/component_spec.rb b/spec/lib/gitlab/ci/config/external/file/component_spec.rb
index 487690296b5..0f7b811b5df 100644
--- a/spec/lib/gitlab/ci/config/external/file/component_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/component_spec.rb
@@ -120,6 +120,41 @@ RSpec.describe Gitlab::Ci::Config::External::File::Component, feature_category:
end
end
+ describe '#content' do
+ context 'when component is valid' do
+ let(:content) do
+ <<~COMPONENT
+ job:
+ script: echo
+ COMPONENT
+ end
+
+ let(:response) do
+ ServiceResponse.success(payload: {
+ content: content,
+ path: instance_double(::Gitlab::Ci::Components::InstancePath, project: project, sha: '12345')
+ })
+ end
+
+ it 'tracks the event' do
+ expect(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with('cicd_component_usage',
+ values: external_resource.context.user.id)
+
+ external_resource.content
+ end
+ end
+
+ context 'when component is invalid' do
+ let(:content) { 'the-content' }
+
+ it 'does not track the event' do
+ expect(::Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
+
+ external_resource.content
+ end
+ end
+ end
+
describe '#metadata' do
subject(:metadata) { external_resource.metadata }
diff --git a/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb b/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb
index 69b0524be9e..f542c0485e0 100644
--- a/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb
@@ -409,32 +409,6 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Verifier, feature_category:
expect { process }.to raise_error(expected_error_class)
end
end
-
- context 'when introduce_ci_max_total_yaml_size_bytes is disabled' do
- before do
- stub_feature_flags(introduce_ci_max_total_yaml_size_bytes: false)
- end
-
- context 'when pipeline tree size is within the limit' do
- before do
- stub_application_setting(ci_max_total_yaml_size_bytes: 10000)
- end
-
- it 'passes the verification' do
- expect(process.all?(&:valid?)).to be_truthy
- end
- end
-
- context 'when pipeline tree size is larger then the limit' do
- before do
- stub_application_setting(ci_max_total_yaml_size_bytes: 100)
- end
-
- it 'passes the verification' do
- expect(process.all?(&:valid?)).to be_truthy
- end
- end
- end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/processor_spec.rb b/spec/lib/gitlab/ci/config/external/processor_spec.rb
index 19113ce6a4e..68cdf56f198 100644
--- a/spec/lib/gitlab/ci/config/external/processor_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/processor_spec.rb
@@ -557,21 +557,11 @@ RSpec.describe Gitlab::Ci::Config::External::Processor, feature_category: :pipel
context 'when rules defined' do
context 'when a rule is invalid' do
let(:values) do
- { include: [{ local: 'builds.yml', rules: [{ changes: ['$MY_VAR'] }] }] }
+ { include: [{ local: 'builds.yml', rules: [{ allow_failure: ['$MY_VAR'] }] }] }
end
it 'raises IncludeError' do
- expect { subject }.to raise_error(described_class::IncludeError, /contains unknown keys: changes/)
- end
-
- context 'when FF `ci_refactor_external_rules` is disabled' do
- before do
- stub_feature_flags(ci_refactor_external_rules: false)
- end
-
- it 'raises IncludeError' do
- expect { subject }.to raise_error(described_class::IncludeError, /invalid include rule/)
- end
+ expect { subject }.to raise_error(described_class::IncludeError, /contains unknown keys: allow_failure/)
end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/rules_spec.rb b/spec/lib/gitlab/ci/config/external/rules_spec.rb
index 8674af7ab65..15d7801ff2a 100644
--- a/spec/lib/gitlab/ci/config/external/rules_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/rules_spec.rb
@@ -4,76 +4,45 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::External::Rules, feature_category: :pipeline_composition do
let(:context) { double(variables_hash: {}) }
- let(:rule_hashes) { [{ if: '$MY_VAR == "hello"' }] }
+ let(:rule_hashes) {}
+ let(:pipeline) { instance_double(Ci::Pipeline) }
+ let_it_be(:project) { create(:project, :custom_repo, files: { 'file.txt' => 'file' }) }
subject(:rules) { described_class.new(rule_hashes) }
+ before do
+ allow(context).to receive(:project).and_return(project)
+ allow(context).to receive(:pipeline).and_return(pipeline)
+ end
+
describe '#evaluate' do
subject(:result) { rules.evaluate(context).pass? }
context 'when there is no rule' do
- let(:rule_hashes) {}
-
it { is_expected.to eq(true) }
end
- shared_examples 'when there is a rule with if' do |rule_matched_result = true, rule_not_matched_result = false|
- context 'when the rule matches' do
- let(:context) { double(variables_hash: { 'MY_VAR' => 'hello' }) }
-
- it { is_expected.to eq(rule_matched_result) }
- end
-
- context 'when the rule does not match' do
- let(:context) { double(variables_hash: { 'MY_VAR' => 'invalid' }) }
-
- it { is_expected.to eq(rule_not_matched_result) }
- end
- end
-
- shared_examples 'when there is a rule with exists' do |file_exists_result = true, file_not_exists_result = false|
- let(:project) { create(:project, :repository) }
-
- context 'when the file exists' do
- let(:context) { double(project: project, sha: project.repository.tree.sha, top_level_worktree_paths: ['Dockerfile']) }
-
+ shared_examples 'with when: specified' do
+ context 'with when: never' do
before do
- project.repository.create_file(project.first_owner, 'Dockerfile', "commit", message: 'test', branch_name: "master")
+ rule_hashes.first[:when] = 'never'
end
- it { is_expected.to eq(file_exists_result) }
- end
-
- context 'when the file does not exist' do
- let(:context) { double(project: project, sha: project.repository.tree.sha, top_level_worktree_paths: ['test.md']) }
-
- it { is_expected.to eq(file_not_exists_result) }
- end
- end
-
- it_behaves_like 'when there is a rule with if'
-
- context 'when there is a rule with exists' do
- let(:rule_hashes) { [{ exists: 'Dockerfile' }] }
-
- it_behaves_like 'when there is a rule with exists'
- end
-
- context 'when there is a rule with if and when' do
- context 'with when: never' do
- let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'never' }] }
-
- it_behaves_like 'when there is a rule with if', false, false
+ it { is_expected.to eq(false) }
end
context 'with when: always' do
- let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'always' }] }
+ before do
+ rule_hashes.first[:when] = 'always'
+ end
- it_behaves_like 'when there is a rule with if'
+ it { is_expected.to eq(true) }
end
context 'with when: <invalid string>' do
- let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'on_success' }] }
+ before do
+ rule_hashes.first[:when] = 'on_success'
+ end
it 'raises an error' do
expect { result }.to raise_error(described_class::InvalidIncludeRulesError, /when unknown value: on_success/)
@@ -81,132 +50,125 @@ RSpec.describe Gitlab::Ci::Config::External::Rules, feature_category: :pipeline_
end
context 'with when: null' do
- let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: nil }] }
+ before do
+ rule_hashes.first[:when] = nil
+ end
- it_behaves_like 'when there is a rule with if'
+ it { is_expected.to eq(true) }
end
end
- context 'when there is a rule with exists and when' do
- context 'with when: never' do
- let(:rule_hashes) { [{ exists: 'Dockerfile', when: 'never' }] }
+ context 'when there is a rule with if:' do
+ let(:rule_hashes) { [{ if: '$MY_VAR == "hello"' }] }
- it_behaves_like 'when there is a rule with exists', false, false
- end
+ context 'when the rule matches' do
+ let(:context) { double(variables_hash: { 'MY_VAR' => 'hello' }) }
- context 'with when: always' do
- let(:rule_hashes) { [{ exists: 'Dockerfile', when: 'always' }] }
+ it { is_expected.to eq(true) }
- it_behaves_like 'when there is a rule with exists'
+ it_behaves_like 'with when: specified'
end
- context 'with when: <invalid string>' do
- let(:rule_hashes) { [{ exists: 'Dockerfile', when: 'on_success' }] }
+ context 'when the rule does not match' do
+ let(:context) { double(variables_hash: { 'MY_VAR' => 'invalid' }) }
- it 'raises an error' do
- expect { result }.to raise_error(described_class::InvalidIncludeRulesError, /when unknown value: on_success/)
- end
+ it { is_expected.to eq(false) }
end
+ end
- context 'with when: null' do
- let(:rule_hashes) { [{ exists: 'Dockerfile', when: nil }] }
+ context 'when there is a rule with exists:' do
+ let(:rule_hashes) { [{ exists: 'file.txt' }] }
- it_behaves_like 'when there is a rule with exists'
+ context 'when the file exists' do
+ let(:context) { double(top_level_worktree_paths: ['file.txt']) }
+
+ it { is_expected.to eq(true) }
+
+ it_behaves_like 'with when: specified'
end
- end
- context 'when there is a rule with changes' do
- let(:rule_hashes) { [{ changes: ['$MY_VAR'] }] }
+ context 'when the file does not exist' do
+ let(:context) { double(top_level_worktree_paths: ['README.md']) }
- it 'raises an error' do
- expect { result }.to raise_error(described_class::InvalidIncludeRulesError, /contains unknown keys: changes/)
+ it { is_expected.to eq(false) }
end
end
- context 'when FF `ci_refactor_external_rules` is disabled' do
- before do
- stub_feature_flags(ci_refactor_external_rules: false)
- end
+ context 'when there is a rule with changes:' do
+ let(:rule_hashes) { [{ changes: ['file.txt'] }] }
- context 'when there is no rule' do
- let(:rule_hashes) {}
+ shared_examples 'when the pipeline has modified paths' do
+ let(:modified_paths) { ['file.txt'] }
- it { is_expected.to eq(true) }
- end
+ before do
+ allow(pipeline).to receive(:modified_paths).and_return(modified_paths)
+ end
- it_behaves_like 'when there is a rule with if'
+ context 'when the file has changed' do
+ it { is_expected.to eq(true) }
- context 'when there is a rule with exists' do
- let(:rule_hashes) { [{ exists: 'Dockerfile' }] }
+ it_behaves_like 'with when: specified'
+ end
- it_behaves_like 'when there is a rule with exists'
+ context 'when the file has not changed' do
+ let(:modified_paths) { ['README.md'] }
+
+ it { is_expected.to eq(false) }
+ end
end
- context 'when there is a rule with if and when' do
- context 'with when: never' do
- let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'never' }] }
+ it_behaves_like 'when the pipeline has modified paths'
- it_behaves_like 'when there is a rule with if', false, false
- end
+ context 'with paths: specified' do
+ let(:rule_hashes) { [{ changes: { paths: ['file.txt'] } }] }
- context 'with when: always' do
- let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'always' }] }
+ it_behaves_like 'when the pipeline has modified paths'
+ end
- it_behaves_like 'when there is a rule with if'
- end
+ context 'with paths: and compare_to: specified' do
+ before_all do
+ project.repository.add_branch(project.owner, 'branch1', 'master')
- context 'with when: <invalid string>' do
- let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'on_success' }] }
+ project.repository.update_file(
+ project.owner, 'file.txt', 'file updated', message: 'Update file.txt', branch_name: 'branch1'
+ )
- it 'raises an error' do
- expect { result }.to raise_error(described_class::InvalidIncludeRulesError,
- 'invalid include rule: {:if=>"$MY_VAR == \"hello\"", :when=>"on_success"}')
- end
+ project.repository.add_branch(project.owner, 'branch2', 'branch1')
end
- context 'with when: null' do
- let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: nil }] }
-
- it_behaves_like 'when there is a rule with if'
+ let_it_be(:pipeline) do
+ build(:ci_pipeline, project: project, ref: 'branch2', sha: project.commit('branch2').sha)
end
- end
- context 'when there is a rule with exists and when' do
- context 'with when: never' do
- let(:rule_hashes) { [{ exists: 'Dockerfile', when: 'never' }] }
+ context 'when the file has changed compared to the given ref' do
+ let(:rule_hashes) { [{ changes: { paths: ['file.txt'], compare_to: 'master' } }] }
+
+ it { is_expected.to eq(true) }
- it_behaves_like 'when there is a rule with exists', false, false
+ it_behaves_like 'with when: specified'
end
- context 'with when: always' do
- let(:rule_hashes) { [{ exists: 'Dockerfile', when: 'always' }] }
+ context 'when the file has not changed compared to the given ref' do
+ let(:rule_hashes) { [{ changes: { paths: ['file.txt'], compare_to: 'branch1' } }] }
- it_behaves_like 'when there is a rule with exists'
+ it { is_expected.to eq(false) }
end
- context 'with when: <invalid string>' do
- let(:rule_hashes) { [{ exists: 'Dockerfile', when: 'on_success' }] }
+ context 'when compare_to: is invalid' do
+ let(:rule_hashes) { [{ changes: { paths: ['file.txt'], compare_to: 'invalid' } }] }
it 'raises an error' do
- expect { result }.to raise_error(described_class::InvalidIncludeRulesError,
- 'invalid include rule: {:exists=>"Dockerfile", :when=>"on_success"}')
+ expect { result }.to raise_error(described_class::InvalidIncludeRulesError, /compare_to is not a valid ref/)
end
end
-
- context 'with when: null' do
- let(:rule_hashes) { [{ exists: 'Dockerfile', when: nil }] }
-
- it_behaves_like 'when there is a rule with exists'
- end
end
+ end
- context 'when there is a rule with changes' do
- let(:rule_hashes) { [{ changes: ['$MY_VAR'] }] }
+ context 'when there is a rule with an invalid key' do
+ let(:rule_hashes) { [{ invalid: ['$MY_VAR'] }] }
- it 'raises an error' do
- expect { result }.to raise_error(described_class::InvalidIncludeRulesError,
- 'invalid include rule: {:changes=>["$MY_VAR"]}')
- end
+ it 'raises an error' do
+ expect { result }.to raise_error(described_class::InvalidIncludeRulesError, /contains unknown keys: invalid/)
end
end
end
diff --git a/spec/lib/gitlab/ci/config/interpolation/interpolator_spec.rb b/spec/lib/gitlab/ci/config/interpolation/interpolator_spec.rb
index 7bb09d35064..804164c933a 100644
--- a/spec/lib/gitlab/ci/config/interpolation/interpolator_spec.rb
+++ b/spec/lib/gitlab/ci/config/interpolation/interpolator_spec.rb
@@ -57,7 +57,8 @@ RSpec.describe Gitlab::Ci::Config::Interpolation::Interpolator, feature_category
expect(subject).not_to be_valid
expect(subject.error_message).to eq subject.errors.first
- expect(subject.errors).to include('unknown input arguments')
+ expect(subject.errors).to include('Given inputs not defined in the `spec` section of the included ' \
+ 'configuration file')
end
end
diff --git a/spec/lib/gitlab/ci/config/yaml/tags/reference_spec.rb b/spec/lib/gitlab/ci/config/yaml/tags/reference_spec.rb
index bf89942bf14..0af1b721eb6 100644
--- a/spec/lib/gitlab/ci/config/yaml/tags/reference_spec.rb
+++ b/spec/lib/gitlab/ci/config/yaml/tags/reference_spec.rb
@@ -2,9 +2,9 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Config::Yaml::Tags::Reference do
+RSpec.describe Gitlab::Ci::Config::Yaml::Tags::Reference, feature_category: :pipeline_composition do
let(:config) do
- Gitlab::Ci::Config::Yaml.load!(yaml)
+ Gitlab::Ci::Config::Yaml::Loader.new(yaml).load.content
end
describe '.tag' do
diff --git a/spec/lib/gitlab/ci/config/yaml/tags/resolver_spec.rb b/spec/lib/gitlab/ci/config/yaml/tags/resolver_spec.rb
index 594242c33cc..74d7513ebdf 100644
--- a/spec/lib/gitlab/ci/config/yaml/tags/resolver_spec.rb
+++ b/spec/lib/gitlab/ci/config/yaml/tags/resolver_spec.rb
@@ -2,9 +2,9 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Config::Yaml::Tags::Resolver do
+RSpec.describe Gitlab::Ci::Config::Yaml::Tags::Resolver, feature_category: :pipeline_composition do
let(:config) do
- Gitlab::Ci::Config::Yaml.load!(yaml)
+ Gitlab::Ci::Config::Yaml::Loader.new(yaml).load.content
end
describe '#to_hash' do
diff --git a/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb
index d06537ac330..a331af9a9ac 100644
--- a/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb
@@ -3,18 +3,20 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Parsers::Sbom::Cyclonedx, feature_category: :dependency_management do
- let(:report) { instance_double('Gitlab::Ci::Reports::Sbom::Report') }
+ let(:report) { Gitlab::Ci::Reports::Sbom::Report.new }
let(:report_data) { base_report_data }
let(:raw_report_data) { report_data.to_json }
let(:report_valid?) { true }
let(:validator_errors) { [] }
let(:properties_parser) { class_double('Gitlab::Ci::Parsers::Sbom::CyclonedxProperties') }
+ let(:uuid) { 'c9d550a3-feb8-483b-a901-5aa892d039f9' }
let(:base_report_data) do
{
'bomFormat' => 'CycloneDX',
'specVersion' => '1.4',
- 'version' => 1
+ 'version' => 1,
+ 'serialNumber' => "urn:uuid:#{uuid}"
}
end
@@ -28,6 +30,7 @@ RSpec.describe Gitlab::Ci::Parsers::Sbom::Cyclonedx, feature_category: :dependen
allow(properties_parser).to receive(:parse_source)
stub_const('Gitlab::Ci::Parsers::Sbom::CyclonedxProperties', properties_parser)
+ allow(SecureRandom).to receive(:uuid).and_return(uuid)
end
context 'when report JSON is invalid' do
@@ -149,8 +152,22 @@ RSpec.describe Gitlab::Ci::Parsers::Sbom::Cyclonedx, feature_category: :dependen
end
end
- context 'when report has metadata properties' do
- let(:report_data) { base_report_data.merge({ 'metadata' => { 'properties' => properties } }) }
+ context 'when report has metadata tools, authors and properties' do
+ let(:report_data) { base_report_data.merge(metadata) }
+
+ let(:tools) do
+ [
+ { name: 'Gemnasium', vendor: 'vendor-1', version: '2.34.0' },
+ { name: 'Gemnasium', vendor: 'vendor-2', version: '2.34.0' }
+ ]
+ end
+
+ let(:authors) do
+ [
+ { name: 'author-1', email: 'support@gitlab.com' },
+ { name: 'author-2', email: 'support@gitlab.com' }
+ ]
+ end
let(:properties) do
[
@@ -163,10 +180,44 @@ RSpec.describe Gitlab::Ci::Parsers::Sbom::Cyclonedx, feature_category: :dependen
]
end
- it 'passes them to the properties parser' do
- expect(properties_parser).to receive(:parse_source).with(properties)
+ context 'when metadata attributes are present' do
+ let(:metadata) do
+ {
+ 'metadata' => {
+ 'tools' => tools,
+ 'authors' => authors,
+ 'properties' => properties
+ }
+ }
+ end
- parse!
+ it 'passes them to the report' do
+ expect(properties_parser).to receive(:parse_source).with(properties)
+
+ parse!
+
+ expect(report.metadata).to have_attributes(
+ tools: tools.map(&:with_indifferent_access),
+ authors: authors.map(&:with_indifferent_access),
+ properties: properties.map(&:with_indifferent_access)
+ )
+ end
+ end
+
+ context 'when metadata attributes are not present' do
+ let(:metadata) { { 'metadata' => {} } }
+
+ it 'passes them to the report' do
+ expect(properties_parser).to receive(:parse_source).with(nil)
+
+ parse!
+
+ expect(report.metadata).to have_attributes(
+ tools: [],
+ authors: [],
+ properties: []
+ )
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/parsers/security/common_spec.rb b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
index dc16ddf4e0e..9470d59f502 100644
--- a/spec/lib/gitlab/ci/parsers/security/common_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
@@ -229,8 +229,9 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common, feature_category: :vulnera
describe 'parsing finding.details' do
context 'when details are provided' do
+ let(:finding) { report.findings[4] }
+
it 'sets details from the report' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-1020' }
expected_details = Gitlab::Json.parse(finding.raw_metadata)['details']
expect(finding.details).to eq(expected_details)
@@ -238,8 +239,9 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common, feature_category: :vulnera
end
context 'when details are not provided' do
+ let(:finding) { report.findings[5] }
+
it 'sets empty hash' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-1030' }
expect(finding.details).to eq({})
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index 5f87e0ccc33..54e569f424b 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -1081,6 +1081,126 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build, feature_category: :pipeline_co
end
end
+ context 'with a rule using CI_ENVIRONMENT_ACTION variable' do
+ let(:rule_set) do
+ [{ if: '$CI_ENVIRONMENT_ACTION == "start"' }]
+ end
+
+ context 'when environment:action satisfies the rule' do
+ let(:attributes) do
+ { name: 'rspec', rules: rule_set, environment: 'test', when: 'on_success',
+ options: { environment: { action: 'start' } } }
+ end
+
+ it { is_expected.to be_included }
+
+ it 'correctly populates when:' do
+ expect(seed_build.attributes).to include(when: 'on_success')
+ end
+ end
+
+ context 'when environment:action does not satisfy rule' do
+ let(:attributes) do
+ { name: 'rspec', rules: rule_set, environment: 'test', when: 'on_success',
+ options: { environment: { action: 'stop' } } }
+ end
+
+ it { is_expected.not_to be_included }
+
+ it 'correctly populates when:' do
+ expect(seed_build.attributes).to include(when: 'never')
+ end
+ end
+
+ context 'when environment:action is not set' do
+ it { is_expected.not_to be_included }
+
+ it 'correctly populates when:' do
+ expect(seed_build.attributes).to include(when: 'never')
+ end
+ end
+ end
+
+ context 'with a rule using CI_ENVIRONMENT_TIER variable' do
+ let(:rule_set) do
+ [{ if: '$CI_ENVIRONMENT_TIER == "production"' }]
+ end
+
+ context 'when environment:deployment_tier satisfies the rule' do
+ let(:attributes) do
+ { name: 'rspec', rules: rule_set, environment: 'test', when: 'on_success',
+ options: { environment: { deployment_tier: 'production' } } }
+ end
+
+ it { is_expected.to be_included }
+
+ it 'correctly populates when:' do
+ expect(seed_build.attributes).to include(when: 'on_success')
+ end
+ end
+
+ context 'when environment:deployment_tier does not satisfy rule' do
+ let(:attributes) do
+ { name: 'rspec', rules: rule_set, environment: 'test', when: 'on_success',
+ options: { environment: { deployment_tier: 'development' } } }
+ end
+
+ it { is_expected.not_to be_included }
+
+ it 'correctly populates when:' do
+ expect(seed_build.attributes).to include(when: 'never')
+ end
+ end
+
+ context 'when environment:deployment_tier is not set' do
+ it { is_expected.not_to be_included }
+
+ it 'correctly populates when:' do
+ expect(seed_build.attributes).to include(when: 'never')
+ end
+ end
+ end
+
+ context 'with a rule using CI_ENVIRONMENT_URL variable' do
+ let(:rule_set) do
+ [{ if: '$CI_ENVIRONMENT_URL == "http://gitlab.com"' }]
+ end
+
+ context 'when environment:url satisfies the rule' do
+ let(:attributes) do
+ { name: 'rspec', rules: rule_set, environment: 'test', when: 'on_success',
+ options: { environment: { url: 'http://gitlab.com' } } }
+ end
+
+ it { is_expected.to be_included }
+
+ it 'correctly populates when:' do
+ expect(seed_build.attributes).to include(when: 'on_success')
+ end
+ end
+
+ context 'when environment:url does not satisfy rule' do
+ let(:attributes) do
+ { name: 'rspec', rules: rule_set, environment: 'test', when: 'on_success',
+ options: { environment: { url: 'http://staging.gitlab.com' } } }
+ end
+
+ it { is_expected.not_to be_included }
+
+ it 'correctly populates when:' do
+ expect(seed_build.attributes).to include(when: 'never')
+ end
+ end
+
+ context 'when environment:url is not set' do
+ it { is_expected.not_to be_included }
+
+ it 'correctly populates when:' do
+ expect(seed_build.attributes).to include(when: 'never')
+ end
+ end
+ end
+
context 'with no rules' do
let(:rule_set) { [] }
diff --git a/spec/lib/gitlab/ci/reports/sbom/component_spec.rb b/spec/lib/gitlab/ci/reports/sbom/component_spec.rb
index d62d25aeefe..4c9fd00f96a 100644
--- a/spec/lib/gitlab/ci/reports/sbom/component_spec.rb
+++ b/spec/lib/gitlab/ci/reports/sbom/component_spec.rb
@@ -49,6 +49,18 @@ RSpec.describe Gitlab::Ci::Reports::Sbom::Component, feature_category: :dependen
end
end
+ describe '#purl_type' do
+ subject { component.purl_type }
+
+ it { is_expected.to eq(purl_type) }
+ end
+
+ describe '#type' do
+ subject { component.type }
+
+ it { is_expected.to eq(component_type) }
+ end
+
describe '#<=>' do
where do
{
diff --git a/spec/lib/gitlab/ci/reports/sbom/metadata_spec.rb b/spec/lib/gitlab/ci/reports/sbom/metadata_spec.rb
new file mode 100644
index 00000000000..fe0b9481039
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/sbom/metadata_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Sbom::Metadata, feature_category: :dependency_management do
+ let(:tools) do
+ [
+ {
+ vendor: "vendor",
+ name: "Gemnasium",
+ version: "2.34.0"
+ }
+ ]
+ end
+
+ let(:authors) do
+ [
+ {
+ name: "author_name",
+ email: "support@gitlab.com"
+ }
+ ]
+ end
+
+ let(:properties) do
+ [
+ {
+ name: "property_name",
+ value: "package-lock.json"
+ }
+ ]
+ end
+
+ let(:timestamp) { "2020-04-13T20:20:39+00:00" }
+
+ subject(:metadata) do
+ metadata = described_class.new(
+ tools: tools,
+ authors: authors,
+ properties: properties
+ )
+ metadata.timestamp = timestamp
+ metadata
+ end
+
+ it 'has correct attributes' do
+ expect(metadata).to have_attributes(
+ tools: tools,
+ authors: authors,
+ properties: properties,
+ timestamp: timestamp
+ )
+ end
+end
diff --git a/spec/lib/gitlab/ci/templates/MATLAB_spec.rb b/spec/lib/gitlab/ci/templates/MATLAB_spec.rb
index 3889d1fc8c9..8b6ff7f27a2 100644
--- a/spec/lib/gitlab/ci/templates/MATLAB_spec.rb
+++ b/spec/lib/gitlab/ci/templates/MATLAB_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe 'MATLAB.gitlab-ci.yml' do
end
it 'creates all jobs' do
- expect(build_names).to include('command', 'test', 'test_artifacts')
+ expect(build_names).to include('command', 'test', 'test_artifacts', 'build')
end
end
end
diff --git a/spec/lib/gitlab/ci/trace/stream_spec.rb b/spec/lib/gitlab/ci/trace/stream_spec.rb
index d65b6fb41f6..9439d29aa11 100644
--- a/spec/lib/gitlab/ci/trace/stream_spec.rb
+++ b/spec/lib/gitlab/ci/trace/stream_spec.rb
@@ -243,6 +243,56 @@ RSpec.describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
expect(result.encoding).to eq(Encoding.default_external)
end
end
+
+ context 'limit max size' do
+ before do
+ # specifying BUFFER_SIZE forces to seek backwards
+ allow(described_class).to receive(:BUFFER_SIZE)
+ .and_return(2)
+ end
+
+ it 'returns every line, respecting the max size' do
+ all_lines = lines.join
+ max_size = all_lines.bytesize.div(2)
+ result = stream.raw(max_size: max_size)
+
+ expect(result.bytes).to eq(all_lines.bytes[-max_size..])
+ expect(result.lines.count).to be > 1
+ expect(result.encoding).to eq(Encoding.default_external)
+ end
+
+ it 'returns everything if trying to get too many bytes' do
+ all_lines = lines.join
+ result = stream.raw(max_size: all_lines.bytesize * 2)
+
+ expect(result).to eq(all_lines)
+ expect(result.encoding).to eq(Encoding.default_external)
+ end
+ end
+
+ context 'limit max lines and max size' do
+ before do
+ # specifying BUFFER_SIZE forces to seek backwards
+ allow(described_class).to receive(:BUFFER_SIZE)
+ .and_return(2)
+ end
+
+ it 'returns max lines if max size is greater' do
+ result = stream.raw(last_lines: 2, max_size: lines.join.bytesize * 2)
+
+ expect(result).to eq(lines.last(2).join)
+ expect(result.encoding).to eq(Encoding.default_external)
+ end
+
+ it 'returns max size if max lines is greater' do
+ all_lines = lines.join
+ max_size = all_lines.bytesize.div(2)
+ result = stream.raw(last_lines: lines.size * 2, max_size: max_size)
+
+ expect(result.bytes).to eq(all_lines.bytes[-max_size..])
+ expect(result.encoding).to eq(Encoding.default_external)
+ end
+ end
end
let(:path) { __FILE__ }
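A brief usage sketch of the `#raw` options exercised above; the byte count is illustrative, and the behaviour summarised is only what the new examples assert:

    stream.raw(max_size: 1024)                 # keeps the trailing 1024 bytes of the trace
    stream.raw(last_lines: 2, max_size: 1024)  # the more restrictive of the two limits applies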
diff --git a/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
index 0880c556523..860a1fd30bd 100644
--- a/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
@@ -108,12 +108,17 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :secr
'CI_MERGE_REQUEST_SOURCE_PROJECT_URL' => merge_request.source_project.web_url,
'CI_MERGE_REQUEST_SOURCE_BRANCH_NAME' => merge_request.source_branch.to_s,
'CI_MERGE_REQUEST_SOURCE_BRANCH_SHA' => '',
+ 'CI_MERGE_REQUEST_SOURCE_BRANCH_PROTECTED' => ProtectedBranch.protected?(
+ merge_request.source_project,
+ merge_request.source_branch
+ ).to_s,
'CI_MERGE_REQUEST_TITLE' => merge_request.title,
'CI_MERGE_REQUEST_ASSIGNEES' => merge_request.assignee_username_list,
'CI_MERGE_REQUEST_MILESTONE' => milestone.title,
'CI_MERGE_REQUEST_LABELS' => labels.map(&:title).sort.join(','),
'CI_MERGE_REQUEST_EVENT_TYPE' => 'detached',
- 'CI_OPEN_MERGE_REQUESTS' => merge_request.to_reference(full: true))
+ 'CI_OPEN_MERGE_REQUESTS' => merge_request.to_reference(full: true),
+ 'CI_MERGE_REQUEST_SQUASH_ON_MERGE' => merge_request.squash_on_merge?.to_s)
end
it 'exposes diff variables' do
diff --git a/spec/lib/gitlab/ci/variables/builder_spec.rb b/spec/lib/gitlab/ci/variables/builder_spec.rb
index 3411426fcdb..af745c75f42 100644
--- a/spec/lib/gitlab/ci/variables/builder_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder_spec.rb
@@ -10,18 +10,27 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur
let_it_be(:user) { create(:user) }
let_it_be_with_reload(:job) do
create(:ci_build,
+ :with_deployment,
name: 'rspec:test 1',
pipeline: pipeline,
user: user,
yaml_variables: [{ key: 'YAML_VARIABLE', value: 'value' }],
- environment: 'test'
+ environment: 'review/$CI_COMMIT_REF_NAME',
+ options: {
+ environment: {
+ name: 'review/$CI_COMMIT_REF_NAME',
+ action: 'prepare',
+ deployment_tier: 'testing',
+ url: 'https://gitlab.com'
+ }
+ }
)
end
let(:builder) { described_class.new(pipeline) }
describe '#scoped_variables' do
- let(:environment) { job.expanded_environment_name }
+ let(:environment_name) { job.expanded_environment_name }
let(:dependencies) { true }
let(:predefined_variables) do
[
@@ -34,7 +43,13 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur
{ key: 'CI_NODE_TOTAL',
value: '1' },
{ key: 'CI_ENVIRONMENT_NAME',
- value: 'test' },
+ value: 'review/master' },
+ { key: 'CI_ENVIRONMENT_ACTION',
+ value: 'prepare' },
+ { key: 'CI_ENVIRONMENT_TIER',
+ value: 'testing' },
+ { key: 'CI_ENVIRONMENT_URL',
+ value: 'https://gitlab.com' },
{ key: 'CI',
value: 'true' },
{ key: 'GITLAB_CI',
@@ -150,7 +165,7 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur
].map { |var| var.merge(public: true, masked: false) }
end
- subject { builder.scoped_variables(job, environment: environment, dependencies: dependencies) }
+ subject { builder.scoped_variables(job, environment: environment_name, dependencies: dependencies) }
it { is_expected.to be_instance_of(Gitlab::Ci::Variables::Collection) }
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index f8f1d71e773..c09c0b31e97 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -794,6 +794,28 @@ module Gitlab
it_behaves_like 'returns errors', 'test_job_1 has the following needs duplicated: test_job_2.'
end
+
+ context 'when needed job name is too long' do
+ let(:job_name) { 'a' * (::Ci::BuildNeed::MAX_JOB_NAME_LENGTH + 1) }
+
+ let(:config) do
+ <<-EOYML
+ lint_job:
+ script: 'echo lint_job'
+ rules:
+ - if: $var == null
+ needs: [#{job_name}]
+ #{job_name}:
+ script: 'echo job'
+ EOYML
+ end
+
+ it 'returns an error' do
+ expect(subject.errors).to include(
+ "lint_job job: need `#{job_name}` name is too long (maximum is #{::Ci::BuildNeed::MAX_JOB_NAME_LENGTH} characters)"
+ )
+ end
+ end
end
context 'rule needs as hash' do
@@ -2020,6 +2042,52 @@ module Gitlab
end
end
+ describe 'id_tokens' do
+ subject(:execute) { described_class.new(config).execute }
+
+ let(:build) { execute.builds.first }
+ let(:id_tokens_vars) { { ID_TOKEN_1: { aud: 'http://gcp.com' } } }
+ let(:job_id_tokens_vars) { { ID_TOKEN_2: { aud: 'http://job.com' } } }
+
+ context 'when defined on job level' do
+ let(:config) do
+ YAML.dump({
+ rspec: { script: 'rspec', id_tokens: id_tokens_vars }
+ })
+ end
+
+ it 'returns defined id_tokens' do
+ expect(build[:id_tokens]).to eq(id_tokens_vars)
+ end
+ end
+
+ context 'when defined as default' do
+ let(:config) do
+ YAML.dump({
+ default: { id_tokens: id_tokens_vars },
+ rspec: { script: 'rspec' }
+ })
+ end
+
+ it 'returns inherited by default id_tokens' do
+ expect(build[:id_tokens]).to eq(id_tokens_vars)
+ end
+ end
+
+ context 'when defined as default and on job level' do
+ let(:config) do
+ YAML.dump({
+ default: { id_tokens: id_tokens_vars },
+ rspec: { script: 'rspec', id_tokens: job_id_tokens_vars }
+ })
+ end
+
+ it 'overrides default and returns defined on job level' do
+ expect(build[:id_tokens]).to eq(job_id_tokens_vars)
+ end
+ end
+ end
+
describe "Artifacts" do
it "returns artifacts when defined" do
config = YAML.dump(
@@ -2553,6 +2621,60 @@ module Gitlab
scheduling_type: :dag
)
end
+
+ context 'when expanded job name is too long' do
+ let(:parallel_job_name) { 'a' * ::Ci::BuildNeed::MAX_JOB_NAME_LENGTH }
+ let(:needs) { [parallel_job_name] }
+
+ before do
+ config[parallel_job_name] = { stage: 'build', script: 'test', parallel: 1 }
+ end
+
+ it 'returns an error' do
+ expect(subject.errors).to include(
+ "test1 job: need `#{parallel_job_name} 1/1` name is too long (maximum is #{::Ci::BuildNeed::MAX_JOB_NAME_LENGTH} characters)"
+ )
+ end
+ end
+
+ context 'when parallel job has matrix specified' do
+ let(:var1) { '1' }
+ let(:var2) { '2' }
+
+ before do
+ config[:parallel] = { stage: 'build', script: 'test', parallel: { matrix: [{ VAR1: var1, VAR2: var2 }] } }
+ end
+
+ it 'does create jobs with valid specification' do
+ expect(subject.builds.size).to eq(6)
+ expect(subject.builds[3]).to eq(
+ stage: 'test',
+ stage_idx: 2,
+ name: 'test1',
+ only: { refs: %w[branches tags] },
+ options: { script: ['test'] },
+ needs_attributes: [
+ { name: 'parallel: [1, 2]', artifacts: true, optional: false }
+ ],
+ when: "on_success",
+ allow_failure: false,
+ job_variables: [],
+ root_variables_inheritance: true,
+ scheduling_type: :dag
+ )
+ end
+
+ context 'when expanded job name is too long' do
+ let(:var1) { '1' * (::Ci::BuildNeed::MAX_JOB_NAME_LENGTH / 2) }
+ let(:var2) { '2' * (::Ci::BuildNeed::MAX_JOB_NAME_LENGTH / 2) }
+
+ it 'returns an error' do
+ expect(subject.errors).to include(
+ "test1 job: need `parallel: [#{var1}, #{var2}]` name is too long (maximum is #{::Ci::BuildNeed::MAX_JOB_NAME_LENGTH} characters)"
+ )
+ end
+ end
+ end
end
context 'needs dependencies artifacts' do
diff --git a/spec/lib/gitlab/composer/version_index_spec.rb b/spec/lib/gitlab/composer/version_index_spec.rb
index a4d016636aa..63efa8cae95 100644
--- a/spec/lib/gitlab/composer/version_index_spec.rb
+++ b/spec/lib/gitlab/composer/version_index_spec.rb
@@ -2,52 +2,111 @@
require 'spec_helper'
-RSpec.describe Gitlab::Composer::VersionIndex do
+RSpec.describe Gitlab::Composer::VersionIndex, feature_category: :package_registry do
let_it_be(:package_name) { 'sample-project' }
let_it_be(:json) { { 'name' => package_name } }
let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, :custom_repo, files: { 'composer.json' => json.to_json }, group: group) }
+ let_it_be(:files) { { 'composer.json' => json.to_json } }
+ let_it_be_with_reload(:project) { create(:project, :public, :custom_repo, files: files, group: group) }
let_it_be(:package1) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '1.0.0', json: json) }
let_it_be(:package2) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '2.0.0', json: json) }
+ let(:url) { "http://localhost/#{group.path}/#{project.path}.git" }
let(:branch) { project.repository.find_branch('master') }
-
let(:packages) { [package1, package2] }
describe '#as_json' do
+ let(:index) { described_class.new(packages).as_json }
+ let(:ssh_path_prefix) { 'username@localhost:' }
+
subject(:package_index) { index['packages'][package_name] }
- let(:index) { described_class.new(packages).as_json }
+ before do
+ allow(Gitlab.config.gitlab_shell).to receive(:ssh_path_prefix)
+ .and_return(ssh_path_prefix)
+ end
+
+ shared_examples 'returns the packages json' do
+ def expected_json(package)
+ {
+ 'dist' => {
+ 'reference' => branch.target,
+ 'shasum' => '',
+ 'type' => 'zip',
+ 'url' => "http://localhost/api/v4/projects/#{project.id}/packages/composer/archives/#{package.name}.zip?sha=#{branch.target}"
+ },
+ 'source' => {
+ 'reference' => branch.target,
+ 'type' => 'git',
+ 'url' => url
+ },
+ 'name' => package.name,
+ 'uid' => package.id,
+ 'version' => package.version
+ }
+ end
+
+ it 'returns the packages json' do
+ expect(package_index['1.0.0']).to eq(expected_json(package1))
+ expect(package_index['2.0.0']).to eq(expected_json(package2))
+ end
+
+ context 'with an unordered list of packages' do
+ let(:packages) { [package2, package1] }
+
+ it 'returns the packages sorted by version' do
+ expect(package_index.keys).to eq ['1.0.0', '2.0.0']
+ end
+ end
+ end
+
+ context 'with a public project' do
+ it_behaves_like 'returns the packages json'
+ end
+
+ context 'with an internal project' do
+ let(:url) { "#{ssh_path_prefix}#{group.path}/#{project.path}.git" }
+
+ before do
+ project.update!(visibility: Gitlab::VisibilityLevel::INTERNAL)
+ end
- def expected_json(package)
- {
- 'dist' => {
- 'reference' => branch.target,
- 'shasum' => '',
- 'type' => 'zip',
- 'url' => "http://localhost/api/v4/projects/#{project.id}/packages/composer/archives/#{package.name}.zip?sha=#{branch.target}"
- },
- 'source' => {
- 'reference' => branch.target,
- 'type' => 'git',
- 'url' => project.http_url_to_repo
- },
- 'name' => package.name,
- 'uid' => package.id,
- 'version' => package.version
- }
+ it_behaves_like 'returns the packages json'
end
- it 'returns the packages json' do
- expect(package_index['1.0.0']).to eq(expected_json(package1))
- expect(package_index['2.0.0']).to eq(expected_json(package2))
+ context 'with a private project' do
+ let(:url) { "#{ssh_path_prefix}#{group.path}/#{project.path}.git" }
+
+ before do
+ project.update!(visibility: Gitlab::VisibilityLevel::PRIVATE)
+ end
+
+ it_behaves_like 'returns the packages json'
end
- context 'with an unordered list of packages' do
- let(:packages) { [package2, package1] }
+ context 'with composer_use_ssh_source_urls disabled' do
+ before do
+ stub_feature_flags(composer_use_ssh_source_urls: false)
+ end
+
+ context 'with a public project' do
+ it_behaves_like 'returns the packages json'
+ end
+
+ context 'with an internal project' do
+ before do
+ project.update!(visibility: Gitlab::VisibilityLevel::INTERNAL)
+ end
+
+ it_behaves_like 'returns the packages json'
+ end
+
+ context 'with a private project' do
+ before do
+ project.update!(visibility: Gitlab::VisibilityLevel::PRIVATE)
+ end
- it 'returns the packages sorted by version' do
- expect(package_index.keys).to eq ['1.0.0', '2.0.0']
+ it_behaves_like 'returns the packages json'
end
end
end
diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
index 6d24ced138e..3682a654181 100644
--- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
+++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
@@ -80,6 +80,10 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader, feature_category: :s
let(:style_src) { directives['style_src'] }
let(:worker_src) { directives['worker_src'] }
+ before do
+ stub_env('GITLAB_ANALYTICS_URL', nil)
+ end
+
it 'returns default directives' do
directive_names = (described_class::DIRECTIVES - ['report_uri'])
directive_names.each do |directive|
@@ -542,6 +546,58 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader, feature_category: :s
end
end
end
+
+ describe 'browsersdk_tracking' do
+ let(:analytics_url) { 'https://analytics.gitlab.com' }
+ let(:is_gitlab_com) { true }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
+ end
+
+ context 'when browsersdk_tracking is enabled, GITLAB_ANALYTICS_URL is set, and Gitlab.com? is true' do
+ before do
+ stub_env('GITLAB_ANALYTICS_URL', analytics_url)
+ end
+
+ it 'adds GITLAB_ANALYTICS_URL to connect-src' do
+ expect(connect_src).to include(analytics_url)
+ end
+ end
+
+ context 'when Gitlab.com? is false' do
+ let(:is_gitlab_com) { false }
+
+ before do
+ stub_env('GITLAB_ANALYTICS_URL', analytics_url)
+ end
+
+ it 'does not add GITLAB_ANALYTICS_URL to connect-src' do
+ expect(connect_src).not_to include(analytics_url)
+ end
+ end
+
+ context 'when browsersdk_tracking is disabled' do
+ before do
+ stub_feature_flags(browsersdk_tracking: false)
+ stub_env('GITLAB_ANALYTICS_URL', analytics_url)
+ end
+
+ it 'does not add GITLAB_ANALYTICS_URL to connect-src' do
+ expect(connect_src).not_to include(analytics_url)
+ end
+ end
+
+ context 'when GITLAB_ANALYTICS_URL is not set' do
+ before do
+ stub_env('GITLAB_ANALYTICS_URL', nil)
+ end
+
+ it 'does not add GITLAB_ANALYTICS_URL to connect-src' do
+ expect(connect_src).not_to include(analytics_url)
+ end
+ end
+ end
end
describe '#load' do
diff --git a/spec/lib/gitlab/current_settings_spec.rb b/spec/lib/gitlab/current_settings_spec.rb
index 0e93a85764f..df1b12e479f 100644
--- a/spec/lib/gitlab/current_settings_spec.rb
+++ b/spec/lib/gitlab/current_settings_spec.rb
@@ -2,15 +2,14 @@
require 'spec_helper'
-RSpec.describe Gitlab::CurrentSettings do
+RSpec.describe Gitlab::CurrentSettings, feature_category: :shared do
before do
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
end
shared_context 'with settings in cache' do
before do
- create(:application_setting)
- described_class.current_application_settings # warm the cache
+ 2.times { described_class.current_application_settings } # warm the cache
end
end
@@ -29,7 +28,7 @@ RSpec.describe Gitlab::CurrentSettings do
context 'when there are allowed domains' do
before do
- create(:application_setting, domain_allowlist: ['www.gitlab.com'])
+ stub_application_setting(domain_allowlist: ['www.gitlab.com'])
end
it { is_expected.to be_truthy }
@@ -37,7 +36,7 @@ RSpec.describe Gitlab::CurrentSettings do
context 'when there are email restrictions' do
before do
- create(:application_setting, email_restrictions_enabled: true)
+ stub_application_setting(email_restrictions_enabled: true)
end
it { is_expected.to be_truthy }
@@ -45,7 +44,7 @@ RSpec.describe Gitlab::CurrentSettings do
context 'when the admin has to approve signups' do
before do
- create(:application_setting, require_admin_approval_after_user_signup: true)
+ stub_application_setting(require_admin_approval_after_user_signup: true)
end
it { is_expected.to be_truthy }
@@ -53,7 +52,7 @@ RSpec.describe Gitlab::CurrentSettings do
context 'when new users are set to external' do
before do
- create(:application_setting, user_default_external: true)
+ stub_application_setting(user_default_external: true)
end
it { is_expected.to be_truthy }
@@ -61,7 +60,7 @@ RSpec.describe Gitlab::CurrentSettings do
context 'when there are no restrictions' do
before do
- create(:application_setting, domain_allowlist: [], email_restrictions_enabled: false, require_admin_approval_after_user_signup: false, user_default_external: false)
+ stub_application_setting(domain_allowlist: [], email_restrictions_enabled: false, require_admin_approval_after_user_signup: false, user_default_external: false)
end
it { is_expected.to be_falsey }
@@ -73,7 +72,7 @@ RSpec.describe Gitlab::CurrentSettings do
context 'when signup is enabled' do
before do
- create(:application_setting, signup_enabled: true)
+ stub_application_setting(signup_enabled: true)
end
it { is_expected.to be_falsey }
@@ -81,7 +80,7 @@ RSpec.describe Gitlab::CurrentSettings do
context 'when signup is disabled' do
before do
- create(:application_setting, signup_enabled: false)
+ stub_application_setting(signup_enabled: false)
end
it { is_expected.to be_truthy }
@@ -90,11 +89,9 @@ RSpec.describe Gitlab::CurrentSettings do
describe '#current_application_settings', :use_clean_rails_memory_store_caching do
it 'allows keys to be called directly' do
- db_settings = create(:application_setting,
- home_page_url: 'http://mydomain.com',
- signup_enabled: false)
+ described_class.update!(home_page_url: 'http://mydomain.com', signup_enabled: false)
- expect(described_class.home_page_url).to eq(db_settings.home_page_url)
+ expect(described_class.home_page_url).to eq('http://mydomain.com')
expect(described_class.signup_enabled?).to be_falsey
expect(described_class.signup_enabled).to be_falsey
expect(described_class.metrics_sample_interval).to be(15)
@@ -253,12 +250,14 @@ RSpec.describe Gitlab::CurrentSettings do
end
context 'with an existing ApplicationSetting DB record' do
- let!(:db_settings) { ApplicationSetting.build_from_defaults(home_page_url: 'http://mydomain.com').save! && ApplicationSetting.last }
+ before do
+ described_class.update!(home_page_url: 'http://mydomain.com')
+ end
it_behaves_like 'a non-persisted ApplicationSetting object'
it 'uses the value from the DB attribute if present and not overridden by an accessor' do
- expect(current_settings.home_page_url).to eq(db_settings.home_page_url)
+ expect(current_settings.home_page_url).to eq('http://mydomain.com')
end
end
end
@@ -277,10 +276,11 @@ RSpec.describe Gitlab::CurrentSettings do
describe '#current_application_settings?', :use_clean_rails_memory_store_caching do
before do
allow(described_class).to receive(:current_application_settings?).and_call_original
+ ApplicationSetting.delete_all # ensure no settings exist
end
it 'returns true when settings exist' do
- create(:application_setting,
+ described_class.update!(
home_page_url: 'http://mydomain.com',
signup_enabled: false)
diff --git a/spec/lib/gitlab/data_builder/deployment_spec.rb b/spec/lib/gitlab/data_builder/deployment_spec.rb
index bbcfa1973ea..bf97f40e97f 100644
--- a/spec/lib/gitlab/data_builder/deployment_spec.rb
+++ b/spec/lib/gitlab/data_builder/deployment_spec.rb
@@ -50,6 +50,15 @@ RSpec.describe Gitlab::DataBuilder::Deployment, feature_category: :continuous_de
expect(data[:deployable_url]).to be_nil
end
+ it 'does not include the deployable URL when the deployable is a bridge' do
+ project = create(:project, :repository)
+ bridge = create(:ci_bridge, project: project)
+ deployment = create(:deployment, status: :failed, project: project, deployable: bridge)
+ data = described_class.build(deployment, 'failed', Time.current)
+
+ expect(data[:deployable_url]).to be_nil
+ end
+
context 'when commit does not exist in the repository' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:deployment) { create(:deployment, project: project) }
diff --git a/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb b/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb
index 309bbf1e3f0..c5a20b5ef3d 100644
--- a/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb
@@ -141,6 +141,14 @@ RSpec.describe Gitlab::Database::AsyncIndexes::MigrationHelpers, feature_categor
expect { migration.prepare_async_index(table_name, 'id') }.not_to raise_error
end
end
+
+ context 'when the target table does not exist' do
+ it 'raises an error' do
+ expect { migration.prepare_async_index(:non_existent_table, 'id') }.to(
+ raise_error("Table non_existent_table does not exist")
+ )
+ end
+ end
end
describe '#prepare_async_index_from_sql' do
diff --git a/spec/lib/gitlab/database/click_house_client_spec.rb b/spec/lib/gitlab/database/click_house_client_spec.rb
index 50086795b2b..6e63ae56557 100644
--- a/spec/lib/gitlab/database/click_house_client_spec.rb
+++ b/spec/lib/gitlab/database/click_house_client_spec.rb
@@ -2,98 +2,135 @@
require 'spec_helper'
-RSpec.describe 'ClickHouse::Client', feature_category: :database do
- context 'when click_house spec tag is not added' do
- it 'does not have any ClickHouse databases configured' do
- databases = ClickHouse::Client.configuration.databases
+RSpec.describe 'ClickHouse::Client', :click_house, feature_category: :database do
+ it 'has a ClickHouse database configured' do
+ databases = ClickHouse::Client.configuration.databases
- expect(databases).to be_empty
- end
+ expect(databases).not_to be_empty
end
- describe 'when click_house spec tag is added', :click_house do
- it 'has a ClickHouse database configured' do
- databases = ClickHouse::Client.configuration.databases
-
- expect(databases).not_to be_empty
- end
+ it 'does not return data via `execute` method' do
+ result = ClickHouse::Client.execute("SELECT 1 AS value", :main)
- it 'does not return data via `execute` method' do
- result = ClickHouse::Client.execute("SELECT 1 AS value", :main)
+ # does not return data, just true if successful. Otherwise error.
+ expect(result).to eq(true)
+ end
- # does not return data, just true if successful. Otherwise error.
- expect(result).to eq(true)
- end
+ describe 'data manipulation' do
+ describe 'inserting' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project) }
+
+ let_it_be(:author1) { create(:user).tap { |u| project.add_developer(u) } }
+ let_it_be(:author2) { create(:user).tap { |u| project.add_developer(u) } }
+
+ let_it_be(:issue1) { create(:issue, project: project) }
+ let_it_be(:issue2) { create(:issue, project: project) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+
+ let_it_be(:event1) { create(:event, :created, target: issue1, author: author1) }
+ let_it_be(:event2) { create(:event, :closed, target: issue2, author: author2) }
+ let_it_be(:event3) { create(:event, :merged, target: merge_request, author: author1) }
+
+ let(:events) { [event1, event2, event3] }
+
+ def format_row(event)
+ path = event.project.reload.project_namespace.traversal_ids.join('/')
+
+ action = Event.actions[event.action]
+ [
+ event.id,
+ "'#{path}/'",
+ event.author_id,
+ event.target_id,
+ "'#{event.target_type}'",
+ action,
+ event.created_at.to_f,
+ event.updated_at.to_f
+ ].join(',')
+ end
- describe 'data manipulation' do
- describe 'inserting' do
- let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project) }
-
- let_it_be(:author1) { create(:user).tap { |u| project.add_developer(u) } }
- let_it_be(:author2) { create(:user).tap { |u| project.add_developer(u) } }
-
- let_it_be(:issue1) { create(:issue, project: project) }
- let_it_be(:issue2) { create(:issue, project: project) }
- let_it_be(:merge_request) { create(:merge_request, source_project: project) }
-
- let_it_be(:event1) { create(:event, :created, target: issue1, author: author1) }
- let_it_be(:event2) { create(:event, :closed, target: issue2, author: author2) }
- let_it_be(:event3) { create(:event, :merged, target: merge_request, author: author1) }
-
- let(:events) { [event1, event2, event3] }
-
- def format_row(event)
- path = event.project.reload.project_namespace.traversal_ids.join('/')
-
- action = Event.actions[event.action]
- [
- event.id,
- "'#{path}/'",
- event.author_id,
- event.target_id,
- "'#{event.target_type}'",
- action,
- event.created_at.to_f,
- event.updated_at.to_f
- ].join(',')
+ describe 'RSpec hooks' do
+ it 'ensures that tables are empty' do
+ results = ClickHouse::Client.select('SELECT * FROM events', :main)
+ expect(results).to be_empty
end
- describe 'RSpec hooks' do
- it 'ensures that tables are empty' do
- results = ClickHouse::Client.select('SELECT * FROM events', :main)
- expect(results).to be_empty
+ it 'inserts data from CSV' do
+ time = Time.current.utc
+ Tempfile.open(['test', '.csv.gz']) do |f|
+ csv = "id,path,created_at\n10,1/2/,#{time.to_f}\n20,1/,#{time.to_f}"
+ File.binwrite(f.path, ActiveSupport::Gzip.compress(csv))
+
+ ClickHouse::Client.insert_csv('INSERT INTO events (id, path, created_at) FORMAT CSV', File.open(f.path),
+ :main)
end
- end
- it 'inserts and modifies data' do
- insert_query = <<~SQL
- INSERT INTO events
- (id, path, author_id, target_id, target_type, action, created_at, updated_at)
- VALUES
- (#{format_row(event1)}),
- (#{format_row(event2)}),
- (#{format_row(event3)})
- SQL
+ results = ClickHouse::Client.select('SELECT id, path, created_at FROM events ORDER BY id', :main)
- ClickHouse::Client.execute(insert_query, :main)
+ expect(results).to match([
+ { 'id' => 10, 'path' => '1/2/', 'created_at' => be_within(0.1.seconds).of(time) },
+ { 'id' => 20, 'path' => '1/', 'created_at' => be_within(0.1.seconds).of(time) }
+ ])
+ end
+ end
- results = ClickHouse::Client.select('SELECT * FROM events ORDER BY id', :main)
- expect(results.size).to eq(3)
+ it 'inserts and modifies data' do
+ insert_query = <<~SQL
+ INSERT INTO events
+ (id, path, author_id, target_id, target_type, action, created_at, updated_at)
+ VALUES
+ (#{format_row(event1)}),
+ (#{format_row(event2)}),
+ (#{format_row(event3)})
+ SQL
+
+ ClickHouse::Client.execute(insert_query, :main)
+
+ results = ClickHouse::Client.select('SELECT * FROM events ORDER BY id', :main)
+ expect(results.size).to eq(3)
+
+ last = results.last
+ expect(last).to match(a_hash_including(
+ 'id' => event3.id,
+ 'author_id' => event3.author_id,
+ 'created_at' => be_within(0.05).of(event3.created_at),
+ 'target_type' => event3.target_type
+ ))
+
+ delete_query = ClickHouse::Client::Query.new(
+ raw_query: 'DELETE FROM events WHERE id = {id:UInt64}',
+ placeholders: { id: event3.id }
+ )
+
+ ClickHouse::Client.execute(delete_query, :main)
+
+ select_query = ClickHouse::Client::Query.new(
+ raw_query: 'SELECT * FROM events WHERE id = {id:UInt64}',
+ placeholders: { id: event3.id }
+ )
+
+ results = ClickHouse::Client.select(select_query, :main)
+ expect(results).to be_empty
+ end
+ end
+ end
- last = results.last
- expect(last).to match(a_hash_including(
- 'id' => event3.id,
- 'author_id' => event3.author_id,
- 'created_at' => be_within(0.05).of(event3.created_at),
- 'target_type' => event3.target_type
- ))
+ describe 'logging' do
+ let(:query_string) { "SELECT * FROM events WHERE id IN (4, 5, 6)" }
- ClickHouse::Client.execute("DELETE FROM events WHERE id = #{event3.id}", :main)
+ context 'on dev and test environments' do
+ it 'logs the un-redacted query' do
+ expect(ClickHouse::Client.configuration.logger).to receive(:info).with({
+ query: query_string,
+ correlation_id: a_kind_of(String)
+ })
- results = ClickHouse::Client.select("SELECT * FROM events WHERE id = #{event3.id}", :main)
- expect(results).to be_empty
- end
+ ClickHouse::Client.select(query_string, :main)
+ end
+
+ it 'has a ClickHouse logger' do
+ expect(ClickHouse::Client.configuration.logger).to be_a(ClickHouse::Logger)
end
end
end
diff --git a/spec/lib/gitlab/database/gitlab_schema_spec.rb b/spec/lib/gitlab/database/gitlab_schema_spec.rb
index 14ff1a462e3..e402014df90 100644
--- a/spec/lib/gitlab/database/gitlab_schema_spec.rb
+++ b/spec/lib/gitlab/database/gitlab_schema_spec.rb
@@ -148,7 +148,7 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
subject { described_class.table_schemas!(tables) }
it 'returns the matched schemas' do
- expect(subject).to match_array %i[gitlab_main_cell gitlab_main gitlab_ci].to_set
+ expect(subject).to match_array %i[gitlab_main_cell gitlab_ci].to_set
end
context 'when one of the tables does not have a matching table schema' do
diff --git a/spec/lib/gitlab/database/load_balancing/host_spec.rb b/spec/lib/gitlab/database/load_balancing/host_spec.rb
index 5ef6d9173c4..89cecaff075 100644
--- a/spec/lib/gitlab/database/load_balancing/host_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/host_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::LoadBalancing::Host do
+RSpec.describe Gitlab::Database::LoadBalancing::Host, feature_category: :database do
let(:load_balancer) do
Gitlab::Database::LoadBalancing::LoadBalancer
.new(Gitlab::Database::LoadBalancing::Configuration.new(ActiveRecord::Base))
@@ -124,13 +124,36 @@ RSpec.describe Gitlab::Database::LoadBalancing::Host do
end
it 'refreshes the status' do
- expect(Gitlab::Database::LoadBalancing::Logger).to receive(:info)
- .with(hash_including(event: :host_online))
- .and_call_original
-
expect(host).to be_online
end
+ context 'and the host was previously online' do
+ # Hosts are online by default
+
+ it 'does not log the online event' do
+ expect(Gitlab::Database::LoadBalancing::Logger)
+ .not_to receive(:info)
+ .with(hash_including(event: :host_online))
+
+ expect(host).to be_online
+ end
+ end
+
+ context 'and the host was previously offline' do
+ before do
+ host.offline!
+ end
+
+ it 'logs the online event' do
+ expect(Gitlab::Database::LoadBalancing::Logger)
+ .to receive(:info)
+ .with(hash_including(event: :host_online))
+ .and_call_original
+
+ expect(host).to be_online
+ end
+ end
+
context 'and replica is not up to date' do
before do
expect(host).to receive(:replica_is_up_to_date?).and_return(false)
diff --git a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
index 26c8969efd8..c975f5b5ee4 100644
--- a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
@@ -469,25 +469,58 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store, fe
context 'when none of the replicas are caught up' do
before do
- expect(hosts).to all(receive(:caught_up?).with(location).and_return(false))
+ expect(hosts[0]).to receive(:caught_up?).with(location).and_return(false)
+ expect(hosts[1]).to receive(:caught_up?).with(location).and_return(false)
end
- it 'returns false and does not update the host thread-local variable' do
- expect(subject).to be false
+ it 'returns NONE_CAUGHT_UP and does not update the host thread-local variable' do
+ expect(subject).to eq(described_class::NONE_CAUGHT_UP)
expect(set_host).to be_nil
end
+
+ it 'notifies caught_up_replica_pick.load_balancing with result false' do
+ expect(ActiveSupport::Notifications).to receive(:instrument)
+ .with('caught_up_replica_pick.load_balancing', { result: false })
+
+ subject
+ end
end
- context 'when any of the replicas is caught up' do
+ context 'when any replica is caught up' do
before do
- # `allow` for non-caught up host, because we may not even check it, if will find the caught up one earlier
- allow(hosts[0]).to receive(:caught_up?).with(location).and_return(false)
+ expect(hosts[0]).to receive(:caught_up?).with(location).and_return(true)
+ expect(hosts[1]).to receive(:caught_up?).with(location).and_return(false)
+ end
+
+ it 'returns ANY_CAUGHT_UP and sets host thread-local variable' do
+ expect(subject).to eq(described_class::ANY_CAUGHT_UP)
+ expect(set_host).to eq(hosts[0])
+ end
+
+ it 'notifies caught_up_replica_pick.load_balancing with result true' do
+ expect(ActiveSupport::Notifications).to receive(:instrument)
+ .with('caught_up_replica_pick.load_balancing', { result: true })
+
+ subject
+ end
+ end
+
+ context 'when all of the replicas are caught up' do
+ before do
+ expect(hosts[0]).to receive(:caught_up?).with(location).and_return(true)
expect(hosts[1]).to receive(:caught_up?).with(location).and_return(true)
end
- it 'returns true and sets host thread-local variable' do
- expect(subject).to be true
- expect(set_host).to eq(hosts[1])
+ it 'returns ALL_CAUGHT_UP and sets host thread-local variable' do
+ expect(subject).to eq(described_class::ALL_CAUGHT_UP)
+ expect(set_host).to be_in([hosts[0], hosts[1]])
+ end
+
+ it 'notifies caught_up_replica_pick.load_balancing with result true' do
+ expect(ActiveSupport::Notifications).to receive(:instrument)
+ .with('caught_up_replica_pick.load_balancing', { result: true })
+
+ subject
end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb
index 713bff5feea..863b1fb099b 100644
--- a/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do
let(:app) { double(:app) }
let(:middleware) { described_class.new(app) }
let(:warden_user) { double(:warden, user: double(:user, id: 42)) }
- let(:single_sticking_object) { Set.new([[ActiveRecord::Base.sticking, :user, 42]]) }
+ let(:single_sticking_object) { Set.new([[ActiveRecord::Base.sticking, :user, 99]]) }
let(:multiple_sticking_objects) do
Set.new([
[ActiveRecord::Base.sticking, :user, 42],
@@ -25,7 +25,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do
expect(middleware).to receive(:clear).twice
- expect(middleware).to receive(:unstick_or_continue_sticking).with(env)
+ expect(middleware).to receive(:find_caught_up_replica).with(env)
expect(middleware).to receive(:stick_if_necessary).with(env)
expect(app).to receive(:call).with(env).and_return(10)
@@ -41,12 +41,12 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do
end
end
- describe '#unstick_or_continue_sticking' do
+ describe '#find_caught_up_replica' do
it 'does not stick if no namespace and identifier could be found' do
expect(ApplicationRecord.sticking)
- .not_to receive(:unstick_or_continue_sticking)
+ .not_to receive(:find_caught_up_replica)
- middleware.unstick_or_continue_sticking({})
+ middleware.find_caught_up_replica({})
end
it 'sticks to the primary if a warden user is found' do
@@ -54,94 +54,125 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do
Gitlab::Database::LoadBalancing.base_models.each do |model|
expect(model.sticking)
- .to receive(:unstick_or_continue_sticking)
+ .to receive(:find_caught_up_replica)
.with(:user, 42)
end
- middleware.unstick_or_continue_sticking(env)
+ middleware.find_caught_up_replica(env)
end
it 'sticks to the primary if a sticking namespace and identifier is found' do
env = { described_class::STICK_OBJECT => single_sticking_object }
expect(ApplicationRecord.sticking)
- .to receive(:unstick_or_continue_sticking)
- .with(:user, 42)
+ .to receive(:find_caught_up_replica)
+ .with(:user, 99)
- middleware.unstick_or_continue_sticking(env)
+ middleware.find_caught_up_replica(env)
end
it 'sticks to the primary if multiple sticking namespaces and identifiers were found' do
env = { described_class::STICK_OBJECT => multiple_sticking_objects }
expect(ApplicationRecord.sticking)
- .to receive(:unstick_or_continue_sticking)
+ .to receive(:find_caught_up_replica)
.with(:user, 42)
.ordered
expect(ApplicationRecord.sticking)
- .to receive(:unstick_or_continue_sticking)
+ .to receive(:find_caught_up_replica)
.with(:runner, '123456789')
.ordered
expect(ApplicationRecord.sticking)
- .to receive(:unstick_or_continue_sticking)
+ .to receive(:find_caught_up_replica)
.with(:runner, '1234')
.ordered
- middleware.unstick_or_continue_sticking(env)
+ middleware.find_caught_up_replica(env)
end
end
describe '#stick_if_necessary' do
- it 'does not stick to the primary if not necessary' do
- expect(ApplicationRecord.sticking)
- .not_to receive(:stick_if_necessary)
-
- middleware.stick_if_necessary({})
+ let(:env) { { 'warden' => warden, described_class::STICK_OBJECT => stick_object }.compact }
+ let(:stick_object) { nil }
+ let(:write_performed) { true }
+ let(:warden) { warden_user }
+
+ before do
+ allow(::Gitlab::Database::LoadBalancing::Session.current).to receive(:performed_write?)
+ .and_return(write_performed)
end
- it 'sticks to the primary if a warden user is found' do
- env = { 'warden' => warden_user }
+ subject { middleware.stick_if_necessary(env) }
+ it 'sticks to the primary for the user' do
Gitlab::Database::LoadBalancing.base_models.each do |model|
expect(model.sticking)
- .to receive(:stick_if_necessary)
+ .to receive(:stick)
.with(:user, 42)
end
- middleware.stick_if_necessary(env)
+ subject
end
- it 'sticks to the primary if a a single sticking object is found' do
- env = { described_class::STICK_OBJECT => single_sticking_object }
+ context 'when no write was performed' do
+ let(:write_performed) { false }
- expect(ApplicationRecord.sticking)
- .to receive(:stick_if_necessary)
- .with(:user, 42)
+ it 'does not stick to the primary' do
+ expect(ApplicationRecord.sticking)
+ .not_to receive(:stick)
- middleware.stick_if_necessary(env)
+ subject
+ end
end
- it 'sticks to the primary if multiple sticking namespaces and identifiers were found' do
- env = { described_class::STICK_OBJECT => multiple_sticking_objects }
+ context 'when there is no user in the env' do
+ let(:warden) { nil }
- expect(ApplicationRecord.sticking)
- .to receive(:stick_if_necessary)
- .with(:user, 42)
- .ordered
+ context 'when there is an explicit single sticking object in the env' do
+ let(:stick_object) { single_sticking_object }
- expect(ApplicationRecord.sticking)
- .to receive(:stick_if_necessary)
- .with(:runner, '123456789')
- .ordered
+ it 'sticks to the single sticking object' do
+ expect(ApplicationRecord.sticking)
+ .to receive(:stick)
+ .with(:user, 99)
- expect(ApplicationRecord.sticking)
- .to receive(:stick_if_necessary)
- .with(:runner, '1234')
- .ordered
+ subject
+ end
+ end
+
+ context 'when there are multiple explicit sticking objects' do
+ let(:stick_object) { multiple_sticking_objects }
+
+ it 'sticks to the sticking objects' do
+ expect(ApplicationRecord.sticking)
+ .to receive(:stick)
+ .with(:user, 42)
+ .ordered
- middleware.stick_if_necessary(env)
+ expect(ApplicationRecord.sticking)
+ .to receive(:stick)
+ .with(:runner, '123456789')
+ .ordered
+
+ expect(ApplicationRecord.sticking)
+ .to receive(:stick)
+ .with(:runner, '1234')
+ .ordered
+
+ subject
+ end
+ end
+
+ context 'when there are no explicit sticking objects' do
+ it 'does not stick to the primary' do
+ expect(ApplicationRecord.sticking)
+ .not_to receive(:stick)
+
+ subject
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb b/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb
index 789919d2a51..7197b99fe33 100644
--- a/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb
@@ -15,7 +15,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery, feature_catego
load_balancer,
nameserver: 'localhost',
port: 8600,
- record: 'foo'
+ record: 'foo',
+ disconnect_timeout: 1 # Short disconnect timeout to keep tests fast
)
end
@@ -192,6 +193,13 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery, feature_catego
end
describe '#replace_hosts' do
+ before do
+ stub_env('LOAD_BALANCER_PARALLEL_DISCONNECT', 'true')
+ allow(service)
+ .to receive(:load_balancer)
+ .and_return(load_balancer)
+ end
+
let(:address_foo) { described_class::Address.new('foo') }
let(:address_bar) { described_class::Address.new('bar') }
@@ -202,19 +210,13 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery, feature_catego
)
end
- before do
- allow(service)
- .to receive(:load_balancer)
- .and_return(load_balancer)
- end
-
it 'replaces the hosts of the load balancer' do
service.replace_hosts([address_bar])
expect(load_balancer.host_list.host_names_and_ports).to eq([['bar', nil]])
end
- it 'disconnects the old connections' do
+ it 'disconnects the old connections gracefully if possible' do
host = load_balancer.host_list.hosts.first
allow(service)
@@ -222,11 +224,59 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery, feature_catego
.and_return(2)
expect(host)
- .to receive(:disconnect!)
- .with(timeout: 2)
+ .to receive(:try_disconnect).and_return(true)
+
+ expect(host).not_to receive(:force_disconnect!)
service.replace_hosts([address_bar])
end
+
+ it 'disconnects the old connections forcefully if necessary' do
+ host = load_balancer.host_list.hosts.first
+
+ allow(service)
+ .to receive(:disconnect_timeout)
+ .and_return(2)
+
+ expect(host)
+ .to receive(:try_disconnect).and_return(false)
+
+ expect(host).to receive(:force_disconnect!)
+
+ service.replace_hosts([address_bar])
+ end
+
+ context 'without old hosts' do
+ before do
+ allow(load_balancer.host_list).to receive(:hosts).and_return([])
+ end
+
+ it 'does not log any load balancing event' do
+ expect(::Gitlab::Database::LoadBalancing::Logger).not_to receive(:info)
+
+ service.replace_hosts([address_foo, address_bar])
+ end
+ end
+
+ context 'when LOAD_BALANCER_PARALLEL_DISCONNECT is false' do
+ before do
+ stub_env('LOAD_BALANCER_PARALLEL_DISCONNECT', 'false')
+ end
+
+ it 'disconnects them sequentially' do
+ host = load_balancer.host_list.hosts.first
+
+ allow(service)
+ .to receive(:disconnect_timeout)
+ .and_return(2)
+
+ expect(host)
+ .to receive(:disconnect!)
+ .with(timeout: 2)
+
+ service.replace_hosts([address_bar])
+ end
+ end
end
describe '#addresses_from_dns' do
@@ -475,4 +525,61 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery, feature_catego
end
end
end
+
+ context 'with service discovery connected to a real load balancer' do
+ let(:database_address) do
+ host, port = ApplicationRecord.connection_pool.db_config.configuration_hash.values_at(:host, :port)
+ described_class::Address.new(host, port)
+ end
+
+ before do
+ # set up the load balancer to point to the test postgres instance with three separate connections
+ allow(service).to receive(:addresses_from_dns)
+ .and_return([Gitlab::Database::LoadBalancing::Resolver::FAR_FUTURE_TTL,
+ [database_address, database_address, database_address]])
+ .once
+
+ service.perform_service_discovery
+ end
+
+ it 'configures service discovery with three replicas' do
+ expect(service.load_balancer.host_list.hosts.count).to eq(3)
+ end
+
+ it 'swaps the hosts out gracefully when not contended' do
+ expect(service.load_balancer.host_list.hosts.count).to eq(3)
+
+ host = service.load_balancer.host_list.next
+
+ # Check out and use a connection from a host so that there is something to clean up
+ host.pool.with_connection do |connection|
+ expect { connection.execute('select 1') }.not_to raise_error
+ end
+
+ allow(service).to receive(:addresses_from_dns).and_return([Gitlab::Database::LoadBalancing::Resolver::FAR_FUTURE_TTL, []])
+
+ service.load_balancer.host_list.hosts.each do |h|
+ # Expect that the host gets gracefully disconnected
+ expect(h).not_to receive(:force_disconnect!)
+ end
+
+ expect { service.perform_service_discovery }.to change { host.pool.stat[:connections] }.from(1).to(0)
+ end
+
+ it 'swaps the hosts out forcefully when contended' do
+ host = service.load_balancer.host_list.next
+
+ # Check out a connection and leave it checked out (simulate a web request)
+ connection = host.pool.checkout
+ connection.execute('select 1')
+
+ # Expect that the connection is forcefully checked in
+ expect(host).to receive(:force_disconnect!).and_call_original
+ expect(connection).to receive(:steal!).and_call_original
+
+ allow(service).to receive(:addresses_from_dns).and_return([Gitlab::Database::LoadBalancing::Resolver::FAR_FUTURE_TTL, []])
+
+ service.perform_service_discovery
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
index 7703b5680c2..aaca544ef80 100644
--- a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
@@ -8,6 +8,9 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
let(:location) { '0/D525E3A8' }
let(:wal_locations) { { Gitlab::Database::MAIN_DATABASE_NAME.to_s => location } }
let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'wal_locations' => wal_locations } }
+ let(:any_caught_up) { Gitlab::Database::LoadBalancing::LoadBalancer::ANY_CAUGHT_UP }
+ let(:all_caught_up) { Gitlab::Database::LoadBalancing::LoadBalancer::ALL_CAUGHT_UP }
+ let(:none_caught_up) { Gitlab::Database::LoadBalancing::LoadBalancer::NONE_CAUGHT_UP }
before do
skip_feature_flags_yaml_validation
@@ -67,7 +70,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
expect(ActiveRecord::Base.load_balancer)
.to receive(:select_up_to_date_host)
.with(location)
- .and_return(true)
+ .and_return(any_caught_up)
run_middleware do
expect(Gitlab::Database::LoadBalancing::Session.current.use_primary?).not_to be_truthy
@@ -86,7 +89,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
allow(lb)
.to receive(:select_up_to_date_host)
.with(location)
- .and_return(true)
+ .and_return(any_caught_up)
end
end
@@ -100,7 +103,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
allow(ActiveRecord::Base.load_balancer)
.to receive(:select_up_to_date_host)
.with(wal_locations[:main])
- .and_return(true)
+ .and_return(any_caught_up)
end
it_behaves_like 'replica is up to date', 'replica'
@@ -133,7 +136,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
context 'when replica is up to date' do
before do
Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
- allow(lb).to receive(:select_up_to_date_host).and_return(true)
+ allow(lb).to receive(:select_up_to_date_host).and_return(any_caught_up)
end
end
@@ -147,7 +150,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
context 'when replica is not up to date' do
before do
Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
- allow(lb).to receive(:select_up_to_date_host).and_return(false, true)
+ allow(lb).to receive(:select_up_to_date_host).and_return(none_caught_up, any_caught_up)
end
end
@@ -161,7 +164,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
context 'when replica is never up to date' do
before do
Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
- allow(lb).to receive(:select_up_to_date_host).and_return(false, false)
+ allow(lb).to receive(:select_up_to_date_host).and_return(none_caught_up, none_caught_up)
end
end
@@ -267,7 +270,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
context 'when replica is not up to date' do
before do
Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
- allow(lb).to receive(:select_up_to_date_host).and_return(false)
+ allow(lb).to receive(:select_up_to_date_host).and_return(none_caught_up)
end
end
@@ -282,7 +285,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
.to eq(true)
end
- it 'returns true when all load balancers are in sync' do
+ it 'returns true when all load balancers are in sync for some replicas' do
locations = {}
Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
@@ -291,7 +294,23 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
expect(lb)
.to receive(:select_up_to_date_host)
.with('foo')
- .and_return(true)
+ .and_return(any_caught_up)
+ end
+
+ expect(middleware.send(:databases_in_sync?, locations))
+ .to eq(true)
+ end
+
+ it 'returns true when all load balancers are in sync for all replicas' do
+ locations = {}
+
+ Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
+ locations[lb.name] = 'foo'
+
+ expect(lb)
+ .to receive(:select_up_to_date_host)
+ .with('foo')
+ .and_return(all_caught_up)
end
expect(middleware.send(:databases_in_sync?, locations))
@@ -307,7 +326,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
allow(lb)
.to receive(:select_up_to_date_host)
.with('foo')
- .and_return(false)
+ .and_return(none_caught_up)
end
expect(middleware.send(:databases_in_sync?, locations))
@@ -324,7 +343,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
allow(lb)
.to receive(:select_up_to_date_host)
.with('foo')
- .and_return(false)
+ .and_return(none_caught_up)
end
expect(middleware.send(:databases_in_sync?, locations))
@@ -346,8 +365,9 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
end
def replication_lag!(exists)
+ caught_up = exists ? none_caught_up : all_caught_up
Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
- allow(lb).to receive(:select_up_to_date_host).and_return(!exists)
+ allow(lb).to receive(:select_up_to_date_host).and_return(caught_up)
end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
index ff31a5cd6cb..8c2901c3b89 100644
--- a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
@@ -3,327 +3,142 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
- let(:sticking) do
- described_class.new(ActiveRecord::Base.load_balancer)
- end
+ let(:load_balancer) { ActiveRecord::Base.load_balancer }
+ let(:primary_write_location) { 'the-primary-lsn' }
+ let(:last_write_location) { 'the-last-write-lsn' }
- after do
- Gitlab::Database::LoadBalancing::Session.clear_session
+ let(:sticking) do
+ described_class.new(load_balancer)
end
- shared_examples 'sticking' do
- before do
- allow(ActiveRecord::Base.load_balancer)
- .to receive(:primary_write_location)
- .and_return('foo')
- end
-
- it 'sticks an entity to the primary', :aggregate_failures do
- allow(ActiveRecord::Base.load_balancer)
- .to receive(:primary_only?)
- .and_return(false)
-
- ids.each do |id|
- expect(sticking)
- .to receive(:set_write_location_for)
- .with(:user, id, 'foo')
- end
+ let(:redis) { instance_double(::Gitlab::Redis::MultiStore) }
- expect(Gitlab::Database::LoadBalancing::Session.current)
- .to receive(:use_primary!)
-
- subject
- end
+ before do
+ allow(::Gitlab::Redis::DbLoadBalancing).to receive(:with).and_yield(redis)
- it 'does not update the write location when no replicas are used' do
- expect(sticking).not_to receive(:set_write_location_for)
+ allow(ActiveRecord::Base.load_balancer)
+ .to receive(:primary_write_location)
+ .and_return(primary_write_location)
- subject
- end
+ allow(redis).to receive(:get)
+ .with("database-load-balancing/write-location/#{load_balancer.name}/user/42")
+ .and_return(last_write_location)
end
- shared_examples 'tracking status in redis' do
- describe '#stick_or_unstick_request' do
- it 'sticks or unsticks a single object and updates the Rack environment' do
- expect(sticking)
- .to receive(:unstick_or_continue_sticking)
- .with(:user, 42)
-
- env = {}
-
- sticking.stick_or_unstick_request(env, :user, 42)
-
- expect(env[Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].to_a)
- .to eq([[sticking, :user, 42]])
- end
-
- it 'sticks or unsticks multiple objects and updates the Rack environment' do
- expect(sticking)
- .to receive(:unstick_or_continue_sticking)
- .with(:user, 42)
- .ordered
-
- expect(sticking)
- .to receive(:unstick_or_continue_sticking)
- .with(:runner, '123456789')
- .ordered
-
- env = {}
-
- sticking.stick_or_unstick_request(env, :user, 42)
- sticking.stick_or_unstick_request(env, :runner, '123456789')
-
- expect(env[Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].to_a).to eq(
- [
- [sticking, :user, 42],
- [sticking, :runner,
- '123456789']
- ])
- end
- end
-
- describe '#stick_if_necessary' do
- it 'does not stick if no write was performed' do
- allow(Gitlab::Database::LoadBalancing::Session.current)
- .to receive(:performed_write?)
- .and_return(false)
-
- expect(sticking).not_to receive(:stick)
-
- sticking.stick_if_necessary(:user, 42)
- end
-
- it 'sticks to the primary if a write was performed' do
- allow(Gitlab::Database::LoadBalancing::Session.current)
- .to receive(:performed_write?)
- .and_return(true)
-
- expect(sticking)
- .to receive(:stick)
- .with(:user, 42)
+ after do
+ Gitlab::Database::LoadBalancing::Session.clear_session
+ end
- sticking.stick_if_necessary(:user, 42)
- end
+ describe '#find_caught_up_replica' do
+ before do
+ allow(ActiveSupport::Notifications).to receive(:instrument).and_call_original
end
- describe '#all_caught_up?' do
- let(:lb) { ActiveRecord::Base.load_balancer }
- let(:last_write_location) { 'foo' }
-
- before do
- allow(ActiveSupport::Notifications).to receive(:instrument).and_call_original
-
- allow(sticking)
- .to receive(:last_write_location_for)
- .with(:user, 42)
- .and_return(last_write_location)
- end
-
- context 'when no write location could be found' do
- let(:last_write_location) { nil }
-
- it 'returns true' do
- expect(lb).not_to receive(:select_up_to_date_host)
-
- expect(sticking.all_caught_up?(:user, 42)).to eq(true)
- end
- end
-
- context 'when all secondaries have caught up' do
- before do
- allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(true)
- end
-
- it 'returns true, and unsticks' do
- expect(sticking)
- .to receive(:unstick)
- .with(:user, 42)
-
- expect(sticking.all_caught_up?(:user, 42)).to eq(true)
- end
+ context 'when no write location could be found' do
+ let(:last_write_location) { nil }
- it 'notifies with the proper event payload' do
- expect(ActiveSupport::Notifications)
- .to receive(:instrument)
- .with('caught_up_replica_pick.load_balancing', { result: true })
- .and_call_original
+ it 'returns true' do
+ expect(load_balancer).not_to receive(:select_up_to_date_host)
- sticking.all_caught_up?(:user, 42)
- end
+ expect(sticking.find_caught_up_replica(:user, 42)).to eq(true)
end
- context 'when the secondaries have not yet caught up' do
- before do
- allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(false)
- end
-
- it 'returns false' do
- expect(sticking.all_caught_up?(:user, 42)).to eq(false)
- end
+ context 'when use_primary_on_empty_location is true' do
+ it 'returns false, does not unstick and calls use_primary!' do
+ expect(load_balancer).not_to receive(:select_up_to_date_host)
- it 'notifies with the proper event payload' do
- expect(ActiveSupport::Notifications)
- .to receive(:instrument)
- .with('caught_up_replica_pick.load_balancing', { result: false })
- .and_call_original
+ expect(redis).not_to receive(:del)
+ expect(::Gitlab::Database::LoadBalancing::Session.current).to receive(:use_primary!)
- sticking.all_caught_up?(:user, 42)
+ expect(sticking.find_caught_up_replica(:user, 42, use_primary_on_empty_location: true)).to eq(false)
end
end
end
- describe '#unstick_or_continue_sticking' do
- let(:lb) { ActiveRecord::Base.load_balancer }
-
- it 'simply returns if no write location could be found' do
- allow(sticking)
- .to receive(:last_write_location_for)
- .with(:user, 42)
- .and_return(nil)
-
- expect(lb).not_to receive(:select_up_to_date_host)
-
- sticking.unstick_or_continue_sticking(:user, 42)
- end
-
- it 'unsticks if all secondaries have caught up' do
- allow(sticking)
- .to receive(:last_write_location_for)
- .with(:user, 42)
- .and_return('foo')
+ context 'when all replicas have caught up' do
+ it 'returns true and unsticks' do
+ expect(load_balancer).to receive(:select_up_to_date_host).with(last_write_location)
+ .and_return(::Gitlab::Database::LoadBalancing::LoadBalancer::ALL_CAUGHT_UP)
- allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(true)
+ expect(redis)
+ .to receive(:del)
+ .with("database-load-balancing/write-location/#{load_balancer.name}/user/42")
- expect(sticking)
- .to receive(:unstick)
- .with(:user, 42)
-
- sticking.unstick_or_continue_sticking(:user, 42)
+ expect(sticking.find_caught_up_replica(:user, 42)).to eq(true)
end
+ end
- it 'continues using the primary if the secondaries have not yet caught up' do
- allow(sticking)
- .to receive(:last_write_location_for)
- .with(:user, 42)
- .and_return('foo')
-
- allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(false)
-
- expect(Gitlab::Database::LoadBalancing::Session.current)
- .to receive(:use_primary!)
+ context 'when only some of the replicas have caught up' do
+ it 'returns true and does not unstick' do
+ expect(load_balancer).to receive(:select_up_to_date_host).with(last_write_location)
+ .and_return(::Gitlab::Database::LoadBalancing::LoadBalancer::ANY_CAUGHT_UP)
- sticking.unstick_or_continue_sticking(:user, 42)
- end
- end
+ expect(redis).not_to receive(:del)
- describe '#stick' do
- it_behaves_like 'sticking' do
- let(:ids) { [42] }
- subject { sticking.stick(:user, ids.first) }
+ expect(sticking.find_caught_up_replica(:user, 42)).to eq(true)
end
end
- describe '#bulk_stick' do
- it_behaves_like 'sticking' do
- let(:ids) { [42, 43] }
- subject { sticking.bulk_stick(:user, ids) }
+ context 'when none of the replicas have caught up' do
+ before do
+ allow(load_balancer).to receive(:select_up_to_date_host).with(last_write_location)
+ .and_return(::Gitlab::Database::LoadBalancing::LoadBalancer::NONE_CAUGHT_UP)
end
- end
-
- describe '#mark_primary_write_location' do
- it 'updates the write location with the load balancer' do
- allow(ActiveRecord::Base.load_balancer)
- .to receive(:primary_write_location)
- .and_return('foo')
- allow(ActiveRecord::Base.load_balancer)
- .to receive(:primary_only?)
- .and_return(false)
+ it 'returns false, does not unstick and calls use_primary!' do
+ expect(redis).not_to receive(:del)
+ expect(::Gitlab::Database::LoadBalancing::Session.current).to receive(:use_primary!)
- expect(sticking)
- .to receive(:set_write_location_for)
- .with(:user, 42, 'foo')
-
- sticking.mark_primary_write_location(:user, 42)
+ expect(sticking.find_caught_up_replica(:user, 42)).to eq(false)
end
- it 'does nothing when no replicas are used' do
- expect(sticking).not_to receive(:set_write_location_for)
+ context 'when use_primary_on_failure is false' do
+ it 'does not call use_primary!' do
+ expect(redis).not_to receive(:del)
+ expect(::Gitlab::Database::LoadBalancing::Session.current).not_to receive(:use_primary!)
- sticking.mark_primary_write_location(:user, 42)
+ expect(sticking.find_caught_up_replica(:user, 42, use_primary_on_failure: false)).to eq(false)
+ end
end
end
+ end
- describe '#unstick' do
- it 'removes the sticking data from Redis' do
- sticking.set_write_location_for(:user, 4, 'foo')
- sticking.unstick(:user, 4)
+ shared_examples 'sticking' do
+ it 'sticks an entity to the primary', :aggregate_failures do
+ allow(ActiveRecord::Base.load_balancer)
+ .to receive(:primary_only?)
+ .and_return(false)
- expect(sticking.last_write_location_for(:user, 4)).to be_nil
+ ids.each do |id|
+ expect(redis)
+ .to receive(:set)
+ .with("database-load-balancing/write-location/#{load_balancer.name}/user/#{id}", 'the-primary-lsn', ex: 30)
end
- end
- describe '#last_write_location_for' do
- it 'returns the last WAL write location for a user' do
- sticking.set_write_location_for(:user, 4, 'foo')
+ expect(Gitlab::Database::LoadBalancing::Session.current)
+ .to receive(:use_primary!)
- expect(sticking.last_write_location_for(:user, 4)).to eq('foo')
- end
+ subject
end
- describe '#select_caught_up_replicas' do
- let(:lb) { ActiveRecord::Base.load_balancer }
-
- context 'with no write location' do
- before do
- allow(sticking)
- .to receive(:last_write_location_for)
- .with(:project, 42)
- .and_return(nil)
- end
-
- it 'returns false and does not try to find caught up hosts' do
- expect(lb).not_to receive(:select_up_to_date_host)
- expect(sticking.select_caught_up_replicas(:project, 42)).to be false
- end
- end
-
- context 'with write location' do
- before do
- allow(sticking)
- .to receive(:last_write_location_for)
- .with(:project, 42)
- .and_return('foo')
- end
+ it 'does not update the write location when no replicas are used' do
+ expect(sticking).not_to receive(:set_write_location_for)
- it 'returns true, selects hosts, and unsticks if any secondary has caught up' do
- expect(lb).to receive(:select_up_to_date_host).and_return(true)
- expect(sticking)
- .to receive(:unstick)
- .with(:project, 42)
- expect(sticking.select_caught_up_replicas(:project, 42)).to be true
- end
- end
+ subject
end
end
- context 'with multi-store feature flags turned on' do
- it_behaves_like 'tracking status in redis'
- end
-
- context 'when both multi-store feature flags are off' do
- before do
- stub_feature_flags(use_primary_and_secondary_stores_for_db_load_balancing: false)
- stub_feature_flags(use_primary_store_as_default_for_db_load_balancing: false)
+ describe '#stick' do
+ it_behaves_like 'sticking' do
+ let(:ids) { [42] }
+ subject { sticking.stick(:user, ids.first) }
end
-
- it_behaves_like 'tracking status in redis'
end
- describe '#redis_key_for' do
- it 'returns a String' do
- expect(sticking.redis_key_for(:user, 42))
- .to eq('database-load-balancing/write-location/main/user/42')
+ describe '#bulk_stick' do
+ it_behaves_like 'sticking' do
+ let(:ids) { [42, 43] }
+ subject { sticking.bulk_stick(:user, ids) }
end
end
end
diff --git a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
index 0b25389c667..a12e0909dc2 100644
--- a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
+++ b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
@@ -107,6 +107,8 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
before do
observe
+ rescue Exception # rubocop:disable Lint/RescueException
+ # ignore (we expect this exception)
end
it 'records a valid observation', :aggregate_failures do
diff --git a/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb b/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb
index 6cac7abb703..2fa4c9e562f 100644
--- a/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb
+++ b/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb
@@ -14,14 +14,17 @@ RSpec.describe 'cross-database foreign keys' do
'gitlab_subscriptions.hosted_plan_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422012
'group_import_states.user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/421210
'identities.saml_provider_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422010
- 'project_authorizations.user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422044
+ 'issues.author_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422154
+ 'issues.closed_by_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422154
+ 'issues.updated_by_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422154
+ 'issue_assignees.user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422154
'merge_requests.assignee_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422080
'merge_requests.updated_by_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422080
'merge_requests.merge_user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422080
'merge_requests.author_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422080
+ 'project_authorizations.user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422044
'projects.creator_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/421844
'projects.marked_for_deletion_by_user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/421844
- 'routes.namespace_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/420869
'user_group_callouts.user_id' # https://gitlab.com/gitlab-org/gitlab/-/issues/421287
]
end
diff --git a/spec/lib/gitlab/database/no_overrides_for_through_associations_spec.rb b/spec/lib/gitlab/database/no_overrides_for_through_associations_spec.rb
new file mode 100644
index 00000000000..ca7b6c8aa98
--- /dev/null
+++ b/spec/lib/gitlab/database/no_overrides_for_through_associations_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'overridden has_many :through associations', :eager_load, feature_category: :database do
+ let!(:allowed_overrides) do
+ [
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/424851
+ override_class.new(:assignees, 'app/models/concerns/deprecated_assignee.rb'),
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/424852
+ override_class.new(:authorized_projects, 'app/models/user.rb'),
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/424853
+ override_class.new(:project, 'app/models/incident_management/issuable_escalation_status.rb'),
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/424854
+ override_class.new(:remediations, 'ee/app/models/vulnerabilities/finding.rb')
+ ]
+ end
+
+ let!(:override_class) do
+ Struct.new(:method_name, :file_path, :association_class) do
+ def initialize(method_name, file_path, association_class = nil)
+ super(method_name, file_path, association_class)
+ end
+
+ def ==(other)
+ full_source_path, short_path =
+ file_path.length > other.file_path.length ? [file_path, other.file_path] : [other.file_path, file_path]
+ method_name == other.method_name && full_source_path.include?(short_path)
+ end
+
+ def association_type_name
+ if association_class == ActiveRecord::Associations::HasOneThroughAssociation
+ 'has_one through:'
+ else
+ 'has_many through:'
+ end
+ end
+ end
+ end
+
+ let!(:documentation_link) do
+ 'https://docs.gitlab.com/ee/development/gotchas.html#do-not-override-has_many-through-or-has_one-through-associations'
+ end
+
+ it 'only has an allowed list of overridden has_many/has_one :through associations', :aggregate_failures do
+ overridden_associations.each do |overriden_method|
+ expect(allowed_override?(overriden_method)).to be_truthy,
+ "Found an overridden #{overriden_method.association_type_name} association " \
+ "named `#{overriden_method.method_name}`, in #{overriden_method.file_path}, which isn't allowed. " \
+ "Overriding such associations can have dangerous impacts, see: #{documentation_link}"
+ end
+ end
+
+ private
+
+ def allowed_override?(overriden_method)
+ allowed_overrides.find do |override|
+ override == overriden_method
+ end
+ end
+
+ def overridden_associations
+ ApplicationRecord.descendants.reject(&:abstract_class?).each_with_object([]) do |klass, array|
+ through_reflections = klass.reflect_on_all_associations.select do |assoc|
+ assoc.is_a?(ActiveRecord::Reflection::ThroughReflection)
+ end
+
+ overridden_methods = through_reflections
+ .map { |association| [association.association_class, association.name] }
+ .map { |association_class, method_name| [method_name, source_location(klass, method_name), association_class] }
+ .reject { |_, source_location, _| source_location.include?('activerecord-') }
+
+ array << override_class.new(*overridden_methods.flatten) if overridden_methods.any?
+ end
+ end
+
+ def source_location(klass, method_name)
+ klass.instance_method(method_name).source_location.first
+ end
+end
diff --git a/spec/lib/gitlab/database/partitioning/ci_sliding_list_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/ci_sliding_list_strategy_spec.rb
index f415e892818..79c2c9e32d2 100644
--- a/spec/lib/gitlab/database/partitioning/ci_sliding_list_strategy_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/ci_sliding_list_strategy_spec.rb
@@ -175,4 +175,30 @@ RSpec.describe Gitlab::Database::Partitioning::CiSlidingListStrategy, feature_ca
end.not_to raise_error
end
end
+
+ describe 'attributes' do
+ let(:partitioning_key) { :partition }
+ let(:next_partition_if) { -> { true } }
+ let(:detach_partition_if) { -> { false } }
+ let(:analyze_interval) { 1.week }
+
+ subject(:strategy) do
+ described_class.new(
+ model, partitioning_key,
+ next_partition_if: next_partition_if,
+ detach_partition_if: detach_partition_if,
+ analyze_interval: analyze_interval
+ )
+ end
+
+ specify do
+ expect(strategy).to have_attributes({
+ model: model,
+ partitioning_key: partitioning_key,
+ next_partition_if: next_partition_if,
+ detach_partition_if: detach_partition_if,
+ analyze_interval: analyze_interval
+ })
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
index 50115a6f3dd..3afa338fdf7 100644
--- a/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do
+RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy, feature_category: :database do
let(:connection) { ActiveRecord::Base.connection }
describe '#current_partitions' do
@@ -273,4 +273,32 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do
end
end
end
+
+ describe 'attributes' do
+ let(:partitioning_key) { :partition }
+ let(:retain_non_empty_partitions) { true }
+ let(:retain_for) { 12.months }
+ let(:analyze_interval) { 1.week }
+ let(:model) { class_double(ApplicationRecord, table_name: table_name, connection: connection) }
+ let(:table_name) { :_test_partitioned_test }
+
+ subject(:strategy) do
+ described_class.new(
+ model, partitioning_key,
+ retain_for: retain_for,
+ retain_non_empty_partitions: retain_non_empty_partitions,
+ analyze_interval: analyze_interval
+ )
+ end
+
+ specify do
+ expect(strategy).to have_attributes({
+ model: model,
+ partitioning_key: partitioning_key,
+ retain_for: retain_for,
+ retain_non_empty_partitions: retain_non_empty_partitions,
+ analyze_interval: analyze_interval
+ })
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
index eac4a162879..c41228777ca 100644
--- a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
+RSpec.describe Gitlab::Database::Partitioning::PartitionManager, feature_category: :database do
+ include ActiveSupport::Testing::TimeHelpers
include Database::PartitioningHelpers
include ExclusiveLeaseHelpers
@@ -15,7 +16,7 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
let(:connection) { ActiveRecord::Base.connection }
let(:table) { partitioned_table_name }
let(:partitioning_strategy) do
- double(missing_partitions: partitions, extra_partitions: [], after_adding_partitions: nil)
+ double(missing_partitions: partitions, extra_partitions: [], after_adding_partitions: nil, analyze_interval: nil)
end
let(:partitions) do
@@ -125,7 +126,7 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
let(:connection) { ActiveRecord::Base.connection }
let(:table) { :_test_foo }
let(:partitioning_strategy) do
- double(extra_partitions: extra_partitions, missing_partitions: [], after_adding_partitions: nil)
+ double(extra_partitions: extra_partitions, missing_partitions: [], after_adding_partitions: nil, analyze_interval: nil)
end
before do
@@ -256,6 +257,154 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
end
end
+ describe 'analyze partitioned table' do
+ let(:analyze) { true }
+ let(:analyze_table) { partitioned_table_name }
+ let(:analyze_partition) { "#{partitioned_table_name}_1" }
+ let(:analyze_regex) { /ANALYZE VERBOSE "#{analyze_table}"/ }
+ let(:analyze_interval) { 1.week }
+ let(:connection) { my_model.connection }
+ let(:create_partition) { true }
+ let(:my_model) do
+ interval = analyze_interval
+ Class.new(ApplicationRecord) do
+ include PartitionedTable
+
+ partitioned_by :partition_id,
+ strategy: :ci_sliding_list,
+ next_partition_if: proc { false },
+ detach_partition_if: proc { false },
+ analyze_interval: interval
+ end
+ end
+
+ shared_examples_for 'run only once analyze within interval' do
+ specify do
+ control = ActiveRecord::QueryRecorder.new { described_class.new(my_model, connection: connection).sync_partitions(analyze: analyze) }
+ expect(control.occurrences).to include(analyze_regex)
+
+ control = ActiveRecord::QueryRecorder.new { described_class.new(my_model, connection: connection).sync_partitions(analyze: analyze) }
+ expect(control.occurrences).not_to include(analyze_regex)
+
+ travel_to((analyze_interval * 2).since) do
+ control = ActiveRecord::QueryRecorder.new { described_class.new(my_model, connection: connection).sync_partitions(analyze: analyze) }
+ expect(control.occurrences).to include(analyze_regex)
+ end
+ end
+ end
+
+ shared_examples_for 'not to run the analyze at all' do
+ specify do
+ control = ActiveRecord::QueryRecorder.new { described_class.new(my_model, connection: connection).sync_partitions(analyze: analyze) }
+ expect(control.occurrences).not_to include(analyze_regex)
+
+ control = ActiveRecord::QueryRecorder.new { described_class.new(my_model, connection: connection).sync_partitions(analyze: analyze) }
+ expect(control.occurrences).not_to include(analyze_regex)
+
+ travel_to((analyze_interval * 2).since) do
+ control = ActiveRecord::QueryRecorder.new { described_class.new(my_model, connection: connection).sync_partitions(analyze: analyze) }
+ expect(control.occurrences).not_to include(analyze_regex)
+ end
+ end
+ end
+
+ before do
+ my_model.table_name = partitioned_table_name
+
+ connection.execute(<<~SQL)
+ CREATE TABLE #{analyze_table}(id serial) PARTITION BY LIST (id);
+ SQL
+
+ connection.execute(<<~SQL) if create_partition
+ CREATE TABLE IF NOT EXISTS #{analyze_partition} PARTITION OF #{analyze_table} FOR VALUES IN (1);
+ SQL
+
+ allow(connection).to receive(:select_value).and_return(nil, Time.current, Time.current)
+ end
+
+ context 'when feature flag database_analyze_on_partitioned_tables is enabled' do
+ before do
+ stub_feature_flags(database_analyze_on_partitioned_tables: true)
+ end
+
+ it_behaves_like 'run only once analyze within interval'
+
+ context 'when analyze is false' do
+ let(:analyze) { false }
+
+ it_behaves_like 'not to run the analyze at all'
+ end
+
+ context 'when model does not set analyze_interval' do
+ let(:my_model) do
+ Class.new(ApplicationRecord) do
+ include PartitionedTable
+
+ partitioned_by :partition_id,
+ strategy: :ci_sliding_list,
+ next_partition_if: proc { false },
+ detach_partition_if: proc { false }
+ end
+ end
+
+ it_behaves_like 'not to run the analyze at all'
+ end
+
+ context 'when no partition is created' do
+ let(:create_partition) { false }
+
+ it_behaves_like 'run only once analyze within interval'
+ end
+ end
+
+ context 'when feature flag database_analyze_on_partitioned_tables is disabled' do
+ before do
+ stub_feature_flags(database_analyze_on_partitioned_tables: false)
+ end
+
+ it_behaves_like 'not to run the analyze at all'
+
+ context 'when analyze is false' do
+ let(:analyze) { false }
+
+ it_behaves_like 'not to run the analyze at all'
+ end
+
+ context 'when model does not set analyze_interval' do
+ let(:my_model) do
+ Class.new(ApplicationRecord) do
+ include PartitionedTable
+
+ partitioned_by :partition_id,
+ strategy: :ci_sliding_list,
+ next_partition_if: proc { false },
+ detach_partition_if: proc { false }
+ end
+ end
+
+ it_behaves_like 'not to run the analyze at all'
+ end
+
+ context 'when no partition is created' do
+ let(:create_partition) { false }
+
+ it_behaves_like 'not to run the analyze at all'
+ end
+ end
+ end
+
+ describe 'strategies that support analyze_interval' do
+ [
+ ::Gitlab::Database::Partitioning::MonthlyStrategy,
+ ::Gitlab::Database::Partitioning::SlidingListStrategy,
+ ::Gitlab::Database::Partitioning::CiSlidingListStrategy
+ ].each do |klass|
+ specify "#{klass} supports analyze_interval" do
+ expect(klass).to be_method_defined(:analyze_interval)
+ end
+ end
+ end
+
context 'creating and then detaching partitions for a table' do
let(:connection) { ActiveRecord::Base.connection }
let(:my_model) do
diff --git a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
index 5b6967c2d14..ac4d345271e 100644
--- a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
@@ -290,4 +290,30 @@ RSpec.describe Gitlab::Database::Partitioning::SlidingListStrategy, feature_cate
expect(partition_3_model.partition).to eq(3)
end
end
+
+ describe 'attributes' do
+ let(:partitioning_key) { :partition }
+ let(:next_partition_if) { -> { puts "next_partition_if" } }
+ let(:detach_partition_if) { -> { puts "detach_partition_if" } }
+ let(:analyze_interval) { 1.week }
+
+ subject(:strategy) do
+ described_class.new(
+ model, partitioning_key,
+ next_partition_if: next_partition_if,
+ detach_partition_if: detach_partition_if,
+ analyze_interval: analyze_interval
+ )
+ end
+
+ specify do
+ expect(strategy).to have_attributes({
+ model: model,
+ partitioning_key: partitioning_key,
+ next_partition_if: next_partition_if,
+ detach_partition_if: detach_partition_if,
+ analyze_interval: analyze_interval
+ })
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/partitioning_spec.rb b/spec/lib/gitlab/database/partitioning_spec.rb
index a1ae75ac916..e53e0cb8def 100644
--- a/spec/lib/gitlab/database/partitioning_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_spec.rb
@@ -8,6 +8,10 @@ RSpec.describe Gitlab::Database::Partitioning, feature_category: :database do
let(:main_connection) { ApplicationRecord.connection }
+ before do
+ stub_feature_flags(disallow_database_ddl_feature_flags: false)
+ end
+
around do |example|
previously_registered_models = described_class.registered_models.dup
described_class.instance_variable_set(:@registered_models, Set.new)
@@ -32,7 +36,7 @@ RSpec.describe Gitlab::Database::Partitioning, feature_category: :database do
describe '.sync_partitions_ignore_db_error' do
it 'calls sync_partitions' do
- expect(described_class).to receive(:sync_partitions)
+ expect(described_class).to receive(:sync_partitions).with(analyze: false)
described_class.sync_partitions_ignore_db_error
end
@@ -100,6 +104,55 @@ RSpec.describe Gitlab::Database::Partitioning, feature_category: :database do
.and change { find_partitions(table_names.last).size }.from(0)
end
+ context 'for analyze' do
+ let(:analyze_regex) { /ANALYZE VERBOSE / }
+ let(:analyze) { true }
+
+ shared_examples_for 'not running analyze' do
+ specify do
+ control = ActiveRecord::QueryRecorder.new { described_class.sync_partitions(analyze: analyze) }
+ expect(control.occurrences).not_to include(analyze_regex)
+ end
+ end
+
+ context 'when analyze_interval is not set' do
+ it_behaves_like 'not running analyze'
+
+ context 'when analyze is set to false' do
+ it_behaves_like 'not running analyze'
+ end
+ end
+
+ context 'when analyze_interval is set' do
+ let(:models) do
+ [
+ Class.new(ApplicationRecord) do
+ include PartitionedTable
+
+ self.table_name = :_test_partitioning_test1
+ partitioned_by :created_at, strategy: :monthly, analyze_interval: 1.week
+ end,
+ Class.new(Gitlab::Database::Partitioning::TableWithoutModel).tap do |klass|
+ klass.table_name = :_test_partitioning_test2
+ klass.partitioned_by(:created_at, strategy: :monthly, analyze_interval: 1.week)
+ klass.limit_connection_names = %i[main]
+ end
+ ]
+ end
+
+ it 'runs analyze' do
+ control = ActiveRecord::QueryRecorder.new { described_class.sync_partitions(models, analyze: analyze) }
+ expect(control.occurrences).to include(analyze_regex)
+ end
+
+ context 'when analyze is false' do
+ let(:analyze) { false }
+
+ it_behaves_like 'not running analyze'
+ end
+ end
+ end
+
context 'with multiple databases' do
it 'creates partitions in each database' do
skip_if_shared_database(:ci)
@@ -165,11 +218,11 @@ RSpec.describe Gitlab::Database::Partitioning, feature_category: :database do
execute_on_each_database("DROP TABLE IF EXISTS #{table_name}")
execute_on_each_database(<<~SQL)
- CREATE TABLE #{table_name} (
- id serial not null,
- created_at timestamptz not null,
- PRIMARY KEY (id, created_at))
- PARTITION BY RANGE (created_at);
+ CREATE TABLE #{table_name} (
+ id serial not null,
+ created_at timestamptz not null,
+ PRIMARY KEY (id, created_at))
+ PARTITION BY RANGE (created_at);
SQL
end
end
@@ -204,6 +257,20 @@ RSpec.describe Gitlab::Database::Partitioning, feature_category: :database do
described_class.sync_partitions(models)
end
end
+
+ context 'when disallow_database_ddl_feature_flags feature flag is enabled' do
+ before do
+ described_class.register_models(models)
+ stub_feature_flags(disallow_database_ddl_feature_flags: true)
+ end
+
+ it 'skips sync_partitions' do
+ expect(described_class::PartitionManager).not_to receive(:new)
+ expect(described_class).to receive(:sync_partitions).and_call_original
+
+ described_class.sync_partitions(models)
+ end
+ end
end
describe '.report_metrics' do
@@ -277,6 +344,18 @@ RSpec.describe Gitlab::Database::Partitioning, feature_category: :database do
end
end
+ context 'when the disallow_database_ddl_feature_flags feature flag is enabled' do
+ before do
+ stub_feature_flags(disallow_database_ddl_feature_flags: true)
+ end
+
+ it 'does not call the DetachedPartitionDropper' do
+ expect(Gitlab::Database::Partitioning::DetachedPartitionDropper).not_to receive(:new)
+
+ described_class.drop_detached_partitions
+ end
+ end
+
def table_exists?(table_name)
table_oid(table_name).present?
end
diff --git a/spec/lib/gitlab/database/reindexing_spec.rb b/spec/lib/gitlab/database/reindexing_spec.rb
index 851fc7ea3cd..441f6476abe 100644
--- a/spec/lib/gitlab/database/reindexing_spec.rb
+++ b/spec/lib/gitlab/database/reindexing_spec.rb
@@ -6,6 +6,10 @@ RSpec.describe Gitlab::Database::Reindexing, feature_category: :database, time_t
include ExclusiveLeaseHelpers
include Database::DatabaseHelpers
+ before do
+ stub_feature_flags(disallow_database_ddl_feature_flags: false)
+ end
+
describe '.invoke' do
let(:databases) { Gitlab::Database.database_base_models_with_gitlab_shared }
let(:databases_count) { databases.count }
@@ -44,6 +48,14 @@ RSpec.describe Gitlab::Database::Reindexing, feature_category: :database, time_t
described_class.invoke
end
+
+ it 'does not execute async index creation when disable ddl flag is enabled' do
+ stub_feature_flags(disallow_database_ddl_feature_flags: true)
+
+ expect(Gitlab::Database::AsyncIndexes).not_to receive(:create_pending_indexes!)
+
+ described_class.invoke
+ end
end
it 'executes async index destruction prior to any reindexing actions' do
@@ -86,6 +98,14 @@ RSpec.describe Gitlab::Database::Reindexing, feature_category: :database, time_t
described_class.invoke
end
+
+ it 'does not execute async index validation when disable ddl flag is enabled' do
+ stub_feature_flags(disallow_database_ddl_feature_flags: true)
+
+ expect(Gitlab::Database::AsyncIndexes).not_to receive(:validate_pending_entries!)
+
+ described_class.invoke
+ end
end
end
diff --git a/spec/lib/gitlab/database/tables_truncate_spec.rb b/spec/lib/gitlab/database/tables_truncate_spec.rb
index 04bec50088d..e41c7d34378 100644
--- a/spec/lib/gitlab/database/tables_truncate_spec.rb
+++ b/spec/lib/gitlab/database/tables_truncate_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
let(:min_batch_size) { 1 }
let(:main_connection) { ApplicationRecord.connection }
let(:ci_connection) { Ci::ApplicationRecord.connection }
+ let(:logger) { instance_double(Logger) }
# Main Database
let(:main_db_main_item_model) { table("_test_gitlab_main_items", database: "main") }
@@ -32,8 +33,123 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
table("gitlab_partitions_dynamic._test_gitlab_hook_logs_202201", database: "ci")
end
+ before do
+ skip_if_shared_database(:ci)
+
+ # Creating some test tables on the main database
+ main_tables_sql = <<~SQL
+ CREATE TABLE _test_gitlab_main_items (id serial NOT NULL PRIMARY KEY);
+
+ CREATE TABLE _test_gitlab_main_references (
+ id serial NOT NULL PRIMARY KEY,
+ item_id BIGINT NOT NULL,
+ CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_main_items(id)
+ );
+
+ CREATE TABLE _test_gitlab_hook_logs (
+ id bigserial not null,
+ created_at timestamptz not null,
+ item_id BIGINT NOT NULL,
+ PRIMARY KEY (id, created_at),
+ CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_main_items(id)
+ ) PARTITION BY RANGE(created_at);
+
+ CREATE TABLE gitlab_partitions_dynamic._test_gitlab_hook_logs_202201
+ PARTITION OF _test_gitlab_hook_logs
+ FOR VALUES FROM ('20220101') TO ('20220131');
+
+ CREATE TABLE gitlab_partitions_dynamic._test_gitlab_hook_logs_202202
+ PARTITION OF _test_gitlab_hook_logs
+ FOR VALUES FROM ('20220201') TO ('20220228');
+
+ ALTER TABLE _test_gitlab_hook_logs DETACH PARTITION gitlab_partitions_dynamic._test_gitlab_hook_logs_202201;
+ SQL
+
+ execute_on_each_database(main_tables_sql)
+
+ ci_tables_sql = <<~SQL
+ CREATE TABLE _test_gitlab_ci_items (id serial NOT NULL PRIMARY KEY);
+
+ CREATE TABLE _test_gitlab_ci_references (
+ id serial NOT NULL PRIMARY KEY,
+ item_id BIGINT NOT NULL,
+ CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_ci_items(id)
+ );
+ SQL
+
+ execute_on_each_database(ci_tables_sql)
+
+ internal_tables_sql = <<~SQL
+ CREATE TABLE _test_gitlab_shared_items (id serial NOT NULL PRIMARY KEY);
+ SQL
+
+ execute_on_each_database(internal_tables_sql)
+
+ # Filling the tables
+ 5.times do |i|
+ # Main Database
+ main_db_main_item_model.create!(id: i)
+ main_db_main_reference_model.create!(item_id: i)
+ main_db_ci_item_model.create!(id: i)
+ main_db_ci_reference_model.create!(item_id: i)
+ main_db_shared_item_model.create!(id: i)
+ main_db_partitioned_item.create!(item_id: i, created_at: '2022-02-02 02:00')
+ main_db_partitioned_item_detached.create!(item_id: i, created_at: '2022-01-01 01:00')
+ # CI Database
+ ci_db_main_item_model.create!(id: i)
+ ci_db_main_reference_model.create!(item_id: i)
+ ci_db_ci_item_model.create!(id: i)
+ ci_db_ci_reference_model.create!(item_id: i)
+ ci_db_shared_item_model.create!(id: i)
+ ci_db_partitioned_item.create!(item_id: i, created_at: '2022-02-02 02:00')
+ ci_db_partitioned_item_detached.create!(item_id: i, created_at: '2022-01-01 01:00')
+ end
+
+ Gitlab::Database::SharedModel.using_connection(main_connection) do
+ Postgresql::DetachedPartition.create!(
+ table_name: '_test_gitlab_hook_logs_202201',
+ drop_after: Time.current
+ )
+ end
+
+ Gitlab::Database::SharedModel.using_connection(ci_connection) do
+ Postgresql::DetachedPartition.create!(
+ table_name: '_test_gitlab_hook_logs_202201',
+ drop_after: Time.current
+ )
+ end
+
+ allow(Gitlab::Database::GitlabSchema).to receive(:tables_to_schema).and_return(
+ {
+ "_test_gitlab_main_items" => :gitlab_main,
+ "_test_gitlab_main_references" => :gitlab_main,
+ "_test_gitlab_hook_logs" => :gitlab_main,
+ "_test_gitlab_ci_items" => :gitlab_ci,
+ "_test_gitlab_ci_references" => :gitlab_ci,
+ "_test_gitlab_shared_items" => :gitlab_shared,
+ "_test_gitlab_geo_items" => :gitlab_geo
+ }
+ )
+
+ allow(Gitlab::Database::GitlabSchema).to receive(:views_and_tables_to_schema).and_return(
+ {
+ "_test_gitlab_main_items" => :gitlab_main,
+ "_test_gitlab_main_references" => :gitlab_main,
+ "_test_gitlab_hook_logs" => :gitlab_main,
+ "_test_gitlab_ci_items" => :gitlab_ci,
+ "_test_gitlab_ci_references" => :gitlab_ci,
+ "_test_gitlab_shared_items" => :gitlab_shared,
+ "_test_gitlab_geo_items" => :gitlab_geo,
+ "detached_partitions" => :gitlab_shared,
+ "postgres_foreign_keys" => :gitlab_shared,
+ "postgres_partitions" => :gitlab_shared
+ }
+ )
+
+ allow(logger).to receive(:info).with(any_args)
+ end
+
shared_examples 'truncating legacy tables on a database' do
- let(:logger) { instance_double(Logger) }
let(:dry_run) { false }
let(:until_table) { nil }
@@ -47,122 +163,6 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
).execute
end
- before do
- skip_if_shared_database(:ci)
-
- # Creating some test tables on the main database
- main_tables_sql = <<~SQL
- CREATE TABLE _test_gitlab_main_items (id serial NOT NULL PRIMARY KEY);
-
- CREATE TABLE _test_gitlab_main_references (
- id serial NOT NULL PRIMARY KEY,
- item_id BIGINT NOT NULL,
- CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_main_items(id)
- );
-
- CREATE TABLE _test_gitlab_hook_logs (
- id bigserial not null,
- created_at timestamptz not null,
- item_id BIGINT NOT NULL,
- PRIMARY KEY (id, created_at),
- CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_main_items(id)
- ) PARTITION BY RANGE(created_at);
-
- CREATE TABLE gitlab_partitions_dynamic._test_gitlab_hook_logs_202201
- PARTITION OF _test_gitlab_hook_logs
- FOR VALUES FROM ('20220101') TO ('20220131');
-
- CREATE TABLE gitlab_partitions_dynamic._test_gitlab_hook_logs_202202
- PARTITION OF _test_gitlab_hook_logs
- FOR VALUES FROM ('20220201') TO ('20220228');
-
- ALTER TABLE _test_gitlab_hook_logs DETACH PARTITION gitlab_partitions_dynamic._test_gitlab_hook_logs_202201;
- SQL
-
- execute_on_each_database(main_tables_sql)
-
- ci_tables_sql = <<~SQL
- CREATE TABLE _test_gitlab_ci_items (id serial NOT NULL PRIMARY KEY);
-
- CREATE TABLE _test_gitlab_ci_references (
- id serial NOT NULL PRIMARY KEY,
- item_id BIGINT NOT NULL,
- CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_ci_items(id)
- );
- SQL
-
- execute_on_each_database(ci_tables_sql)
-
- internal_tables_sql = <<~SQL
- CREATE TABLE _test_gitlab_shared_items (id serial NOT NULL PRIMARY KEY);
- SQL
-
- execute_on_each_database(internal_tables_sql)
-
- # Filling the tables
- 5.times do |i|
- # Main Database
- main_db_main_item_model.create!(id: i)
- main_db_main_reference_model.create!(item_id: i)
- main_db_ci_item_model.create!(id: i)
- main_db_ci_reference_model.create!(item_id: i)
- main_db_shared_item_model.create!(id: i)
- main_db_partitioned_item.create!(item_id: i, created_at: '2022-02-02 02:00')
- main_db_partitioned_item_detached.create!(item_id: i, created_at: '2022-01-01 01:00')
- # CI Database
- ci_db_main_item_model.create!(id: i)
- ci_db_main_reference_model.create!(item_id: i)
- ci_db_ci_item_model.create!(id: i)
- ci_db_ci_reference_model.create!(item_id: i)
- ci_db_shared_item_model.create!(id: i)
- ci_db_partitioned_item.create!(item_id: i, created_at: '2022-02-02 02:00')
- ci_db_partitioned_item_detached.create!(item_id: i, created_at: '2022-01-01 01:00')
- end
-
- Gitlab::Database::SharedModel.using_connection(main_connection) do
- Postgresql::DetachedPartition.create!(
- table_name: '_test_gitlab_hook_logs_202201',
- drop_after: Time.current
- )
- end
-
- Gitlab::Database::SharedModel.using_connection(ci_connection) do
- Postgresql::DetachedPartition.create!(
- table_name: '_test_gitlab_hook_logs_202201',
- drop_after: Time.current
- )
- end
-
- allow(Gitlab::Database::GitlabSchema).to receive(:tables_to_schema).and_return(
- {
- "_test_gitlab_main_items" => :gitlab_main,
- "_test_gitlab_main_references" => :gitlab_main,
- "_test_gitlab_hook_logs" => :gitlab_main,
- "_test_gitlab_ci_items" => :gitlab_ci,
- "_test_gitlab_ci_references" => :gitlab_ci,
- "_test_gitlab_shared_items" => :gitlab_shared,
- "_test_gitlab_geo_items" => :gitlab_geo
- }
- )
-
- allow(Gitlab::Database::GitlabSchema).to receive(:views_and_tables_to_schema).and_return(
- {
- "_test_gitlab_main_items" => :gitlab_main,
- "_test_gitlab_main_references" => :gitlab_main,
- "_test_gitlab_hook_logs" => :gitlab_main,
- "_test_gitlab_ci_items" => :gitlab_ci,
- "_test_gitlab_ci_references" => :gitlab_ci,
- "_test_gitlab_shared_items" => :gitlab_shared,
- "_test_gitlab_geo_items" => :gitlab_geo,
- "detached_partitions" => :gitlab_shared,
- "postgres_foreign_keys" => :gitlab_shared,
- "postgres_partitions" => :gitlab_shared
- }
- )
-
- allow(logger).to receive(:info).with(any_args)
- end
-
context 'when the truncated tables are not locked for writes' do
it 'raises an error that the tables are not locked for writes' do
error_message = /is not locked for writes. Run the rake task gitlab:db:lock_writes first/
@@ -348,6 +348,50 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
end
end
+ describe '#needs_truncation?' do
+ let(:database_name) { 'ci' }
+
+ subject { described_class.new(database_name: database_name).needs_truncation? }
+
+ context 'when running in a single database mode' do
+ before do
+ skip_if_multiple_databases_are_setup(:ci)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when running in a multiple database mode' do
+ before do
+ skip_if_shared_database(:ci)
+ end
+
+ context 'with main data in ci database' do
+ it { is_expected.to eq(true) }
+ end
+
+ context 'with no main data in ci database' do
+ before do
+ # Remove 'main' data in ci database
+ ci_connection.truncate_tables([:_test_gitlab_main_items, :_test_gitlab_main_references])
+ end
+
+ it { is_expected.to eq(false) }
+
+ it 'suppresses some QueryAnalyzers' do
+ expect(
+ Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection
+ ).to receive(:with_suppressed).and_call_original
+ expect(
+ Gitlab::Database::QueryAnalyzers::Ci::PartitioningRoutingAnalyzer
+ ).to receive(:with_suppressed).and_call_original
+
+ subject
+ end
+ end
+ end
+ end
+
def geo_configured?
!!ActiveRecord::Base.configurations.configs_for(env_name: Rails.env, name: 'geo')
end
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index 0d8fa4dad6d..6dd7d29ab42 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -198,59 +198,6 @@ RSpec.describe Gitlab::Database, feature_category: :database do
end
end
- describe '.check_postgres_version_and_print_warning' do
- let(:reflect) { instance_spy(Gitlab::Database::Reflection) }
-
- subject { described_class.check_postgres_version_and_print_warning }
-
- before do
- allow(Gitlab::Database::Reflection)
- .to receive(:new)
- .and_return(reflect)
- end
-
- it 'prints a warning if not compliant with minimum postgres version' do
- allow(reflect).to receive(:postgresql_minimum_supported_version?).and_return(false)
-
- expect(Kernel)
- .to receive(:warn)
- .with(/You are using PostgreSQL/)
- .exactly(described_class.database_base_models.length)
- .times
-
- subject
- end
-
- it 'doesnt print a warning if compliant with minimum postgres version' do
- allow(reflect).to receive(:postgresql_minimum_supported_version?).and_return(true)
-
- expect(Kernel).not_to receive(:warn).with(/You are using PostgreSQL/)
-
- subject
- end
-
- it 'doesnt print a warning in Rails runner environment' do
- allow(reflect).to receive(:postgresql_minimum_supported_version?).and_return(false)
- allow(Gitlab::Runtime).to receive(:rails_runner?).and_return(true)
-
- expect(Kernel).not_to receive(:warn).with(/You are using PostgreSQL/)
-
- subject
- end
-
- it 'ignores ActiveRecord errors' do
- allow(reflect).to receive(:postgresql_minimum_supported_version?).and_raise(ActiveRecord::ActiveRecordError)
-
- expect { subject }.not_to raise_error
- end
-
- it 'ignores Postgres errors' do
- allow(reflect).to receive(:postgresql_minimum_supported_version?).and_raise(PG::Error)
-
- expect { subject }.not_to raise_error
- end
- end
-
describe '.db_config_for_connection' do
context 'when the regular connection is used' do
it 'returns db_config' do
diff --git a/spec/lib/gitlab/database_warnings_spec.rb b/spec/lib/gitlab/database_warnings_spec.rb
new file mode 100644
index 00000000000..6658190b94c
--- /dev/null
+++ b/spec/lib/gitlab/database_warnings_spec.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::DatabaseWarnings, feature_category: :database do
+ describe '.check_postgres_version_and_print_warning' do
+ let(:reflect) { instance_spy(Gitlab::Database::Reflection) }
+
+ subject { described_class.check_postgres_version_and_print_warning }
+
+ before do
+ allow(Gitlab::Database::Reflection)
+ .to receive(:new)
+ .and_return(reflect)
+ end
+
+ it 'prints a warning if not compliant with minimum postgres version' do
+ allow(reflect).to receive(:postgresql_minimum_supported_version?).and_return(false)
+
+ expect(Kernel)
+ .to receive(:warn)
+ .with(/You are using PostgreSQL/)
+ .exactly(Gitlab::Database.database_base_models.length)
+ .times
+
+ subject
+ end
+
+ it 'does not print a warning if compliant with minimum postgres version' do
+ allow(reflect).to receive(:postgresql_minimum_supported_version?).and_return(true)
+
+ expect(Kernel).not_to receive(:warn).with(/You are using PostgreSQL/)
+
+ subject
+ end
+
+ it 'does not print a warning in Rails runner environment' do
+ allow(reflect).to receive(:postgresql_minimum_supported_version?).and_return(false)
+ allow(Gitlab::Runtime).to receive(:rails_runner?).and_return(true)
+
+ expect(Kernel).not_to receive(:warn).with(/You are using PostgreSQL/)
+
+ subject
+ end
+
+ it 'ignores ActiveRecord errors' do
+ allow(reflect).to receive(:postgresql_minimum_supported_version?).and_raise(ActiveRecord::ActiveRecordError)
+
+ expect { subject }.not_to raise_error
+ end
+
+ it 'ignores Postgres errors' do
+ allow(reflect).to receive(:postgresql_minimum_supported_version?).and_raise(PG::Error)
+
+ expect { subject }.not_to raise_error
+ end
+ end
+
+ describe '.check_single_connection_and_print_warning' do
+ subject { described_class.check_single_connection_and_print_warning }
+
+ it 'prints a warning if single connection' do
+ allow(Gitlab::Database).to receive(:database_mode).and_return(Gitlab::Database::MODE_SINGLE_DATABASE)
+
+ expect(Kernel).to receive(:warn).with(/Your database has a single connection/)
+
+ subject
+ end
+
+ it 'does not print a warning if single ci connection' do
+ allow(Gitlab::Database).to receive(:database_mode)
+ .and_return(Gitlab::Database::MODE_SINGLE_DATABASE_CI_CONNECTION)
+
+ expect(Kernel).not_to receive(:warn)
+
+ subject
+ end
+
+ it 'does not print a warning for multiple connections' do
+ allow(Gitlab::Database).to receive(:database_mode).and_return(Gitlab::Database::MODE_MULTIPLE_DATABASES)
+
+ expect(Kernel).not_to receive(:warn)
+
+ subject
+ end
+
+ it 'does not print a warning in Rails runner environment' do
+ allow(Gitlab::Database).to receive(:database_mode).and_return(Gitlab::Database::MODE_SINGLE_DATABASE)
+ allow(Gitlab::Runtime).to receive(:rails_runner?).and_return(true)
+
+ expect(Kernel).not_to receive(:warn)
+
+ subject
+ end
+ end
+end
diff --git a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
index e3b0e90bff9..c7b69f39951 100644
--- a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
@@ -68,7 +68,7 @@ RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do
end
context 'when the issue is a Service Desk issue' do
- let(:original_recipient) { User.support_bot }
+ let(:original_recipient) { Users::Internal.support_bot }
it 'does not raise a UserNotFoundError' do
expect { receiver.execute }.not_to raise_error
@@ -209,7 +209,7 @@ RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do
context 'when note is authored from external author for service desk' do
before do
- SentNotification.find_by(reply_key: mail_key).update!(recipient: User.support_bot)
+ SentNotification.find_by(reply_key: mail_key).update!(recipient: Users::Internal.support_bot)
end
context 'when email contains text, quoted text and quick commands' do
diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
index 98522c53a47..6941ebd2e11 100644
--- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
new_issue = Issue.last
- expect(new_issue.author).to eql(User.support_bot)
+ expect(new_issue.author).to eql(Users::Internal.support_bot)
expect(new_issue.confidential?).to be true
expect(new_issue.all_references.all).to be_empty
expect(new_issue.title).to eq("The message subject! @all")
@@ -131,7 +131,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
expect(notes.count).to eq(1)
expect(new_note.note).to eq("Service desk reply!\n\n`/label ~label2`")
- expect(new_note.author).to eql(User.support_bot)
+ expect(new_note.author).to eql(Users::Internal.support_bot)
end
it 'does not send thank you email' do
@@ -267,7 +267,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
issue = Issue.last
expect(issue.description).to include('Text from service_desk2 template')
expect(issue.label_ids).to include(label.id)
- expect(issue.author_id).to eq(User.support_bot.id)
+ expect(issue.author_id).to eq(Users::Internal.support_bot.id)
expect(issue.milestone).to eq(milestone)
end
end
@@ -294,7 +294,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
note = Note.last
expect(note.note).to include("WARNING: The template file unknown.md used for service desk issues is empty or could not be found.")
- expect(note.author).to eq(User.support_bot)
+ expect(note.author).to eq(Users::Internal.support_bot)
end
it 'does not send warning note email' do
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/admin_verify_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/admin_verify_spec.rb
deleted file mode 100644
index 7a09feb5b64..00000000000
--- a/spec/lib/gitlab/email/message/in_product_marketing/admin_verify_spec.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Email::Message::InProductMarketing::AdminVerify do
- let_it_be(:group) { build(:group) }
- let_it_be(:user) { build(:user) }
-
- let(:series) { 0 }
-
- subject(:message) { described_class.new(group: group, user: user, series: series) }
-
- describe 'public methods' do
- it 'returns value for series', :aggregate_failures do
- expect(message.subject_line).to eq 'Create a custom CI runner with just a few clicks'
- expect(message.tagline).to be_nil
- expect(message.title).to eq 'Spin up an autoscaling runner in GitLab'
- expect(message.subtitle).to eq 'Use our AWS cloudformation template to spin up your runners in just a few clicks!'
- expect(message.body_line1).to be_empty
- expect(message.body_line2).to be_empty
- expect(message.cta_text).to eq 'Create a custom runner'
- expect(message.logo_path).to eq 'mailers/in_product_marketing/admin_verify-0.png'
- end
-
- describe '#progress' do
- subject { message.progress }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
- end
-
- context 'on gitlab.com' do
- let(:is_gitlab_com) { true }
-
- it { is_expected.to eq('This is email 1 of 1 in the Admin series.') }
- end
-
- context 'not on gitlab.com' do
- let(:is_gitlab_com) { false }
-
- it { is_expected.to include('This is email 1 of 1 in the Admin series', Gitlab::Routing.url_helpers.profile_notifications_url) }
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb
deleted file mode 100644
index ab6b1cd6171..00000000000
--- a/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb
+++ /dev/null
@@ -1,108 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Email::Message::InProductMarketing::Base do
- let_it_be(:group) { build(:group) }
- let_it_be(:user) { build(:user) }
-
- let(:series) { 0 }
- let(:test_class) { Gitlab::Email::Message::InProductMarketing::Create }
-
- describe 'initialize' do
- subject { test_class.new(group: group, user: user, series: series) }
-
- context 'when series does not exist' do
- let(:series) { 3 }
-
- it 'raises error' do
- expect { subject }.to raise_error(ArgumentError)
- end
- end
-
- context 'when series exists' do
- let(:series) { 0 }
-
- it 'does not raise error' do
- expect { subject }.not_to raise_error
- end
- end
- end
-
- describe '#logo_path' do
- subject { test_class.new(group: group, user: user, series: series).logo_path }
-
- it { is_expected.to eq('mailers/in_product_marketing/create-0.png') }
- end
-
- describe '#unsubscribe' do
- subject { test_class.new(group: group, user: user, series: series).unsubscribe }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
- end
-
- context 'on gitlab.com' do
- let(:is_gitlab_com) { true }
-
- it { is_expected.to include('%tag_unsubscribe_url%') }
- end
-
- context 'not on gitlab.com' do
- let(:is_gitlab_com) { false }
-
- it { is_expected.to include(Gitlab::Routing.url_helpers.profile_notifications_url) }
- end
- end
-
- describe '#cta_link' do
- subject(:cta_link) { test_class.new(group: group, user: user, series: series).cta_link }
-
- it 'renders link' do
- expect(CGI.unescapeHTML(cta_link)).to include(Gitlab::Routing.url_helpers.group_email_campaigns_url(group, track: :create, series: series))
- end
- end
-
- describe '#progress' do
- subject { test_class.new(group: group, user: user, series: series).progress }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
- end
-
- context 'on gitlab.com' do
- let(:is_gitlab_com) { true }
-
- it { is_expected.to include('This is email 1 of 3 in the Create series') }
- end
-
- context 'not on gitlab.com' do
- let(:is_gitlab_com) { false }
-
- it { is_expected.to include('This is email 1 of 3 in the Create series', Gitlab::Routing.url_helpers.profile_notifications_url) }
- end
- end
-
- describe '#series?' do
- using RSpec::Parameterized::TableSyntax
-
- subject do
- test_class = "Gitlab::Email::Message::InProductMarketing::#{track.to_s.classify}".constantize
- test_class.new(group: group, user: user, series: series).series?
- end
-
- where(:track, :result) do
- :create | true
- :team_short | true
- :trial_short | true
- :admin_verify | true
- :verify | true
- :trial | true
- :team | true
- end
-
- with_them do
- it { is_expected.to eq result }
- end
- end
-end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb
deleted file mode 100644
index d5aec280ea6..00000000000
--- a/spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Email::Message::InProductMarketing::Create do
- using RSpec::Parameterized::TableSyntax
-
- let_it_be(:group) { build(:group) }
- let_it_be(:user) { build(:user) }
-
- subject(:message) { described_class.new(group: group, user: user, series: series) }
-
- describe "public methods" do
- where(series: [0, 1, 2])
-
- with_them do
- it 'returns value for series', :aggregate_failures do
- expect(message.subject_line).to be_present
- expect(message.tagline).to be_present
- expect(message.title).to be_present
- expect(message.subtitle).to be_present
- expect(message.body_line1).to be_present
- expect(message.body_line2).to be_present
- expect(message.cta_text).to be_present
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/team_short_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/team_short_spec.rb
deleted file mode 100644
index 3ac2076bf35..00000000000
--- a/spec/lib/gitlab/email/message/in_product_marketing/team_short_spec.rb
+++ /dev/null
@@ -1,47 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Email::Message::InProductMarketing::TeamShort do
- using RSpec::Parameterized::TableSyntax
-
- let_it_be(:group) { build(:group) }
- let_it_be(:user) { build(:user) }
-
- let(:series) { 0 }
-
- subject(:message) { described_class.new(group: group, user: user, series: series) }
-
- describe 'public methods' do
- it 'returns value for series', :aggregate_failures do
- expect(message.subject_line).to eq 'Team up in GitLab for greater efficiency'
- expect(message.tagline).to be_nil
- expect(message.title).to eq 'Turn coworkers into collaborators'
- expect(message.subtitle).to eq 'Invite your team today to build better code (and processes) together'
- expect(message.body_line1).to be_empty
- expect(message.body_line2).to be_empty
- expect(message.cta_text).to eq 'Invite your colleagues today'
- expect(message.logo_path).to eq 'mailers/in_product_marketing/team-0.png'
- end
-
- describe '#progress' do
- subject { message.progress }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
- end
-
- context 'on gitlab.com' do
- let(:is_gitlab_com) { true }
-
- it { is_expected.to include('This is email 1 of 4 in the Team series') }
- end
-
- context 'not on gitlab.com' do
- let(:is_gitlab_com) { false }
-
- it { is_expected.to include('This is email 1 of 4 in the Team series', Gitlab::Routing.url_helpers.profile_notifications_url) }
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb
deleted file mode 100644
index 3354b2ed5cf..00000000000
--- a/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb
+++ /dev/null
@@ -1,82 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Email::Message::InProductMarketing::Team do
- using RSpec::Parameterized::TableSyntax
-
- let_it_be(:group) { build(:group) }
- let_it_be(:user) { build(:user) }
-
- subject(:message) { described_class.new(group: group, user: user, series: series) }
-
- describe "public methods" do
- where(series: [0, 1])
-
- with_them do
- it 'returns value for series', :aggregate_failures do
- expect(message.subject_line).to be_present
- expect(message.tagline).to be_present
- expect(message.title).to be_present
- expect(message.subtitle).to be_present
- expect(message.body_line1).to be_present
- expect(message.body_line2).to be_present
- expect(message.cta_text).to be_present
- end
-
- describe '#progress' do
- subject { message.progress }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
- end
-
- context 'on gitlab.com' do
- let(:is_gitlab_com) { true }
-
- it { is_expected.to include("This is email #{series + 2} of 4 in the Team series") }
- end
-
- context 'not on gitlab.com' do
- let(:is_gitlab_com) { false }
-
- it { is_expected.to include("This is email #{series + 2} of 4 in the Team series", Gitlab::Routing.url_helpers.profile_notifications_url) }
- end
- end
- end
-
- context 'with series 2' do
- let(:series) { 2 }
-
- it 'returns value for series', :aggregate_failures do
- expect(message.subject_line).to be_present
- expect(message.tagline).to be_nil
- expect(message.title).to be_present
- expect(message.subtitle).to be_present
- expect(message.body_line1).to be_present
- expect(message.body_line2).to be_present
- expect(message.cta_text).to be_present
- end
-
- describe '#progress' do
- subject { message.progress }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
- end
-
- context 'on gitlab.com' do
- let(:is_gitlab_com) { true }
-
- it { is_expected.to include('This is email 4 of 4 in the Team series') }
- end
-
- context 'not on gitlab.com' do
- let(:is_gitlab_com) { false }
-
- it { is_expected.to include('This is email 4 of 4 in the Team series', Gitlab::Routing.url_helpers.profile_notifications_url) }
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/trial_short_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/trial_short_spec.rb
deleted file mode 100644
index cf0a119ea80..00000000000
--- a/spec/lib/gitlab/email/message/in_product_marketing/trial_short_spec.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Email::Message::InProductMarketing::TrialShort do
- let_it_be(:group) { build(:group) }
- let_it_be(:user) { build(:user) }
-
- let(:series) { 0 }
-
- subject(:message) { described_class.new(group: group, user: user, series: series) }
-
- describe 'public methods' do
- it 'returns value for series', :aggregate_failures do
- expect(message.subject_line).to eq 'Be a DevOps hero'
- expect(message.tagline).to be_nil
- expect(message.title).to eq 'Expand your DevOps journey with a free GitLab trial'
- expect(message.subtitle).to eq 'Start your trial today to experience single application success and discover all the features of GitLab Ultimate for free!'
- expect(message.body_line1).to be_empty
- expect(message.body_line2).to be_empty
- expect(message.cta_text).to eq 'Start a trial'
- expect(message.logo_path).to eq 'mailers/in_product_marketing/trial-0.png'
- end
-
- describe '#progress' do
- subject { message.progress }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
- end
-
- context 'on gitlab.com' do
- let(:is_gitlab_com) { true }
-
- it { is_expected.to eq('This is email 1 of 4 in the Trial series.') }
- end
-
- context 'not on gitlab.com' do
- let(:is_gitlab_com) { false }
-
- it { is_expected.to include('This is email 1 of 4 in the Trial series', Gitlab::Routing.url_helpers.profile_notifications_url) }
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb
deleted file mode 100644
index 7f86c9a6c6f..00000000000
--- a/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb
+++ /dev/null
@@ -1,48 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Email::Message::InProductMarketing::Trial do
- using RSpec::Parameterized::TableSyntax
-
- let_it_be(:group) { build(:group) }
- let_it_be(:user) { build(:user) }
-
- subject(:message) { described_class.new(group: group, user: user, series: series) }
-
- describe "public methods" do
- where(series: [0, 1, 2])
-
- with_them do
- it 'returns value for series', :aggregate_failures do
- expect(message.subject_line).to be_present
- expect(message.tagline).to be_present
- expect(message.title).to be_present
- expect(message.subtitle).to be_present
- expect(message.body_line1).to be_present
- expect(message.body_line2).to be_present
- expect(message.cta_text).to be_present
- end
-
- describe '#progress' do
- subject { message.progress }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
- end
-
- context 'on gitlab.com' do
- let(:is_gitlab_com) { true }
-
- it { is_expected.to eq("This is email #{series + 2} of 4 in the Trial series.") }
- end
-
- context 'not on gitlab.com' do
- let(:is_gitlab_com) { false }
-
- it { is_expected.to include("This is email #{series + 2} of 4 in the Trial series", Gitlab::Routing.url_helpers.profile_notifications_url) }
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb
deleted file mode 100644
index 7e6f62289d2..00000000000
--- a/spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb
+++ /dev/null
@@ -1,54 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Email::Message::InProductMarketing::Verify do
- let_it_be(:group) { build(:group) }
- let_it_be(:user) { build(:user) }
-
- subject(:message) { described_class.new(group: group, user: user, series: series) }
-
- describe "public methods" do
- context 'with series 0' do
- let(:series) { 0 }
-
- it 'returns value for series', :aggregate_failures do
- expect(message.subject_line).to be_present
- expect(message.tagline).to be_present
- expect(message.title).to be_present
- expect(message.subtitle).to be_present
- expect(message.body_line1).to be_present
- expect(message.body_line2).to be_nil
- expect(message.cta_text).to be_present
- end
- end
-
- context 'with series 1' do
- let(:series) { 1 }
-
- it 'returns value for series', :aggregate_failures do
- expect(message.subject_line).to be_present
- expect(message.tagline).to be_present
- expect(message.title).to be_present
- expect(message.subtitle).to be_present
- expect(message.body_line1).to be_present
- expect(message.body_line2).to be_present
- expect(message.cta_text).to be_present
- end
- end
-
- context 'with series 2' do
- let(:series) { 2 }
-
- it 'returns value for series', :aggregate_failures do
- expect(message.subject_line).to be_present
- expect(message.tagline).to be_present
- expect(message.title).to be_present
- expect(message.subtitle).to be_present
- expect(message.body_line1).to be_present
- expect(message.body_line2).to be_nil
- expect(message.cta_text).to be_present
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing_spec.rb
deleted file mode 100644
index 1c59d9c8208..00000000000
--- a/spec/lib/gitlab/email/message/in_product_marketing_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Email::Message::InProductMarketing do
- describe '.for' do
- using RSpec::Parameterized::TableSyntax
-
- subject { described_class.for(track) }
-
- context 'when track exists' do
- where(:track, :expected_class) do
- :create | described_class::Create
- :team_short | described_class::TeamShort
- :trial_short | described_class::TrialShort
- :admin_verify | described_class::AdminVerify
- :verify | described_class::Verify
- :trial | described_class::Trial
- :team | described_class::Team
- end
-
- with_them do
- it { is_expected.to eq(expected_class) }
- end
- end
-
- context 'when track does not exist' do
- let(:track) { :non_existent }
-
- it 'raises error' do
- expect { subject }.to raise_error(described_class::UnknownTrackError)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/email/service_desk/custom_email_spec.rb b/spec/lib/gitlab/email/service_desk/custom_email_spec.rb
new file mode 100644
index 00000000000..bba1ca1c8be
--- /dev/null
+++ b/spec/lib/gitlab/email/service_desk/custom_email_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Email::ServiceDesk::CustomEmail, feature_category: :service_desk do
+ let(:reply_key) { 'b7721fc7e8419911a8bea145236a0519' }
+ let(:custom_email) { 'support@example.com' }
+ let(:email_with_reply_key) { 'support+b7721fc7e8419911a8bea145236a0519@example.com' }
+
+ describe '.reply_address' do
+ let_it_be(:project) { create(:project) }
+
+ subject(:reply_address) { described_class.reply_address(nil, nil) }
+
+ it { is_expected.to be nil }
+
+ context 'with reply key' do
+ subject(:reply_address) { described_class.reply_address(nil, reply_key) }
+
+ it { is_expected.to be nil }
+
+ context 'with issue' do
+ let_it_be(:issue) { create(:issue, project: project) }
+
+ subject(:reply_address) { described_class.reply_address(issue, reply_key) }
+
+ it { is_expected.to be nil }
+
+ context 'with service_desk_setting and custom email' do
+ let!(:service_desk_setting) { create(:service_desk_setting, custom_email: custom_email, project: project) }
+
+ it { is_expected.to eq(email_with_reply_key) }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/etag_caching/middleware_spec.rb b/spec/lib/gitlab/etag_caching/middleware_spec.rb
index d25511843ff..8df4a9fa84a 100644
--- a/spec/lib/gitlab/etag_caching/middleware_spec.rb
+++ b/spec/lib/gitlab/etag_caching/middleware_spec.rb
@@ -7,8 +7,8 @@ RSpec.describe Gitlab::EtagCaching::Middleware, :clean_gitlab_redis_shared_state
let(:middleware) { described_class.new(app) }
let(:app_status_code) { 200 }
let(:if_none_match) { nil }
- let(:enabled_path) { '/gitlab-org/gitlab-foss/noteable/issue/1/notes' }
- let(:endpoint) { 'issue_notes' }
+ let(:enabled_path) { '/gitlab-org/gitlab-foss/commit/aaaaaaaa/pipelines.json' }
+ let(:endpoint) { 'commit_pipelines' }
describe '.skip!' do
it 'sets the skip header on the response' do
@@ -124,12 +124,12 @@ RSpec.describe Gitlab::EtagCaching::Middleware, :clean_gitlab_redis_shared_state
method: 'GET',
path: enabled_path,
status: status_code,
- request_urgency: :medium,
- target_duration_s: 0.5,
+ request_urgency: :low,
+ target_duration_s: 5,
metadata: a_hash_including(
{
- 'meta.caller_id' => 'Projects::NotesController#index',
- 'meta.feature_category' => 'team_planning'
+ 'meta.caller_id' => 'Projects::CommitController#pipelines',
+ 'meta.feature_category' => 'source_code_management'
}
)
}
@@ -185,8 +185,8 @@ RSpec.describe Gitlab::EtagCaching::Middleware, :clean_gitlab_redis_shared_state
it "pushes expected information in to the context" do
expect(Gitlab::ApplicationContext).to receive(:push).with(
- feature_category: 'team_planning',
- caller_id: 'Projects::NotesController#index',
+ feature_category: 'source_code_management',
+ caller_id: 'Projects::CommitController#pipelines',
remote_ip: '127.0.0.1'
)
diff --git a/spec/lib/gitlab/etag_caching/router/rails_spec.rb b/spec/lib/gitlab/etag_caching/router/rails_spec.rb
index 251f634aac1..de260f43dfb 100644
--- a/spec/lib/gitlab/etag_caching/router/rails_spec.rb
+++ b/spec/lib/gitlab/etag_caching/router/rails_spec.rb
@@ -3,20 +3,6 @@
require 'spec_helper'
RSpec.describe Gitlab::EtagCaching::Router::Rails do
- it 'matches issue notes endpoint' do
- result = match_route('/my-group/and-subgroup/here-comes-the-project/noteable/issue/1/notes')
-
- expect(result).to be_present
- expect(result.name).to eq 'issue_notes'
- end
-
- it 'matches MR notes endpoint' do
- result = match_route('/my-group/and-subgroup/here-comes-the-project/noteable/merge_request/1/notes')
-
- expect(result).to be_present
- expect(result.name).to eq 'merge_request_notes'
- end
-
it 'matches issue title endpoint' do
result = match_route('/my-group/my-project/-/issues/123/realtime_changes')
diff --git a/spec/lib/gitlab/etag_caching/store_spec.rb b/spec/lib/gitlab/etag_caching/store_spec.rb
index 6188a3fc8b3..117480f2a99 100644
--- a/spec/lib/gitlab/etag_caching/store_spec.rb
+++ b/spec/lib/gitlab/etag_caching/store_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::EtagCaching::Store, :clean_gitlab_redis_shared_state do
+RSpec.describe Gitlab::EtagCaching::Store, :clean_gitlab_redis_cache do
let(:store) { described_class.new }
describe '#get' do
diff --git a/spec/lib/gitlab/event_store/store_spec.rb b/spec/lib/gitlab/event_store/store_spec.rb
index bbdfecc897a..04d0706c130 100644
--- a/spec/lib/gitlab/event_store/store_spec.rb
+++ b/spec/lib/gitlab/event_store/store_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::EventStore::Store do
+RSpec.describe Gitlab::EventStore::Store, feature_category: :shared do
let(:event_klass) { stub_const('TestEvent', Class.new(Gitlab::EventStore::Event)) }
let(:event) { event_klass.new(data: data) }
let(:another_event_klass) { stub_const('TestAnotherEvent', Class.new(Gitlab::EventStore::Event)) }
@@ -222,8 +222,6 @@ RSpec.describe Gitlab::EventStore::Store do
end
end
- let(:event) { event_klass.new(data: data) }
-
it 'dispatches the event to the workers satisfying the condition' do
expect(worker).to receive(:perform_async).with('TestEvent', serialized_data)
expect(another_worker).not_to receive(:perform_async)
@@ -232,6 +230,20 @@ RSpec.describe Gitlab::EventStore::Store do
end
end
+ context 'when subscription has delayed dispatching of event' do
+ let(:store) do
+ described_class.new do |s|
+ s.subscribe worker, to: event_klass, delay: 1.minute
+ end
+ end
+
+ it 'dispatches the event to the worker after some time' do
+ expect(worker).to receive(:perform_in).with(1.minute, 'TestEvent', serialized_data)
+
+ store.publish(event)
+ end
+ end
+
context 'when the event does not have any subscribers' do
let(:store) do
described_class.new do |s|
@@ -239,8 +251,6 @@ RSpec.describe Gitlab::EventStore::Store do
end
end
- let(:event) { event_klass.new(data: data) }
-
it 'returns successfully' do
expect { store.publish(event) }.not_to raise_error
end
diff --git a/spec/lib/gitlab/experiment/rollout/feature_spec.rb b/spec/lib/gitlab/experiment/rollout/feature_spec.rb
index a66f4fea207..cd46e7b3386 100644
--- a/spec/lib/gitlab/experiment/rollout/feature_spec.rb
+++ b/spec/lib/gitlab/experiment/rollout/feature_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Experiment::Rollout::Feature, :experiment do
- subject { described_class.new.for(subject_experiment) }
+ subject { described_class.new(subject_experiment) }
let(:subject_experiment) { experiment('namespaced/stub') }
diff --git a/spec/lib/gitlab/git/blame_spec.rb b/spec/lib/gitlab/git/blame_spec.rb
index d21ac36bf34..77361b09857 100644
--- a/spec/lib/gitlab/git/blame_spec.rb
+++ b/spec/lib/gitlab/git/blame_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe Gitlab::Git::Blame do
+RSpec.describe Gitlab::Git::Blame, feature_category: :source_code_management do
let(:project) { create(:project, :repository) }
let(:repository) { project.repository.raw }
let(:sha) { TestEnv::BRANCH_SHA['master'] }
@@ -38,6 +38,14 @@ RSpec.describe Gitlab::Git::Blame do
expect(result.size).to eq(range.size)
expect(result.map { |r| r[:line] }).to eq(['', 'This guide details how contribute to GitLab.', ''])
end
+
+ context 'when range is outside of the file content range' do
+ let(:range) { 9999..10000 }
+
+ it 'returns an empty array' do
+ expect(result).to eq([])
+ end
+ end
end
context "ISO-8859 encoding" do
diff --git a/spec/lib/gitlab/git/diff_spec.rb b/spec/lib/gitlab/git/diff_spec.rb
index 6745c700b92..4d78e194da8 100644
--- a/spec/lib/gitlab/git/diff_spec.rb
+++ b/spec/lib/gitlab/git/diff_spec.rb
@@ -131,6 +131,31 @@ EOT
expect(diff.diff).to be_utf8
end
end
+
+ context 'using a diff that is too large but collecting all paths' do
+ let(:gitaly_diff) do
+ Gitlab::GitalyClient::Diff.new(
+ from_path: '.gitmodules',
+ to_path: '.gitmodules',
+ old_mode: 0100644,
+ new_mode: 0100644,
+ from_id: '0792c58905eff3432b721f8c4a64363d8e28d9ae',
+ to_id: 'efd587ccb47caf5f31fc954edb21f0a713d9ecc3',
+ overflow_marker: true,
+ collapsed: false,
+ too_large: false,
+ patch: ''
+ )
+ end
+
+ let(:diff) { described_class.new(gitaly_diff) }
+
+ it 'is already pruned and collapsed but not too large' do
+ expect(diff.diff).to be_empty
+ expect(diff).not_to be_too_large
+ expect(diff).to be_collapsed
+ end
+ end
end
context 'using a Gitaly::CommitDelta' do
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index e27b97ea0e6..18a090a00be 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -589,6 +589,37 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
end
end
+ describe '#update_refs' do
+ let(:repository) { mutable_repository }
+ let(:sha) { TestEnv::BRANCH_SHA['master'] }
+ let(:tmp_ref) { "refs/tmp/#{SecureRandom.hex}" }
+
+ before do
+ repository.write_ref(tmp_ref, sha)
+ end
+
+ it 'updates the ref' do
+ expect do
+ repository.update_refs(
+ [
+ {
+ old_sha: sha,
+ new_sha: Gitlab::Git::BLANK_SHA,
+ reference: tmp_ref
+ }
+ ]
+ )
+ end.to change { repository.ref_exists?(tmp_ref) }
+ .from(true).to(false)
+ end
+
+ it 'does not call Gitaly when no refs are given' do
+ expect_any_instance_of(Gitlab::GitalyClient::RefService).not_to receive(:update_refs)
+
+ repository.update_refs([])
+ end
+ end
+
describe '#delete_refs' do
let(:repository) { mutable_repository }
diff --git a/spec/lib/gitlab/git_access_snippet_spec.rb b/spec/lib/gitlab/git_access_snippet_spec.rb
index 9ba021e838e..7916481a853 100644
--- a/spec/lib/gitlab/git_access_snippet_spec.rb
+++ b/spec/lib/gitlab/git_access_snippet_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Gitlab::GitAccessSnippet do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public) }
let_it_be(:snippet) { create(:project_snippet, :public, :repository, project: project) }
- let_it_be(:migration_bot) { User.migration_bot }
+ let_it_be(:migration_bot) { Users::Internal.migration_bot }
let(:repository) { snippet.repository }
let(:actor) { user }
diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
index 9055b284119..bd0341d51bf 100644
--- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
@@ -567,20 +567,58 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source
end
end
- describe '#user_cherry_pick' do
+ describe '#user_cherry_pick', :freeze_time do
let(:response_class) { Gitaly::UserCherryPickResponse }
+ let(:sha) { '54cec5282aa9f21856362fe321c800c236a61615' }
+ let(:branch_name) { 'master' }
+ let(:cherry_pick_message) { 'Cherry-pick message' }
+ let(:time) { Time.now.utc }
+
+ let(:branch_update) do
+ Gitaly::OperationBranchUpdate.new(
+ commit_id: sha,
+ repo_created: false,
+ branch_created: false
+ )
+ end
+
+ let(:request) do
+ Gitaly::UserCherryPickRequest.new(
+ repository: repository.gitaly_repository,
+ user: gitaly_user,
+ commit: repository.commit.to_gitaly_commit,
+ branch_name: branch_name,
+ start_branch_name: branch_name,
+ start_repository: repository.gitaly_repository,
+ message: cherry_pick_message,
+ timestamp: Google::Protobuf::Timestamp.new(seconds: time.to_i)
+ )
+ end
+
+ let(:response) { Gitaly::UserCherryPickResponse.new(branch_update: branch_update) }
subject do
client.user_cherry_pick(
user: user,
commit: repository.commit,
- branch_name: 'master',
- message: 'Cherry-pick message',
- start_branch_name: 'master',
+ branch_name: branch_name,
+ message: cherry_pick_message,
+ start_branch_name: branch_name,
start_repository: repository
)
end
+ it 'sends a user_cherry_pick message and returns a BranchUpdate' do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_cherry_pick).with(request, kind_of(Hash))
+ .and_return(response)
+
+ expect(subject).to be_a(Gitlab::Git::OperationService::BranchUpdate)
+ expect(subject.newrev).to be_present
+ expect(subject.repo_created).to be(false)
+ expect(subject.branch_created).to be(false)
+ end
+
context 'when AccessCheckError is raised' do
let(:raised_error) do
new_detailed_error(
@@ -641,27 +679,68 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source
end
end
- describe '#user_revert' do
- let(:response_class) { Gitaly::UserRevertResponse }
+ describe '#user_revert', :freeze_time do
+ let(:sha) { '54cec5282aa9f21856362fe321c800c236a61615' }
+ let(:branch_name) { 'master' }
+ let(:revert_message) { 'revert message' }
+ let(:time) { Time.now.utc }
+
+ let(:branch_update) do
+ Gitaly::OperationBranchUpdate.new(
+ commit_id: sha,
+ repo_created: false,
+ branch_created: false
+ )
+ end
+
+ let(:request) do
+ Gitaly::UserRevertRequest.new(
+ repository: repository.gitaly_repository,
+ user: gitaly_user,
+ commit: repository.commit.to_gitaly_commit,
+ branch_name: branch_name,
+ start_branch_name: branch_name,
+ start_repository: repository.gitaly_repository,
+ message: revert_message,
+ timestamp: Google::Protobuf::Timestamp.new(seconds: time.to_i)
+ )
+ end
+
+ let(:response) { Gitaly::UserRevertResponse.new(branch_update: branch_update) }
subject do
client.user_revert(
user: user,
commit: repository.commit,
- branch_name: 'master',
- message: 'Revert message',
- start_branch_name: 'master',
+ branch_name: branch_name,
+ message: revert_message,
+ start_branch_name: branch_name,
start_repository: repository
)
end
- before do
+ it 'sends a user_revert message and returns a BranchUpdate' do
expect_any_instance_of(Gitaly::OperationService::Stub)
- .to receive(:user_revert).with(kind_of(Gitaly::UserRevertRequest), kind_of(Hash))
- .and_return(response)
+ .to receive(:user_revert).with(request, kind_of(Hash))
+ .and_return(response)
+
+ expect(subject).to be_a(Gitlab::Git::OperationService::BranchUpdate)
+ expect(subject.newrev).to be_present
+ expect(subject.repo_created).to be(false)
+ expect(subject.branch_created).to be(false)
end
- it_behaves_like 'cherry pick and revert errors'
+ context 'when errors are raised' do
+ let(:response_class) { Gitaly::UserRevertResponse }
+
+ before do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_revert).with(kind_of(Gitaly::UserRevertRequest), kind_of(Hash))
+ .and_return(response)
+ end
+
+ it_behaves_like 'cherry pick and revert errors'
+ end
end
describe '#rebase' do
diff --git a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
index fe04ad36e9a..ae9276cf90b 100644
--- a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
@@ -314,6 +314,116 @@ RSpec.describe Gitlab::GitalyClient::RefService, feature_category: :gitaly do
end
end
+ describe '#update_refs' do
+ let(:old_sha) { '0b4bc9a49b562e85de7cc9e834518ea6828729b9' }
+ let(:new_sha) { Gitlab::Git::EMPTY_TREE_ID }
+ let(:reference) { 'refs/does/not/exist' }
+ let(:expected_param) do
+ Gitaly::UpdateReferencesRequest::Update.new(
+ old_object_id: old_sha,
+ new_object_id: new_sha,
+ reference: reference
+ )
+ end
+
+ let(:ref_list) do
+ [
+ {
+ old_sha: old_sha,
+ new_sha: new_sha,
+ reference: reference
+ }
+ ]
+ end
+
+ subject(:update_refs) { client.update_refs(ref_list: ref_list) }
+
+    it 'sends an update_refs message' do
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:update_references)
+ .with(array_including(gitaly_request_with_params(updates: [expected_param])), kind_of(Hash))
+ .and_return(double('update_refs_response', git_error: ""))
+
+ update_refs
+ end
+
+ context 'with a generic BadStatus error' do
+ let(:generic_error) do
+ GRPC::BadStatus.new(
+ GRPC::Core::StatusCodes::FAILED_PRECONDITION,
+ "error message"
+ )
+ end
+
+ it 'raises the BadStatus error' do
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:update_references)
+ .with(array_including(gitaly_request_with_params(updates: [expected_param])), kind_of(Hash))
+ .and_raise(generic_error)
+
+ expect { update_refs }.to raise_error(GRPC::BadStatus)
+ end
+ end
+
+ context 'with a reference state mismatch error' do
+ let(:reference_state_mismatch_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::FAILED_PRECONDITION,
+ "error message",
+ Gitaly::UpdateReferencesError.new(reference_state_mismatch: Gitaly::ReferenceStateMismatchError.new))
+ end
+
+      it 'raises ReferenceStateMismatchError' do
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:update_references)
+ .with(array_including(gitaly_request_with_params(updates: [expected_param])), kind_of(Hash))
+ .and_raise(reference_state_mismatch_error)
+
+ expect { update_refs }.to raise_error(Gitlab::Git::ReferenceStateMismatchError)
+ end
+ end
+
+ context 'with a references locked error' do
+ let(:references_locked_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::FAILED_PRECONDITION,
+ "error message",
+ Gitaly::UpdateReferencesError.new(references_locked: Gitaly::ReferencesLockedError.new))
+ end
+
+ it 'raises ReferencesLockedError' do
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:update_references)
+ .with(array_including(gitaly_request_with_params(updates: [expected_param])), kind_of(Hash))
+ .and_raise(references_locked_error)
+
+ expect { update_refs }.to raise_error(Gitlab::Git::ReferencesLockedError)
+ end
+ end
+
+    context 'with an invalid format error' do
+ let(:invalid_refs) { ['\invali.\d/1', '\.invali/d/2'] }
+ let(:invalid_reference_format_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::INVALID_ARGUMENT,
+ "error message",
+ Gitaly::UpdateReferencesError.new(invalid_format: Gitaly::InvalidRefFormatError.new(refs: invalid_refs)))
+ end
+
+ it 'raises InvalidRefFormatError' do
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:update_references)
+ .with(array_including(gitaly_request_with_params(updates: [expected_param])), kind_of(Hash))
+ .and_raise(invalid_reference_format_error)
+
+ expect { update_refs }.to raise_error do |error|
+ expect(error).to be_a(Gitlab::Git::InvalidRefFormatError)
+ expect(error.message).to eq("references have an invalid format: #{invalid_refs.join(",")}")
+ end
+ end
+ end
+ end
+
describe '#delete_refs' do
let(:prefixes) { %w(refs/heads refs/keep-around) }
diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
index d8ae7d70bb2..8e0e4525729 100644
--- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
@@ -319,19 +319,8 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService, feature_category: :gital
end
end
- describe '#create_from_snapshot' do
- it 'sends a create_repository_from_snapshot message' do
- expect_any_instance_of(Gitaly::RepositoryService::Stub)
- .to receive(:create_repository_from_snapshot)
- .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return(double)
-
- client.create_from_snapshot('http://example.com?wiki=1', 'Custom xyz')
- end
- end
-
describe '#raw_changes_between' do
- it 'sends a create_repository_from_snapshot message' do
+ it 'sends a get_raw_changes message' do
expect_any_instance_of(Gitaly::RepositoryService::Stub)
.to receive(:get_raw_changes)
.with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
diff --git a/spec/lib/gitlab/gitaly_client/with_feature_flag_actors_spec.rb b/spec/lib/gitlab/gitaly_client/with_feature_flag_actors_spec.rb
index 42153a9a3d8..49b4f90cdf9 100644
--- a/spec/lib/gitlab/gitaly_client/with_feature_flag_actors_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/with_feature_flag_actors_spec.rb
@@ -10,6 +10,8 @@ RSpec.describe Gitlab::GitalyClient::WithFeatureFlagActors do
end.new
end
+ let_it_be(:group) { create(:group) }
+
describe '#user_actor' do
context 'when user is not available in ApplicationContext' do
it 'returns nil' do
@@ -40,7 +42,7 @@ RSpec.describe Gitlab::GitalyClient::WithFeatureFlagActors do
describe '#repository, #project_actor, #group_actor' do
context 'when normal project repository' do
- let_it_be(:project) { create(:project, group: create(:group)) }
+ let_it_be(:project) { create(:project, group: group) }
let(:expected_project) { project }
let(:expected_group) { Feature::Gitaly::ActorWrapper.new(::Group, project.group.id) }
@@ -58,7 +60,7 @@ RSpec.describe Gitlab::GitalyClient::WithFeatureFlagActors do
end
context 'when project wiki repository' do
- let_it_be(:project) { create(:project, :wiki_repo, group: create(:group)) }
+ let_it_be(:project) { create(:project, :wiki_repo, group: group) }
let(:expected_project) { nil }
let(:expected_group) { nil }
@@ -112,7 +114,7 @@ RSpec.describe Gitlab::GitalyClient::WithFeatureFlagActors do
end
context 'when project snippet' do
- let_it_be(:project) { create(:project, group: create(:group)) }
+ let_it_be(:project) { create(:project, group: group) }
let(:snippet) { create(:project_snippet, project: project) }
let(:expected_project) { nil }
let(:expected_group) { nil }
@@ -131,23 +133,20 @@ RSpec.describe Gitlab::GitalyClient::WithFeatureFlagActors do
end
context 'when project design' do
- let_it_be(:design_repo) do
- create(:design_management_repository, project: create(:project, group: create(:group)))
- end
-
- let(:expected_project) { design_repo.project }
- let(:expected_group) { design_repo.project.group }
+ let_it_be(:project) { create(:project_with_design, group: group) }
+ let(:expected_project) { project }
+ let(:expected_group) { group }
it_behaves_like 'Gitaly feature flag actors are inferred from repository' do
- let(:repository) { design_repo.repository }
+ let(:repository) { project.design_repository }
end
it_behaves_like 'Gitaly feature flag actors are inferred from repository' do
- let(:repository) { design_repo.repository.raw }
+ let(:repository) { project.design_repository.raw }
end
it_behaves_like 'Gitaly feature flag actors are inferred from repository' do
- let(:repository) { raw_repo_without_container(design_repo.repository) }
+ let(:repository) { raw_repo_without_container(project.design_repository) }
end
end
end
diff --git a/spec/lib/gitlab/github_import/attachments_downloader_spec.rb b/spec/lib/gitlab/github_import/attachments_downloader_spec.rb
index 086aa4be17e..72d8a9c0403 100644
--- a/spec/lib/gitlab/github_import/attachments_downloader_spec.rb
+++ b/spec/lib/gitlab/github_import/attachments_downloader_spec.rb
@@ -93,6 +93,57 @@ RSpec.describe Gitlab::GithubImport::AttachmentsDownloader, feature_category: :i
expect(File.basename(file)).to eq('av.png')
end
end
+
+ context 'when attachment is behind a redirect' do
+ let_it_be(:file_url) { "https://github.com/test/project/assets/142635249/4b9f9c90-f060-4845-97cf-b24c558bcb11" }
+ let(:redirect_url) { "https://https://github-production-user-asset-6210df.s3.amazonaws.com/142635249/740edb05293e.jpg" }
+ let(:sample_response) do
+ instance_double(HTTParty::Response, redirection?: true, headers: { location: redirect_url })
+ end
+
+ it 'gets redirection url' do
+ expect(Gitlab::HTTP).to receive(:perform_request)
+ .with(Net::HTTP::Get, file_url, { follow_redirects: false })
+ .and_return sample_response
+
+ expect(Gitlab::HTTP).to receive(:perform_request)
+ .with(Net::HTTP::Get, redirect_url, stream_body: true).and_yield(chunk_double)
+
+ file = downloader.perform
+
+ expect(File.exist?(file.path)).to eq(true)
+ end
+
+ context 'when url is not a redirection' do
+ let(:sample_response) do
+ instance_double(HTTParty::Response, code: 200, redirection?: false)
+ end
+
+ before do
+ allow(Gitlab::HTTP).to receive(:perform_request)
+ .with(Net::HTTP::Get, file_url, { follow_redirects: false })
+ .and_return sample_response
+ end
+
+        it 'raises an error when the response is not a redirect' do
+ expect { downloader.perform }.to raise_error("expected a redirect response, got #{sample_response.code}")
+ end
+ end
+
+ context 'when redirection url is not supported' do
+ let(:redirect_url) { "https://https://github-production-user-asset-6210df.s3.amazonaws.com/142635249/740edb05293e.idk" }
+
+ before do
+ allow(Gitlab::HTTP).to receive(:perform_request)
+ .with(Net::HTTP::Get, file_url, { follow_redirects: false })
+ .and_return sample_response
+ end
+
+ it 'raises UnsupportedAttachmentError on unsupported extension' do
+ expect { downloader.perform }.to raise_error(described_class::UnsupportedAttachmentError)
+ end
+ end
+ end
end
describe '#delete' do
diff --git a/spec/lib/gitlab/github_import/client_spec.rb b/spec/lib/gitlab/github_import/client_spec.rb
index c9f7fd4f748..4b0d61e3188 100644
--- a/spec/lib/gitlab/github_import/client_spec.rb
+++ b/spec/lib/gitlab/github_import/client_spec.rb
@@ -20,11 +20,29 @@ RSpec.describe Gitlab::GithubImport::Client, feature_category: :importers do
end
describe '#user' do
+ let(:status_code) { 200 }
+ let(:body) { { id: 1 } }
+ let(:headers) { { 'Content-Type' => 'application/json' } }
+
+ before do
+ stub_request(:get, 'https://api.github.com/users/foo')
+ .to_return(status: status_code, body: body.to_json, headers: headers)
+ end
+
+ subject(:user) { client.user('foo') }
+
it 'returns the details for the given username' do
- expect(client.octokit).to receive(:user).with('foo')
expect(client).to receive(:with_rate_limit).and_yield
+ expect(user).to eq({ id: 1 })
+ end
+
+ context 'when a not modified response is returned' do
+ let(:status_code) { 304 }
- client.user('foo')
+ it 'returns nil' do
+ expect(client).to receive(:with_rate_limit).and_yield
+ expect(user).to eq(nil)
+ end
end
end
diff --git a/spec/lib/gitlab/github_import/importer/note_attachments_importer_spec.rb b/spec/lib/gitlab/github_import/importer/note_attachments_importer_spec.rb
index 450ebe9a719..b9829c09cfd 100644
--- a/spec/lib/gitlab/github_import/importer/note_attachments_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/note_attachments_importer_spec.rb
@@ -53,6 +53,19 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteAttachmentsImporter, feature_
record.reload
expect(record.description).to include("[link to other project blob file](#{other_project_blob_url})")
end
+
+ context 'with new github image format' do
+ let(:image_url) { 'https://github.com/nickname/public-test-repo/assets/142635249/4b9f9c90-f060-4845-97cf-b24c558bcb11' }
+ let(:image_tag_url) { 'https://github.com/nickname/public-test-repo/assets/142635249/4b9f9c90-f060-4845-97cf-b24c558bcb11' }
+
+ it 'changes image attachment links' do
+ importer.execute
+
+ record.reload
+ expect(record.description).to include('![image.jpeg](/uploads/')
+ expect(record.description).to include('<img width="248" alt="tag-image" src="/uploads')
+ end
+ end
end
describe '#execute' do
@@ -60,16 +73,19 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteAttachmentsImporter, feature_
let(:tmp_stub_doc) { Tempfile.create('attachment_download_test.txt') }
let(:tmp_stub_image) { Tempfile.create('image.jpeg') }
let(:tmp_stub_image_tag) { Tempfile.create('image-tag.jpeg') }
+ let(:access_token) { 'exampleGitHubToken' }
+ let(:options) { { headers: { 'Authorization' => "Bearer #{access_token}" } } }
before do
- allow(Gitlab::GithubImport::AttachmentsDownloader).to receive(:new).with(doc_url)
+ allow(Gitlab::GithubImport::AttachmentsDownloader).to receive(:new).with(doc_url, options: options)
.and_return(downloader_stub)
- allow(Gitlab::GithubImport::AttachmentsDownloader).to receive(:new).with(image_url)
+ allow(Gitlab::GithubImport::AttachmentsDownloader).to receive(:new).with(image_url, options: options)
.and_return(downloader_stub)
- allow(Gitlab::GithubImport::AttachmentsDownloader).to receive(:new).with(image_tag_url)
+ allow(Gitlab::GithubImport::AttachmentsDownloader).to receive(:new).with(image_tag_url, options: options)
.and_return(downloader_stub)
allow(downloader_stub).to receive(:perform).and_return(tmp_stub_doc, tmp_stub_image, tmp_stub_image_tag)
allow(downloader_stub).to receive(:delete).exactly(3).times
+ allow(client).to receive_message_chain(:octokit, :access_token).and_return(access_token)
end
context 'when importing release attachments' do
@@ -118,5 +134,24 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteAttachmentsImporter, feature_
expect(record.note).to include("[link to other project blob file](#{other_project_blob_url})")
end
end
+
+ context "when attachment behind redirection link is unsupported file type" do
+ let(:record) { create(:issue, project: project, description: text) }
+ let(:image_url) { 'https://github.com/nickname/public-test-repo/assets/142635249/123' }
+ let(:image_tag_url) { 'https://github.com/nickname/public-test-repo/assets/142635249/123' }
+
+ before do
+ allow(downloader_stub).to receive(:perform)
+ .and_raise(Gitlab::GithubImport::AttachmentsDownloader::UnsupportedAttachmentError)
+ end
+
+ it "does not replace url" do
+ importer.execute
+
+ record.reload
+ expect(record.description).to include("![image.jpeg](#{image_url}")
+ expect(record.description).to include("<img width=\"248\" alt=\"tag-image\" src=\"#{image_tag_url}\"")
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests/merged_by_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests/merged_by_importer_spec.rb
index 25381594632..8cd96295bbd 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests/merged_by_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests/merged_by_importer_spec.rb
@@ -26,6 +26,10 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::MergedByImporter,
subject { described_class.new(pull_request, project, client_double) }
+ before do
+ allow(client_double).to receive_message_chain(:octokit, :last_response, :headers).and_return({ etag: nil })
+ end
+
shared_examples 'adds a note referencing the merger user' do
it 'adds a note referencing the merger user' do
expect { subject.execute }
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests/review_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests/review_importer_spec.rb
index ba14ea603e0..6846c99fb63 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests/review_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests/review_importer_spec.rb
@@ -17,6 +17,10 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewImporter,
)
end
+ before do
+ allow(client_double).to receive_message_chain(:octokit, :last_response, :headers).and_return({ etag: nil })
+ end
+
subject { described_class.new(review, project, client_double) }
shared_examples 'imports a reviewer for the Merge Request' do
diff --git a/spec/lib/gitlab/github_import/markdown/attachment_spec.rb b/spec/lib/gitlab/github_import/markdown/attachment_spec.rb
index 84b0886ebcc..5b9d077aac9 100644
--- a/spec/lib/gitlab/github_import/markdown/attachment_spec.rb
+++ b/spec/lib/gitlab/github_import/markdown/attachment_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Markdown::Attachment, feature_category: :importers do
let(:name) { FFaker::Lorem.word }
let(:url) { FFaker::Internet.uri('https') }
+ let(:import_source) { 'nickname/public-test-repo' }
describe '.from_markdown' do
context "when it's a doc attachment" do
@@ -75,6 +76,17 @@ RSpec.describe Gitlab::GithubImport::Markdown::Attachment, feature_category: :im
it { expect(described_class.from_markdown(markdown_node)).to eq nil }
end
+
+ context 'when image attachment is in the new format' do
+ let(:url) { "https://github.com/#{import_source}/assets/142635249/4b9f9c90-f060-4845-97cf-b24c558bcb11" }
+
+ it 'returns instance with attachment info' do
+ attachment = described_class.from_markdown(markdown_node)
+
+ expect(attachment.name).to eq name
+ expect(attachment.url).to eq url
+ end
+ end
end
context "when it's an inline html node" do
@@ -103,7 +115,6 @@ RSpec.describe Gitlab::GithubImport::Markdown::Attachment, feature_category: :im
describe '#part_of_project_blob?' do
let(:attachment) { described_class.new('test', url) }
- let(:import_source) { 'nickname/public-test-repo' }
context 'when url is a part of project blob' do
let(:url) { "https://github.com/#{import_source}/blob/main/example.md" }
@@ -120,7 +131,6 @@ RSpec.describe Gitlab::GithubImport::Markdown::Attachment, feature_category: :im
describe '#doc_belongs_to_project?' do
let(:attachment) { described_class.new('test', url) }
- let(:import_source) { 'nickname/public-test-repo' }
context 'when url relates to this project' do
let(:url) { "https://github.com/#{import_source}/files/9020437/git-cheat-sheet.txt" }
@@ -147,13 +157,19 @@ RSpec.describe Gitlab::GithubImport::Markdown::Attachment, feature_category: :im
context 'when it is a media link' do
let(:url) { 'https://user-images.githubusercontent.com/6833842/0cf366b61ef2.jpeg' }
- it { expect(attachment.media?).to eq true }
+ it { expect(attachment.media?(import_source)).to eq true }
+
+ context 'when it is a new media link' do
+ let(:url) { "https://github.com/#{import_source}/assets/142635249/4b9f9c90-f060-4845-97cf-b24c558bcb11" }
+
+ it { expect(attachment.media?(import_source)).to eq true }
+ end
end
context 'when it is not a media link' do
let(:url) { 'https://github.com/nickname/public-test-repo/files/9020437/git-cheat-sheet.txt' }
- it { expect(attachment.media?).to eq false }
+ it { expect(attachment.media?(import_source)).to eq false }
end
end
diff --git a/spec/lib/gitlab/github_import/object_counter_spec.rb b/spec/lib/gitlab/github_import/object_counter_spec.rb
index 92a979eddd2..e41a2cff989 100644
--- a/spec/lib/gitlab/github_import/object_counter_spec.rb
+++ b/spec/lib/gitlab/github_import/object_counter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::ObjectCounter, :clean_gitlab_redis_cache do
+RSpec.describe Gitlab::GithubImport::ObjectCounter, :clean_gitlab_redis_cache, feature_category: :importers do
let_it_be(:project) { create(:project, :import_started, import_type: 'github', import_url: 'https://github.com/vim/vim.git') }
it 'validates the operation being incremented' do
@@ -38,9 +38,6 @@ RSpec.describe Gitlab::GithubImport::ObjectCounter, :clean_gitlab_redis_cache do
expect(Gitlab::Metrics)
.not_to receive(:counter)
- expect(Gitlab::Metrics)
- .not_to receive(:counter)
-
described_class.increment(project, :issue, :fetched, value: 0)
described_class.increment(project, :issue, :imported, value: nil)
@@ -73,6 +70,27 @@ RSpec.describe Gitlab::GithubImport::ObjectCounter, :clean_gitlab_redis_cache do
end
end
+ context 'when import is in progress but cache expired' do
+ before do
+ described_class.increment(project, :issue, :fetched, value: 10)
+ described_class.increment(project, :issue, :imported, value: 8)
+ allow(Gitlab::Cache::Import::Caching).to receive(:read_integer).and_return(nil)
+ end
+
+    it 'returns 0 instead of nil so the process can complete' do
+ expect(described_class.summary(project)).to eq(
+ {
+ "fetched" => {
+ "issue" => 0
+ },
+ "imported" => {
+ "issue" => 0
+ }
+ }
+ )
+ end
+ end
+
context 'when there are no cached import statistics' do
context 'when project import is in progress' do
it 'includes an empty object counts stats in response' do
diff --git a/spec/lib/gitlab/github_import/user_finder_spec.rb b/spec/lib/gitlab/github_import/user_finder_spec.rb
index 1739425c294..a394b4eba13 100644
--- a/spec/lib/gitlab/github_import/user_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/user_finder_spec.rb
@@ -37,11 +37,11 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache, feat
it 'returns the ID of the ghost user when the object has no user' do
note = { author: nil }
- expect(finder.author_id_for(note)).to eq([User.ghost.id, true])
+ expect(finder.author_id_for(note)).to eq([Users::Internal.ghost.id, true])
end
it 'returns the ID of the ghost user when the given object is nil' do
- expect(finder.author_id_for(nil)).to eq([User.ghost.id, true])
+ expect(finder.author_id_for(nil)).to eq([Users::Internal.ghost.id, true])
end
end
@@ -208,57 +208,254 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache, feat
describe '#email_for_github_username' do
let(:email) { 'kittens@example.com' }
+ let(:username) { 'kittens' }
+ let(:user) { {} }
+ let(:etag) { 'etag' }
+ let(:cache_key) { described_class::EMAIL_FOR_USERNAME_CACHE_KEY % username }
+ let(:etag_cache_key) { described_class::USERNAME_ETAG_CACHE_KEY % username }
+ let(:email_fetched_for_project_key) do
+ format(described_class::EMAIL_FETCHED_FOR_PROJECT_CACHE_KEY, project: project.id, username: username)
+ end
- context 'when an Email address is cached' do
- it 'reads the Email address from the cache' do
- expect(Gitlab::Cache::Import::Caching)
- .to receive(:read)
- .and_return(email)
+ subject(:email_for_github_username) { finder.email_for_github_username(username) }
+
+ shared_examples 'returns and caches the email' do
+ it 'returns the email' do
+ expect(email_for_github_username).to eq(email)
+ end
+
+ it 'caches the email and expires the etag and project check caches' do
+ expect(Gitlab::Cache::Import::Caching).to receive(:write).with(cache_key, email).once
+ expect(Gitlab::Cache::Import::Caching).to receive(:expire).with(etag_cache_key, 0).once
+ expect(Gitlab::Cache::Import::Caching).to receive(:expire).with(email_fetched_for_project_key, 0).once
- expect(client).not_to receive(:user)
- expect(finder.email_for_github_username('kittens')).to eq(email)
+ email_for_github_username
+ email_for_github_username
end
end
- context 'when an Email address is not cached' do
- let(:user) { { email: email } }
+ shared_examples 'returns nil and caches a negative lookup' do
+ it 'returns nil' do
+ expect(email_for_github_username).to be_nil
+ end
- it 'retrieves and caches the Email address when an Email address is available' do
- expect(client).to receive(:user).with('kittens').and_return(user).once
+ it 'caches a blank email and marks the project as checked' do
+ expect(Gitlab::Cache::Import::Caching).to receive(:write).with(cache_key, '').once
+ expect(Gitlab::Cache::Import::Caching).not_to receive(:write).with(etag_cache_key, anything)
+ expect(Gitlab::Cache::Import::Caching).to receive(:write).with(email_fetched_for_project_key, 1).once
- expect(Gitlab::Cache::Import::Caching)
- .to receive(:write)
- .with(an_instance_of(String), email, timeout: Gitlab::Cache::Import::Caching::TIMEOUT).and_call_original
+ email_for_github_username
+ email_for_github_username
+ end
+ end
- expect(finder.email_for_github_username('kittens')).to eq(email)
- expect(finder.email_for_github_username('kittens')).to eq(email)
+ shared_examples 'does not change caches' do
+ it 'does not write to any of the caches' do
+ expect(Gitlab::Cache::Import::Caching).not_to receive(:write).with(cache_key, anything)
+ expect(Gitlab::Cache::Import::Caching).not_to receive(:write).with(etag_cache_key, anything)
+ expect(Gitlab::Cache::Import::Caching).not_to receive(:write).with(email_fetched_for_project_key, anything)
+
+ email_for_github_username
+ email_for_github_username
end
+ end
- it 'shortens the timeout for Email address in cache when an Email address is private/nil from GitHub' do
- user = { email: nil }
- expect(client).to receive(:user).with('kittens').and_return(user).once
+ shared_examples 'a user resource not found on GitHub' do
+ before do
+ allow(client).to receive(:user).and_raise(::Octokit::NotFound)
+ end
- expect(Gitlab::Cache::Import::Caching)
- .to receive(:write)
- .with(an_instance_of(String), '', timeout: Gitlab::Cache::Import::Caching::SHORTER_TIMEOUT)
- .and_call_original
+ it 'returns nil' do
+ expect(email_for_github_username).to be_nil
+ end
+
+ it 'caches a blank email' do
+ expect(Gitlab::Cache::Import::Caching).to receive(:write).with(cache_key, '').once
+ expect(Gitlab::Cache::Import::Caching).not_to receive(:write).with(etag_cache_key, anything)
+ expect(Gitlab::Cache::Import::Caching).not_to receive(:write).with(email_fetched_for_project_key, anything)
+
+ email_for_github_username
+ email_for_github_username
+ end
+ end
+
+ context 'when the email is cached' do
+ before do
+ Gitlab::Cache::Import::Caching.write(cache_key, email)
+ end
+
+ it 'returns the email from the cache' do
+ expect(email_for_github_username).to eq(email)
+ end
+
+ it 'does not make a rate-limited API call' do
+ expect(client).not_to receive(:user).with(username, { headers: {} })
+
+ email_for_github_username
+ email_for_github_username
+ end
+ end
+
+ context 'when the email cache is nil' do
+ context 'if the email has not been checked for the project' do
+ context 'if the cached etag is nil' do
+ before do
+ allow(client).to receive_message_chain(:octokit, :last_response, :headers).and_return({ etag: etag })
+ end
+
+ it 'makes an API call' do
+ expect(client).to receive(:user).with(username, { headers: {} }).and_return({ email: email }).once
+
+ email_for_github_username
+ end
+
+ context 'if the response contains an email' do
+ before do
+ allow(client).to receive(:user).and_return({ email: email })
+ end
+
+ it_behaves_like 'returns and caches the email'
+ end
+
+ context 'if the response does not contain an email' do
+ before do
+ allow(client).to receive(:user).and_return({})
+ end
+
+ it 'returns nil' do
+ expect(email_for_github_username).to be_nil
+ end
+
+ it 'caches a blank email and etag and marks the project as checked' do
+ expect(Gitlab::Cache::Import::Caching).to receive(:write).with(cache_key, '').once
+ expect(Gitlab::Cache::Import::Caching).to receive(:write).with(etag_cache_key, etag).once
+ expect(Gitlab::Cache::Import::Caching).to receive(:write).with(email_fetched_for_project_key, 1).once
- expect(finder.email_for_github_username('kittens')).to be_nil
- expect(finder.email_for_github_username('kittens')).to be_nil
+ email_for_github_username
+ email_for_github_username
+ end
+ end
+ end
+
+ context 'if the cached etag is not nil' do
+ before do
+ Gitlab::Cache::Import::Caching.write(etag_cache_key, etag)
+ end
+
+ it 'makes a non-rate-limited API call' do
+ expect(client).to receive(:user).with(username, { headers: { 'If-None-Match' => etag } }).once
+
+ email_for_github_username
+ end
+
+ context 'if the response contains an email' do
+ before do
+ allow(client).to receive(:user).and_return({ email: email })
+ end
+
+ it_behaves_like 'returns and caches the email'
+ end
+
+ context 'if the response does not contain an email' do
+ before do
+ allow(client).to receive(:user).and_return({})
+ end
+
+ it_behaves_like 'returns nil and caches a negative lookup'
+ end
+
+ context 'if the response is nil' do
+ before do
+ allow(client).to receive(:user).and_return(nil)
+ end
+
+ it 'returns nil' do
+ expect(email_for_github_username).to be_nil
+ end
+
+ it 'marks the project as checked' do
+ expect(Gitlab::Cache::Import::Caching).not_to receive(:write).with(cache_key, anything)
+ expect(Gitlab::Cache::Import::Caching).not_to receive(:write).with(etag_cache_key, anything)
+ expect(Gitlab::Cache::Import::Caching).to receive(:write).with(email_fetched_for_project_key, 1).once
+
+ email_for_github_username
+ email_for_github_username
+ end
+ end
+ end
+ end
+
+ context 'if the email has been checked for the project' do
+ before do
+ Gitlab::Cache::Import::Caching.write(email_fetched_for_project_key, 1)
+ end
+
+ it 'returns nil' do
+ expect(email_for_github_username).to be_nil
+ end
+
+ it_behaves_like 'does not change caches'
end
- context 'when a username does not exist on GitHub' do
- it 'caches github username inexistence' do
- expect(client)
- .to receive(:user)
- .with('kittens')
- .and_raise(::Octokit::NotFound)
- .once
+ it_behaves_like 'a user resource not found on GitHub'
+ end
+
+ context 'when the email cache is blank' do
+ before do
+ Gitlab::Cache::Import::Caching.write(cache_key, '')
+ end
+
+ context 'if the email has not been checked for the project' do
+ context 'if the cached etag is not nil' do
+ before do
+ Gitlab::Cache::Import::Caching.write(etag_cache_key, etag)
+ end
+
+ it 'makes a non-rate-limited API call' do
+ expect(client).to receive(:user).with(username, { headers: { 'If-None-Match' => etag } }).once
+
+ email_for_github_username
+ end
+
+ context 'if the response contains an email' do
+ before do
+ allow(client).to receive(:user).and_return({ email: email })
+ end
+
+ it_behaves_like 'returns and caches the email'
+ end
+
+ context 'if the response does not contain an email' do
+ before do
+ allow(client).to receive(:user).and_return({})
+ end
- expect(finder.email_for_github_username('kittens')).to be_nil
- expect(finder.email_for_github_username('kittens')).to be_nil
+ it_behaves_like 'returns nil and caches a negative lookup'
+ end
+
+ context 'if the response is nil' do
+ before do
+ allow(client).to receive(:user).and_return(nil)
+ end
+
+ it_behaves_like 'returns nil and caches a negative lookup'
+ end
+
+ it_behaves_like 'a user resource not found on GitHub'
end
end
+
+ context 'if the email has been checked for the project' do
+ before do
+ Gitlab::Cache::Import::Caching.write(email_fetched_for_project_key, 1)
+ end
+
+ it 'returns nil' do
+ expect(email_for_github_username).to be_nil
+ end
+
+ it_behaves_like 'does not change caches'
+ end
end
end
diff --git a/spec/lib/gitlab/github_import_spec.rb b/spec/lib/gitlab/github_import_spec.rb
index 898bc40ec1f..8453f002bc0 100644
--- a/spec/lib/gitlab/github_import_spec.rb
+++ b/spec/lib/gitlab/github_import_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe Gitlab::GithubImport, feature_category: :importers do
end
it 'returns the ID of the ghost user', :clean_gitlab_redis_cache do
- expect(described_class.ghost_user_id).to eq(User.ghost.id)
+ expect(described_class.ghost_user_id).to eq(Users::Internal.ghost.id)
end
it 'caches the ghost user ID', :clean_gitlab_redis_cache do
@@ -97,7 +97,7 @@ RSpec.describe Gitlab::GithubImport, feature_category: :importers do
end
it 'returns the ID of the ghost user', :clean_gitlab_redis_cache do
- expect(described_class.ghost_user_id).to eq(User.ghost.id)
+ expect(described_class.ghost_user_id).to eq(Users::Internal.ghost.id)
end
it 'caches the ghost user ID', :clean_gitlab_redis_cache do
diff --git a/spec/lib/gitlab/gl_repository/identifier_spec.rb b/spec/lib/gitlab/gl_repository/identifier_spec.rb
index dbdcafea6d6..bf7a21899f0 100644
--- a/spec/lib/gitlab/gl_repository/identifier_spec.rb
+++ b/spec/lib/gitlab/gl_repository/identifier_spec.rb
@@ -68,12 +68,10 @@ RSpec.describe Gitlab::GlRepository::Identifier do
end
describe 'design' do
- let(:design_repository_container) { project.design_repository.container }
-
it_behaves_like 'parsing gl_repository identifier' do
let(:record_id) { project.id }
- let(:identifier) { "design-#{design_repository_container.id}" }
- let(:expected_container) { design_repository_container }
+ let(:identifier) { "design-#{project.find_or_create_design_management_repository.id}" }
+ let(:expected_container) { project.design_management_repository }
let(:expected_type) { Gitlab::GlRepository::DESIGN }
end
end
diff --git a/spec/lib/gitlab/gl_repository/repo_type_spec.rb b/spec/lib/gitlab/gl_repository/repo_type_spec.rb
index 4ff8137dbd4..807f37b96c9 100644
--- a/spec/lib/gitlab/gl_repository/repo_type_spec.rb
+++ b/spec/lib/gitlab/gl_repository/repo_type_spec.rb
@@ -12,8 +12,6 @@ RSpec.describe Gitlab::GlRepository::RepoType do
let(:personal_snippet_path) { "snippets/#{personal_snippet.id}" }
let(:project_snippet_path) { "#{project.full_path}/snippets/#{project_snippet.id}" }
- let(:expected_repository_resolver) { expected_container }
-
describe Gitlab::GlRepository::PROJECT do
it_behaves_like 'a repo type' do
let(:expected_id) { project.id }
@@ -136,11 +134,10 @@ RSpec.describe Gitlab::GlRepository::RepoType do
describe Gitlab::GlRepository::DESIGN do
it_behaves_like 'a repo type' do
let(:expected_repository) { project.design_repository }
- let(:expected_container) { expected_repository.container }
+ let(:expected_container) { project.design_management_repository }
let(:expected_id) { expected_container.id }
let(:expected_identifier) { "design-#{expected_id}" }
let(:expected_suffix) { '.design' }
- let(:expected_repository_resolver) { project }
end
it 'uses the design access checker' do
@@ -167,15 +164,22 @@ RSpec.describe Gitlab::GlRepository::RepoType do
end
describe '.project_for' do
- it 'returns a project' do
- expect(described_class.project_for(project.design_repository.container)).to be_instance_of(Project)
+ it 'returns a project when container is a design_management_repository' do
+ expect(described_class.project_for(project.design_management_repository)).to be_instance_of(Project)
end
end
+ end
- describe '.repository_for' do
- it 'returns a DesignManagement::GitRepository when a project is passed' do
- expect(described_class.repository_for(project)).to be_instance_of(DesignManagement::GitRepository)
- end
+ describe '.repository_for' do
+ subject { Gitlab::GlRepository::DESIGN }
+
+ let(:expected_message) do
+ "Expected container class to be #{subject.container_class} for " \
+ "repo type #{subject.name}, but found #{project.class.name} instead."
+ end
+
+ it 'raises an error when container class does not match given container_class' do
+ expect { subject.repository_for(project) }.to raise_error(Gitlab::GlRepository::ContainerClassMismatchError, expected_message)
end
end
end
diff --git a/spec/lib/gitlab/gl_repository_spec.rb b/spec/lib/gitlab/gl_repository_spec.rb
index 7be01507a82..b03edcb31a6 100644
--- a/spec/lib/gitlab/gl_repository_spec.rb
+++ b/spec/lib/gitlab/gl_repository_spec.rb
@@ -4,9 +4,8 @@ require 'spec_helper'
RSpec.describe ::Gitlab::GlRepository do
describe '.parse' do
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project_with_design, :repository) }
let_it_be(:snippet) { create(:personal_snippet) }
- let(:design_repository_container) { project.design_repository.container }
it 'parses a project gl_repository' do
expect(described_class.parse("project-#{project.id}")).to eq([project, project, Gitlab::GlRepository::PROJECT])
@@ -21,11 +20,11 @@ RSpec.describe ::Gitlab::GlRepository do
end
it 'parses a design gl_repository' do
- expect(described_class.parse("design-#{design_repository_container.id}")).to eq(
+ expect(described_class.parse("design-#{project.design_management_repository.id}")).to eq(
[
- design_repository_container,
- project,
- Gitlab::GlRepository::DESIGN
+ project.design_management_repository, # container
+ project, # project for container
+ Gitlab::GlRepository::DESIGN # repo type
]
)
end
diff --git a/spec/lib/gitlab/gon_helper_spec.rb b/spec/lib/gitlab/gon_helper_spec.rb
index 1135cfc22ac..fc722402917 100644
--- a/spec/lib/gitlab/gon_helper_spec.rb
+++ b/spec/lib/gitlab/gon_helper_spec.rb
@@ -58,6 +58,7 @@ RSpec.describe Gitlab::GonHelper do
context 'when sentry is configured' do
let(:clientside_dsn) { 'https://xxx@sentry.example.com/1' }
let(:environment) { 'staging' }
+ let(:sentry_clientside_traces_sample_rate) { 0.5 }
context 'with legacy sentry configuration' do
before do
@@ -77,6 +78,15 @@ RSpec.describe Gitlab::GonHelper do
stub_application_setting(sentry_enabled: true)
stub_application_setting(sentry_clientside_dsn: clientside_dsn)
stub_application_setting(sentry_environment: environment)
+ stub_application_setting(sentry_clientside_traces_sample_rate: sentry_clientside_traces_sample_rate)
+ end
+
+ it 'sets sentry dsn and environment from config' do
+ expect(gon).to receive(:sentry_dsn=).with(clientside_dsn)
+ expect(gon).to receive(:sentry_environment=).with(environment)
+ expect(gon).to receive(:sentry_clientside_traces_sample_rate=).with(sentry_clientside_traces_sample_rate)
+
+ helper.add_gon_variables
end
context 'when enable_new_sentry_clientside_integration is disabled' do
@@ -87,19 +97,8 @@ RSpec.describe Gitlab::GonHelper do
it 'does not set sentry dsn and environment from config' do
expect(gon).not_to receive(:sentry_dsn=).with(clientside_dsn)
expect(gon).not_to receive(:sentry_environment=).with(environment)
-
- helper.add_gon_variables
- end
- end
-
- context 'when enable_new_sentry_clientside_integration is enabled' do
- before do
- stub_feature_flags(enable_new_sentry_clientside_integration: true)
- end
-
- it 'sets sentry dsn and environment from config' do
- expect(gon).to receive(:sentry_dsn=).with(clientside_dsn)
- expect(gon).to receive(:sentry_environment=).with(environment)
+ expect(gon).not_to receive(:sentry_clientside_traces_sample_rate=)
+ .with(sentry_clientside_traces_sample_rate)
helper.add_gon_variables
end
@@ -169,4 +168,67 @@ RSpec.describe Gitlab::GonHelper do
expect(url).to match(/no_avatar.*png$/)
end
end
+
+ describe '#add_browsersdk_tracking' do
+ let(:gon) { double('gon').as_null_object }
+ let(:analytics_url) { 'https://analytics.gitlab.com' }
+ let(:is_gitlab_com) { true }
+
+ before do
+ allow(helper).to receive(:gon).and_return(gon)
+ allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
+ end
+
+ context 'when environment variables are set' do
+ before do
+ stub_env('GITLAB_ANALYTICS_URL', analytics_url)
+ stub_env('GITLAB_ANALYTICS_ID', 'analytics-id')
+ end
+
+ it 'sets the analytics_url and analytics_id' do
+ expect(gon).to receive(:analytics_url=).with(analytics_url)
+ expect(gon).to receive(:analytics_id=).with('analytics-id')
+
+ helper.add_browsersdk_tracking
+ end
+
+ context 'when Gitlab.com? is false' do
+ let(:is_gitlab_com) { false }
+
+ it "doesn't set the analytics_url and analytics_id" do
+ expect(gon).not_to receive(:analytics_url=)
+ expect(gon).not_to receive(:analytics_id=)
+
+ helper.add_browsersdk_tracking
+ end
+ end
+
+ context 'when feature flag is false' do
+ before do
+ stub_feature_flags(browsersdk_tracking: false)
+ end
+
+ it "doesn't set the analytics_url and analytics_id" do
+ expect(gon).not_to receive(:analytics_url=)
+ expect(gon).not_to receive(:analytics_id=)
+
+ helper.add_browsersdk_tracking
+ end
+ end
+ end
+
+ context 'when environment variables are not set' do
+ before do
+ stub_env('GITLAB_ANALYTICS_URL', nil)
+ stub_env('GITLAB_ANALYTICS_ID', nil)
+ end
+
+ it "doesn't set the analytics_url and analytics_id" do
+ expect(gon).not_to receive(:analytics_url=)
+ expect(gon).not_to receive(:analytics_id=)
+
+ helper.add_browsersdk_tracking
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/graphql/deprecations/deprecation_spec.rb b/spec/lib/gitlab/graphql/deprecations/deprecation_spec.rb
index 172872fd7eb..55650b0480e 100644
--- a/spec/lib/gitlab/graphql/deprecations/deprecation_spec.rb
+++ b/spec/lib/gitlab/graphql/deprecations/deprecation_spec.rb
@@ -55,7 +55,7 @@ RSpec.describe ::Gitlab::Graphql::Deprecations::Deprecation, feature_category: :
it 'raises an error' do
expect { parsed_deprecation }.to raise_error(ArgumentError,
- '`experiment` and `deprecated` arguments cannot be passed at the same time'
+ '`alpha` and `deprecated` arguments cannot be passed at the same time'
)
end
end
diff --git a/spec/lib/gitlab/group_search_results_spec.rb b/spec/lib/gitlab/group_search_results_spec.rb
index 071b303d777..314759fb8a4 100644
--- a/spec/lib/gitlab/group_search_results_spec.rb
+++ b/spec/lib/gitlab/group_search_results_spec.rb
@@ -51,6 +51,17 @@ RSpec.describe Gitlab::GroupSearchResults, feature_category: :global_search do
include_examples 'search results filtered by archived', 'search_merge_requests_hide_archived_projects'
end
+ describe 'milestones search' do
+ let!(:unarchived_project) { create(:project, :public, group: group) }
+ let!(:archived_project) { create(:project, :public, :archived, group: group) }
+ let!(:unarchived_result) { create(:milestone, project: unarchived_project, title: 'foo') }
+ let!(:archived_result) { create(:milestone, project: archived_project, title: 'foo') }
+ let(:query) { 'foo' }
+ let(:scope) { 'milestones' }
+
+ include_examples 'search results filtered by archived', 'search_milestones_hide_archived_projects'
+ end
+
describe '#projects' do
let(:scope) { 'projects' }
let(:query) { 'Test' }
@@ -60,7 +71,7 @@ RSpec.describe Gitlab::GroupSearchResults, feature_category: :global_search do
let_it_be(:unarchived_result) { create(:project, :public, group: group, name: 'Test1') }
let_it_be(:archived_result) { create(:project, :archived, :public, group: group, name: 'Test2') }
- it_behaves_like 'search results filtered by archived', 'search_projects_hide_archived'
+ it_behaves_like 'search results filtered by archived'
end
end
diff --git a/spec/lib/gitlab/http_spec.rb b/spec/lib/gitlab/http_spec.rb
index 93d48379414..9d89167bf81 100644
--- a/spec/lib/gitlab/http_spec.rb
+++ b/spec/lib/gitlab/http_spec.rb
@@ -35,11 +35,14 @@ RSpec.describe Gitlab::HTTP do
super do |response|
response.instance_eval do
def read_body(*)
- @body.each do |fragment|
+ mock_stream = @body.split(' ')
+ mock_stream.each do |fragment|
sleep 0.002.seconds
yield fragment if block_given?
end
+
+ @body
end
end
@@ -64,8 +67,8 @@ RSpec.describe Gitlab::HTTP do
before do
stub_const("#{described_class}::DEFAULT_READ_TOTAL_TIMEOUT", 0.001.seconds)
- WebMock.stub_request(:post, /.*/).to_return do |request|
- { body: %w(a b), status: 200 }
+ WebMock.stub_request(:post, /.*/).to_return do
+ { body: "chunk-1 chunk-2", status: 200 }
end
end
diff --git a/spec/lib/gitlab/import/errors_spec.rb b/spec/lib/gitlab/import/errors_spec.rb
index 3b45af0618b..21d96601609 100644
--- a/spec/lib/gitlab/import/errors_spec.rb
+++ b/spec/lib/gitlab/import/errors_spec.rb
@@ -39,6 +39,7 @@ RSpec.describe Gitlab::Import::Errors, feature_category: :importers do
"Noteable can't be blank",
"Author can't be blank",
"Project does not match noteable project",
+ "Namespace can't be blank",
"User can't be blank",
"Name is not a valid emoji name"
)
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 5bbb95b3ea5..d337a37c69f 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -85,6 +85,7 @@ events:
notes:
- award_emoji
- project
+- namespace
- noteable
- author
- updated_by
@@ -103,6 +104,7 @@ note_metadata:
- note
- email_participant
commit_notes:
+- namespace
- award_emoji
- noteable
- author
@@ -621,6 +623,7 @@ project:
- project_members
- project_repository
- users
+- maintainers
- requesters
- namespace_members
- namespace_requesters
@@ -690,7 +693,6 @@ project:
- pool_repository
- kubernetes_namespaces
- error_tracking_setting
-- metrics_setting
- gitlab_slack_application_integration
- github_integration
- protected_environments
@@ -738,6 +740,7 @@ project:
- project_registry
- packages
- package_files
+- package_protection_rules
- rpm_repository_files
- npm_metadata_caches
- packages_cleanup_policy
@@ -823,6 +826,7 @@ project:
- project_state
- security_policy_bots
- target_branch_rules
+- organization
award_emoji:
- awardable
- user
@@ -872,8 +876,6 @@ suggestions:
- note
diff_note_positions:
- note
-metrics_setting:
-- project
protected_environments:
- project
- group
diff --git a/spec/lib/gitlab/import_export/attributes_permitter_spec.rb b/spec/lib/gitlab/import_export/attributes_permitter_spec.rb
index 8089b40cae8..08abd7908d2 100644
--- a/spec/lib/gitlab/import_export/attributes_permitter_spec.rb
+++ b/spec/lib/gitlab/import_export/attributes_permitter_spec.rb
@@ -97,7 +97,6 @@ RSpec.describe Gitlab::ImportExport::AttributesPermitter, feature_category: :imp
:user | true
:author | false
:ci_cd_settings | true
- :metrics_setting | true
:project_badges | true
:pipeline_schedules | true
:error_tracking_setting | true
diff --git a/spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb b/spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb
index e42a1d0ff8b..13d94fdb6fe 100644
--- a/spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb
@@ -27,8 +27,8 @@ RSpec.describe Gitlab::ImportExport::Base::RelationObjectSaver, feature_category
expect { saver.execute }.to change(project.issues, :count).by(1)
end
- context 'when subrelation is present' do
- let(:notes) { build_list(:note, 6, project: project, importing: true) }
+ context 'when subrelation collection is present' do
+ let(:notes) { build_list(:note, 2, project: project, importing: true) }
let(:relation_object) { build(:issue, project: project, notes: notes) }
let(:relation_definition) { { 'notes' => {} } }
@@ -39,7 +39,7 @@ RSpec.describe Gitlab::ImportExport::Base::RelationObjectSaver, feature_category
saver.execute
issue = project.issues.last
- expect(issue.notes.count).to eq(6)
+ expect(issue.notes.count).to eq(2)
end
end
@@ -58,25 +58,10 @@ RSpec.describe Gitlab::ImportExport::Base::RelationObjectSaver, feature_category
end
end
- context 'when subrelation collection count is small' do
- let(:note) { build(:note, project: project, importing: true) }
- let(:relation_object) { build(:issue, project: project, notes: [note]) }
- let(:relation_definition) { { 'notes' => {} } }
-
- it 'saves subrelation as part of the relation object itself' do
- expect(relation_object.notes).not_to receive(:<<)
-
- saver.execute
-
- issue = project.issues.last
- expect(issue.notes.count).to eq(1)
- end
- end
-
context 'when some subrelations are invalid' do
- let(:notes) { build_list(:note, 5, project: project, importing: true) }
+ let(:note) { build(:note, project: project, importing: true) }
let(:invalid_note) { build(:note) }
- let(:relation_object) { build(:issue, project: project, notes: notes + [invalid_note]) }
+ let(:relation_object) { build(:issue, project: project, notes: [note, invalid_note]) }
let(:relation_definition) { { 'notes' => {} } }
it 'saves valid subrelations and logs invalid subrelation' do
@@ -88,7 +73,7 @@ RSpec.describe Gitlab::ImportExport::Base::RelationObjectSaver, feature_category
issue = project.issues.last
expect(invalid_note.persisted?).to eq(false)
- expect(issue.notes.count).to eq(5)
+ expect(issue.notes.count).to eq(1)
end
context 'when invalid subrelation can still be persisted' do
@@ -112,14 +97,14 @@ RSpec.describe Gitlab::ImportExport::Base::RelationObjectSaver, feature_category
let(:relation_key) { 'labels' }
let(:relation_definition) { { 'priorities' => {} } }
let(:importable) { create(:group) }
- let(:valid_priorities) { build_list(:label_priority, 5, importing: true) }
+ let(:valid_priorities) { [build(:label_priority, importing: true)] }
let(:invalid_priority) { build(:label_priority, priority: -1) }
let(:relation_object) { build(:group_label, group: importable, title: 'test', priorities: valid_priorities + [invalid_priority]) }
it 'saves relation without invalid subrelations' do
saver.execute
- expect(importable.labels.last.priorities.count).to eq(5)
+ expect(importable.labels.last.priorities.count).to eq(1)
end
end
end
diff --git a/spec/lib/gitlab/import_export/command_line_util_spec.rb b/spec/lib/gitlab/import_export/command_line_util_spec.rb
index 8ed3a60d7fc..76a35d07c7f 100644
--- a/spec/lib/gitlab/import_export/command_line_util_spec.rb
+++ b/spec/lib/gitlab/import_export/command_line_util_spec.rb
@@ -203,7 +203,7 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil, feature_category: :importe
it 'throws a blocked url error' do
Tempfile.create('test') do |file|
- expect { subject.download(url, file.path) }.to raise_error((Gitlab::HTTP::BlockedUrlError))
+ expect { subject.download(url, file.path) }.to raise_error(Gitlab::HTTP::BlockedUrlError)
end
end
diff --git a/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb b/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb
index aceea70be92..04e25dee905 100644
--- a/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb
+++ b/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb
@@ -13,13 +13,13 @@ RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator, feature_c
FileUtils.rm(filepath)
end
- subject { described_class.new(archive_path: filepath, max_bytes: max_bytes) }
+ subject { described_class.new(archive_path: filepath) }
describe '#valid?' do
- let(:max_bytes) { 1 }
-
context 'when file does not exceed allowed decompressed size' do
- let(:max_bytes) { 20 }
+ before do
+ stub_application_setting(max_decompressed_archive_size: 20)
+ end
it 'returns true' do
expect(subject.valid?).to eq(true)
@@ -35,6 +35,10 @@ RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator, feature_c
end
context 'when file exceeds allowed decompressed size' do
+ before do
+ stub_application_setting(max_decompressed_archive_size: 0.000001)
+ end
+
it 'logs error message returns false' do
expect(Gitlab::Import::Logger)
.to receive(:info)
@@ -93,7 +97,7 @@ RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator, feature_c
end
context 'when timeout occurs' do
- let(:error_message) { 'Timeout reached during archive decompression' }
+ let(:error_message) { 'Timeout of 210 seconds reached during archive decompression' }
let(:exception) { Timeout::Error }
include_examples 'logs raised exception and terminates validator process group'
diff --git a/spec/lib/gitlab/import_export/file_importer_spec.rb b/spec/lib/gitlab/import_export/file_importer_spec.rb
index d449446d7be..ef118d2987c 100644
--- a/spec/lib/gitlab/import_export/file_importer_spec.rb
+++ b/spec/lib/gitlab/import_export/file_importer_spec.rb
@@ -198,8 +198,7 @@ RSpec.describe Gitlab::ImportExport::FileImporter, feature_category: :importers
context 'when validate_import_decompressed_archive_size feature flag is enabled' do
before do
stub_feature_flags(validate_import_decompressed_archive_size: true)
-
- allow(Gitlab::ImportExport::DecompressedArchiveSizeValidator).to receive(:max_bytes).and_return(1)
+ stub_application_setting(max_decompressed_archive_size: 0.000001)
end
it 'returns false and sets an error on shared' do
diff --git a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
index 1d3fc764b50..09a2417ce1e 100644
--- a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
+++ b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
@@ -48,7 +48,6 @@ RSpec.describe 'Test coverage of the Project Import', feature_category: :importe
project.ci_pipelines.notes.events.push_event_payload
project.protected_branches.unprotect_access_levels
project.prometheus_metrics
- project.metrics_setting
project.boards.lists.label.priorities
project.service_desk_setting
project.security_setting
diff --git a/spec/lib/gitlab/import_export/json/ndjson_writer_spec.rb b/spec/lib/gitlab/import_export/json/ndjson_writer_spec.rb
index 452d63d548e..486d179ae05 100644
--- a/spec/lib/gitlab/import_export/json/ndjson_writer_spec.rb
+++ b/spec/lib/gitlab/import_export/json/ndjson_writer_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe Gitlab::ImportExport::Json::NdjsonWriter do
+RSpec.describe Gitlab::ImportExport::Json::NdjsonWriter, feature_category: :importers do
include ImportExport::CommonUtil
let(:path) { "#{Dir.tmpdir}/ndjson_writer_spec/tree" }
@@ -35,13 +35,18 @@ RSpec.describe Gitlab::ImportExport::Json::NdjsonWriter do
end
context "when single relation is already serialized" do
- it "raise exception" do
+ it "appends to the existing file" do
values = [{ "key" => "value_1", "key_1" => "value_1" }, { "key" => "value_2", "key_1" => "value_2" }]
relation = "relation"
file_path = File.join(path, exportable_path, "#{relation}.ndjson")
subject.write_relation(exportable_path, relation, values[0])
- expect { subject.write_relation(exportable_path, relation, values[1]) }.to raise_exception("The #{file_path} already exist")
+ expect { subject.write_relation(exportable_path, relation, values[1]) }.not_to raise_exception
+
+ file_data = File.read(file_path)
+
+ expect(file_data).to include(values[0].to_json)
+ expect(file_data).to include(values[1].to_json)
end
end
end
diff --git a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
index f4c9189030b..e5058e029c8 100644
--- a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
+++ b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
@@ -70,8 +70,9 @@ RSpec.describe Gitlab::ImportExport::Json::StreamingSerializer, feature_category
create_list(:issue, 3, :with_desc_relative_position, project: exportable ) # ascending ids, descending position
end
- it 'calls json_writer.write_relation_array with proper params' do
+ it 'calls json_writer.write_relation_array with proper params and clears SafeRequestStore' do
expect(json_writer).to receive(:write_relation_array).with(exportable_path, :issues, array_including(issue.to_json))
+ expect(Gitlab::SafeRequestStore).to receive(:clear!)
subject.execute
end
diff --git a/spec/lib/gitlab/import_export/project/export_task_spec.rb b/spec/lib/gitlab/import_export/project/export_task_spec.rb
index 95971d08175..0837874526a 100644
--- a/spec/lib/gitlab/import_export/project/export_task_spec.rb
+++ b/spec/lib/gitlab/import_export/project/export_task_spec.rb
@@ -2,7 +2,7 @@
require 'rake_helper'
-RSpec.describe Gitlab::ImportExport::Project::ExportTask, :silence_stdout do
+RSpec.describe Gitlab::ImportExport::Project::ExportTask, :silence_stdout, feature_category: :importers do
let_it_be(:username) { 'root' }
let(:namespace_path) { username }
let_it_be(:user) { create(:user, username: username) }
diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
index 47003707172..c83cfb0e2f5 100644
--- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
@@ -467,7 +467,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i
aggregate_failures do
expect(release.tag).to eq('release-1.0')
- expect(release.author_id).to eq(User.select(:id).ghost.id)
+ expect(release.author_id).to eq(Users::Internal.ghost.id)
end
end
diff --git a/spec/lib/gitlab/import_sources_spec.rb b/spec/lib/gitlab/import_sources_spec.rb
index b243780a020..db23e3b1fd4 100644
--- a/spec/lib/gitlab/import_sources_spec.rb
+++ b/spec/lib/gitlab/import_sources_spec.rb
@@ -58,7 +58,7 @@ RSpec.describe Gitlab::ImportSources, feature_category: :importers do
describe '.importer' do
import_sources = {
'github' => Gitlab::GithubImport::ParallelImporter,
- 'bitbucket' => Gitlab::BitbucketImport::Importer,
+ 'bitbucket' => Gitlab::BitbucketImport::ParallelImporter,
'bitbucket_server' => Gitlab::BitbucketServerImport::ParallelImporter,
'fogbugz' => Gitlab::FogbugzImport::Importer,
'git' => nil,
@@ -72,45 +72,37 @@ RSpec.describe Gitlab::ImportSources, feature_category: :importers do
expect(described_class.importer(name)).to eq(klass)
end
end
-
- context 'when flag is disabled' do
- before do
- stub_feature_flags(bitbucket_server_parallel_importer: false)
- end
-
- it 'returns Gitlab::BitbucketServerImport::Importer when given bitbucket_server' do
- expect(described_class.importer('bitbucket_server')).to eq(Gitlab::BitbucketServerImport::Importer)
- end
- end
end
describe '.import_table' do
subject { described_class.import_table }
- it 'returns the ParallelImporter for Bitbucket server' do
- is_expected.to include(
- described_class::ImportSource.new(
- 'bitbucket_server',
- 'Bitbucket Server',
- Gitlab::BitbucketServerImport::ParallelImporter
- )
- )
- end
-
- context 'when flag is disabled' do
- before do
- stub_feature_flags(bitbucket_server_parallel_importer: false)
- end
-
- it 'returns the legacy Importer for Bitbucket server' do
+ describe 'Bitbucket cloud' do
+ it 'returns the ParallelImporter' do
is_expected.to include(
described_class::ImportSource.new(
- 'bitbucket_server',
- 'Bitbucket Server',
- Gitlab::BitbucketServerImport::Importer
+ 'bitbucket',
+ 'Bitbucket Cloud',
+ Gitlab::BitbucketImport::ParallelImporter
)
)
end
+
+ context 'when flag is disabled' do
+ before do
+ stub_feature_flags(bitbucket_parallel_importer: false)
+ end
+
+ it 'returns the legacy Importer' do
+ is_expected.to include(
+ described_class::ImportSource.new(
+ 'bitbucket',
+ 'Bitbucket Cloud',
+ Gitlab::BitbucketImport::Importer
+ )
+ )
+ end
+ end
end
end
@@ -134,7 +126,7 @@ RSpec.describe Gitlab::ImportSources, feature_category: :importers do
end
describe 'imports_repository? checker' do
- let(:allowed_importers) { %w[github gitlab_project bitbucket_server] }
+ let(:allowed_importers) { %w[github gitlab_project bitbucket bitbucket_server] }
it 'fails if any importer other than the allowed ones implements this method' do
current_importers = described_class.values.select { |kind| described_class.importer(kind).try(:imports_repository?) }
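The reworked expectations above amount to a feature-flag switch for the Bitbucket Cloud importer. A minimal sketch of that selection, assuming only the bitbucket_parallel_importer flag and the two importer classes named in the spec (the dispatch itself is illustrative, not the production code):

    # Illustrative only: pick the Bitbucket Cloud importer based on the flag.
    def bitbucket_cloud_importer
      if Feature.enabled?(:bitbucket_parallel_importer)
        Gitlab::BitbucketImport::ParallelImporter
      else
        Gitlab::BitbucketImport::Importer
      end
    end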
diff --git a/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
index 6271885d80e..4168fdf5425 100644
--- a/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
+++ b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
@@ -62,6 +62,7 @@ RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :request_store, featur
it 'counts successful pipelined requests' do
expect(instrumentation_class).to receive(:instance_count_request).with(2).and_call_original
+ expect(instrumentation_class).to receive(:instance_count_pipelined_request).with(2).and_call_original
redis_store_class.with do |redis|
redis.pipelined do |pipeline|
diff --git a/spec/lib/gitlab/job_waiter_spec.rb b/spec/lib/gitlab/job_waiter_spec.rb
index af2da8f20c0..b000f55e739 100644
--- a/spec/lib/gitlab/job_waiter_spec.rb
+++ b/spec/lib/gitlab/job_waiter_spec.rb
@@ -4,13 +4,21 @@ require 'spec_helper'
RSpec.describe Gitlab::JobWaiter, :redis, feature_category: :shared do
describe '.notify' do
- it 'pushes the jid to the named queue' do
- key = described_class.new.key
+ let(:key) { described_class.new.key }
+ it 'pushes the jid to the named queue', :freeze_time do
described_class.notify(key, 123)
Gitlab::Redis::SharedState.with do |redis|
- expect(redis.ttl(key)).to be > 0
+ expect(redis.ttl(key)).to eq(described_class::DEFAULT_TTL)
+ end
+ end
+
+ it 'can be passed a custom TTL', :freeze_time do
+ described_class.notify(key, 123, ttl: 5.minutes)
+
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.ttl(key)).to eq(5.minutes.to_i)
end
end
end
@@ -23,6 +31,32 @@ RSpec.describe Gitlab::JobWaiter, :redis, feature_category: :shared do
end
end
+ describe '.delete_key' do
+ let(:key) { described_class.generate_key }
+
+ it 'deletes the key' do
+ described_class.notify(key, '1')
+ described_class.delete_key(key)
+
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.llen(key)).to eq(0)
+ end
+ end
+
+ context 'when key is not a JobWaiter key' do
+ let(:key) { 'foo' }
+
+ it 'does not delete the key' do
+ described_class.notify(key, '1')
+ described_class.delete_key(key)
+
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.llen(key)).to eq(1)
+ end
+ end
+ end
+ end
+
describe '#wait' do
let(:waiter) { described_class.new(2) }
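A short usage sketch of the Gitlab::JobWaiter behaviour exercised above; generate_key, notify (with its new ttl: option) and delete_key come from the spec, the job id is illustrative:

    key = Gitlab::JobWaiter.generate_key
    Gitlab::JobWaiter.notify(key, 'jid-123', ttl: 5.minutes) # overrides DEFAULT_TTL
    Gitlab::JobWaiter.delete_key(key)                        # only removes keys generated by JobWaiter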
diff --git a/spec/lib/gitlab/manifest_import/metadata_spec.rb b/spec/lib/gitlab/manifest_import/metadata_spec.rb
index c55b407088d..011371782fe 100644
--- a/spec/lib/gitlab/manifest_import/metadata_spec.rb
+++ b/spec/lib/gitlab/manifest_import/metadata_spec.rb
@@ -46,16 +46,6 @@ RSpec.describe Gitlab::ManifestImport::Metadata, :clean_gitlab_redis_shared_stat
expect(status.repositories).to eq(repositories)
end
-
- it 'reads non-hash-tagged keys if hash-tag keys are missing' do
- status = described_class.new(user)
-
- Gitlab::Redis::SharedState.with do |redis|
- redis.set(repositories_key, Gitlab::Json.dump(repositories))
- end
-
- expect(status.repositories).to eq(repositories)
- end
end
describe '#group_id' do
@@ -73,13 +63,5 @@ RSpec.describe Gitlab::ManifestImport::Metadata, :clean_gitlab_redis_shared_stat
expect(status.group_id).to eq(3)
end
-
- it 'reads non-hash-tagged keys if hash-tag keys are missing' do
- status = described_class.new(user)
-
- Gitlab::Redis::SharedState.with { |redis| redis.set(group_id_key, 2) }
-
- expect(status.group_id).to eq(2)
- end
end
end
diff --git a/spec/lib/gitlab/metrics/dashboard/cache_spec.rb b/spec/lib/gitlab/metrics/dashboard/cache_spec.rb
deleted file mode 100644
index 8c2edc85c35..00000000000
--- a/spec/lib/gitlab/metrics/dashboard/cache_spec.rb
+++ /dev/null
@@ -1,88 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Style/RedundantFetchBlock
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Metrics::Dashboard::Cache, :use_clean_rails_memory_store_caching do
- let_it_be(:project1) { build_stubbed(:project) }
- let_it_be(:project2) { build_stubbed(:project) }
-
- let(:project1_key1) { "#{project1.id}_key1" }
- let(:project1_key2) { "#{project1.id}_key2" }
- let(:project2_key1) { "#{project2.id}_key1" }
-
- let(:cache1) { described_class.for(project1) }
- let(:cache2) { described_class.for(project2) }
-
- before do
- cache1.fetch(project1_key1) { 'data1' }
- cache1.fetch(project1_key2) { 'data2' }
- cache2.fetch(project2_key1) { 'data3' }
- end
-
- describe '.fetch' do
- it 'stores data correctly' do
- described_class.fetch('key1') { 'data1' }
- described_class.fetch('key2') { 'data2' }
-
- expect(described_class.fetch('key1')).to eq('data1')
- expect(described_class.fetch('key2')).to eq('data2')
- end
- end
-
- describe '.for' do
- it 'returns a new instance' do
- expect(described_class.for(project1)).to be_instance_of(described_class)
- end
- end
-
- describe '#fetch' do
- it 'stores data correctly' do
- expect(cache1.fetch(project1_key1)).to eq('data1')
- expect(cache1.fetch(project1_key2)).to eq('data2')
- expect(cache2.fetch(project2_key1)).to eq('data3')
- end
- end
-
- describe '#delete_all!' do
- it 'deletes keys of the given project', :aggregate_failures do
- cache1.delete_all!
-
- expect(Rails.cache.exist?(project1_key1)).to be(false)
- expect(Rails.cache.exist?(project1_key2)).to be(false)
- expect(cache2.fetch(project2_key1)).to eq('data3')
-
- cache2.delete_all!
-
- expect(Rails.cache.exist?(project2_key1)).to be(false)
- end
-
- it 'does not fail when nothing to delete' do
- project3 = build_stubbed(:project)
- cache3 = described_class.for(project3)
-
- expect { cache3.delete_all! }.not_to raise_error
- end
- end
-
- context 'multiple fetches and deletes' do
- specify :aggregate_failures do
- cache1.delete_all!
-
- expect(Rails.cache.exist?(project1_key1)).to be(false)
- expect(Rails.cache.exist?(project1_key2)).to be(false)
-
- cache1.fetch("#{project1.id}_key3") { 'data1' }
- cache1.fetch("#{project1.id}_key4") { 'data2' }
-
- expect(cache1.fetch("#{project1.id}_key3")).to eq('data1')
- expect(cache1.fetch("#{project1.id}_key4")).to eq('data2')
-
- cache1.delete_all!
-
- expect(Rails.cache.exist?("#{project1.id}_key3")).to be(false)
- expect(Rails.cache.exist?("#{project1.id}_key4")).to be(false)
- end
- end
-end
-# rubocop:enable Style/RedundantFetchBlock
diff --git a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb b/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
deleted file mode 100644
index 11b587e4905..00000000000
--- a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Metrics::Dashboard::Processor do
- include MetricsDashboardHelpers
-
- let(:project) { build(:project) }
- let(:environment) { create(:environment, project: project) }
- let(:dashboard_yml) { load_sample_dashboard }
-
- describe 'process' do
- let(:sequence) do
- [
- Gitlab::Metrics::Dashboard::Stages::UrlValidator
- ]
- end
-
- let(:process_params) { [project, dashboard_yml, sequence, { environment: environment }] }
- let(:dashboard) { described_class.new(*process_params).process }
-
- context 'when the dashboard is not present' do
- let(:dashboard_yml) { nil }
-
- it 'returns nil' do
- expect(dashboard).to be_nil
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/metrics/dashboard/repo_dashboard_finder_spec.rb b/spec/lib/gitlab/metrics/dashboard/repo_dashboard_finder_spec.rb
deleted file mode 100644
index a2c9906c0e9..00000000000
--- a/spec/lib/gitlab/metrics/dashboard/repo_dashboard_finder_spec.rb
+++ /dev/null
@@ -1,54 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Metrics::Dashboard::RepoDashboardFinder do
- include MetricsDashboardHelpers
-
- let_it_be(:project) { create(:project) }
-
- describe '.list_dashboards' do
- it 'deletes dashboard cache entries' do
- cache = instance_double(Gitlab::Metrics::Dashboard::Cache)
- allow(Gitlab::Metrics::Dashboard::Cache).to receive(:for).and_return(cache)
-
- expect(cache).to receive(:delete_all!)
-
- described_class.list_dashboards(project)
- end
-
- it 'returns empty array when there are no dashboards' do
- expect(described_class.list_dashboards(project)).to eq([])
- end
-
- context 'when there are project dashboards available' do
- let_it_be(:dashboard_path) { '.gitlab/dashboards/test.yml' }
- let_it_be(:project) { project_with_dashboard(dashboard_path) }
-
- it 'returns the dashboard list' do
- expect(described_class.list_dashboards(project)).to contain_exactly(dashboard_path)
- end
- end
- end
-
- describe '.read_dashboard' do
- it 'raises error when dashboard does not exist' do
- dashboard_path = '.gitlab/dashboards/test.yml'
-
- expect { described_class.read_dashboard(project, dashboard_path) }.to raise_error(
- Gitlab::Metrics::Dashboard::Errors::NOT_FOUND_ERROR
- )
- end
-
- context 'when there are project dashboards available' do
- let_it_be(:dashboard_path) { '.gitlab/dashboards/test.yml' }
- let_it_be(:project) { project_with_dashboard(dashboard_path) }
-
- it 'reads dashboard' do
- expect(described_class.read_dashboard(project, dashboard_path)).to eq(
- fixture_file('lib/gitlab/metrics/dashboard/sample_dashboard.yml')
- )
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/metrics/dashboard/stages/url_validator_spec.rb b/spec/lib/gitlab/metrics/dashboard/stages/url_validator_spec.rb
deleted file mode 100644
index 83cf161c4e2..00000000000
--- a/spec/lib/gitlab/metrics/dashboard/stages/url_validator_spec.rb
+++ /dev/null
@@ -1,101 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Metrics::Dashboard::Stages::UrlValidator do
- let(:project) { build_stubbed(:project) }
-
- describe '#transform!' do
- context 'when the links contain a blocked url' do
- let(:dashboard) do
- {
- dashboard: "Test Dashboard",
- links: [
- { url: "http://1.1.1.1.1" },
- { url: "https://gitlab.com" },
- { url: "http://0.0.0.0" }
- ],
- panel_groups: [
- {
- group: "Group A",
- panels: [
- {
- title: "Super Chart A1",
- type: "area-chart",
- y_label: "y_label",
- metrics: [
- {
- id: "metric_a1",
- query_range: "query",
- unit: "unit",
- label: "Legend Label"
- }
- ],
- links: [
- { url: "http://1.1.1.1.1" },
- { url: "https://gitlab.com" },
- { url: "http://0.0.0.0" }
- ]
- }
- ]
- }
- ]
- }
- end
-
- let(:expected) do
- [{ url: '' }, { url: 'https://gitlab.com' }, { url: 'http://0.0.0.0' }]
- end
-
- let(:transform!) { described_class.new(project, dashboard, nil).transform! }
-
- before do
- stub_env('RSPEC_ALLOW_INVALID_URLS', 'false')
- stub_application_setting(allow_local_requests_from_web_hooks_and_services: true)
- end
-
- context 'dashboard related links' do
- it 'replaces the blocked url with an empty string' do
- transform!
-
- expect(dashboard[:links]).to eq(expected)
- end
- end
-
- context 'chart links' do
- it 'replaces the blocked url with an empty string' do
- transform!
-
- result = dashboard.dig(:panel_groups, 0, :panels, 0, :links)
- expect(result).to eq(expected)
- end
- end
-
- context 'when local requests are not allowed' do
- before do
- stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
- end
-
- let(:expected) do
- [{ url: '' }, { url: 'https://gitlab.com' }, { url: '' }]
- end
-
- it 'replaces the blocked url with an empty string' do
- transform!
-
- expect(dashboard[:links]).to eq(expected)
- end
- end
-
- context 'when the links are an array of strings instead of hashes' do
- before do
- dashboard[:links] = dashboard[:links].map(&:values)
- end
-
- it 'prevents an invalid link definition from erroring out' do
- expect { transform! }.not_to raise_error
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/metrics/dashboard/url_spec.rb b/spec/lib/gitlab/metrics/dashboard/url_spec.rb
deleted file mode 100644
index a035cf02da4..00000000000
--- a/spec/lib/gitlab/metrics/dashboard/url_spec.rb
+++ /dev/null
@@ -1,106 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Metrics::Dashboard::Url do
- include Gitlab::Routing.url_helpers
-
- describe '#clusters_regex' do
- let(:url) { Gitlab::Routing.url_helpers.namespace_project_cluster_url(*url_params) }
- let(:url_params) do
- [
- 'foo',
- 'bar',
- '1',
- {
- group: 'Cluster Health',
- title: 'Memory Usage',
- y_label: 'Memory 20(GiB)',
- anchor: 'title'
- }
- ]
- end
-
- let(:expected_params) do
- {
- 'url' => url,
- 'namespace' => 'foo',
- 'project' => 'bar',
- 'cluster_id' => '1',
- 'query' => '?group=Cluster+Health&title=Memory+Usage&y_label=Memory+20%28GiB%29',
- 'anchor' => '#title'
- }
- end
-
- subject { described_class.clusters_regex }
-
- it_behaves_like 'regex which matches url when expected'
-
- context 'for metrics_dashboard route' do
- let(:url) do
- metrics_dashboard_namespace_project_cluster_url(
- *url_params, cluster_type: :project, embedded: true, format: :json
- )
- end
-
- let(:expected_params) do
- {
- 'url' => url,
- 'namespace' => 'foo',
- 'project' => 'bar',
- 'cluster_id' => '1',
- 'query' => '?cluster_type=project&embedded=true',
- 'anchor' => nil
- }
- end
-
- it_behaves_like 'regex which matches url when expected'
- end
- end
-
- describe '#alert_regex' do
- let(:url) { Gitlab::Routing.url_helpers.metrics_dashboard_namespace_project_prometheus_alert_url(*url_params) }
- let(:url_params) do
- [
- 'foo',
- 'bar',
- '1',
- {
- start: '2020-02-10T12:59:49.938Z',
- end: '2020-02-10T20:59:49.938Z',
- anchor: "anchor"
- }
- ]
- end
-
- let(:expected_params) do
- {
- 'url' => url,
- 'namespace' => 'foo',
- 'project' => 'bar',
- 'alert' => '1',
- 'query' => "?end=2020-02-10T20%3A59%3A49.938Z&start=2020-02-10T12%3A59%3A49.938Z",
- 'anchor' => '#anchor'
- }
- end
-
- subject { described_class.alert_regex }
-
- it_behaves_like 'regex which matches url when expected'
-
- it_behaves_like 'regex which matches url when expected' do
- let(:url) { Gitlab::Routing.url_helpers.metrics_dashboard_namespace_project_prometheus_alert_url(*url_params, format: :json) }
-
- let(:expected_params) do
- {
- 'url' => url,
- 'namespace' => 'foo',
- 'project' => 'bar',
- 'alert' => '1',
- 'query' => nil,
- 'anchor' => nil
- }
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
index 57790ad78a8..85e8b366f29 100644
--- a/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
@@ -8,6 +8,32 @@ RSpec.describe Gitlab::Metrics::Samplers::DatabaseSampler do
it_behaves_like 'metrics sampler', 'DATABASE_SAMPLER'
describe '#sample' do
+ let(:main_load_balancer) do
+ double(:main_load_balancer, host_list: main_host_list, configuration: main_configuration, primary_only?: false)
+ end
+
+ let(:main_configuration) { double(:configuration, connection_specification_name: 'ActiveRecord::Base') }
+ let(:main_host_list) { double(:host_list, hosts: [main_replica_host]) }
+ let(:main_replica_host) { double(:host, pool: main_replica_pool, host: 'main-replica-host', port: 2345) }
+ let(:main_replica_pool) do
+ double(:main_replica_pool, db_config: double(:main_replica_db_config, name: 'main_replica'), stat: stats)
+ end
+
+ let(:stats) do
+ { size: 123, connections: 100, busy: 10, dead: 5, idle: 85, waiting: 1 }
+ end
+
+ let(:ci_load_balancer) do
+ double(:ci_load_balancer, host_list: ci_host_list, configuration: ci_configuration, primary_only?: false)
+ end
+
+ let(:ci_configuration) { double(:configuration, connection_specification_name: 'Ci::ApplicationRecord') }
+ let(:ci_host_list) { double(:host_list, hosts: [ci_replica_host]) }
+ let(:ci_replica_host) { double(:host, pool: ci_replica_pool, host: 'ci-replica-host', port: 3456) }
+ let(:ci_replica_pool) do
+ double(:ci_replica_pool, db_config: double(:ci_replica_db_config, name: 'ci_replica'), stat: stats)
+ end
+
let(:main_labels) do
{
class: 'ActiveRecord::Base',
@@ -62,35 +88,9 @@ RSpec.describe Gitlab::Metrics::Samplers::DatabaseSampler do
end
context 'when replica hosts are configured' do
- let(:main_load_balancer) { ApplicationRecord.load_balancer }
- let(:main_replica_host) { main_load_balancer.host }
-
- let(:ci_load_balancer) { double(:load_balancer, host_list: ci_host_list, configuration: configuration) }
- let(:configuration) { double(:configuration, connection_specification_name: 'Ci::ApplicationRecord') }
- let(:ci_host_list) { double(:host_list, hosts: [ci_replica_host]) }
- let(:ci_replica_host) { double(:host, connection: ci_connection) }
- let(:ci_connection) { double(:connection, pool: Ci::ApplicationRecord.connection_pool) }
-
before do
allow(Gitlab::Database::LoadBalancing).to receive(:each_load_balancer)
.and_return([main_load_balancer, ci_load_balancer].to_enum)
-
- allow(main_load_balancer).to receive(:primary_only?).and_return(false)
- allow(ci_load_balancer).to receive(:primary_only?).and_return(false)
-
- allow(main_replica_host).to receive(:host).and_return('main-replica-host')
- allow(ci_replica_host).to receive(:host).and_return('ci-replica-host')
-
- allow(main_replica_host).to receive(:port).and_return(2345)
- allow(ci_replica_host).to receive(:port).and_return(3456)
-
- allow(Gitlab::Database).to receive(:db_config_name)
- .with(main_replica_host.connection)
- .and_return('main_replica')
-
- allow(Gitlab::Database).to receive(:db_config_name)
- .with(ci_replica_host.connection)
- .and_return('ci_replica')
end
it 'samples connection pool statistics for primaries and replicas' do
@@ -117,35 +117,9 @@ RSpec.describe Gitlab::Metrics::Samplers::DatabaseSampler do
end
context 'when the base model has replica connections' do
- let(:main_load_balancer) { ApplicationRecord.load_balancer }
- let(:main_replica_host) { main_load_balancer.host }
-
- let(:ci_load_balancer) { double(:load_balancer, host_list: ci_host_list, configuration: configuration) }
- let(:configuration) { double(:configuration, connection_specification_name: 'Ci::ApplicationRecord') }
- let(:ci_host_list) { double(:host_list, hosts: [ci_replica_host]) }
- let(:ci_replica_host) { double(:host, connection: ci_connection) }
- let(:ci_connection) { double(:connection, pool: Ci::ApplicationRecord.connection_pool) }
-
before do
allow(Gitlab::Database::LoadBalancing).to receive(:each_load_balancer)
.and_return([main_load_balancer, ci_load_balancer].to_enum)
-
- allow(main_load_balancer).to receive(:primary_only?).and_return(false)
- allow(ci_load_balancer).to receive(:primary_only?).and_return(false)
-
- allow(main_replica_host).to receive(:host).and_return('main-replica-host')
- allow(ci_replica_host).to receive(:host).and_return('ci-replica-host')
-
- allow(main_replica_host).to receive(:port).and_return(2345)
- allow(ci_replica_host).to receive(:port).and_return(3456)
-
- allow(Gitlab::Database).to receive(:db_config_name)
- .with(main_replica_host.connection)
- .and_return('main_replica')
-
- allow(Gitlab::Database).to receive(:db_config_name)
- .with(ci_replica_host.connection)
- .and_return('ci_replica')
end
it 'still records the replica metrics' do
diff --git a/spec/lib/gitlab/middleware/webhook_recursion_detection_spec.rb b/spec/lib/gitlab/middleware/webhook_recursion_detection_spec.rb
index 5394cea64af..b85256f32c5 100644
--- a/spec/lib/gitlab/middleware/webhook_recursion_detection_spec.rb
+++ b/spec/lib/gitlab/middleware/webhook_recursion_detection_spec.rb
@@ -3,7 +3,7 @@
require 'fast_spec_helper'
require 'action_dispatch'
require 'rack'
-require 'request_store'
+require 'gitlab/safe_request_store'
RSpec.describe Gitlab::Middleware::WebhookRecursionDetection do
let(:app) { double(:app) }
diff --git a/spec/lib/gitlab/observability_spec.rb b/spec/lib/gitlab/observability_spec.rb
index 61f69a0171a..04c35f0ee3a 100644
--- a/spec/lib/gitlab/observability_spec.rb
+++ b/spec/lib/gitlab/observability_spec.rb
@@ -40,16 +40,10 @@ RSpec.describe Gitlab::Observability, feature_category: :error_tracking do
it { is_expected.to eq("#{described_class.observability_url}/v1/auth/start") }
end
- describe '.tracing_url' do
- subject { described_class.tracing_url(project) }
-
- it { is_expected.to eq("#{described_class.observability_url}/query/#{group.id}/#{project.id}/v1/traces") }
- end
-
describe '.provisioning_url' do
subject { described_class.provisioning_url(project) }
- it { is_expected.to eq(described_class.observability_url.to_s) }
+ it { is_expected.to eq("#{described_class.observability_url}/v3/tenant/#{project.id}") }
end
describe '.build_full_url' do
@@ -169,27 +163,6 @@ RSpec.describe Gitlab::Observability, feature_category: :error_tracking do
end
end
- describe '.tracing_enabled?' do
- let_it_be(:project) { create(:project, :repository) }
-
- it 'returns true if feature is enabled globally' do
- expect(described_class.tracing_enabled?(project)).to eq(true)
- end
-
- it 'returns true if feature is enabled for the project' do
- stub_feature_flags(observability_tracing: false)
- stub_feature_flags(observability_tracing: project)
-
- expect(described_class.tracing_enabled?(project)).to eq(true)
- end
-
- it 'returns false if feature is disabled globally' do
- stub_feature_flags(observability_tracing: false)
-
- expect(described_class.tracing_enabled?(project)).to eq(false)
- end
- end
-
describe '.allowed_for_action?' do
let(:group) { build_stubbed(:group) }
let(:user) { build_stubbed(:user) }
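For reference, the updated provisioning_url expectation resolves to a tenant-scoped path; assuming an observability URL of https://observe.gitlab.com, the call would look like:

    Gitlab::Observability.provisioning_url(project)
    # => "https://observe.gitlab.com/v3/tenant/#{project.id}"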
diff --git a/spec/lib/gitlab/other_markup_spec.rb b/spec/lib/gitlab/other_markup_spec.rb
index 74e2c5e26c1..34f1e0cfbc5 100644
--- a/spec/lib/gitlab/other_markup_spec.rb
+++ b/spec/lib/gitlab/other_markup_spec.rb
@@ -2,9 +2,48 @@
require 'spec_helper'
-RSpec.describe Gitlab::OtherMarkup do
+RSpec.describe Gitlab::OtherMarkup, feature_category: :wiki do
let(:context) { {} }
+  context 'when rendering reStructuredText' do
+ it 'renders' do
+ input = <<~RST
+ Header
+ ======
+
+ *emphasis*; **strong emphasis**; `interpreted text`
+ RST
+
+ output = <<~HTML
+ <h1>Header</h1>
+ <p><em>emphasis</em>; <strong>strong emphasis</strong>; <cite>interpreted text</cite></p>
+ HTML
+
+ expect(render('unimportant_name.rst', input, context)).to include(output.strip)
+ end
+
+ context 'when PlantUML is enabled' do
+ it 'generates the diagram' do
+ Gitlab::CurrentSettings.current_application_settings.update!(plantuml_enabled: true, plantuml_url: 'https://plantuml.com/plantuml')
+
+ input = <<~RST
+ .. plantuml::
+ :caption: Caption with **bold** and *italic*
+
+ Bob -> Alice: hello
+ Alice -> Bob: hi
+ RST
+
+ output = <<~HTML
+ <img class="plantuml" src="https://plantuml.com/plantuml/png/U9npoazIqBLJSCp9J4wrKiX8pSd9vm9pGA9E-Kb0iKm0o4SAt000" data-diagram="plantuml" data-diagram-src="data:text/plain;base64,Qm9iIC0+IEFsaWNlOiBoZWxsbwpBbGljZSAtPiBCb2I6IGhp">
+ <p>Caption with <strong>bold</strong> and <em>italic</em></p>
+ HTML
+
+ expect(render('unimportant_name.rst', input, context)).to include(output.strip)
+ end
+ end
+ end
+
context 'XSS Checks' do
links = {
'links' => {
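A condensed sketch of the setup the new PlantUML example depends on; render is the spec helper used above, the settings update mirrors the added test, and the input document is illustrative:

    Gitlab::CurrentSettings.current_application_settings
      .update!(plantuml_enabled: true, plantuml_url: 'https://plantuml.com/plantuml')

    html = render('doc.rst', "Header\n======\n\n*emphasis*\n", context)
    html.include?('<h1>Header</h1>') # => true, per the expectation above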
diff --git a/spec/lib/gitlab/pages/cache_control_spec.rb b/spec/lib/gitlab/pages/cache_control_spec.rb
deleted file mode 100644
index 72240f52580..00000000000
--- a/spec/lib/gitlab/pages/cache_control_spec.rb
+++ /dev/null
@@ -1,88 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Pages::CacheControl, feature_category: :pages do
- RSpec.shared_examples 'cache_control' do |type|
- it { expect(subject.cache_key).to match(/pages_domain_for_#{type}_1_*/) }
-
- describe '#clear_cache', :use_clean_rails_redis_caching do
- before do
- Rails.cache.write("pages_domain_for_#{type}_1", ['settings-hash'])
- Rails.cache.write("pages_domain_for_#{type}_1_settings-hash", 'payload')
- end
-
- it 'clears the cache' do
- cached_keys = [
- "pages_domain_for_#{type}_1_settings-hash",
- "pages_domain_for_#{type}_1"
- ]
-
- expect(::Gitlab::AppLogger)
- .to receive(:info)
- .with(
- message: 'clear pages cache',
- pages_keys: cached_keys,
- pages_type: type,
- pages_id: 1
- )
-
- expect(Rails.cache)
- .to receive(:delete_multi)
- .with(cached_keys)
-
- subject.clear_cache
- end
- end
- end
-
- describe '.for_namespace' do
- subject(:cache_control) { described_class.for_namespace(1) }
-
- it_behaves_like 'cache_control', :namespace
- end
-
- describe '.for_domain' do
- subject(:cache_control) { described_class.for_domain(1) }
-
- it_behaves_like 'cache_control', :domain
- end
-
- describe '#cache_key' do
- it 'does not change the pages config' do
- expect { described_class.new(type: :domain, id: 1).cache_key }
- .not_to change(Gitlab.config, :pages)
- end
-
- it 'is based on pages settings' do
- access_control = Gitlab.config.pages.access_control
- cache_key = described_class.new(type: :domain, id: 1).cache_key
-
- stub_config(pages: { access_control: !access_control })
-
- expect(described_class.new(type: :domain, id: 1).cache_key).not_to eq(cache_key)
- end
-
- it 'is based on the force_pages_access_control settings' do
- force_pages_access_control = ::Gitlab::CurrentSettings.force_pages_access_control
- cache_key = described_class.new(type: :domain, id: 1).cache_key
-
- ::Gitlab::CurrentSettings.force_pages_access_control = !force_pages_access_control
-
- expect(described_class.new(type: :domain, id: 1).cache_key).not_to eq(cache_key)
- end
-
- it 'caches the application settings hash' do
- expect(Rails.cache)
- .to receive(:write)
- .with('pages_domain_for_domain_1', kind_of(Set))
-
- described_class.new(type: :domain, id: 1).cache_key
- end
- end
-
- it 'fails with invalid type' do
- expect { described_class.new(type: :unknown, id: nil) }
- .to raise_error(ArgumentError, 'type must be :namespace or :domain')
- end
-end
diff --git a/spec/lib/gitlab/pages/virtual_host_finder_spec.rb b/spec/lib/gitlab/pages/virtual_host_finder_spec.rb
index 49eee772f8d..8c34968bbfc 100644
--- a/spec/lib/gitlab/pages/virtual_host_finder_spec.rb
+++ b/spec/lib/gitlab/pages/virtual_host_finder_spec.rb
@@ -40,23 +40,9 @@ RSpec.describe Gitlab::Pages::VirtualHostFinder, feature_category: :pages do
it 'returns the virtual domain when there are pages deployed for the project' do
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
- expect(virtual_domain.cache_key).to match(/pages_domain_for_domain_#{pages_domain.id}_/)
expect(virtual_domain.lookup_paths.length).to eq(1)
expect(virtual_domain.lookup_paths.first.project_id).to eq(project.id)
end
-
- context 'when :cache_pages_domain_api is disabled' do
- before do
- stub_feature_flags(cache_pages_domain_api: false)
- end
-
- it 'returns the virual domain when there are pages deployed for the project' do
- expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
- expect(virtual_domain.cache_key).to be_nil
- expect(virtual_domain.lookup_paths.length).to eq(1)
- expect(virtual_domain.lookup_paths.first.project_id).to eq(project.id)
- end
- end
end
end
@@ -76,23 +62,8 @@ RSpec.describe Gitlab::Pages::VirtualHostFinder, feature_category: :pages do
virtual_domain = described_class.new("#{project.namespace.path}.example.com").execute
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
- expect(virtual_domain.cache_key).to match(/pages_domain_for_namespace_#{project.namespace.id}_/)
expect(virtual_domain.lookup_paths.length).to eq(0)
end
-
- context 'when :cache_pages_domain_api is disabled' do
- before do
- stub_feature_flags(cache_pages_domain_api: false)
- end
-
- it 'returns the virual domain with no lookup_paths' do
- virtual_domain = described_class.new("#{project.namespace.path}.example.com".downcase).execute
-
- expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
- expect(virtual_domain.cache_key).to be_nil
- expect(virtual_domain.lookup_paths.length).to eq(0)
- end
- end
end
context 'when there are pages deployed for the project' do
@@ -111,7 +82,6 @@ RSpec.describe Gitlab::Pages::VirtualHostFinder, feature_category: :pages do
virtual_domain = described_class.new("#{project.namespace.path}.example.com").execute
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
- expect(virtual_domain.cache_key).to match(/pages_domain_for_namespace_#{project.namespace.id}_/)
expect(virtual_domain.lookup_paths.length).to eq(1)
expect(virtual_domain.lookup_paths.first.project_id).to eq(project.id)
end
@@ -120,25 +90,9 @@ RSpec.describe Gitlab::Pages::VirtualHostFinder, feature_category: :pages do
virtual_domain = described_class.new("#{project.namespace.path}.Example.com").execute
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
- expect(virtual_domain.cache_key).to match(/pages_domain_for_namespace_#{project.namespace.id}_/)
expect(virtual_domain.lookup_paths.length).to eq(1)
expect(virtual_domain.lookup_paths.first.project_id).to eq(project.id)
end
-
- context 'when :cache_pages_domain_api is disabled' do
- before_all do
- stub_feature_flags(cache_pages_domain_api: false)
- end
-
- it 'returns the virual domain when there are pages deployed for the project' do
- virtual_domain = described_class.new("#{project.namespace.path}.example.com").execute
-
- expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
- expect(virtual_domain.cache_key).to be_nil
- expect(virtual_domain.lookup_paths.length).to eq(1)
- expect(virtual_domain.lookup_paths.first.project_id).to eq(project.id)
- end
- end
end
end
@@ -187,18 +141,6 @@ RSpec.describe Gitlab::Pages::VirtualHostFinder, feature_category: :pages do
expect(virtual_domain.lookup_paths.first.project_id).to eq(project.id)
end
end
-
- context 'when :cache_pages_domain_api is disabled' do
- before do
- stub_feature_flags(cache_pages_domain_api: false)
- end
-
- it 'returns the virual domain when there are pages deployed for the project' do
- expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
- expect(virtual_domain.lookup_paths.length).to eq(1)
- expect(virtual_domain.lookup_paths.first.project_id).to eq(project.id)
- end
- end
end
end
diff --git a/spec/lib/gitlab/pages_spec.rb b/spec/lib/gitlab/pages_spec.rb
index 9f85efd56e6..c20956788ac 100644
--- a/spec/lib/gitlab/pages_spec.rb
+++ b/spec/lib/gitlab/pages_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Pages do
+RSpec.describe Gitlab::Pages, feature_category: :pages do
using RSpec::Parameterized::TableSyntax
let(:pages_secret) { SecureRandom.random_bytes(Gitlab::Pages::SECRET_LENGTH) }
@@ -48,4 +48,89 @@ RSpec.describe Gitlab::Pages do
it { is_expected.to eq(result) }
end
end
+
+ describe '.multiple_versions_enabled_for?' do
+ context 'when project is nil' do
+ it 'returns false' do
+ expect(described_class.multiple_versions_enabled_for?(nil)).to eq(false)
+ end
+ end
+
+ context 'when a project is given' do
+ let_it_be(:project) { create(:project) }
+
+ where(:setting, :feature_flag, :license, :result) do
+ false | false | false | false
+ false | false | true | false
+ false | true | false | false
+ false | true | true | false
+ true | false | false | false
+ true | false | true | false
+ true | true | false | false
+ true | true | true | true
+ end
+
+ with_them do
+ let_it_be(:project) { create(:project) }
+
+ subject { described_class.multiple_versions_enabled_for?(project) }
+
+ before do
+ stub_licensed_features(pages_multiple_versions: license)
+ stub_feature_flags(pages_multiple_versions_setting: feature_flag)
+ project.project_setting.update!(pages_multiple_versions_enabled: setting)
+ end
+
+ # this feature is only available in EE
+ it { is_expected.to eq(result && Gitlab.ee?) }
+ end
+ end
+ end
+
+ describe '#add_unique_domain_to' do
+ let(:project) { build(:project) }
+
+ context 'when pages is not enabled' do
+ before do
+ stub_pages_setting(enabled: false)
+ end
+
+ it 'does not set pages unique domain' do
+ expect(Gitlab::Pages::RandomDomain).not_to receive(:generate)
+
+ described_class.add_unique_domain_to(project)
+
+ expect(project.project_setting.pages_unique_domain_enabled).to eq(false)
+ expect(project.project_setting.pages_unique_domain).to eq(nil)
+ end
+ end
+
+ context 'when pages is enabled' do
+ before do
+ stub_pages_setting(enabled: true)
+ end
+
+ it 'enables unique domain by default' do
+ allow(Gitlab::Pages::RandomDomain)
+ .to receive(:generate)
+ .and_return('unique-domain')
+
+ described_class.add_unique_domain_to(project)
+
+ expect(project.project_setting.pages_unique_domain_enabled).to eq(true)
+ expect(project.project_setting.pages_unique_domain).to eq('unique-domain')
+ end
+
+      context 'when project already has a unique domain' do
+        it 'does not change the original unique domain' do
+ expect(Gitlab::Pages::RandomDomain).not_to receive(:generate)
+ project.project_setting.update!(pages_unique_domain: 'unique-domain')
+
+ described_class.add_unique_domain_to(project.reload)
+
+ expect(project.project_setting.pages_unique_domain).to eq('unique-domain')
+ end
+ end
+ end
+ end
end
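A brief usage sketch of the two class methods covered above; the project is illustrative and the EE-only caveat noted in the spec still applies:

    Gitlab::Pages.add_unique_domain_to(project)           # no-op when Pages is disabled or a domain already exists
    project.project_setting.pages_unique_domain           # => "unique-domain"

    Gitlab::Pages.multiple_versions_enabled_for?(project) # true only when setting, feature flag and license all allow it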
diff --git a/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb b/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
index 7cee65c13f7..4128f745ce7 100644
--- a/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
+++ b/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
@@ -6,20 +6,52 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
subject { described_class }
describe '.available_for_type?' do
- it 'returns true for Group' do
- expect(subject.available_for_type?(Group.all)).to be_truthy
- end
+ context 'with api_keyset_pagination_multi_order FF disabled' do
+ before do
+ stub_feature_flags(api_keyset_pagination_multi_order: false)
+ end
- it 'returns true for Ci::Build' do
- expect(subject.available_for_type?(Ci::Build.all)).to be_truthy
- end
+ it 'returns true for Group' do
+ expect(subject.available_for_type?(Group.all)).to be_truthy
+ end
- it 'returns true for Packages::BuildInfo' do
- expect(subject.available_for_type?(Packages::BuildInfo.all)).to be_truthy
+ it 'returns true for Ci::Build' do
+ expect(subject.available_for_type?(Ci::Build.all)).to be_truthy
+ end
+
+ it 'returns true for Packages::BuildInfo' do
+ expect(subject.available_for_type?(Packages::BuildInfo.all)).to be_truthy
+ end
+
+      it 'returns false for User' do
+ expect(subject.available_for_type?(User.all)).to be_falsey
+ end
end
- it 'return false for other types of relations' do
- expect(subject.available_for_type?(User.all)).to be_falsey
+ context 'with api_keyset_pagination_multi_order FF enabled' do
+ before do
+ stub_feature_flags(api_keyset_pagination_multi_order: true)
+ end
+
+ it 'returns true for Group' do
+ expect(subject.available_for_type?(Group.all)).to be_truthy
+ end
+
+ it 'returns true for Ci::Build' do
+ expect(subject.available_for_type?(Ci::Build.all)).to be_truthy
+ end
+
+ it 'returns true for Packages::BuildInfo' do
+ expect(subject.available_for_type?(Packages::BuildInfo.all)).to be_truthy
+ end
+
+ it 'returns true for User' do
+ expect(subject.available_for_type?(User.all)).to be_truthy
+ end
+
+      it 'returns false for other types of relations' do
+ expect(subject.available_for_type?(Issue.all)).to be_falsey
+ end
end
end
@@ -58,7 +90,7 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
end
it 'return false for other types of relations' do
- expect(subject.available?(cursor_based_request_context, User.all)).to be_falsey
+ expect(subject.available?(cursor_based_request_context, Issue.all)).to be_falsey
expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_falsey
expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_falsey
end
@@ -68,16 +100,48 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
let(:order_by) { :id }
let(:sort) { :desc }
- it 'returns true for Ci::Build' do
- expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_truthy
- end
+ context 'with api_keyset_pagination_multi_order FF disabled' do
+ before do
+ stub_feature_flags(api_keyset_pagination_multi_order: false)
+ end
+
+ it 'returns true for Ci::Build' do
+ expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_truthy
+ end
+
+ it 'returns true for AuditEvent' do
+ expect(subject.available?(cursor_based_request_context, AuditEvent.all)).to be_truthy
+ end
- it 'returns true for AuditEvent' do
- expect(subject.available?(cursor_based_request_context, AuditEvent.all)).to be_truthy
+ it 'returns true for Packages::BuildInfo' do
+ expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_truthy
+ end
+
+ it 'returns false for User' do
+ expect(subject.available?(cursor_based_request_context, User.all)).to be_falsey
+ end
end
- it 'returns true for Packages::BuildInfo' do
- expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_truthy
+ context 'with api_keyset_pagination_multi_order FF enabled' do
+ before do
+ stub_feature_flags(api_keyset_pagination_multi_order: true)
+ end
+
+ it 'returns true for Ci::Build' do
+ expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_truthy
+ end
+
+ it 'returns true for AuditEvent' do
+ expect(subject.available?(cursor_based_request_context, AuditEvent.all)).to be_truthy
+ end
+
+ it 'returns true for Packages::BuildInfo' do
+ expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_truthy
+ end
+
+ it 'returns true for User' do
+ expect(subject.available?(cursor_based_request_context, User.all)).to be_truthy
+ end
end
end
@@ -90,7 +154,7 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
end
it 'return false for other types of relations' do
- expect(subject.available?(cursor_based_request_context, User.all)).to be_falsey
+ expect(subject.available?(cursor_based_request_context, Issue.all)).to be_falsey
end
end
end
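Restating the effect of the new flag as a hedged example: with api_keyset_pagination_multi_order enabled, User relations become eligible for cursor-based keyset pagination, while unsupported relations such as Issue still are not:

    Gitlab::Pagination::CursorBasedKeyset.available_for_type?(User.all)  # => true with the flag enabled
    Gitlab::Pagination::CursorBasedKeyset.available_for_type?(Issue.all) # => false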
diff --git a/spec/lib/gitlab/patch/redis_cache_store_spec.rb b/spec/lib/gitlab/patch/redis_cache_store_spec.rb
index 5a674d443bb..21c256fdbbe 100644
--- a/spec/lib/gitlab/patch/redis_cache_store_spec.rb
+++ b/spec/lib/gitlab/patch/redis_cache_store_spec.rb
@@ -34,36 +34,58 @@ RSpec.describe Gitlab::Patch::RedisCacheStore, :use_clean_rails_redis_caching, f
end
context 'when reading large amount of keys' do
- it 'batches get into pipelines of 100' do
- cache.redis.with do |redis|
- normal_cluster = !redis.is_a?(Gitlab::Redis::MultiStore) && Gitlab::Redis::ClusterUtil.cluster?(redis)
- multistore_cluster = redis.is_a?(Gitlab::Redis::MultiStore) &&
- ::Gitlab::Redis::ClusterUtil.cluster?(redis.default_store)
+ let(:input_size) { 2000 }
+ let(:chunk_size) { 1000 }
+
+ shared_examples 'read large amount of keys' do
+ it 'breaks the input into 2 chunks for redis cluster' do
+ cache.redis.with do |redis|
+ normal_cluster = !redis.is_a?(Gitlab::Redis::MultiStore) && Gitlab::Redis::ClusterUtil.cluster?(redis)
+ multistore_cluster = redis.is_a?(Gitlab::Redis::MultiStore) &&
+ ::Gitlab::Redis::ClusterUtil.cluster?(redis.default_store)
+
+ if normal_cluster || multistore_cluster
+ expect_next_instances_of(Gitlab::Redis::CrossSlot::Pipeline, 2) do |pipeline|
+ obj = instance_double(::Redis)
+ expect(pipeline).to receive(:pipelined).and_yield(obj)
+ expect(obj).to receive(:get).exactly(chunk_size).times
+ end
+ else
+ expect(redis).to receive(:mget).and_call_original
+ end
+ end
- if normal_cluster || multistore_cluster
- expect(redis).to receive(:pipelined).at_least(2).and_call_original
- else
- expect(redis).to receive(:mget).and_call_original
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ cache.read_multi(*Array.new(input_size) { |i| i })
end
end
+ end
- Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
- cache.read_multi(*Array.new(101) { |i| i })
+ context 'when GITLAB_REDIS_CLUSTER_PIPELINE_BATCH_LIMIT is smaller than the default' do
+ before do
+ stub_env('GITLAB_REDIS_CLUSTER_PIPELINE_BATCH_LIMIT', 10)
end
+
+ it_behaves_like 'read large amount of keys'
end
- end
- end
- context 'when cache is Rails.cache' do
- let(:cache) { Rails.cache }
+ context 'when GITLAB_REDIS_CLUSTER_PIPELINE_BATCH_LIMIT is larger than the default' do
+ let(:input_size) { 4000 }
+ let(:chunk_size) { 2000 }
- context 'when reading using secondary store as default' do
- before do
- stub_feature_flags(use_primary_store_as_default_for_cache: false)
+ before do
+ stub_env('GITLAB_REDIS_CLUSTER_PIPELINE_BATCH_LIMIT', chunk_size)
+ end
+
+ it_behaves_like 'read large amount of keys'
end
- it_behaves_like 'reading using cache stores'
+ it_behaves_like 'read large amount of keys'
end
+ end
+
+ context 'when cache is Rails.cache' do
+ let(:cache) { Rails.cache }
it_behaves_like 'reading using cache stores'
end
@@ -97,7 +119,7 @@ RSpec.describe Gitlab::Patch::RedisCacheStore, :use_clean_rails_redis_caching, f
context 'when deleting large amount of keys' do
before do
- 200.times { |i| cache.write(i, i) }
+ 2000.times { |i| cache.write(i, i) }
end
it 'calls pipeline multiple times' do
@@ -113,9 +135,9 @@ RSpec.describe Gitlab::Patch::RedisCacheStore, :use_clean_rails_redis_caching, f
expect(
Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
- cache.delete_multi(Array(0..199))
+ cache.delete_multi(Array(0..1999))
end
- ).to eq(200)
+ ).to eq(2000)
end
end
end
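The updated expectations assume the pipeline batch size is driven by GITLAB_REDIS_CLUSTER_PIPELINE_BATCH_LIMIT (1000 by default, judging by the chunk sizes above). A schematic sketch of that chunking; apart from the env var and the Gitlab::Redis::CrossSlot::Pipeline class named in the spec, the details are assumptions:

    # Schematic only: read keys in env-sized chunks when running against Redis Cluster.
    batch_limit = ENV.fetch('GITLAB_REDIS_CLUSTER_PIPELINE_BATCH_LIMIT', 1000).to_i
    keys.each_slice(batch_limit) do |chunk|
      Gitlab::Redis::CrossSlot::Pipeline.new(redis).pipelined do |pipeline|
        chunk.each { |key| pipeline.get(key) }
      end
    end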
diff --git a/spec/lib/gitlab/patch/sidekiq_scheduled_enq_spec.rb b/spec/lib/gitlab/patch/sidekiq_scheduled_enq_spec.rb
new file mode 100644
index 00000000000..f57257cd1c0
--- /dev/null
+++ b/spec/lib/gitlab/patch/sidekiq_scheduled_enq_spec.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Patch::SidekiqScheduledEnq, :clean_gitlab_redis_queues, feature_category: :scalability do
+ describe '#enqueue_jobs' do
+ let_it_be(:payload) { {} }
+
+ before do
+ allow(Sidekiq).to receive(:load_json).and_return(payload)
+
+ # stub data in both namespaces
+ Sidekiq.redis { |c| c.zadd('schedule', 100, 'dummy') }
+ Gitlab::Redis::Queues.with { |c| c.zadd('schedule', 100, 'dummy') }
+ end
+
+ subject { Sidekiq::Scheduled::Enq.new.enqueue_jobs }
+
+ it 'polls both namespaces by default' do
+ expect(Sidekiq::Client).to receive(:push).with(payload).twice
+
+ subject
+
+ Sidekiq.redis do |conn|
+ expect(conn.zcard('schedule')).to eq(0)
+ end
+
+ Gitlab::Redis::Queues.with do |conn|
+ expect(conn.zcard('schedule')).to eq(0)
+ end
+ end
+
+ context 'when SIDEKIQ_ENABLE_DUAL_NAMESPACE_POLLING is disabled' do
+ before do
+ stub_env('SIDEKIQ_ENABLE_DUAL_NAMESPACE_POLLING', 'false')
+ end
+
+ it 'polls via Sidekiq.redis only' do
+ expect(Sidekiq::Client).to receive(:push).with(payload).once
+
+ subject
+
+ Sidekiq.redis do |conn|
+ expect(conn.zcard('schedule')).to eq(0)
+ end
+
+ Gitlab::Redis::Queues.with do |conn|
+ expect(conn.zcard('schedule')).to eq(1)
+ end
+ end
+ end
+
+ context 'when both envvar are enabled' do
+ around do |example|
+ # runs the zadd to ensure it goes into namespaced set
+ Sidekiq.redis { |c| c.zadd('schedule', 100, 'dummy') }
+
+ holder = Sidekiq.redis_pool
+
+ # forcibly replace Sidekiq.redis since this is set in config/initializer/sidekiq.rb
+ Sidekiq.redis = Gitlab::Redis::Queues.pool
+
+ example.run
+
+ ensure
+ Sidekiq.redis = holder
+ end
+
+ before do
+ stub_env('SIDEKIQ_ENQUEUE_NON_NAMESPACED', 'true')
+ stub_env('SIDEKIQ_ENABLE_DUAL_NAMESPACE_POLLING', 'true')
+ end
+
+ it 'polls both sets' do
+ expect(Sidekiq::Client).to receive(:push).with(payload).twice
+
+ subject
+
+ Sidekiq.redis do |conn|
+ expect(conn.zcard('schedule')).to eq(0)
+ end
+
+ Gitlab::Redis::Queues.with do |conn|
+ expect(conn.zcard('schedule')).to eq(0)
+ end
+ end
+ end
+ end
+end
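In plain terms, the new spec pins down which stores the patched enqueuer polls. A schematic summary; the env var name and the 'schedule' set come from the spec, everything else is illustrative:

    # Schematic: the namespaced Sidekiq store is always polled; the plain
    # Gitlab::Redis::Queues store is polled too unless dual polling is disabled.
    connections = [Sidekiq.method(:redis)]
    connections << Gitlab::Redis::Queues.method(:with) unless ENV['SIDEKIQ_ENABLE_DUAL_NAMESPACE_POLLING'] == 'false'
    # each due entry in 'schedule' is then re-pushed via Sidekiq::Client.push(payload)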
diff --git a/spec/lib/gitlab/prometheus/additional_metrics_parser_spec.rb b/spec/lib/gitlab/prometheus/additional_metrics_parser_spec.rb
deleted file mode 100644
index 559557f9313..00000000000
--- a/spec/lib/gitlab/prometheus/additional_metrics_parser_spec.rb
+++ /dev/null
@@ -1,248 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Prometheus::AdditionalMetricsParser do
- include Prometheus::MetricBuilders
-
- let(:parser_error_class) { Gitlab::Prometheus::ParsingError }
-
- describe '#load_groups_from_yaml' do
- subject { described_class.load_groups_from_yaml('dummy.yaml') }
-
- describe 'parsing sample yaml' do
- let(:sample_yaml) do
- <<-EOF.strip_heredoc
- - group: group_a
- priority: 1
- metrics:
- - title: "title"
- required_metrics: [ metric_a, metric_b ]
- weight: 1
- queries: [{ query_range: 'query_range_a', label: label, unit: unit }]
- - title: "title"
- required_metrics: [metric_a]
- weight: 1
- queries: [{ query_range: 'query_range_empty' }]
- - group: group_b
- priority: 1
- metrics:
- - title: title
- required_metrics: ['metric_a']
- weight: 1
- queries: [{query_range: query_range_a}]
- EOF
- end
-
- before do
- allow(described_class).to receive(:load_yaml_file) { YAML.safe_load(sample_yaml) }
- end
-
- it 'parses to two metric groups with 2 and 1 metric respectively' do
- expect(subject.count).to eq(2)
- expect(subject[0].metrics.count).to eq(2)
- expect(subject[1].metrics.count).to eq(1)
- end
-
- it 'provide group data' do
- expect(subject[0]).to have_attributes(name: 'group_a', priority: 1)
- expect(subject[1]).to have_attributes(name: 'group_b', priority: 1)
- end
-
- it 'provides metrics data' do
- metrics = subject.flat_map(&:metrics)
-
- expect(metrics.count).to eq(3)
- expect(metrics[0]).to have_attributes(title: 'title', required_metrics: %w(metric_a metric_b), weight: 1)
- expect(metrics[1]).to have_attributes(title: 'title', required_metrics: %w(metric_a), weight: 1)
- expect(metrics[2]).to have_attributes(title: 'title', required_metrics: %w{metric_a}, weight: 1)
- end
-
- it 'provides query data' do
- queries = subject.flat_map(&:metrics).flat_map(&:queries)
-
- expect(queries.count).to eq(3)
- expect(queries[0]).to eq(query_range: 'query_range_a', label: 'label', unit: 'unit')
- expect(queries[1]).to eq(query_range: 'query_range_empty')
- expect(queries[2]).to eq(query_range: 'query_range_a')
- end
- end
-
- shared_examples 'required field' do |field_name|
- context "when #{field_name} is nil" do
- before do
- allow(described_class).to receive(:load_yaml_file) { YAML.safe_load(field_missing) }
- end
-
- it 'throws parsing error' do
- expect { subject }.to raise_error(parser_error_class, /#{field_name} can't be blank/i)
- end
- end
-
- context "when #{field_name} are not specified" do
- before do
- allow(described_class).to receive(:load_yaml_file) { YAML.safe_load(field_nil) }
- end
-
- it 'throws parsing error' do
- expect { subject }.to raise_error(parser_error_class, /#{field_name} can't be blank/i)
- end
- end
- end
-
- describe 'group required fields' do
- it_behaves_like 'required field', 'metrics' do
- let(:field_nil) do
- <<-EOF.strip_heredoc
- - group: group_a
- priority: 1
- metrics:
- EOF
- end
-
- let(:field_missing) do
- <<-EOF.strip_heredoc
- - group: group_a
- priority: 1
- EOF
- end
- end
-
- it_behaves_like 'required field', 'name' do
- let(:field_nil) do
- <<-EOF.strip_heredoc
- - group:
- priority: 1
- metrics: []
- EOF
- end
-
- let(:field_missing) do
- <<-EOF.strip_heredoc
- - priority: 1
- metrics: []
- EOF
- end
- end
-
- it_behaves_like 'required field', 'priority' do
- let(:field_nil) do
- <<-EOF.strip_heredoc
- - group: group_a
- priority:
- metrics: []
- EOF
- end
-
- let(:field_missing) do
- <<-EOF.strip_heredoc
- - group: group_a
- metrics: []
- EOF
- end
- end
- end
-
- describe 'metrics fields parsing' do
- it_behaves_like 'required field', 'title' do
- let(:field_nil) do
- <<-EOF.strip_heredoc
- - group: group_a
- priority: 1
- metrics:
- - title:
- required_metrics: []
- weight: 1
- queries: []
- EOF
- end
-
- let(:field_missing) do
- <<-EOF.strip_heredoc
- - group: group_a
- priority: 1
- metrics:
- - required_metrics: []
- weight: 1
- queries: []
- EOF
- end
- end
-
- it_behaves_like 'required field', 'required metrics' do
- let(:field_nil) do
- <<-EOF.strip_heredoc
- - group: group_a
- priority: 1
- metrics:
- - title: title
- required_metrics:
- weight: 1
- queries: []
- EOF
- end
-
- let(:field_missing) do
- <<-EOF.strip_heredoc
- - group: group_a
- priority: 1
- metrics:
- - title: title
- weight: 1
- queries: []
- EOF
- end
- end
-
- it_behaves_like 'required field', 'weight' do
- let(:field_nil) do
- <<-EOF.strip_heredoc
- - group: group_a
- priority: 1
- metrics:
- - title: title
- required_metrics: []
- weight:
- queries: []
- EOF
- end
-
- let(:field_missing) do
- <<-EOF.strip_heredoc
- - group: group_a
- priority: 1
- metrics:
- - title: title
- required_metrics: []
- queries: []
- EOF
- end
- end
-
- it_behaves_like 'required field', :queries do
- let(:field_nil) do
- <<-EOF.strip_heredoc
- - group: group_a
- priority: 1
- metrics:
- - title: title
- required_metrics: []
- weight: 1
- queries:
- EOF
- end
-
- let(:field_missing) do
- <<-EOF.strip_heredoc
- - group: group_a
- priority: 1
- metrics:
- - title: title
- required_metrics: []
- weight: 1
- EOF
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb b/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb
deleted file mode 100644
index b2350eff9f9..00000000000
--- a/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Prometheus::Queries::AdditionalMetricsDeploymentQuery do
- around do |example|
- travel_to(Time.local(2008, 9, 1, 12, 0, 0)) { example.run }
- end
-
- include_examples 'additional metrics query' do
- let(:project) { create(:project, :repository) }
- let(:deployment) { create(:deployment, environment: environment, project: project) }
- let(:query_params) { [deployment.id] }
-
- it 'queries using specific time' do
- expect(client).to receive(:query_range).with(anything,
- start_time: (deployment.created_at - 30.minutes).to_f,
- end_time: (deployment.created_at + 30.minutes).to_f)
-
- expect(query_result).not_to be_nil
- end
- end
-end
diff --git a/spec/lib/gitlab/prometheus/queries/additional_metrics_environment_query_spec.rb b/spec/lib/gitlab/prometheus/queries/additional_metrics_environment_query_spec.rb
deleted file mode 100644
index d0dee2ad366..00000000000
--- a/spec/lib/gitlab/prometheus/queries/additional_metrics_environment_query_spec.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Prometheus::Queries::AdditionalMetricsEnvironmentQuery do
- around do |example|
- freeze_time { example.run }
- end
-
- include_examples 'additional metrics query' do
- let(:query_params) { [environment.id] }
-
- it 'queries using specific time' do
- expect(client).to receive(:query_range)
- .with(anything, start_time: 8.hours.ago.to_f, end_time: Time.now.to_f)
- expect(query_result).not_to be_nil
- end
-
- context 'when start and end time parameters are provided' do
- let(:query_params) { [environment.id, start_time, end_time] }
-
- context 'as unix timestamps' do
- let(:start_time) { 4.hours.ago.to_f }
- let(:end_time) { 2.hours.ago.to_f }
-
- it 'queries using the provided times' do
- expect(client).to receive(:query_range)
- .with(anything, start_time: start_time, end_time: end_time)
- expect(query_result).not_to be_nil
- end
- end
-
- context 'as Date/Time objects' do
- let(:start_time) { 4.hours.ago }
- let(:end_time) { 2.hours.ago }
-
- it 'queries using the provided times converted to unix' do
- expect(client).to receive(:query_range)
- .with(anything, start_time: start_time.to_f, end_time: end_time.to_f)
- expect(query_result).not_to be_nil
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/rack_attack/request_spec.rb b/spec/lib/gitlab/rack_attack/request_spec.rb
index e8433d99d15..9d2144f75db 100644
--- a/spec/lib/gitlab/rack_attack/request_spec.rb
+++ b/spec/lib/gitlab/rack_attack/request_spec.rb
@@ -249,6 +249,39 @@ RSpec.describe Gitlab::RackAttack::Request do
end
end
+ describe '#get_request_protected_path?' do
+ subject { request.get_request_protected_path? }
+
+ before do
+ stub_application_setting(
+ protected_paths_for_get_request: %w[/protected /secure])
+ end
+
+ where(:path, :expected) do
+ '/' | false
+ '/groups' | false
+ '/foo/protected' | false
+ '/foo/secure' | false
+
+ '/protected' | true
+ '/secure' | true
+ '/secure/' | true
+ '/secure/foo' | true
+ end
+
+ with_them do
+ it { is_expected.to eq(expected) }
+
+ context 'when the application is mounted at a relative URL' do
+ before do
+ stub_config_setting(relative_url_root: '/gitlab/root')
+ end
+
+ it { is_expected.to eq(expected) }
+ end
+ end
+ end
+
describe '#frontend_request?', :allow_forgery_protection do
subject { request.send(:frontend_request?) }
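A hedged illustration of the predicate driven by the new table; protected_paths_for_get_request is the setting stubbed above, while the matching logic below is only an assumption that reproduces the expected truth values (the production check also handles a relative URL root):

    # Illustrative only: a GET path counts as protected when it falls under a configured prefix.
    def protected_get_path?(path)
      Gitlab::CurrentSettings.protected_paths_for_get_request.any? do |prefix|
        path == prefix || path.start_with?("#{prefix.chomp('/')}/")
      end
    end

    protected_get_path?('/secure/foo')    # => true
    protected_get_path?('/foo/protected') # => false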
diff --git a/spec/lib/gitlab/redis/chat_spec.rb b/spec/lib/gitlab/redis/chat_spec.rb
index 7a008580936..f9080b4409f 100644
--- a/spec/lib/gitlab/redis/chat_spec.rb
+++ b/spec/lib/gitlab/redis/chat_spec.rb
@@ -2,6 +2,6 @@
require 'spec_helper'
-RSpec.describe Gitlab::Redis::Chat, feature_category: :no_category do # rubocop: disable RSpec/InvalidFeatureCategory
+RSpec.describe Gitlab::Redis::Chat, feature_category: :ai_abstraction_layer do
include_examples "redis_new_instance_shared_examples", 'chat', Gitlab::Redis::Cache
end
diff --git a/spec/lib/gitlab/redis/multi_store_spec.rb b/spec/lib/gitlab/redis/multi_store_spec.rb
index e15375c88c7..ce21c2269cc 100644
--- a/spec/lib/gitlab/redis/multi_store_spec.rb
+++ b/spec/lib/gitlab/redis/multi_store_spec.rb
@@ -1130,4 +1130,104 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
end
end
+
+ # NOTE: for pub/sub, unit tests are favoured over integration tests to avoid long polling
+  # with threads, which could lead to flaky specs. The multiplexing behaviour is verified in
+ # 'with WRITE redis commands' and 'with READ redis commands' contexts.
+ context 'with pub/sub commands' do
+ let(:channel_name) { 'chanA' }
+ let(:message) { "msg" }
+
+ shared_examples 'publishes to stores' do
+ it 'publishes to one or more stores' do
+ expect(stores).to all(receive(:publish))
+
+ multi_store.publish(channel_name, message)
+ end
+ end
+
+ shared_examples 'subscribes and unsubscribes' do
+ it 'subscribes to the default store' do
+ expect(default_store).to receive(:subscribe)
+ expect(non_default_store).not_to receive(:subscribe)
+
+ multi_store.subscribe(channel_name)
+ end
+
+      it 'unsubscribes from the default store' do
+ expect(default_store).to receive(:unsubscribe)
+ expect(non_default_store).not_to receive(:unsubscribe)
+
+ multi_store.unsubscribe
+ end
+ end
+
+ context 'when using both stores' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
+ end
+
+ it_behaves_like 'publishes to stores' do
+ let(:stores) { [primary_store, secondary_store] }
+ end
+
+ context 'with primary store set as default' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+ it_behaves_like 'subscribes and unsubscribes' do
+ let(:default_store) { primary_store }
+ let(:non_default_store) { secondary_store }
+ end
+ end
+
+ context 'with secondary store set as default' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it_behaves_like 'subscribes and unsubscribes' do
+ let(:default_store) { secondary_store }
+ let(:non_default_store) { primary_store }
+ end
+ end
+ end
+
+ context 'when only using the primary store' do
+ before do
+ stub_feature_flags(
+ use_primary_and_secondary_stores_for_test_store: false,
+ use_primary_store_as_default_for_test_store: true
+ )
+ end
+
+ it_behaves_like 'subscribes and unsubscribes' do
+ let(:default_store) { primary_store }
+ let(:non_default_store) { secondary_store }
+ end
+
+ it_behaves_like 'publishes to stores' do
+ let(:stores) { [primary_store] }
+ end
+ end
+
+ context 'when only using the secondary store' do
+ before do
+ stub_feature_flags(
+ use_primary_and_secondary_stores_for_test_store: false,
+ use_primary_store_as_default_for_test_store: false
+ )
+ end
+
+ it_behaves_like 'subscribes and unsubscribes' do
+ let(:default_store) { secondary_store }
+ let(:non_default_store) { primary_store }
+ end
+
+ it_behaves_like 'publishes to stores' do
+ let(:stores) { [secondary_store] }
+ end
+ end
+ end
end
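Summarising the pub/sub contract exercised above: publish fans out to every active store, while subscribe and unsubscribe only touch the default store. A minimal sketch with an illustrative channel and message:

    multi_store.publish('chanA', 'msg') # one or both stores, per the use_primary_and_secondary_stores_* flag
    multi_store.subscribe('chanA')      # default store only
    multi_store.unsubscribe             # default store only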
diff --git a/spec/lib/gitlab/redis/pubsub_spec.rb b/spec/lib/gitlab/redis/pubsub_spec.rb
new file mode 100644
index 00000000000..e196d02116e
--- /dev/null
+++ b/spec/lib/gitlab/redis/pubsub_spec.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::Pubsub, feature_category: :redis do
+ include_examples "redis_new_instance_shared_examples", 'pubsub', Gitlab::Redis::SharedState
+ include_examples "redis_shared_examples"
+end
diff --git a/spec/lib/gitlab/redis/queues_metadata_spec.rb b/spec/lib/gitlab/redis/queues_metadata_spec.rb
new file mode 100644
index 00000000000..693e8074b45
--- /dev/null
+++ b/spec/lib/gitlab/redis/queues_metadata_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::QueuesMetadata, feature_category: :redis do
+ include_examples "redis_new_instance_shared_examples", 'queues_metadata', Gitlab::Redis::Queues
+ include_examples "redis_shared_examples"
+
+ describe '#pool' do
+ let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" }
+ let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" }
+
+ subject { described_class.pool }
+
+ around do |example|
+ clear_pool
+ example.run
+ ensure
+ clear_pool
+ end
+
+ before do
+ allow(described_class).to receive(:config_file_name).and_return(config_new_format_host)
+ allow(Gitlab::Redis::Queues).to receive(:config_file_name).and_return(config_new_format_socket)
+ end
+
+ it 'instantiates an instance of MultiStore' do
+ subject.with do |redis_instance|
+ expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)
+
+ expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99")
+ expect(redis_instance.secondary_store.connection[:id]).to eq("unix:///path/to/redis.sock/0")
+
+ expect(redis_instance.instance_name).to eq('QueuesMetadata')
+ end
+ end
+
+ it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_queues_metadata,
+ :use_primary_store_as_default_for_queues_metadata
+ end
+end
diff --git a/spec/lib/gitlab/redis/etag_cache_spec.rb b/spec/lib/gitlab/redis/workhorse_spec.rb
index 182a41bac80..46931a6afcb 100644
--- a/spec/lib/gitlab/redis/etag_cache_spec.rb
+++ b/spec/lib/gitlab/redis/workhorse_spec.rb
@@ -2,28 +2,22 @@
require 'spec_helper'
-RSpec.describe Gitlab::Redis::EtagCache, feature_category: :shared do
- # Note: this is a pseudo-store in front of `Cache`, meant only as a tool
- # to move away from `SharedState` for etag cache data. Thus, we use the
- # same store configuration as the former.
- let(:instance_specific_config_file) { "config/redis.cache.yml" }
-
+RSpec.describe Gitlab::Redis::Workhorse, feature_category: :scalability do
+ include_examples "redis_new_instance_shared_examples", 'workhorse', Gitlab::Redis::SharedState
include_examples "redis_shared_examples"
describe '#pool' do
let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" }
let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" }
- let(:rails_root) { mktmpdir }
subject { described_class.pool }
before do
- # Override rails root to avoid having our fixtures overwritten by `redis.yml` if it exists
- allow(Gitlab::Redis::SharedState).to receive(:rails_root).and_return(rails_root)
- allow(Gitlab::Redis::Cache).to receive(:rails_root).and_return(rails_root)
+ allow(described_class).to receive(:config_file_name).and_return(config_new_format_host)
- allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_host)
- allow(Gitlab::Redis::Cache).to receive(:config_file_name).and_return(config_new_format_socket)
+ # Override rails root to avoid having our fixtures overwritten by `redis.yml` if it exists
+ allow(Gitlab::Redis::SharedState).to receive(:rails_root).and_return(mktmpdir)
+ allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_socket)
end
around do |example|
@@ -37,20 +31,14 @@ RSpec.describe Gitlab::Redis::EtagCache, feature_category: :shared do
subject.with do |redis_instance|
expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)
- expect(redis_instance.primary_store.connection[:id]).to eq("unix:///path/to/redis.sock/0")
- expect(redis_instance.secondary_store.connection[:id]).to eq("redis://test-host:6379/99")
+ expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99")
+ expect(redis_instance.secondary_store.connection[:id]).to eq("unix:///path/to/redis.sock/0")
- expect(redis_instance.instance_name).to eq('EtagCache')
+ expect(redis_instance.instance_name).to eq('Workhorse')
end
end
- it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_etag_cache,
- :use_primary_store_as_default_for_etag_cache
- end
-
- describe '#store_name' do
- it 'returns the name of the Cache store' do
- expect(described_class.store_name).to eq('Cache')
- end
+ it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_workhorse,
+ :use_primary_store_as_default_for_workhorse
end
end
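Both the QueuesMetadata and Workhorse specs above lean on redis_new_instance_shared_examples, whose second argument names the instance the new Redis class falls back to (Queues and SharedState respectively), and their #pool expectations check that the resulting MultiStore pairs the new class's own config (the host-URL fixture) with the fallback's config (the socket fixture). A loose, hypothetical sketch of that pairing, not the real pool code:

# Hypothetical sketch of what the #pool expectations assert: primary connection
# from the new wrapper's own config file, secondary from the fallback instance's.
def multi_store_urls_sketch(own_config_url, fallback_config_url)
  {
    primary: own_config_url,        # e.g. "redis://test-host:6379/99"
    secondary: fallback_config_url  # e.g. "unix:///path/to/redis.sock/0"
  }
end

# Example: Workhorse falling back to SharedState's config fixture.
multi_store_urls_sketch("redis://test-host:6379/99", "unix:///path/to/redis.sock/0")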
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index df123ef638f..02ae3f63918 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -86,31 +86,6 @@ RSpec.describe Gitlab::Regex, feature_category: :tooling do
it { is_expected.to match('<any-Charact3r$|any-Charact3r$>') }
end
- describe '.bulk_import_destination_namespace_path_regex_message' do
- subject { described_class.bulk_import_destination_namespace_path_regex_message }
-
- it {
- is_expected
- .to eq("must have a relative path structure with no HTTP " \
- "protocol characters, or leading or trailing forward slashes. Path segments must not start or " \
- "end with a special character, and must not contain consecutive special characters."
- )
- }
- end
-
- describe '.bulk_import_source_full_path_regex_message' do
- subject { described_class.bulk_import_source_full_path_regex_message }
-
- it {
- is_expected
- .to eq(
- "must have a relative path structure with no HTTP " \
- "protocol characters, or leading or trailing forward slashes. Path segments must not start or " \
- "end with a special character, and must not contain consecutive special characters."
- )
- }
- end
-
describe '.group_path_regex' do
subject { described_class.group_path_regex }
diff --git a/spec/lib/gitlab/repo_path_spec.rb b/spec/lib/gitlab/repo_path_spec.rb
index 6cff0eff7e8..a4a7c139beb 100644
--- a/spec/lib/gitlab/repo_path_spec.rb
+++ b/spec/lib/gitlab/repo_path_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe ::Gitlab::RepoPath do
include Gitlab::Routing
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project_with_design, :repository) }
let_it_be(:personal_snippet) { create(:personal_snippet) }
let_it_be(:project_snippet) { create(:project_snippet, project: project) }
let_it_be(:redirect_route) { 'foo/bar/baz' }
@@ -28,6 +28,10 @@ RSpec.describe ::Gitlab::RepoPath do
it 'parses a project snippet repository path' do
expect(described_class.parse("#{project.full_path}/snippets/#{project_snippet.id}")).to eq([project_snippet, project, Gitlab::GlRepository::SNIPPET, nil])
end
+
+ it 'parses a full project design repository path' do
+ expect(described_class.parse(project.design_repository.full_path)).to match_array([project.design_management_repository, project, Gitlab::GlRepository::DESIGN, nil])
+ end
end
context 'a relative path' do
@@ -43,6 +47,10 @@ RSpec.describe ::Gitlab::RepoPath do
expect(described_class.parse('/' + project.full_path + '.git')).to eq([project, project, Gitlab::GlRepository::PROJECT, nil])
end
+ it 'parses a relative design repository path' do
+ expect(described_class.parse(project.full_path + '.design.git')).to match_array([project.design_management_repository, project, Gitlab::GlRepository::DESIGN, nil])
+ end
+
context 'of a redirected project' do
it 'parses a relative repository path' do
expect(described_class.parse(redirect.path + '.git')).to eq([project, project, Gitlab::GlRepository::PROJECT, redirect_route])
@@ -52,6 +60,10 @@ RSpec.describe ::Gitlab::RepoPath do
expect(described_class.parse(redirect.path + '.wiki.git')).to eq([project.wiki, project, Gitlab::GlRepository::WIKI, "#{redirect_route}.wiki"])
end
+ it 'parses a relative design repository path' do
+ expect(described_class.parse(redirect.path + '.design.git')).to match_array([project.design_management_repository, project, Gitlab::GlRepository::DESIGN, "#{redirect_route}.design"])
+ end
+
it 'parses a relative path starting with /' do
expect(described_class.parse('/' + redirect.path + '.git')).to eq([project, project, Gitlab::GlRepository::PROJECT, redirect_route])
end
diff --git a/spec/lib/gitlab/search_results_spec.rb b/spec/lib/gitlab/search_results_spec.rb
index 725b7901e68..d1f19a5e1ba 100644
--- a/spec/lib/gitlab/search_results_spec.rb
+++ b/spec/lib/gitlab/search_results_spec.rb
@@ -16,8 +16,9 @@ RSpec.describe Gitlab::SearchResults, feature_category: :global_search do
let(:query) { 'foo' }
let(:filters) { {} }
let(:sort) { nil }
+ let(:limit_projects) { Project.order(:id) }
- subject(:results) { described_class.new(user, query, Project.order(:id), sort: sort, filters: filters) }
+ subject(:results) { described_class.new(user, query, limit_projects, sort: sort, filters: filters) }
context 'as a user with access' do
before do
@@ -236,9 +237,14 @@ RSpec.describe Gitlab::SearchResults, feature_category: :global_search do
let_it_be(:closed_result) { create(:issue, :closed, project: project, title: 'foo closed') }
let_it_be(:opened_result) { create(:issue, :opened, project: project, title: 'foo open') }
let_it_be(:confidential_result) { create(:issue, :confidential, project: project, title: 'foo confidential') }
+ let_it_be(:unarchived_project) { project }
+ let_it_be(:archived_project) { create(:project, :public, :archived) }
+ let_it_be(:unarchived_result) { create(:issue, project: unarchived_project, title: 'foo unarchived') }
+ let_it_be(:archived_result) { create(:issue, project: archived_project, title: 'foo archived') }
include_examples 'search results filtered by state'
include_examples 'search results filtered by confidential'
+ include_examples 'search results filtered by archived', 'search_issues_hide_archived_projects'
end
context 'ordering' do
@@ -274,7 +280,7 @@ RSpec.describe Gitlab::SearchResults, feature_category: :global_search do
let_it_be(:unarchived_result) { create(:project, :public, group: group, name: 'Test1') }
let_it_be(:archived_result) { create(:project, :archived, :public, group: group, name: 'Test2') }
- it_behaves_like 'search results filtered by archived', 'search_projects_hide_archived'
+ it_behaves_like 'search results filtered by archived'
end
end
@@ -433,26 +439,32 @@ RSpec.describe Gitlab::SearchResults, feature_category: :global_search do
end
context 'milestones' do
- it 'returns correct set of milestones' do
- private_project_1 = create(:project, :private)
- private_project_2 = create(:project, :private)
- internal_project = create(:project, :internal)
- public_project_1 = create(:project, :public)
- public_project_2 = create(:project, :public, :issues_disabled, :merge_requests_disabled)
+ let_it_be(:archived_project) { create(:project, :public, :archived) }
+ let_it_be(:private_project_1) { create(:project, :private) }
+ let_it_be(:private_project_2) { create(:project, :private) }
+ let_it_be(:internal_project) { create(:project, :internal) }
+ let_it_be(:public_project_1) { create(:project, :public) }
+ let_it_be(:public_project_2) { create(:project, :public, :issues_disabled, :merge_requests_disabled) }
+ let_it_be(:hidden_milestone_1) { create(:milestone, project: private_project_2, title: 'Private project without access milestone') }
+ let_it_be(:hidden_milestone_2) { create(:milestone, project: public_project_2, title: 'Public project with milestones disabled milestone') }
+ let_it_be(:hidden_milestone_3) { create(:milestone, project: archived_project, title: 'Milestone from an archived project') }
+ let_it_be(:milestone_1) { create(:milestone, project: private_project_1, title: 'Private project with access milestone', state: 'closed') }
+ let_it_be(:milestone_2) { create(:milestone, project: internal_project, title: 'Internal project milestone') }
+ let_it_be(:milestone_3) { create(:milestone, project: public_project_1, title: 'Public project with milestones enabled milestone') }
+ let(:unarchived_result) { milestone_1 }
+ let(:archived_result) { hidden_milestone_3 }
+ let(:limit_projects) { ProjectsFinder.new(current_user: user).execute }
+ let(:query) { 'milestone' }
+ let(:scope) { 'milestones' }
+
+ before do
private_project_1.add_developer(user)
- # milestones that should not be visible
- create(:milestone, project: private_project_2, title: 'Private project without access milestone')
- create(:milestone, project: public_project_2, title: 'Public project with milestones disabled milestone')
- # milestones that should be visible
- milestone_1 = create(:milestone, project: private_project_1, title: 'Private project with access milestone', state: 'closed')
- milestone_2 = create(:milestone, project: internal_project, title: 'Internal project milestone')
- milestone_3 = create(:milestone, project: public_project_1, title: 'Public project with milestones enabled milestone')
- # Global search scope takes user authorized projects, internal projects and public projects.
- limit_projects = ProjectsFinder.new(current_user: user).execute
-
- milestones = described_class.new(user, 'milestone', limit_projects).objects('milestones')
-
- expect(milestones).to match_array([milestone_1, milestone_2, milestone_3])
end
+
+ it 'returns correct set of milestones' do
+ expect(results.objects(scope)).to match_array([milestone_1, milestone_2, milestone_3])
+ end
+
+ include_examples 'search results filtered by archived', 'search_milestones_hide_archived_projects'
end
end
diff --git a/spec/lib/gitlab/security/scan_configuration_spec.rb b/spec/lib/gitlab/security/scan_configuration_spec.rb
index 774a362617a..faa8a206d74 100644
--- a/spec/lib/gitlab/security/scan_configuration_spec.rb
+++ b/spec/lib/gitlab/security/scan_configuration_spec.rb
@@ -57,6 +57,16 @@ RSpec.describe ::Gitlab::Security::ScanConfiguration do
it { is_expected.to be_nil }
end
+ describe '#on_demand_available?' do
+ subject { scan.on_demand_available? }
+
+ let(:configured) { true }
+ let(:available) { true }
+ let(:type) { :sast }
+
+ it { is_expected.to be_falsey }
+ end
+
describe '#can_enable_by_merge_request?' do
subject { scan.can_enable_by_merge_request? }
diff --git a/spec/lib/gitlab/setup_helper/workhorse_spec.rb b/spec/lib/gitlab/setup_helper/workhorse_spec.rb
index 726b73a9dfe..e5a44abc731 100644
--- a/spec/lib/gitlab/setup_helper/workhorse_spec.rb
+++ b/spec/lib/gitlab/setup_helper/workhorse_spec.rb
@@ -24,8 +24,8 @@ RSpec.describe Gitlab::SetupHelper::Workhorse do
end
describe '.redis_url' do
- it 'matches the SharedState URL' do
- expect(Gitlab::Redis::SharedState).to receive(:url).and_return('foo')
+ it 'matches the Workhorse URL' do
+ expect(Gitlab::Redis::Workhorse).to receive(:url).and_return('foo')
expect(described_class.redis_url).to eq('foo')
end
@@ -34,14 +34,14 @@ RSpec.describe Gitlab::SetupHelper::Workhorse do
describe '.redis_db' do
subject { described_class.redis_db }
- it 'matches the SharedState DB' do
- expect(Gitlab::Redis::SharedState).to receive(:params).and_return(db: 1)
+ it 'matches the Workhorse DB' do
+ expect(Gitlab::Redis::Workhorse).to receive(:params).and_return(db: 1)
is_expected.to eq(1)
end
it 'defaults to 0 if unspecified' do
- expect(Gitlab::Redis::SharedState).to receive(:params).and_return({})
+ expect(Gitlab::Redis::Workhorse).to receive(:params).and_return({})
is_expected.to eq(0)
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb
index 14eb568b974..3ae1236cc7c 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Client, :clean_gitlab_redis_queues,
-:clean_gitlab_redis_shared_state do
+:clean_gitlab_redis_queues_metadata do
shared_context 'deduplication worker class' do |strategy, including_scheduled|
let(:worker_class) do
Class.new do
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
index c22e7a1240f..937a1751cc7 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gitlab_redis_queues, :clean_gitlab_redis_shared_state,
+RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob,
+ :clean_gitlab_redis_queues, :clean_gitlab_redis_shared_state, :clean_gitlab_redis_queues_metadata,
feature_category: :shared do
using RSpec::Parameterized::TableSyntax
@@ -78,11 +79,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
end
end
- context 'with Redis cookies' do
- def with_redis(&block)
- Gitlab::Redis::Queues.with(&block)
- end
-
+ shared_examples 'with Redis cookies' do
let(:cookie_key) { "#{Gitlab::Redis::Queues::SIDEKIQ_NAMESPACE}:#{idempotency_key}:cookie:v2" }
let(:cookie) { get_redis_msgpack(cookie_key) }
@@ -416,6 +413,62 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
end
end
+ context 'with multi-store feature flags turned on' do
+ def with_redis(&block)
+ Gitlab::Redis::QueuesMetadata.with(&block)
+ end
+
+ shared_examples 'uses QueuesMetadata' do
+ it 'uses Gitlab::Redis::QueuesMetadata.with' do
+ expect(Gitlab::Redis::QueuesMetadata).to receive(:with).and_call_original
+ expect(Gitlab::Redis::Queues).not_to receive(:with)
+
+ duplicate_job.check!
+ end
+ end
+
+ context 'when migration is ongoing with double-write' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_queues_metadata: false)
+ end
+
+ it_behaves_like 'uses QueuesMetadata'
+ it_behaves_like 'with Redis cookies'
+ end
+
+ context 'when migration is completed' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_queues_metadata: false)
+ end
+
+ it_behaves_like 'uses QueuesMetadata'
+ it_behaves_like 'with Redis cookies'
+ end
+
+ it_behaves_like 'uses QueuesMetadata'
+ it_behaves_like 'with Redis cookies'
+ end
+
+ context 'when both multi-store feature flags are off' do
+ def with_redis(&block)
+ Gitlab::Redis::Queues.with(&block)
+ end
+
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_queues_metadata: false)
+ stub_feature_flags(use_primary_store_as_default_for_queues_metadata: false)
+ end
+
+ it 'uses Gitlab::Redis::Queues' do
+ expect(Gitlab::Redis::Queues).to receive(:with).and_call_original
+ expect(Gitlab::Redis::QueuesMetadata).not_to receive(:with)
+
+ duplicate_job.check!
+ end
+
+ it_behaves_like 'with Redis cookies'
+ end
+
describe '#scheduled?' do
it 'returns false for non-scheduled jobs' do
expect(duplicate_job.scheduled?).to be(false)
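The feature-flag matrix added above reduces to a single rule: duplicate-job cookies stay in Gitlab::Redis::Queues only while both queues_metadata flags are disabled; once either flag is enabled, Gitlab::Redis::QueuesMetadata handles them. A hedged sketch of that selection (the helper is illustrative, not the actual implementation, and assumes the two Gitlab::Redis wrappers referenced in the spec are loaded):

# Illustrative only: the store selection implied by the spec's flag matrix.
# Parameter names mirror the feature flags stubbed above.
def metadata_store_for(use_primary_and_secondary_stores:, use_primary_store_as_default:)
  # The legacy Queues store is only used once both migration flags are off.
  if !use_primary_and_secondary_stores && !use_primary_store_as_default
    Gitlab::Redis::Queues
  else
    # Double-write and post-migration paths both go through QueuesMetadata.
    Gitlab::Redis::QueuesMetadata
  end
end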
diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index 0cbf9eab3d8..a27e723e392 100644
--- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -402,11 +402,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
include Sidekiq::Worker
include WorkerAttributes
- if category
- feature_category category
- else
- feature_category :not_owned
- end
+ feature_category category || :not_owned
def perform; end
end
diff --git a/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
index c66e36c5621..bf379d9cb0d 100644
--- a/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
+++ b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
+RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues,
+ :clean_gitlab_redis_queues_metadata do
def clear_queues
Sidekiq::Queue.new('authorized_projects').clear
Sidekiq::Queue.new('post_receive').clear
diff --git a/spec/lib/gitlab/sidekiq_queue_spec.rb b/spec/lib/gitlab/sidekiq_queue_spec.rb
index 93632848788..8ceba7ca4b7 100644
--- a/spec/lib/gitlab/sidekiq_queue_spec.rb
+++ b/spec/lib/gitlab/sidekiq_queue_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::SidekiqQueue, :clean_gitlab_redis_queues do
+RSpec.describe Gitlab::SidekiqQueue, :clean_gitlab_redis_queues, :clean_gitlab_redis_queues_metadata do
around do |example|
Sidekiq::Queue.new('foobar').clear
Sidekiq::Testing.disable!(&example)
diff --git a/spec/lib/gitlab/sql/cte_spec.rb b/spec/lib/gitlab/sql/cte_spec.rb
index 523380eae34..d2d3fdbb450 100644
--- a/spec/lib/gitlab/sql/cte_spec.rb
+++ b/spec/lib/gitlab/sql/cte_spec.rb
@@ -15,8 +15,7 @@ RSpec.describe Gitlab::SQL::CTE do
expected = [
"#{name} AS ",
- Gitlab::Database::AsWithMaterialized.materialized_if_supported,
- (' ' unless Gitlab::Database::AsWithMaterialized.materialized_if_supported.blank?),
+ 'MATERIALIZED ',
"(#{sql1})"
].join
diff --git a/spec/lib/gitlab/sql/pattern_spec.rb b/spec/lib/gitlab/sql/pattern_spec.rb
index a34ddf8773c..7bd2ddf2889 100644
--- a/spec/lib/gitlab/sql/pattern_spec.rb
+++ b/spec/lib/gitlab/sql/pattern_spec.rb
@@ -9,36 +9,44 @@ RSpec.describe Gitlab::SQL::Pattern do
let_it_be(:issue1) { create(:issue, title: 'noise foo noise', description: 'noise bar noise') }
let_it_be(:issue2) { create(:issue, title: 'noise baz noise', description: 'noise foo noise') }
let_it_be(:issue3) { create(:issue, title: 'Oh', description: 'Ah') }
+ let_it_be(:issue4) { create(:issue, title: 'beep beep', description: 'beep beep') }
+ let_it_be(:issue5) { create(:issue, title: 'beep', description: 'beep') }
- subject(:fuzzy_search) { Issue.fuzzy_search(query, columns) }
+ subject(:fuzzy_search) { Issue.fuzzy_search(query, columns, exact_matches_first: exact_matches_first) }
- where(:query, :columns, :expected) do
- 'foo' | [Issue.arel_table[:title]] | %i[issue1]
+ where(:query, :columns, :exact_matches_first, :expected) do
+ 'foo' | [Issue.arel_table[:title]] | false | %i[issue1]
- 'foo' | %i[title] | %i[issue1]
- 'foo' | %w[title] | %i[issue1]
- 'foo' | %i[description] | %i[issue2]
- 'foo' | %i[title description] | %i[issue1 issue2]
- 'bar' | %i[title description] | %i[issue1]
- 'baz' | %i[title description] | %i[issue2]
- 'qux' | %i[title description] | []
+ 'foo' | %i[title] | false | %i[issue1]
+ 'foo' | %w[title] | false | %i[issue1]
+ 'foo' | %i[description] | false | %i[issue2]
+ 'foo' | %i[title description] | false | %i[issue1 issue2]
+ 'bar' | %i[title description] | false | %i[issue1]
+ 'baz' | %i[title description] | false | %i[issue2]
+ 'qux' | %i[title description] | false | []
- 'oh' | %i[title description] | %i[issue3]
- 'OH' | %i[title description] | %i[issue3]
- 'ah' | %i[title description] | %i[issue3]
- 'AH' | %i[title description] | %i[issue3]
- 'oh' | %i[title] | %i[issue3]
- 'ah' | %i[description] | %i[issue3]
+ 'oh' | %i[title description] | false | %i[issue3]
+ 'OH' | %i[title description] | false | %i[issue3]
+ 'ah' | %i[title description] | false | %i[issue3]
+ 'AH' | %i[title description] | false | %i[issue3]
+ 'oh' | %i[title] | false | %i[issue3]
+ 'ah' | %i[description] | false | %i[issue3]
- '' | %i[title] | %i[issue1 issue2 issue3]
- %w[a b] | %i[title] | %i[issue1 issue2 issue3]
+ '' | %i[title] | false | %i[issue1 issue2 issue3 issue4 issue5]
+ %w[a b] | %i[title] | false | %i[issue1 issue2 issue3 issue4 issue5]
+
+ 'beep' | %i[title] | true | %i[issue5 issue4]
end
with_them do
let(:expected_issues) { expected.map { |sym| send(sym) } }
it 'finds the expected issues' do
- expect(fuzzy_search).to match_array(expected_issues)
+ if exact_matches_first
+ expect(fuzzy_search).to eq(expected_issues)
+ else
+ expect(fuzzy_search).to match_array(expected_issues)
+ end
end
end
end
diff --git a/spec/lib/gitlab/time_tracking_formatter_spec.rb b/spec/lib/gitlab/time_tracking_formatter_spec.rb
index aa755d64a7a..b3372f676d4 100644
--- a/spec/lib/gitlab/time_tracking_formatter_spec.rb
+++ b/spec/lib/gitlab/time_tracking_formatter_spec.rb
@@ -28,6 +28,14 @@ RSpec.describe Gitlab::TimeTrackingFormatter, feature_category: :team_planning d
end
end
+ context 'when the duration is nil' do
+ let(:duration_string) { nil }
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+
context 'when the duration is zero' do
let(:duration_string) { '0h' }
diff --git a/spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb b/spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb
index 78a869b535a..5a5c7123971 100644
--- a/spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb
+++ b/spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb
@@ -63,6 +63,10 @@ RSpec.describe Gitlab::Tracking::Destinations::DatabaseEventsSnowplow, :do_not_s
context 'when on gitlab.com environment' do
let(:endpoint) { 'db-snowplow.trx.gitlab.net' }
+ before do
+ stub_application_setting(snowplow_database_collector_hostname: endpoint)
+ end
+
it 'sends event to tracker' do
allow(Gitlab).to receive(:com?).and_return(true)
allow(tracker).to receive(:track_struct_event).and_call_original
diff --git a/spec/lib/gitlab/tracking/service_ping_context_spec.rb b/spec/lib/gitlab/tracking/service_ping_context_spec.rb
index 7530650b902..3da9a588c0e 100644
--- a/spec/lib/gitlab/tracking/service_ping_context_spec.rb
+++ b/spec/lib/gitlab/tracking/service_ping_context_spec.rb
@@ -7,29 +7,27 @@ RSpec.describe Gitlab::Tracking::ServicePingContext do
using RSpec::Parameterized::TableSyntax
context 'with valid configuration' do
- where(:data_source, :event, :key_path) do
- :redis | nil | 'counts.some_metric'
- :redis_hll | 'some_event' | nil
+ where(:data_source, :event) do
+ :redis | 'some_event'
+ :redis_hll | 'some_event'
end
with_them do
it 'does not raise errors' do
- expect { described_class.new(data_source: data_source, event: event, key_path: key_path) }.not_to raise_error
+ expect { described_class.new(data_source: data_source, event: event) }.not_to raise_error
end
end
end
context 'with invalid configuration' do
- where(:data_source, :event, :key_path) do
- :redis | nil | nil
- :redis | 'some_event' | nil
- :redis_hll | nil | nil
- :redis_hll | nil | 'some key_path'
- :random | 'some_event' | nil
+ where(:data_source, :event) do
+ :redis | nil
+ :redis_hll | nil
+ :random | 'some_event'
end
with_them do
- subject(:new_instance) { described_class.new(data_source: data_source, event: event, key_path: key_path) }
+ subject(:new_instance) { described_class.new(data_source: data_source, event: event) }
it 'raises an ArgumentError' do
expect { new_instance }.to raise_error(ArgumentError)
@@ -48,10 +46,10 @@ RSpec.describe Gitlab::Tracking::ServicePingContext do
end
context 'for redis data source' do
- let(:context_instance) { described_class.new(data_source: :redis, key_path: 'counts.sample_metric') }
+ let(:context_instance) { described_class.new(data_source: :redis, event: 'some_event') }
it 'contains event_name' do
- expect(context_instance.to_context.to_json.dig(:data, :key_path)).to eq('counts.sample_metric')
+ expect(context_instance.to_context.to_json.dig(:data, :event_name)).to eq('some_event')
end
end
end
diff --git a/spec/lib/gitlab/tracking/standard_context_spec.rb b/spec/lib/gitlab/tracking/standard_context_spec.rb
index c44cfdea1cd..4485a30ae66 100644
--- a/spec/lib/gitlab/tracking/standard_context_spec.rb
+++ b/spec/lib/gitlab/tracking/standard_context_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Tracking::StandardContext do
+RSpec.describe Gitlab::Tracking::StandardContext, feature_category: :service_ping do
let(:snowplow_context) { subject.to_context }
describe '#to_context' do
@@ -76,6 +76,7 @@ RSpec.describe Gitlab::Tracking::StandardContext do
it 'holds the correct values', :aggregate_failures do
json_data = snowplow_context.to_json.fetch(:data)
expect(json_data[:user_id]).to eq(user_id)
+ expect(json_data[:is_gitlab_team_member]).to eq(nil)
expect(json_data[:project_id]).to eq(project_id)
expect(json_data[:namespace_id]).to eq(namespace_id)
expect(json_data[:plan]).to eq(plan_name)
diff --git a/spec/lib/gitlab/url_builder_spec.rb b/spec/lib/gitlab/url_builder_spec.rb
index 73627d3e6ff..865a8384405 100644
--- a/spec/lib/gitlab/url_builder_spec.rb
+++ b/spec/lib/gitlab/url_builder_spec.rb
@@ -32,6 +32,9 @@ RSpec.describe Gitlab::UrlBuilder do
:ci_build | ->(build) { "/#{build.project.full_path}/-/jobs/#{build.id}" }
:design | ->(design) { "/#{design.project.full_path}/-/design_management/designs/#{design.id}/raw_image" }
+ [:issue, :group_level] | ->(issue) { "/groups/#{issue.namespace.full_path}/-/work_items/#{issue.iid}" }
+ [:work_item, :group_level] | ->(work_item) { "/groups/#{work_item.namespace.full_path}/-/work_items/#{work_item.iid}" }
+
:group | ->(group) { "/groups/#{group.full_path}" }
:group_milestone | ->(milestone) { "/groups/#{milestone.group.full_path}/-/milestones/#{milestone.iid}" }
diff --git a/spec/lib/gitlab/url_sanitizer_spec.rb b/spec/lib/gitlab/url_sanitizer_spec.rb
index 5f76c1de5b1..2c2ef8f13fb 100644
--- a/spec/lib/gitlab/url_sanitizer_spec.rb
+++ b/spec/lib/gitlab/url_sanitizer_spec.rb
@@ -91,6 +91,25 @@ RSpec.describe Gitlab::UrlSanitizer do
end
end
+ describe '.sanitize_masked_url' do
+ where(:original_url, :masked_url) do
+ 'http://{domain}.com' | 'http://{domain}.com'
+ 'http://{domain}/{hook}' | 'http://{domain}/{hook}'
+ 'http://user:pass@{domain}/hook' | 'http://*****:*****@{domain}/hook'
+ 'http://user:pass@{domain}:{port}/hook' | 'http://*****:*****@{domain}:{port}/hook'
+ 'http://user:@{domain}:{port}/hook' | 'http://*****:*****@{domain}:{port}/hook'
+ 'http://:pass@{domain}:{port}/hook' | 'http://*****:*****@{domain}:{port}/hook'
+ 'http://user@{domain}:{port}/hook' | 'http://*****:*****@{domain}:{port}/hook'
+ 'http://u:p@{domain}/hook?email=james@example.com' | 'http://*****:*****@{domain}/hook?email=james@example.com'
+ 'http://{domain}/hook?email=james@example.com' | 'http://{domain}/hook?email=james@example.com'
+ 'http://user:{pass}@example.com' | 'http://*****:*****@example.com'
+ end
+
+ with_them do
+ it { expect(described_class.sanitize_masked_url(original_url)).to eq(masked_url) }
+ end
+ end
+
describe '#sanitized_url' do
context 'credentials in hash' do
where(username: ['foo', '', nil], password: ['bar', '', nil])
diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb
index 859f3f7a8d7..6695736e54c 100644
--- a/spec/lib/gitlab/usage/metric_definition_spec.rb
+++ b/spec/lib/gitlab/usage/metric_definition_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Usage::MetricDefinition do
+RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping do
let(:attributes) do
{
description: 'GitLab instance unique identifier',
@@ -109,6 +109,42 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
end
end
+ describe '#to_context' do
+ subject { definition.to_context }
+
+ context 'with data_source redis_hll metric' do
+ before do
+ attributes[:data_source] = 'redis_hll'
+ attributes[:options] = { events: %w[some_event_1 some_event_2] }
+ end
+
+ it 'returns a ServicePingContext with first event as event_name' do
+ expect(subject.to_h[:data][:event_name]).to eq('some_event_1')
+ end
+ end
+
+ context 'with data_source redis metric' do
+ before do
+ attributes[:data_source] = 'redis'
+ attributes[:options] = { prefix: 'web_ide', event: 'views_count', include_usage_prefix: false }
+ end
+
+ it 'returns a ServicePingContext with redis key as event_name' do
+ expect(subject.to_h[:data][:event_name]).to eq('WEB_IDE_VIEWS_COUNT')
+ end
+ end
+
+ context 'with data_source database metric' do
+ before do
+ attributes[:data_source] = 'database'
+ end
+
+ it 'returns nil' do
+ is_expected.to be_nil
+ end
+ end
+ end
+
describe '#validate' do
using RSpec::Parameterized::TableSyntax
@@ -117,7 +153,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
:value_type | nil
:value_type | 'test'
:status | nil
- :milestone | nil
+ :milestone | 10.0
:data_category | nil
:key_path | nil
:product_group | nil
@@ -233,26 +269,6 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
end
end
- describe 'statuses' do
- using RSpec::Parameterized::TableSyntax
-
- where(:status, :skip_validation?) do
- 'active' | false
- 'broken' | false
- 'removed' | true
- end
-
- with_them do
- subject(:validation) do
- described_class.new(path, attributes.merge( { status: status } )).send(:skip_validation?)
- end
-
- it 'returns true/false for skip_validation' do
- expect(validation).to eq(skip_validation?)
- end
- end
- end
-
describe '.load_all!' do
let(:metric1) { Dir.mktmpdir('metric1') }
let(:metric2) { Dir.mktmpdir('metric2') }
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/batched_background_migration_failed_jobs_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/batched_background_migration_failed_jobs_metric_spec.rb
index e66dd04b69b..4544a3a60a1 100644
--- a/spec/lib/gitlab/usage/metrics/instrumentations/batched_background_migration_failed_jobs_metric_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/batched_background_migration_failed_jobs_metric_spec.rb
@@ -18,6 +18,20 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::BatchedBackgroundMigrat
]
end
+ let(:start) { 9.days.ago.to_fs(:db) }
+ let(:finish) { 2.days.ago.to_fs(:db) }
+
+ let(:expected_query) do
+ "SELECT \"batched_background_migrations\".\"table_name\", \"batched_background_migrations\".\"job_class_name\", " \
+ "COUNT(batched_jobs) AS number_of_failed_jobs " \
+ "FROM \"batched_background_migrations\" " \
+ "INNER JOIN \"batched_background_migration_jobs\" \"batched_jobs\" " \
+ "ON \"batched_jobs\".\"batched_background_migration_id\" = \"batched_background_migrations\".\"id\" " \
+ "WHERE \"batched_jobs\".\"status\" = 2 " \
+ "AND \"batched_background_migrations\".\"created_at\" BETWEEN '#{start}' AND '#{finish}' " \
+ "GROUP BY \"batched_background_migrations\".\"table_name\", \"batched_background_migrations\".\"job_class_name\""
+ end
+
let_it_be(:active_migration) do
create(:batched_background_migration, :active, table_name: 'users', job_class_name: 'test', created_at: 5.days.ago)
end
@@ -36,5 +50,5 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::BatchedBackgroundMigrat
let_it_be(:old_batched_job) { create(:batched_background_migration_job, :failed, batched_migration: old_migration) }
- it_behaves_like 'a correct instrumented metric value', { time_frame: '7d' }
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: '7d' }
end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_connected_agents_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_connected_agents_metric_spec.rb
new file mode 100644
index 00000000000..208d5c259ca
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_connected_agents_metric_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountConnectedAgentsMetric, feature_category: :service_ping do
+ let_it_be(:agent_token_connected) { create(:cluster_agent_token, :active, last_used_at: 2.minutes.ago) }
+ let_it_be(:agent_token_disconnected) { create(:cluster_agent_token) }
+
+ let(:expected_value) { 1 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }
+end
diff --git a/spec/lib/gitlab/usage/metrics/query_spec.rb b/spec/lib/gitlab/usage/metrics/query_spec.rb
index 355d619f768..750d340551a 100644
--- a/spec/lib/gitlab/usage/metrics/query_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/query_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe Gitlab::Usage::Metrics::Query do
it 'returns the histogram sql' do
expect(described_class.for(:histogram, AlertManagement::HttpIntegration.active,
:project_id, buckets: 1..2, bucket_size: 101))
- .to match(/^WITH "count_cte" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/)
+ .to match(/^WITH "count_cte" AS MATERIALIZED/)
end
end
diff --git a/spec/lib/gitlab/usage/time_series_storable_spec.rb b/spec/lib/gitlab/usage/time_series_storable_spec.rb
new file mode 100644
index 00000000000..420a87c5483
--- /dev/null
+++ b/spec/lib/gitlab/usage/time_series_storable_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::TimeSeriesStorable, feature_category: :service_ping do
+ let(:counter_class) do
+ Class.new do
+ include Gitlab::Usage::TimeSeriesStorable
+
+ def redis_key(event, date)
+ key = apply_time_aggregation(event, date)
+ "#{key}:"
+ end
+ end
+ end
+
+ let(:counter_instance) { counter_class.new }
+
+ describe '#apply_time_aggregation' do
+ let(:key) { "key3" }
+ let(:time) { Date.new(2023, 5, 1) }
+
+ it 'returns proper key for given time' do
+ expect(counter_instance.apply_time_aggregation(key, time)).to eq("key3-2023-18")
+ end
+ end
+
+ describe '#keys_for_aggregation' do
+ let(:result) { counter_instance.keys_for_aggregation(**params) }
+ let(:params) { base_params }
+ let(:base_params) { { events: events, start_date: start_date, end_date: end_date } }
+ let(:events) { %w[event1 event2] }
+ let(:start_date) { Date.new(2023, 4, 1) }
+ let(:end_date) { Date.new(2023, 4, 15) }
+
+ it 'returns proper keys' do
+ expect(result).to match_array(["event1-2023-13:", "event1-2023-14:", "event2-2023-13:", "event2-2023-14:"])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
index 11c6ea2fc9d..eeef9406841 100644
--- a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::UsageDataCounters::CiTemplateUniqueCounter do
+RSpec.describe Gitlab::UsageDataCounters::CiTemplateUniqueCounter, feature_category: :pipeline_composition do
describe '.track_unique_project_event' do
using RSpec::Parameterized::TableSyntax
include SnowplowHelpers
diff --git a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
index 50e20e4fbcf..21a820deaa4 100644
--- a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
@@ -3,304 +3,244 @@
require 'spec_helper'
RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_gitlab_redis_shared_state do
- let_it_be(:user1) { build(:user, id: 1) }
+ let_it_be(:user) { build(:user, id: 1) }
let_it_be(:user2) { build(:user, id: 2) }
let_it_be(:user3) { build(:user, id: 3) }
let_it_be(:project) { create(:project) }
- let_it_be(:category) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_CATEGORY }
- let_it_be(:event_action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_ACTION }
- let_it_be(:event_label) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_LABEL }
- let(:original_params) { nil }
- let(:event_property) { action }
let(:time) { Time.zone.now }
+ let(:namespace) { project&.namespace }
context 'for Issue title edit actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_TITLE_CHANGED }
- def track_action(params)
- described_class.track_issue_title_changed_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_title_changed_action(author: user, project: project) }
end
end
context 'for Issue description edit actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_DESCRIPTION_CHANGED }
- def track_action(params)
- described_class.track_issue_description_changed_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_description_changed_action(author: user, project: project) }
end
end
context 'for Issue assignee edit actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_ASSIGNEE_CHANGED }
- def track_action(params)
- described_class.track_issue_assignee_changed_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_assignee_changed_action(author: user, project: project) }
end
end
context 'for Issue make confidential actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_MADE_CONFIDENTIAL }
- def track_action(params)
- described_class.track_issue_made_confidential_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_made_confidential_action(author: user, project: project) }
end
end
context 'for Issue make visible actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_MADE_VISIBLE }
- def track_action(params)
- described_class.track_issue_made_visible_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_made_visible_action(author: user, project: project) }
end
end
context 'for Issue created actions' do
- it_behaves_like 'tracked issuable internal event with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_CREATED }
- let(:original_params) { { namespace: project.project_namespace.reload } }
+ let(:project) { nil }
- def track_action(params)
- described_class.track_issue_created_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_created_action(author: user, namespace: namespace) }
end
end
context 'for Issue closed actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_CLOSED }
- def track_action(params)
- described_class.track_issue_closed_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_closed_action(author: user, project: project) }
end
end
context 'for Issue reopened actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_REOPENED }
- def track_action(params)
- described_class.track_issue_reopened_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_reopened_action(author: user, project: project) }
end
end
context 'for Issue label changed actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_LABEL_CHANGED }
- def track_action(params)
- described_class.track_issue_label_changed_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_label_changed_action(author: user, project: project) }
end
end
context 'for Issue label milestone actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_MILESTONE_CHANGED }
- def track_action(params)
- described_class.track_issue_milestone_changed_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_milestone_changed_action(author: user, project: project) }
end
end
context 'for Issue cross-referenced actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_CROSS_REFERENCED }
- def track_action(params)
- described_class.track_issue_cross_referenced_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_cross_referenced_action(author: user, project: project) }
end
end
context 'for Issue moved actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_MOVED }
- def track_action(params)
- described_class.track_issue_moved_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_moved_action(author: user, project: project) }
end
end
context 'for Issue cloned actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
- let_it_be(:action) { described_class::ISSUE_CLONED }
+ it_behaves_like 'internal event tracking' do
+ let(:action) { described_class::ISSUE_CLONED }
- def track_action(params)
- described_class.track_issue_cloned_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_cloned_action(author: user, project: project) }
end
end
context 'for Issue relate actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_RELATED }
- def track_action(params)
- described_class.track_issue_related_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_related_action(author: user, project: project) }
end
end
context 'for Issue unrelate actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_UNRELATED }
- def track_action(params)
- described_class.track_issue_unrelated_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_unrelated_action(author: user, project: project) }
end
end
context 'for Issue marked as duplicate actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_MARKED_AS_DUPLICATE }
- def track_action(params)
- described_class.track_issue_marked_as_duplicate_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_marked_as_duplicate_action(author: user, project: project) }
end
end
context 'for Issue locked actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_LOCKED }
- def track_action(params)
- described_class.track_issue_locked_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_locked_action(author: user, project: project) }
end
end
context 'for Issue unlocked actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_UNLOCKED }
- def track_action(params)
- described_class.track_issue_unlocked_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_unlocked_action(author: user, project: project) }
end
end
context 'for Issue designs added actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_DESIGNS_ADDED }
- def track_action(params)
- described_class.track_issue_designs_added_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_designs_added_action(author: user, project: project) }
end
end
context 'for Issue designs modified actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_DESIGNS_MODIFIED }
- def track_action(params)
- described_class.track_issue_designs_modified_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_designs_modified_action(author: user, project: project) }
end
end
context 'for Issue designs removed actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_DESIGNS_REMOVED }
- def track_action(params)
- described_class.track_issue_designs_removed_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_designs_removed_action(author: user, project: project) }
end
end
context 'for Issue due date changed actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_DUE_DATE_CHANGED }
- def track_action(params)
- described_class.track_issue_due_date_changed_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_due_date_changed_action(author: user, project: project) }
end
end
context 'for Issue time estimate changed actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_TIME_ESTIMATE_CHANGED }
- def track_action(params)
- described_class.track_issue_time_estimate_changed_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_time_estimate_changed_action(author: user, project: project) }
end
end
context 'for Issue time spent changed actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_TIME_SPENT_CHANGED }
- def track_action(params)
- described_class.track_issue_time_spent_changed_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_time_spent_changed_action(author: user, project: project) }
end
end
context 'for Issue comment added actions', :snowplow do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_COMMENT_ADDED }
- def track_action(params)
- described_class.track_issue_comment_added_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_comment_added_action(author: user, project: project) }
end
end
context 'for Issue comment edited actions', :snowplow do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_COMMENT_EDITED }
- def track_action(params)
- described_class.track_issue_comment_edited_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_comment_edited_action(author: user, project: project) }
end
end
context 'for Issue comment removed actions', :snowplow do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_COMMENT_REMOVED }
- def track_action(params)
- described_class.track_issue_comment_removed_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_comment_removed_action(author: user, project: project) }
end
end
context 'for Issue design comment removed actions' do
- it_behaves_like 'tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'internal event tracking' do
let(:action) { described_class::ISSUE_DESIGN_COMMENT_REMOVED }
- def track_action(params)
- described_class.track_issue_design_comment_removed_action(**params)
- end
+ subject(:track_event) { described_class.track_issue_design_comment_removed_action(author: user, project: project) }
end
end
it 'can return the count of actions per user deduplicated' do
travel_to(Date.today.beginning_of_week) do # because events are aggregated by week, we need to emit events in the same week
- described_class.track_issue_title_changed_action(author: user1, project: project)
- described_class.track_issue_description_changed_action(author: user1, project: project)
- described_class.track_issue_assignee_changed_action(author: user1, project: project)
+ described_class.track_issue_title_changed_action(author: user, project: project)
+ described_class.track_issue_description_changed_action(author: user, project: project)
+ described_class.track_issue_assignee_changed_action(author: user, project: project)
end
travel_to(Date.today.beginning_of_week + 2.days) do
diff --git a/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb
index 9562f1c5500..1ea2ea144df 100644
--- a/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb
@@ -15,7 +15,8 @@ RSpec.describe Gitlab::UsageDataCounters::KubernetesAgentCounter do
'k8s_api_proxy_request' => 2,
'flux_git_push_notifications_total' => 3,
'k8s_api_proxy_requests_via_ci_access' => 4,
- 'k8s_api_proxy_requests_via_user_access' => 5
+ 'k8s_api_proxy_requests_via_user_access' => 5,
+ 'k8s_api_proxy_requests_via_pat_access' => 6
}
end
@@ -31,7 +32,8 @@ RSpec.describe Gitlab::UsageDataCounters::KubernetesAgentCounter do
kubernetes_agent_k8s_api_proxy_request: 6,
kubernetes_agent_flux_git_push_notifications_total: 9,
kubernetes_agent_k8s_api_proxy_requests_via_ci_access: 12,
- kubernetes_agent_k8s_api_proxy_requests_via_user_access: 15
+ kubernetes_agent_k8s_api_proxy_requests_via_user_access: 15,
+ kubernetes_agent_k8s_api_proxy_requests_via_pat_access: 18
)
end
diff --git a/spec/lib/gitlab/usage_data_queries_spec.rb b/spec/lib/gitlab/usage_data_queries_spec.rb
index 30588324adf..3ec7bf33623 100644
--- a/spec/lib/gitlab/usage_data_queries_spec.rb
+++ b/spec/lib/gitlab/usage_data_queries_spec.rb
@@ -82,7 +82,7 @@ RSpec.describe Gitlab::UsageDataQueries do
it 'returns the histogram sql' do
expect(described_class.histogram(AlertManagement::HttpIntegration.active,
:project_id, buckets: 1..2, bucket_size: 101))
- .to match(/^WITH "count_cte" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/)
+ .to match(/^WITH "count_cte" AS MATERIALIZED/)
end
end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 94c4544f754..143d0484392 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -397,7 +397,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
user = create(:user)
project = create(:project, creator: user)
issue = create(:issue, project: project, author: user)
- create(:issue, project: project, author: User.support_bot)
+ create(:issue, project: project, author: Users::Internal.support_bot)
create(:note, project: project, noteable: issue, author: user)
create(:todo, project: project, target: issue, author: user)
create(:jira_integration, :jira_cloud_service, active: true, project: create(:project, :jira_dvcs_cloud, creator: user))
@@ -431,7 +431,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
user = create(:user)
project = create(:project, creator: user)
create(:issue, project: project, author: user)
- create(:issue, project: project, author: User.support_bot)
+ create(:issue, project: project, author: Users::Internal.support_bot)
end
expect(described_class.usage_activity_by_stage_plan({})).to include(issues: 3)
@@ -556,7 +556,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
expect(count_data[:issues_using_zoom_quick_actions]).to eq(3)
expect(count_data[:issues_with_embedded_grafana_charts_approx]).to eq(2)
expect(count_data[:incident_issues]).to eq(4)
- expect(count_data[:issues_created_from_alerts]).to eq(3)
+ expect(count_data[:issues_created_from_alerts]).to eq(2)
expect(count_data[:alert_bot_incident_issues]).to eq(4)
expect(count_data[:clusters_enabled]).to eq(6)
expect(count_data[:project_clusters_enabled]).to eq(4)
@@ -883,7 +883,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
let(:project) { create(:project, :service_desk_enabled) }
it 'gathers Service Desk data' do
- create_list(:issue, 2, :confidential, author: User.support_bot, project: project)
+ create_list(:issue, 2, :confidential, author: Users::Internal.support_bot, project: project)
expect(subject).to eq(service_desk_enabled_projects: 1,
service_desk_issues: 2)
diff --git a/spec/lib/gitlab/user_access_snippet_spec.rb b/spec/lib/gitlab/user_access_snippet_spec.rb
index 916e920e2ac..fd71a6ce0a5 100644
--- a/spec/lib/gitlab/user_access_snippet_spec.rb
+++ b/spec/lib/gitlab/user_access_snippet_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::UserAccessSnippet do
let_it_be(:project) { create(:project, :private) }
let_it_be(:snippet) { create(:project_snippet, :private, project: project) }
- let_it_be(:migration_bot) { User.migration_bot }
+ let_it_be(:migration_bot) { Users::Internal.migration_bot }
let(:user) { create(:user) }
diff --git a/spec/lib/gitlab/utils/markdown_spec.rb b/spec/lib/gitlab/utils/markdown_spec.rb
index 45953c7906e..d707cf51712 100644
--- a/spec/lib/gitlab/utils/markdown_spec.rb
+++ b/spec/lib/gitlab/utils/markdown_spec.rb
@@ -2,7 +2,7 @@
require 'fast_spec_helper'
-RSpec.describe Gitlab::Utils::Markdown do
+RSpec.describe Gitlab::Utils::Markdown, feature_category: :gitlab_docs do
let(:klass) do
Class.new do
include Gitlab::Utils::Markdown
@@ -53,25 +53,30 @@ RSpec.describe Gitlab::Utils::Markdown do
end
context 'when string has a product suffix' do
- %w[CORE STARTER PREMIUM ULTIMATE FREE BRONZE SILVER GOLD].each do |tier|
- ['', ' ONLY', ' SELF', ' SAAS'].each do |modifier|
- context "#{tier}#{modifier}" do
- let(:string) { "My Header (#{tier}#{modifier})" }
-
- it 'ignores a product suffix' do
- is_expected.to eq 'my-header'
- end
-
- context 'with "*" around a product suffix' do
- let(:string) { "My Header **(#{tier}#{modifier})**" }
-
- it 'ignores a product suffix' do
- is_expected.to eq 'my-header'
+ %w[PREMIUM ULTIMATE FREE].each do |tier|
+ [' ALL', ' SELF', ' SAAS'].each do |modifier|
+ ['', ' BETA', ' EXPERIMENT'].each do |status|
+ context "#{tier}#{modifier}#{status}" do
+ context 'with "*" around a product suffix' do
+ let(:string) { "My Header **(#{tier}#{modifier}#{status})**" }
+
+ it 'ignores a product suffix' do
+ is_expected.to eq 'my-header'
+ end
end
end
end
end
end
+ %w[BETA EXPERIMENT].each do |status|
+ context 'with "*" around a product suffix' do
+ let(:string) { "My Header **(#{status})**" }
+
+ it 'ignores a product suffix' do
+ is_expected.to eq 'my-header'
+ end
+ end
+ end
end
context 'when string is empty' do
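
Aside (not part of the patch): the nested loops introduced above expand to every tier/modifier/status combination, so the rewritten contexts cover headers like the ones printed by this small sketch.

# Sketch: enumerate the header strings the new contexts exercise.
%w[PREMIUM ULTIMATE FREE].each do |tier|
  [' ALL', ' SELF', ' SAAS'].each do |modifier|
    ['', ' BETA', ' EXPERIMENT'].each do |status|
      puts "My Header **(#{tier}#{modifier}#{status})**" # each is expected to anchor to 'my-header'
    end
  end
end
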
diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb
index a1c2f7d667f..9bc1ebaebcb 100644
--- a/spec/lib/gitlab/workhorse_spec.rb
+++ b/spec/lib/gitlab/workhorse_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Workhorse do
+RSpec.describe Gitlab::Workhorse, feature_category: :shared do
let_it_be(:project) { create(:project, :repository) }
let(:features) { { 'gitaly-feature-enforce-requests-limits' => 'true' } }
@@ -365,19 +365,72 @@ RSpec.describe Gitlab::Workhorse do
end
end
+ describe '.cleanup_key' do
+ let(:key) { 'test-key' }
+ let(:value) { 'test-value' }
+
+ subject(:cleanup_key) { described_class.cleanup_key(key) }
+
+ shared_examples 'cleans up key' do |redis = Gitlab::Redis::Workhorse|
+ before do
+ described_class.set_key_and_notify(key, value)
+ end
+
+ it 'deletes the key' do
+ expect { cleanup_key }
+ .to change { redis.with { |c| c.exists?(key) } }.from(true).to(false)
+ end
+ end
+
+ it_behaves_like 'cleans up key'
+
+ context 'when workhorse migration feature flags are disabled' do
+ before do
+ stub_feature_flags(
+ use_primary_and_secondary_stores_for_workhorse: false,
+ use_primary_store_as_default_for_workhorse: false
+ )
+ end
+
+ it_behaves_like 'cleans up key', Gitlab::Redis::SharedState
+ end
+
+ context 'when either workhorse migration feature flags are enabled' do
+ context 'when use_primary_and_secondary_stores_for_workhorse is enabled' do
+ before do
+ stub_feature_flags(
+ use_primary_store_as_default_for_workhorse: false
+ )
+ end
+
+ it_behaves_like 'cleans up key'
+ end
+
+ context 'when use_primary_store_as_default_for_workhorse is enabled' do
+ before do
+ stub_feature_flags(
+ use_primary_and_secondary_stores_for_workhorse: false
+ )
+ end
+
+ it_behaves_like 'cleans up key'
+ end
+ end
+ end
+
describe '.set_key_and_notify' do
let(:key) { 'test-key' }
let(:value) { 'test-value' }
subject { described_class.set_key_and_notify(key, value, overwrite: overwrite) }
- shared_examples 'set and notify' do
+ shared_examples 'set and notify' do |redis = Gitlab::Redis::Workhorse|
it 'set and return the same value' do
is_expected.to eq(value)
end
it 'set and notify' do
- expect(Gitlab::Redis::SharedState).to receive(:with).and_call_original
+ expect(redis).to receive(:with).and_call_original
expect_any_instance_of(::Redis).to receive(:publish)
.with(described_class::NOTIFICATION_PREFIX + 'test-key', "test-value")
@@ -389,6 +442,39 @@ RSpec.describe Gitlab::Workhorse do
let(:overwrite) { true }
it_behaves_like 'set and notify'
+
+ context 'when workhorse migration feature flags are disabled' do
+ before do
+ stub_feature_flags(
+ use_primary_and_secondary_stores_for_workhorse: false,
+ use_primary_store_as_default_for_workhorse: false
+ )
+ end
+
+ it_behaves_like 'set and notify', Gitlab::Redis::SharedState
+ end
+
+ context 'when either workhorse migration feature flags are enabled' do
+ context 'when use_primary_and_secondary_stores_for_workhorse is enabled' do
+ before do
+ stub_feature_flags(
+ use_primary_store_as_default_for_workhorse: false
+ )
+ end
+
+ it_behaves_like 'set and notify'
+ end
+
+ context 'when use_primary_store_as_default_for_workhorse is enabled' do
+ before do
+ stub_feature_flags(
+ use_primary_and_secondary_stores_for_workhorse: false
+ )
+ end
+
+ it_behaves_like 'set and notify'
+ end
+ end
end
context 'when we set an existing key' do
@@ -519,18 +605,53 @@ RSpec.describe Gitlab::Workhorse do
describe '.send_dependency' do
let(:headers) { { Accept: 'foo', Authorization: 'Bearer asdf1234' } }
let(:url) { 'https://foo.bar.com/baz' }
+ let(:upload_method) { nil }
+ let(:upload_url) { nil }
+ let(:upload_headers) { {} }
+ let(:upload_config) { { method: upload_method, headers: upload_headers, url: upload_url }.compact_blank! }
- subject { described_class.send_dependency(headers, url) }
+ subject { described_class.send_dependency(headers, url, upload_config: upload_config) }
- it 'sets the header correctly', :aggregate_failures do
- key, command, params = decode_workhorse_header(subject)
+ shared_examples 'setting the header correctly' do |ensure_upload_config_field: nil|
+ it 'sets the header correctly' do
+ key, command, params = decode_workhorse_header(subject)
+ expected_params = {
+ 'Headers' => headers.transform_values { |v| Array.wrap(v) },
+ 'Url' => url,
+ 'UploadConfig' => {
+ 'Method' => upload_method,
+ 'Url' => upload_url,
+ 'Headers' => upload_headers.transform_values { |v| Array.wrap(v) }
+ }.compact_blank!
+ }
+ expected_params.compact_blank!
- expect(key).to eq("Gitlab-Workhorse-Send-Data")
- expect(command).to eq("send-dependency")
- expect(params).to eq({
- 'Header' => headers,
- 'Url' => url
- }.deep_stringify_keys)
+ expect(key).to eq("Gitlab-Workhorse-Send-Data")
+ expect(command).to eq("send-dependency")
+ expect(params).to eq(expected_params.deep_stringify_keys)
+
+ expect(params.dig('UploadConfig', ensure_upload_config_field)).to be_present if ensure_upload_config_field
+ end
+ end
+
+ it_behaves_like 'setting the header correctly'
+
+ context 'overriding the method' do
+ let(:upload_method) { 'PUT' }
+
+ it_behaves_like 'setting the header correctly', ensure_upload_config_field: 'Method'
+ end
+
+ context 'overriding the upload url' do
+ let(:upload_url) { 'https://test.dev' }
+
+ it_behaves_like 'setting the header correctly', ensure_upload_config_field: 'Url'
+ end
+
+ context 'with upload headers set' do
+ let(:upload_headers) { { 'Private-Token' => '1234567890' } }
+
+ it_behaves_like 'setting the header correctly', ensure_upload_config_field: 'Headers'
end
end
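
Aside (an assumption about ActiveSupport, not from the patch): the upload_config hash built above relies on Hash#compact_blank!, so only explicitly overridden fields survive into the Workhorse send-dependency header — roughly as in this sketch.

require 'active_support/all' # provides Hash#compact_blank!

# Sketch: blank values are stripped in place, mirroring let(:upload_config) above.
upload_config = { method: 'PUT', headers: {}, url: nil }.compact_blank!
upload_config # => { method: "PUT" }
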
diff --git a/spec/lib/gitlab/x509/certificate_spec.rb b/spec/lib/gitlab/x509/certificate_spec.rb
index d919b99de2a..a81bdfcbd42 100644
--- a/spec/lib/gitlab/x509/certificate_spec.rb
+++ b/spec/lib/gitlab/x509/certificate_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::X509::Certificate do
+RSpec.describe Gitlab::X509::Certificate, feature_category: :source_code_management do
include SmimeHelper
let(:sample_ca_certs_path) { Rails.root.join('spec/fixtures/clusters').to_s }
diff --git a/spec/lib/gitlab/x509/commit_sigstore_spec.rb b/spec/lib/gitlab/x509/commit_sigstore_spec.rb
new file mode 100644
index 00000000000..7079fa28108
--- /dev/null
+++ b/spec/lib/gitlab/x509/commit_sigstore_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::X509::Commit, feature_category: :source_code_management do
+ let(:commit_sha) { '440bf5b2b499a90d9adcbebe3752f8c6f245a1aa' }
+ let_it_be(:user) { create(:user, email: X509Helpers::User2.certificate_email) }
+ let_it_be(:project) { create(:project, :repository, path: X509Helpers::User2.path, creator: user) }
+ let(:commit) { create(:commit, project: project) }
+ let(:signature) { described_class.new(commit).signature }
+ let(:store) { OpenSSL::X509::Store.new }
+ let(:certificate) { OpenSSL::X509::Certificate.new(X509Helpers::User2.trust_cert) }
+
+ before do
+ store.add_cert(certificate) if certificate
+ allow(OpenSSL::X509::Store).to receive(:new).and_return(store)
+ end
+
+ describe '#signature' do
+ context 'on second call' do
+ it 'returns the cached signature' do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:new).and_call_original
+ end
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:create_cached_signature!).and_call_original
+ end
+
+ signature
+
+ # consecutive call
+ expect(described_class).not_to receive(:create_cached_signature!).and_call_original
+ signature
+ end
+ end
+ end
+
+ describe '#update_signature!' do
+ let(:certificate) { nil }
+
+ it 'updates verification status' do
+ signature
+
+ cert = OpenSSL::X509::Certificate.new(X509Helpers::User2.trust_cert)
+ store.add_cert(cert)
+
+ # stored_signature = CommitSignatures::X509CommitSignature.find_by_commit_sha(commit_sha)
+ # expect { described_class.new(commit).update_signature!(stored_signature) }.to(
+ # change { signature.reload.verification_status }.from('unverified').to('verified')
+ # ) # TODO sigstore support pending
+ end
+ end
+end
diff --git a/spec/lib/gitlab/x509/commit_spec.rb b/spec/lib/gitlab/x509/commit_spec.rb
index 412fa6e5a7f..2766a1a9bac 100644
--- a/spec/lib/gitlab/x509/commit_spec.rb
+++ b/spec/lib/gitlab/x509/commit_spec.rb
@@ -1,10 +1,10 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Gitlab::X509::Commit do
+RSpec.describe Gitlab::X509::Commit, feature_category: :source_code_management do
let(:commit_sha) { '189a6c924013fc3fe40d6f1ec1dc20214183bc97' }
- let(:user) { create(:user, email: X509Helpers::User1.certificate_email) }
- let(:project) { create(:project, :repository, path: X509Helpers::User1.path, creator: user) }
+ let_it_be(:user) { create(:user, email: X509Helpers::User1.certificate_email) }
+ let_it_be(:project) { create(:project, :repository, path: X509Helpers::User1.path, creator: user) }
let(:commit) { project.commit_by(oid: commit_sha ) }
let(:signature) { described_class.new(commit).signature }
let(:store) { OpenSSL::X509::Store.new }
diff --git a/spec/lib/gitlab/x509/signature_sigstore_spec.rb b/spec/lib/gitlab/x509/signature_sigstore_spec.rb
new file mode 100644
index 00000000000..84962576ea2
--- /dev/null
+++ b/spec/lib/gitlab/x509/signature_sigstore_spec.rb
@@ -0,0 +1,453 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::X509::Signature, feature_category: :source_code_management do
+ let(:issuer_attributes) do
+ {
+ subject_key_identifier: X509Helpers::User2.issuer_subject_key_identifier,
+ subject: X509Helpers::User2.certificate_issuer
+ }
+ end
+
+ it_behaves_like 'signature with type checking', :x509 do
+ subject(:signature) do
+ described_class.new(
+ X509Helpers::User2.signed_commit_signature,
+ X509Helpers::User2.signed_commit_base_data,
+ X509Helpers::User2.certificate_email,
+ X509Helpers::User2.signed_commit_time
+ )
+ end
+ end
+
+ shared_examples "a verified signature" do
+ let!(:user) { create(:user, email: X509Helpers::User2.certificate_email) }
+
+ subject(:signature) do
+ described_class.new(
+ X509Helpers::User2.signed_commit_signature,
+ X509Helpers::User2.signed_commit_base_data,
+ X509Helpers::User2.certificate_email,
+ X509Helpers::User2.signed_commit_time
+ )
+ end
+
+ it 'returns a verified signature if email does match' do
+ expect(signature.x509_certificate).to have_attributes(certificate_attributes)
+
+ expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes)
+ expect(signature.verified_signature).to be_falsey # TODO sigstore support pending
+ expect(signature.verification_status).to eq(:unverified) # TODO sigstore support pending
+ end
+
+ it 'returns a verified signature if email does match, case-insensitively' do
+ signature = described_class.new(
+ X509Helpers::User2.signed_commit_signature,
+ X509Helpers::User2.signed_commit_base_data,
+ X509Helpers::User2.certificate_email.upcase,
+ X509Helpers::User2.signed_commit_time
+ )
+
+ expect(signature.x509_certificate).to have_attributes(certificate_attributes)
+ expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes)
+ expect(signature.verified_signature).to be_falsey # TODO sigstore support pending
+ expect(signature.verification_status).to eq(:unverified) # TODO sigstore support pending
+ end
+
+ context 'when the certificate contains multiple emails' do
+ before do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:get_certificate_extension).and_call_original
+ allow(instance).to receive(:get_certificate_extension)
+ .with('subjectAltName')
+ .and_return("email:gitlab2@example.com, othername:<unsupported>, email:#{
+ X509Helpers::User2.certificate_email
+ }")
+ end
+ end
+
+ context 'and the email matches one of them' do
+ it 'returns a verified signature' do
+ expect(signature.x509_certificate).to have_attributes(certificate_attributes.except(:email, :emails))
+ expect(signature.x509_certificate.email).to eq('gitlab2@example.com')
+ expect(signature.x509_certificate.emails).to contain_exactly('gitlab2@example.com',
+ X509Helpers::User2.certificate_email)
+ expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes)
+ expect(signature.verified_signature).to be_falsey # TODO sigstore support pending
+ expect(signature.verification_status).to eq(:unverified) # TODO sigstore support pending
+ end
+ end
+ end
+
+ context "if the email matches but isn't confirmed" do
+ let!(:user) { create(:user, :unconfirmed, email: X509Helpers::User2.certificate_email) }
+
+ it "returns an unverified signature" do
+ expect(signature.verification_status).to eq(:unverified)
+ end
+ end
+
+ it 'returns an unverified signature if email does not match' do
+ signature = described_class.new(
+ X509Helpers::User2.signed_commit_signature,
+ X509Helpers::User2.signed_commit_base_data,
+ "gitlab@example.com",
+ X509Helpers::User2.signed_commit_time
+ )
+
+ expect(signature.x509_certificate).to have_attributes(certificate_attributes)
+ expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes)
+ expect(signature.verified_signature).to be_falsey # TODO sigstore support pending
+ expect(signature.verification_status).to eq(:unverified)
+ end
+
+ it 'returns an unverified signature if email does match and time is wrong' do
+ signature = described_class.new(
+ X509Helpers::User2.signed_commit_signature,
+ X509Helpers::User2.signed_commit_base_data,
+ X509Helpers::User2.certificate_email,
+ Time.zone.local(2020, 2, 22)
+ )
+
+ expect(signature.x509_certificate).to have_attributes(certificate_attributes)
+ expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes)
+ expect(signature.verified_signature).to be_falsey
+ expect(signature.verification_status).to eq(:unverified)
+ end
+
+ it 'returns an unverified signature if certificate is revoked' do
+ expect(signature.verification_status).to eq(:unverified) # TODO sigstore support pending
+
+ signature.x509_certificate.revoked!
+
+ expect(signature.verification_status).to eq(:unverified)
+ end
+ end
+
+ context 'with commit signature' do
+ let(:certificate_attributes) do
+ {
+ subject_key_identifier: X509Helpers::User2.certificate_subject_key_identifier,
+ subject: X509Helpers::User2.certificate_subject,
+ email: X509Helpers::User2.certificate_email,
+ emails: [X509Helpers::User2.certificate_email],
+ serial_number: X509Helpers::User2.certificate_serial
+ }
+ end
+
+ context 'with verified signature' do
+ context 'with trusted certificate store' do
+ before do
+ store = OpenSSL::X509::Store.new
+ certificate = OpenSSL::X509::Certificate.new(X509Helpers::User2.trust_cert)
+ store.add_cert(certificate)
+ allow(OpenSSL::X509::Store).to receive(:new).and_return(store)
+ end
+
+ it_behaves_like "a verified signature"
+ end
+
+ context 'with the certificate defined by OpenSSL::X509::DEFAULT_CERT_FILE' do
+ before do
+ store = OpenSSL::X509::Store.new
+ certificate = OpenSSL::X509::Certificate.new(X509Helpers::User2.trust_cert)
+ file_path = Rails.root.join("tmp/cert.pem").to_s
+
+ File.open(file_path, "wb") do |f|
+ f.print certificate.to_pem
+ end
+
+ allow(Gitlab::X509::Certificate).to receive(:default_cert_file).and_return(file_path)
+
+ allow(OpenSSL::X509::Store).to receive(:new).and_return(store)
+ end
+
+ it_behaves_like "a verified signature"
+ end
+
+ context 'without trusted certificate within store' do
+ before do
+ store = OpenSSL::X509::Store.new
+ allow(OpenSSL::X509::Store).to receive(:new)
+ .and_return(
+ store
+ )
+ end
+
+ it 'returns an unverified signature' do
+ signature = described_class.new(
+ X509Helpers::User2.signed_commit_signature,
+ X509Helpers::User2.signed_commit_base_data,
+ X509Helpers::User2.certificate_email,
+ X509Helpers::User2.signed_commit_time
+ )
+
+ expect(signature.x509_certificate).to have_attributes(certificate_attributes)
+ expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes)
+ expect(signature.verified_signature).to be_falsey
+ expect(signature.verification_status).to eq(:unverified)
+ end
+ end
+ end
+
+ context 'with invalid signature' do
+ it 'returns nil' do
+ signature = described_class.new(
+ X509Helpers::User2.signed_commit_signature.tr('A', 'B'),
+ X509Helpers::User2.signed_commit_base_data,
+ X509Helpers::User2.certificate_email,
+ X509Helpers::User2.signed_commit_time
+ )
+ expect(signature.x509_certificate).to be_nil
+ expect(signature.verified_signature).to be_falsey
+ expect(signature.verification_status).to eq(:unverified)
+ end
+ end
+
+ context 'with invalid commit message' do
+ it 'returns nil' do
+ signature = described_class.new(
+ X509Helpers::User2.signed_commit_signature,
+ 'x',
+ X509Helpers::User2.certificate_email,
+ X509Helpers::User2.signed_commit_time
+ )
+ expect(signature.x509_certificate).to be_nil
+ expect(signature.verified_signature).to be_falsey
+ expect(signature.verification_status).to eq(:unverified)
+ end
+ end
+ end
+
+ context 'with email' do
+ describe 'subjectAltName with email, othername' do
+ before do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:get_certificate_extension).and_call_original
+ allow(instance).to receive(:get_certificate_extension)
+ .with('subjectAltName')
+ .and_return("email:gitlab@example.com, othername:<unsupported>")
+ end
+ end
+
+ let(:signature) do
+ described_class.new(
+ X509Helpers::User2.signed_commit_signature,
+ X509Helpers::User2.signed_commit_base_data,
+ 'gitlab@example.com',
+ X509Helpers::User2.signed_commit_time
+ )
+ end
+
+ it 'extracts email' do
+ expect(signature.x509_certificate.email).to eq("gitlab@example.com")
+ expect(signature.x509_certificate.emails).to contain_exactly("gitlab@example.com")
+ end
+
+ context 'when there are multiple emails' do
+ before do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:get_certificate_extension).and_call_original
+ allow(instance).to receive(:get_certificate_extension)
+ .with('subjectAltName')
+ .and_return("email:gitlab@example.com, othername:<unsupported>, email:gitlab2@example.com")
+ end
+ end
+
+ it 'extracts all the emails' do
+ expect(signature.x509_certificate.email).to eq("gitlab@example.com")
+ expect(signature.x509_certificate.emails).to contain_exactly("gitlab@example.com", "gitlab2@example.com")
+ end
+ end
+ end
+
+ describe 'subjectAltName with othername, email' do
+ before do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:get_certificate_extension).and_call_original
+ end
+
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:get_certificate_extension).and_call_original
+ allow(instance).to receive(:get_certificate_extension)
+ .with('subjectAltName')
+ .and_return("othername:<unsupported>, email:gitlab@example.com")
+ end
+ end
+
+ it 'extracts email' do
+ signature = described_class.new(
+ X509Helpers::User2.signed_commit_signature,
+ X509Helpers::User2.signed_commit_base_data,
+ 'gitlab@example.com',
+ X509Helpers::User2.signed_commit_time
+ )
+
+ expect(signature.x509_certificate.email).to eq("gitlab@example.com")
+ end
+ end
+ end
+
+ describe '#signed_by_user' do
+ subject do
+ described_class.new(
+ X509Helpers::User2.signed_tag_signature,
+ X509Helpers::User2.signed_tag_base_data,
+ X509Helpers::User2.certificate_email,
+ X509Helpers::User2.signed_commit_time
+ ).signed_by_user
+ end
+
+ context 'if email is assigned to a user' do
+ let!(:signed_by_user) { create(:user, email: X509Helpers::User2.certificate_email) }
+
+ it 'returns user' do
+ is_expected.to eq(signed_by_user)
+ end
+ end
+
+ it 'returns nil if email is not assigned to a user' do
+ is_expected.to be_nil
+ end
+ end
+
+ context 'with tag signature' do
+ let(:certificate_attributes) do
+ {
+ subject_key_identifier: X509Helpers::User2.tag_certificate_subject_key_identifier,
+ subject: X509Helpers::User2.certificate_subject,
+ email: X509Helpers::User2.certificate_email,
+ emails: [X509Helpers::User2.certificate_email],
+ serial_number: X509Helpers::User2.tag_certificate_serial
+ }
+ end
+
+ let(:issuer_attributes) do
+ {
+ subject_key_identifier: X509Helpers::User2.tag_issuer_subject_key_identifier,
+ subject: X509Helpers::User2.tag_certificate_issuer
+ }
+ end
+
+ context 'with verified signature' do
+ let_it_be(:user) { create(:user, :unconfirmed, email: X509Helpers::User2.certificate_email) }
+
+ subject(:signature) do
+ described_class.new(
+ X509Helpers::User2.signed_tag_signature,
+ X509Helpers::User2.signed_tag_base_data,
+ X509Helpers::User2.certificate_email,
+ X509Helpers::User2.signed_commit_time
+ )
+ end
+
+ context 'with trusted certificate store' do
+ before do
+ store = OpenSSL::X509::Store.new
+ certificate = OpenSSL::X509::Certificate.new X509Helpers::User2.trust_cert
+ store.add_cert(certificate)
+ allow(OpenSSL::X509::Store).to receive(:new).and_return(store)
+ end
+
+ context 'when user email is confirmed' do
+ before_all do
+ user.confirm
+ end
+
+ it 'returns a verified signature if email does match', :aggregate_failures do
+ expect(signature.x509_certificate).to have_attributes(certificate_attributes)
+ expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes)
+ expect(signature.verified_signature).to be_falsey # TODO sigstore support pending
+ expect(signature.verification_status).to eq(:unverified) # TODO sigstore support pending
+ end
+
+ it 'returns an unverified signature if email does not match', :aggregate_failures do
+ signature = described_class.new(
+ X509Helpers::User2.signed_tag_signature,
+ X509Helpers::User2.signed_tag_base_data,
+ "gitlab@example.com",
+ X509Helpers::User2.signed_commit_time
+ )
+
+ expect(signature.x509_certificate).to have_attributes(certificate_attributes)
+ expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes)
+ expect(signature.verified_signature).to be_falsey # TODO sigstore support pending
+ expect(signature.verification_status).to eq(:unverified)
+ end
+
+ it 'returns an unverified signature if email does match and time is wrong', :aggregate_failures do
+ signature = described_class.new(
+ X509Helpers::User2.signed_tag_signature,
+ X509Helpers::User2.signed_tag_base_data,
+ X509Helpers::User2.certificate_email,
+ Time.zone.local(2020, 2, 22)
+ )
+
+ expect(signature.x509_certificate).to have_attributes(certificate_attributes)
+ expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes)
+ expect(signature.verified_signature).to be_falsey
+ expect(signature.verification_status).to eq(:unverified)
+ end
+
+ it 'returns an unverified signature if certificate is revoked' do
+ expect(signature.verification_status).to eq(:unverified) # TODO sigstore support pending
+
+ signature.x509_certificate.revoked!
+
+ expect(signature.verification_status).to eq(:unverified)
+ end
+ end
+
+ it 'returns an unverified signature if the email matches but is not confirmed' do
+ expect(signature.verification_status).to eq(:unverified)
+ end
+ end
+
+ context 'without trusted certificate within store' do
+ before do
+ store = OpenSSL::X509::Store.new
+ allow(OpenSSL::X509::Store).to receive(:new)
+ .and_return(
+ store
+ )
+ end
+
+ it 'returns an unverified signature' do
+ expect(signature.x509_certificate).to have_attributes(certificate_attributes)
+ expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes)
+ expect(signature.verified_signature).to be_falsey
+ expect(signature.verification_status).to eq(:unverified)
+ end
+ end
+ end
+
+ context 'with invalid signature' do
+ it 'returns nil' do
+ signature = described_class.new(
+ X509Helpers::User2.signed_tag_signature.tr('A', 'B'),
+ X509Helpers::User2.signed_tag_base_data,
+ X509Helpers::User2.certificate_email,
+ X509Helpers::User2.signed_commit_time
+ )
+ expect(signature.x509_certificate).to be_nil
+ expect(signature.verified_signature).to be_falsey
+ expect(signature.verification_status).to eq(:unverified)
+ end
+ end
+
+ context 'with invalid message' do
+ it 'returns nil' do
+ signature = described_class.new(
+ X509Helpers::User2.signed_tag_signature,
+ 'x',
+ X509Helpers::User2.certificate_email,
+ X509Helpers::User2.signed_commit_time
+ )
+ expect(signature.x509_certificate).to be_nil
+ expect(signature.verified_signature).to be_falsey
+ expect(signature.verification_status).to eq(:unverified)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/x509/signature_spec.rb b/spec/lib/gitlab/x509/signature_spec.rb
index e0823aa8153..8043cefe888 100644
--- a/spec/lib/gitlab/x509/signature_spec.rb
+++ b/spec/lib/gitlab/x509/signature_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::X509::Signature do
+RSpec.describe Gitlab::X509::Signature, feature_category: :source_code_management do
let(:issuer_attributes) do
{
subject_key_identifier: X509Helpers::User1.issuer_subject_key_identifier,
diff --git a/spec/lib/gitlab/x509/tag_sigstore_spec.rb b/spec/lib/gitlab/x509/tag_sigstore_spec.rb
new file mode 100644
index 00000000000..3cf864ea442
--- /dev/null
+++ b/spec/lib/gitlab/x509/tag_sigstore_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::X509::Tag, feature_category: :source_code_management do
+ describe '#signature' do
+ let(:tag_id) { 'v1.1.2' }
+ let(:tag) { instance_double('Gitlab::Git::Tag') }
+ let_it_be(:user) { create(:user, email: X509Helpers::User2.tag_email) }
+ let_it_be(:project) { create(:project, path: X509Helpers::User2.path, creator: user) }
+ let(:signature) { described_class.new(project.repository, tag).signature }
+
+ before do
+ allow(tag).to receive(:id).and_return(tag_id)
+ allow(tag).to receive(:has_signature?).and_return(true)
+ allow(tag).to receive(:user_email).and_return(user.email)
+ allow(tag).to receive(:date).and_return(X509Helpers::User2.signed_tag_time)
+ allow(Gitlab::Git::Tag).to receive(:extract_signature_lazily).with(project.repository, tag_id)
+ .and_return([X509Helpers::User2.signed_tag_signature, X509Helpers::User2.signed_tag_base_data])
+ end
+
+ describe 'signed tag' do
+ let(:certificate_attributes) do
+ {
+ subject_key_identifier: X509Helpers::User2.tag_certificate_subject_key_identifier,
+ subject: X509Helpers::User2.certificate_subject,
+ email: X509Helpers::User2.certificate_email,
+ serial_number: X509Helpers::User2.tag_certificate_serial
+ }
+ end
+
+ let(:issuer_attributes) do
+ {
+ subject_key_identifier: X509Helpers::User2.tag_issuer_subject_key_identifier,
+ subject: X509Helpers::User2.tag_certificate_issuer
+ }
+ end
+
+ it { expect(signature).not_to be_nil }
+ it { expect(signature.verification_status).to eq(:unverified) }
+ it { expect(signature.x509_certificate).to have_attributes(certificate_attributes) }
+ it { expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/x509/tag_spec.rb b/spec/lib/gitlab/x509/tag_spec.rb
index e20ef688db5..4368c3d7a4b 100644
--- a/spec/lib/gitlab/x509/tag_spec.rb
+++ b/spec/lib/gitlab/x509/tag_spec.rb
@@ -1,15 +1,24 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Gitlab::X509::Tag do
- subject(:signature) { described_class.new(project.repository, tag).signature }
-
+RSpec.describe Gitlab::X509::Tag, feature_category: :source_code_management do
describe '#signature' do
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:repository) { project.repository.raw }
+ let(:tag_id) { 'v1.1.1' }
+ let(:tag) { instance_double('Gitlab::Git::Tag') }
+ let_it_be(:user) { create(:user, email: X509Helpers::User1.tag_email) }
+ let_it_be(:project) { create(:project, path: X509Helpers::User1.path, creator: user) }
+ let(:signature) { described_class.new(project.repository, tag).signature }
+
+ before do
+ allow(tag).to receive(:id).and_return(tag_id)
+ allow(tag).to receive(:has_signature?).and_return(true)
+ allow(tag).to receive(:user_email).and_return(user.email)
+ allow(tag).to receive(:date).and_return(X509Helpers::User1.signed_tag_time)
+ allow(Gitlab::Git::Tag).to receive(:extract_signature_lazily).with(project.repository, tag_id)
+ .and_return([X509Helpers::User1.signed_tag_signature, X509Helpers::User1.signed_tag_base_data])
+ end
describe 'signed tag' do
- let(:tag) { project.repository.find_tag('v1.1.1') }
let(:certificate_attributes) do
{
subject_key_identifier: X509Helpers::User1.tag_certificate_subject_key_identifier,
@@ -32,11 +41,5 @@ RSpec.describe Gitlab::X509::Tag do
it { expect(signature.x509_certificate).to have_attributes(certificate_attributes) }
it { expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) }
end
-
- describe 'unsigned tag' do
- let(:tag) { project.repository.find_tag('v1.0.0') }
-
- it { expect(signature).to be_nil }
- end
end
end
diff --git a/spec/lib/peek/views/click_house_spec.rb b/spec/lib/peek/views/click_house_spec.rb
index 9d7d06204fc..1ff49afd728 100644
--- a/spec/lib/peek/views/click_house_spec.rb
+++ b/spec/lib/peek/views/click_house_spec.rb
@@ -16,9 +16,15 @@ RSpec.describe Peek::Views::ClickHouse, :click_house, :request_store, feature_ca
data = ClickHouse::Client.select('SELECT 1 AS value', :main)
ClickHouse::Client.execute('INSERT INTO events (id) VALUES (1)', :main)
+ Tempfile.open(['test', '.csv.gz']) do |f|
+ File.binwrite(f.path, ActiveSupport::Gzip.compress("id\n10\n20"))
+
+ ClickHouse::Client.insert_csv('INSERT INTO events (id) FORMAT CSV', File.open(f.path), :main)
+ end
+
expect(data).to eq([{ 'value' => 1 }])
- expect(results[:calls]).to eq(2)
+ expect(results[:calls]).to eq(3)
expect(results[:duration]).to be_kind_of(String)
expect(results[:details]).to match_array([
@@ -30,6 +36,11 @@ RSpec.describe Peek::Views::ClickHouse, :click_house, :request_store, feature_ca
sql: 'INSERT INTO events (id) VALUES (1)',
database: 'database: main',
statistics: include('written_rows=>"1"')
+ }),
+ a_hash_including({
+ sql: 'INSERT INTO events (id) FORMAT CSV',
+ database: 'database: main',
+ statistics: include('written_rows=>"2"')
})
])
end
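
Aside (not from the patch): the new ClickHouse expectation feeds a gzip-compressed CSV with a header plus two data rows, which is why written_rows is asserted as "2" — building that payload looks roughly like this sketch.

require 'active_support/all'
require 'tempfile'

# Sketch: two data rows under an 'id' header, compressed the same way the spec does.
Tempfile.open(['test', '.csv.gz']) do |f|
  File.binwrite(f.path, ActiveSupport::Gzip.compress("id\n10\n20"))
  ActiveSupport::Gzip.decompress(File.binread(f.path)).lines.size # => 3 (header + 2 rows)
end
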
diff --git a/spec/lib/sidebars/admin/panel_spec.rb b/spec/lib/sidebars/admin/panel_spec.rb
index 9c362f527f5..83ad867050c 100644
--- a/spec/lib/sidebars/admin/panel_spec.rb
+++ b/spec/lib/sidebars/admin/panel_spec.rb
@@ -18,14 +18,10 @@ RSpec.describe Sidebars::Admin::Panel, feature_category: :navigation do
describe '#super_sidebar_context_header' do
it 'returns a hash with the correct title and icon' do
- expected_header = {
- title: panel.aria_label,
- icon: 'admin'
- }
-
- expect(panel.super_sidebar_context_header).to eq(expected_header)
+ expect(panel.super_sidebar_context_header).to eq(_('Admin Area'))
end
end
it_behaves_like 'a panel with uniquely identifiable menu items'
+ it_behaves_like 'a panel instantiable by the anonymous user'
end
diff --git a/spec/lib/sidebars/concerns/has_avatar_spec.rb b/spec/lib/sidebars/concerns/has_avatar_spec.rb
new file mode 100644
index 00000000000..bc9038c216e
--- /dev/null
+++ b/spec/lib/sidebars/concerns/has_avatar_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Sidebars::Concerns::HasAvatar, feature_category: :navigation do
+ subject do
+ Class.new do
+ include Sidebars::Concerns::HasAvatar
+ end.new
+ end
+
+ describe '#avatar' do
+ it 'returns nil' do
+ expect(subject.avatar).to be_nil
+ end
+ end
+
+ describe '#avatar_shape' do
+ it 'returns rect' do
+ expect(subject.avatar_shape).to eq('rect')
+ end
+ end
+
+ describe '#entity_id' do
+ it 'returns nil' do
+ expect(subject.entity_id).to be_nil
+ end
+ end
+end
diff --git a/spec/lib/sidebars/explore/panel_spec.rb b/spec/lib/sidebars/explore/panel_spec.rb
new file mode 100644
index 00000000000..b3030dfe2e4
--- /dev/null
+++ b/spec/lib/sidebars/explore/panel_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Explore::Panel, feature_category: :navigation do
+ let(:user) { build_stubbed(:user) }
+
+ let(:context) { Sidebars::Context.new(current_user: user, container: nil) }
+
+ subject { described_class.new(context) }
+
+ it_behaves_like 'a panel with uniquely identifiable menu items'
+
+ it 'implements #super_sidebar_context_header' do
+ expect(subject.super_sidebar_context_header).to eq(_('Explore'))
+ end
+end
diff --git a/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb b/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb
index 382ee07e458..713e22e2e76 100644
--- a/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb
+++ b/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb
@@ -24,29 +24,16 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu, feature_category
expect(menu.render?).to eq true
end
end
-
- context 'when menu does not have any menu item to show' do
- it 'returns false' do
- stub_feature_flags(harbor_registry_integration: false)
- stub_container_registry_config(enabled: false)
- stub_config(packages: { enabled: false })
- stub_config(dependency_proxy: { enabled: false })
-
- expect(menu.render?).to eq false
- end
- end
end
describe '#link' do
let(:registry_enabled) { true }
let(:packages_enabled) { true }
- let(:harbor_registry_integration) { true }
before do
stub_container_registry_config(enabled: registry_enabled)
stub_config(packages: { enabled: packages_enabled })
stub_config(dependency_proxy: { enabled: true })
- stub_feature_flags(harbor_registry_integration: harbor_registry_integration)
end
subject { menu.link }
@@ -70,14 +57,6 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu, feature_category
it 'menu link points to Harbor Registry page' do
expect(subject).to eq find_menu(menu, :harbor_registry).link
end
-
- context 'when Harbor Registry is not visible' do
- let(:harbor_registry_integration) { false }
-
- it 'menu link points to Dependency Proxy page' do
- expect(subject).to eq find_menu(menu, :dependency_proxy).link
- end
- end
end
end
end
@@ -194,29 +173,13 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu, feature_category
describe 'Harbor Registry' do
let(:item_id) { :harbor_registry }
- before do
- stub_feature_flags(harbor_registry_integration: harbor_registry_enabled)
- end
-
- context 'when config harbor registry setting is disabled' do
- let(:harbor_registry_enabled) { false }
-
- it_behaves_like 'the menu entry is not available'
- end
-
- context 'when config harbor registry setting is enabled' do
- let(:harbor_registry_enabled) { true }
-
- it_behaves_like 'the menu entry is available'
- end
+ it_behaves_like 'the menu entry is available'
context 'when config harbor registry setting is not activated' do
before do
harbor_integration.update!(active: false)
end
- let(:harbor_registry_enabled) { true }
-
it_behaves_like 'the menu entry is not available'
end
end
diff --git a/spec/lib/sidebars/groups/menus/scope_menu_spec.rb b/spec/lib/sidebars/groups/menus/scope_menu_spec.rb
index d3aceaf422b..2cce2d28e68 100644
--- a/spec/lib/sidebars/groups/menus/scope_menu_spec.rb
+++ b/spec/lib/sidebars/groups/menus/scope_menu_spec.rb
@@ -17,9 +17,10 @@ RSpec.describe Sidebars::Groups::Menus::ScopeMenu, feature_category: :navigation
it_behaves_like 'serializable as super_sidebar_menu_args' do
let(:extra_attrs) do
{
- sprite_icon: 'group',
super_sidebar_parent: ::Sidebars::StaticMenu,
- title: _('Group overview'),
+ title: group.name,
+ avatar: group.avatar_url,
+ entity_id: group.id,
item_id: :group_overview
}
end
diff --git a/spec/lib/sidebars/groups/super_sidebar_panel_spec.rb b/spec/lib/sidebars/groups/super_sidebar_panel_spec.rb
index 52c3a35a9d7..c939dd870c4 100644
--- a/spec/lib/sidebars/groups/super_sidebar_panel_spec.rb
+++ b/spec/lib/sidebars/groups/super_sidebar_panel_spec.rb
@@ -20,12 +20,7 @@ RSpec.describe Sidebars::Groups::SuperSidebarPanel, feature_category: :navigatio
subject { described_class.new(context) }
it 'implements #super_sidebar_context_header' do
- expect(subject.super_sidebar_context_header).to eq(
- {
- title: group.name,
- avatar: group.avatar_url,
- id: group.id
- })
+ expect(subject.super_sidebar_context_header).to eq(_('Group'))
end
describe '#renderable_menus' do
@@ -53,4 +48,5 @@ RSpec.describe Sidebars::Groups::SuperSidebarPanel, feature_category: :navigatio
it_behaves_like 'a panel with uniquely identifiable menu items'
it_behaves_like 'a panel with all menu_items categorized'
+ it_behaves_like 'a panel instantiable by the anonymous user'
end
diff --git a/spec/lib/sidebars/menu_item_spec.rb b/spec/lib/sidebars/menu_item_spec.rb
index 3ff5b80e5d9..7f67b5a2e8d 100644
--- a/spec/lib/sidebars/menu_item_spec.rb
+++ b/spec/lib/sidebars/menu_item_spec.rb
@@ -5,7 +5,8 @@ require 'fast_spec_helper'
RSpec.describe Sidebars::MenuItem, feature_category: :navigation do
let(:title) { 'foo' }
let(:html_options) { {} }
- let(:menu_item) { described_class.new(title: title, active_routes: {}, link: '', container_html_options: html_options) }
+ let(:extra) { {} }
+ let(:menu_item) { described_class.new(title: title, active_routes: {}, link: '', container_html_options: html_options, **extra) }
it 'includes by default aria-label attribute set to the title' do
expect(menu_item.container_html_options).to eq({ aria: { label: title } })
@@ -21,11 +22,17 @@ RSpec.describe Sidebars::MenuItem, feature_category: :navigation do
describe "#serialize_for_super_sidebar" do
let(:html_options) { { class: 'custom-class' } }
+ let(:extra) { { avatar: '/avatar.png', entity_id: 123 } }
subject { menu_item.serialize_for_super_sidebar }
it 'includes custom CSS classes' do
expect(subject[:link_classes]).to be('custom-class')
end
+
+ it 'includes avatar data' do
+ expect(subject[:avatar]).to be('/avatar.png')
+ expect(subject[:entity_id]).to be(123)
+ end
end
end
diff --git a/spec/lib/sidebars/menu_spec.rb b/spec/lib/sidebars/menu_spec.rb
index 00202ac7d2b..e59a8cd2163 100644
--- a/spec/lib/sidebars/menu_spec.rb
+++ b/spec/lib/sidebars/menu_spec.rb
@@ -33,6 +33,8 @@ RSpec.describe Sidebars::Menu, feature_category: :navigation do
item_id: 'id1',
title: 'Is active',
link: 'foo2',
+ avatar: '/avatar.png',
+ entity_id: 123,
active_routes: { controller: 'fooc' }
))
menu.add_item(Sidebars::MenuItem.new(
@@ -51,6 +53,9 @@ RSpec.describe Sidebars::Menu, feature_category: :navigation do
{
title: "Title",
icon: nil,
+ avatar: nil,
+ avatar_shape: 'rect',
+ entity_id: nil,
link: "foo2",
is_active: true,
pill_count: nil,
@@ -60,6 +65,8 @@ RSpec.describe Sidebars::Menu, feature_category: :navigation do
id: 'id1',
title: "Is active",
icon: nil,
+ avatar: '/avatar.png',
+ entity_id: 123,
link: "foo2",
is_active: true,
pill_count: nil,
@@ -69,6 +76,8 @@ RSpec.describe Sidebars::Menu, feature_category: :navigation do
id: 'id2',
title: "Not active",
icon: nil,
+ avatar: nil,
+ entity_id: nil,
link: "foo3",
is_active: false,
pill_count: 10,
@@ -85,6 +94,9 @@ RSpec.describe Sidebars::Menu, feature_category: :navigation do
{
title: "Title",
icon: nil,
+ avatar: nil,
+ avatar_shape: 'rect',
+ entity_id: nil,
link: nil,
is_active: false,
pill_count: 'foo',
diff --git a/spec/lib/sidebars/organizations/menus/scope_menu_spec.rb b/spec/lib/sidebars/organizations/menus/scope_menu_spec.rb
index bc03787e95f..999889a72ee 100644
--- a/spec/lib/sidebars/organizations/menus/scope_menu_spec.rb
+++ b/spec/lib/sidebars/organizations/menus/scope_menu_spec.rb
@@ -11,8 +11,8 @@ RSpec.describe Sidebars::Organizations::Menus::ScopeMenu, feature_category: :nav
let(:menu) { described_class.new(context) }
let(:extra_attrs) do
{
- title: s_('Organization|Organization overview'),
- sprite_icon: 'organization',
+ avatar: nil,
+ entity_id: organization.id,
super_sidebar_parent: ::Sidebars::StaticMenu,
item_id: :organization_overview
}
diff --git a/spec/lib/sidebars/organizations/panel_spec.rb b/spec/lib/sidebars/organizations/panel_spec.rb
index 1f0b8d72aef..edaa676aa41 100644
--- a/spec/lib/sidebars/organizations/panel_spec.rb
+++ b/spec/lib/sidebars/organizations/panel_spec.rb
@@ -14,4 +14,5 @@ RSpec.describe Sidebars::Organizations::Panel, feature_category: :navigation do
end
it_behaves_like 'a panel with uniquely identifiable menu items'
+ it_behaves_like 'a panel instantiable by the anonymous user'
end
diff --git a/spec/lib/sidebars/organizations/super_sidebar_panel_spec.rb b/spec/lib/sidebars/organizations/super_sidebar_panel_spec.rb
index 99b33a5edf8..b8ceda615c4 100644
--- a/spec/lib/sidebars/organizations/super_sidebar_panel_spec.rb
+++ b/spec/lib/sidebars/organizations/super_sidebar_panel_spec.rb
@@ -15,11 +15,7 @@ RSpec.describe Sidebars::Organizations::SuperSidebarPanel, feature_category: :na
subject { described_class.new(context) }
it 'implements #super_sidebar_context_header' do
- expect(subject.super_sidebar_context_header).to eq(
- {
- title: organization.name,
- id: organization.id
- })
+ expect(subject.super_sidebar_context_header).to eq(s_('Organization|Organization'))
end
describe '#renderable_menus' do
@@ -36,4 +32,5 @@ RSpec.describe Sidebars::Organizations::SuperSidebarPanel, feature_category: :na
end
it_behaves_like 'a panel with uniquely identifiable menu items'
+ it_behaves_like 'a panel instantiable by the anonymous user'
end
diff --git a/spec/lib/sidebars/panel_spec.rb b/spec/lib/sidebars/panel_spec.rb
index 857cb1139b5..e4b3b973484 100644
--- a/spec/lib/sidebars/panel_spec.rb
+++ b/spec/lib/sidebars/panel_spec.rb
@@ -46,17 +46,25 @@ RSpec.describe Sidebars::Panel, feature_category: :navigation do
end
end
- describe '#has_renderable_menus?' do
- it 'returns false when no renderable menus' do
- expect(panel.has_renderable_menus?).to be false
+ describe '#render?' do
+ it 'returns false with no menus' do
+ expect(panel.render?).to be false
end
- it 'returns true when no renderable menus' do
+ it 'returns false with no renderable menus' do
+ allow(menu1).to receive(:render?).and_return(false)
+
+ panel.add_menu(menu1)
+
+ expect(panel.render?).to be false
+ end
+
+ it 'returns true with renderable menus' do
allow(menu1).to receive(:render?).and_return(true)
panel.add_menu(menu1)
- expect(panel.has_renderable_menus?).to be true
+ expect(panel.render?).to be true
end
end
diff --git a/spec/lib/sidebars/projects/menus/issues_menu_spec.rb b/spec/lib/sidebars/projects/menus/issues_menu_spec.rb
index 53d92d013a9..91913e5b733 100644
--- a/spec/lib/sidebars/projects/menus/issues_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/issues_menu_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe Sidebars::Projects::Menus::IssuesMenu, feature_category: :navigat
let(:extra_attrs) do
{
item_id: :project_issue_list,
+ active_routes: { path: %w[projects/issues#index projects/issues#show projects/issues#new] },
pill_count: menu.pill_count,
has_pill: menu.has_pill?,
super_sidebar_parent: Sidebars::Projects::SuperSidebarMenus::PlanMenu
diff --git a/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb b/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb
index c0787aa9db5..f1df56823b1 100644
--- a/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb
@@ -88,19 +88,5 @@ RSpec.describe Sidebars::Projects::Menus::MonitorMenu, feature_category: :naviga
it_behaves_like 'access rights checks'
end
-
- describe 'Tracing' do
- let(:item_id) { :tracing }
-
- specify { is_expected.not_to be_nil }
-
- describe 'when feature is disabled' do
- before do
- stub_feature_flags(observability_tracing: false)
- end
-
- specify { is_expected.to be_nil }
- end
- end
end
end
diff --git a/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
index b917208bac1..0cf95391a26 100644
--- a/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu, feature_catego
before do
stub_container_registry_config(enabled: registry_enabled)
stub_config(packages: { enabled: packages_enabled })
- stub_feature_flags(harbor_registry_integration: false, ml_experiment_tracking: false)
+ stub_feature_flags(ml_experiment_tracking: false)
end
context 'when Packages Registry is visible' do
@@ -58,8 +58,8 @@ RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu, feature_catego
context 'when Container Registry is not visible' do
let(:registry_enabled) { false }
- it 'does not display menu link' do
- expect(subject.render?).to eq false
+ it 'displays menu link' do
+ expect(subject.render?).to eq true
end
end
end
@@ -155,26 +155,13 @@ RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu, feature_catego
describe 'Harbor Registry' do
let(:item_id) { :harbor_registry }
- context 'when config harbor registry setting is disabled' do
- it 'does not add the menu item to the list' do
- stub_feature_flags(harbor_registry_integration: false)
-
- is_expected.to be_nil
- end
- end
-
- context 'when config harbor registry setting is enabled' do
- it 'the menu item is added to list of menu items' do
- stub_feature_flags(harbor_registry_integration: true)
-
- is_expected.not_to be_nil
- expect(subject.active_routes[:controller]).to eq('projects/harbor/repositories')
- end
+ it 'adds the menu item to the list of menu items' do
+ is_expected.not_to be_nil
+ expect(subject.active_routes[:controller]).to eq('projects/harbor/repositories')
end
context 'when config harbor registry setting is not activated' do
it 'does not add the menu item to the list' do
- stub_feature_flags(harbor_registry_integration: true)
project.harbor_integration.update!(active: false)
is_expected.to be_nil
diff --git a/spec/lib/sidebars/projects/menus/scope_menu_spec.rb b/spec/lib/sidebars/projects/menus/scope_menu_spec.rb
index 45464278880..1c2d159950a 100644
--- a/spec/lib/sidebars/projects/menus/scope_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/scope_menu_spec.rb
@@ -11,8 +11,9 @@ RSpec.describe Sidebars::Projects::Menus::ScopeMenu, feature_category: :navigati
let(:menu) { described_class.new(context) }
let(:extra_attrs) do
{
- title: _('Project overview'),
- sprite_icon: 'project',
+ title: project.name,
+ avatar: project.avatar_url,
+ entity_id: project.id,
super_sidebar_parent: ::Sidebars::StaticMenu,
item_id: :project_overview
}
diff --git a/spec/lib/sidebars/projects/super_sidebar_panel_spec.rb b/spec/lib/sidebars/projects/super_sidebar_panel_spec.rb
index 3fc6cd5083f..dc264c1c14f 100644
--- a/spec/lib/sidebars/projects/super_sidebar_panel_spec.rb
+++ b/spec/lib/sidebars/projects/super_sidebar_panel_spec.rb
@@ -31,12 +31,7 @@ RSpec.describe Sidebars::Projects::SuperSidebarPanel, feature_category: :navigat
end
it 'implements #super_sidebar_context_header' do
- expect(subject.super_sidebar_context_header).to eq(
- {
- title: project.name,
- avatar: project.avatar_url,
- id: project.id
- })
+ expect(subject.super_sidebar_context_header).to eq(_('Project'))
end
describe '#renderable_menus' do
@@ -64,4 +59,5 @@ RSpec.describe Sidebars::Projects::SuperSidebarPanel, feature_category: :navigat
it_behaves_like 'a panel with uniquely identifiable menu items'
it_behaves_like 'a panel with all menu_items categorized'
+ it_behaves_like 'a panel instantiable by the anonymous user'
end
diff --git a/spec/lib/sidebars/search/panel_spec.rb b/spec/lib/sidebars/search/panel_spec.rb
index 39c0f112793..fa1b4266a2f 100644
--- a/spec/lib/sidebars/search/panel_spec.rb
+++ b/spec/lib/sidebars/search/panel_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Sidebars::Search::Panel, feature_category: :navigation do
subject { described_class.new(context) }
it_behaves_like 'a panel with uniquely identifiable menu items'
+ it_behaves_like 'a panel instantiable by the anonymous user'
describe '#aria_label' do
it 'returns the correct aria label' do
@@ -21,11 +22,7 @@ RSpec.describe Sidebars::Search::Panel, feature_category: :navigation do
describe '#super_sidebar_context_header' do
it 'returns a hash with the correct title and icon' do
- expected_header = {
- title: 'Search results',
- icon: 'search-results'
- }
- expect(panel.super_sidebar_context_header).to eq(expected_header)
+ expect(panel.super_sidebar_context_header).to eq(_('Search results'))
end
end
end
diff --git a/spec/lib/sidebars/static_menu_spec.rb b/spec/lib/sidebars/static_menu_spec.rb
index 3d9feee0494..fda953c0791 100644
--- a/spec/lib/sidebars/static_menu_spec.rb
+++ b/spec/lib/sidebars/static_menu_spec.rb
@@ -23,6 +23,8 @@ RSpec.describe Sidebars::StaticMenu, feature_category: :navigation do
id: 'id1',
title: "Is active",
icon: nil,
+ avatar: nil,
+ entity_id: nil,
link: "foo2",
is_active: true,
pill_count: nil,
@@ -32,6 +34,8 @@ RSpec.describe Sidebars::StaticMenu, feature_category: :navigation do
id: 'id2',
title: "Not active",
icon: nil,
+ avatar: nil,
+ entity_id: nil,
link: "foo3",
is_active: false,
pill_count: nil,
diff --git a/spec/lib/sidebars/user_profile/menus/overview_menu_spec.rb b/spec/lib/sidebars/user_profile/menus/overview_menu_spec.rb
index 7cf86676892..ef12ce023b4 100644
--- a/spec/lib/sidebars/user_profile/menus/overview_menu_spec.rb
+++ b/spec/lib/sidebars/user_profile/menus/overview_menu_spec.rb
@@ -4,8 +4,9 @@ require 'spec_helper'
RSpec.describe Sidebars::UserProfile::Menus::OverviewMenu, feature_category: :navigation do
it_behaves_like 'User profile menu',
- title: s_('UserProfile|Overview'),
- icon: 'overview',
+ icon: nil,
+ expect_avatar: true,
+ avatar_shape: 'circle',
active_route: 'users#show' do
let(:link) { "/#{user.username}" }
end
diff --git a/spec/lib/sidebars/user_profile/panel_spec.rb b/spec/lib/sidebars/user_profile/panel_spec.rb
index a2bf490bc58..97fe13397a9 100644
--- a/spec/lib/sidebars/user_profile/panel_spec.rb
+++ b/spec/lib/sidebars/user_profile/panel_spec.rb
@@ -11,16 +11,13 @@ RSpec.describe Sidebars::UserProfile::Panel, feature_category: :navigation do
subject { described_class.new(context) }
it_behaves_like 'a panel with uniquely identifiable menu items'
+ it_behaves_like 'a panel instantiable by the anonymous user'
it 'implements #aria_label' do
expect(subject.aria_label).to eq(s_('UserProfile|User profile navigation'))
end
it 'implements #super_sidebar_context_header' do
- expect(subject.super_sidebar_context_header).to eq({
- title: user.name,
- avatar: user.avatar_url,
- avatar_shape: 'circle'
- })
+ expect(subject.super_sidebar_context_header).to eq(_('Profile'))
end
end
diff --git a/spec/lib/sidebars/user_settings/panel_spec.rb b/spec/lib/sidebars/user_settings/panel_spec.rb
index d574652188d..e65717d75d6 100644
--- a/spec/lib/sidebars/user_settings/panel_spec.rb
+++ b/spec/lib/sidebars/user_settings/panel_spec.rb
@@ -10,8 +10,9 @@ RSpec.describe Sidebars::UserSettings::Panel, feature_category: :navigation do
subject { described_class.new(context) }
it_behaves_like 'a panel with uniquely identifiable menu items'
+ it_behaves_like 'a panel instantiable by the anonymous user'
it 'implements #super_sidebar_context_header' do
- expect(subject.super_sidebar_context_header).to eq({ title: _('User settings'), avatar: user.avatar_url })
+ expect(subject.super_sidebar_context_header).to eq(_('User settings'))
end
end
diff --git a/spec/lib/sidebars/your_work/menus/organizations_menu_spec.rb b/spec/lib/sidebars/your_work/menus/organizations_menu_spec.rb
new file mode 100644
index 00000000000..304725ce8ca
--- /dev/null
+++ b/spec/lib/sidebars/your_work/menus/organizations_menu_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::YourWork::Menus::OrganizationsMenu, feature_category: :navigation do
+ let(:user) { build_stubbed(:user) }
+ let(:context) { Sidebars::Context.new(current_user: user, container: nil) }
+
+ subject { described_class.new(context) }
+
+ describe '#render?' do
+ context 'when `ui_for_organizations` feature flag is enabled' do
+ context 'when `current_user` is available' do
+ before do
+ stub_feature_flags(ui_for_organizations: [user])
+ end
+
+ it 'returns true' do
+ expect(subject.render?).to eq true
+ end
+ end
+
+ context 'when `current_user` is not available' do
+ let(:user) { nil }
+
+ it 'returns false' do
+ expect(subject.render?).to eq false
+ end
+ end
+ end
+
+ context 'when `ui_for_organizations` feature flag is disabled' do
+ before do
+ stub_feature_flags(ui_for_organizations: false)
+ end
+
+ it 'returns false' do
+ expect(subject.render?).to eq false
+ end
+ end
+ end
+end
diff --git a/spec/lib/sidebars/your_work/panel_spec.rb b/spec/lib/sidebars/your_work/panel_spec.rb
index 65c2786a16d..8037f7eb7c1 100644
--- a/spec/lib/sidebars/your_work/panel_spec.rb
+++ b/spec/lib/sidebars/your_work/panel_spec.rb
@@ -10,8 +10,9 @@ RSpec.describe Sidebars::YourWork::Panel, feature_category: :navigation do
subject { described_class.new(context) }
it_behaves_like 'a panel with uniquely identifiable menu items'
+ it_behaves_like 'a panel instantiable by the anonymous user'
it 'implements #super_sidebar_context_header' do
- expect(subject.super_sidebar_context_header).to eq({ title: 'Your work', icon: 'work' })
+ expect(subject.super_sidebar_context_header).to eq(_('Your work'))
end
end
diff --git a/spec/lib/system_check/app/table_truncate_check_spec.rb b/spec/lib/system_check/app/table_truncate_check_spec.rb
new file mode 100644
index 00000000000..673365f3e5e
--- /dev/null
+++ b/spec/lib/system_check/app/table_truncate_check_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe SystemCheck::App::TableTruncateCheck, feature_category: :cell do
+ context 'when running on single databases' do
+ before do
+ skip_if_database_exists(:ci)
+ end
+
+ describe '#skip?' do
+ subject { described_class.new.skip? }
+
+ it { is_expected.to eq(true) }
+ end
+ end
+
+ context 'when running on multiple databases' do
+ let(:needs_truncation) { true }
+
+ before do
+ skip_if_shared_database(:ci)
+
+ allow_next_instances_of(Gitlab::Database::TablesTruncate, 2) do |instance|
+ allow(instance).to receive(:needs_truncation?).and_return(needs_truncation)
+ end
+ end
+
+ describe '#skip?' do
+ subject { described_class.new.skip? }
+
+ it { is_expected.to eq(false) }
+ end
+
+ describe '#check?' do
+ subject { described_class.new.check? }
+
+ context 'when TableTruncate returns false' do
+ let(:needs_truncation) { false }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when TableTruncate returns true' do
+ let(:needs_truncation) { true }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '#show_error' do
+ let(:needs_truncation) { true }
+ let(:checker) { described_class.new }
+
+ before do
+ checker.check?
+ end
+
+ subject(:show_error) { checker.show_error }
+
+ it 'outputs error information' do
+ expected = %r{
+ Try\sfixing\sit:\s+
+ sudo\s-u\s.+?\s-H\sbundle\sexec\srake\sgitlab:db:truncate_legacy_tables:main\s
+ gitlab:db:truncate_legacy_tables:ci\s+
+ For\smore\sinformation\ssee:\s+
+ doc/development/database/multiple_databases.md\sin\ssection\s'Truncating\stables'\s+
+ Please\sfix\sthe\serror\sabove\sand\srerun\sthe\schecks.\s+
+ }x
+
+ expect { show_error }.to output(expected).to_stdout
+ end
+ end
+ end
+end
diff --git a/spec/lib/unnested_in_filters/rewriter_spec.rb b/spec/lib/unnested_in_filters/rewriter_spec.rb
index e094563e8fb..ea561c42993 100644
--- a/spec/lib/unnested_in_filters/rewriter_spec.rb
+++ b/spec/lib/unnested_in_filters/rewriter_spec.rb
@@ -68,92 +68,92 @@ RSpec.describe UnnestedInFilters::Rewriter do
describe '#rewrite' do
let(:recorded_queries) { ActiveRecord::QueryRecorder.new { rewriter.rewrite.load } }
let(:relation) { User.where(state: :active, user_type: %i(support_bot alert_bot)).limit(2) }
+ let(:users_select) { 'SELECT "users".*' }
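+ # Assumed rationale: models with ignored columns make ActiveRecord select attributes explicitly rather than "users".*, so the expectations below accept either SELECT form.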
+ let(:users_select_with_ignored_columns) { 'SELECT ("users"."\w+", )+("users"."\w+")' }
- let(:expected_query) do
- <<~SQL
- SELECT
- "users".*
- FROM
- unnest('{1,2}'::smallint[]) AS "user_types"("user_type"),
- LATERAL (
- SELECT
- "users".*
- FROM
- "users"
- WHERE
- "users"."state" = 'active' AND
- (users."user_type" = "user_types"."user_type")
- LIMIT 2
- ) AS users
- LIMIT 2
- SQL
+ let(:users_unnest) do
+ 'FROM unnest\(\'{1\,2}\'::smallint\[\]\) AS "user_types"\("user_type"\)\, LATERAL \('
+ end
+
+ let(:users_where) do
+ 'FROM
+ "users"
+ WHERE
+ "users"."state" = \'active\' AND
+ \(users."user_type" = "user_types"."user_type"\)
+ LIMIT 2\)
+ AS users
+ LIMIT 2'
+ end
+
+ let(:expected_query_regexp) do
+ Regexp.new(
+ "(#{users_select}|#{users_select_with_ignored_columns})
+ #{users_unnest}(#{users_select}|#{users_select_with_ignored_columns})
+ #{users_where}".squish
+ )
end
subject(:issued_query) { recorded_queries.occurrences.each_key.first }
it 'changes the query' do
- expect(issued_query.gsub(/\s/, '')).to start_with(expected_query.gsub(/\s/, ''))
+ expect(issued_query).to match(expected_query_regexp)
end
context 'when the relation has a subquery' do
let(:relation) { User.where(state: User.select(:state), user_type: %i(support_bot alert_bot)).limit(1) }
- let(:expected_query) do
- <<~SQL
- SELECT
- "users".*
- FROM
- unnest(ARRAY(SELECT "users"."state" FROM "users")::character varying[]) AS "states"("state"),
- unnest('{1,2}'::smallint[]) AS "user_types"("user_type"),
- LATERAL (
- SELECT
- "users".*
- FROM
- "users"
- WHERE
- (users."state" = "states"."state") AND
- (users."user_type" = "user_types"."user_type")
- LIMIT 1
- ) AS users
- LIMIT 1
- SQL
+ let(:users_unnest) do
+ 'FROM
+ unnest\(ARRAY\(SELECT "users"."state" FROM "users"\)::character varying\[\]\) AS "states"\("state"\)\,
+ unnest\(\'{1\,2}\'::smallint\[\]\) AS "user_types"\("user_type"\)\,
+ LATERAL \('
+ end
+
+ let(:users_where) do
+ 'FROM
+ "users"
+ WHERE
+ \(users."state" = "states"."state"\) AND
+ \(users."user_type" = "user_types"."user_type"\)
+ LIMIT 1\)
+ AS users
+ LIMIT 1'
end
it 'changes the query' do
- expect(issued_query.gsub(/\s/, '')).to start_with(expected_query.gsub(/\s/, ''))
+ expect(issued_query).to match(expected_query_regexp)
end
end
context 'when there is an order' do
let(:relation) { User.where(state: %w(active blocked banned)).order(order).limit(2) }
- let(:expected_query) do
- <<~SQL
- SELECT
- "users".*
- FROM
- unnest('{active,blocked,banned}'::charactervarying[]) AS "states"("state"),
- LATERAL (
- SELECT
- "users".*
- FROM
- "users"
- WHERE
- (users."state" = "states"."state")
- ORDER BY
- "users"."user_type" DESC
- LIMIT 2
- ) AS users
- ORDER BY
- "users"."user_type" DESC
- LIMIT 2
- SQL
+
+ let(:users_unnest) do
+ 'FROM
+ unnest\(\'{active\,blocked\,banned}\'::character varying\[\]\) AS "states"\("state"\)\,
+ LATERAL \('
+ end
+
+ let(:users_where) do
+ 'FROM
+ "users"
+ WHERE
+ \(users."state" = "states"."state"\)
+ ORDER BY
+ "users"."user_type" DESC
+ LIMIT 2\)
+ AS users
+ ORDER BY
+ "users"."user_type" DESC
+ LIMIT 2'
end
context 'when the order is an Arel node' do
let(:order) { { user_type: :desc } }
it 'changes the query' do
- expect(issued_query.gsub(/\s/, '')).to start_with(expected_query.gsub(/\s/, ''))
+ expect(issued_query).to match(expected_query_regexp)
end
end
@@ -171,7 +171,7 @@ RSpec.describe UnnestedInFilters::Rewriter do
end
it 'changes the query' do
- expect(issued_query.gsub(/\s/, '')).to start_with(expected_query.gsub(/\s/, ''))
+ expect(issued_query).to match(expected_query_regexp)
end
end
end
@@ -179,85 +179,82 @@ RSpec.describe UnnestedInFilters::Rewriter do
context 'when the combined attributes include the primary key' do
let(:relation) { User.where(user_type: %i(support_bot alert_bot)).order(id: :desc).limit(2) }
- let(:expected_query) do
- <<~SQL
- SELECT
- "users".*
- FROM
- "users"
- WHERE
- "users"."id" IN (
- SELECT
- "users"."id"
- FROM
- unnest('{1,2}' :: smallint []) AS "user_types"("user_type"),
- LATERAL (
- SELECT
- "users"."user_type",
- "users"."id"
- FROM
- "users"
- WHERE
- (users."user_type" = "user_types"."user_type")
- ORDER BY
- "users"."id" DESC
- LIMIT
- 2
- ) AS users
- ORDER BY
- "users"."id" DESC
- LIMIT
- 2
- )
- ORDER BY
+ let(:users_where) do
+ 'FROM
+ "users"
+ WHERE
+ "users"."id" IN
+ \(SELECT
+ "users"."id"
+ FROM
+ unnest\(\'{1\,2}\'::smallint\[\]\) AS "user_types"\("user_type"\)\,
+ LATERAL
+ \(SELECT
+ "users"."user_type"\,
+ "users"."id"
+ FROM
+ "users"
+ WHERE
+ \(users."user_type" = "user_types"."user_type"\)
+ ORDER BY
+ "users"."id" DESC
+ LIMIT 2\)
+ AS users
+ ORDER BY
"users"."id" DESC
- LIMIT
- 2
- SQL
+ LIMIT 2\)
+ ORDER BY
+ "users"."id" DESC
+ LIMIT 2'
+ end
+
+ let(:expected_query_regexp) do
+ Regexp.new("(#{users_select}|#{users_select_with_ignored_columns}) #{users_where}".squish)
end
it 'changes the query' do
- expect(issued_query.gsub(/\s/, '')).to start_with(expected_query.gsub(/\s/, ''))
+ expect(issued_query).to match(expected_query_regexp)
end
end
context 'when a join table is receiving an IN list query' do
let(:relation) { User.joins(:status).where(status: { message: %w[foo bar] }).order(id: :desc).limit(2) }
- let(:expected_query) do
- <<~SQL
- SELECT
- "users".*
- FROM
- "users"
- WHERE
- "users"."id" IN (
- SELECT
- "users"."id"
- FROM
- LATERAL (
- SELECT
- message,
- "users"."id"
- FROM
- "users"
- INNER JOIN "user_statuses" "status" ON "status"."user_id" = "users"."id"
- WHERE
- "status"."message" IN ('foo', 'bar')
- ORDER BY
- "users"."id" DESC
- LIMIT 2) AS users
- ORDER BY
- "users"."id" DESC
- LIMIT 2)
- ORDER BY
+ let(:users_where) do
+ 'FROM
+ "users"
+ WHERE
+ "users"."id" IN
+ \(SELECT
+ "users"."id"
+ FROM
+ LATERAL
+ \(SELECT
+ message,
+ "users"."id"
+ FROM
+ "users"
+ INNER JOIN "user_statuses" "status" ON "status"."user_id" = "users"."id"
+ WHERE
+ "status"."message" IN \(\'foo\'\, \'bar\'\)
+ ORDER BY
+ "users"."id" DESC
+ LIMIT 2\)
+ AS users
+ ORDER BY
"users"."id" DESC
- LIMIT 2
- SQL
+ LIMIT 2\)
+ ORDER BY
+ "users"."id" DESC
+ LIMIT 2'
+ end
+
+ let(:expected_query_regexp) do
+ Regexp.new("(#{users_select}|#{users_select_with_ignored_columns}) #{users_where}".squish)
end
it 'does not rewrite the in statement for the joined table' do
- expect(issued_query.gsub(/\s/, '')).to start_with(expected_query.gsub(/\s/, ''))
+ expect(issued_query).to match(expected_query_regexp)
end
end
diff --git a/spec/lib/users/internal_spec.rb b/spec/lib/users/internal_spec.rb
new file mode 100644
index 00000000000..b7368f5042e
--- /dev/null
+++ b/spec/lib/users/internal_spec.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::Internal, feature_category: :user_profile do
+ shared_examples 'bot users' do |bot_type, username, email|
+ it 'creates the user if it does not exist' do
+ expect do
+ described_class.public_send(bot_type)
+ end.to change { User.where(user_type: bot_type).count }.by(1)
+ end
+
+ it 'creates a route for the namespace of the created user' do
+ bot_user = described_class.public_send(bot_type)
+
+ expect(bot_user.namespace.route).to be_present
+ end
+
+ it 'does not create a new user if it already exists' do
+ described_class.public_send(bot_type)
+
+ expect do
+ described_class.public_send(bot_type)
+ end.not_to change { User.count }
+ end
+
+ context 'when a regular user exists with the bot username' do
+ it 'creates a user with a non-conflicting username' do
+ create(:user, username: username)
+
+ expect do
+ described_class.public_send(bot_type)
+ end.to change { User.where(user_type: bot_type).count }.by(1)
+ end
+ end
+
+ context 'when a regular user exists with the bot user email' do
+ it 'creates a user with a non-conflicting email' do
+ create(:user, email: email)
+
+ expect do
+ described_class.public_send(bot_type)
+ end.to change { User.where(user_type: bot_type).count }.by(1)
+ end
+ end
+
+ context 'when a domain allowlist is in place' do
+ before do
+ stub_application_setting(domain_allowlist: ['gitlab.com'])
+ end
+
+ it 'creates the bot user' do
+ expect do
+ described_class.public_send(bot_type)
+ end.to change { User.where(user_type: bot_type).count }.by(1)
+ end
+ end
+ end
+
+ shared_examples 'bot user avatars' do |bot_type, avatar_filename|
+ it 'sets the custom avatar for the created bot' do
+ bot_user = described_class.public_send(bot_type)
+
+ expect(bot_user.avatar.url).to be_present
+ expect(bot_user.avatar.filename).to eq(avatar_filename)
+ end
+ end
+
+ it_behaves_like 'bot users', :alert_bot, 'alert-bot', 'alert@example.com'
+ it_behaves_like 'bot users', :support_bot, 'support-bot', 'support@example.com'
+ it_behaves_like 'bot users', :migration_bot, 'migration-bot', 'noreply+gitlab-migration-bot@example.com'
+ it_behaves_like 'bot users', :security_bot, 'GitLab-Security-Bot', 'security-bot@example.com'
+ it_behaves_like 'bot users', :ghost, 'ghost', 'ghost@example.com'
+ it_behaves_like 'bot users', :automation_bot, 'automation-bot', 'automation@example.com'
+ it_behaves_like 'bot users', :llm_bot, 'GitLab-Llm-Bot', 'llm-bot@example.com'
+ it_behaves_like 'bot users', :admin_bot, 'GitLab-Admin-Bot', 'admin-bot@example.com'
+
+ it_behaves_like 'bot user avatars', :alert_bot, 'alert-bot.png'
+ it_behaves_like 'bot user avatars', :support_bot, 'support-bot.png'
+ it_behaves_like 'bot user avatars', :security_bot, 'security-bot.png'
+ it_behaves_like 'bot user avatars', :automation_bot, 'support-bot.png'
+ it_behaves_like 'bot user avatars', :llm_bot, 'support-bot.png'
+ it_behaves_like 'bot user avatars', :admin_bot, 'admin-bot.png'
+
+ context 'when bot is the support_bot' do
+ subject { described_class.support_bot }
+
+ it { is_expected.to be_confirmed }
+ end
+
+ context 'when bot is the admin bot' do
+ subject { described_class.admin_bot }
+
+ it { is_expected.to be_admin }
+ it { is_expected.to be_confirmed }
+ end
+end
diff --git a/spec/mailers/emails/in_product_marketing_spec.rb b/spec/mailers/emails/in_product_marketing_spec.rb
index 2d332dd99d6..93a06bfc881 100644
--- a/spec/mailers/emails/in_product_marketing_spec.rb
+++ b/spec/mailers/emails/in_product_marketing_spec.rb
@@ -25,75 +25,6 @@ RSpec.describe Emails::InProductMarketing do
end
end
- describe '#in_product_marketing_email' do
- let_it_be(:group) { create(:group) }
-
- let!(:onboarding_progress) { create(:onboarding_progress, namespace: group) }
-
- using RSpec::Parameterized::TableSyntax
-
- let(:track) { :create }
- let(:series) { 0 }
-
- subject { Notify.in_product_marketing_email(user.id, group.id, track, series) }
-
- include_context 'gitlab email notification'
-
- it_behaves_like 'has custom headers when on gitlab.com'
-
- it 'sends to the right user with a link to unsubscribe' do
- aggregate_failures do
- expect(subject).to deliver_to(user.notification_email_or_default)
- expect(subject).to have_body_text(profile_notifications_url)
- end
- end
-
- where(:track, :series) do
- :create | 0
- :create | 1
- :create | 2
- :verify | 0
- :verify | 1
- :verify | 2
- :trial | 0
- :trial | 1
- :trial | 2
- :team | 0
- :team | 1
- :team | 2
- :team_short | 0
- :trial_short | 0
- :admin_verify | 0
- end
-
- with_them do
- before do
- group.add_owner(user)
- end
-
- it 'has the correct subject and content' do
- message = Gitlab::Email::Message::InProductMarketing.for(track).new(group: group, user: user, series: series)
-
- aggregate_failures do
- is_expected.to have_subject(message.subject_line)
- is_expected.to have_body_text(message.title)
- is_expected.to have_body_text(message.subtitle)
- is_expected.to have_body_text(CGI.unescapeHTML(message.cta_link))
-
- if /create|verify/.match?(track)
- is_expected.to have_body_text(message.invite_text)
- is_expected.to have_body_text(CGI.unescapeHTML(message.invite_link))
- else
- is_expected.not_to have_body_text(message.invite_text)
- is_expected.not_to have_body_text(CGI.unescapeHTML(message.invite_link))
- end
-
- is_expected.to have_body_text(message.progress)
- end
- end
- end
- end
-
describe '#build_ios_app_guide_email' do
subject { Notify.build_ios_app_guide_email(user.notification_email_or_default) }
diff --git a/spec/mailers/emails/profile_spec.rb b/spec/mailers/emails/profile_spec.rb
index 140b067f7aa..4816e88a311 100644
--- a/spec/mailers/emails/profile_spec.rb
+++ b/spec/mailers/emails/profile_spec.rb
@@ -157,42 +157,67 @@ RSpec.describe Emails::Profile, feature_category: :user_profile do
end
end
- describe 'user personal access token is about to expire' do
+ describe 'resource access token is about to expire' do
let_it_be(:user) { create(:user) }
- let_it_be(:expiring_token) { create(:personal_access_token, user: user, expires_at: 5.days.from_now) }
- subject { Notify.access_token_about_to_expire_email(user, [expiring_token.name]) }
+ shared_examples 'resource about to expire email' do
+ it 'is sent to the owners' do
+ is_expected.to deliver_to user
+ end
- it_behaves_like 'an email sent from GitLab'
- it_behaves_like 'it should not have Gmail Actions links'
- it_behaves_like 'a user cannot unsubscribe through footer link'
+ it 'has the correct subject' do
+ is_expected.to have_subject /^Your resource access tokens will expire in 7 days or less$/i
+ end
- it 'is sent to the user' do
- is_expected.to deliver_to user.email
- end
+ it 'includes a link to access tokens page' do
+ is_expected.to have_body_text /#{resource_access_tokens_path}/
+ end
- it 'has the correct subject' do
- is_expected.to have_subject /^Your personal access tokens will expire in 7 days or less$/i
- end
+ it 'provides the names of expiring tokens' do
+ is_expected.to have_body_text /#{expiring_token.name}/
+ end
- it 'mentions the access tokens will expire' do
- is_expected.to have_body_text /One or more of your personal access tokens will expire in 7 days or less/
+ it 'includes the email reason' do
+ is_expected.to have_body_text %r{You're receiving this email because of your account on <a .*>localhost</a>}
+ end
end
- it 'provides the names of expiring tokens' do
- is_expected.to have_body_text /#{expiring_token.name}/
- end
+ context 'when access token belongs to a group' do
+ let_it_be(:project_bot) { create(:user, :project_bot) }
+ let_it_be(:expiring_token) { create(:personal_access_token, user: project_bot, expires_at: 5.days.from_now) }
+ let_it_be(:resource) { create(:group) }
+ let_it_be(:resource_access_tokens_path) { group_settings_access_tokens_path(resource) }
- it 'includes a link to personal access tokens page' do
- is_expected.to have_body_text /#{profile_personal_access_tokens_path}/
- end
+ before_all do
+ resource.add_owner(user)
+ resource.add_developer(project_bot)
+ end
- it 'includes the email reason' do
- is_expected.to have_body_text %r{You're receiving this email because of your account on <a .*>localhost</a>}
+ subject { Notify.resource_access_tokens_about_to_expire_email(user, resource, [expiring_token.name]) }
+
+ it_behaves_like 'an email sent from GitLab'
+ it_behaves_like 'it should not have Gmail Actions links'
+ it_behaves_like 'a user cannot unsubscribe through footer link'
+ it_behaves_like 'resource about to expire email'
end
- context 'with User does not exist' do
- it { expect { Notify.access_token_about_to_expire_email('foo') }.not_to raise_error }
+ context 'when access token belongs to a project' do
+ let_it_be(:project_bot) { create(:user, :project_bot) }
+ let_it_be(:expiring_token) { create(:personal_access_token, user: project_bot, expires_at: 5.days.from_now) }
+ let_it_be(:resource) { create(:project) }
+ let_it_be(:resource_access_tokens_path) { project_settings_access_tokens_path(resource) }
+
+ before_all do
+ resource.add_maintainer(user)
+ resource.add_reporter(project_bot)
+ end
+
+ subject { Notify.resource_access_tokens_about_to_expire_email(user, resource, [expiring_token.name]) }
+
+ it_behaves_like 'an email sent from GitLab'
+ it_behaves_like 'it should not have Gmail Actions links'
+ it_behaves_like 'a user cannot unsubscribe through footer link'
+ it_behaves_like 'resource about to expire email'
end
end
diff --git a/spec/mailers/emails/service_desk_spec.rb b/spec/mailers/emails/service_desk_spec.rb
index 8c0efe3f480..e3fe36237df 100644
--- a/spec/mailers/emails/service_desk_spec.rb
+++ b/spec/mailers/emails/service_desk_spec.rb
@@ -26,6 +26,16 @@ RSpec.describe Emails::ServiceDesk, feature_category: :service_desk do
issue.issue_email_participants.create!(email: email)
end
+ before do
+ # Because we use global project and custom email instances, make sure
+ # custom email is disabled in all regular cases to avoid flakiness.
+ unless service_desk_setting.custom_email_verification.started?
+ service_desk_setting.custom_email_verification.mark_as_started!(user)
+ end
+
+ service_desk_setting.update!(custom_email_enabled: false) unless service_desk_setting.custom_email_enabled?
+ end
+
shared_examples 'a service desk notification email' do |attachments_count|
it 'builds the email correctly' do
aggregate_failures do
@@ -42,6 +52,10 @@ RSpec.describe Emails::ServiceDesk, feature_category: :service_desk do
expect(subject.parts[1].content_type).to include('text/html')
end
end
+
+ it 'uses system noreply address as Reply-To address' do
+ expect(subject.reply_to.first).to eq(Gitlab.config.gitlab.email_reply_to)
+ end
end
shared_examples 'a service desk notification email with template content' do |template_key, attachments_count|
@@ -145,6 +159,45 @@ RSpec.describe Emails::ServiceDesk, feature_category: :service_desk do
end
end
+ shared_examples 'a service desk notification email that uses custom email' do
+ before do
+ # Access via service_desk_setting to avoid flakiness
+ unless service_desk_setting.custom_email_verification.finished?
+ service_desk_setting.custom_email_verification.error = nil
+ service_desk_setting.custom_email_verification.mark_as_finished!
+ end
+
+ # Reset because we access changed records through these objects
+ service_desk_setting.reset
+ project.reset
+
+ service_desk_setting.update!(custom_email_enabled: true) unless service_desk_setting.custom_email_enabled?
+
+ allow(Gitlab::AppLogger).to receive(:info)
+ end
+
+ it 'uses SMTP delivery method and custom email settings' do
+ expect_service_desk_custom_email_delivery_options(service_desk_setting)
+
+ expect(Gitlab::AppLogger).to have_received(:info).with({ category: 'custom_email' })
+ end
+
+ it 'generates Reply-To address from custom email' do
+ reply_address = subject.reply_to.first
+ expected_reply_address = service_desk_setting.custom_email.sub('@', "+#{SentNotification.last.reply_key}@")
+
+ expect(reply_address).to eq(expected_reply_address)
+ end
+
+ context 'when feature flag service_desk_custom_email_reply is disabled' do
+ before do
+ stub_feature_flags(service_desk_custom_email_reply: false)
+ end
+
+ it { is_expected.to have_header 'Reply-To', /<reply+(.*)@#{Gitlab.config.gitlab.host}>\Z/ }
+ end
+ end
+
describe '.service_desk_thank_you_email' do
let_it_be(:reply_in_subject) { true }
let_it_be(:expected_text) do
@@ -234,6 +287,12 @@ RSpec.describe Emails::ServiceDesk, feature_category: :service_desk do
end
end
end
+
+ context 'when custom email is enabled' do
+ subject { Notify.service_desk_thank_you_email(issue.id) }
+
+ it_behaves_like 'a service desk notification email that uses custom email'
+ end
end
describe '.service_desk_new_note_email' do
@@ -295,7 +354,7 @@ RSpec.describe Emails::ServiceDesk, feature_category: :service_desk do
end
context 'with all-user reference in a an external author comment' do
- let_it_be(:note) { create(:note_on_issue, noteable: issue, project: project, note: "Hey @all, just a ping", author: User.support_bot) }
+ let_it_be(:note) { create(:note_on_issue, noteable: issue, project: project, note: "Hey @all, just a ping", author: Users::Internal.support_bot) }
let(:template_content) { 'some text %{ NOTE_TEXT }' }
@@ -435,19 +494,44 @@ RSpec.describe Emails::ServiceDesk, feature_category: :service_desk do
it_behaves_like 'a service desk notification email with template content', 'new_note'
end
end
+
+ context 'when custom email is enabled' do
+ subject { Notify.service_desk_new_note_email(issue.id, note.id, email) }
+
+ it_behaves_like 'a service desk notification email that uses custom email'
+ end
end
describe '.service_desk_custom_email_verification_email' do
+ # Use strict definition here because Mail::SMTP.new({}).settings
+ # might have been changed before.
+ let(:expected_delivery_method_defaults) do
+ {
+ address: 'localhost',
+ domain: 'localhost.localdomain',
+ port: 25,
+ password: nil,
+ user_name: nil
+ }
+ end
+
subject { Notify.service_desk_custom_email_verification_email(service_desk_setting) }
it_behaves_like 'a custom email verification process email'
it 'uses service bot name and custom email as sender' do
- expect_sender(User.support_bot, sender_email: service_desk_setting.custom_email)
+ expect_sender(Users::Internal.support_bot, sender_email: service_desk_setting.custom_email)
end
it 'forcibly uses SMTP delivery method and has correct settings' do
expect_service_desk_custom_email_delivery_options(service_desk_setting)
+
+ # defaults are unchanged after the email overrode the settings
+ expect(Mail::SMTP.new({}).settings).to include(expected_delivery_method_defaults)
+
+ # other mailers are unchanged after the email overrode the settings
+ other_mail = Notify.test_email(email, 'Test subject', 'Test body')
+ expect(other_mail.delivery_method).to be_a(Mail::TestMailer)
end
it 'uses verification email address as recipient' do
@@ -455,7 +539,7 @@ RSpec.describe Emails::ServiceDesk, feature_category: :service_desk do
end
it 'contains verification token' do
- is_expected.to have_body_text("Verification token: #{verification.token}")
+ is_expected.to have_body_text("Verification token: #{service_desk_setting.custom_email_verification.token}")
end
end
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index 976fe214c95..9df89f84450 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -1059,21 +1059,10 @@ RSpec.describe Notify do
is_expected.to have_body_text project_member.human_access
is_expected.to have_body_text 'leave the project'
is_expected.to have_body_text project_url(project, leave: 1)
- is_expected.not_to have_body_text 'You were assigned the following tasks:'
- end
-
- context 'with tasks to be done present' do
- let(:project_member) { create(:project_member, project: project, user: user, tasks_to_be_done: [:ci, :code]) }
-
- it 'contains the assigned tasks to be done' do
- is_expected.to have_body_text 'You were assigned the following tasks:'
- is_expected.to have_body_text localized_tasks_to_be_done_choices[:ci]
- is_expected.to have_body_text localized_tasks_to_be_done_choices[:code]
- end
end
end
- def invite_to_project(project, inviter:, user: nil, tasks_to_be_done: [])
+ def invite_to_project(project, inviter:, user: nil)
create(
:project_member,
:developer,
@@ -1081,8 +1070,7 @@ RSpec.describe Notify do
invite_token: '1234',
invite_email: 'toto@example.com',
user: user,
- created_by: inviter,
- tasks_to_be_done: tasks_to_be_done
+ created_by: inviter
)
end
@@ -1115,7 +1103,6 @@ RSpec.describe Notify do
is_expected.to have_content("#{inviter.name} invited you to join the")
is_expected.to have_content('Project details')
is_expected.to have_content("What's it about?")
- is_expected.not_to have_body_text 'and has assigned you the following tasks:'
end
end
@@ -1161,16 +1148,6 @@ RSpec.describe Notify do
end
end
end
-
- context 'with tasks to be done present', :aggregate_failures do
- let(:project_member) { invite_to_project(project, inviter: inviter, tasks_to_be_done: [:ci, :code]) }
-
- it 'contains the assigned tasks to be done' do
- is_expected.to have_body_text 'and has assigned you the following tasks:'
- is_expected.to have_body_text localized_tasks_to_be_done_choices[:ci]
- is_expected.to have_body_text localized_tasks_to_be_done_choices[:code]
- end
- end
end
describe 'project invitation accepted' do
@@ -1560,7 +1537,7 @@ RSpec.describe Notify do
end
it 'uses service bot name by default' do
- expect_sender(User.support_bot)
+ expect_sender(Users::Internal.support_bot)
end
context 'when custom outgoing name is set' do
@@ -1577,7 +1554,7 @@ RSpec.describe Notify do
let_it_be(:settings) { create(:service_desk_setting, project: project, outgoing_name: '') }
it 'uses service bot name' do
- expect_sender(User.support_bot)
+ expect_sender(Users::Internal.support_bot)
end
end
@@ -1589,7 +1566,7 @@ RSpec.describe Notify do
it_behaves_like 'a mail with default delivery method'
it 'uses service bot name by default' do
- expect_sender(User.support_bot)
+ expect_sender(Users::Internal.support_bot)
end
context 'when custom email is enabled' do
@@ -1611,7 +1588,7 @@ RSpec.describe Notify do
end
it 'uses custom email and service bot name in "from" header' do
- expect_sender(User.support_bot, sender_email: 'supersupport@example.com')
+ expect_sender(Users::Internal.support_bot, sender_email: 'supersupport@example.com')
end
it 'uses SMTP delivery method and has correct settings' do
@@ -1773,7 +1750,7 @@ RSpec.describe Notify do
end
end
- def invite_to_group(group, inviter:, user: nil, tasks_to_be_done: [])
+ def invite_to_group(group, inviter:, user: nil)
create(
:group_member,
:developer,
@@ -1781,8 +1758,7 @@ RSpec.describe Notify do
invite_token: '1234',
invite_email: 'toto@example.com',
user: user,
- created_by: inviter,
- tasks_to_be_done: tasks_to_be_done
+ created_by: inviter
)
end
@@ -1807,7 +1783,6 @@ RSpec.describe Notify do
is_expected.to have_body_text group.name
is_expected.to have_body_text group_member.human_access.downcase
is_expected.to have_body_text group_member.invite_token
- is_expected.not_to have_body_text 'and has assigned you the following tasks:'
end
end
@@ -1821,24 +1796,6 @@ RSpec.describe Notify do
is_expected.to have_body_text group_member.invite_token
end
end
-
- context 'with tasks to be done present', :aggregate_failures do
- let(:group_member) { invite_to_group(group, inviter: inviter, tasks_to_be_done: [:ci, :code]) }
-
- it 'contains the assigned tasks to be done' do
- is_expected.to have_body_text 'and has assigned you the following tasks:'
- is_expected.to have_body_text localized_tasks_to_be_done_choices[:ci]
- is_expected.to have_body_text localized_tasks_to_be_done_choices[:code]
- end
-
- context 'when there is no inviter' do
- let(:inviter) { nil }
-
- it 'does not contain the assigned tasks to be done' do
- is_expected.not_to have_body_text 'and has assigned you the following tasks:'
- end
- end
- end
end
describe 'group invitation reminders' do
@@ -2534,17 +2491,4 @@ RSpec.describe Notify do
end
end
end
-
- describe 'in product marketing', :mailer do
- let_it_be(:group) { create(:group) }
-
- let(:mail) { ActionMailer::Base.deliveries.last }
-
- it 'does not raise error' do
- described_class.in_product_marketing_email(user.id, group.id, :trial, 0).deliver
-
- expect(mail.subject).to eq('Go farther with GitLab')
- expect(mail.body.parts.first.to_s).to include('Start a GitLab Ultimate trial today in less than one minute, no credit card required.')
- end
- end
end
diff --git a/spec/migrations/20230125093723_rebalance_partition_id_ci_pipeline_spec.rb b/spec/migrations/20230125093723_rebalance_partition_id_ci_pipeline_spec.rb
deleted file mode 100644
index 3ccd92e15a4..00000000000
--- a/spec/migrations/20230125093723_rebalance_partition_id_ci_pipeline_spec.rb
+++ /dev/null
@@ -1,58 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe RebalancePartitionIdCiPipeline, migration: :gitlab_ci, feature_category: :continuous_integration do
- let(:migration) { described_class::MIGRATION }
-
- context 'when on sass' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- describe '#up' do
- it 'schedules background jobs for each batch of ci_builds' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- gitlab_schema: :gitlab_ci,
- table_name: :ci_pipelines,
- column_name: :id,
- interval: described_class::DELAY_INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
- end
-
- context 'when on self-managed instance' do
- let(:migration) { described_class.new }
-
- describe '#up' do
- it 'does not schedule background job' do
- expect(migration).not_to receive(:queue_batched_background_migration)
-
- migration.up
- end
- end
-
- describe '#down' do
- it 'does not delete background job' do
- expect(migration).not_to receive(:delete_batched_background_migration)
-
- migration.down
- end
- end
- end
-end
diff --git a/spec/migrations/20230125093840_rebalance_partition_id_ci_build_spec.rb b/spec/migrations/20230125093840_rebalance_partition_id_ci_build_spec.rb
deleted file mode 100644
index b983564a2d9..00000000000
--- a/spec/migrations/20230125093840_rebalance_partition_id_ci_build_spec.rb
+++ /dev/null
@@ -1,58 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe RebalancePartitionIdCiBuild, migration: :gitlab_ci, feature_category: :continuous_integration do
- let(:migration) { described_class::MIGRATION }
-
- context 'when on sass' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- describe '#up' do
- it 'schedules background jobs for each batch of ci_builds' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- gitlab_schema: :gitlab_ci,
- table_name: :ci_builds,
- column_name: :id,
- interval: described_class::DELAY_INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
- end
-
- context 'when on self-managed instance' do
- let(:migration) { described_class.new }
-
- describe '#up' do
- it 'does not schedule background job' do
- expect(migration).not_to receive(:queue_batched_background_migration)
-
- migration.up
- end
- end
-
- describe '#down' do
- it 'does not delete background job' do
- expect(migration).not_to receive(:delete_batched_background_migration)
-
- migration.down
- end
- end
- end
-end
diff --git a/spec/migrations/20230208100917_fix_partition_ids_for_ci_pipeline_variable_spec.rb b/spec/migrations/20230208100917_fix_partition_ids_for_ci_pipeline_variable_spec.rb
deleted file mode 100644
index fb0e1fe17ec..00000000000
--- a/spec/migrations/20230208100917_fix_partition_ids_for_ci_pipeline_variable_spec.rb
+++ /dev/null
@@ -1,58 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe FixPartitionIdsForCiPipelineVariable, migration: :gitlab_ci, feature_category: :continuous_integration do
- let(:migration) { described_class::MIGRATION }
-
- context 'when on saas' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- describe '#up' do
- it 'schedules background jobs for each batch of ci_pipeline_variables' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- gitlab_schema: :gitlab_ci,
- table_name: :ci_pipeline_variables,
- column_name: :id,
- interval: described_class::DELAY_INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
- end
-
- context 'when on self-managed instance' do
- let(:migration) { described_class.new }
-
- describe '#up' do
- it 'does not schedule background job' do
- expect(migration).not_to receive(:queue_batched_background_migration)
-
- migration.up
- end
- end
-
- describe '#down' do
- it 'does not delete background job' do
- expect(migration).not_to receive(:delete_batched_background_migration)
-
- migration.down
- end
- end
- end
-end
diff --git a/spec/migrations/20230208103009_fix_partition_ids_for_ci_job_artifact_spec.rb b/spec/migrations/20230208103009_fix_partition_ids_for_ci_job_artifact_spec.rb
deleted file mode 100644
index de2386c6a0d..00000000000
--- a/spec/migrations/20230208103009_fix_partition_ids_for_ci_job_artifact_spec.rb
+++ /dev/null
@@ -1,58 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe FixPartitionIdsForCiJobArtifact, migration: :gitlab_ci, feature_category: :continuous_integration do
- let(:migration) { described_class::MIGRATION }
-
- context 'when on saas' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- describe '#up' do
- it 'schedules background jobs for each batch of ci_job_artifacts' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- gitlab_schema: :gitlab_ci,
- table_name: :ci_job_artifacts,
- column_name: :id,
- interval: described_class::DELAY_INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
- end
-
- context 'when on self-managed instance' do
- let(:migration) { described_class.new }
-
- describe '#up' do
- it 'does not schedule background job' do
- expect(migration).not_to receive(:queue_batched_background_migration)
-
- migration.up
- end
- end
-
- describe '#down' do
- it 'does not delete background job' do
- expect(migration).not_to receive(:delete_batched_background_migration)
-
- migration.down
- end
- end
- end
-end
diff --git a/spec/migrations/20230208132608_fix_partition_ids_for_ci_stage_spec.rb b/spec/migrations/20230208132608_fix_partition_ids_for_ci_stage_spec.rb
deleted file mode 100644
index 8b057afc1e9..00000000000
--- a/spec/migrations/20230208132608_fix_partition_ids_for_ci_stage_spec.rb
+++ /dev/null
@@ -1,58 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe FixPartitionIdsForCiStage, migration: :gitlab_ci, feature_category: :continuous_integration do
- let(:migration) { described_class::MIGRATION }
-
- context 'when on saas' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- describe '#up' do
- it 'schedules background jobs for each batch of ci_stages' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- gitlab_schema: :gitlab_ci,
- table_name: :ci_stages,
- column_name: :id,
- interval: described_class::DELAY_INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
- end
-
- context 'when on self-managed instance' do
- let(:migration) { described_class.new }
-
- describe '#up' do
- it 'does not schedule background job' do
- expect(migration).not_to receive(:queue_batched_background_migration)
-
- migration.up
- end
- end
-
- describe '#down' do
- it 'does not delete background job' do
- expect(migration).not_to receive(:delete_batched_background_migration)
-
- migration.down
- end
- end
- end
-end
diff --git a/spec/migrations/20230209090702_fix_partition_ids_for_ci_build_report_result_spec.rb b/spec/migrations/20230209090702_fix_partition_ids_for_ci_build_report_result_spec.rb
deleted file mode 100644
index f0ac8239f58..00000000000
--- a/spec/migrations/20230209090702_fix_partition_ids_for_ci_build_report_result_spec.rb
+++ /dev/null
@@ -1,60 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe FixPartitionIdsForCiBuildReportResult,
- migration: :gitlab_ci,
- feature_category: :continuous_integration do
- let(:migration) { described_class::MIGRATION }
-
- context 'when on saas' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- describe '#up' do
- it 'schedules background jobs for each batch of ci_build_report_results' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- gitlab_schema: :gitlab_ci,
- table_name: :ci_build_report_results,
- column_name: :build_id,
- interval: described_class::DELAY_INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
- end
-
- context 'when on self-managed instance' do
- let(:migration) { described_class.new }
-
- describe '#up' do
- it 'does not schedule background job' do
- expect(migration).not_to receive(:queue_batched_background_migration)
-
- migration.up
- end
- end
-
- describe '#down' do
- it 'does not delete background job' do
- expect(migration).not_to receive(:delete_batched_background_migration)
-
- migration.down
- end
- end
- end
-end
diff --git a/spec/migrations/20230209092204_fix_partition_ids_for_ci_build_trace_metadata_spec.rb b/spec/migrations/20230209092204_fix_partition_ids_for_ci_build_trace_metadata_spec.rb
deleted file mode 100644
index a93ba36d9ae..00000000000
--- a/spec/migrations/20230209092204_fix_partition_ids_for_ci_build_trace_metadata_spec.rb
+++ /dev/null
@@ -1,60 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe FixPartitionIdsForCiBuildTraceMetadata,
- migration: :gitlab_ci,
- feature_category: :continuous_integration do
- let(:migration) { described_class::MIGRATION }
-
- context 'when on saas' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- describe '#up' do
- it 'schedules background jobs for each batch of ci_build_trace_metadata' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- gitlab_schema: :gitlab_ci,
- table_name: :ci_build_trace_metadata,
- column_name: :build_id,
- interval: described_class::DELAY_INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
- end
-
- context 'when on self-managed instance' do
- let(:migration) { described_class.new }
-
- describe '#up' do
- it 'does not schedule background job' do
- expect(migration).not_to receive(:queue_batched_background_migration)
-
- migration.up
- end
- end
-
- describe '#down' do
- it 'does not delete background job' do
- expect(migration).not_to receive(:delete_batched_background_migration)
-
- migration.down
- end
- end
- end
-end
diff --git a/spec/migrations/20230209140102_fix_partition_ids_for_ci_build_metadata_spec.rb b/spec/migrations/20230209140102_fix_partition_ids_for_ci_build_metadata_spec.rb
deleted file mode 100644
index c354d68749f..00000000000
--- a/spec/migrations/20230209140102_fix_partition_ids_for_ci_build_metadata_spec.rb
+++ /dev/null
@@ -1,60 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe FixPartitionIdsForCiBuildMetadata,
- migration: :gitlab_ci,
- feature_category: :continuous_integration do
- let(:migration) { described_class::MIGRATION }
-
- context 'when on saas' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- describe '#up' do
- it 'schedules background jobs for each batch of p_ci_builds_metadata' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- gitlab_schema: :gitlab_ci,
- table_name: :p_ci_builds_metadata,
- column_name: :id,
- interval: described_class::DELAY_INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
- end
-
- context 'when on self-managed instance' do
- let(:migration) { described_class.new }
-
- describe '#up' do
- it 'does not schedule background job' do
- expect(migration).not_to receive(:queue_batched_background_migration)
-
- migration.up
- end
- end
-
- describe '#down' do
- it 'does not delete background job' do
- expect(migration).not_to receive(:delete_batched_background_migration)
-
- migration.down
- end
- end
- end
-end
diff --git a/spec/migrations/20230214122717_fix_partition_ids_for_ci_job_variables_spec.rb b/spec/migrations/20230214122717_fix_partition_ids_for_ci_job_variables_spec.rb
deleted file mode 100644
index 64275855262..00000000000
--- a/spec/migrations/20230214122717_fix_partition_ids_for_ci_job_variables_spec.rb
+++ /dev/null
@@ -1,51 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe FixPartitionIdsForCiJobVariables, migration: :gitlab_ci, feature_category: :continuous_integration do
- let(:builds) { table(:ci_builds, database: :ci) }
- let(:job_variables) { table(:ci_job_variables, database: :ci) }
- let(:connection) { job_variables.connection }
-
- around do |example|
- connection.execute "ALTER TABLE #{job_variables.quoted_table_name} DISABLE TRIGGER ALL;"
-
- example.run
- ensure
- connection.execute "ALTER TABLE #{job_variables.quoted_table_name} ENABLE TRIGGER ALL;"
- end
-
- before do
- job = builds.create!(partition_id: 100)
-
- job_variables.insert_all!([
- { job_id: job.id, partition_id: 100, key: 'variable-100' },
- { job_id: job.id, partition_id: 101, key: 'variable-101' }
- ])
- end
-
- describe '#up', :aggregate_failures do
- context 'when on sass' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- it 'fixes partition_id' do
- expect { migrate! }.not_to raise_error
-
- expect(job_variables.where(partition_id: 100).count).to eq(2)
- expect(job_variables.where(partition_id: 101).count).to eq(0)
- end
- end
-
- context 'when on self managed' do
- it 'does not change partition_id' do
- expect { migrate! }.not_to raise_error
-
- expect(job_variables.where(partition_id: 100).count).to eq(1)
- expect(job_variables.where(partition_id: 101).count).to eq(1)
- end
- end
- end
-end
diff --git a/spec/migrations/20230214154101_fix_partition_ids_on_ci_sources_pipelines_spec.rb b/spec/migrations/20230214154101_fix_partition_ids_on_ci_sources_pipelines_spec.rb
deleted file mode 100644
index 44031175497..00000000000
--- a/spec/migrations/20230214154101_fix_partition_ids_on_ci_sources_pipelines_spec.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe FixPartitionIdsOnCiSourcesPipelines, migration: :gitlab_ci, feature_category: :continuous_integration do
- let(:sources_pipelines) { table(:ci_sources_pipelines, database: :ci) }
-
- before do
- sources_pipelines.insert_all!([
- { partition_id: 100, source_partition_id: 100 },
- { partition_id: 100, source_partition_id: 101 },
- { partition_id: 101, source_partition_id: 100 },
- { partition_id: 101, source_partition_id: 101 }
- ])
- end
-
- describe '#up' do
- context 'when on sass' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- it 'fixes partition_id and source_partition_id' do
- expect { migrate! }.not_to raise_error
-
- expect(sources_pipelines.where(partition_id: 100).count).to eq(4)
- expect(sources_pipelines.where(partition_id: 101).count).to eq(0)
- expect(sources_pipelines.where(source_partition_id: 100).count).to eq(4)
- expect(sources_pipelines.where(source_partition_id: 101).count).to eq(0)
- end
- end
-
- context 'when on self managed' do
- it 'does not change partition_id or source_partition_id' do
- expect { migrate! }.not_to raise_error
-
- expect(sources_pipelines.where(partition_id: 100).count).to eq(2)
- expect(sources_pipelines.where(partition_id: 100).count).to eq(2)
- expect(sources_pipelines.where(source_partition_id: 101).count).to eq(2)
- expect(sources_pipelines.where(source_partition_id: 101).count).to eq(2)
- end
- end
- end
-end
diff --git a/spec/migrations/20230726142555_ensure_notes_bigint_backfill_is_finished_for_self_managed_spec.rb b/spec/migrations/20230726142555_ensure_notes_bigint_backfill_is_finished_for_self_managed_spec.rb
new file mode 100644
index 00000000000..a76ad767cf2
--- /dev/null
+++ b/spec/migrations/20230726142555_ensure_notes_bigint_backfill_is_finished_for_self_managed_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe EnsureNotesBigintBackfillIsFinishedForSelfManaged, feature_category: :database do
+ describe '#up' do
+ let(:migration_arguments) do
+ {
+ job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
+ table_name: 'notes',
+ column_name: 'id',
+ job_arguments: [['id'], ['id_convert_to_bigint']]
+ }
+ end
+
+ it 'ensures the migration is completed for self-managed instances' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
+ expect(instance).to receive(:ensure_batched_background_migration_is_finished).with(migration_arguments)
+ end
+
+ migrate!
+ end
+
+ it 'skips the check for GitLab.com, dev, or test' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
+ expect(instance).not_to receive(:ensure_batched_background_migration_is_finished)
+ end
+
+ migrate!
+ end
+ end
+end
diff --git a/spec/migrations/20230726144458_swap_notes_id_to_bigint_for_self_managed_spec.rb b/spec/migrations/20230726144458_swap_notes_id_to_bigint_for_self_managed_spec.rb
new file mode 100644
index 00000000000..b4552cebc58
--- /dev/null
+++ b/spec/migrations/20230726144458_swap_notes_id_to_bigint_for_self_managed_spec.rb
@@ -0,0 +1,120 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SwapNotesIdToBigintForSelfManaged, feature_category: :database do
+ let(:connection) { described_class.new.connection }
+
+ shared_examples 'column `id_convert_to_bigint` is already dropped' do
+ before do
+ connection.execute('ALTER TABLE notes ALTER COLUMN id TYPE bigint')
+ connection.execute('ALTER TABLE notes DROP COLUMN IF EXISTS id_convert_to_bigint')
+ end
+
+ after do
+ connection.execute('ALTER TABLE notes DROP COLUMN IF EXISTS id_convert_to_bigint')
+ end
+
+ it 'does not swap the columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ notes_table.reset_column_information
+
+ expect(notes_table.columns.find { |c| c.name == 'id' }.sql_type).to eq('bigint')
+ expect(notes_table.columns.find { |c| c.name == 'id_convert_to_bigint' }).to be_nil
+ }
+
+ migration.after -> {
+ notes_table.reset_column_information
+
+ expect(notes_table.columns.find { |c| c.name == 'id' }.sql_type).to eq('bigint')
+ expect(notes_table.columns.find { |c| c.name == 'id_convert_to_bigint' }).to be_nil
+ }
+ end
+ end
+ end
+ end
+
+ describe '#up' do
+ let!(:notes_table) { table(:notes) }
+
+ before do
+ # rubocop:disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to(
+ receive(:com_or_dev_or_test_but_not_jh?).and_return(com_or_dev_or_test_but_not_jh?)
+ )
+ # rubocop:enable RSpec/AnyInstanceOf
+ end
+
+ context 'when GitLab.com, dev, or test' do
+ let(:com_or_dev_or_test_but_not_jh?) { true }
+
+ it_behaves_like 'column `id_convert_to_bigint` is already dropped'
+ end
+
+ context 'when self-managed instance with the `id_convert_to_bigint` column already dropped' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ it_behaves_like 'column `id_convert_to_bigint` is already dropped'
+ end
+
+ context 'when self-managed instance columns already swapped' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ before do
+ connection.execute('ALTER TABLE notes ALTER COLUMN id TYPE bigint')
+ connection.execute('ALTER TABLE notes ADD COLUMN IF NOT EXISTS id_convert_to_bigint integer')
+
+ disable_migrations_output { migrate! }
+ end
+
+ after do
+ connection.execute('ALTER TABLE notes DROP COLUMN IF EXISTS id_convert_to_bigint')
+ end
+
+ it 'does not swap the columns' do
+ expect(notes_table.columns.find { |c| c.name == 'id' }.sql_type).to eq('bigint')
+ expect(notes_table.columns.find { |c| c.name == 'id_convert_to_bigint' }.sql_type).to eq('integer')
+ end
+ end
+
+ context 'when self-managed instance' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ before do
+ connection.execute('ALTER TABLE notes ALTER COLUMN id TYPE integer')
+ connection.execute('ALTER TABLE notes ADD COLUMN IF NOT EXISTS id_convert_to_bigint bigint')
+ connection.execute('ALTER TABLE notes ALTER COLUMN id_convert_to_bigint TYPE bigint')
+ connection.execute('DROP INDEX IF EXISTS index_notes_on_id_convert_to_bigint CASCADE')
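+ # Recreate the sync function so writes copy id into id_convert_to_bigint, simulating a self-managed instance that has not yet swapped the columns.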
+ connection.execute('CREATE OR REPLACE FUNCTION trigger_080e73845bfd() RETURNS trigger LANGUAGE plpgsql AS $$
+ BEGIN NEW."id_convert_to_bigint" := NEW."id"; RETURN NEW; END; $$;')
+ end
+
+ after do
+ connection.execute('ALTER TABLE notes DROP COLUMN IF EXISTS id_convert_to_bigint')
+ end
+
+ it 'swaps the columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ notes_table.reset_column_information
+
+ expect(notes_table.columns.find { |c| c.name == 'id' }.sql_type).to eq('integer')
+ expect(notes_table.columns.find { |c| c.name == 'id_convert_to_bigint' }.sql_type).to eq('bigint')
+ }
+
+ migration.after -> {
+ notes_table.reset_column_information
+
+ expect(notes_table.columns.find { |c| c.name == 'id' }.sql_type).to eq('bigint')
+ expect(notes_table.columns.find { |c| c.name == 'id_convert_to_bigint' }.sql_type).to eq('integer')
+ }
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20230802212443_add_current_user_todos_widget_to_epic_work_item_type_spec.rb b/spec/migrations/20230802212443_add_current_user_todos_widget_to_epic_work_item_type_spec.rb
new file mode 100644
index 00000000000..22a8f93b524
--- /dev/null
+++ b/spec/migrations/20230802212443_add_current_user_todos_widget_to_epic_work_item_type_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AddCurrentUserTodosWidgetToEpicWorkItemType, :migration, feature_category: :team_planning do
+ it_behaves_like 'migration that adds a widget to a work item type' do
+ let(:target_type_enum_value) { described_class::EPIC_ENUM_VALUE }
+ let(:target_type) { :epic }
+ let(:widgets_for_type) do
+ {
+ 'Assignees' => 0,
+ 'Description' => 1,
+ 'Hierarchy' => 2,
+ 'Labels' => 3,
+ 'Notes' => 5,
+ 'Start and due date' => 6,
+ 'Health status' => 7,
+ 'Status' => 11,
+ 'Notifications' => 14,
+ 'Award emoji' => 16
+ }.freeze
+ end
+ end
+end
diff --git a/spec/migrations/20230809170822_ensure_system_note_metadata_bigint_backfill_is_finished_for_self_managed_spec.rb b/spec/migrations/20230809170822_ensure_system_note_metadata_bigint_backfill_is_finished_for_self_managed_spec.rb
new file mode 100644
index 00000000000..1c33872142d
--- /dev/null
+++ b/spec/migrations/20230809170822_ensure_system_note_metadata_bigint_backfill_is_finished_for_self_managed_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe EnsureSystemNoteMetadataBigintBackfillIsFinishedForSelfManaged, feature_category: :database do
+ describe '#up' do
+ let(:migration_arguments) do
+ {
+ job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
+ table_name: 'system_note_metadata',
+ column_name: 'id',
+ job_arguments: [['note_id'], ['note_id_convert_to_bigint']]
+ }
+ end
+
+ it 'ensures the migration is completed for self-managed instances' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
+ expect(instance).to receive(:ensure_batched_background_migration_is_finished).with(migration_arguments)
+ end
+
+ migrate!
+ end
+
+ it 'skips the check for GitLab.com, dev, or test' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
+ expect(instance).not_to receive(:ensure_batched_background_migration_is_finished)
+ end
+
+ migrate!
+ end
+ end
+end
diff --git a/spec/migrations/20230809174702_swap_system_note_metadata_note_id_to_bigint_for_self_managed_spec.rb b/spec/migrations/20230809174702_swap_system_note_metadata_note_id_to_bigint_for_self_managed_spec.rb
new file mode 100644
index 00000000000..e8413b24ae9
--- /dev/null
+++ b/spec/migrations/20230809174702_swap_system_note_metadata_note_id_to_bigint_for_self_managed_spec.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SwapSystemNoteMetadataNoteIdToBigintForSelfManaged, feature_category: :database do
+ let(:connection) { described_class.new.connection }
+ let(:system_note_metadata) { table(:system_note_metadata) }
+
+ shared_examples 'column `note_id_convert_to_bigint` is already dropped' do
+ before do
+ connection.execute('ALTER TABLE system_note_metadata ALTER COLUMN note_id TYPE bigint')
+ connection.execute('ALTER TABLE system_note_metadata DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'does not swap the columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ system_note_metadata.reset_column_information
+
+ expect(system_note_metadata.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(system_note_metadata.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be_nil
+ }
+
+ migration.after -> {
+ system_note_metadata.reset_column_information
+
+ expect(system_note_metadata.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(system_note_metadata.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be_nil
+ }
+ end
+ end
+ end
+ end
+
+ describe '#up' do
+ before do
+ # rubocop:disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to(
+ receive(:com_or_dev_or_test_but_not_jh?).and_return(com_or_dev_or_test_but_not_jh?)
+ )
+ # rubocop:enable RSpec/AnyInstanceOf
+ end
+
+ context 'when GitLab.com, dev, or test' do
+ let(:com_or_dev_or_test_but_not_jh?) { true }
+
+ it_behaves_like 'column `note_id_convert_to_bigint` is already dropped'
+ end
+
+ context 'when self-managed instance with the `note_id_convert_to_bigint` column already dropped' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ it_behaves_like 'column `note_id_convert_to_bigint` is already dropped'
+ end
+
+ context 'when self-managed instance columns already swapped' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ before do
+ connection.execute('ALTER TABLE system_note_metadata ALTER COLUMN note_id TYPE bigint')
+ connection.execute(
+ 'ALTER TABLE system_note_metadata ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint integer'
+ )
+
+ disable_migrations_output { migrate! }
+ end
+
+ after do
+ connection.execute('ALTER TABLE system_note_metadata DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'does not swap the columns' do
+ expect(system_note_metadata.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(system_note_metadata.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to(
+ eq('integer')
+ )
+ end
+ end
+
+ context 'when self-managed instance' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ before do
+ connection.execute('ALTER TABLE system_note_metadata ALTER COLUMN note_id TYPE integer')
+ connection.execute('ALTER TABLE system_note_metadata ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint bigint')
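+ # Recreate the function that keeps note_id_convert_to_bigint in sync with note_id, as on a self-managed instance mid-conversion.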
+ connection.execute('CREATE OR REPLACE FUNCTION trigger_482bac5ec48a() RETURNS trigger LANGUAGE plpgsql AS $$
+ BEGIN NEW."note_id_convert_to_bigint" := NEW."note_id"; RETURN NEW; END; $$;')
+ end
+
+ after do
+ connection.execute('ALTER TABLE system_note_metadata DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'swaps the columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ system_note_metadata.reset_column_information
+
+ expect(system_note_metadata.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer')
+ expect(system_note_metadata.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to(
+ eq('bigint')
+ )
+ }
+
+ migration.after -> {
+ system_note_metadata.reset_column_information
+
+ expect(system_note_metadata.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(system_note_metadata.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to(
+ eq('integer')
+ )
+ }
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20230809203254_ensure_issue_user_mentions_bigint_backfill_is_finished_for_self_managed_spec.rb b/spec/migrations/20230809203254_ensure_issue_user_mentions_bigint_backfill_is_finished_for_self_managed_spec.rb
new file mode 100644
index 00000000000..09694a2ee8d
--- /dev/null
+++ b/spec/migrations/20230809203254_ensure_issue_user_mentions_bigint_backfill_is_finished_for_self_managed_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe EnsureIssueUserMentionsBigintBackfillIsFinishedForSelfManaged, feature_category: :database do
+ describe '#up' do
+ let(:migration_arguments) do
+ {
+ job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
+ table_name: 'issue_user_mentions',
+ column_name: 'id',
+ job_arguments: [['note_id'], ['note_id_convert_to_bigint']]
+ }
+ end
+
+ it 'ensures the migration is completed for self-managed instances' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
+ expect(instance).to receive(:ensure_batched_background_migration_is_finished).with(migration_arguments)
+ end
+
+ migrate!
+ end
+
+ it 'skips the check for GitLab.com, dev, or test' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
+ expect(instance).not_to receive(:ensure_batched_background_migration_is_finished)
+ end
+
+ migrate!
+ end
+ end
+end
diff --git a/spec/migrations/20230809210550_swap_issue_user_mentions_note_id_to_bigint_for_self_managed_spec.rb b/spec/migrations/20230809210550_swap_issue_user_mentions_note_id_to_bigint_for_self_managed_spec.rb
new file mode 100644
index 00000000000..a311f876890
--- /dev/null
+++ b/spec/migrations/20230809210550_swap_issue_user_mentions_note_id_to_bigint_for_self_managed_spec.rb
@@ -0,0 +1,127 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SwapIssueUserMentionsNoteIdToBigintForSelfManaged, feature_category: :database do
+ let(:connection) { described_class.new.connection }
+ let(:issue_user_mentions) { table(:issue_user_mentions) }
+
+ shared_examples 'column `note_id_convert_to_bigint` is already dropped' do
+ before do
+ connection.execute('ALTER TABLE issue_user_mentions ALTER COLUMN note_id TYPE bigint')
+ connection.execute('ALTER TABLE issue_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ after do
+ connection.execute('ALTER TABLE issue_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'does not swap the columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ issue_user_mentions.reset_column_information
+
+ expect(issue_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(issue_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be_nil
+ }
+
+ migration.after -> {
+ issue_user_mentions.reset_column_information
+
+ expect(issue_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(issue_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be_nil
+ }
+ end
+ end
+ end
+ end
+
+ describe '#up' do
+ before do
+ # rubocop:disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to(
+ receive(:com_or_dev_or_test_but_not_jh?).and_return(com_or_dev_or_test_but_not_jh?)
+ )
+ # rubocop:enable RSpec/AnyInstanceOf
+ end
+
+ context 'when GitLab.com, dev, or test' do
+ let(:com_or_dev_or_test_but_not_jh?) { true }
+
+ it_behaves_like 'column `note_id_convert_to_bigint` is already dropped'
+ end
+
+ context 'when self-managed instance with the `note_id_convert_to_bigint` column already dropped' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ it_behaves_like 'column `note_id_convert_to_bigint` is already dropped'
+ end
+
+ context 'when self-managed instance columns already swapped' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ before do
+ connection.execute('ALTER TABLE issue_user_mentions ALTER COLUMN note_id TYPE bigint')
+ connection.execute(
+ 'ALTER TABLE issue_user_mentions ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint integer'
+ )
+
+ disable_migrations_output { migrate! }
+ end
+
+ after do
+ connection.execute('ALTER TABLE issue_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'does not swap the columns' do
+ expect(issue_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(issue_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to(
+ eq('integer')
+ )
+ end
+ end
+
+ context 'when self-managed instance' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ before do
+ connection.execute('ALTER TABLE issue_user_mentions ALTER COLUMN note_id TYPE integer')
+ connection.execute('ALTER TABLE issue_user_mentions ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint bigint')
+ connection.execute('ALTER TABLE issue_user_mentions ALTER COLUMN note_id_convert_to_bigint TYPE bigint')
+ connection.execute('DROP INDEX IF EXISTS index_issue_user_mentions_on_note_id_convert_to_bigint CASCADE')
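+ # Recreate the function that keeps note_id_convert_to_bigint in sync with note_id before the swap.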
+ connection.execute('CREATE OR REPLACE FUNCTION trigger_c2051020aa8b() RETURNS trigger LANGUAGE plpgsql AS $$
+ BEGIN NEW."note_id_convert_to_bigint" := NEW."note_id"; RETURN NEW; END; $$;')
+ end
+
+ after do
+ connection.execute('ALTER TABLE issue_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'swaps the columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ issue_user_mentions.reset_column_information
+
+ expect(issue_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer')
+ expect(issue_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to(
+ eq('bigint')
+ )
+ }
+
+ migration.after -> {
+ issue_user_mentions.reset_column_information
+
+ expect(issue_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(issue_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to(
+ eq('integer')
+ )
+ }
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20230810113227_swap_note_diff_files_note_id_to_bigint_for_self_hosts_spec.rb b/spec/migrations/20230810113227_swap_note_diff_files_note_id_to_bigint_for_self_hosts_spec.rb
new file mode 100644
index 00000000000..de169d9d21b
--- /dev/null
+++ b/spec/migrations/20230810113227_swap_note_diff_files_note_id_to_bigint_for_self_hosts_spec.rb
@@ -0,0 +1,156 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SwapNoteDiffFilesNoteIdToBigintForSelfHosts, feature_category: :database do
+ describe '#up' do
+ after(:all) do
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE note_diff_files DROP COLUMN IF EXISTS diff_note_id_convert_to_bigint')
+ end
+
+ context 'when GitLab.com, dev, or test' do
+ before do
+ # As we call `schema_migrate_down!` before each example, and for this migration
+ # `#down` is the same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE note_diff_files ALTER COLUMN diff_note_id TYPE bigint')
+ connection.execute('ALTER TABLE note_diff_files DROP COLUMN IF EXISTS diff_note_id_convert_to_bigint')
+ end
+
+ it 'does not swap the columns' do
+ # rubocop: disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
+ # rubocop: enable RSpec/AnyInstanceOf
+
+ note_diff_files = table(:note_diff_files)
+
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ note_diff_files.reset_column_information
+
+ expect(note_diff_files.columns.find { |c| c.name == 'diff_note_id' }.sql_type).to eq('bigint')
+ expect(note_diff_files.columns.find { |c| c.name == 'diff_note_id_convert_to_bigint' }).to be_nil
+ }
+
+ migration.after -> {
+ note_diff_files.reset_column_information
+
+ expect(note_diff_files.columns.find { |c| c.name == 'diff_note_id' }.sql_type).to eq('bigint')
+ expect(note_diff_files.columns.find { |c| c.name == 'diff_note_id_convert_to_bigint' }).to be_nil
+ }
+ end
+ end
+ end
+ end
+
+ context 'when self-managed instance with the columns already swapped' do
+ before do
+ # As we call `schema_migrate_down!` before each example, and for this migration
+ # `#down` is the same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE note_diff_files ALTER COLUMN diff_note_id TYPE bigint')
+ connection.execute(
+ 'ALTER TABLE note_diff_files ADD COLUMN IF NOT EXISTS diff_note_id_convert_to_bigint integer'
+ )
+ end
+
+ it 'does not swap the columns' do
+ # rubocop: disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
+ # rubocop: enable RSpec/AnyInstanceOf
+
+ note_diff_files = table(:note_diff_files)
+
+ migrate!
+
+ expect(note_diff_files.columns.find { |c| c.name == 'diff_note_id' }.sql_type).to eq('bigint')
+ expect(note_diff_files.columns.find do |c|
+ c.name == 'diff_note_id_convert_to_bigint'
+ end.sql_type).to eq('integer')
+ end
+ end
+
+ context 'when self-managed instance with the `diff_note_id_convert_to_bigint` column already dropped' do
+ before do
+ # As we call `schema_migrate_down!` before each example, and for this migration
+ # `#down` is the same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE note_diff_files ALTER COLUMN diff_note_id TYPE bigint')
+ connection.execute('ALTER TABLE note_diff_files DROP COLUMN IF EXISTS diff_note_id_convert_to_bigint')
+ end
+
+ it 'does not swap the columns' do
+ # rubocop: disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
+ # rubocop: enable RSpec/AnyInstanceOf
+
+ note_diff_files = table(:note_diff_files)
+
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ note_diff_files.reset_column_information
+
+ expect(note_diff_files.columns.find { |c| c.name == 'diff_note_id' }.sql_type).to eq('bigint')
+ expect(note_diff_files.columns.find { |c| c.name == 'diff_note_id_convert_to_bigint' }).to be_nil
+ }
+
+ migration.after -> {
+ note_diff_files.reset_column_information
+
+ expect(note_diff_files.columns.find { |c| c.name == 'diff_note_id' }.sql_type).to eq('bigint')
+ expect(note_diff_files.columns.find { |c| c.name == 'diff_note_id_convert_to_bigint' }).to be_nil
+ }
+ end
+ end
+ end
+ end
+
+ context 'when self-managed instance' do
+ before do
+ # As we call `schema_migrate_down!` before each example, and for this migration
+ # `#down` is the same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE note_diff_files ALTER COLUMN diff_note_id TYPE integer')
+ connection.execute('ALTER TABLE note_diff_files ADD COLUMN IF NOT EXISTS diff_note_id_convert_to_bigint bigint')
+ connection.execute('ALTER TABLE note_diff_files ALTER COLUMN diff_note_id_convert_to_bigint TYPE bigint')
+ connection.execute('DROP INDEX IF EXISTS index_note_diff_files_on_note_id_convert_to_bigint')
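+ # Recreate the function that keeps diff_note_id_convert_to_bigint in sync with diff_note_id before the swap.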
+ connection.execute('CREATE OR REPLACE FUNCTION trigger_775287b6d67a() RETURNS trigger LANGUAGE plpgsql AS $$
+ BEGIN NEW."diff_note_id_convert_to_bigint" := NEW."diff_note_id"; RETURN NEW; END; $$;')
+ end
+
+ it 'swaps the columns' do
+ # rubocop: disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
+ # rubocop: enable RSpec/AnyInstanceOf
+
+ note_diff_files = table(:note_diff_files)
+
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ note_diff_files.reset_column_information
+
+ expect(note_diff_files.columns.find { |c| c.name == 'diff_note_id' }.sql_type).to eq('integer')
+ expect(note_diff_files.columns.find do |c|
+ c.name == 'diff_note_id_convert_to_bigint'
+ end.sql_type).to eq('bigint')
+ }
+
+ migration.after -> {
+ note_diff_files.reset_column_information
+
+ expect(note_diff_files.columns.find { |c| c.name == 'diff_note_id' }.sql_type).to eq('bigint')
+ expect(note_diff_files.columns.find do |c|
+ c.name == 'diff_note_id_convert_to_bigint'
+ end.sql_type).to eq('integer')
+ }
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20230810124545_schedule_fixing_namespace_ids_of_vulnerability_reads_spec.rb b/spec/migrations/20230810124545_schedule_fixing_namespace_ids_of_vulnerability_reads_spec.rb
new file mode 100644
index 00000000000..294545bed2b
--- /dev/null
+++ b/spec/migrations/20230810124545_schedule_fixing_namespace_ids_of_vulnerability_reads_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe ScheduleFixingNamespaceIdsOfVulnerabilityReads, feature_category: :vulnerability_management do
+ let(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :vulnerability_reads,
+ column_name: :vulnerability_id,
+ interval: 2.minutes,
+ batch_size: 10_000,
+ sub_batch_size: 100
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20230811103457_queue_backfill_nuget_normalized_version_spec.rb b/spec/migrations/20230811103457_queue_backfill_nuget_normalized_version_spec.rb
new file mode 100644
index 00000000000..6eb7ba0e2cd
--- /dev/null
+++ b/spec/migrations/20230811103457_queue_backfill_nuget_normalized_version_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillNugetNormalizedVersion, feature_category: :package_registry do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :packages_nuget_metadata,
+ column_name: :package_id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20230815140656_queue_populate_denormalized_columns_for_sbom_occurrences_spec.rb b/spec/migrations/20230815140656_queue_populate_denormalized_columns_for_sbom_occurrences_spec.rb
new file mode 100644
index 00000000000..3976e398607
--- /dev/null
+++ b/spec/migrations/20230815140656_queue_populate_denormalized_columns_for_sbom_occurrences_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueuePopulateDenormalizedColumnsForSbomOccurrences, feature_category: :dependency_management do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :sbom_occurrences,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20230815160428_rename_plans_titles_with_legacy_plan_names_spec.rb b/spec/migrations/20230815160428_rename_plans_titles_with_legacy_plan_names_spec.rb
new file mode 100644
index 00000000000..21c17a60e90
--- /dev/null
+++ b/spec/migrations/20230815160428_rename_plans_titles_with_legacy_plan_names_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe RenamePlansTitlesWithLegacyPlanNames, feature_category: :subscription_management do
+ let(:plans) { table(:plans) }
+
+ let!(:premium_plan) { plans.create!(name: 'premium', title: 'Premium (Formerly Silver)') }
+ let!(:ultimate_plan) { plans.create!(name: 'ultimate', title: 'Ultimate (Formerly Gold)') }
+
+ describe '#up' do
+ it 'updates the plan titles' do
+ expect(premium_plan.title).to eq('Premium (Formerly Silver)')
+ expect(ultimate_plan.title).to eq('Ultimate (Formerly Gold)')
+
+ migrate!
+
+ expect(premium_plan.reload.title).to eq('Premium')
+ expect(ultimate_plan.reload.title).to eq('Ultimate')
+ end
+ end
+end
diff --git a/spec/migrations/20230816152540_ensure_dum_note_id_bigint_backfill_is_finished_for_self_managed_spec.rb b/spec/migrations/20230816152540_ensure_dum_note_id_bigint_backfill_is_finished_for_self_managed_spec.rb
new file mode 100644
index 00000000000..c8590250c62
--- /dev/null
+++ b/spec/migrations/20230816152540_ensure_dum_note_id_bigint_backfill_is_finished_for_self_managed_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe EnsureDumNoteIdBigintBackfillIsFinishedForSelfManaged, feature_category: :database do
+ describe '#up' do
+ let(:migration_arguments) do
+ {
+ job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
+ table_name: 'design_user_mentions',
+ column_name: 'id',
+ job_arguments: [['note_id'], ['note_id_convert_to_bigint']]
+ }
+ end
+
+ it 'ensures the migration is completed for self-managed instances' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
+ expect(instance).to receive(:ensure_batched_background_migration_is_finished).with(migration_arguments)
+ end
+
+ migrate!
+ end
+
+ it 'skips the check for GitLab.com, dev, or test' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
+ expect(instance).not_to receive(:ensure_batched_background_migration_is_finished)
+ end
+
+ migrate!
+ end
+ end
+end
diff --git a/spec/migrations/20230816152639_swap_design_user_mentions_note_id_to_big_int_for_self_managed_spec.rb b/spec/migrations/20230816152639_swap_design_user_mentions_note_id_to_big_int_for_self_managed_spec.rb
new file mode 100644
index 00000000000..f6342fe6388
--- /dev/null
+++ b/spec/migrations/20230816152639_swap_design_user_mentions_note_id_to_big_int_for_self_managed_spec.rb
@@ -0,0 +1,122 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe SwapDesignUserMentionsNoteIdToBigIntForSelfManaged, feature_category: :database do
+ let(:connection) { described_class.new.connection }
+ let(:design_user_mentions) { table(:design_user_mentions) }
+
+ shared_examples 'column `note_id_convert_to_bigint` is already dropped' do
+ before do
+ connection.execute('ALTER TABLE design_user_mentions ALTER COLUMN note_id TYPE bigint')
+ connection.execute('ALTER TABLE design_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'does not swap the columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ design_user_mentions.reset_column_information
+
+ expect(design_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(design_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be_nil
+ }
+
+ migration.after -> {
+ design_user_mentions.reset_column_information
+
+ expect(design_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(design_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be_nil
+ }
+ end
+ end
+ end
+ end
+
+ describe '#up' do
+ before do
+ # rubocop:disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to(
+ receive(:com_or_dev_or_test_but_not_jh?).and_return(com_or_dev_or_test_but_not_jh?)
+ )
+ # rubocop:enable RSpec/AnyInstanceOf
+ end
+
+ context 'when GitLab.com, dev, or test' do
+ let(:com_or_dev_or_test_but_not_jh?) { true }
+
+ it_behaves_like 'column `note_id_convert_to_bigint` is already dropped'
+ end
+
+ context 'when self-managed instance with the `note_id_convert_to_bigint` column already dropped' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ it_behaves_like 'column `note_id_convert_to_bigint` is already dropped'
+ end
+
+ context 'when self-managed instance columns already swapped' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ before do
+ connection.execute('ALTER TABLE design_user_mentions ALTER COLUMN note_id TYPE bigint')
+ connection.execute(
+ 'ALTER TABLE design_user_mentions ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint integer'
+ )
+
+ disable_migrations_output { migrate! }
+ end
+
+ after do
+ connection.execute('ALTER TABLE design_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'does not swap the columns' do
+ expect(design_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(design_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to(
+ eq('integer')
+ )
+ end
+ end
+
+ context 'when self-managed instance' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ before do
+ connection.execute('ALTER TABLE design_user_mentions ALTER COLUMN note_id TYPE integer')
+ connection.execute('ALTER TABLE design_user_mentions ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint bigint')
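+ # Recreate the function that keeps note_id_convert_to_bigint in sync with note_id before the swap.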
+ connection.execute('CREATE OR REPLACE FUNCTION trigger_3dc62927cae8() RETURNS trigger LANGUAGE plpgsql AS $$
+ BEGIN NEW."note_id_convert_to_bigint" := NEW."note_id"; RETURN NEW; END; $$;')
+ end
+
+ after do
+ connection.execute('ALTER TABLE design_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'swaps the columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ design_user_mentions.reset_column_information
+
+ expect(design_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer')
+ expect(design_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to(
+ eq('bigint')
+ )
+ }
+
+ migration.after -> {
+ design_user_mentions.reset_column_information
+
+ expect(design_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(design_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to(
+ eq('integer')
+ )
+ }
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20230817111938_swap_events_target_id_to_bigint_for_self_hosts_spec.rb b/spec/migrations/20230817111938_swap_events_target_id_to_bigint_for_self_hosts_spec.rb
new file mode 100644
index 00000000000..515d4f21fc6
--- /dev/null
+++ b/spec/migrations/20230817111938_swap_events_target_id_to_bigint_for_self_hosts_spec.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SwapEventsTargetIdToBigintForSelfHosts, feature_category: :database do
+ let(:connection) { described_class.new.connection }
+ let(:events) { table(:events) }
+
+ shared_examples 'column `target_id_convert_to_bigint` is already dropped' do
+ before do
+ connection.execute('ALTER TABLE events ALTER COLUMN target_id TYPE bigint')
+ connection.execute('ALTER TABLE events DROP COLUMN IF EXISTS target_id_convert_to_bigint')
+ end
+
+ it 'does not swap the columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ events.reset_column_information
+
+ expect(events.columns.find { |c| c.name == 'target_id' }.sql_type).to eq('bigint')
+ expect(events.columns.find { |c| c.name == 'target_id_convert_to_bigint' }).to be_nil
+ }
+
+ migration.after -> {
+ events.reset_column_information
+
+ expect(events.columns.find { |c| c.name == 'target_id' }.sql_type).to eq('bigint')
+ expect(events.columns.find { |c| c.name == 'target_id_convert_to_bigint' }).to be_nil
+ }
+ end
+ end
+ end
+ end
+
+ describe '#up' do
+ before do
+ # rubocop:disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to(
+ receive(:com_or_dev_or_test_but_not_jh?).and_return(com_or_dev_or_test_but_not_jh?)
+ )
+ # rubocop:enable RSpec/AnyInstanceOf
+ end
+
+ context 'when GitLab.com, dev, or test' do
+ let(:com_or_dev_or_test_but_not_jh?) { true }
+
+ it_behaves_like 'column `target_id_convert_to_bigint` is already dropped'
+ end
+
+ context 'when self-managed instance with the `target_id_convert_to_bigint` column already dropped' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ it_behaves_like 'column `target_id_convert_to_bigint` is already dropped'
+ end
+
+ context 'when self-managed instance columns already swapped' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ before do
+ connection.execute('ALTER TABLE events ALTER COLUMN target_id TYPE bigint')
+ connection.execute(
+ 'ALTER TABLE events ADD COLUMN IF NOT EXISTS target_id_convert_to_bigint integer'
+ )
+
+ disable_migrations_output { migrate! }
+ end
+
+ after do
+ connection.execute('ALTER TABLE events DROP COLUMN IF EXISTS target_id_convert_to_bigint')
+ end
+
+ it 'does not swap the columns' do
+ expect(events.columns.find { |c| c.name == 'target_id' }.sql_type).to eq('bigint')
+ expect(events.columns.find { |c| c.name == 'target_id_convert_to_bigint' }.sql_type).to(
+ eq('integer')
+ )
+ end
+ end
+
+ context 'when self-managed instance' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ before do
+ connection.execute('ALTER TABLE events ALTER COLUMN target_id TYPE integer')
+ connection.execute('ALTER TABLE events ADD COLUMN IF NOT EXISTS target_id_convert_to_bigint bigint')
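+ # Recreate the function that keeps target_id_convert_to_bigint in sync with target_id before the swap.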
+ connection.execute('CREATE OR REPLACE FUNCTION trigger_cd1aeb22b34a() RETURNS trigger LANGUAGE plpgsql AS $$
+ BEGIN NEW."target_id_convert_to_bigint" := NEW."target_id"; RETURN NEW; END; $$;')
+ end
+
+ after do
+ connection.execute('ALTER TABLE events DROP COLUMN IF EXISTS target_id_convert_to_bigint')
+ end
+
+ it 'swaps the columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ events.reset_column_information
+
+ expect(events.columns.find { |c| c.name == 'target_id' }.sql_type).to eq('integer')
+ expect(events.columns.find { |c| c.name == 'target_id_convert_to_bigint' }.sql_type).to(
+ eq('bigint')
+ )
+ }
+
+ migration.after -> {
+ events.reset_column_information
+
+ expect(events.columns.find { |c| c.name == 'target_id' }.sql_type).to eq('bigint')
+ expect(events.columns.find { |c| c.name == 'target_id_convert_to_bigint' }.sql_type).to(
+ eq('integer')
+ )
+ }
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20230817143637_swap_award_emoji_note_id_to_bigint_for_self_hosts_spec.rb b/spec/migrations/20230817143637_swap_award_emoji_note_id_to_bigint_for_self_hosts_spec.rb
new file mode 100644
index 00000000000..a9521a40d0d
--- /dev/null
+++ b/spec/migrations/20230817143637_swap_award_emoji_note_id_to_bigint_for_self_hosts_spec.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SwapAwardEmojiNoteIdToBigintForSelfHosts, feature_category: :database do
+ let(:connection) { described_class.new.connection }
+ let(:award_emoji) { table(:award_emoji) }
+
+ shared_examples 'column `awardable_id_convert_to_bigint` is already dropped' do
+ before do
+ connection.execute('ALTER TABLE award_emoji ALTER COLUMN awardable_id TYPE bigint')
+ connection.execute('ALTER TABLE award_emoji DROP COLUMN IF EXISTS awardable_id_convert_to_bigint')
+ end
+
+ it 'does not swap the columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ award_emoji.reset_column_information
+
+ expect(award_emoji.columns.find { |c| c.name == 'awardable_id' }.sql_type).to eq('bigint')
+ expect(award_emoji.columns.find { |c| c.name == 'awardable_id_convert_to_bigint' }).to be_nil
+ }
+
+ migration.after -> {
+ award_emoji.reset_column_information
+
+ expect(award_emoji.columns.find { |c| c.name == 'awardable_id' }.sql_type).to eq('bigint')
+ expect(award_emoji.columns.find { |c| c.name == 'awardable_id_convert_to_bigint' }).to be_nil
+ }
+ end
+ end
+ end
+ end
+
+ describe '#up' do
+ before do
+ # rubocop:disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to(
+ receive(:com_or_dev_or_test_but_not_jh?).and_return(com_or_dev_or_test_but_not_jh?)
+ )
+ # rubocop:enable RSpec/AnyInstanceOf
+ end
+
+ context 'when GitLab.com, dev, or test' do
+ let(:com_or_dev_or_test_but_not_jh?) { true }
+
+ it_behaves_like 'column `awardable_id_convert_to_bigint` is already dropped'
+ end
+
+ context 'when self-managed instance with the `awardable_id_convert_to_bigint` column already dropped' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ it_behaves_like 'column `awardable_id_convert_to_bigint` is already dropped'
+ end
+
+ context 'when self-managed instance columns already swapped' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ before do
+ connection.execute('ALTER TABLE award_emoji ALTER COLUMN awardable_id TYPE bigint')
+ connection.execute(
+ 'ALTER TABLE award_emoji ADD COLUMN IF NOT EXISTS awardable_id_convert_to_bigint integer'
+ )
+
+ disable_migrations_output { migrate! }
+ end
+
+ after do
+ connection.execute('ALTER TABLE award_emoji DROP COLUMN IF EXISTS awardable_id_convert_to_bigint')
+ end
+
+ it 'does not swap the columns' do
+ expect(award_emoji.columns.find { |c| c.name == 'awardable_id' }.sql_type).to eq('bigint')
+ expect(award_emoji.columns.find { |c| c.name == 'awardable_id_convert_to_bigint' }.sql_type).to(
+ eq('integer')
+ )
+ end
+ end
+
+ context 'when self-managed instance' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ before do
+ connection.execute('ALTER TABLE award_emoji ALTER COLUMN awardable_id TYPE integer')
+ connection.execute('ALTER TABLE award_emoji ADD COLUMN IF NOT EXISTS awardable_id_convert_to_bigint bigint')
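+ # Recreate the function that keeps awardable_id_convert_to_bigint in sync with awardable_id before the swap.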
+ connection.execute('CREATE OR REPLACE FUNCTION trigger_909cf0a06094() RETURNS trigger LANGUAGE plpgsql AS $$
+ BEGIN NEW."awardable_id_convert_to_bigint" := NEW."awardable_id"; RETURN NEW; END; $$;')
+ end
+
+ after do
+ connection.execute('ALTER TABLE award_emoji DROP COLUMN IF EXISTS awardable_id_convert_to_bigint')
+ end
+
+ it 'swaps the columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ award_emoji.reset_column_information
+
+ expect(award_emoji.columns.find { |c| c.name == 'awardable_id' }.sql_type).to eq('integer')
+ expect(award_emoji.columns.find { |c| c.name == 'awardable_id_convert_to_bigint' }.sql_type).to(
+ eq('bigint')
+ )
+ }
+
+ migration.after -> {
+ award_emoji.reset_column_information
+
+ expect(award_emoji.columns.find { |c| c.name == 'awardable_id' }.sql_type).to eq('bigint')
+ expect(award_emoji.columns.find { |c| c.name == 'awardable_id_convert_to_bigint' }.sql_type).to(
+ eq('integer')
+ )
+ }
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20230818083610_queue_backfill_users_with_defaults_spec.rb b/spec/migrations/20230818083610_queue_backfill_users_with_defaults_spec.rb
new file mode 100644
index 00000000000..4cd72b4fa7a
--- /dev/null
+++ b/spec/migrations/20230818083610_queue_backfill_users_with_defaults_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillUsersWithDefaults, feature_category: :user_profile do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :users,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE,
+ max_batch_size: described_class::MAX_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20230818085219_queue_backfill_user_preferences_with_defaults_spec.rb b/spec/migrations/20230818085219_queue_backfill_user_preferences_with_defaults_spec.rb
new file mode 100644
index 00000000000..eff14be22f6
--- /dev/null
+++ b/spec/migrations/20230818085219_queue_backfill_user_preferences_with_defaults_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillUserPreferencesWithDefaults, feature_category: :user_profile do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :user_preferences,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE,
+ max_batch_size: described_class::MAX_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20230818142801_queue_create_compliance_standards_adherence_spec.rb b/spec/migrations/20230818142801_queue_create_compliance_standards_adherence_spec.rb
new file mode 100644
index 00000000000..466d0bca997
--- /dev/null
+++ b/spec/migrations/20230818142801_queue_create_compliance_standards_adherence_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueCreateComplianceStandardsAdherence, feature_category: :compliance_management do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ context 'for EE' do
+ before do
+ allow(Gitlab).to receive(:ee?).and_return(true)
+ end
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :projects,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+ end
+
+ context 'for FOSS' do
+ before do
+ allow(Gitlab).to receive(:ee?).and_return(false)
+ end
+
+ it 'does not schedule a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20230821081603_queue_convert_credit_card_validation_data_to_hashes_spec.rb b/spec/migrations/20230821081603_queue_convert_credit_card_validation_data_to_hashes_spec.rb
new file mode 100644
index 00000000000..36ab25ffa3e
--- /dev/null
+++ b/spec/migrations/20230821081603_queue_convert_credit_card_validation_data_to_hashes_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueConvertCreditCardValidationDataToHashes, feature_category: :user_profile do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :user_credit_card_validations,
+ column_name: :user_id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20230822104028_delete_project_callout_three_spec.rb b/spec/migrations/20230822104028_delete_project_callout_three_spec.rb
new file mode 100644
index 00000000000..127af643976
--- /dev/null
+++ b/spec/migrations/20230822104028_delete_project_callout_three_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe DeleteProjectCalloutThree, feature_category: :groups_and_projects do
+ let(:migration) { described_class.new }
+
+ let(:user) { table(:users).create!(name: 'test', email: 'test@example.com', projects_limit: 5) }
+ let(:namespace) { table(:namespaces).create!(name: 'name', path: 'path') }
+ let(:project) { table(:projects).create!(namespace_id: namespace.id, project_namespace_id: namespace.id) }
+ let(:project_callout) { table(:user_project_callouts) }
+
+ let!(:project_callouts_1) { project_callout.create!(project_id: project.id, user_id: user.id, feature_name: 1) }
+ let!(:project_callouts_3) { project_callout.create!(project_id: project.id, user_id: user.id, feature_name: 3) }
+
+ it 'deletes only feature name 3' do
+ expect { migrate! }.to change { project_callout.count }.from(2).to(1)
+ expect(project_callout.find_by_id(project_callouts_3.id)).to be_nil
+ end
+end
diff --git a/spec/migrations/20230822151454_remove_free_user_cap_email_workers_spec.rb b/spec/migrations/20230822151454_remove_free_user_cap_email_workers_spec.rb
new file mode 100644
index 00000000000..fd56c9a0988
--- /dev/null
+++ b/spec/migrations/20230822151454_remove_free_user_cap_email_workers_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe RemoveFreeUserCapEmailWorkers, :migration, feature_category: :onboarding do
+ describe '#up' do
+ it 'calls sidekiq_remove_jobs with the correct argument' do
+ deprecated_job_classes = %w[
+ Namespaces::FreeUserCap::BackfillNotificationClearingJobsWorker
+ Namespaces::FreeUserCap::BackfillNotificationJobsWorker
+ Namespaces::FreeUserCap::NotificationClearingWorker
+ Namespaces::FreeUserCap::OverLimitNotificationWorker
+ ]
+
+ expect_next_instance_of(described_class) do |migration|
+ expect(migration).to receive(:sidekiq_remove_jobs)
+ .with({ job_klasses: deprecated_job_classes })
+ end
+
+ migrate!
+ end
+ end
+end
diff --git a/spec/migrations/20230823090001_queue_backfill_project_statistics_storage_size_with_recent_size_spec.rb b/spec/migrations/20230823090001_queue_backfill_project_statistics_storage_size_with_recent_size_spec.rb
new file mode 100644
index 00000000000..07e15663d1d
--- /dev/null
+++ b/spec/migrations/20230823090001_queue_backfill_project_statistics_storage_size_with_recent_size_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillProjectStatisticsStorageSizeWithRecentSize, feature_category: :consumables_cost_management do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :project_statistics,
+ column_name: :project_id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20230823140934_add_linked_items_widget_to_ticket_work_item_type_spec.rb b/spec/migrations/20230823140934_add_linked_items_widget_to_ticket_work_item_type_spec.rb
new file mode 100644
index 00000000000..6a83b4b1a7c
--- /dev/null
+++ b/spec/migrations/20230823140934_add_linked_items_widget_to_ticket_work_item_type_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AddLinkedItemsWidgetToTicketWorkItemType, :migration, feature_category: :portfolio_management do
+ it_behaves_like 'migration that adds a widget to a work item type' do
+ let(:target_type_enum_value) { described_class::TICKET_ENUM_VALUE }
+ let(:target_type) { :ticket }
+ let(:additional_types) { { ticket: 8 } }
+ let(:widgets_for_type) do
+ {
+ 'Assignees' => 0,
+ 'Description' => 1,
+ 'Hierarchy' => 2,
+ 'Labels' => 3,
+ 'Notes' => 5,
+ 'Iteration' => 9,
+ 'Milestone' => 4,
+ 'Weight' => 8,
+ 'Current user todos' => 15,
+ 'Start and due date' => 6,
+ 'Health status' => 7,
+ 'Notifications' => 14,
+ 'Award emoji' => 16
+ }.freeze
+ end
+ end
+end
diff --git a/spec/migrations/20230830121830_queue_update_users_set_external_if_service_account_spec.rb b/spec/migrations/20230830121830_queue_update_users_set_external_if_service_account_spec.rb
new file mode 100644
index 00000000000..12839e0852b
--- /dev/null
+++ b/spec/migrations/20230830121830_queue_update_users_set_external_if_service_account_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueUpdateUsersSetExternalIfServiceAccount, feature_category: :system_access do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :users,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL.to_i,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20230831084632_queue_sync_scan_result_policies_spec.rb b/spec/migrations/20230831084632_queue_sync_scan_result_policies_spec.rb
new file mode 100644
index 00000000000..3c4a7382e02
--- /dev/null
+++ b/spec/migrations/20230831084632_queue_sync_scan_result_policies_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueSyncScanResultPolicies, feature_category: :security_policy_management do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :security_orchestration_policy_configurations,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20230906204934_restart_self_hosted_sent_notifications_bigint_conversion_spec.rb b/spec/migrations/20230906204934_restart_self_hosted_sent_notifications_bigint_conversion_spec.rb
new file mode 100644
index 00000000000..01dbb5d1ef8
--- /dev/null
+++ b/spec/migrations/20230906204934_restart_self_hosted_sent_notifications_bigint_conversion_spec.rb
@@ -0,0 +1,144 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+def column_type_from_table(table, column)
+ table.columns.find { |c| c.name == column }.sql_type
+end
+
+RSpec.describe RestartSelfHostedSentNotificationsBigintConversion, feature_category: :database do
+ let(:sent_notifications) { table(:sent_notifications) }
+
+ before do
+ # rubocop: disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(!self_hosted)
+ # rubocop: enable RSpec/AnyInstanceOf
+ end
+
+ context 'when the instance is self-hosted' do
+ let(:self_hosted) { true }
+
+ describe '#up' do
+ context 'when id is already a bigint' do
+ it 'does nothing' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ sent_notifications.reset_column_information
+ expect(column_type_from_table(sent_notifications, 'id')).to eq('bigint')
+ }
+ migration.after -> {
+ sent_notifications.reset_column_information
+ expect(column_type_from_table(sent_notifications, 'id')).to eq('bigint')
+ }
+ end
+ end
+ end
+ end
+
+ context 'when id is an integer and id_convert_to_bigint exists' do
+ before do
+ conn = described_class.new.connection
+ conn.execute('ALTER TABLE sent_notifications ALTER COLUMN id TYPE integer')
+ conn.execute('ALTER TABLE sent_notifications ADD COLUMN id_convert_to_bigint BIGINT')
+ sent_notifications.reset_column_information
+ end
+
+ after do
+ conn = described_class.new.connection
+ conn.execute('ALTER TABLE sent_notifications ALTER COLUMN id TYPE bigint')
+ conn.execute('ALTER TABLE sent_notifications DROP COLUMN id_convert_to_bigint')
+ sent_notifications.reset_column_information
+ end
+
+ it 'does nothing' do
+ disable_migrations_output do
+ expect(column_type_from_table(sent_notifications, 'id')).to eq('integer')
+ expect(sent_notifications.columns.find { |c| c.name == 'id_convert_to_bigint' }).not_to be_nil
+ migrate!
+ expect(column_type_from_table(sent_notifications, 'id')).to eq('integer')
+ expect(sent_notifications.columns.find { |c| c.name == 'id_convert_to_bigint' }).not_to be_nil
+ end
+ end
+ end
+
+ context 'when id is an integer and id_convert_to_bigint does not exist' do
+ before do
+ conn = described_class.new.connection
+ conn.execute('ALTER TABLE sent_notifications ALTER COLUMN id TYPE integer')
+ conn.execute('ALTER TABLE sent_notifications DROP COLUMN IF EXISTS id_convert_to_bigint')
+ sent_notifications.reset_column_information
+ end
+
+ after do
+ conn = described_class.new.connection
+ conn.execute('ALTER TABLE sent_notifications ALTER COLUMN id TYPE bigint')
+ conn.execute('ALTER TABLE sent_notifications DROP COLUMN IF EXISTS id_convert_to_bigint')
+ sent_notifications.reset_column_information
+ end
+
+ it 'creates id_convert_to_bigint' do
+ disable_migrations_output do
+ expect(column_type_from_table(sent_notifications, 'id')).to eq('integer')
+ expect(sent_notifications.columns.find { |c| c.name == 'id_convert_to_bigint' }).to be_nil
+ migrate!
+ sent_notifications.reset_column_information
+ expect(column_type_from_table(sent_notifications, 'id')).to eq('integer')
+ expect(sent_notifications.columns.find { |c| c.name == 'id_convert_to_bigint' }).not_to be_nil
+ end
+ end
+ end
+ end
+
+ describe '#down' do
+ context 'when id is an integer and id_convert_to_bigint exists' do
+ before do
+ conn = described_class.new.connection
+ conn.execute('ALTER TABLE sent_notifications ALTER COLUMN id TYPE integer')
+ conn.execute('ALTER TABLE sent_notifications ADD COLUMN id_convert_to_bigint BIGINT')
+ sent_notifications.reset_column_information
+ end
+
+ after do
+ conn = described_class.new.connection
+ conn.execute('ALTER TABLE sent_notifications ALTER COLUMN id TYPE bigint')
+ conn.execute('ALTER TABLE sent_notifications DROP COLUMN IF EXISTS id_convert_to_bigint')
+ sent_notifications.reset_column_information
+ end
+
+ it 'drops id_convert_to_bigint' do
+ disable_migrations_output do
+ migrate!
+ schema_migrate_down!
+ end
+ expect(sent_notifications.columns.find { |c| c.name == 'id_convert_to_bigint' }).to be_nil
+ end
+ end
+ end
+ end
+
+ context 'when the instance is not self-hosted' do
+ let(:self_hosted) { false }
+
+ describe '#up' do
+ it 'keeps id as a bigint and results in no change' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ sent_notifications.reset_column_information
+ expect(sent_notifications.columns.find { |c| c.name == 'id' }.sql_type).to eq('bigint')
+ }
+ migration.after -> {
+ sent_notifications.reset_column_information
+ expect(sent_notifications.columns.find { |c| c.name == 'id' }.sql_type).to eq('bigint')
+ }
+ end
+ end
+ end
+ end
+
+ # No need to describe #down: it is a no-op, and the reversible test above already covers it
+ end
+end
diff --git a/spec/migrations/20230906204935_restart_self_hosted_sent_notifications_backfill_spec.rb b/spec/migrations/20230906204935_restart_self_hosted_sent_notifications_backfill_spec.rb
new file mode 100644
index 00000000000..f2c9ce3d005
--- /dev/null
+++ b/spec/migrations/20230906204935_restart_self_hosted_sent_notifications_backfill_spec.rb
@@ -0,0 +1,162 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+def column_type_from_table(table, column)
+ table.columns.find { |c| c.name == column }.sql_type
+end
+
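+# Counts the batched background migration records that target the sent_notifications table.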
+def sent_notifications_backfills(connection)
+ res = connection.execute <<~SQL
+ SELECT * FROM batched_background_migrations WHERE table_name = 'sent_notifications'
+ SQL
+
+ res.ntuples
+end
+
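+# Seeds a pre-existing backfill record for sent_notifications (id -> id_convert_to_bigint),
+# as an earlier conversion attempt would have left behind.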
+def create_previous_backfill(connection)
+ connection.execute <<~SQL
+ INSERT INTO batched_background_migrations
+ (min_value, max_value, batch_size, sub_batch_size, interval, "status",#{' '}
+ job_class_name, batch_class_name,
+ table_name, column_name, job_arguments,
+ gitlab_schema, created_at, updated_at)
+ VALUES
+ (1, 3, 20000, 1000, 120, 3,
+ 'CopyColumnUsingBackgroundMigrationJob', 'PrimaryKeyBatchingStrategy',
+ 'sent_notifications', 'id', '[["id"], ["id_convert_to_bigint"]]',
+ 'gitlab_main', NOW(), NOW())
+ SQL
+end
+
+RSpec.describe RestartSelfHostedSentNotificationsBackfill, feature_category: :database do
+ let(:sent_notifications) { table(:sent_notifications) }
+
+ before do
+ # rubocop: disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(!self_hosted)
+ # rubocop: enable RSpec/AnyInstanceOf
+ end
+
+ describe '#up' do
+ context 'when the instance is self-hosted' do
+ let(:self_hosted) { true }
+
+ context 'when id is an integer' do
+ before do
+ described_class.new.connection.execute('ALTER TABLE sent_notifications ALTER COLUMN id TYPE integer')
+ described_class.new.connection.execute(
+ 'ALTER TABLE sent_notifications ADD COLUMN IF NOT EXISTS id_convert_to_bigint BIGINT'
+ )
+ sent_notifications.reset_column_information
+ end
+
+ after do
+ described_class.new.connection.execute('ALTER TABLE sent_notifications ALTER COLUMN id TYPE bigint')
+ described_class.new.connection.execute(
+ 'ALTER TABLE sent_notifications DROP COLUMN IF EXISTS id_convert_to_bigint'
+ )
+ sent_notifications.reset_column_information
+ end
+
+ context 'when a backfill has never been done' do
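+ # A value of 0 stands for a row whose id has not been copied into id_convert_to_bigint yet.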
+ let(:id_convert_to_bigint_sample) { 0 }
+
+ before do
+ described_class.new.connection.execute <<~SQL
+ INSERT INTO
+ sent_notifications
+ (id_convert_to_bigint, reply_key)
+ VALUES (#{id_convert_to_bigint_sample}, 4)
+ SQL
+ end
+
+ after do
+ described_class.new.connection.execute <<~SQL
+ DELETE FROM sent_notifications
+ SQL
+ end
+
+ context 'when there is a record of an incomplete backfill' do
+ before do
+ create_previous_backfill(described_class.new.connection)
+ end
+
+ after do
+ described_class.new.connection.execute <<~SQL
+ DELETE FROM batched_background_migrations
+ SQL
+ end
+
+ it 'calls delete_batched_background_migration and does not raise an error' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:delete_batched_background_migration)
+ end
+ disable_migrations_output do
+ expect { migrate! }.not_to raise_error
+ end
+ expect(sent_notifications_backfills(described_class.new.connection)).to eq 1
+ end
+ end
+
+ context 'when there is no previous record of a backfill' do
+ it 'begins a backfill' do
+ disable_migrations_output do
+ migrate!
+ end
+ expect(sent_notifications_backfills(described_class.new.connection)).to eq 1
+ end
+ end
+ end
+
+ context 'when a backfill has previously been done' do
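+ # A non-zero value stands for a row that an earlier backfill already copied.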
+ let(:id_convert_to_bigint_sample) { 4 }
+
+ before do
+ described_class.new.connection.execute <<~SQL
+ INSERT INTO
+ sent_notifications
+ (id_convert_to_bigint, reply_key)
+ VALUES (#{id_convert_to_bigint_sample}, 4)
+ SQL
+ end
+
+ after do
+ described_class.new.connection.execute <<~SQL
+ DELETE FROM sent_notifications
+ SQL
+ end
+
+ it 'does not start a backfill' do
+ disable_migrations_output do
+ migrate!
+ end
+ expect(sent_notifications_backfills(described_class.new.connection)).to eq 0
+ end
+ end
+ end
+
+ context 'when id is a bigint' do
+ it 'does not start a backfill' do
+ disable_migrations_output do
+ migrate!
+ end
+ expect(sent_notifications_backfills(described_class.new.connection)).to eq 0
+ end
+ end
+ end
+
+ context 'when the instance is not self-hosted' do
+ let(:self_hosted) { false }
+
+ it 'does not start a backfill' do
+ disable_migrations_output do
+ migrate!
+ end
+ expect(sent_notifications_backfills(described_class.new.connection)).to eq 0
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20230907155247_queue_backfill_has_merge_request_of_vulnerability_reads_spec.rb b/spec/migrations/20230907155247_queue_backfill_has_merge_request_of_vulnerability_reads_spec.rb
new file mode 100644
index 00000000000..7214e0114d4
--- /dev/null
+++ b/spec/migrations/20230907155247_queue_backfill_has_merge_request_of_vulnerability_reads_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillHasMergeRequestOfVulnerabilityReads, feature_category: :database do
+ let!(:batched_migration) { described_class::MIGRATION_NAME }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :vulnerability_reads,
+ column_name: :vulnerability_id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/backfill_alert_management_prometheus_integrations_spec.rb b/spec/migrations/backfill_alert_management_prometheus_integrations_spec.rb
new file mode 100644
index 00000000000..dcc364aa44a
--- /dev/null
+++ b/spec/migrations/backfill_alert_management_prometheus_integrations_spec.rb
@@ -0,0 +1,126 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe BackfillAlertManagementPrometheusIntegrations, feature_category: :incident_management do
+ let(:namespace_class) { table(:namespaces) }
+ let(:project_class) { table(:projects) }
+ let(:settings_class) { table(:project_alerting_settings) }
+ let(:http_integrations_class) { table(:alert_management_http_integrations) }
+ let(:integration_class) { table(:integrations) }
+
+ let!(:namespace_1) { namespace_class.create!(name: "namespace_1", path: "namespace_1") }
+ let!(:namespace_2) { namespace_class.create!(name: "namespace_2", path: "namespace_2") }
+ let!(:namespace_3) { namespace_class.create!(name: "namespace_3", path: "namespace_3") }
+ let!(:project_1) { project_class.create!(project_namespace_id: namespace_1.id, namespace_id: namespace_1.id) }
+ let!(:project_2) { project_class.create!(project_namespace_id: namespace_2.id, namespace_id: namespace_1.id) }
+ let!(:project_3) { project_class.create!(project_namespace_id: namespace_3.id, namespace_id: namespace_1.id) }
+
+ let!(:http_integrations) do
+ [
+ create_http_integration(project_2, 'legacy', name: 'Legacy HTTP'),
+ create_http_integration(project_2, 'other', name: 'Other Prometheus', type: 1)
+ ]
+ end
+
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 1)
+
+ # disabled integration
+ create_prometheus_integration(project_1, active: false)
+ create_alerting_settings(project_1, token: :a)
+
+ # enabled integration
+ create_prometheus_integration(project_2, active: true)
+ create_alerting_settings(project_2, token: :b)
+
+ # settings without integration
+ create_alerting_settings(project_3, token: :c)
+
+ # Should ignore: another type of integration in the same project
+ integration_class.create!(
+ project_id: project_3.id,
+ type_new: 'Integrations::Bamboo',
+ active: true
+ )
+ end
+
+ it 'correctly migrates up and down' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(http_integrations_class.all).to match_array(http_integrations)
+ }
+
+ migration.after -> {
+ expect(http_integrations_class.all).to contain_exactly(
+ *http_integrations,
+ expected_http_integration(project_1, token: :a, active: false),
+ expected_http_integration(project_2, token: :b, active: true),
+ expected_http_integration(project_3, token: :c, active: false)
+ )
+ }
+ end
+ end
+
+ context 'with existing synced http integrations' do
+ let(:synced_integration) do
+ create_http_integration(project_2, 'legacy-prometheus', name: 'Prometheus', active: false)
+ end
+
+ let!(:http_integrations) { [synced_integration] }
+
+ it 'does not overwrite synced attributes' do
+ expect { migrate! }.to not_change { synced_integration.attributes }
+
+ expect(http_integrations_class.all).to contain_exactly(
+ expected_http_integration(project_1, token: :a, active: false),
+ synced_integration,
+ expected_http_integration(project_3, token: :c, active: false)
+ )
+ end
+ end
+
+ private
+
+ def create_prometheus_integration(project, active: true, **args)
+ integration_class.create!(
+ project_id: project.id,
+ type_new: 'Integrations::Prometheus',
+ active: active,
+ **args
+ )
+ end
+
+ def create_alerting_settings(project, token:)
+ settings_class.create!(
+ project_id: project.id,
+ encrypted_token: "token_#{token}",
+ encrypted_token_iv: "iv_#{token}"
+ )
+ end
+
+ def create_http_integration(project, endpoint_id, type: 0, **args)
+ http_integrations_class.create!(
+ project_id: project.id,
+ active: true,
+ encrypted_token_iv: 'iv',
+ encrypted_token: 'token',
+ endpoint_identifier: endpoint_id,
+ type_identifier: type,
+ **args
+ )
+ end
+
+ def expected_http_integration(project, token:, active:)
+ having_attributes(
+ project_id: project.id,
+ active: active,
+ encrypted_token: "token_#{token}",
+ encrypted_token_iv: "iv_#{token}",
+ name: 'Prometheus',
+ endpoint_identifier: 'legacy-prometheus',
+ type_identifier: 1
+ )
+ end
+end
diff --git a/spec/migrations/ensure_mr_user_mentions_note_id_bigint_backfill_is_finished_for_self_managed_spec.rb b/spec/migrations/ensure_mr_user_mentions_note_id_bigint_backfill_is_finished_for_self_managed_spec.rb
new file mode 100644
index 00000000000..482cca67e46
--- /dev/null
+++ b/spec/migrations/ensure_mr_user_mentions_note_id_bigint_backfill_is_finished_for_self_managed_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe EnsureMrUserMentionsNoteIdBigintBackfillIsFinishedForSelfManaged, feature_category: :database do
+ describe '#up' do
+ let(:migration_arguments) do
+ {
+ job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
+ table_name: 'merge_request_user_mentions',
+ column_name: 'id',
+ job_arguments: [['note_id'], ['note_id_convert_to_bigint']]
+ }
+ end
+
+ it 'ensures the migration is completed for self-managed instances' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
+ expect(instance).to receive(:ensure_batched_background_migration_is_finished).with(migration_arguments)
+ end
+
+ migrate!
+ end
+
+ it 'skips the check for GitLab.com, dev, or test' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
+ expect(instance).not_to receive(:ensure_batched_background_migration_is_finished)
+ end
+
+ migrate!
+ end
+ end
+end
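The expectations above pin down the shape of the migration's #up: skip on GitLab.com, dev, and test, otherwise wait for the backfill to finish. A minimal sketch consistent with the spec follows; the class boilerplate around the #up body is an assumption.

# Sketch of the behaviour the spec above asserts; class boilerplate is assumed.
class EnsureMrUserMentionsNoteIdBigintBackfillIsFinishedForSelfManaged < Gitlab::Database::Migration[2.1]
  disable_ddl_transaction!

  def up
    return if com_or_dev_or_test_but_not_jh?

    ensure_batched_background_migration_is_finished(
      job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
      table_name: 'merge_request_user_mentions',
      column_name: 'id',
      job_arguments: [['note_id'], ['note_id_convert_to_bigint']]
    )
  end

  def down
    # no-op
  end
end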
diff --git a/spec/migrations/swap_merge_request_user_mentions_note_id_to_bigint_for_self_managed_spec.rb b/spec/migrations/swap_merge_request_user_mentions_note_id_to_bigint_for_self_managed_spec.rb
new file mode 100644
index 00000000000..5cba691e20e
--- /dev/null
+++ b/spec/migrations/swap_merge_request_user_mentions_note_id_to_bigint_for_self_managed_spec.rb
@@ -0,0 +1,135 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SwapMergeRequestUserMentionsNoteIdToBigintForSelfManaged, feature_category: :database do
+ let(:connection) { described_class.new.connection }
+ let(:merge_request_user_mentions) { table(:merge_request_user_mentions) }
+
+ shared_examples 'column `note_id_convert_to_bigint` is already dropped' do
+ before do
+ connection.execute('ALTER TABLE merge_request_user_mentions ALTER COLUMN note_id TYPE bigint')
+ connection.execute('ALTER TABLE merge_request_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ after do
+ connection.execute('ALTER TABLE merge_request_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+    it 'does not swap the columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ merge_request_user_mentions.reset_column_information
+
+ expect(merge_request_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(merge_request_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be_nil
+ }
+
+ migration.after -> {
+ merge_request_user_mentions.reset_column_information
+
+ expect(merge_request_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(merge_request_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be_nil
+ }
+ end
+ end
+ end
+ end
+
+ describe '#up' do
+ before do
+ # rubocop:disable RSpec/AnyInstanceOf
+ allow_any_instance_of(described_class).to(
+ receive(:com_or_dev_or_test_but_not_jh?).and_return(com_or_dev_or_test_but_not_jh?)
+ )
+ # rubocop:enable RSpec/AnyInstanceOf
+ end
+
+ context 'when GitLab.com, dev, or test' do
+ let(:com_or_dev_or_test_but_not_jh?) { true }
+
+ it_behaves_like 'column `note_id_convert_to_bigint` is already dropped'
+ end
+
+ context 'when self-managed instance with the `note_id_convert_to_bigint` column already dropped' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ it_behaves_like 'column `note_id_convert_to_bigint` is already dropped'
+ end
+
+    context 'when self-managed instance with columns already swapped' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ before do
+ connection.execute('ALTER TABLE merge_request_user_mentions ALTER COLUMN note_id TYPE bigint')
+ connection.execute(
+ 'ALTER TABLE merge_request_user_mentions ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint integer'
+ )
+
+ disable_migrations_output { migrate! }
+ end
+
+ after do
+ connection.execute('ALTER TABLE merge_request_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+      it 'does not swap the columns' do
+ expect(merge_request_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(merge_request_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to(
+ eq('integer')
+ )
+ end
+ end
+
+ context 'when self-managed instance' do
+ let(:com_or_dev_or_test_but_not_jh?) { false }
+
+ before do
+ connection.execute('ALTER TABLE merge_request_user_mentions ALTER COLUMN note_id TYPE integer')
+ connection.execute(
+ 'ALTER TABLE merge_request_user_mentions ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint bigint'
+ )
+ connection.execute('ALTER TABLE merge_request_user_mentions ALTER COLUMN note_id_convert_to_bigint TYPE bigint')
+ connection.execute(
+ 'DROP INDEX IF EXISTS index_merge_request_user_mentions_on_note_id_convert_to_bigint CASCADE'
+ )
+ connection.execute('CREATE OR REPLACE FUNCTION trigger_bfcbace4260d() RETURNS trigger LANGUAGE plpgsql AS $$
+ BEGIN NEW."note_id_convert_to_bigint" := NEW."note_id"; RETURN NEW; END; $$;')
+ end
+
+ after do
+ connection.execute('ALTER TABLE merge_request_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
+ end
+
+ it 'swaps the columns' do
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ merge_request_user_mentions.reset_column_information
+
+ expect(merge_request_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer')
+ expect(merge_request_user_mentions.columns.find do |c|
+ c.name == 'note_id_convert_to_bigint'
+ end.sql_type).to(
+ eq('bigint')
+ )
+ }
+
+ migration.after -> {
+ merge_request_user_mentions.reset_column_information
+
+ expect(merge_request_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(merge_request_user_mentions.columns.find do |c|
+ c.name == 'note_id_convert_to_bigint'
+ end.sql_type).to(
+ eq('integer')
+ )
+ }
+ end
+ end
+ end
+ end
+ end
+end
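The before/after blocks in this spec repeat the same column-type lookup several times. A small helper along these lines (purely illustrative, not part of the diff) would keep the expectations readable:

# Hypothetical spec helper: returns the SQL type of a column, or nil if it is absent.
def column_sql_type(model, column_name)
  model.reset_column_information
  model.columns.find { |c| c.name == column_name }&.sql_type
end

# Example usage inside the migration blocks:
#   expect(column_sql_type(merge_request_user_mentions, 'note_id')).to eq('bigint')
#   expect(column_sql_type(merge_request_user_mentions, 'note_id_convert_to_bigint')).to be_nil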
diff --git a/spec/models/ability_spec.rb b/spec/models/ability_spec.rb
index 422dd9a463b..a808cb1c823 100644
--- a/spec/models/ability_spec.rb
+++ b/spec/models/ability_spec.rb
@@ -270,6 +270,11 @@ RSpec.describe Ability do
end
describe '.issues_readable_by_user' do
+ it 'is aliased to .work_items_readable_by_user' do
+ expect(described_class.method(:issues_readable_by_user))
+ .to eq(described_class.method(:work_items_readable_by_user))
+ end
+
context 'with an admin when admin mode is enabled', :enable_admin_mode do
it 'returns all given issues' do
user = build(:user, admin: true)
diff --git a/spec/models/abuse_report_spec.rb b/spec/models/abuse_report_spec.rb
index 584f9b010ad..1fa60a210e2 100644
--- a/spec/models/abuse_report_spec.rb
+++ b/spec/models/abuse_report_spec.rb
@@ -385,13 +385,28 @@ RSpec.describe AbuseReport, feature_category: :insider_threat do
end
end
- describe '#other_reports_for_user' do
- let(:report) { create(:abuse_report) }
- let(:another_user_report) { create(:abuse_report, user: report.user) }
- let(:another_report) { create(:abuse_report) }
+ describe '#past_closed_reports_for_user' do
+ let(:report_1) { create(:abuse_report, :closed) }
+ let(:report_2) { create(:abuse_report, user: report.user) }
+ let(:report_3) { create(:abuse_report, :closed, user: report.user) }
- it 'returns other reports for the same user' do
- expect(report.other_reports_for_user).to match_array(another_user_report)
+ it 'returns past closed reports for the same user' do
+ expect(report.past_closed_reports_for_user).to match_array(report_3)
+ end
+ end
+
+ describe '#similar_open_reports_for_user' do
+ let(:report_1) { create(:abuse_report, category: 'spam') }
+ let(:report_2) { create(:abuse_report, category: 'spam', user: report.user) }
+ let(:report_3) { create(:abuse_report, category: 'offensive', user: report.user) }
+ let(:report_4) { create(:abuse_report, :closed, category: 'spam', user: report.user) }
+
+ it 'returns open reports for the same user and category' do
+ expect(report.similar_open_reports_for_user).to match_array(report_2)
+ end
+
+ it 'returns no abuse reports when the report is closed' do
+ expect(report_4.similar_open_reports_for_user).to match_array(described_class.none)
end
end
diff --git a/spec/models/active_session_spec.rb b/spec/models/active_session_spec.rb
index 54169c254a6..af884fdb83c 100644
--- a/spec/models/active_session_spec.rb
+++ b/spec/models/active_session_spec.rb
@@ -650,25 +650,13 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_sessions do
end
end
- describe '.set_active_user_cookie' do
+ describe '.set_active_user_cookie', :freeze_time do
let(:auth) { double(cookies: {}) }
it 'sets marketing cookie' do
described_class.set_active_user_cookie(auth)
- expect(auth.cookies[:about_gitlab_active_user][:value]).to be_truthy
- end
- end
-
- describe '.unset_active_user_cookie' do
- let(:auth) { double(cookies: {}) }
-
- before do
- described_class.set_active_user_cookie(auth)
- end
-
- it 'unsets marketing cookie' do
- described_class.unset_active_user_cookie(auth)
- expect(auth.cookies[:about_gitlab_active_user]).to be_nil
+ expect(auth.cookies[:gitlab_user][:value]).to be_truthy
+ expect(auth.cookies[:gitlab_user][:expires]).to be_within(1.minute).of(2.weeks.from_now)
end
end
end
diff --git a/spec/models/alert_management/http_integration_spec.rb b/spec/models/alert_management/http_integration_spec.rb
index 479ae8a4966..dc26d0323d7 100644
--- a/spec/models/alert_management/http_integration_spec.rb
+++ b/spec/models/alert_management/http_integration_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe AlertManagement::HttpIntegration, feature_category: :incident_man
let_it_be(:project) { create(:project) }
- subject(:integration) { build(:alert_management_http_integration) }
+ subject(:integration) { build(:alert_management_http_integration, project: project) }
describe 'associations' do
it { is_expected.to belong_to(:project) }
diff --git a/spec/models/alerting/project_alerting_setting_spec.rb b/spec/models/alerting/project_alerting_setting_spec.rb
index 90c5f8313b0..0424b94a1b7 100644
--- a/spec/models/alerting/project_alerting_setting_spec.rb
+++ b/spec/models/alerting/project_alerting_setting_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Alerting::ProjectAlertingSetting do
+RSpec.describe Alerting::ProjectAlertingSetting, feature_category: :incident_management do
let_it_be(:project) { create(:project) }
subject { create(:project_alerting_setting, project: project) }
@@ -37,4 +37,31 @@ RSpec.describe Alerting::ProjectAlertingSetting do
end
end
end
+
+ describe '#sync_http_integration after_save callback' do
+ let_it_be_with_reload(:setting) { create(:project_alerting_setting, :with_http_integration, project: project) }
+ let_it_be_with_reload(:http_integration) { setting.project.alert_management_http_integrations.last! }
+ let_it_be(:new_token) { 'new_token' }
+
+ context 'with corresponding HTTP integration' do
+ let_it_be(:original_token) { http_integration.token }
+
+ it 'syncs the attribute' do
+ expect { setting.update!(token: new_token) }
+ .to change { http_integration.reload.token }
+ .from(original_token).to(new_token)
+ end
+ end
+
+ context 'without corresponding HTTP integration' do
+ before do
+ http_integration.update_columns(endpoint_identifier: 'legacy')
+ end
+
+ it 'does not sync the attribute or execute extra queries' do
+ expect { setting.update!(token: new_token) }
+ .not_to change { http_integration.reload.token }
+ end
+ end
+ end
end
diff --git a/spec/models/analytics/cycle_analytics/runtime_limiter_spec.rb b/spec/models/analytics/cycle_analytics/runtime_limiter_spec.rb
new file mode 100644
index 00000000000..1a5200719c8
--- /dev/null
+++ b/spec/models/analytics/cycle_analytics/runtime_limiter_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Analytics::CycleAnalytics::RuntimeLimiter, feature_category: :value_stream_management do
+ let(:max_runtime) { 321 }
+ let(:runtime_limiter) { described_class.new(max_runtime) }
+
+ describe '#elapsed_time' do
+ it 'reports monotonic elapsed time since instantiation' do
+ elapsed = 123
+ first_monotonic_time = 100
+ second_monotonic_time = first_monotonic_time + elapsed
+
+ expect(Gitlab::Metrics::System).to receive(:monotonic_time)
+ .and_return(first_monotonic_time, second_monotonic_time)
+
+ expect(runtime_limiter.elapsed_time).to eq(elapsed)
+ end
+ end
+
+ describe '#over_time?' do
+ it 'returns true if over time' do
+ start_time = 100
+ allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(start_time, start_time + max_runtime - 1)
+
+ expect(runtime_limiter.over_time?).to be(false)
+
+ allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(start_time + max_runtime)
+ expect(runtime_limiter.over_time?).to be(true)
+
+ allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(start_time + max_runtime + 1)
+ expect(runtime_limiter.over_time?).to be(true)
+ end
+ end
+
+ describe '#was_over_time?' do
+ it 'returns true if over_time? returned true at an earlier step' do
+ first_monotonic_time = 10
+ second_monotonic_time = first_monotonic_time + 50
+ third_monotonic_time = second_monotonic_time + 50 # over time: 110 > 100
+
+ expect(Gitlab::Metrics::System).to receive(:monotonic_time)
+ .and_return(first_monotonic_time, second_monotonic_time, third_monotonic_time)
+
+ runtime_limiter = described_class.new(100)
+
+ expect(runtime_limiter.over_time?).to be(false) # uses the second_monotonic_time
+ expect(runtime_limiter.was_over_time?).to be(false)
+
+ expect(runtime_limiter.over_time?).to be(true) # uses the third_monotonic_time
+ expect(runtime_limiter.was_over_time?).to be(true)
+ end
+ end
+end
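The spec above fully describes the limiter's public interface: elapsed_time, over_time?, and was_over_time?. The following standalone sketch shows one way such a limiter can be implemented; it uses Process.clock_gettime directly instead of Gitlab::Metrics::System so it runs outside the application, and it is illustrative rather than the production class.

# Illustrative, self-contained runtime limiter matching the interface exercised above.
class SimpleRuntimeLimiter
  def initialize(max_runtime)
    @max_runtime = max_runtime
    @start_time = monotonic_time
    @over_time = false
  end

  # Seconds elapsed since instantiation, measured on the monotonic clock.
  def elapsed_time
    monotonic_time - @start_time
  end

  # True once the elapsed time reaches the limit; remembers that it happened.
  def over_time?
    @over_time = true if elapsed_time >= @max_runtime
    @over_time
  end

  # True if any earlier over_time? call already returned true.
  def was_over_time?
    @over_time
  end

  private

  def monotonic_time
    Process.clock_gettime(Process::CLOCK_MONOTONIC)
  end
end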
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 5a70bec8b33..3fc7d8f6fc8 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -25,6 +25,7 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { expect(setting.kroki_formats).to eq({}) }
it { expect(setting.default_branch_protection_defaults).to eq({}) }
it { expect(setting.max_decompressed_archive_size).to eq(25600) }
+ it { expect(setting.decompress_archive_file_timeout).to eq(210) }
end
describe 'validations' do
@@ -134,6 +135,9 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.to validate_presence_of(:container_registry_import_target_plan) }
it { is_expected.to validate_presence_of(:container_registry_import_created_before) }
+ it { is_expected.to validate_numericality_of(:decompress_archive_file_timeout).only_integer.is_greater_than_or_equal_to(0) }
+ it { is_expected.not_to allow_value(nil).for(:decompress_archive_file_timeout) }
+
it { is_expected.to validate_numericality_of(:dependency_proxy_ttl_group_policy_worker_capacity).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.not_to allow_value(nil).for(:dependency_proxy_ttl_group_policy_worker_capacity) }
@@ -237,6 +241,11 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.not_to allow_value(nil).for(:users_get_by_id_limit_allowlist) }
it { is_expected.to allow_value([]).for(:users_get_by_id_limit_allowlist) }
+ it { is_expected.to allow_value(many_usernames(100)).for(:search_rate_limit_allowlist) }
+ it { is_expected.not_to allow_value(many_usernames(101)).for(:search_rate_limit_allowlist) }
+ it { is_expected.not_to allow_value(nil).for(:search_rate_limit_allowlist) }
+ it { is_expected.to allow_value([]).for(:search_rate_limit_allowlist) }
+
it { is_expected.to allow_value('all_tiers').for(:whats_new_variant) }
it { is_expected.to allow_value('current_tier').for(:whats_new_variant) }
it { is_expected.to allow_value('disabled').for(:whats_new_variant) }
@@ -436,11 +445,14 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.not_to allow_value(nil).for(:snowplow_collector_hostname) }
it { is_expected.to allow_value("snowplow.gitlab.com").for(:snowplow_collector_hostname) }
+ it { is_expected.to allow_value("db-snowplow.gitlab.com").for(:snowplow_database_collector_hostname) }
+ it { is_expected.not_to allow_value("#{'a' * 256}db-snowplow.gitlab.com").for(:snowplow_database_collector_hostname) }
it { is_expected.not_to allow_value('/example').for(:snowplow_collector_hostname) }
end
context 'when snowplow is not enabled' do
it { is_expected.to allow_value(nil).for(:snowplow_collector_hostname) }
+ it { is_expected.to allow_value(nil).for(:snowplow_database_collector_hostname) }
end
end
diff --git a/spec/models/award_emoji_spec.rb b/spec/models/award_emoji_spec.rb
index 586ec8f723a..b179f2df816 100644
--- a/spec/models/award_emoji_spec.rb
+++ b/spec/models/award_emoji_spec.rb
@@ -2,7 +2,9 @@
require 'spec_helper'
-RSpec.describe AwardEmoji do
+RSpec.describe AwardEmoji, feature_category: :team_planning do
+ let_it_be(:user) { create(:user) }
+
describe 'Associations' do
it { is_expected.to belong_to(:awardable) }
it { is_expected.to belong_to(:user) }
@@ -60,7 +62,6 @@ RSpec.describe AwardEmoji do
end
context 'custom emoji' do
- let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:emoji) { create(:custom_emoji, name: 'partyparrot', namespace: group) }
let_it_be(:project) { create(:project, namespace: group) }
@@ -144,27 +145,27 @@ RSpec.describe AwardEmoji do
describe 'broadcasting updates' do
context 'on a note' do
let(:note) { create(:note_on_issue) }
- let(:award_emoji) { build(:award_emoji, user: build(:user), awardable: note) }
+ let(:award_emoji) { build(:award_emoji, user: user, awardable: note) }
it 'broadcasts updates on the note when saved' do
- expect(note).to receive(:expire_etag_cache)
+ expect(note).to receive(:broadcast_noteable_notes_changed)
expect(note).to receive(:trigger_note_subscription_update)
award_emoji.save!
end
it 'broadcasts updates on the note when destroyed' do
- expect(note).to receive(:expire_etag_cache)
+ expect(note).to receive(:broadcast_noteable_notes_changed)
expect(note).to receive(:trigger_note_subscription_update)
award_emoji.destroy!
end
context 'when importing' do
- let(:award_emoji) { build(:award_emoji, user: build(:user), awardable: note, importing: true) }
+ let(:award_emoji) { build(:award_emoji, user: user, awardable: note, importing: true) }
it 'does not broadcast updates on the note when saved' do
- expect(note).not_to receive(:expire_etag_cache)
+ expect(note).not_to receive(:broadcast_noteable_notes_changed)
expect(note).not_to receive(:trigger_note_subscription_update)
award_emoji.save!
@@ -174,17 +175,17 @@ RSpec.describe AwardEmoji do
context 'on another awardable' do
let(:issue) { create(:issue) }
- let(:award_emoji) { build(:award_emoji, user: build(:user), awardable: issue) }
+ let(:award_emoji) { build(:award_emoji, user: user, awardable: issue) }
it 'does not broadcast updates on the issue when saved' do
- expect(issue).not_to receive(:expire_etag_cache)
+ expect(issue).not_to receive(:broadcast_noteable_notes_changed)
expect(issue).not_to receive(:trigger_note_subscription_update)
award_emoji.save!
end
it 'does not broadcast updates on the issue when destroyed' do
- expect(issue).not_to receive(:expire_etag_cache)
+ expect(issue).not_to receive(:broadcast_noteable_notes_changed)
expect(issue).not_to receive(:trigger_note_subscription_update)
award_emoji.destroy!
@@ -194,7 +195,7 @@ RSpec.describe AwardEmoji do
describe 'bumping updated at' do
let(:note) { create(:note_on_issue) }
- let(:award_emoji) { build(:award_emoji, user: build(:user), awardable: note) }
+ let(:award_emoji) { build(:award_emoji, user: user, awardable: note) }
it 'calls bump_updated_at on the note when saved' do
expect(note).to receive(:bump_updated_at)
@@ -210,7 +211,7 @@ RSpec.describe AwardEmoji do
context 'on another awardable' do
let(:issue) { create(:issue) }
- let(:award_emoji) { build(:award_emoji, user: build(:user), awardable: issue) }
+ let(:award_emoji) { build(:award_emoji, user: user, awardable: issue) }
it 'does not error out when saved' do
expect { award_emoji.save! }.not_to raise_error
@@ -248,8 +249,8 @@ RSpec.describe AwardEmoji do
describe 'updating upvotes_count' do
context 'on an issue' do
let(:issue) { create(:issue) }
- let(:upvote) { build(:award_emoji, :upvote, user: build(:user), awardable: issue) }
- let(:downvote) { build(:award_emoji, :downvote, user: build(:user), awardable: issue) }
+ let(:upvote) { build(:award_emoji, :upvote, user: user, awardable: issue) }
+ let(:downvote) { build(:award_emoji, :downvote, user: user, awardable: issue) }
it 'updates upvotes_count on the issue when saved' do
expect(issue).to receive(:update_column).with(:upvotes_count, 1).once
@@ -268,7 +269,7 @@ RSpec.describe AwardEmoji do
context 'on another awardable' do
let(:merge_request) { create(:merge_request) }
- let(:award_emoji) { build(:award_emoji, user: build(:user), awardable: merge_request) }
+ let(:award_emoji) { build(:award_emoji, user: user, awardable: merge_request) }
it 'does not update upvotes_count on the merge_request when saved' do
expect(merge_request).not_to receive(:update_column)
@@ -329,7 +330,7 @@ RSpec.describe AwardEmoji do
describe '#to_ability_name' do
let(:merge_request) { create(:merge_request) }
- let(:award_emoji) { build(:award_emoji, user: build(:user), awardable: merge_request) }
+ let(:award_emoji) { build(:award_emoji, user: user, awardable: merge_request) }
it 'returns correct ability name' do
expect(award_emoji.to_ability_name).to be('emoji')
diff --git a/spec/models/bulk_imports/entity_spec.rb b/spec/models/bulk_imports/entity_spec.rb
index da1eb12e9b8..3e98ba0973e 100644
--- a/spec/models/bulk_imports/entity_spec.rb
+++ b/spec/models/bulk_imports/entity_spec.rb
@@ -438,4 +438,12 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
end
end
end
+
+ describe '#source_version' do
+ subject { build(:bulk_import_entity, :group_entity) }
+
+ it 'pulls the source version from the associated BulkImport' do
+ expect(subject.source_version).to eq(subject.bulk_import.source_version_info)
+ end
+ end
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index a556244ae00..2a5d781edc7 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_default: :keep do
+ using RSpec::Parameterized::TableSyntax
include Ci::TemplateHelpers
include AfterNextHelpers
@@ -1493,8 +1494,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
describe 'state transition metrics' do
- using RSpec::Parameterized::TableSyntax
-
subject { build.send(event) }
where(:state, :report_count, :trait) do
@@ -2129,8 +2128,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
describe '#ref_slug' do
- using RSpec::Parameterized::TableSyntax
-
where(:ref, :slug) do
'master' | 'master'
'1-foo' | '1-foo'
@@ -2468,8 +2465,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
context 'when build has environment and user-provided variables' do
let(:expected_variables) do
predefined_variables.map { |variable| variable.fetch(:key) } +
- %w[YAML_VARIABLE CI_ENVIRONMENT_NAME CI_ENVIRONMENT_SLUG
- CI_ENVIRONMENT_ACTION CI_ENVIRONMENT_TIER CI_ENVIRONMENT_URL]
+ %w[YAML_VARIABLE CI_ENVIRONMENT_SLUG CI_ENVIRONMENT_URL]
end
before do
@@ -2478,8 +2474,14 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
build.yaml_variables = [{ key: 'YAML_VARIABLE', value: 'var', public: true }]
build.environment = 'staging'
- # CI_ENVIRONMENT_NAME is set in predefined_variables when job environment is provided
- predefined_variables.insert(18, { key: 'CI_ENVIRONMENT_NAME', value: 'staging', public: true, masked: false })
+ insert_expected_predefined_variables(
+ [
+ { key: 'CI_ENVIRONMENT_NAME', value: 'staging', public: true, masked: false },
+ { key: 'CI_ENVIRONMENT_ACTION', value: 'start', public: true, masked: false },
+ { key: 'CI_ENVIRONMENT_TIER', value: 'staging', public: true, masked: false },
+ { key: 'CI_ENVIRONMENT_URL', value: 'https://gitlab.com', public: true, masked: false }
+ ],
+ after: 'CI_NODE_TOTAL')
end
it 'matches explicit variables ordering' do
@@ -2550,6 +2552,11 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
expect(runner_vars).not_to include('CI_JOB_JWT_V2')
end
end
+
+ def insert_expected_predefined_variables(variables, after:)
+ index = predefined_variables.index { |h| h[:key] == after }
+ predefined_variables.insert(index + 1, *variables)
+ end
end
context 'when build has user' do
@@ -2583,34 +2590,28 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
context 'when build has an environment' do
- let(:environment_variables) do
+ let(:expected_environment_variables) do
[
{ key: 'CI_ENVIRONMENT_NAME', value: 'production', public: true, masked: false },
- { key: 'CI_ENVIRONMENT_SLUG', value: 'prod-slug', public: true, masked: false },
- { key: 'CI_ENVIRONMENT_TIER', value: 'production', public: true, masked: false }
+ { key: 'CI_ENVIRONMENT_ACTION', value: 'start', public: true, masked: false },
+ { key: 'CI_ENVIRONMENT_TIER', value: 'production', public: true, masked: false },
+ { key: 'CI_ENVIRONMENT_URL', value: 'http://prd.example.com/$CI_JOB_NAME', public: true, masked: false }
]
end
- let!(:environment) do
- create(
- :environment,
- project: build.project,
- name: 'production',
- slug: 'prod-slug',
- tier: 'production',
- external_url: ''
- )
- end
-
- before do
- build.update!(environment: 'production')
- end
+ let(:build) { create(:ci_build, :with_deployment, :deploy_to_production, ref: pipeline.ref, pipeline: pipeline) }
shared_examples 'containing environment variables' do
- it { is_expected.to include(*environment_variables) }
+ it { is_expected.to include(*expected_environment_variables) }
end
context 'when no URL was set' do
+ before do
+ build.update!(options: { environment: { url: nil } })
+ build.persisted_environment.update!(external_url: nil)
+ expected_environment_variables.delete_if { |var| var[:key] == 'CI_ENVIRONMENT_URL' }
+ end
+
it_behaves_like 'containing environment variables'
it 'does not have CI_ENVIRONMENT_URL' do
@@ -2620,12 +2621,26 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
end
+ context 'when environment is created dynamically' do
+ let(:build) { create(:ci_build, :with_deployment, :start_review_app, ref: pipeline.ref, pipeline: pipeline) }
+
+ let(:expected_environment_variables) do
+ [
+ { key: 'CI_ENVIRONMENT_NAME', value: 'review/master', public: true, masked: false },
+ { key: 'CI_ENVIRONMENT_ACTION', value: 'start', public: true, masked: false },
+ { key: 'CI_ENVIRONMENT_TIER', value: 'development', public: true, masked: false },
+ { key: 'CI_ENVIRONMENT_URL', value: 'http://staging.example.com/$CI_JOB_NAME', public: true, masked: false }
+ ]
+ end
+
+ it_behaves_like 'containing environment variables'
+ end
+
context 'when an URL was set' do
let(:url) { 'http://host/test' }
before do
- environment_variables <<
- { key: 'CI_ENVIRONMENT_URL', value: url, public: true, masked: false }
+ expected_environment_variables.find { |var| var[:key] == 'CI_ENVIRONMENT_URL' }[:value] = url
end
context 'when the URL was set from the job' do
@@ -2641,14 +2656,15 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
it_behaves_like 'containing environment variables'
 it 'puts $CI_ENVIRONMENT_URL last so all other variables are available when runners expand it' do
- expect(subject.to_runner_variables.last).to eq(environment_variables.last)
+ expect(subject.to_runner_variables.last).to eq(expected_environment_variables.last)
end
end
end
context 'when the URL was not set from the job, but environment' do
before do
- environment.update!(external_url: url)
+ build.update!(options: { environment: { url: nil } })
+ build.persisted_environment.update!(external_url: url)
end
it_behaves_like 'containing environment variables'
@@ -2682,10 +2698,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
context 'when environment scope matches build environment' do
- before do
- create(:environment, name: 'staging', project: project)
- build.update!(environment: 'staging')
- end
+ let(:build) { create(:ci_build, :with_deployment, :start_staging, ref: pipeline.ref, pipeline: pipeline) }
it { is_expected.to include(environment_specific_variable) }
end
@@ -4001,73 +4014,61 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
end
- describe 'pages deployments' do
- let_it_be(:build, reload: true) { create(:ci_build, pipeline: pipeline, user: user) }
+ describe '#pages_generator?', feature_category: :pages do
+ where(:name, :enabled, :result) do
+ 'foo' | false | false
+ 'pages' | false | false
+ 'pages:preview' | true | false
+ 'pages' | true | true
+ end
- context 'when job is "pages"' do
+ with_them do
before do
- build.name = 'pages'
+ stub_pages_setting(enabled: enabled)
+ build.update!(name: name)
end
- context 'when pages are enabled' do
- before do
- allow(Gitlab.config.pages).to receive_messages(enabled: true)
- end
-
- it 'is marked as pages generator' do
- expect(build).to be_pages_generator
- end
-
- context 'job succeeds' do
- it "calls pages worker" do
- expect(PagesWorker).to receive(:perform_async).with(:deploy, build.id)
+ subject { build.pages_generator? }
- build.success!
- end
- end
+ it { is_expected.to eq(result) }
+ end
+ end
- context 'job fails' do
- it "does not call pages worker" do
- expect(PagesWorker).not_to receive(:perform_async)
+ describe 'pages deployments', feature_category: :pages do
+ let_it_be(:build, reload: true) { create(:ci_build, name: 'pages', pipeline: pipeline, user: user) }
- build.drop!
- end
- end
+ context 'when pages are enabled' do
+ before do
+ stub_pages_setting(enabled: true)
end
- context 'when pages are disabled' do
- before do
- allow(Gitlab.config.pages).to receive_messages(enabled: false)
- end
+ context 'and job succeeds' do
+ it "calls pages worker" do
+ expect(PagesWorker).to receive(:perform_async).with(:deploy, build.id)
- it 'is not marked as pages generator' do
- expect(build).not_to be_pages_generator
+ build.success!
end
+ end
- context 'job succeeds' do
- it "does not call pages worker" do
- expect(PagesWorker).not_to receive(:perform_async)
+ context 'and job fails' do
+ it "does not call pages worker" do
+ expect(PagesWorker).not_to receive(:perform_async)
- build.success!
- end
+ build.drop!
end
end
end
- context 'when job is not "pages"' do
+ context 'when pages are disabled' do
before do
- build.name = 'other-job'
+ stub_pages_setting(enabled: false)
end
- it 'is not marked as pages generator' do
- expect(build).not_to be_pages_generator
- end
-
- context 'job succeeds' do
+ context 'and job succeeds' do
it "does not call pages worker" do
expect(PagesWorker).not_to receive(:perform_async)
- build.success
+ build.success!
end
end
end
@@ -5604,26 +5605,4 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
end
end
-
- describe 'routing table switch' do
- context 'with ff disabled' do
- before do
- stub_feature_flags(ci_partitioning_use_ci_builds_routing_table: false)
- end
-
- it 'uses the legacy table' do
- expect(described_class.table_name).to eq('ci_builds')
- end
- end
-
- context 'with ff enabled' do
- before do
- stub_feature_flags(ci_partitioning_use_ci_builds_routing_table: true)
- end
-
- it 'uses the routing table' do
- expect(described_class.table_name).to eq('p_ci_builds')
- end
- end
- end
end
diff --git a/spec/models/ci/catalog/listing_spec.rb b/spec/models/ci/catalog/listing_spec.rb
index 159b70d7f8f..f28a0e82bbd 100644
--- a/spec/models/ci/catalog/listing_spec.rb
+++ b/spec/models/ci/catalog/listing_spec.rb
@@ -34,9 +34,9 @@ RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do
end
context 'when the namespace has catalog resources' do
- let_it_be(:resource) { create(:catalog_resource, project: project_1) }
- let_it_be(:resource_2) { create(:catalog_resource, project: project_2) }
- let_it_be(:other_namespace_resource) { create(:catalog_resource, project: project_3) }
+ let_it_be(:resource) { create(:ci_catalog_resource, project: project_1) }
+ let_it_be(:resource_2) { create(:ci_catalog_resource, project: project_2) }
+ let_it_be(:other_namespace_resource) { create(:ci_catalog_resource, project: project_3) }
it 'contains only catalog resources for projects in that namespace' do
is_expected.to contain_exactly(resource, resource_2)
@@ -65,8 +65,8 @@ RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do
end
context 'when the user only has access to some projects in the namespace' do
- let!(:resource_1) { create(:catalog_resource, project: project_1) }
- let!(:resource_2) { create(:catalog_resource, project: project_2) }
+ let!(:resource_1) { create(:ci_catalog_resource, project: project_1) }
+ let!(:resource_2) { create(:ci_catalog_resource, project: project_2) }
before do
project_1.add_developer(user)
@@ -79,7 +79,7 @@ RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do
end
context 'when the user does not have access to the namespace' do
- let!(:resource) { create(:catalog_resource, project: project_1) }
+ let!(:resource) { create(:ci_catalog_resource, project: project_1) }
it { is_expected.to be_empty }
end
diff --git a/spec/models/ci/catalog/resource_spec.rb b/spec/models/ci/catalog/resource_spec.rb
index 4608e611ea1..082283bb7bc 100644
--- a/spec/models/ci/catalog/resource_spec.rb
+++ b/spec/models/ci/catalog/resource_spec.rb
@@ -6,9 +6,9 @@ RSpec.describe Ci::Catalog::Resource, feature_category: :pipeline_composition do
let_it_be(:project) { create(:project, name: 'A') }
let_it_be(:project_2) { build(:project, name: 'Z') }
let_it_be(:project_3) { build(:project, name: 'L') }
- let_it_be(:resource) { create(:catalog_resource, project: project) }
- let_it_be(:resource_2) { create(:catalog_resource, project: project_2) }
- let_it_be(:resource_3) { create(:catalog_resource, project: project_3) }
+ let_it_be(:resource) { create(:ci_catalog_resource, project: project) }
+ let_it_be(:resource_2) { create(:ci_catalog_resource, project: project_2) }
+ let_it_be(:resource_3) { create(:ci_catalog_resource, project: project_3) }
let_it_be(:release1) { create(:release, project: project, released_at: Time.zone.now - 2.days) }
let_it_be(:release2) { create(:release, project: project, released_at: Time.zone.now - 1.day) }
diff --git a/spec/models/ci/catalog/resources/component_spec.rb b/spec/models/ci/catalog/resources/component_spec.rb
index caaf76e610d..e8c92ce0788 100644
--- a/spec/models/ci/catalog/resources/component_spec.rb
+++ b/spec/models/ci/catalog/resources/component_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Ci::Catalog::Resources::Component, type: :model, feature_category: :pipeline_composition do
- let(:component) { build(:catalog_resource_component) }
+ let(:component) { build(:ci_catalog_resource_component) }
it { is_expected.to belong_to(:catalog_resource).class_name('Ci::Catalog::Resource') }
it { is_expected.to belong_to(:project) }
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index 56e69cc2b9c..a8e9d36a3a7 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -1216,13 +1216,12 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
before do
runner.tick_runner_queue
- runner.destroy!
end
it 'cleans up the queue' do
- Gitlab::Redis::Cache.with do |redis|
- expect(redis.get(queue_key)).to be_nil
- end
+ expect(Gitlab::Workhorse).to receive(:cleanup_key).with(queue_key)
+
+ runner.destroy!
end
end
end
diff --git a/spec/models/clusters/agent_token_spec.rb b/spec/models/clusters/agent_token_spec.rb
index 41f8215b713..bc158fc9117 100644
--- a/spec/models/clusters/agent_token_spec.rb
+++ b/spec/models/clusters/agent_token_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Clusters::AgentToken do
+RSpec.describe Clusters::AgentToken, feature_category: :deployment_management do
it { is_expected.to belong_to(:agent).class_name('Clusters::Agent').required }
it { is_expected.to belong_to(:created_by_user).class_name('User').optional }
it { is_expected.to validate_length_of(:description).is_at_most(1024) }
@@ -12,8 +12,9 @@ RSpec.describe Clusters::AgentToken do
it_behaves_like 'having unique enum values'
describe 'scopes' do
+ let_it_be(:agent) { create(:cluster_agent) }
+
describe '.order_last_used_at_desc' do
- let_it_be(:agent) { create(:cluster_agent) }
let_it_be(:token_1) { create(:cluster_agent_token, agent: agent, last_used_at: 7.days.ago) }
let_it_be(:token_2) { create(:cluster_agent_token, agent: agent, last_used_at: nil) }
let_it_be(:token_3) { create(:cluster_agent_token, agent: agent, last_used_at: 2.days.ago) }
@@ -25,8 +26,8 @@ RSpec.describe Clusters::AgentToken do
end
describe 'status-related scopes' do
- let!(:active_token) { create(:cluster_agent_token) }
- let!(:revoked_token) { create(:cluster_agent_token, :revoked) }
+ let!(:active_token) { create(:cluster_agent_token, agent: agent) }
+ let!(:revoked_token) { create(:cluster_agent_token, :revoked, agent: agent) }
describe '.with_status' do
context 'when filtering by active status' do
@@ -48,6 +49,29 @@ RSpec.describe Clusters::AgentToken do
it { is_expected.to contain_exactly(active_token) }
end
end
+
+ describe '.connected' do
+ let!(:token) { create(:cluster_agent_token, agent: agent, status: status, last_used_at: last_used_at) }
+
+ let(:status) { :active }
+ let(:last_used_at) { 2.minutes.ago }
+
+ subject { described_class.connected }
+
+ it { is_expected.to contain_exactly(token) }
+
+ context 'when the token has not been used recently' do
+ let(:last_used_at) { 2.hours.ago }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'when the token is not active' do
+ let(:status) { :revoked }
+
+ it { is_expected.to be_empty }
+ end
+ end
end
describe '#token' do
@@ -64,6 +88,13 @@ RSpec.describe Clusters::AgentToken do
agent_token = create(:cluster_agent_token)
expect(agent_token.token.length).to be >= 50
end
+
+ it 'has a prefix' do
+ agent_token = build(:cluster_agent_token, token_encrypted: nil)
+ agent_token.save!
+
+ expect(agent_token.token).to start_with described_class::TOKEN_PREFIX
+ end
end
describe '#to_ability_name' do
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index 9ce9f0e13b5..e9257b08bca 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -44,6 +44,24 @@ RSpec.describe CommitStatus, feature_category: :continuous_integration do
it { is_expected.not_to be_retried }
it { expect(described_class.primary_key).to eq('id') }
+ describe '.switch_table_names' do
+ before do
+ stub_env('USE_CI_BUILDS_ROUTING_TABLE', flag_value)
+ end
+
+ context 'with the env flag disabled' do
+ let(:flag_value) { 'false' }
+
+ it { expect(described_class.switch_table_names).to eq(:ci_builds) }
+ end
+
+ context 'with the env flag enabled' do
+ let(:flag_value) { 'true' }
+
+ it { expect(described_class.switch_table_names).to eq(:p_ci_builds) }
+ end
+ end
+
describe '#author' do
subject { commit_status.author }
@@ -1067,26 +1085,4 @@ RSpec.describe CommitStatus, feature_category: :continuous_integration do
it_behaves_like 'having enum with nil value'
end
-
- describe 'routing table switch' do
- context 'with ff disabled' do
- before do
- stub_feature_flags(ci_partitioning_use_ci_builds_routing_table: false)
- end
-
- it 'uses the legacy table' do
- expect(described_class.table_name).to eq('ci_builds')
- end
- end
-
- context 'with ff enabled' do
- before do
- stub_feature_flags(ci_partitioning_use_ci_builds_routing_table: true)
- end
-
- it 'uses the routing table' do
- expect(described_class.table_name).to eq('p_ci_builds')
- end
- end
- end
end
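The new `.switch_table_names` examples replace the feature-flag-based routing-table specs with an environment-variable toggle. A minimal sketch of the class method being asserted is below; the real implementation may differ (for example by using Gitlab::Utils.to_boolean), so treat this as an illustration of the expected behaviour only.

# Sketch of an env-var toggle consistent with the expectations above; not the real code.
def self.switch_table_names
  ENV['USE_CI_BUILDS_ROUTING_TABLE'] == 'true' ? :p_ci_builds : :ci_builds
end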
diff --git a/spec/models/concerns/as_cte_spec.rb b/spec/models/concerns/as_cte_spec.rb
index 06d9650ec46..c92d46ef25f 100644
--- a/spec/models/concerns/as_cte_spec.rb
+++ b/spec/models/concerns/as_cte_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe AsCte do
it { expect(subject.query).to eq(query) }
it { expect(subject.table.name).to eq(name.to_s) }
- context 'with materialized parameter', if: Gitlab::Database::AsWithMaterialized.materialized_supported? do
+ context 'with materialized parameter' do
subject { query.as_cte(name, materialized: materialized).to_arel.to_sql }
context 'as true' do
diff --git a/spec/models/concerns/each_batch_spec.rb b/spec/models/concerns/each_batch_spec.rb
index 75c5cac899b..f274f8c96ff 100644
--- a/spec/models/concerns/each_batch_spec.rb
+++ b/spec/models/concerns/each_batch_spec.rb
@@ -75,6 +75,26 @@ RSpec.describe EachBatch do
expect(ids).to eq(ids.sort.reverse)
end
+ shared_examples 'preloaded batch' do |method|
+ it 'respects preloading without N+1 queries' do
+ one, two = User.first(2)
+
+ create(:key, user: one)
+
+ scope = User.send(method, :keys)
+
+ control = ActiveRecord::QueryRecorder.new { scope.each_batch(of: 5) { |batch| batch.each(&:keys) } }
+
+ create(:key, user: one)
+ create(:key, user: two)
+
+ expect { scope.each_batch(of: 5) { |batch| batch.each(&:keys) } }.not_to exceed_query_limit(control)
+ end
+ end
+
+ it_behaves_like 'preloaded batch', :preload
+ it_behaves_like 'preloaded batch', :includes
+
describe 'current scope' do
let(:entry) { create(:user, sign_in_count: 1) }
let(:ids_with_new_relation) { model.where(id: entry.id).pluck(:id) }
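The shared example added here checks that `preload`/`includes` scopes keep their eager loading across batches, so iterating inside each batch does not trigger N+1 queries. As a usage illustration, assuming the standard EachBatch API, a preloaded scope is iterated like this:

# Usage sketch: each batch yielded by each_batch retains the preloaded :keys
# association, so touching user.keys inside the loop adds no extra queries.
User.preload(:keys).each_batch(of: 100) do |batch|
  batch.each { |user| user.keys.size }
end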
diff --git a/spec/models/concerns/expirable_spec.rb b/spec/models/concerns/expirable_spec.rb
index 78fe265a6bb..f7f1ce611b4 100644
--- a/spec/models/concerns/expirable_spec.rb
+++ b/spec/models/concerns/expirable_spec.rb
@@ -17,8 +17,11 @@ RSpec.describe Expirable do
it 'scopes the query when multiple models are expirable' do
expired_access_token = create(:personal_access_token, :expired, user: no_expire.user)
- expect(PersonalAccessToken.expired.joins(user: :members)).to match_array([expired_access_token])
- expect(PersonalAccessToken.joins(user: :members).merge(ProjectMember.expired)).to eq([])
+ ::Gitlab::Database.allow_cross_joins_across_databases(url:
+ 'https://gitlab.com/gitlab-org/gitlab/-/issues/422405') do
+ expect(PersonalAccessToken.expired.joins(user: :members)).to match_array([expired_access_token])
+ expect(PersonalAccessToken.joins(user: :members).merge(ProjectMember.expired)).to eq([])
+ end
end
it 'works with a timestamp expired_at field', time_travel_to: '2022-03-14T11:30:00Z' do
diff --git a/spec/models/concerns/has_user_type_spec.rb b/spec/models/concerns/has_user_type_spec.rb
index 49c3d11ed6b..54614ec2b21 100644
--- a/spec/models/concerns/has_user_type_spec.rb
+++ b/spec/models/concerns/has_user_type_spec.rb
@@ -3,6 +3,13 @@
require 'spec_helper'
RSpec.describe User, feature_category: :system_access do
+ User::USER_TYPES.keys.each do |type| # rubocop:disable RSpec/UselessDynamicDefinition
+ let_it_be(type) { create(:user, username: type, user_type: type) }
+ end
+ let(:bots) { User::BOT_USER_TYPES.map { |type| public_send(type) } }
+ let(:non_internal) { User::NON_INTERNAL_USER_TYPES.map { |type| public_send(type) } }
+ let(:everyone) { User::USER_TYPES.keys.map { |type| public_send(type) } }
+
specify 'types consistency checks', :aggregate_failures do
expect(described_class::USER_TYPES.keys)
.to match_array(%w[human ghost alert_bot project_bot support_bot service_user security_bot
@@ -20,13 +27,6 @@ RSpec.describe User, feature_category: :system_access do
end
describe 'scopes & predicates' do
- User::USER_TYPES.keys.each do |type| # rubocop:disable RSpec/UselessDynamicDefinition
- let_it_be(type) { create(:user, username: type, user_type: type) }
- end
- let(:bots) { User::BOT_USER_TYPES.map { |type| public_send(type) } }
- let(:non_internal) { User::NON_INTERNAL_USER_TYPES.map { |type| public_send(type) } }
- let(:everyone) { User::USER_TYPES.keys.map { |type| public_send(type) } }
-
describe '.bots' do
it 'includes all bots' do
expect(described_class.bots).to match_array(bots)
@@ -118,5 +118,71 @@ RSpec.describe User, feature_category: :system_access do
end
end
end
+
+ describe '#resource_bot_resource' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:group2) { create(:group) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:project2) { create(:project) }
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:bot_user, :member_of, :owning_resource) do
+ ref(:human) | [ref(:group)] | nil
+ ref(:project_bot) | [] | nil # orphaned project bot
+ ref(:project_bot) | [ref(:group)] | ref(:group)
+ ref(:project_bot) | [ref(:project)] | ref(:project)
+
+ # Project bot can only be added to one group or project.
+ # That first group or project becomes the owning resource.
+ ref(:project_bot) | [ref(:group), ref(:project)] | ref(:group)
+ ref(:project_bot) | [ref(:group), ref(:group2)] | ref(:group)
+ ref(:project_bot) | [ref(:project), ref(:group)] | ref(:project)
+ ref(:project_bot) | [ref(:project), ref(:project2)] | ref(:project)
+ end
+
+ with_them do
+ before do
+ member_of.each { |resource| resource.add_developer(bot_user) }
+ end
+
+ it 'returns the owning resource' do
+ expect(bot_user.resource_bot_resource).to eq(owning_resource)
+ end
+ end
+ end
+
+  describe '#resource_bot_owners' do
+ it 'returns nil when user is not a project bot' do
+ expect(human.resource_bot_resource).to be_nil
+ end
+
+ context 'when the user is a project bot' do
+ let(:user1) { create(:user) }
+ let(:user2) { create(:user) }
+
+ subject(:owners) { project_bot.resource_bot_owners }
+
+ it 'returns an empty array when there is no owning resource' do
+ expect(owners).to match_array([])
+ end
+
+ it 'returns group owners when owned by a group' do
+ group = create(:group)
+ group.add_developer(project_bot)
+ group.add_owner(user1)
+
+ expect(owners).to match_array([user1])
+ end
+
+ it 'returns project maintainers when owned by a project' do
+ project = create(:project)
+ project.add_developer(project_bot)
+ project.add_maintainer(user2)
+
+ expect(owners).to match_array([user2])
+ end
+ end
+ end
end
end
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index e4af778b967..705f8f46a90 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -626,6 +626,21 @@ RSpec.describe Issuable, feature_category: :team_planning do
end
end
+ describe "#importing_or_transitioning?" do
+ let(:merge_request) { build(:merge_request, transitioning: transitioning, importing: importing) }
+
+ where(:transitioning, :importing, :result) do
+ true | false | true
+ false | true | true
+ true | true | true
+ false | false | false
+ end
+
+ with_them do
+ it { expect(merge_request.importing_or_transitioning?).to eq(result) }
+ end
+ end
+
describe '#labels_array' do
let(:project) { create(:project) }
let(:bug) { create(:label, project: project, title: 'bug') }
@@ -1023,6 +1038,22 @@ RSpec.describe Issuable, feature_category: :team_planning do
end
end
+ describe '#supports_lock_on_merge?' do
+ where(:issuable_type, :supports_lock_on_merge) do
+ :issue | false
+ :merge_request | false
+ :incident | false
+ end
+
+ with_them do
+ let(:issuable) { build_stubbed(issuable_type) }
+
+ subject { issuable.supports_lock_on_merge? }
+
+ it { is_expected.to eq(supports_lock_on_merge) }
+ end
+ end
+
describe '#severity' do
subject { issuable.severity }
diff --git a/spec/models/concerns/prometheus_adapter_spec.rb b/spec/models/concerns/prometheus_adapter_spec.rb
index 31ab8c23a84..a3f2e99f3da 100644
--- a/spec/models/concerns/prometheus_adapter_spec.rb
+++ b/spec/models/concerns/prometheus_adapter_spec.rb
@@ -100,28 +100,6 @@ RSpec.describe PrometheusAdapter, :use_clean_rails_memory_store_caching do
end
end
end
-
- describe 'additional_metrics' do
- let(:additional_metrics_environment_query) { Gitlab::Prometheus::Queries::AdditionalMetricsEnvironmentQuery }
- let(:environment) { build_stubbed(:environment, slug: 'env-slug') }
- let(:time_window) { [1552642245.067, 1552642095.831] }
-
- around do |example|
- freeze_time { example.run }
- end
-
- context 'with valid data' do
- subject { integration.query(:additional_metrics_environment, environment, *time_window) }
-
- before do
- stub_reactive_cache(integration, prometheus_data, additional_metrics_environment_query, environment.id, *time_window)
- end
-
- it 'returns reactive data' do
- expect(subject).to eq(prometheus_data)
- end
- end
- end
end
describe '#calculate_reactive_cache' do
diff --git a/spec/models/concerns/require_email_verification_spec.rb b/spec/models/concerns/require_email_verification_spec.rb
index 1fb54e4276f..63312d4e1f1 100644
--- a/spec/models/concerns/require_email_verification_spec.rb
+++ b/spec/models/concerns/require_email_verification_spec.rb
@@ -51,7 +51,7 @@ RSpec.describe RequireEmailVerification, feature_category: :insider_threat do
context 'when failed_attempts is LT overridden amount' do
before do
- instance.failed_attempts = 5
+ instance.failed_attempts = 2
end
it { is_expected.to eq(false) }
diff --git a/spec/models/concerns/resolvable_discussion_spec.rb b/spec/models/concerns/resolvable_discussion_spec.rb
index 1423b56fa5d..e83a3d3417e 100644
--- a/spec/models/concerns/resolvable_discussion_spec.rb
+++ b/spec/models/concerns/resolvable_discussion_spec.rb
@@ -446,8 +446,8 @@ RSpec.describe Discussion, ResolvableDiscussion, feature_category: :code_review_
expect(subject.resolved?).to be true
end
- it "expires the etag cache of the noteable" do
- expect(subject.noteable).to receive(:expire_note_etag_cache)
+ it "broadcasts note change of the noteable" do
+ expect(subject.noteable).to receive(:broadcast_notes_changed)
subject.resolve!(current_user)
end
@@ -532,8 +532,8 @@ RSpec.describe Discussion, ResolvableDiscussion, feature_category: :code_review_
expect(subject.resolved?).to be false
end
- it "expires the etag cache of the noteable" do
- expect(subject.noteable).to receive(:expire_note_etag_cache)
+ it "broadcasts note change of the noteable" do
+ expect(subject.noteable).to receive(:broadcast_notes_changed)
subject.unresolve!
end
diff --git a/spec/models/concerns/routable_spec.rb b/spec/models/concerns/routable_spec.rb
index 0bbe3dea812..2b6f8535743 100644
--- a/spec/models/concerns/routable_spec.rb
+++ b/spec/models/concerns/routable_spec.rb
@@ -3,24 +3,20 @@
require 'spec_helper'
RSpec.shared_examples 'routable resource' do
- describe '.find_by_full_path', :aggregate_failures do
+ shared_examples_for '.find_by_full_path' do
it 'finds records by their full path' do
expect(described_class.find_by_full_path(record.full_path)).to eq(record)
expect(described_class.find_by_full_path(record.full_path.upcase)).to eq(record)
end
- it 'returns nil for unknown paths' do
- expect(described_class.find_by_full_path('unknown')).to be_nil
- end
+ it 'checks if `optimize_routable` is enabled only once' do
+ expect(Routable).to receive(:optimize_routable_enabled?).once
- it 'includes route information when loading a record' do
- control_count = ActiveRecord::QueryRecorder.new do
- described_class.find_by_full_path(record.full_path)
- end.count
+ described_class.find_by_full_path(record.full_path)
+ end
- expect do
- described_class.find_by_full_path(record.full_path).route
- end.not_to exceed_all_query_limit(control_count)
+ it 'returns nil for unknown paths' do
+ expect(described_class.find_by_full_path('unknown')).to be_nil
end
context 'when path is a negative number' do
@@ -56,6 +52,26 @@ RSpec.shared_examples 'routable resource' do
end
end
end
+
+ it_behaves_like '.find_by_full_path', :aggregate_failures
+
+ context 'when the `optimize_routable` feature flag is turned OFF' do
+ before do
+ stub_feature_flags(optimize_routable: false)
+ end
+
+ it_behaves_like '.find_by_full_path', :aggregate_failures
+
+ it 'includes route information when loading a record' do
+ control_count = ActiveRecord::QueryRecorder.new do
+ described_class.find_by_full_path(record.full_path)
+ end.count
+
+ expect do
+ described_class.find_by_full_path(record.full_path).route
+ end.not_to exceed_all_query_limit(control_count)
+ end
+ end
end
RSpec.shared_examples 'routable resource with parent' do
@@ -93,7 +109,7 @@ RSpec.shared_examples 'routable resource with parent' do
end
end
-RSpec.describe Group, 'Routable', :with_clean_rails_cache do
+RSpec.describe Group, 'Routable', :with_clean_rails_cache, feature_category: :groups_and_projects do
let_it_be_with_reload(:group) { create(:group, name: 'foo') }
let_it_be(:nested_group) { create(:group, parent: group) }
@@ -223,7 +239,7 @@ RSpec.describe Group, 'Routable', :with_clean_rails_cache do
end
end
-RSpec.describe Project, 'Routable', :with_clean_rails_cache do
+RSpec.describe Project, 'Routable', :with_clean_rails_cache, feature_category: :groups_and_projects do
let_it_be(:namespace) { create(:namespace) }
let_it_be(:project) { create(:project, namespace: namespace) }
@@ -235,9 +251,20 @@ RSpec.describe Project, 'Routable', :with_clean_rails_cache do
expect(project.route).not_to be_nil
expect(project.route.namespace).to eq(project.project_namespace)
end
+
+ describe '.find_by_full_path' do
+ it 'does not return a record if the sources are different, but the IDs match' do
+ group = create(:group, id: 1992)
+ project = create(:project, id: 1992)
+
+ record = described_class.where(id: project.id).find_by_full_path(group.full_path)
+
+ expect(record).to be_nil
+ end
+ end
end
-RSpec.describe Namespaces::ProjectNamespace, 'Routable', :with_clean_rails_cache do
+RSpec.describe Namespaces::ProjectNamespace, 'Routable', :with_clean_rails_cache, feature_category: :groups_and_projects do
let_it_be(:group) { create(:group) }
it 'skips route creation for the resource' do
@@ -247,6 +274,22 @@ RSpec.describe Namespaces::ProjectNamespace, 'Routable', :with_clean_rails_cache
end
end
+RSpec.describe Routable, feature_category: :groups_and_projects do
+ describe '.optimize_routable_enabled?' do
+ subject { described_class.optimize_routable_enabled? }
+
+ it { is_expected.to eq(true) }
+
+ context 'when the `optimize_routable` feature flag is turned OFF' do
+ before do
+ stub_feature_flags(optimize_routable: false)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
+end
+
def forcibly_hit_cached_lookup(record, method)
stub_feature_flags(cached_route_lookups: true)
expect(record).to receive(:persisted?).and_return(true)
diff --git a/spec/models/concerns/transitionable_spec.rb b/spec/models/concerns/transitionable_spec.rb
new file mode 100644
index 00000000000..b80d363ef78
--- /dev/null
+++ b/spec/models/concerns/transitionable_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Transitionable, feature_category: :code_review_workflow do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:klass) do
+ Class.new do
+ include Transitionable
+
+ def initialize(transitioning)
+ @transitioning = transitioning
+ end
+
+ def project
+ Project.new
+ end
+ end
+ end
+
+ let(:object) { klass.new(transitioning) }
+
+ describe '#transitioning?' do
+ where(:transitioning, :feature_flag, :result) do
+ true | true | true
+ false | false | false
+ true | false | false
+ false | true | false
+ end
+
+ with_them do
+ before do
+ stub_feature_flags(skip_validations_during_transitions: feature_flag)
+ end
+
+ it { expect(object.transitioning?).to eq(result) }
+ end
+ end
+end
diff --git a/spec/models/deploy_key_spec.rb b/spec/models/deploy_key_spec.rb
index 528b36babc6..2959556f5ae 100644
--- a/spec/models/deploy_key_spec.rb
+++ b/spec/models/deploy_key_spec.rb
@@ -67,7 +67,7 @@ RSpec.describe DeployKey, :mailer do
context 'when user is not set' do
it 'returns the ghost user' do
- expect(deploy_key.user).to eq(User.ghost)
+ expect(deploy_key.user).to eq(Users::Internal.ghost)
end
end
end
diff --git a/spec/models/design_management/design_spec.rb b/spec/models/design_management/design_spec.rb
index 72c0d1d1a64..98e5399f737 100644
--- a/spec/models/design_management/design_spec.rb
+++ b/spec/models/design_management/design_spec.rb
@@ -467,16 +467,6 @@ RSpec.describe DesignManagement::Design, feature_category: :design_management do
end
end
- describe '#note_etag_key' do
- it 'returns a correct etag key' do
- design = design1
-
- expect(design.note_etag_key).to eq(
- ::Gitlab::Routing.url_helpers.designs_project_issue_path(design.project, design.issue, { vueroute: design.filename })
- )
- end
- end
-
describe '#user_notes_count', :use_clean_rails_memory_store_caching do
# Note: Cache invalidation tests are in `design_user_notes_count_service_spec.rb`
it 'returns a count of user-generated notes' do
diff --git a/spec/models/doorkeeper/application_spec.rb b/spec/models/doorkeeper/application_spec.rb
new file mode 100644
index 00000000000..85b28346dfa
--- /dev/null
+++ b/spec/models/doorkeeper/application_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Doorkeeper::Application, type: :model, feature_category: :system_access do
+ let(:application) { create(:oauth_application) }
+
+ it 'uses a prefixed secret' do
+ expect(application.plaintext_secret).to match(/gloas-\h{64}/)
+ end
+end
diff --git a/spec/models/environment_status_spec.rb b/spec/models/environment_status_spec.rb
index 9814eed8b45..eeb8583251d 100644
--- a/spec/models/environment_status_spec.rb
+++ b/spec/models/environment_status_spec.rb
@@ -270,18 +270,6 @@ RSpec.describe EnvironmentStatus do
context 'when environment is stopped' do
before do
- stub_feature_flags(review_apps_redeploy_mr_widget: false)
- environment.stop!
- end
-
- it 'does not return environment status' do
- expect(subject.count).to eq(0)
- end
- end
-
- context 'when environment is stopped and review_apps_redeploy_mr_widget is turned on' do
- before do
- stub_feature_flags(review_apps_redeploy_mr_widget: true)
environment.stop!
end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 23e72f6663a..3f671fc3f70 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -2666,14 +2666,16 @@ RSpec.describe Group, feature_category: :groups_and_projects do
let(:group) { build(:group) }
context 'the group has owners' do
- before do
- group.add_owner(create(:user))
- group.add_owner(create(:user))
- end
-
it 'is the first owner' do
+ user_1 = create(:user)
+ user_2 = create(:user)
+ group.add_owner(user_2)
+ group.add_owner(user_1)
+
+ # The senior-most user (not member) who is an OWNER in the group
+ # is always treated as the first owner
expect(group.first_owner)
- .to eq(group.owners.first)
+ .to eq(user_1)
.and be_a(User)
end
end
@@ -3307,6 +3309,13 @@ RSpec.describe Group, feature_category: :groups_and_projects do
end
end
+ describe '#supports_lock_on_merge?' do
+ it_behaves_like 'checks self and root ancestor feature flag' do
+ let(:feature_flag) { :enforce_locked_labels_on_merge }
+ let(:feature_flag_method) { :supports_lock_on_merge? }
+ end
+ end
+
describe 'group shares' do
let!(:sub_group) { create(:group, parent: group) }
let!(:sub_sub_group) { create(:group, parent: sub_group) }
diff --git a/spec/models/hooks/web_hook_log_spec.rb b/spec/models/hooks/web_hook_log_spec.rb
index 4b88b3b3e65..e9a2635bf28 100644
--- a/spec/models/hooks/web_hook_log_spec.rb
+++ b/spec/models/hooks/web_hook_log_spec.rb
@@ -46,6 +46,20 @@ RSpec.describe WebHookLog, feature_category: :webhooks do
end
end
+ context 'with basic auth credentials and masked components' do
+ let(:web_hook_log) { build(:web_hook_log, web_hook: hook, url: 'http://test:123@{domain}.com:{port}') }
+
+ subject { web_hook_log.save! }
+
+ it { is_expected.to eq(true) }
+
+ it 'obfuscates the basic auth credentials' do
+ subject
+
+ expect(web_hook_log.url).to eq('http://*****:*****@{domain}.com:{port}')
+ end
+ end
+
context "with users' emails" do
let(:author) { build(:user) }
let(:user) { build(:user) }
@@ -235,4 +249,28 @@ RSpec.describe WebHookLog, feature_category: :webhooks do
it { expect(web_hook_log.request_headers).to eq(expected_headers) }
end
end
+
+ describe '#url_current?' do
+ let(:url) { 'example@gitlab.com' }
+
+ let(:hook) { build(:project_hook, url: url) }
+ let(:web_hook_log) do
+ build(
+ :web_hook_log,
+ web_hook: hook,
+ interpolated_url: hook.url,
+ url_hash: Gitlab::CryptoHelper.sha256('example@gitlab.com')
+ )
+ end
+
+ context 'with matching url' do
+ it { expect(web_hook_log.url_current?).to be_truthy }
+ end
+
+ context 'with different url' do
+ let(:url) { 'example@gitlab2.com' }
+
+ it { expect(web_hook_log.url_current?).to be_falsey }
+ end
+ end
end
diff --git a/spec/models/integration_spec.rb b/spec/models/integration_spec.rb
index 0b41b46ae3d..67e12092e1a 100644
--- a/spec/models/integration_spec.rb
+++ b/spec/models/integration_spec.rb
@@ -1041,9 +1041,9 @@ RSpec.describe Integration, feature_category: :integrations do
it 'returns all fields with type `password`' do
allow(subject).to receive(:fields).and_return(
[
- { name: 'password', type: :password },
- { name: 'secret', type: :password },
- { name: 'public', type: :text }
+ Integrations::Field.new(name: 'password', integration_class: subject.class, type: :password),
+ Integrations::Field.new(name: 'secret', integration_class: subject.class, type: :password),
+ Integrations::Field.new(name: 'public', integration_class: subject.class, type: :text)
])
expect(subject.secret_fields).to match_array(%w[password secret])
diff --git a/spec/models/integrations/base_chat_notification_spec.rb b/spec/models/integrations/base_chat_notification_spec.rb
index 675035095c5..497f2f1e7c9 100644
--- a/spec/models/integrations/base_chat_notification_spec.rb
+++ b/spec/models/integrations/base_chat_notification_spec.rb
@@ -85,7 +85,7 @@ RSpec.describe Integrations::BaseChatNotification, feature_category: :integratio
context 'when webhook is not required' do
it 'returns true' do
- allow(chat_integration).to receive(:requires_webhook?).and_return(false)
+ allow(chat_integration.class).to receive(:requires_webhook?).and_return(false)
expect(chat_integration).to receive(:notify).and_return(true)
expect(chat_integration.execute(data)).to be true
@@ -347,6 +347,12 @@ RSpec.describe Integrations::BaseChatNotification, feature_category: :integratio
end
end
+ describe '#help' do
+ it 'raises an error' do
+ expect { subject.help }.to raise_error(NotImplementedError)
+ end
+ end
+
describe '#event_channel_name' do
it 'returns the channel field name for the given event' do
expect(subject.event_channel_name(:event)).to eq('event_channel')
@@ -364,4 +370,17 @@ RSpec.describe Integrations::BaseChatNotification, feature_category: :integratio
expect { subject.event_channel_value(:foo) }.to raise_error(NoMethodError)
end
end
+
+ describe '#api_field_names' do
+ context 'when channels are masked' do
+ let(:project) { build(:project) }
+ let(:integration) { build(:discord_integration, project: project, webhook: 'https://discord.com/api/') }
+
+ it 'does not include channel properties', :aggregate_failures do
+ integration.event_channel_names.each do |field|
+ expect(integration.api_field_names).not_to include(field)
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/integrations/chat_message/deployment_message_spec.rb b/spec/models/integrations/chat_message/deployment_message_spec.rb
index d16c191bd08..630ae902331 100644
--- a/spec/models/integrations/chat_message/deployment_message_spec.rb
+++ b/spec/models/integrations/chat_message/deployment_message_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Integrations::ChatMessage::DeploymentMessage do
+RSpec.describe Integrations::ChatMessage::DeploymentMessage, feature_category: :integrations do
subject { described_class.new(args) }
let_it_be(:user) { create(:user, name: 'John Smith', username: 'smith') }
@@ -103,15 +103,33 @@ RSpec.describe Integrations::ChatMessage::DeploymentMessage do
}.merge(params)
end
- it 'returns attachments with the data returned by the deployment data builder' do
- job_url = Gitlab::Routing.url_helpers.project_job_url(project, ci_build)
- commit_url = Gitlab::UrlBuilder.build(deployment.commit)
- user_url = Gitlab::Routing.url_helpers.user_url(user)
+ context 'without markdown' do
+ it 'returns attachments with the data returned by the deployment data builder' do
+ job_url = Gitlab::Routing.url_helpers.project_job_url(project, ci_build)
+ commit_url = Gitlab::UrlBuilder.build(deployment.commit)
+ user_url = Gitlab::Routing.url_helpers.user_url(user)
+
+ expect(subject.attachments).to eq([{
+ text: "<#{project.web_url}|myspace/myproject> with job <#{job_url}|##{ci_build.id}> by <#{user_url}|John Smith (smith)>\n<#{commit_url}|#{deployment.short_sha}>: #{commit.title}",
+ color: "good"
+ }])
+ end
+ end
- expect(subject.attachments).to eq([{
- text: "[myspace/myproject](#{project.web_url}) with job [##{ci_build.id}](#{job_url}) by [John Smith (smith)](#{user_url})\n[#{deployment.short_sha}](#{commit_url}): #{commit.title}",
- color: "good"
- }])
+ context 'with markdown' do
+ before do
+ args.merge!(markdown: true)
+ end
+
+ it 'returns attachments with the data returned by the deployment data builder' do
+ job_url = Gitlab::Routing.url_helpers.project_job_url(project, ci_build)
+ commit_url = Gitlab::UrlBuilder.build(deployment.commit)
+ user_url = Gitlab::Routing.url_helpers.user_url(user)
+
+ expect(subject.attachments).to eq(
+ "[myspace/myproject](#{project.web_url}) with job [##{ci_build.id}](#{job_url}) by [John Smith (smith)](#{user_url})\n[#{deployment.short_sha}](#{commit_url}): #{commit.title}"
+ )
+ end
end
it 'returns attachments for a failed deployment' do
diff --git a/spec/models/integrations/confluence_spec.rb b/spec/models/integrations/confluence_spec.rb
index d267e4a71c2..a34b55c3c6b 100644
--- a/spec/models/integrations/confluence_spec.rb
+++ b/spec/models/integrations/confluence_spec.rb
@@ -63,6 +63,12 @@ RSpec.describe Integrations::Confluence, feature_category: :integrations do
end
end
+ describe '#avatar_url' do
+ it 'returns the avatar image path' do
+ expect(subject.avatar_url).to eq(ActionController::Base.helpers.image_path('confluence.svg'))
+ end
+ end
+
describe 'Caching has_confluence on project_settings' do
subject { project.project_setting.has_confluence? }
diff --git a/spec/models/integrations/mattermost_spec.rb b/spec/models/integrations/mattermost_spec.rb
index f7702846b6c..224bc4acdda 100644
--- a/spec/models/integrations/mattermost_spec.rb
+++ b/spec/models/integrations/mattermost_spec.rb
@@ -2,6 +2,6 @@
require 'spec_helper'
-RSpec.describe Integrations::Mattermost do
+RSpec.describe Integrations::Mattermost, feature_category: :integrations do
it_behaves_like Integrations::SlackMattermostNotifier, "Mattermost"
end
diff --git a/spec/models/integrations/prometheus_spec.rb b/spec/models/integrations/prometheus_spec.rb
index da43d851b31..4a998efe665 100644
--- a/spec/models/integrations/prometheus_spec.rb
+++ b/spec/models/integrations/prometheus_spec.rb
@@ -422,6 +422,34 @@ RSpec.describe Integrations::Prometheus, :use_clean_rails_memory_store_caching,
end
end
+ describe '#sync_http_integration after_save callback' do
+ context 'with corresponding HTTP integration' do
+ let_it_be_with_reload(:http_integration) { create(:alert_management_prometheus_integration, :legacy, project: project) }
+
+ it 'syncs the attribute' do
+ expect { integration.update!(manual_configuration: false) }
+ .to change { http_integration.reload.active }
+ .from(true).to(false)
+ end
+
+ context 'when changing a different attribute' do
+ it 'does not sync the attribute or execute extra queries' do
+ expect { integration.update!(api_url: 'https://any.url') }
+ .to issue_fewer_queries_than { integration.update!(manual_configuration: false) }
+ end
+ end
+ end
+
+ context 'without corresponding HTTP integration' do
+ let_it_be(:other_http_integration) { create(:alert_management_prometheus_integration, project: project) }
+
+ it 'does not sync the attribute or execute extra queries' do
+ expect { integration.update!(manual_configuration: false) }
+ .not_to change { other_http_integration.reload.active }
+ end
+ end
+ end
+
describe '#editable?' do
it 'is editable' do
expect(integration.editable?).to be(true)
diff --git a/spec/models/integrations/shimo_spec.rb b/spec/models/integrations/shimo_spec.rb
index be626012ab2..95289343d0d 100644
--- a/spec/models/integrations/shimo_spec.rb
+++ b/spec/models/integrations/shimo_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Integrations::Shimo do
+RSpec.describe ::Integrations::Shimo, feature_category: :integrations do
describe '#fields' do
let(:shimo_integration) { build(:shimo_integration) }
@@ -60,4 +60,10 @@ RSpec.describe ::Integrations::Shimo do
expect { create(:shimo_integration) }.to change(ProjectSetting, :count).by(1)
end
end
+
+ describe '#avatar_url' do
+ it 'returns the avatar image path' do
+ expect(subject.avatar_url).to eq(ActionController::Base.helpers.image_path('logos/shimo.svg'))
+ end
+ end
end
diff --git a/spec/models/integrations/slack_spec.rb b/spec/models/integrations/slack_spec.rb
index 218d92ffe05..59ee3746d8f 100644
--- a/spec/models/integrations/slack_spec.rb
+++ b/spec/models/integrations/slack_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Integrations::Slack do
+RSpec.describe Integrations::Slack, feature_category: :integrations do
it_behaves_like Integrations::SlackMattermostNotifier, 'Slack'
it_behaves_like Integrations::BaseSlackNotification, factory: :integrations_slack do
before do
diff --git a/spec/models/integrations/zentao_spec.rb b/spec/models/integrations/zentao_spec.rb
index 2fa4df0e900..460ce7629cc 100644
--- a/spec/models/integrations/zentao_spec.rb
+++ b/spec/models/integrations/zentao_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Integrations::Zentao do
+RSpec.describe Integrations::Zentao, feature_category: :integrations do
let(:url) { 'https://jihudemo.zentao.net' }
let(:api_url) { 'https://jihudemo.zentao.net' }
let(:api_token) { 'ZENTAO_TOKEN' }
@@ -80,6 +80,12 @@ RSpec.describe Integrations::Zentao do
end
end
+ describe '#avatar_url' do
+ it 'returns the avatar image path' do
+ expect(subject.avatar_url).to eq(ActionController::Base.helpers.image_path('logos/zentao.svg'))
+ end
+ end
+
describe '#client_url' do
subject(:integration) { build(:zentao_integration, api_url: api_url, url: 'url').client_url }
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index 9db710cb3cc..4e217e3a9f7 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -28,7 +28,6 @@ RSpec.describe Issue, feature_category: :team_planning do
it { is_expected.to have_many(:resource_milestone_events) }
it { is_expected.to have_many(:resource_state_events) }
it { is_expected.to have_and_belong_to_many(:prometheus_alert_events) }
- it { is_expected.to have_and_belong_to_many(:self_managed_prometheus_alert_events) }
it { is_expected.to have_many(:prometheus_alerts) }
it { is_expected.to have_many(:issue_email_participants) }
it { is_expected.to have_one(:email) }
@@ -953,7 +952,7 @@ RSpec.describe Issue, feature_category: :team_planning do
subject { issue.from_service_desk? }
context 'when issue author is support bot' do
- let(:issue) { create(:issue, project: reusable_project, author: ::User.support_bot) }
+ let(:issue) { create(:issue, project: reusable_project, author: ::Users::Internal.support_bot) }
it { is_expected.to be_truthy }
end
@@ -1527,7 +1526,7 @@ RSpec.describe Issue, feature_category: :team_planning do
end
describe '#check_for_spam?' do
- let_it_be(:support_bot) { ::User.support_bot }
+ let_it_be(:support_bot) { ::Users::Internal.support_bot }
where(:support_bot?, :visibility_level, :confidential, :new_attributes, :check_for_spam?) do
### non-support-bot cases
@@ -1640,7 +1639,7 @@ RSpec.describe Issue, feature_category: :team_planning do
describe '.service_desk' do
it 'returns the service desk issue' do
- service_desk_issue = create(:issue, project: reusable_project, author: ::User.support_bot)
+ service_desk_issue = create(:issue, project: reusable_project, author: ::Users::Internal.support_bot)
regular_issue = create(:issue, project: reusable_project)
expect(described_class.service_desk).to include(service_desk_issue)
diff --git a/spec/models/loose_foreign_keys/modification_tracker_spec.rb b/spec/models/loose_foreign_keys/modification_tracker_spec.rb
index 069ccf85141..afc62f28f92 100644
--- a/spec/models/loose_foreign_keys/modification_tracker_spec.rb
+++ b/spec/models/loose_foreign_keys/modification_tracker_spec.rb
@@ -2,12 +2,12 @@
require 'spec_helper'
-RSpec.describe LooseForeignKeys::ModificationTracker do
+RSpec.describe LooseForeignKeys::ModificationTracker, feature_category: :database do
subject(:tracker) { described_class.new }
describe '#over_limit?' do
- it 'is true when deletion MAX_DELETES is exceeded' do
- stub_const('LooseForeignKeys::ModificationTracker::MAX_DELETES', 5)
+ it 'is true when deletion max_deletes is exceeded' do
+ expect(tracker).to receive(:max_deletes).and_return(5)
tracker.add_deletions('issues', 10)
expect(tracker).to be_over_limit
@@ -20,7 +20,7 @@ RSpec.describe LooseForeignKeys::ModificationTracker do
end
it 'is true when deletion MAX_UPDATES is exceeded' do
- stub_const('LooseForeignKeys::ModificationTracker::MAX_UPDATES', 5)
+ expect(tracker).to receive(:max_updates).and_return(5)
tracker.add_updates('issues', 3)
tracker.add_updates('issues', 4)
@@ -36,9 +36,11 @@ RSpec.describe LooseForeignKeys::ModificationTracker do
it 'is true when max runtime is exceeded' do
monotonic_time_before = 1 # this will be the start time
- monotonic_time_after = described_class::MAX_RUNTIME.to_i + 1 # this will be returned when over_limit? is called
+ monotonic_time_after = 31 # this will be returned when over_limit? is called
- allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(monotonic_time_before, monotonic_time_after)
+ expect(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(
+ monotonic_time_before, monotonic_time_after
+ )
tracker
diff --git a/spec/models/loose_foreign_keys/turbo_modification_tracker_spec.rb b/spec/models/loose_foreign_keys/turbo_modification_tracker_spec.rb
new file mode 100644
index 00000000000..0916a0845f5
--- /dev/null
+++ b/spec/models/loose_foreign_keys/turbo_modification_tracker_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe LooseForeignKeys::TurboModificationTracker, feature_category: :database do
+ subject(:tracker) { described_class.new }
+
+ let(:normal_tracker) { LooseForeignKeys::ModificationTracker.new }
+
+ context 'with limits that should be higher than LooseForeignKeys::ModificationTracker' do
+ it 'expects max_deletes to be equal or higher' do
+ expect(tracker.max_deletes).to be >= normal_tracker.max_deletes
+ end
+
+ it 'expects max_updates to be equal or higher' do
+ expect(tracker.max_updates).to be >= normal_tracker.max_updates
+ end
+
+ it 'expects max_runtime to be equal or higher' do
+ expect(tracker.max_runtime).to be >= normal_tracker.max_runtime
+ end
+ end
+end
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index f8aaae3edad..6dd5f9dec8c 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -1177,4 +1177,11 @@ RSpec.describe Member, feature_category: :groups_and_projects do
expect(described_class.sort_by_attribute('oldest_last_activity')).to eq([member3, member2, member1])
end
end
+
+ context 'with loose foreign key on members.user_id' do
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let!(:parent) { create(:user) }
+ let!(:model) { create(:group_member, user: parent) }
+ end
+ end
end
diff --git a/spec/models/members/group_member_spec.rb b/spec/models/members/group_member_spec.rb
index a07829abece..7307e76272d 100644
--- a/spec/models/members/group_member_spec.rb
+++ b/spec/models/members/group_member_spec.rb
@@ -69,7 +69,7 @@ RSpec.describe GroupMember, feature_category: :cell do
describe '#update_two_factor_requirement' do
it 'is called after creation and deletion' do
- user = build :user
+ user = create :user
group = create :group
group_member = build :group_member, user: user, group: group
@@ -288,4 +288,18 @@ RSpec.describe GroupMember, feature_category: :cell do
it_behaves_like 'calls AuthorizedProjectsWorker inline to recalculate authorizations'
end
end
+
+ context 'group member welcome email', :sidekiq_inline, :saas do
+ let_it_be(:group) { create(:group) }
+
+ let(:user) { create(:user) }
+
+ it 'schedules a plain welcome to the group email' do
+ expect_next_instance_of(NotificationService) do |notification|
+ expect(notification).to receive(:new_group_member)
+ end
+
+ group.add_developer(user)
+ end
+ end
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index da3f691b63a..b36737fc19d 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -135,10 +135,22 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
let_it_be(:user1) { create(:user) }
let_it_be(:user2) { create(:user) }
- let_it_be(:merge_request1) { create(:merge_request, :unique_branches, reviewers: [user1]) }
- let_it_be(:merge_request2) { create(:merge_request, :unique_branches, reviewers: [user2]) }
- let_it_be(:merge_request3) { create(:merge_request, :unique_branches, reviewers: []) }
- let_it_be(:merge_request4) { create(:merge_request, :draft_merge_request) }
+ let_it_be(:merge_request1) do
+ create(:merge_request, :prepared, :unique_branches, reviewers: [user1], created_at:
+ 2.days.ago)
+ end
+
+ let_it_be(:merge_request2) do
+ create(:merge_request, :unprepared, :unique_branches, reviewers: [user2], created_at:
+ 3.hours.ago)
+ end
+
+ let_it_be(:merge_request3) do
+ create(:merge_request, :unprepared, :unique_branches, reviewers: [], created_at:
+ Time.current)
+ end
+
+ let_it_be(:merge_request4) { create(:merge_request, :prepared, :draft_merge_request) }
describe '.preload_target_project_with_namespace' do
subject(:mr) { described_class.preload_target_project_with_namespace.first }
@@ -180,6 +192,14 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
end
end
+ describe '.recently_unprepared' do
+ it 'only returns the recently unprepared mrs' do
+ merge_request5 = create(:merge_request, :unprepared, :unique_branches, created_at: merge_request3.created_at)
+
+ expect(described_class.recently_unprepared).to eq([merge_request3, merge_request5])
+ end
+ end
+
describe '.by_sorted_source_branches' do
let(:fork_for_project) { fork_project(project) }
@@ -340,6 +360,23 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
end
end
+ describe "#validate_reviewer_size_length" do
+ let(:merge_request) { build(:merge_request, transitioning: transitioning) }
+
+ where(:transitioning, :to_or_not_to) do
+ false | :to
+ true | :not_to
+ end
+
+ with_them do
+ it do
+ expect(merge_request).send(to_or_not_to, receive(:validate_reviewer_size_length))
+
+ merge_request.valid?
+ end
+ end
+ end
+
describe '#validate_target_project' do
let(:merge_request) do
build(:merge_request, source_project: project, target_project: project, importing: importing)
@@ -366,6 +403,23 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
it { expect(merge_request.valid?(false)).to eq true }
end
end
+
+ context "with the skip_validations_during_transition_feature_flag" do
+ let(:merge_request) { build(:merge_request, transitioning: transitioning) }
+
+ where(:transitioning, :to_or_not_to) do
+ false | :to
+ true | :not_to
+ end
+
+ with_them do
+ it do
+ expect(merge_request).send(to_or_not_to, receive(:validate_target_project))
+
+ merge_request.valid?
+ end
+ end
+ end
end
end
@@ -2099,6 +2153,16 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
subject.mark_as_merged!
end
+ context 'and merged_commit_sha is present' do
+ before do
+ subject.update_attribute(:merged_commit_sha, pipeline.sha)
+ end
+
+ it 'returns the pipeline associated with that merge request' do
+ expect(subject.merge_pipeline).to eq(pipeline)
+ end
+ end
+
context 'and there is a merge commit' do
before do
subject.update_attribute(:merge_commit_sha, pipeline.sha)
@@ -2850,6 +2914,12 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
subject.mark_as_merged!
end
+ it 'returns merged_commit_sha when there is a merged_commit_sha' do
+ subject.update_attribute(:merged_commit_sha, sha)
+
+ expect(subject.merged_commit_sha).to eq(sha)
+ end
+
it 'returns merge_commit_sha when there is a merge_commit_sha' do
subject.update_attribute(:merge_commit_sha, sha)
@@ -3275,6 +3345,31 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
subject.mergeable?(check_mergeability_retry_lease: true)
end
end
+
+ context 'with skip_rebase_check option' do
+ before do
+ allow(subject).to receive_messages(
+ mergeable_state?: true,
+ check_mergeability: nil,
+ can_be_merged?: true
+ )
+ end
+
+ where(:should_be_rebased, :skip_rebase_check, :expected_mergeable) do
+ false | false | true
+ false | true | true
+ true | false | false
+ true | true | true
+ end
+
+ with_them do
+ it 'overrides should_be_rebased?' do
+ allow(subject).to receive(:should_be_rebased?) { should_be_rebased }
+
+ expect(subject.mergeable?(skip_rebase_check: skip_rebase_check)).to eq(expected_mergeable)
+ end
+ end
+ end
end
describe '#skipped_mergeable_checks' do
@@ -4442,6 +4537,7 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
shared_examples 'for an invalid state transition' do
specify 'is not a valid state transition' do
expect { transition! }.to raise_error(StateMachines::InvalidTransition)
+ expect(subject.transitioning?).to be_falsey
end
end
@@ -4451,6 +4547,7 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
.to change { subject.merge_status }
.from(merge_status.to_s)
.to(expected_merge_status)
+ expect(subject.transitioning?).to be_falsey
end
end
@@ -5493,7 +5590,8 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
let(:ref) { subject.target_project.repository.commit.id }
before do
- expect(subject.target_project).to receive(:mark_primary_write_location)
+ expect(subject.target_project.sticking).to receive(:stick)
+ .with(:project, subject.target_project.id)
end
it 'updates commit ID' do
@@ -5806,4 +5904,56 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
it { is_expected.to eq(false) }
end
end
+
+ describe '#supports_lock_on_merge?' do
+ let(:merge_request) { build_stubbed(:merge_request) }
+
+ subject { merge_request.supports_lock_on_merge? }
+
+ context 'when MR is open' do
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when MR is merged' do
+ before do
+ merge_request.state = :merged
+ end
+
+ it { is_expected.to eq(true) }
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(enforce_locked_labels_on_merge: false)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
+ end
+
+ describe '#missing_required_squash?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:squash, :project_requires_squash, :expected) do
+ false | true | true
+ false | false | false
+ true | true | false
+ true | false | false
+ end
+
+ with_them do
+ let(:merge_request) { build_stubbed(:merge_request, squash: squash) }
+
+ subject { merge_request.missing_required_squash? }
+
+ before do
+ allow(merge_request.target_project).to(
+ receive(:squash_always?)
+ .and_return(project_requires_squash)
+ )
+ end
+
+ it { is_expected.to eq(expected) }
+ end
+ end
end
diff --git a/spec/models/metrics/dashboard/annotation_spec.rb b/spec/models/metrics/dashboard/annotation_spec.rb
deleted file mode 100644
index 7c4f392fcdc..00000000000
--- a/spec/models/metrics/dashboard/annotation_spec.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::Dashboard::Annotation do
- using RSpec::Parameterized::TableSyntax
-
- describe 'validation' do
- it { is_expected.to validate_presence_of(:description) }
- it { is_expected.to validate_presence_of(:dashboard_path) }
- it { is_expected.to validate_presence_of(:starting_at) }
- it { is_expected.to validate_length_of(:dashboard_path).is_at_most(255) }
- it { is_expected.to validate_length_of(:panel_xid).is_at_most(255) }
- it { is_expected.to validate_length_of(:description).is_at_most(255) }
-
- context 'ending_at_after_starting_at' do
- where(:starting_at, :ending_at, :valid?, :message) do
- 2.days.ago.beginning_of_day | 1.day.ago.beginning_of_day | true | nil
- 1.day.ago.beginning_of_day | nil | true | nil
- 1.day.ago.beginning_of_day | 1.day.ago.beginning_of_day | true | nil
- 1.day.ago.beginning_of_day | 2.days.ago.beginning_of_day | false | /Ending at can't be before starting_at time/
- nil | 2.days.ago.beginning_of_day | false | /Starting at can't be blank/ # validation is covered by other method, be we need to assure, that ending_at_after_starting_at will not break with nil as starting_at
- nil | nil | false | /Starting at can't be blank/ # validation is covered by other method, be we need to assure, that ending_at_after_starting_at will not break with nil as starting_at
- end
-
- with_them do
- subject(:annotation) { build(:metrics_dashboard_annotation, starting_at: starting_at, ending_at: ending_at) }
-
- it do
- expect(annotation.valid?).to be(valid?)
- expect(annotation.errors.full_messages).to include(message) if message
- end
- end
- end
- end
-
- describe 'scopes' do
- let_it_be(:nine_minutes_old_annotation) { create(:metrics_dashboard_annotation, starting_at: 9.minutes.ago) }
- let_it_be(:fifteen_minutes_old_annotation) { create(:metrics_dashboard_annotation, starting_at: 15.minutes.ago) }
- let_it_be(:just_created_annotation) { create(:metrics_dashboard_annotation) }
-
- describe '#after' do
- it 'returns only younger annotations' do
- expect(described_class.after(12.minutes.ago)).to match_array [nine_minutes_old_annotation, just_created_annotation]
- end
- end
-
- describe '#before' do
- it 'returns only older annotations' do
- expect(described_class.before(5.minutes.ago)).to match_array [fifteen_minutes_old_annotation, nine_minutes_old_annotation]
- end
- end
-
- describe '#for_dashboard' do
- let!(:other_dashboard_annotation) { create(:metrics_dashboard_annotation, dashboard_path: 'other_dashboard.yml') }
-
- it 'returns annotations only for appointed dashboard' do
- expect(described_class.for_dashboard('other_dashboard.yml')).to match_array [other_dashboard_annotation]
- end
- end
-
- describe '#ending_before' do
- it 'returns annotations only for appointed dashboard' do
- freeze_time do
- twelve_minutes_old_annotation = create(:metrics_dashboard_annotation, starting_at: 15.minutes.ago, ending_at: 12.minutes.ago)
- create(:metrics_dashboard_annotation, starting_at: 15.minutes.ago, ending_at: 11.minutes.ago)
-
- expect(described_class.ending_before(11.minutes.ago)).to match_array [fifteen_minutes_old_annotation, twelve_minutes_old_annotation]
- end
- end
- end
- end
-end
diff --git a/spec/models/metrics/users_starred_dashboard_spec.rb b/spec/models/metrics/users_starred_dashboard_spec.rb
deleted file mode 100644
index c89344c0a1c..00000000000
--- a/spec/models/metrics/users_starred_dashboard_spec.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::UsersStarredDashboard do
- describe 'associations' do
- it { is_expected.to belong_to(:project).inverse_of(:metrics_users_starred_dashboards) }
- it { is_expected.to belong_to(:user).inverse_of(:metrics_users_starred_dashboards) }
- end
-
- describe 'validation' do
- subject { build(:metrics_users_starred_dashboard) }
-
- it { is_expected.to validate_presence_of(:user_id) }
- it { is_expected.to validate_presence_of(:project_id) }
- it { is_expected.to validate_presence_of(:dashboard_path) }
- it { is_expected.to validate_length_of(:dashboard_path).is_at_most(255) }
- it { is_expected.to validate_uniqueness_of(:dashboard_path).scoped_to(%i[user_id project_id]) }
- end
-
- context 'scopes' do
- let_it_be(:project) { create(:project) }
- let_it_be(:starred_dashboard_a) { create(:metrics_users_starred_dashboard, project: project, dashboard_path: 'path_a') }
- let_it_be(:starred_dashboard_b) { create(:metrics_users_starred_dashboard, project: project, dashboard_path: 'path_b') }
- let_it_be(:starred_dashboard_c) { create(:metrics_users_starred_dashboard, dashboard_path: 'path_b') }
-
- describe '#for_project' do
- it 'selects only starred dashboards belonging to project' do
- expect(described_class.for_project(project)).to contain_exactly starred_dashboard_a, starred_dashboard_b
- end
- end
-
- describe '#for_project_dashboard' do
- it 'selects only starred dashboards belonging to project with given dashboard path' do
- expect(described_class.for_project_dashboard(project, 'path_b')).to contain_exactly starred_dashboard_b
- end
- end
- end
-end
diff --git a/spec/models/ml/model_version_spec.rb b/spec/models/ml/model_version_spec.rb
index 4bb272fef5d..83639fca9e1 100644
--- a/spec/models/ml/model_version_spec.rb
+++ b/spec/models/ml/model_version_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Ml::ModelVersion, feature_category: :mlops do
describe 'associations' do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:model) }
- it { is_expected.to belong_to(:package) }
+ it { is_expected.to belong_to(:package).class_name('Packages::MlModel::Package') }
end
describe 'validation' do
@@ -83,14 +83,6 @@ RSpec.describe Ml::ModelVersion, feature_category: :mlops do
it { expect(errors[:package]).to include(error_message) }
end
-
- context 'when package is not ml_model' do
- let(:package) do
- build_stubbed(:generic_package, project: base_project, name: model1.name, version: valid_version)
- end
-
- it { expect(errors[:package]).to include('package must be ml_model') }
- end
end
end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 623c9c7e07c..a0deee0f2d3 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -443,6 +443,15 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
end
end
+ describe '.by_root_id' do
+ it 'returns correct namespaces' do
+ expect(described_class.by_root_id(namespace1.id)).to match_array([namespace1, namespace1sub])
+ expect(described_class.by_root_id(namespace2.id)).to match_array([namespace2, namespace2sub])
+ expect(described_class.by_root_id(namespace1sub.id)).to be_empty
+ expect(described_class.by_root_id(nil)).to be_empty
+ end
+ end
+
describe '.filter_by_path' do
it 'includes correct namespaces' do
expect(described_class.filter_by_path(namespace1.path)).to eq([namespace1])
@@ -1070,17 +1079,30 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
end
it 'defaults use_minimum_char_limit to true' do
- expect(described_class).to receive(:fuzzy_search).with(anything, anything, use_minimum_char_limit: true).once
+ expect(described_class).to receive(:fuzzy_search).with(anything, anything, use_minimum_char_limit: true, exact_matches_first: false).once
described_class.search('my namespace')
end
it 'passes use_minimum_char_limit if it is set' do
- expect(described_class).to receive(:fuzzy_search).with(anything, anything, use_minimum_char_limit: false).once
+ expect(described_class).to receive(:fuzzy_search).with(anything, anything, use_minimum_char_limit: false, exact_matches_first: false).once
described_class.search('my namespace', use_minimum_char_limit: false)
end
+ context 'with multiple matching namespaces' do
+ let_it_be(:first_group) { create(:group, name: 'some name', path: 'z-path') }
+ let_it_be(:second_group) { create(:group, name: 'some name too', path: 'a-path') }
+
+ it 'returns exact matches first' do
+ expect(described_class.search('some name', exact_matches_first: true).to_a).to eq([first_group, second_group])
+ end
+
+ it 'returns exact matches first when parents are included' do
+ expect(described_class.search('some name', include_parents: true, exact_matches_first: true).to_a).to eq([first_group, second_group])
+ end
+ end
+
context 'with project namespaces' do
let_it_be(:project) { create(:project, namespace: parent_group, path: 'some-new-path') }
let_it_be(:project_namespace) { project.project_namespace }
@@ -1186,8 +1208,11 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
end
describe '#move_dir', :request_store do
- shared_examples "namespace restrictions" do
- context "when any project has container images" do
+ context 'hashed storage' do
+ let_it_be(:namespace) { create(:namespace) }
+ let_it_be(:project) { create(:project_empty_repo, namespace: namespace) }
+
+ context 'when any project has container images' do
let(:container_repository) { create(:container_repository) }
before do
@@ -1207,190 +1232,6 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
)
end
end
- end
-
- context 'legacy storage' do
- let(:namespace) { create(:namespace) }
- let!(:project) { create(:project_empty_repo, :legacy_storage, namespace: namespace) }
-
- it_behaves_like 'namespace restrictions'
-
- it "raises error when directory exists" do
- expect { namespace.move_dir }.to raise_error("namespace directory cannot be moved")
- end
-
- it "moves dir if path changed" do
- namespace.update!(path: namespace.full_path + '_new')
-
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{namespace.path}/#{project.path}.git")).to be_truthy
- end
-
- context 'when #write_projects_repository_config raises an error' do
- context 'in test environment' do
- it 'raises an exception' do
- expect(namespace).to receive(:write_projects_repository_config).and_raise('foo')
-
- expect do
- namespace.update!(path: namespace.full_path + '_new')
- end.to raise_error('foo')
- end
- end
-
- context 'in production environment' do
- it 'does not cancel later callbacks' do
- expect(namespace).to receive(:write_projects_repository_config).and_raise('foo')
- expect(namespace).to receive(:move_dir).and_wrap_original do |m, *args|
- move_dir_result = m.call(*args)
-
- expect(move_dir_result).to be_truthy # Must be truthy, or else later callbacks would be canceled
-
- move_dir_result
- end
- expect(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(false) # like prod
-
- namespace.update!(path: namespace.full_path + '_new')
- end
- end
- end
-
- shared_examples 'move_dir without repository storage feature' do |storage_version|
- let(:namespace) { create(:namespace) }
- let(:gitlab_shell) { namespace.gitlab_shell }
- let!(:project) { create(:project_empty_repo, namespace: namespace, storage_version: storage_version) }
-
- it 'calls namespace service' do
- expect(gitlab_shell).to receive(:add_namespace).and_return(true)
- expect(gitlab_shell).to receive(:mv_namespace).and_return(true)
-
- namespace.move_dir
- end
- end
-
- shared_examples 'move_dir with repository storage feature' do |storage_version|
- let(:namespace) { create(:namespace) }
- let(:gitlab_shell) { namespace.gitlab_shell }
- let!(:project) { create(:project_empty_repo, namespace: namespace, storage_version: storage_version) }
-
- it 'does not call namespace service' do
- expect(gitlab_shell).not_to receive(:add_namespace)
- expect(gitlab_shell).not_to receive(:mv_namespace)
-
- namespace.move_dir
- end
- end
-
- context 'project is without repository storage feature' do
- [nil, 0].each do |storage_version|
- it_behaves_like 'move_dir without repository storage feature', storage_version
- end
- end
-
- context 'project has repository storage feature' do
- [1, 2].each do |storage_version|
- it_behaves_like 'move_dir with repository storage feature', storage_version
- end
- end
-
- context 'with subgroups' do
- let(:parent) { create(:group, name: 'parent', path: 'parent') }
- let(:new_parent) { create(:group, name: 'new_parent', path: 'new_parent') }
- let(:child) { create(:group, name: 'child', path: 'child', parent: parent) }
- let!(:project) { create(:project_empty_repo, :legacy_storage, path: 'the-project', namespace: child, skip_disk_validation: true) }
- let(:uploads_dir) { FileUploader.root }
- let(:pages_dir) { File.join(TestEnv.pages_path) }
-
- def expect_project_directories_at(namespace_path, with_pages: true)
- expected_repository_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- File.join(TestEnv.repos_path, namespace_path, 'the-project.git')
- end
- expected_upload_path = File.join(uploads_dir, namespace_path, 'the-project')
- expected_pages_path = File.join(pages_dir, namespace_path, 'the-project')
-
- expect(File.directory?(expected_repository_path)).to be_truthy
- expect(File.directory?(expected_upload_path)).to be_truthy
- expect(File.directory?(expected_pages_path)).to be(with_pages)
- end
-
- before do
- Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- FileUtils.mkdir_p(File.join(TestEnv.repos_path, "#{project.full_path}.git"))
- end
- FileUtils.mkdir_p(File.join(uploads_dir, project.full_path))
- FileUtils.mkdir_p(File.join(pages_dir, project.full_path))
- end
-
- after do
- Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- FileUtils.remove_entry(File.join(TestEnv.repos_path, parent.full_path), true)
- FileUtils.remove_entry(File.join(TestEnv.repos_path, new_parent.full_path), true)
- FileUtils.remove_entry(File.join(TestEnv.repos_path, child.full_path), true)
- end
- FileUtils.remove_entry(File.join(uploads_dir, project.full_path), true)
- FileUtils.remove_entry(pages_dir, true)
- end
-
- context 'renaming child' do
- context 'when no projects have pages deployed' do
- it 'moves the repository and uploads', :sidekiq_inline do
- project.pages_metadatum.update!(deployed: false)
- child.update!(path: 'renamed')
-
- expect_project_directories_at('parent/renamed', with_pages: false)
- end
- end
- end
-
- context 'renaming parent' do
- context 'when no projects have pages deployed' do
- it 'moves the repository and uploads', :sidekiq_inline do
- project.pages_metadatum.update!(deployed: false)
- parent.update!(path: 'renamed')
-
- expect_project_directories_at('renamed/child', with_pages: false)
- end
- end
- end
-
- context 'moving from one parent to another' do
- context 'when no projects have pages deployed' do
- it 'moves the repository and uploads', :sidekiq_inline do
- project.pages_metadatum.update!(deployed: false)
- child.update!(parent: new_parent)
-
- expect_project_directories_at('new_parent/child', with_pages: false)
- end
- end
- end
-
- context 'moving from having a parent to root' do
- context 'when no projects have pages deployed' do
- it 'moves the repository and uploads', :sidekiq_inline do
- project.pages_metadatum.update!(deployed: false)
- child.update!(parent: nil)
-
- expect_project_directories_at('child', with_pages: false)
- end
- end
- end
-
- context 'moving from root to having a parent' do
- context 'when no projects have pages deployed' do
- it 'moves the repository and uploads', :sidekiq_inline do
- project.pages_metadatum.update!(deployed: false)
- parent.update!(parent: new_parent)
-
- expect_project_directories_at('new_parent/parent/child', with_pages: false)
- end
- end
- end
- end
- end
-
- context 'hashed storage' do
- let(:namespace) { create(:namespace) }
- let!(:project) { create(:project_empty_repo, namespace: namespace) }
-
- it_behaves_like 'namespace restrictions'
it "repository directory remains unchanged if path changed" do
before_disk_path = project.disk_path
@@ -1430,73 +1271,6 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
end
end
- describe '#rm_dir', 'callback' do
- let(:repository_storage_path) do
- Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- Gitlab.config.repositories.storages.default.legacy_disk_path
- end
- end
-
- let(:path_in_dir) { File.join(repository_storage_path, namespace.full_path) }
- let(:deleted_path) { namespace.full_path.gsub(namespace.path, "#{namespace.full_path}+#{namespace.id}+deleted") }
- let(:deleted_path_in_dir) { File.join(repository_storage_path, deleted_path) }
-
- context 'legacy storage' do
- let!(:project) { create(:project_empty_repo, :legacy_storage, namespace: namespace) }
-
- it 'renames its dirs when deleted' do
- allow(GitlabShellWorker).to receive(:perform_in)
-
- namespace.destroy!
-
- expect(File.exist?(deleted_path_in_dir)).to be(true)
- end
-
- it 'schedules the namespace for deletion' do
- expect(GitlabShellWorker).to receive(:perform_in).with(5.minutes, :rm_namespace, repository_storage, deleted_path)
-
- namespace.destroy!
- end
-
- context 'in sub-groups' do
- let(:parent) { create(:group, path: 'parent') }
- let(:child) { create(:group, parent: parent, path: 'child') }
- let!(:project) { create(:project_empty_repo, :legacy_storage, namespace: child) }
- let(:path_in_dir) { File.join(repository_storage_path, 'parent', 'child') }
- let(:deleted_path) { File.join('parent', "child+#{child.id}+deleted") }
- let(:deleted_path_in_dir) { File.join(repository_storage_path, deleted_path) }
-
- it 'renames its dirs when deleted' do
- allow(GitlabShellWorker).to receive(:perform_in)
-
- child.destroy!
-
- expect(File.exist?(deleted_path_in_dir)).to be(true)
- end
-
- it 'schedules the namespace for deletion' do
- expect(GitlabShellWorker).to receive(:perform_in).with(5.minutes, :rm_namespace, repository_storage, deleted_path)
-
- child.destroy!
- end
- end
- end
-
- context 'hashed storage' do
- let!(:project) { create(:project_empty_repo, namespace: namespace) }
-
- it 'has no repositories base directories to remove' do
- expect(GitlabShellWorker).not_to receive(:perform_in)
-
- expect(File.exist?(path_in_dir)).to be(false)
-
- namespace.destroy!
-
- expect(File.exist?(deleted_path_in_dir)).to be(false)
- end
- end
- end
-
describe '.find_by_path_or_name' do
before do
@namespace = create(:namespace, name: 'WoW', path: 'woW')
@@ -2126,6 +1900,48 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
end
end
+ describe '#first_auto_devops_config' do
+ let(:instance_autodevops_status) { Gitlab::CurrentSettings.auto_devops_enabled? }
+
+ context 'when namespace.auto_devops_enabled is not set' do
+ let(:group) { create(:group) }
+
+ it 'returns the config values using the instance setting' do
+ expect(group.first_auto_devops_config).to eq({ scope: :instance, status: instance_autodevops_status })
+ end
+
+ context 'when namespace does not have auto_devops enabled but has a parent' do
+ let!(:parent) { create(:group, auto_devops_enabled: true) }
+ let!(:group) { create(:group, parent: parent) }
+
+ it 'returns the first_auto_devops_config of the parent' do
+ expect(parent).to receive(:first_auto_devops_config).and_call_original
+
+ expect(group.first_auto_devops_config).to eq({ scope: :group, status: true })
+ end
+
+ context 'then the parent is deleted' do
+ before do
+ parent.delete
+ group.reload
+ end
+
+ it 'returns its own config with status based on the instance settings' do
+ expect(group.first_auto_devops_config).to eq({ scope: :instance, status: instance_autodevops_status })
+ end
+ end
+ end
+ end
+
+ context 'when namespace.auto_devops_enabled is set' do
+ let(:group) { create(:group, auto_devops_enabled: false) }
+
+ it 'returns the correct config values' do
+ expect(group.first_auto_devops_config).to eq({ scope: :group, status: false })
+ end
+ end
+ end
+
describe '#user_namespace?' do
subject { namespace.user_namespace? }
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index 0fc689b9f6c..2b26c73aa7a 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Note, feature_category: :team_planning do
describe 'associations' do
it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:namespace) }
it { is_expected.to belong_to(:noteable).touch(false) }
it { is_expected.to belong_to(:author).class_name('User') }
@@ -32,6 +33,7 @@ RSpec.describe Note, feature_category: :team_planning do
it { is_expected.to validate_length_of(:note).is_at_most(1_000_000) }
it { is_expected.to validate_presence_of(:note) }
it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_presence_of(:namespace) }
context 'when note is on commit' do
before do
@@ -310,6 +312,70 @@ RSpec.describe Note, feature_category: :team_planning do
it { is_expected.to be false }
end
end
+
+ describe '#ensure_namespace_id' do
+ context 'for a project noteable' do
+ let_it_be(:issue) { create(:issue) }
+
+ it 'copies the project_namespace_id of the project' do
+ note = build(:note, noteable: issue, project: issue.project)
+
+ note.valid?
+
+ expect(note.namespace_id).to eq(issue.project.project_namespace_id)
+ end
+
+ context 'when noteable is changed' do
+ let_it_be(:another_issue) { create(:issue) }
+
+ it 'updates the namespace_id' do
+ note = create(:note, noteable: issue, project: issue.project)
+
+ note.noteable = another_issue
+ note.project = another_issue.project
+ note.valid?
+
+ expect(note.namespace_id).to eq(another_issue.project.project_namespace_id)
+ end
+ end
+
+ context 'when project is missing' do
+ it 'does not raise an exception' do
+ note = build(:note, noteable: issue, project: nil)
+
+ expect { note.valid? }.not_to raise_error
+ end
+ end
+ end
+
+ context 'for a personal snippet note' do
+ let_it_be(:snippet) { create(:personal_snippet) }
+
+ it 'copies the personal namespace_id of the author' do
+ note = build(:note, noteable: snippet, project: nil)
+
+ note.valid?
+
+ expect(note.namespace_id).to eq(snippet.author.namespace.id)
+ end
+
+ context 'when snippet author is missing' do
+ it 'does not raise an exception' do
+ note = build(:note, noteable: build(:personal_snippet, author: nil), project: nil)
+
+ expect { note.valid? }.not_to raise_error
+ end
+ end
+ end
+
+ context 'when noteable is missing' do
+ it 'does not raise an exception' do
+ note = build(:note, noteable: nil, project: nil)
+
+ expect { note.valid? }.not_to raise_error
+ end
+ end
+ end
end
describe "Commit notes" do
@@ -1595,53 +1661,30 @@ RSpec.describe Note, feature_category: :team_planning do
end
end
- describe 'expiring ETag cache' do
+ describe 'broadcasting note changes' do
let_it_be(:issue) { create(:issue) }
let(:note) { build(:note, project: issue.project, noteable: issue) }
- def expect_expiration(noteable)
- expect_any_instance_of(Gitlab::EtagCaching::Store)
- .to receive(:touch)
- .with("/#{noteable.project.namespace.to_param}/#{noteable.project.to_param}/noteable/#{noteable.class.name.underscore}/#{noteable.id}/notes")
- end
-
it 'broadcasts an Action Cable event for the noteable' do
expect(Noteable::NotesChannel).to receive(:broadcast_to).with(note.noteable, event: 'updated')
note.save!
end
- context 'when action_cable_notes is disabled' do
- before do
- stub_feature_flags(action_cable_notes: false)
- end
-
- it 'does not broadcast an Action Cable event' do
- expect(Noteable::NotesChannel).not_to receive(:broadcast_to)
-
- note.save!
- end
- end
-
- it "expires cache for note's issue when note is saved" do
- expect_expiration(note.noteable)
-
+ it 'broadcasts an Action Cable event for the noteable when note is destroyed' do
note.save!
- end
- it "expires cache for note's issue when note is destroyed" do
- note.save!
- expect_expiration(note.noteable)
+ expect(Noteable::NotesChannel).to receive(:broadcast_to).with(note.noteable, event: 'updated')
note.destroy!
end
- context 'when issuable etag caching is disabled' do
- it 'does not store cache key' do
- allow(note.noteable).to receive(:etag_caching_enabled?).and_return(false)
+ context 'when issuable real_time_notes is disabled' do
+ it 'does not broadcast an Action Cable event' do
+ allow(note.noteable).to receive(:real_time_notes_enabled?).and_return(false)
- expect_any_instance_of(Gitlab::EtagCaching::Store).not_to receive(:touch)
+ expect(Noteable::NotesChannel).not_to receive(:broadcast_to)
note.save!
end
@@ -1653,8 +1696,8 @@ RSpec.describe Note, feature_category: :team_planning do
context 'when adding a note to the MR' do
let(:note) { build(:note, noteable: merge_request, project: merge_request.project) }
- it 'expires the MR note etag cache' do
- expect_expiration(merge_request)
+ it 'broadcasts an Action Cable event for the MR' do
+ expect(Noteable::NotesChannel).to receive(:broadcast_to).with(merge_request, event: 'updated')
note.save!
end
@@ -1663,8 +1706,8 @@ RSpec.describe Note, feature_category: :team_planning do
context 'when adding a note to a commit on the MR' do
let(:note) { build(:note_on_commit, commit_id: merge_request.commits.first.id, project: merge_request.project) }
- it 'expires the MR note etag cache' do
- expect_expiration(merge_request)
+ it 'broadcasts an Action Cable event for the MR' do
+ expect(Noteable::NotesChannel).to receive(:broadcast_to).with(merge_request, event: 'updated')
note.save!
end
diff --git a/spec/models/notification_setting_spec.rb b/spec/models/notification_setting_spec.rb
index 730a9045d7f..9bf95051730 100644
--- a/spec/models/notification_setting_spec.rb
+++ b/spec/models/notification_setting_spec.rb
@@ -221,4 +221,11 @@ RSpec.describe NotificationSetting do
it { is_expected.to eq([notification_setting_1, notification_setting_3]) }
end
+
+ context 'with loose foreign key on notification_settings.user_id' do
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let!(:parent) { create(:user) }
+ let!(:model) { create(:notification_setting, user: parent) }
+ end
+ end
end
diff --git a/spec/models/oauth_access_token_spec.rb b/spec/models/oauth_access_token_spec.rb
index b21a2bf2079..55c82369f33 100644
--- a/spec/models/oauth_access_token_spec.rb
+++ b/spec/models/oauth_access_token_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe OauthAccessToken do
+RSpec.describe OauthAccessToken, feature_category: :system_access do
let(:app_one) { create(:oauth_application) }
let(:app_two) { create(:oauth_application) }
let(:app_three) { create(:oauth_application) }
@@ -23,6 +23,10 @@ RSpec.describe OauthAccessToken do
end
describe 'Doorkeeper secret storing' do
+ it 'does not have a prefix' do
+ expect(token.plaintext_token).not_to start_with('gl')
+ end
+
it 'stores the token in hashed format' do
expect(token.token).not_to eq(token.plaintext_token)
end
diff --git a/spec/models/organizations/organization_spec.rb b/spec/models/organizations/organization_spec.rb
index 7838fc1c5a4..2f9f04fd3e6 100644
--- a/spec/models/organizations/organization_spec.rb
+++ b/spec/models/organizations/organization_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Organizations::Organization, type: :model, feature_category: :cel
it { is_expected.to have_many :groups }
it { is_expected.to have_many(:users).through(:organization_users).inverse_of(:organizations) }
it { is_expected.to have_many(:organization_users).inverse_of(:organization) }
+ it { is_expected.to have_many :projects }
end
describe 'validations' do
diff --git a/spec/models/packages/dependency_link_spec.rb b/spec/models/packages/dependency_link_spec.rb
index d8fde8f5eb3..3022a960c4c 100644
--- a/spec/models/packages/dependency_link_spec.rb
+++ b/spec/models/packages/dependency_link_spec.rb
@@ -1,7 +1,28 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Packages::DependencyLink, type: :model do
+RSpec.describe Packages::DependencyLink, type: :model, feature_category: :package_registry do
+ let_it_be(:package1) { create(:package) }
+ let_it_be(:package2) { create(:package) }
+ let_it_be(:dependency1) { create(:packages_dependency) }
+ let_it_be(:dependency2) { create(:packages_dependency) }
+
+ let_it_be(:dependency_link1) do
+ create(:packages_dependency_link, :dev_dependencies, package: package1, dependency: dependency1)
+ end
+
+ let_it_be(:dependency_link2) do
+ create(:packages_dependency_link, :dependencies, package: package1, dependency: dependency2)
+ end
+
+ let_it_be(:dependency_link3) do
+ create(:packages_dependency_link, :dependencies, package: package2, dependency: dependency1)
+ end
+
+ let_it_be(:dependency_link4) do
+ create(:packages_dependency_link, :dependencies, package: package2, dependency: dependency2)
+ end
+
describe 'relationships' do
it { is_expected.to belong_to(:package).inverse_of(:dependency_links) }
it { is_expected.to belong_to(:dependency).inverse_of(:dependency_links) }
@@ -53,4 +74,49 @@ RSpec.describe Packages::DependencyLink, type: :model do
end
end
end
+
+ describe '.dependency_ids_grouped_by_type' do
+ let(:packages) { Packages::Package.where(id: [package1.id, package2.id]) }
+
+ subject { described_class.dependency_ids_grouped_by_type(packages) }
+
+ it 'aggregates dependencies by type', :aggregate_failures do
+ result = Gitlab::Json.parse(subject.to_json)
+
+ expect(result.count).to eq(2)
+ expect(result).to include(
+ hash_including(
+ 'package_id' => package1.id,
+ 'dependency_ids_by_type' => {
+ '1' => [dependency2.id],
+ '2' => [dependency1.id]
+ }
+ ),
+ hash_including(
+ 'package_id' => package2.id,
+ 'dependency_ids_by_type' => {
+ '1' => [dependency1.id, dependency2.id]
+ }
+ )
+ )
+ end
+ end
+
+ describe '.for_packages' do
+ let(:packages) { Packages::Package.where(id: package1.id) }
+
+ subject { described_class.for_packages(packages) }
+
+ it 'returns dependency links for selected packages' do
+ expect(subject).to contain_exactly(dependency_link1, dependency_link2)
+ end
+ end
+
+ describe '.select_dependency_id' do
+ subject { described_class.select_dependency_id }
+
+ it 'returns only dependency_id' do
+ expect(subject[0].attributes).to eq('dependency_id' => dependency1.id, 'id' => nil)
+ end
+ end
end
diff --git a/spec/models/packages/ml_model/package_spec.rb b/spec/models/packages/ml_model/package_spec.rb
new file mode 100644
index 00000000000..6a327197672
--- /dev/null
+++ b/spec/models/packages/ml_model/package_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::MlModel::Package, feature_category: :mlops do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:ml_model) { create(:ml_model_package, project: project) }
+ let_it_be(:generic_package) { create(:generic_package, project: project) }
+
+ describe 'associations' do
+ it { is_expected.to have_one(:model_version) }
+ end
+
+ describe 'all' do
+ it 'fetches only ml_model packages' do
+ expect(described_class.all).to eq([ml_model])
+ end
+ end
+
+ describe '#valid?' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:valid_version) { '1.0.0' }
+ let_it_be(:valid_name) { 'some_model' }
+
+ let(:version) { valid_version }
+ let(:name) { valid_name }
+
+ let(:ml_model) { described_class.new(version: version, name: name, project: project) }
+
+ subject(:errors) do
+ ml_model.validate
+ ml_model.errors
+ end
+
+ it { expect(ml_model).to validate_presence_of(:name) }
+ it { expect(ml_model).to validate_presence_of(:version) }
+
+ it 'validates a valid ml_model package' do
+ expect(errors).to be_empty
+ end
+
+ describe 'name' do
+ where(:ctx, :name) do
+ 'name is blank' | ''
+ 'name is nil' | nil
+ 'name is not a valid package name' | '!!()()'
+ 'name is too large' | ('a' * 256)
+ end
+ with_them do
+ it { expect(errors).to include(:name) }
+ end
+ end
+
+ describe 'version' do
+ where(:ctx, :version) do
+ 'version is blank' | ''
+ 'version is nil' | nil
+ 'version is not valid semver' | 'v1.0.0'
+ 'version is too large' | ('a' * 256)
+ end
+ with_them do
+ it { expect(errors).to include(:version) }
+ end
+ end
+ end
+end
diff --git a/spec/models/packages/nuget/metadatum_spec.rb b/spec/models/packages/nuget/metadatum_spec.rb
index e1520c0782f..c8e052baf6f 100644
--- a/spec/models/packages/nuget/metadatum_spec.rb
+++ b/spec/models/packages/nuget/metadatum_spec.rb
@@ -16,18 +16,7 @@ RSpec.describe Packages::Nuget::Metadatum, type: :model, feature_category: :pack
it { is_expected.to validate_length_of(:authors).is_at_most(described_class::MAX_AUTHORS_LENGTH) }
it { is_expected.to validate_presence_of(:description) }
it { is_expected.to validate_length_of(:description).is_at_most(described_class::MAX_DESCRIPTION_LENGTH) }
-
- context 'for normalized_version presence' do
- it { is_expected.to validate_presence_of(:normalized_version) }
-
- context 'when nuget_normalized_version feature flag is disabled' do
- before do
- stub_feature_flags(nuget_normalized_version: false)
- end
-
- it { is_expected.not_to validate_presence_of(:normalized_version) }
- end
- end
+ it { is_expected.to validate_presence_of(:normalized_version) }
%i[license_url project_url icon_url].each do |url|
describe "##{url}" do
@@ -87,16 +76,6 @@ RSpec.describe Packages::Nuget::Metadatum, type: :model, feature_category: :pack
expect(nuget_metadatum.normalized_version).to eq(normalized_version)
end
-
- context 'when the nuget_normalized_version feature flag is disabled' do
- before do
- stub_feature_flags(nuget_normalized_version: false)
- end
-
- it 'does not save the normalized version' do
- expect(nuget_metadatum.normalized_version).not_to eq(normalized_version)
- end
- end
end
end
end
diff --git a/spec/models/packages/nuget/symbol_spec.rb b/spec/models/packages/nuget/symbol_spec.rb
new file mode 100644
index 00000000000..52e95c11939
--- /dev/null
+++ b/spec/models/packages/nuget/symbol_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Nuget::Symbol, type: :model, feature_category: :package_registry do
+ subject(:symbol) { create(:nuget_symbol) }
+
+ it { is_expected.to be_a FileStoreMounter }
+
+ describe 'relationships' do
+ it { is_expected.to belong_to(:package).inverse_of(:nuget_symbols) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:file) }
+ it { is_expected.to validate_presence_of(:file_path) }
+ it { is_expected.to validate_presence_of(:signature) }
+ it { is_expected.to validate_presence_of(:object_storage_key) }
+ it { is_expected.to validate_presence_of(:size) }
+ it { is_expected.to validate_uniqueness_of(:signature).scoped_to(:file_path) }
+ it { is_expected.to validate_uniqueness_of(:object_storage_key).case_insensitive }
+ end
+
+ describe 'delegations' do
+ it { is_expected.to delegate_method(:project_id).to(:package) }
+ end
+
+ describe 'callbacks' do
+ describe 'before_validation' do
+ describe '#set_object_storage_key' do
+ context 'when signature and project_id are present' do
+ it 'sets the object_storage_key' do
+ expected_key = Gitlab::HashedPath.new(
+ 'packages', 'nuget', symbol.package_id, 'symbols', OpenSSL::Digest::SHA256.hexdigest(symbol.signature),
+ root_hash: symbol.project_id
+ ).to_s
+
+ symbol.valid?
+
+ expect(symbol.object_storage_key).to eq(expected_key)
+ end
+ end
+
+ context 'when signature is not present' do
+ subject(:symbol) { build(:nuget_symbol, signature: nil) }
+
+ it 'does not set the object_storage_key' do
+ symbol.valid?
+
+ expect(symbol.object_storage_key).to be_nil
+ end
+ end
+
+ context 'when project_id is not present' do
+ subject(:symbol) { build(:nuget_symbol) }
+
+ before do
+ allow(symbol).to receive(:project_id).and_return(nil)
+ end
+
+ it 'does not set the object_storage_key' do
+ symbol.valid?
+
+ expect(symbol.object_storage_key).to be_nil
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb
index 381b5af117e..e113218e828 100644
--- a/spec/models/packages/package_spec.rb
+++ b/spec/models/packages/package_spec.rb
@@ -25,6 +25,7 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
it { is_expected.to have_one(:rubygems_metadatum).inverse_of(:package) }
it { is_expected.to have_one(:npm_metadatum).inverse_of(:package) }
it { is_expected.to have_one(:rpm_metadatum).inverse_of(:package) }
+ it { is_expected.to have_many(:nuget_symbols).inverse_of(:package) }
end
describe '.with_debian_codename' do
@@ -875,14 +876,6 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
subject { described_class.with_npm_scope('test') }
it { is_expected.to contain_exactly(package1) }
-
- context 'when npm_package_registry_fix_group_path_validation is disabled' do
- before do
- stub_feature_flags(npm_package_registry_fix_group_path_validation: false)
- end
-
- it { is_expected.to contain_exactly(package1) }
- end
end
describe '.without_nuget_temporary_name' do
@@ -1505,4 +1498,38 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
end
end
end
+
+ describe 'inheritance' do
+ let_it_be(:project) { create(:project) }
+
+ let(:format) { "" }
+ let(:package) { create("#{format}_package", project: project) }
+ let(:package_id) { package.id }
+
+ subject { described_class.find_by(id: package_id).class }
+
+ described_class
+ .package_types
+ .keys
+ .map(&:to_sym)
+ .each do |package_format|
+ if described_class.inheritance_column_to_class_map[package_format].nil?
+ context "for package format #{package_format}" do
+ let(:format) { package_format }
+
+ it 'maps to Packages::Package' do
+ is_expected.to eq(described_class)
+ end
+ end
+ else
+ context "for package format #{package_format}" do
+ let(:format) { package_format }
+
+ it 'maps to the correct class' do
+ is_expected.to eq(described_class.inheritance_column_to_class_map[package_format].constantize)
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/packages/protection/rule_spec.rb b/spec/models/packages/protection/rule_spec.rb
new file mode 100644
index 00000000000..b368687e6d8
--- /dev/null
+++ b/spec/models/packages/protection/rule_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Protection::Rule, type: :model, feature_category: :package_registry do
+ it_behaves_like 'having unique enum values'
+
+ describe 'relationships' do
+ it { is_expected.to belong_to(:project).inverse_of(:package_protection_rules) }
+ end
+
+ describe 'enums' do
+ describe '#package_type' do
+ it { is_expected.to define_enum_for(:package_type).with_values(npm: Packages::Package.package_types[:npm]) }
+ end
+ end
+
+ describe 'validations' do
+ subject { build(:package_protection_rule) }
+
+ describe '#package_name_pattern' do
+ it { is_expected.to validate_presence_of(:package_name_pattern) }
+ it { is_expected.to validate_uniqueness_of(:package_name_pattern).scoped_to(:project_id, :package_type) }
+ it { is_expected.to validate_length_of(:package_name_pattern).is_at_most(255) }
+ end
+
+ describe '#package_type' do
+ it { is_expected.to validate_presence_of(:package_type) }
+ end
+
+ describe '#push_protected_up_to_access_level' do
+ it { is_expected.to validate_presence_of(:push_protected_up_to_access_level) }
+
+ it {
+ is_expected.to validate_inclusion_of(:push_protected_up_to_access_level).in_array([Gitlab::Access::DEVELOPER,
+ Gitlab::Access::MAINTAINER, Gitlab::Access::OWNER])
+ }
+ end
+ end
+end
diff --git a/spec/models/pages/virtual_domain_spec.rb b/spec/models/pages/virtual_domain_spec.rb
index b5a421295b2..02e3fd67f2d 100644
--- a/spec/models/pages/virtual_domain_spec.rb
+++ b/spec/models/pages/virtual_domain_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Pages::VirtualDomain do
+RSpec.describe Pages::VirtualDomain, feature_category: :pages do
describe '#certificate and #key pair' do
let(:domain) { nil }
let(:project) { instance_double(Project) }
@@ -25,6 +25,8 @@ RSpec.describe Pages::VirtualDomain do
end
describe '#lookup_paths' do
+ let(:domain) { nil }
+ let(:trim_prefix) { nil }
let(:project_a) { instance_double(Project) }
let(:project_b) { instance_double(Project) }
let(:project_c) { instance_double(Project) }
@@ -32,44 +34,43 @@ RSpec.describe Pages::VirtualDomain do
let(:pages_lookup_path_b) { instance_double(Pages::LookupPath, prefix: 'bbb', source: { type: 'zip', path: 'https://example.com' }) }
let(:pages_lookup_path_without_source) { instance_double(Pages::LookupPath, prefix: 'ccc', source: nil) }
+ subject(:virtual_domain) do
+ described_class.new(projects: project_list, domain: domain, trim_prefix: trim_prefix)
+ end
+
+ before do
+ allow(Pages::LookupPath)
+ .to receive(:new)
+ .with(project_a, domain: domain, trim_prefix: trim_prefix)
+ .and_return(pages_lookup_path_a)
+
+ allow(Pages::LookupPath)
+ .to receive(:new)
+ .with(project_b, domain: domain, trim_prefix: trim_prefix)
+ .and_return(pages_lookup_path_b)
+
+ allow(Pages::LookupPath)
+ .to receive(:new)
+ .with(project_c, domain: domain, trim_prefix: trim_prefix)
+ .and_return(pages_lookup_path_without_source)
+ end
+
context 'when there is pages domain provided' do
let(:domain) { instance_double(PagesDomain) }
-
- subject(:virtual_domain) { described_class.new(projects: [project_a, project_b, project_c], domain: domain) }
+ let(:project_list) { [project_a, project_b, project_c] }
it 'returns collection of projects pages lookup paths sorted by prefix in reverse' do
- expect(project_a).to receive(:pages_lookup_path).with(domain: domain, trim_prefix: nil).and_return(pages_lookup_path_a)
- expect(project_b).to receive(:pages_lookup_path).with(domain: domain, trim_prefix: nil).and_return(pages_lookup_path_b)
- expect(project_c).to receive(:pages_lookup_path).with(domain: domain, trim_prefix: nil).and_return(pages_lookup_path_without_source)
-
expect(virtual_domain.lookup_paths).to eq([pages_lookup_path_b, pages_lookup_path_a])
end
end
context 'when there is trim_prefix provided' do
- subject(:virtual_domain) { described_class.new(projects: [project_a, project_b], trim_prefix: 'group/') }
+ let(:trim_prefix) { 'group/' }
+ let(:project_list) { [project_a, project_b] }
it 'returns collection of projects pages lookup paths sorted by prefix in reverse' do
- expect(project_a).to receive(:pages_lookup_path).with(trim_prefix: 'group/', domain: nil).and_return(pages_lookup_path_a)
- expect(project_b).to receive(:pages_lookup_path).with(trim_prefix: 'group/', domain: nil).and_return(pages_lookup_path_b)
-
expect(virtual_domain.lookup_paths).to eq([pages_lookup_path_b, pages_lookup_path_a])
end
end
end
-
- describe '#cache_key' do
- it 'returns the cache key based in the given cache_control' do
- cache_control = instance_double(::Gitlab::Pages::CacheControl, cache_key: 'cache_key')
- virtual_domain = described_class.new(projects: [instance_double(Project)], cache: cache_control)
-
- expect(virtual_domain.cache_key).to eq('cache_key')
- end
-
- it 'returns nil when no cache_control is given' do
- virtual_domain = described_class.new(projects: [instance_double(Project)])
-
- expect(virtual_domain.cache_key).to be_nil
- end
- end
end
diff --git a/spec/models/pages_deployment_spec.rb b/spec/models/pages_deployment_spec.rb
index bff69485e43..916197fe5e9 100644
--- a/spec/models/pages_deployment_spec.rb
+++ b/spec/models/pages_deployment_spec.rb
@@ -81,6 +81,57 @@ RSpec.describe PagesDeployment, feature_category: :pages do
end
end
+ describe '.deactivate_deployments_older_than', :freeze_time do
+ let!(:other_project_deployment) do
+ create(:pages_deployment)
+ end
+
+ let!(:other_path_prefix_deployment) do
+ create(:pages_deployment, project: project, path_prefix: 'other')
+ end
+
+ let!(:deactivated_deployment) do
+ create(:pages_deployment, project: project, deleted_at: 5.minutes.ago)
+ end
+
+ it 'updates only older deployments for the same project and path prefix' do
+ deployment1 = create(:pages_deployment, project: project, updated_at: 5.minutes.ago)
+ deployment2 = create(:pages_deployment, project: project, updated_at: 5.minutes.ago)
+ deployment3 = create(:pages_deployment, project: project, updated_at: 5.minutes.ago)
+
+ expect { described_class.deactivate_deployments_older_than(deployment2) }
+ .to change { deployment1.reload.deleted_at }
+ .from(nil).to(Time.zone.now)
+ .and change { deployment1.reload.updated_at }
+ .to(Time.zone.now)
+
+ expect(deployment2.reload.deleted_at).to be_nil
+ expect(deployment3.reload.deleted_at).to be_nil
+ expect(other_project_deployment.deleted_at).to be_nil
+ expect(other_path_prefix_deployment.reload.deleted_at).to be_nil
+ expect(deactivated_deployment.reload.deleted_at).to eq(5.minutes.ago)
+ end
+
+ it 'updates only older deployments for the same project with the given time' do
+ deployment1 = create(:pages_deployment, project: project, updated_at: 5.minutes.ago)
+ deployment2 = create(:pages_deployment, project: project, updated_at: 5.minutes.ago)
+ deployment3 = create(:pages_deployment, project: project, updated_at: 5.minutes.ago)
+ time = 30.minutes.from_now
+
+ expect { described_class.deactivate_deployments_older_than(deployment2, time: time) }
+ .to change { deployment1.reload.deleted_at }
+ .from(nil).to(time)
+ .and change { deployment1.reload.updated_at }
+ .to(Time.zone.now)
+
+ expect(deployment2.reload.deleted_at).to be_nil
+ expect(deployment3.reload.deleted_at).to be_nil
+ expect(other_project_deployment.deleted_at).to be_nil
+ expect(other_path_prefix_deployment.reload.deleted_at).to be_nil
+ expect(deactivated_deployment.reload.deleted_at).to eq(5.minutes.ago)
+ end
+ end
+
describe '#migrated?' do
it 'returns false for normal deployment' do
deployment = create(:pages_deployment)
diff --git a/spec/models/pages_domain_spec.rb b/spec/models/pages_domain_spec.rb
index 3030756a413..cd740bca502 100644
--- a/spec/models/pages_domain_spec.rb
+++ b/spec/models/pages_domain_spec.rb
@@ -201,6 +201,17 @@ RSpec.describe PagesDomain do
describe 'validations' do
it { is_expected.to validate_presence_of(:verification_code) }
+
+ context 'when validating max certificate key length' do
+ it 'validates the certificate key length' do
+ valid_domain = build(:pages_domain, :key_length_8192)
+ expect(valid_domain).to be_valid
+
+ invalid_domain = build(:pages_domain, :extra_long_key)
+ expect(invalid_domain).to be_invalid
+ expect(invalid_domain.errors[:key]).to include('Certificate Key is too long. (Max 8192 bytes)')
+ end
+ end
end
describe 'default values' do
diff --git a/spec/models/performance_monitoring/prometheus_metric_spec.rb b/spec/models/performance_monitoring/prometheus_metric_spec.rb
deleted file mode 100644
index 58bb59793cf..00000000000
--- a/spec/models/performance_monitoring/prometheus_metric_spec.rb
+++ /dev/null
@@ -1,67 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe PerformanceMonitoring::PrometheusMetric do
- let(:json_content) do
- {
- "id" => "metric_of_ages",
- "unit" => "count",
- "label" => "Metric of Ages",
- "query_range" => "http_requests_total"
- }
- end
-
- describe '.from_json' do
- subject { described_class.from_json(json_content) }
-
- it 'creates a PrometheusMetric object' do
- expect(subject).to be_a described_class
- expect(subject.id).to eq(json_content['id'])
- expect(subject.unit).to eq(json_content['unit'])
- expect(subject.label).to eq(json_content['label'])
- expect(subject.query_range).to eq(json_content['query_range'])
- end
-
- describe 'validations' do
- context 'json_content is not a hash' do
- let(:json_content) { nil }
-
- subject { described_class.from_json(json_content) }
-
- it { expect { subject }.to raise_error(ActiveModel::ValidationError) }
- end
-
- context 'when unit is missing' do
- before do
- json_content['unit'] = nil
- end
-
- subject { described_class.from_json(json_content) }
-
- it { expect { subject }.to raise_error(ActiveModel::ValidationError) }
- end
-
- context 'when query and query_range is missing' do
- before do
- json_content['query_range'] = nil
- end
-
- subject { described_class.from_json(json_content) }
-
- it { expect { subject }.to raise_error(ActiveModel::ValidationError) }
- end
-
- context 'when query_range is missing but query is available' do
- before do
- json_content['query_range'] = nil
- json_content['query'] = 'http_requests_total'
- end
-
- subject { described_class.from_json(json_content) }
-
- it { is_expected.to be_valid }
- end
- end
- end
-end
diff --git a/spec/models/performance_monitoring/prometheus_panel_group_spec.rb b/spec/models/performance_monitoring/prometheus_panel_group_spec.rb
deleted file mode 100644
index 497f80483eb..00000000000
--- a/spec/models/performance_monitoring/prometheus_panel_group_spec.rb
+++ /dev/null
@@ -1,62 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe PerformanceMonitoring::PrometheusPanelGroup do
- let(:json_content) do
- {
- "group" => "Group Title",
- "panels" => [{
- "type" => "area-chart",
- "title" => "Chart Title",
- "y_label" => "Y-Axis",
- "metrics" => [{
- "id" => "metric_of_ages",
- "unit" => "count",
- "label" => "Metric of Ages",
- "query_range" => "http_requests_total"
- }]
- }]
- }
- end
-
- describe '.from_json' do
- subject { described_class.from_json(json_content) }
-
- it 'creates a PrometheusPanelGroup object' do
- expect(subject).to be_a described_class
- expect(subject.group).to eq(json_content['group'])
- expect(subject.panels).to all(be_a PerformanceMonitoring::PrometheusPanel)
- end
-
- describe 'validations' do
- context 'json_content is not a hash' do
- let(:json_content) { nil }
-
- subject { described_class.from_json(json_content) }
-
- it { expect { subject }.to raise_error(ActiveModel::ValidationError) }
- end
-
- context 'when group is missing' do
- before do
- json_content.delete('group')
- end
-
- subject { described_class.from_json(json_content) }
-
- it { expect { subject }.to raise_error(ActiveModel::ValidationError) }
- end
-
- context 'when panels are missing' do
- before do
- json_content['panels'] = []
- end
-
- subject { described_class.from_json(json_content) }
-
- it { expect { subject }.to raise_error(ActiveModel::ValidationError) }
- end
- end
- end
-end
diff --git a/spec/models/performance_monitoring/prometheus_panel_spec.rb b/spec/models/performance_monitoring/prometheus_panel_spec.rb
deleted file mode 100644
index 42dcbbdb8e0..00000000000
--- a/spec/models/performance_monitoring/prometheus_panel_spec.rb
+++ /dev/null
@@ -1,85 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe PerformanceMonitoring::PrometheusPanel do
- let(:json_content) do
- {
- "max_value" => 1,
- "type" => "area-chart",
- "title" => "Chart Title",
- "y_label" => "Y-Axis",
- "weight" => 1,
- "metrics" => [{
- "id" => "metric_of_ages",
- "unit" => "count",
- "label" => "Metric of Ages",
- "query_range" => "http_requests_total"
- }]
- }
- end
-
- describe '#new' do
- it 'accepts old schema format' do
- expect { described_class.new(json_content) }.not_to raise_error
- end
-
- it 'accepts new schema format' do
- expect { described_class.new(json_content.merge("y_axis" => { "precision" => 0 })) }.not_to raise_error
- end
- end
-
- describe '.from_json' do
- subject { described_class.from_json(json_content) }
-
- it 'creates a PrometheusPanelGroup object' do
- expect(subject).to be_a described_class
- expect(subject.type).to eq(json_content['type'])
- expect(subject.title).to eq(json_content['title'])
- expect(subject.y_label).to eq(json_content['y_label'])
- expect(subject.weight).to eq(json_content['weight'])
- expect(subject.metrics).to all(be_a PerformanceMonitoring::PrometheusMetric)
- end
-
- describe 'validations' do
- context 'json_content is not a hash' do
- let(:json_content) { nil }
-
- subject { described_class.from_json(json_content) }
-
- it { expect { subject }.to raise_error(ActiveModel::ValidationError) }
- end
-
- context 'when title is missing' do
- before do
- json_content['title'] = nil
- end
-
- subject { described_class.from_json(json_content) }
-
- it { expect { subject }.to raise_error(ActiveModel::ValidationError) }
- end
-
- context 'when metrics are missing' do
- before do
- json_content.delete('metrics')
- end
-
- subject { described_class.from_json(json_content) }
-
- it { expect { subject }.to raise_error(ActiveModel::ValidationError) }
- end
- end
- end
-
- describe '.id' do
- it 'returns hexdigest of group_title, type and title as the panel id' do
- group_title = 'Business Group'
- panel_type = 'area-chart'
- panel_title = 'New feature requests made'
-
- expect(Digest::SHA2).to receive(:hexdigest).with("#{group_title}#{panel_type}#{panel_title}").and_return('hexdigest')
- expect(described_class.new(title: panel_title, type: panel_type).id(group_title)).to eql 'hexdigest'
- end
- end
-end
diff --git a/spec/models/plan_spec.rb b/spec/models/plan_spec.rb
index 73e88a17e24..fe3365ca78f 100644
--- a/spec/models/plan_spec.rb
+++ b/spec/models/plan_spec.rb
@@ -3,6 +3,18 @@
require 'spec_helper'
RSpec.describe Plan do
+ describe 'scopes', :aggregate_failures do
+ let_it_be(:default_plan) { create(:default_plan) }
+
+ describe '.by_name' do
+ it 'returns plans by their name' do
+ expect(described_class.by_name('default')).to match_array([default_plan])
+ expect(described_class.by_name(%w[default unknown])).to match_array([default_plan])
+ expect(described_class.by_name(nil)).to be_empty
+ end
+ end
+ end
+
describe '#default?' do
subject { plan.default? }
diff --git a/spec/models/pool_repository_spec.rb b/spec/models/pool_repository_spec.rb
index 93c1e59458d..bafda406774 100644
--- a/spec/models/pool_repository_spec.rb
+++ b/spec/models/pool_repository_spec.rb
@@ -13,15 +13,17 @@ RSpec.describe PoolRepository, feature_category: :source_code_management do
let!(:pool_repository) { create(:pool_repository) }
it { is_expected.to validate_presence_of(:shard) }
- it { is_expected.to validate_presence_of(:source_project) }
end
describe 'scopes' do
let_it_be(:project1) { create(:project) }
let_it_be(:project2) { create(:project) }
let_it_be(:new_shard) { create(:shard, name: 'new') }
- let_it_be(:pool_repository1) { create(:pool_repository, source_project: project1) }
- let_it_be(:pool_repository2) { create(:pool_repository, source_project: project1, shard: new_shard) }
+ let_it_be(:pool_repository1) { create(:pool_repository, source_project: project1, disk_path: 'disk_path') }
+ let_it_be(:pool_repository2) do
+ create(:pool_repository, source_project: project1, disk_path: 'disk_path', shard: new_shard)
+ end
+
let_it_be(:another_pool_repository) { create(:pool_repository, source_project: project2) }
describe '.by_source_project' do
@@ -32,8 +34,8 @@ RSpec.describe PoolRepository, feature_category: :source_code_management do
end
end
- describe '.by_source_project_and_shard_name' do
- subject { described_class.by_source_project_and_shard_name(project1, new_shard.name) }
+ describe '.by_disk_path_and_shard_name' do
+ subject { described_class.by_disk_path_and_shard_name('disk_path', new_shard.name) }
it 'returns only a requested pool repository' do
is_expected.to match_array([pool_repository2])
@@ -91,4 +93,38 @@ RSpec.describe PoolRepository, feature_category: :source_code_management do
end
end
end
+
+ describe '#object_pool' do
+ subject { pool.object_pool }
+
+ let(:pool) { build(:pool_repository, :ready, source_project: project, disk_path: disk_path) }
+ let(:project) { build(:project) }
+ let(:disk_path) { 'disk_path' }
+
+ it 'returns an object pool instance' do
+ is_expected.to be_a_kind_of(Gitlab::Git::ObjectPool)
+
+ is_expected.to have_attributes(
+ storage: pool.shard.name,
+ relative_path: "#{pool.disk_path}.git",
+ source_repository: pool.source_project.repository.raw,
+ gl_project_path: pool.source_project.full_path
+ )
+ end
+
+ context 'when source project is missing' do
+ let(:project) { nil }
+
+ it 'returns an object pool instance' do
+ is_expected.to be_a_kind_of(Gitlab::Git::ObjectPool)
+
+ is_expected.to have_attributes(
+ storage: pool.shard.name,
+ relative_path: "#{pool.disk_path}.git",
+ source_repository: nil,
+ gl_project_path: nil
+ )
+ end
+ end
+ end
end
diff --git a/spec/models/project_authorization_spec.rb b/spec/models/project_authorization_spec.rb
index 2ba7f5c4ca4..9fed05342aa 100644
--- a/spec/models/project_authorization_spec.rb
+++ b/spec/models/project_authorization_spec.rb
@@ -3,6 +3,23 @@
require 'spec_helper'
RSpec.describe ProjectAuthorization, feature_category: :groups_and_projects do
+ describe 'create' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project_1) { create(:project) }
+
+ let(:project_auth) do
+ build(
+ :project_authorization,
+ user: user,
+ project: project_1
+ )
+ end
+
+ it 'sets is_unique' do
+ expect { project_auth.save! }.to change { project_auth.is_unique }.to(true)
+ end
+ end
+
describe 'unique user, project authorizations' do
let_it_be(:user) { create(:user) }
let_it_be(:project_1) { create(:project) }
@@ -65,6 +82,26 @@ RSpec.describe ProjectAuthorization, feature_category: :groups_and_projects do
it { is_expected.to validate_inclusion_of(:access_level).in_array(Gitlab::Access.all_values) }
end
+ describe 'scopes' do
+ describe '.non_guests' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:project_original_owner_authorization) { project.owner.project_authorizations.first }
+ let_it_be(:project_authorization_guest) { create(:project_authorization, :guest, project: project) }
+ let_it_be(:project_authorization_reporter) { create(:project_authorization, :reporter, project: project) }
+ let_it_be(:project_authorization_developer) { create(:project_authorization, :developer, project: project) }
+ let_it_be(:project_authorization_maintainer) { create(:project_authorization, :maintainer, project: project) }
+ let_it_be(:project_authorization_owner) { create(:project_authorization, :owner, project: project) }
+
+ it 'returns all records with an access level greater than Guest' do
+ expect(described_class.non_guests.map(&:attributes)).to match_array([
+ project_authorization_reporter, project_authorization_developer,
+ project_authorization_maintainer, project_authorization_owner,
+ project_original_owner_authorization
+ ].map(&:attributes))
+ end
+ end
+ end
+
describe '.insert_all' do
let_it_be(:user) { create(:user) }
let_it_be(:project_1) { create(:project) }
diff --git a/spec/models/project_authorizations/changes_spec.rb b/spec/models/project_authorizations/changes_spec.rb
index d0718153d16..5f4dd963fb3 100644
--- a/spec/models/project_authorizations/changes_spec.rb
+++ b/spec/models/project_authorizations/changes_spec.rb
@@ -85,7 +85,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
apply_project_authorization_changes
expect(user.project_authorizations.pluck(:user_id, :project_id,
- :access_level)).to match_array(authorizations_to_add.map(&:values))
+ :access_level, :is_unique)).to match_array(authorizations_to_add.map(&:values))
end
end
@@ -101,7 +101,13 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
apply_project_authorization_changes
expect(user.project_authorizations.pluck(:user_id, :project_id,
- :access_level)).to match_array(authorizations_to_add.map(&:values))
+ :access_level, :is_unique)).to match_array(authorizations_to_add.map(&:values))
+ end
+
+ it 'writes is_unique' do
+ apply_project_authorization_changes
+
+ expect(user.project_authorizations.pluck(:is_unique)).to all(be(true))
end
it_behaves_like 'logs the detail', batch_size: 2
diff --git a/spec/models/project_ci_cd_setting_spec.rb b/spec/models/project_ci_cd_setting_spec.rb
index 0a818147bfc..1c53f6eae52 100644
--- a/spec/models/project_ci_cd_setting_spec.rb
+++ b/spec/models/project_ci_cd_setting_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ProjectCiCdSetting do
+RSpec.describe ProjectCiCdSetting, feature_category: :continuous_integration do
using RSpec::Parameterized::TableSyntax
describe 'validations' do
diff --git a/spec/models/project_feature_spec.rb b/spec/models/project_feature_spec.rb
index 48c9567ebb3..39e77df1900 100644
--- a/spec/models/project_feature_spec.rb
+++ b/spec/models/project_feature_spec.rb
@@ -437,4 +437,24 @@ RSpec.describe ProjectFeature, feature_category: :groups_and_projects do
end
end
# rubocop:enable Gitlab/FeatureAvailableUsage
+
+ describe '#private?' do
+ where(:merge_requests_access_level, :expected_value) do
+ ProjectFeature::PUBLIC | false
+ ProjectFeature::ENABLED | false
+ ProjectFeature::PRIVATE | true
+ end
+
+ with_them do
+ let(:project) { build_stubbed(:project) }
+
+ subject { project.project_feature.private?(:merge_requests) }
+
+ before do
+ project.project_feature.merge_requests_access_level = merge_requests_access_level
+ end
+
+ it { is_expected.to be(expected_value) }
+ end
+ end
end
diff --git a/spec/models/project_import_state_spec.rb b/spec/models/project_import_state_spec.rb
index 7ceb4931c4f..10f4791b216 100644
--- a/spec/models/project_import_state_spec.rb
+++ b/spec/models/project_import_state_spec.rb
@@ -125,6 +125,14 @@ RSpec.describe ProjectImportState, type: :model, feature_category: :importers do
end
end
+ describe '#completed?' do
+ it { expect(described_class.new(status: :failed)).to be_completed }
+ it { expect(described_class.new(status: :finished)).to be_completed }
+ it { expect(described_class.new(status: :canceled)).to be_completed }
+ it { expect(described_class.new(status: :scheduled)).not_to be_completed }
+ it { expect(described_class.new(status: :started)).not_to be_completed }
+ end
+
describe '#expire_etag_cache' do
context 'when project import type has realtime changes endpoint' do
before do
diff --git a/spec/models/project_metrics_setting_spec.rb b/spec/models/project_metrics_setting_spec.rb
deleted file mode 100644
index 6639f9cb208..00000000000
--- a/spec/models/project_metrics_setting_spec.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ProjectMetricsSetting do
- describe 'Associations' do
- it { is_expected.to belong_to(:project) }
- end
-
- describe 'Validations' do
- context 'when external_dashboard_url is over 255 chars' do
- before do
- subject.external_dashboard_url = 'https://' + 'a' * 250
- end
-
- it 'fails validation' do
- expect(subject).not_to be_valid
- expect(subject.errors.messages[:external_dashboard_url])
- .to include('is too long (maximum is 255 characters)')
- end
- end
-
- context 'with unsafe url' do
- before do
- subject.external_dashboard_url = %{https://replaceme.com/'><script>alert(document.cookie)</script>}
- end
-
- it { is_expected.to be_invalid }
- end
-
- context 'non ascii chars in external_dashboard_url' do
- before do
- subject.external_dashboard_url = 'http://gitlab.com/api/0/projects/project1/something€'
- end
-
- it { is_expected.to be_invalid }
- end
-
- context 'internal url in external_dashboard_url' do
- before do
- subject.external_dashboard_url = 'http://192.168.1.1'
- end
-
- it { is_expected.to be_valid }
- end
-
- context 'dashboard_timezone' do
- it { is_expected.to define_enum_for(:dashboard_timezone).with_values({ local: 0, utc: 1 }) }
-
- it 'defaults to local' do
- expect(subject.dashboard_timezone).to eq('local')
- end
- end
- end
-
- describe '#dashboard_timezone=' do
- it 'downcases string' do
- subject.dashboard_timezone = 'UTC'
-
- expect(subject.dashboard_timezone).to eq('utc')
- end
- end
-end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 5d622b8eccd..aedfc7fca53 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -17,12 +17,14 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
it_behaves_like 'ensures runners_token is prefixed', :project
describe 'associations' do
+ it { is_expected.to belong_to(:organization) }
it { is_expected.to belong_to(:group) }
it { is_expected.to belong_to(:namespace) }
it { is_expected.to belong_to(:project_namespace).class_name('Namespaces::ProjectNamespace').with_foreign_key('project_namespace_id').inverse_of(:project) }
it { is_expected.to belong_to(:creator).class_name('User') }
it { is_expected.to belong_to(:pool_repository) }
it { is_expected.to have_many(:users) }
+ it { is_expected.to have_many(:maintainers).through(:project_members).source(:user).conditions(members: { access_level: Gitlab::Access::MAINTAINER }) }
it { is_expected.to have_many(:events) }
it { is_expected.to have_many(:merge_requests) }
it { is_expected.to have_many(:merge_request_metrics).class_name('MergeRequest::Metrics') }
@@ -139,11 +141,9 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
it { is_expected.to have_many(:sourced_pipelines) }
it { is_expected.to have_many(:source_pipelines) }
it { is_expected.to have_many(:prometheus_alert_events) }
- it { is_expected.to have_many(:self_managed_prometheus_alert_events) }
it { is_expected.to have_many(:alert_management_alerts) }
it { is_expected.to have_many(:alert_management_http_integrations) }
it { is_expected.to have_many(:jira_imports) }
- it { is_expected.to have_many(:metrics_users_starred_dashboards).inverse_of(:project) }
it { is_expected.to have_many(:repository_storage_moves) }
it { is_expected.to have_many(:reviews).inverse_of(:project) }
it { is_expected.to have_many(:packages).class_name('Packages::Package') }
@@ -152,6 +152,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
it { is_expected.to have_many(:debian_distributions).class_name('Packages::Debian::ProjectDistribution').dependent(:destroy) }
it { is_expected.to have_many(:npm_metadata_caches).class_name('Packages::Npm::MetadataCache') }
it { is_expected.to have_one(:packages_cleanup_policy).class_name('Packages::Cleanup::Policy').inverse_of(:project) }
+ it { is_expected.to have_many(:package_protection_rules).class_name('Packages::Protection::Rule').inverse_of(:project) }
it { is_expected.to have_many(:pipeline_artifacts).dependent(:restrict_with_error) }
it { is_expected.to have_many(:terraform_states).class_name('Terraform::State').inverse_of(:project) }
it { is_expected.to have_many(:timelogs) }
@@ -224,6 +225,23 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
expect(project.lfs_objects.to_a).to eql([lfs_object])
end
+ describe 'maintainers association' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:maintainer1) { create(:user) }
+ let_it_be(:maintainer2) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+
+ before do
+ project.add_maintainer(maintainer1)
+ project.add_maintainer(maintainer2)
+ project.add_reporter(reporter)
+ end
+
+ it 'returns only maintainers' do
+ expect(project.maintainers).to match_array([maintainer1, maintainer2])
+ end
+ end
+
context 'after initialized' do
it "has a project_feature" do
expect(described_class.new.project_feature).to be_present
@@ -1140,6 +1158,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
merge_pipelines_enabled
merge_trains_enabled
auto_rollback_enabled
+ merge_trains_skip_train_allowed
)
end
end
@@ -2380,7 +2399,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
end
- describe '#service_desk_address' do
+ describe '#service_desk_address', feature_category: :service_desk do
let_it_be(:project, reload: true) { create(:project, service_desk_enabled: true) }
subject { project.service_desk_address }
@@ -2424,7 +2443,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
context 'when project_key is set' do
- it 'returns custom address including the project_key' do
+ it 'returns Service Desk alias address including the project_key' do
create(:service_desk_setting, project: project, project_key: 'key1')
expect(subject).to eq("foo+#{project.full_path_slug}-key1@bar.com")
@@ -2432,11 +2451,35 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
context 'when project_key is not set' do
- it 'returns custom address including the project full path' do
+ it 'returns Service Desk alias address including the project full path' do
expect(subject).to eq("foo+#{project.full_path_slug}-#{project.project_id}-issue-@bar.com")
end
end
end
+
+ context 'when custom email is enabled' do
+ let(:custom_email) { 'support@example.com' }
+
+ before do
+ setting = ServiceDeskSetting.new(project: project, custom_email: custom_email, custom_email_enabled: true)
+ allow(project).to receive(:service_desk_setting).and_return(setting)
+ end
+
+ it 'returns custom email address' do
+ expect(subject).to eq(custom_email)
+ end
+
+ context 'when feature flag service_desk_custom_email is disabled' do
+ before do
+ stub_feature_flags(service_desk_custom_email: false)
+ end
+
+ it 'returns custom email address' do
+ # Don't check for a specific value. Just make sure it's not the custom email
+ expect(subject).not_to eq(custom_email)
+ end
+ end
+ end
end
describe '.with_service_desk_key' do
@@ -3882,16 +3925,6 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
end
- describe '#mark_primary_write_location' do
- let(:project) { create(:project) }
-
- it 'marks the location with project ID' do
- expect(ApplicationRecord.sticking).to receive(:mark_primary_write_location).with(:project, project.id)
-
- project.mark_primary_write_location
- end
- end
-
describe '#mark_stuck_remote_mirrors_as_failed!' do
it 'fails stuck remote mirrors' do
project = create(:project, :repository, :remote_mirror)
@@ -5086,7 +5119,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
subject { described_class.wrap_with_cte(projects) }
it 'wrapped query matches original' do
- expect(subject.to_sql).to match(/^WITH "projects_cte" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/)
+ expect(subject.to_sql).to match(/^WITH "projects_cte" AS MATERIALIZED/)
expect(subject).to match_array(projects)
end
end
@@ -6986,8 +7019,11 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
let_it_be_with_reload(:project) { create(:project, :empty_repo) }
let_it_be(:shard_to) { create(:shard, name: 'test_second_storage') }
- let!(:pool1) { create(:pool_repository, source_project: project) }
- let!(:pool2) { create(:pool_repository, shard: shard_to, source_project: project) }
+ let(:disk_path1) { '@pool/aa/bb' }
+ let(:disk_path2) { disk_path1 }
+
+ let!(:pool1) { create(:pool_repository, disk_path: disk_path1, source_project: project) }
+ let!(:pool2) { create(:pool_repository, disk_path: disk_path2, shard: shard_to, source_project: project) }
let(:project_pool) { pool1 }
let(:repository_storage) { shard_to.name }
@@ -7045,6 +7081,14 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
expect { swap_pool_repository! }.to raise_error(ActiveRecord::RecordNotFound)
end
end
+
+ context 'when pool repository has a different disk path' do
+ let(:disk_path2) { '@pool/different' }
+
+ it 'raises record not found error' do
+ expect { swap_pool_repository! }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
end
describe '#leave_pool_repository' do
@@ -7412,17 +7456,6 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
end
- describe '#pages_lookup_path' do
- let(:pages_domain) { build(:pages_domain) }
- let(:project) { build(:project) }
-
- it 'returns instance of Pages::LookupPath' do
- expect(Pages::LookupPath).to receive(:new).with(project, domain: pages_domain, trim_prefix: 'mygroup').and_call_original
-
- expect(project.pages_lookup_path(domain: pages_domain, trim_prefix: 'mygroup')).to be_a(Pages::LookupPath)
- end
- end
-
describe '.with_pages_deployed' do
it 'returns only projects that have pages deployed' do
_project_without_pages = create(:project)
@@ -8088,14 +8121,6 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
it { is_expected.not_to include(user) }
end
- describe "#metrics_setting" do
- let(:project) { build(:project) }
-
- it 'creates setting if it does not exist' do
- expect(project.metrics_setting).to be_an_instance_of(ProjectMetricsSetting)
- end
- end
-
describe '#enabled_group_deploy_keys' do
let_it_be(:project) { create(:project) }
@@ -9170,6 +9195,20 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
end
+ describe '#supports_lock_on_merge?' do
+ it_behaves_like 'checks self (project) and root ancestor feature flag' do
+ let(:feature_flag) { :enforce_locked_labels_on_merge }
+ let(:feature_flag_method) { :supports_lock_on_merge? }
+ end
+ end
+
+ context 'with loose foreign key on organization_id' do
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let_it_be(:parent) { create(:organization) }
+ let_it_be(:model) { create(:project, organization: parent) }
+ end
+ end
+
private
def finish_job(export_job)
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index ea229ddf31f..af7457c78e2 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -2113,6 +2113,17 @@ RSpec.describe Repository, feature_category: :source_code_management do
end
end
+ describe '#update_refs' do
+ let(:expected_return) { 'updated' }
+ let(:params) { double }
+
+ it 'calls the update_refs method on the raw repo with the same params' do
+ expect(repository.raw_repository).to receive(:update_refs).with(params).and_return('updated')
+
+ expect(repository.update_refs(params)).to eq(expected_return)
+ end
+ end
+
describe '#ff_merge' do
let(:target_branch) { 'ff-target' }
let(:merge_request) do
@@ -2424,7 +2435,6 @@ RSpec.describe Repository, feature_category: :source_code_management do
:has_visible_content?,
:issue_template_names_hash,
:merge_request_template_names_hash,
- :user_defined_metrics_dashboard_paths,
:xcode_project?,
:has_ambiguous_refs?
]
@@ -3822,9 +3832,24 @@ RSpec.describe Repository, feature_category: :source_code_management do
end
end
+ context 'when one of the params is nonexistent' do
+ it 'returns nil' do
+ expect(repository.get_patch_id('HEAD', "f" * 40)).to be_nil
+ end
+ end
+
context 'when two revisions are the same' do
- it 'raises an Gitlab::Git::CommandError error' do
- expect { repository.get_patch_id('HEAD', 'HEAD') }.to raise_error(Gitlab::Git::CommandError)
+ it 'returns nil' do
+ expect(repository.get_patch_id('HEAD', 'HEAD')).to be_nil
+ end
+ end
+
+ context 'when a Gitlab::Git::CommandError is raised' do
+ it 'returns nil' do
+ expect(repository.raw_repository)
+ .to receive(:get_patch_id).and_raise(Gitlab::Git::CommandError)
+
+ expect(repository.get_patch_id('HEAD', "f" * 40)).to be_nil
end
end
end
diff --git a/spec/models/resource_label_event_spec.rb b/spec/models/resource_label_event_spec.rb
index eb28010d57f..8cc89578e0e 100644
--- a/spec/models/resource_label_event_spec.rb
+++ b/spec/models/resource_label_event_spec.rb
@@ -52,24 +52,17 @@ RSpec.describe ResourceLabelEvent, feature_category: :team_planning, type: :mode
end
context 'callbacks' do
- describe '#expire_etag_cache' do
- def expect_expiration(issue)
- expect_next_instance_of(Gitlab::EtagCaching::Store) do |instance|
- expect(instance).to receive(:touch)
- .with("/#{issue.project.namespace.to_param}/#{issue.project.to_param}/noteable/issue/#{issue.id}/notes")
- end
- end
-
- it 'expires resource note etag cache on event save' do
- expect_expiration(subject.issuable)
+ describe '#broadcast_notes_changed' do
+ it 'broadcasts note change on event save' do
+ expect(subject.issuable).to receive(:broadcast_notes_changed)
subject.save!
end
- it 'expires resource note etag cache on event destroy' do
+ it 'broadcasts note change on event destroy' do
subject.save!
- expect_expiration(subject.issuable)
+ expect(subject.issuable).to receive(:broadcast_notes_changed)
subject.destroy!
end
diff --git a/spec/models/resource_state_event_spec.rb b/spec/models/resource_state_event_spec.rb
index de101107268..5bd8b664d23 100644
--- a/spec/models/resource_state_event_spec.rb
+++ b/spec/models/resource_state_event_spec.rb
@@ -58,11 +58,13 @@ RSpec.describe ResourceStateEvent, feature_category: :team_planning, type: :mode
close_issue
end
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_CLOSED }
+ it_behaves_like 'internal event tracking' do
+ subject(:service_action) { close_issue }
+
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_CLOSED }
let(:project) { issue.project }
+ let(:namespace) { issue.project.namespace }
let(:user) { issue.author }
- subject(:service_action) { close_issue }
end
end
@@ -81,11 +83,13 @@ RSpec.describe ResourceStateEvent, feature_category: :team_planning, type: :mode
reopen_issue
end
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_REOPENED }
+ it_behaves_like 'internal event tracking' do
+ subject(:service_action) { reopen_issue }
+
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_REOPENED }
let(:project) { issue.project }
let(:user) { issue.author }
- subject(:service_action) { reopen_issue }
+ let(:namespace) { issue.project.namespace }
end
end
diff --git a/spec/models/review_spec.rb b/spec/models/review_spec.rb
index 2683dc93a4b..75cdea5bca7 100644
--- a/spec/models/review_spec.rb
+++ b/spec/models/review_spec.rb
@@ -41,4 +41,23 @@ RSpec.describe Review do
expect(review.participants).to include(review.author)
end
end
+
+ describe '#from_merge_request_author?' do
+ let(:merge_request) { build_stubbed(:merge_request) }
+ let(:review) { build_stubbed(:review, merge_request: merge_request, author: author) }
+
+ subject(:from_merge_request_author?) { review.from_merge_request_author? }
+
+ context 'when review author is the merge request author' do
+ let(:author) { merge_request.author }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when review author is not the merge request author' do
+ let(:author) { build_stubbed(:user) }
+
+ it { is_expected.to eq(false) }
+ end
+ end
end
diff --git a/spec/models/route_spec.rb b/spec/models/route_spec.rb
index 0bdaa4994e5..aa5fc231e14 100644
--- a/spec/models/route_spec.rb
+++ b/spec/models/route_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Route do
+ include LooseForeignKeysHelper
+
let(:group) { create(:group, path: 'git_lab', name: 'git_lab') }
let(:route) { group.route }
@@ -285,6 +287,7 @@ RSpec.describe Route do
expect do
Group.delete(conflicting_group) # delete group with conflicting route
+ process_loose_foreign_key_deletions(record: conflicting_group)
end.to change { described_class.count }.by(-1)
# check the conflicting route is gone
@@ -305,4 +308,11 @@ RSpec.describe Route do
end
end
end
+
+ context 'with loose foreign key on routes.namespace_id' do
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let!(:parent) { create(:namespace) }
+ let!(:model) { parent.route }
+ end
+ end
end
diff --git a/spec/models/snippet_repository_spec.rb b/spec/models/snippet_repository_spec.rb
index c2fbede8ea9..8048f255272 100644
--- a/spec/models/snippet_repository_spec.rb
+++ b/spec/models/snippet_repository_spec.rb
@@ -293,7 +293,7 @@ RSpec.describe SnippetRepository do
it_behaves_like 'snippet repository with git errors', 'README', described_class::CommitError
context 'when user name is invalid' do
- let(:user) { create(:user, name: '.') }
+ let(:user) { create(:user, name: ',') }
it_behaves_like 'snippet repository with git errors', 'non_existing_file', described_class::InvalidSignatureError
end
diff --git a/spec/models/user_custom_attribute_spec.rb b/spec/models/user_custom_attribute_spec.rb
index 7d3806fcdfa..4c27e8d8944 100644
--- a/spec/models/user_custom_attribute_spec.rb
+++ b/spec/models/user_custom_attribute_spec.rb
@@ -49,8 +49,8 @@ RSpec.describe UserCustomAttribute, feature_category: :user_profile do
it 'adds the abuse report ID to user custom attributes' do
subject
- custom_attribute = user.custom_attributes.by_key(UserCustomAttribute::AUTO_BANNED_BY_ABUSE_REPORT_ID).first
- expect(custom_attribute.value).to eq(abuse_report.id.to_s)
+ custom_attribute = user.custom_attributes.by_key(UserCustomAttribute::AUTO_BANNED_BY_ABUSE_REPORT_ID)
+ expect(custom_attribute.map(&:value)).to match([abuse_report.id.to_s])
end
context 'when abuse report is nil' do
@@ -65,6 +65,31 @@ RSpec.describe UserCustomAttribute, feature_category: :user_profile do
end
end
+ describe '.set_banned_by_spam_log' do
+ let_it_be(:user) { create(:user) }
+ let(:spam_log) { create(:spam_log, user: user) }
+
+ subject { described_class.set_banned_by_spam_log(spam_log) }
+
+ it 'adds the spam log ID to user custom attributes' do
+ subject
+
+ custom_attribute = user.custom_attributes.by_key(UserCustomAttribute::AUTO_BANNED_BY_SPAM_LOG_ID)
+ expect(custom_attribute.map(&:value)).to match([spam_log.id.to_s])
+ end
+
+ context 'when the spam log is nil' do
+ let(:spam_log) { nil }
+
+ it 'does not update custom attributes' do
+ subject
+
+ custom_attribute = user.custom_attributes.by_key(UserCustomAttribute::AUTO_BANNED_BY_SPAM_LOG_ID).first
+ expect(custom_attribute).to be_nil
+ end
+ end
+ end
+
describe '#upsert_custom_attributes' do
subject { described_class.upsert_custom_attributes(custom_attributes) }
diff --git a/spec/models/user_preference_spec.rb b/spec/models/user_preference_spec.rb
index 729635b5a27..401a85e2f82 100644
--- a/spec/models/user_preference_spec.rb
+++ b/spec/models/user_preference_spec.rb
@@ -238,6 +238,20 @@ RSpec.describe UserPreference, feature_category: :user_profile do
end
end
+ describe '#keyboard_shortcuts_enabled' do
+ it 'is set to true by default' do
+ pref = described_class.new
+
+ expect(pref.keyboard_shortcuts_enabled).to eq(true)
+ end
+
+ it 'returns assigned value' do
+ pref = described_class.new(keyboard_shortcuts_enabled: false)
+
+ expect(pref.keyboard_shortcuts_enabled).to eq(false)
+ end
+ end
+
describe '#render_whitespace_in_code' do
it 'is set to false by default' do
pref = described_class.new
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 788600194a5..c611c3c26e3 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -65,6 +65,9 @@ RSpec.describe User, feature_category: :user_profile do
it { is_expected.to delegate_method(:project_shortcut_buttons).to(:user_preference) }
it { is_expected.to delegate_method(:project_shortcut_buttons=).to(:user_preference).with_arguments(:args) }
+ it { is_expected.to delegate_method(:keyboard_shortcuts_enabled).to(:user_preference) }
+ it { is_expected.to delegate_method(:keyboard_shortcuts_enabled=).to(:user_preference).with_arguments(:args) }
+
it { is_expected.to delegate_method(:render_whitespace_in_code).to(:user_preference) }
it { is_expected.to delegate_method(:render_whitespace_in_code=).to(:user_preference).with_arguments(:args) }
@@ -168,7 +171,6 @@ RSpec.describe User, feature_category: :user_profile do
it { is_expected.to have_many(:abuse_events).class_name('Abuse::Event').inverse_of(:user) }
it { is_expected.to have_many(:custom_attributes).class_name('UserCustomAttribute') }
it { is_expected.to have_many(:releases).dependent(:nullify) }
- it { is_expected.to have_many(:metrics_users_starred_dashboards).inverse_of(:user) }
it { is_expected.to have_many(:reviews).inverse_of(:author) }
it { is_expected.to have_many(:merge_request_assignees).inverse_of(:assignee) }
it { is_expected.to have_many(:merge_request_reviewers).inverse_of(:reviewer) }
@@ -2093,6 +2095,7 @@ RSpec.describe User, feature_category: :user_profile do
it 'uses SecureRandom to generate the incoming email token' do
allow_next_instance_of(User) do |user|
allow(user).to receive(:update_highest_role)
+ allow(user).to receive(:associate_with_enterprise_group)
end
allow_next_instance_of(Namespaces::UserNamespace) do |namespace|
@@ -2906,7 +2909,7 @@ RSpec.describe User, feature_category: :user_profile do
it 'applies defaults to user' do
expect(user.projects_limit).to eq(123)
expect(user.can_create_group).to be_falsey
- expect(user.theme_id).to eq(1)
+ expect(user.theme_id).to eq(3)
end
it 'does not undo projects_limit setting if it matches old DB default of 10' do
@@ -5343,56 +5346,6 @@ RSpec.describe User, feature_category: :user_profile do
end
end
- describe '.ghost' do
- it "creates a ghost user if one isn't already present" do
- ghost = described_class.ghost
-
- expect(ghost).to be_ghost
- expect(ghost).to be_persisted
- expect(ghost.namespace).not_to be_nil
- expect(ghost.namespace).to be_persisted
- expect(ghost.user_type).to eq 'ghost'
- end
-
- it 'does not create a second ghost user if one is already present' do
- expect do
- described_class.ghost
- described_class.ghost
- end.to change { described_class.count }.by(1)
- expect(described_class.ghost).to eq(described_class.ghost)
- end
-
- context "when a regular user exists with the username 'ghost'" do
- it 'creates a ghost user with a non-conflicting username' do
- create(:user, username: 'ghost')
- ghost = described_class.ghost
-
- expect(ghost).to be_persisted
- expect(ghost.username).to eq('ghost1')
- end
- end
-
- context "when a regular user exists with the email 'ghost@example.com'" do
- it 'creates a ghost user with a non-conflicting email' do
- create(:user, email: 'ghost@example.com')
- ghost = described_class.ghost
-
- expect(ghost).to be_persisted
- expect(ghost.email).to eq('ghost1@example.com')
- end
- end
-
- context 'when a domain allowlist is in place' do
- before do
- stub_application_setting(domain_allowlist: ['gitlab.com'])
- end
-
- it 'creates a ghost user' do
- expect(described_class.ghost).to be_persisted
- end
- end
- end
-
describe '#update_two_factor_requirement' do
let(:user) { create :user }
@@ -6088,7 +6041,7 @@ RSpec.describe User, feature_category: :user_profile do
it 'creates an abuse report with the correct data' do
expect { subject }.to change { AbuseReport.count }.from(0).to(1)
expect(AbuseReport.last.attributes).to include({
- reporter_id: described_class.security_bot.id,
+ reporter_id: Users::Internal.security_bot.id,
user_id: user.id,
category: "spam",
message: 'Potential spammer account deletion'
@@ -6109,7 +6062,9 @@ RSpec.describe User, feature_category: :user_profile do
end
context 'when there is an existing abuse report' do
- let!(:abuse_report) { create(:abuse_report, user: user, reporter: described_class.security_bot, message: 'Existing') }
+ let!(:abuse_report) do
+ create(:abuse_report, user: user, reporter: Users::Internal.security_bot, message: 'Existing')
+ end
it 'updates the abuse report' do
subject
@@ -7535,66 +7490,6 @@ RSpec.describe User, feature_category: :user_profile do
end
end
- context 'bot users' do
- shared_examples 'bot users' do |bot_type|
- it 'creates the user if it does not exist' do
- expect do
- described_class.public_send(bot_type)
- end.to change { User.where(user_type: bot_type).count }.by(1)
- end
-
- it 'creates a route for the namespace of the created user' do
- bot_user = described_class.public_send(bot_type)
-
- expect(bot_user.namespace.route).to be_present
- end
-
- it 'does not create a new user if it already exists' do
- described_class.public_send(bot_type)
-
- expect do
- described_class.public_send(bot_type)
- end.not_to change { User.count }
- end
- end
-
- shared_examples 'bot user avatars' do |bot_type, avatar_filename|
- it 'sets the custom avatar for the created bot' do
- bot_user = described_class.public_send(bot_type)
-
- expect(bot_user.avatar.url).to be_present
- expect(bot_user.avatar.filename).to eq(avatar_filename)
- end
- end
-
- it_behaves_like 'bot users', :alert_bot
- it_behaves_like 'bot users', :support_bot
- it_behaves_like 'bot users', :migration_bot
- it_behaves_like 'bot users', :security_bot
- it_behaves_like 'bot users', :ghost
- it_behaves_like 'bot users', :automation_bot
- it_behaves_like 'bot users', :admin_bot
-
- it_behaves_like 'bot user avatars', :alert_bot, 'alert-bot.png'
- it_behaves_like 'bot user avatars', :support_bot, 'support-bot.png'
- it_behaves_like 'bot user avatars', :security_bot, 'security-bot.png'
- it_behaves_like 'bot user avatars', :automation_bot, 'support-bot.png'
- it_behaves_like 'bot user avatars', :admin_bot, 'admin-bot.png'
-
- context 'when bot is the support_bot' do
- subject { described_class.support_bot }
-
- it { is_expected.to be_confirmed }
- end
-
- context 'when bot is the admin bot' do
- subject { described_class.admin_bot }
-
- it { is_expected.to be_admin }
- it { is_expected.to be_confirmed }
- end
- end
-
describe '#confirmation_required_on_sign_in?' do
subject { user.confirmation_required_on_sign_in? }
@@ -7868,7 +7763,7 @@ RSpec.describe User, feature_category: :user_profile do
let_it_be(:external_user) { create(:user, :external) }
let_it_be(:unconfirmed_user) { create(:user, confirmed_at: nil) }
let_it_be(:omniauth_user) { create(:omniauth_user, provider: 'twitter', extern_uid: '123456') }
- let_it_be(:internal_user) { User.alert_bot.tap { |u| u.confirm } }
+ let_it_be(:internal_user) { Users::Internal.alert_bot.tap { |u| u.confirm } }
it 'does not return blocked or banned users' do
expect(described_class.without_forbidden_states).to match_array(
diff --git a/spec/models/users/credit_card_validation_spec.rb b/spec/models/users/credit_card_validation_spec.rb
index 4db3683c057..486d1c6d3ea 100644
--- a/spec/models/users/credit_card_validation_spec.rb
+++ b/spec/models/users/credit_card_validation_spec.rb
@@ -2,14 +2,19 @@
require 'spec_helper'
-RSpec.describe Users::CreditCardValidation do
+RSpec.describe Users::CreditCardValidation, feature_category: :user_profile do
it { is_expected.to belong_to(:user) }
it { is_expected.to validate_length_of(:holder_name).is_at_most(50) }
it { is_expected.to validate_length_of(:network).is_at_most(32) }
it { is_expected.to validate_numericality_of(:last_digits).is_less_than_or_equal_to(9999) }
- describe '.similar_records' do
+ it { is_expected.to validate_length_of(:last_digits_hash).is_at_most(44) }
+ it { is_expected.to validate_length_of(:holder_name_hash).is_at_most(44) }
+ it { is_expected.to validate_length_of(:expiration_date_hash).is_at_most(44) }
+ it { is_expected.to validate_length_of(:network_hash).is_at_most(44) }
+
+ describe '#similar_records' do
let(:card_details) do
subject.attributes.with_indifferent_access.slice(:expiration_date, :last_digits, :network, :holder_name)
end
@@ -53,6 +58,22 @@ RSpec.describe Users::CreditCardValidation do
end
describe 'scopes' do
+ describe '.find_or_initialize_by_user' do
+ subject(:find_or_initialize_by_user) { described_class.find_or_initialize_by_user(user.id) }
+
+ let_it_be(:user) { create(:user) }
+
+ context 'with no existing credit card record' do
+ it { is_expected.to be_a_new_record }
+ end
+
+ context 'with existing credit card record' do
+ let_it_be(:credit_card_validation) { create(:credit_card_validation, user: user) }
+
+ it { is_expected.to eq(credit_card_validation) }
+ end
+ end
+
describe '.by_banned_user' do
let(:banned_user) { create(:banned_user) }
let!(:credit_card) { create(:credit_card_validation) }
@@ -154,4 +175,122 @@ RSpec.describe Users::CreditCardValidation do
it { is_expected.not_to be_used_by_banned_user }
end
end
+
+ describe 'before_save' do
+ describe '#set_last_digits_hash' do
+ let(:credit_card_validation) { build(:credit_card_validation, last_digits: last_digits) }
+
+ subject(:save_credit_card_validation) { credit_card_validation.save! }
+
+ context 'when last_digits are nil' do
+ let(:last_digits) { nil }
+
+ it { expect { save_credit_card_validation }.not_to change { credit_card_validation.last_digits_hash } }
+ end
+
+ context 'when last_digits has a blank value' do
+ let(:last_digits) { ' ' }
+
+ it { expect { save_credit_card_validation }.not_to change { credit_card_validation.last_digits_hash } }
+ end
+
+ context 'when last_digits has a value' do
+ let(:last_digits) { 1111 }
+ let(:expected_last_digits_hash) { Gitlab::CryptoHelper.sha256(last_digits) }
+
+ it 'assigns correct last_digits_hash value' do
+ expect { save_credit_card_validation }.to change {
+ credit_card_validation.last_digits_hash
+ }.from(nil).to(expected_last_digits_hash)
+ end
+ end
+ end
+
+ describe '#set_holder_name_hash' do
+ let(:credit_card_validation) { build(:credit_card_validation, holder_name: holder_name) }
+
+ subject(:save_credit_card_validation) { credit_card_validation.save! }
+
+ context 'when holder_name is nil' do
+ let(:holder_name) { nil }
+
+ it { expect { save_credit_card_validation }.not_to change { credit_card_validation.holder_name_hash } }
+ end
+
+ context 'when holder_name has a blank value' do
+ let(:holder_name) { ' ' }
+
+ it { expect { save_credit_card_validation }.not_to change { credit_card_validation.holder_name_hash } }
+ end
+
+ context 'when holder_name has a value' do
+ let(:holder_name) { 'John Smith' }
+ let(:expected_holder_name_hash) { Gitlab::CryptoHelper.sha256(holder_name.downcase) }
+
+ it 'lowercases holder_name and assigns correct holder_name_hash value' do
+ expect { save_credit_card_validation }.to change {
+ credit_card_validation.holder_name_hash
+ }.from(nil).to(expected_holder_name_hash)
+ end
+ end
+ end
+
+ describe '#set_network_hash' do
+ let(:credit_card_validation) { build(:credit_card_validation, network: network) }
+
+ subject(:save_credit_card_validation) { credit_card_validation.save! }
+
+ context 'when network is nil' do
+ let(:network) { nil }
+
+ it { expect { save_credit_card_validation }.not_to change { credit_card_validation.network_hash } }
+ end
+
+ context 'when network has a blank value' do
+ let(:network) { ' ' }
+
+ it { expect { save_credit_card_validation }.not_to change { credit_card_validation.network_hash } }
+ end
+
+ context 'when network has a value' do
+ let(:network) { 'Visa' }
+ let(:expected_network_hash) { Gitlab::CryptoHelper.sha256(network.downcase) }
+
+ it 'lowercases network and assigns correct network_hash value' do
+ expect { save_credit_card_validation }.to change {
+ credit_card_validation.network_hash
+ }.from(nil).to(expected_network_hash)
+ end
+ end
+ end
+
+ describe '#set_expiration_date_hash' do
+ let(:credit_card_validation) { build(:credit_card_validation, expiration_date: expiration_date) }
+
+ subject(:save_credit_card_validation) { credit_card_validation.save! }
+
+ context 'when expiration_date is nil' do
+ let(:expiration_date) { nil }
+
+ it { expect { save_credit_card_validation }.not_to change { credit_card_validation.expiration_date_hash } }
+ end
+
+ context 'when expiration_date has a blank value' do
+ let(:expiration_date) { ' ' }
+
+ it { expect { save_credit_card_validation }.not_to change { credit_card_validation.expiration_date_hash } }
+ end
+
+ context 'when expiration_date has a value' do
+ let(:expiration_date) { 1.year.from_now.to_date }
+ let(:expected_expiration_date_hash) { Gitlab::CryptoHelper.sha256(expiration_date.to_s) }
+
+ it 'assigns correct expiration_date_hash value' do
+ expect { save_credit_card_validation }.to change {
+ credit_card_validation.expiration_date_hash
+ }.from(nil).to(expected_expiration_date_hash)
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/users/group_visit_spec.rb b/spec/models/users/group_visit_spec.rb
new file mode 100644
index 00000000000..63c4631ad7d
--- /dev/null
+++ b/spec/models/users/group_visit_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::GroupVisit, feature_category: :navigation do
+ let_it_be(:entity) { create(:group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:base_time) { DateTime.now }
+
+ before do
+ described_class.create!(entity_id: entity.id, user_id: user.id, visited_at: base_time)
+ end
+
+ it_behaves_like 'namespace visits model'
+
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let!(:model) { create(:group_visit, entity_id: entity.id, user_id: user.id, visited_at: base_time) }
+ let!(:parent) { entity }
+ end
+
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let!(:model) { create(:group_visit, entity_id: entity.id, user_id: user.id, visited_at: base_time) }
+ let!(:parent) { user }
+ end
+end
diff --git a/spec/models/users/project_visit_spec.rb b/spec/models/users/project_visit_spec.rb
new file mode 100644
index 00000000000..38747bd6462
--- /dev/null
+++ b/spec/models/users/project_visit_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::ProjectVisit, feature_category: :navigation do
+ let_it_be(:entity) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:base_time) { DateTime.now }
+
+ before do
+ described_class.create!(entity_id: entity.id, user_id: user.id, visited_at: base_time)
+ end
+
+ it_behaves_like 'namespace visits model'
+
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let!(:model) { create(:project_visit, entity_id: entity.id, user_id: user.id, visited_at: base_time) }
+ let!(:parent) { entity }
+ end
+
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let!(:model) { create(:project_visit, entity_id: entity.id, user_id: user.id, visited_at: base_time) }
+ let!(:parent) { user }
+ end
+end
diff --git a/spec/models/work_item_spec.rb b/spec/models/work_item_spec.rb
index 541199e08cb..4b675faf99e 100644
--- a/spec/models/work_item_spec.rb
+++ b/spec/models/work_item_spec.rb
@@ -165,7 +165,8 @@ RSpec.describe WorkItem, feature_category: :portfolio_management do
subject { work_item.supported_quick_action_commands }
it 'returns quick action commands supported for all work items' do
- is_expected.to include(:title, :reopen, :close, :cc, :tableflip, :shrug, :type, :promote_to)
+ is_expected.to include(:title, :reopen, :close, :cc, :tableflip, :shrug, :type, :promote_to, :checkin_reminder,
+ :subscribe, :unsubscribe, :confidential, :award)
end
context 'when work item supports the assignee widget' do
@@ -635,4 +636,82 @@ RSpec.describe WorkItem, feature_category: :portfolio_management do
end
end
end
+
+ describe '#linked_work_items' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:authorized_project) { create(:project) }
+ let_it_be(:authorized_project2) { create(:project) }
+ let_it_be(:unauthorized_project) { create(:project, :private) }
+
+ let_it_be(:authorized_item_a) { create(:work_item, project: authorized_project) }
+ let_it_be(:authorized_item_b) { create(:work_item, project: authorized_project) }
+ let_it_be(:authorized_item_c) { create(:work_item, project: authorized_project2) }
+ let_it_be(:unauthorized_item) { create(:work_item, project: unauthorized_project) }
+
+ let_it_be(:work_item_link_a) { create(:work_item_link, source: authorized_item_a, target: authorized_item_b) }
+ let_it_be(:work_item_link_b) { create(:work_item_link, source: authorized_item_a, target: unauthorized_item) }
+ let_it_be(:work_item_link_c) { create(:work_item_link, source: authorized_item_a, target: authorized_item_c) }
+
+ before_all do
+ authorized_project.add_guest(user)
+ authorized_project2.add_guest(user)
+ end
+
+ it 'returns only authorized linked work items for given user' do
+ expect(authorized_item_a.linked_work_items(user)).to contain_exactly(authorized_item_b, authorized_item_c)
+ end
+
+ it 'returns work items with valid work_item_link_type' do
+ link_types = authorized_item_a.linked_work_items(user).map(&:issue_link_type)
+
+ expect(link_types).not_to be_empty
+ expect(link_types).not_to include(nil)
+ end
+
+ it 'returns work items including the link creation time' do
+ dates = authorized_item_a.linked_work_items(user).map(&:issue_link_created_at)
+
+ expect(dates).not_to be_empty
+ expect(dates).not_to include(nil)
+ end
+
+ it 'returns work items including the link update time' do
+ dates = authorized_item_a.linked_work_items(user).map(&:issue_link_updated_at)
+
+ expect(dates).not_to be_empty
+ expect(dates).not_to include(nil)
+ end
+
+ context 'when a user cannot read cross project' do
+ it 'only returns work items within the same project' do
+ allow(Ability).to receive(:allowed?).with(user, :read_all_resources, :global).and_call_original
+ expect(Ability).to receive(:allowed?).with(user, :read_cross_project).and_return(false)
+
+ expect(authorized_item_a.linked_work_items(user)).to contain_exactly(authorized_item_b)
+ end
+ end
+
+ context 'when filtering by link type' do
+ before do
+ work_item_link_c.update!(link_type: 'blocks')
+ end
+
+ it 'returns authorized work items with given link type' do
+ expect(authorized_item_a.linked_work_items(user, link_type: 'relates_to')).to contain_exactly(authorized_item_b)
+ end
+ end
+
+ context 'when authorize option is true and current_user is nil' do
+ it 'returns empty result' do
+ expect(authorized_item_a.linked_work_items).to be_empty
+ end
+ end
+
+ context 'when authorize option is false' do
+ it 'returns all work items linked to the work item' do
+ expect(authorized_item_a.linked_work_items(authorize: false))
+ .to contain_exactly(authorized_item_b, authorized_item_c, unauthorized_item)
+ end
+ end
+ end
end
diff --git a/spec/models/work_items/related_work_item_link_spec.rb b/spec/models/work_items/related_work_item_link_spec.rb
index 349e4c0ba49..3217ac52489 100644
--- a/spec/models/work_items/related_work_item_link_spec.rb
+++ b/spec/models/work_items/related_work_item_link_spec.rb
@@ -21,6 +21,46 @@ RSpec.describe WorkItems::RelatedWorkItemLink, type: :model, feature_category: :
let_it_be(:item_type) { described_class.issuable_name }
end
+ describe 'validations' do
+ let_it_be(:task1) { create(:work_item, :task, project: project) }
+ let_it_be(:task2) { create(:work_item, :task, project: project) }
+ let_it_be(:task3) { create(:work_item, :task, project: project) }
+
+ subject(:link) { build(:work_item_link, source_id: task1.id, target_id: task2.id) }
+
+ describe '#validate_max_number_of_links' do
+ shared_examples 'invalid due to exceeding max number of links' do
+ let(:error_msg) { 'This work item would exceed the maximum number of linked items.' }
+
+ before do
+ create(:work_item_link, source: source, target: target)
+ stub_const("#{described_class}::MAX_LINKS_COUNT", 1)
+ end
+
+ specify do
+ is_expected.to be_invalid
+ expect(link.errors.messages[error_item]).to include(error_msg)
+ end
+ end
+
+ context 'when source exceeds max' do
+ let(:source) { task1 }
+ let(:target) { task3 }
+ let(:error_item) { :source }
+
+ it_behaves_like 'invalid due to exceeding max number of links'
+ end
+
+ context 'when target exceeds max' do
+ let(:source) { task2 }
+ let(:target) { task3 }
+ let(:error_item) { :target }
+
+ it_behaves_like 'invalid due to exceeding max number of links'
+ end
+ end
+ end
+
describe '.issuable_type' do
it { expect(described_class.issuable_type).to eq(:issue) }
end
diff --git a/spec/models/work_items/widgets/description_spec.rb b/spec/models/work_items/widgets/description_spec.rb
index c24dc9cfb9c..a089cc32ecd 100644
--- a/spec/models/work_items/widgets/description_spec.rb
+++ b/spec/models/work_items/widgets/description_spec.rb
@@ -51,7 +51,7 @@ RSpec.describe WorkItems::Widgets::Description do
work_item.update!(last_edited_by: nil)
end
- it { is_expected.to eq(User.ghost) }
+ it { is_expected.to eq(Users::Internal.ghost) }
end
end
diff --git a/spec/models/work_items/widgets/linked_items_spec.rb b/spec/models/work_items/widgets/linked_items_spec.rb
index b4a53b75561..fb3001b286b 100644
--- a/spec/models/work_items/widgets/linked_items_spec.rb
+++ b/spec/models/work_items/widgets/linked_items_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe WorkItems::Widgets::LinkedItems, feature_category: :portfolio_man
it { is_expected.to eq(:linked_items) }
end
- describe '#related_issues' do
- it { expect(described_class.new(work_item).related_issues(user)).to eq(work_item.related_issues(user)) }
+ describe '#linked_work_items' do
+ it { expect(described_class.new(work_item).linked_work_items(user)).to eq(work_item.linked_work_items(user)) }
end
end
diff --git a/spec/models/x509_certificate_spec.rb b/spec/models/x509_certificate_spec.rb
index 5723bd80739..a550b2caa44 100644
--- a/spec/models/x509_certificate_spec.rb
+++ b/spec/models/x509_certificate_spec.rb
@@ -5,7 +5,6 @@ require 'spec_helper'
RSpec.describe X509Certificate do
describe 'validation' do
it { is_expected.to validate_presence_of(:subject_key_identifier) }
- it { is_expected.to validate_presence_of(:subject) }
it { is_expected.to validate_presence_of(:email) }
it { is_expected.to validate_presence_of(:serial_number) }
it { is_expected.to validate_presence_of(:x509_issuer_id) }
diff --git a/spec/models/x509_issuer_spec.rb b/spec/models/x509_issuer_spec.rb
index 3d04adf7e26..31470a443a2 100644
--- a/spec/models/x509_issuer_spec.rb
+++ b/spec/models/x509_issuer_spec.rb
@@ -5,8 +5,6 @@ require 'spec_helper'
RSpec.describe X509Issuer do
describe 'validation' do
it { is_expected.to validate_presence_of(:subject_key_identifier) }
- it { is_expected.to validate_presence_of(:subject) }
- it { is_expected.to validate_presence_of(:crl_url) }
end
describe '.safe_create!' do
diff --git a/spec/policies/ci/bridge_policy_spec.rb b/spec/policies/ci/bridge_policy_spec.rb
index d23355b4c1e..29eb4d94c03 100644
--- a/spec/policies/ci/bridge_policy_spec.rb
+++ b/spec/policies/ci/bridge_policy_spec.rb
@@ -16,22 +16,34 @@ RSpec.describe Ci::BridgePolicy do
it_behaves_like 'a deployable job policy', :ci_bridge
describe '#play_job' do
- before do
- fake_access = double('Gitlab::UserAccess')
- expect(fake_access).to receive(:can_update_branch?).with('master').and_return(can_update_branch)
- expect(Gitlab::UserAccess).to receive(:new).with(user, container: downstream_project).and_return(fake_access)
- end
+ context 'when downstream project exists' do
+ before do
+ fake_access = double('Gitlab::UserAccess')
+ expect(fake_access).to receive(:can_update_branch?).with('master').and_return(can_update_branch)
+ expect(Gitlab::UserAccess).to receive(:new).with(user, container: downstream_project).and_return(fake_access)
+ end
+
+ context 'when user can update the downstream branch' do
+ let(:can_update_branch) { true }
- context 'when user can update the downstream branch' do
- let(:can_update_branch) { true }
+ it 'allows' do
+ expect(policy).to be_allowed :play_job
+ end
+ end
+
+ context 'when user can not update the downstream branch' do
+ let(:can_update_branch) { false }
- it 'allows' do
- expect(policy).to be_allowed :play_job
+ it 'does not allow' do
+ expect(policy).not_to be_allowed :play_job
+ end
end
end
- context 'when user can not update the downstream branch' do
- let(:can_update_branch) { false }
+ context 'when downstream project does not exist' do
+ before do
+ bridge.update!(options: { trigger: { project: 'deleted-project' } })
+ end
it 'does not allow' do
expect(policy).not_to be_allowed :play_job
diff --git a/spec/policies/ci/pipeline_policy_spec.rb b/spec/policies/ci/pipeline_policy_spec.rb
index 8a5b80e3051..e74bf8f7efa 100644
--- a/spec/policies/ci/pipeline_policy_spec.rb
+++ b/spec/policies/ci/pipeline_policy_spec.rb
@@ -142,5 +142,30 @@ RSpec.describe Ci::PipelinePolicy, :models do
end
end
end
+
+ describe 'read_dependency' do
+ let(:project) { create(:project, :repository) }
+
+ before do
+ project.add_developer(user)
+ allow(policy).to receive(:can?).with(:read_dependency, project).and_return(can_read_project_dependencies)
+ end
+
+ context 'when user is allowed to read project dependencies' do
+ let(:can_read_project_dependencies) { true }
+
+ it 'is enabled' do
+ expect(policy).to be_allowed :read_dependency
+ end
+ end
+
+ context 'when user is not allowed to read project dependencies' do
+ let(:can_read_project_dependencies) { false }
+
+ it 'is disabled' do
+ expect(policy).not_to be_allowed :read_dependency
+ end
+ end
+ end
end
end
diff --git a/spec/policies/global_policy_spec.rb b/spec/policies/global_policy_spec.rb
index 4fafe392aac..475e8f981dd 100644
--- a/spec/policies/global_policy_spec.rb
+++ b/spec/policies/global_policy_spec.rb
@@ -509,7 +509,7 @@ RSpec.describe GlobalPolicy, feature_category: :shared do
end
context 'when internal' do
- let(:current_user) { User.ghost }
+ let(:current_user) { Users::Internal.ghost }
it { is_expected.to be_disallowed(:use_slash_commands) }
end
@@ -695,4 +695,18 @@ RSpec.describe GlobalPolicy, feature_category: :shared do
it { is_expected.to be_disallowed(:create_instance_runner) }
end
end
+
+ describe 'create_organization' do
+ context 'with regular user' do
+ let(:current_user) { user }
+
+ it { is_expected.to be_allowed(:create_organization) }
+ end
+
+ context 'with anonymous' do
+ let(:current_user) { nil }
+
+ it { is_expected.to be_disallowed(:create_organization) }
+ end
+ end
end
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index 89f083a69d6..4d72de27046 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -162,19 +162,16 @@ RSpec.describe GroupPolicy, feature_category: :system_access do
group.update!(subgroup_creation_level: ::Gitlab::Access::MAINTAINER_SUBGROUP_ACCESS)
end
- it 'allows every maintainer permission plus creating subgroups' do
- create_subgroup_permission = [:create_subgroup]
- updated_maintainer_permissions =
- maintainer_permissions + create_subgroup_permission
- updated_owner_permissions =
- owner_permissions - create_subgroup_permission
-
+ it 'allows permissions from lower roles' do
expect_allowed(*public_permissions)
expect_allowed(*guest_permissions)
expect_allowed(*reporter_permissions)
expect_allowed(*developer_permissions)
- expect_allowed(*updated_maintainer_permissions)
- expect_disallowed(*updated_owner_permissions)
+ end
+
+ it 'allows every maintainer permission plus creating subgroups' do
+ expect_allowed(:create_subgroup, *maintainer_permissions)
+ expect_disallowed(*(owner_permissions - [:create_subgroup]))
end
end
@@ -245,7 +242,7 @@ RSpec.describe GroupPolicy, feature_category: :system_access do
end
context 'migration bot' do
- let_it_be(:migration_bot) { User.migration_bot }
+ let_it_be(:migration_bot) { Users::Internal.migration_bot }
let_it_be(:current_user) { migration_bot }
it :aggregate_failures do
@@ -354,6 +351,9 @@ RSpec.describe GroupPolicy, feature_category: :system_access do
expect_allowed(*guest_permissions)
expect_allowed(*reporter_permissions)
expect_allowed(*developer_permissions)
+ end
+
+ it 'allows every maintainer permission plus creating subgroups' do
expect_allowed(*maintainer_permissions)
expect_disallowed(*owner_permissions)
end
@@ -1261,7 +1261,7 @@ RSpec.describe GroupPolicy, feature_category: :system_access do
context 'support bot' do
let_it_be_with_refind(:group) { create(:group, :private, :crm_enabled) }
- let_it_be(:current_user) { User.support_bot }
+ let_it_be(:current_user) { Users::Internal.support_bot }
before do
allow(Gitlab::ServiceDesk).to receive(:supported?).and_return(true)
diff --git a/spec/policies/issue_policy_spec.rb b/spec/policies/issue_policy_spec.rb
index 1142d6f80fd..743d96ee3dd 100644
--- a/spec/policies/issue_policy_spec.rb
+++ b/spec/policies/issue_policy_spec.rb
@@ -18,8 +18,8 @@ RSpec.describe IssuePolicy, feature_category: :team_planning do
let(:group) { create(:group, :public) }
let(:reporter_from_group_link) { create(:user) }
let(:non_member) { create(:user) }
- let(:support_bot) { User.support_bot }
- let(:alert_bot) { User.alert_bot }
+ let(:support_bot) { Users::Internal.support_bot }
+ let(:alert_bot) { Users::Internal.alert_bot }
def permissions(user, issue)
described_class.new(user, issue)
diff --git a/spec/policies/organizations/organization_policy_spec.rb b/spec/policies/organizations/organization_policy_spec.rb
index e51362227c9..3fcfa63b1b2 100644
--- a/spec/policies/organizations/organization_policy_spec.rb
+++ b/spec/policies/organizations/organization_policy_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Organizations::OrganizationPolicy, feature_category: :cell do
let_it_be(:organization) { create(:organization) }
+ let_it_be(:current_user) { create :user }
subject(:policy) { described_class.new(current_user, organization) }
@@ -19,6 +20,7 @@ RSpec.describe Organizations::OrganizationPolicy, feature_category: :cell do
context 'when admin mode is enabled', :enable_admin_mode do
it { is_expected.to be_allowed(:admin_organization) }
it { is_expected.to be_allowed(:read_organization) }
+ it { is_expected.to be_allowed(:read_organization_user) }
end
context 'when admin mode is disabled' do
@@ -27,13 +29,19 @@ RSpec.describe Organizations::OrganizationPolicy, feature_category: :cell do
end
end
- context 'when the user is an organization user' do
- let_it_be(:current_user) { create :user }
-
+ context 'when the user is part of the organization' do
before do
create :organization_user, organization: organization, user: current_user
end
+ it { is_expected.to be_allowed(:read_organization_user) }
+ it { is_expected.to be_allowed(:read_organization) }
+ end
+
+ context 'when the user is not part of the organization' do
+ it { is_expected.to be_disallowed(:read_organization_user) }
+ # All organizations are currently public, and hence they are allowed to be read
+ # even if the user is not a part of the organization.
it { is_expected.to be_allowed(:read_organization) }
end
end
diff --git a/spec/policies/packages/policies/project_policy_spec.rb b/spec/policies/packages/policies/project_policy_spec.rb
index fde10f64be8..1f8a653f984 100644
--- a/spec/policies/packages/policies/project_policy_spec.rb
+++ b/spec/policies/packages/policies/project_policy_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Packages::Policies::ProjectPolicy do
+RSpec.describe Packages::Policies::ProjectPolicy, feature_category: :package_registry do
include_context 'ProjectPolicy context'
let(:project) { public_project }
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index 350d834e63e..e7c2dcc4158 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -828,7 +828,7 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do
end
context 'alert bot' do
- let(:current_user) { User.alert_bot }
+ let(:current_user) { Users::Internal.alert_bot }
it { is_expected.to be_allowed(:reporter_access) }
@@ -882,7 +882,7 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do
end
context 'support bot' do
- let(:current_user) { User.support_bot }
+ let(:current_user) { Users::Internal.support_bot }
context 'with service desk disabled' do
it { expect_allowed(:public_access) }
diff --git a/spec/presenters/blob_presenter_spec.rb b/spec/presenters/blob_presenter_spec.rb
index 150c7bd5f3e..a249597e900 100644
--- a/spec/presenters/blob_presenter_spec.rb
+++ b/spec/presenters/blob_presenter_spec.rb
@@ -130,7 +130,7 @@ RSpec.describe BlobPresenter do
describe '#pipeline_editor_path' do
context 'when blob is .gitlab-ci.yml' do
- before do
+ before_all do
project.repository.create_file(
user,
'.gitlab-ci.yml',
@@ -144,6 +144,16 @@ RSpec.describe BlobPresenter do
let(:path) { '.gitlab-ci.yml' }
it { expect(presenter.pipeline_editor_path).to eq("/#{project.full_path}/-/ci/editor?branch_name=#{ref}") }
+
+ context 'when ref includes the qualifier' do
+ let(:ref) { 'refs/heads/main' }
+
+ it 'returns path to unqualified ref' do
+ allow(blob).to receive(:ref_type).and_return('heads')
+
+ expect(presenter.pipeline_editor_path).to eq("/#{project.full_path}/-/ci/editor?branch_name=main")
+ end
+ end
end
end
diff --git a/spec/presenters/event_presenter_spec.rb b/spec/presenters/event_presenter_spec.rb
index 9093791421d..3f34c96ad8e 100644
--- a/spec/presenters/event_presenter_spec.rb
+++ b/spec/presenters/event_presenter_spec.rb
@@ -76,4 +76,104 @@ RSpec.describe EventPresenter do
.to have_attributes(note_target_type_name: 'issue')
end
end
+
+ describe '#push_activity_description' do
+ subject { event.present.push_activity_description }
+
+ context 'when event is a regular event' do
+ let(:event) { build(:event, project: project) }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when event is a push event' do
+ let!(:push_event_payload) { build(:push_event_payload, event: event, ref_count: ref_count) }
+ let(:event) { build(:push_event, project: project) }
+
+ context 'when it is an individual event' do
+ let(:ref_count) { nil }
+
+ it { is_expected.to eq 'pushed to branch' }
+ end
+
+ context 'when it is a batch event' do
+ let(:ref_count) { 1 }
+
+ it { is_expected.to eq 'pushed to 1 branch' }
+ end
+ end
+ end
+
+ describe '#batch_push?' do
+ subject { event.present.batch_push? }
+
+ context 'when event is a regular event' do
+ let(:event) { build(:event, project: project) }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when event is a push event' do
+ let!(:push_event_payload) { build(:push_event_payload, event: event, ref_count: ref_count) }
+ let(:event) { build(:push_event, project: project) }
+
+ context 'when it is an individual event' do
+ let(:ref_count) { nil }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when it is a batch event' do
+ let(:ref_count) { 1 }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+ end
+
+ describe '#linked_to_reference?' do
+ subject { event.present.linked_to_reference? }
+
+ context 'when event is a regular event' do
+ let(:event) { build(:event, project: project) }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when event is a push event' do
+ let!(:push_event_payload) { build(:push_event_payload, event: event, ref: ref, ref_type: ref_type) }
+ let(:ref) { 'master' }
+ let(:ref_type) { :branch }
+
+ context 'when event belongs to group' do
+ let(:event) { build(:push_event, group: group) }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when event belongs to project' do
+ let(:event) { build(:push_event, project: project) }
+
+ it { is_expected.to be_falsey }
+
+ context 'when matching tag exists' do
+ let(:ref_type) { :tag }
+
+ before do
+ allow(project.repository).to receive(:tag_exists?).with(ref).and_return(true)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when matching branch exists' do
+ before do
+ allow(project.repository).to receive(:branch_exists?).with(ref).and_return(true)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+ end
+ end
end
diff --git a/spec/presenters/gitlab/blame_presenter_spec.rb b/spec/presenters/gitlab/blame_presenter_spec.rb
index b3b9e133a73..409d2de3fc1 100644
--- a/spec/presenters/gitlab/blame_presenter_spec.rb
+++ b/spec/presenters/gitlab/blame_presenter_spec.rb
@@ -70,4 +70,33 @@ RSpec.describe Gitlab::BlamePresenter do
end
end
end
+
+ describe '#groups_commit_data' do
+ shared_examples 'groups_commit_data' do
+ it 'combines group and commit data' do
+ data = subject.groups_commit_data
+
+ aggregate_failures do
+ expect(data.size).to eq 18
+ expect(data.first[:commit].sha).to eq("913c66a37b4a45b9769037c55c2d238bd0942d2e")
+ expect(data.first[:lines].size).to eq 3
+ expect(data.first[:commit_data].author_avatar).to include('src="https://www.gravatar.com/')
+ end
+ end
+ end
+
+ it_behaves_like 'groups_commit_data'
+
+ context 'when page is not sent as an attribute' do
+ subject { described_class.new(blame, project: project, path: path) }
+
+ it_behaves_like 'groups_commit_data'
+ end
+
+ context 'when project is not sent as an attribute' do
+ subject { described_class.new(blame, path: path, page: 1) }
+
+ it_behaves_like 'groups_commit_data'
+ end
+ end
end
diff --git a/spec/presenters/issue_presenter_spec.rb b/spec/presenters/issue_presenter_spec.rb
index f9a3be9bbed..99ab8582f77 100644
--- a/spec/presenters/issue_presenter_spec.rb
+++ b/spec/presenters/issue_presenter_spec.rb
@@ -98,7 +98,7 @@ RSpec.describe IssuePresenter do
context 'when issue is a service desk issue' do
let(:service_desk_issue) do
- create(:issue, project: project, author: User.support_bot, service_desk_reply_to: email)
+ create(:issue, project: project, author: Users::Internal.support_bot, service_desk_reply_to: email)
end
let(:user) { nil }
diff --git a/spec/presenters/packages/composer/packages_presenter_spec.rb b/spec/presenters/packages/composer/packages_presenter_spec.rb
index ae88acea61d..a3a1a4c4e85 100644
--- a/spec/presenters/packages/composer/packages_presenter_spec.rb
+++ b/spec/presenters/packages/composer/packages_presenter_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe ::Packages::Composer::PackagesPresenter do
let_it_be(:package_name) { 'sample-project' }
let_it_be(:json) { { 'name' => package_name } }
let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, :custom_repo, files: { 'composer.json' => json.to_json }, group: group) }
+ let_it_be(:project) { create(:project, :public, :custom_repo, files: { 'composer.json' => json.to_json }, group: group) }
let!(:package1) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '1.0.0', json: json) }
let!(:package2) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '2.0.0', json: json) }
diff --git a/spec/presenters/projects/security/configuration_presenter_spec.rb b/spec/presenters/projects/security/configuration_presenter_spec.rb
index 4fe459a798a..beabccf6639 100644
--- a/spec/presenters/projects/security/configuration_presenter_spec.rb
+++ b/spec/presenters/projects/security/configuration_presenter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::Security::ConfigurationPresenter do
+RSpec.describe Projects::Security::ConfigurationPresenter, feature_category: :software_composition_analysis do
include Gitlab::Routing.url_helpers
using RSpec::Parameterized::TableSyntax
@@ -87,6 +87,7 @@ RSpec.describe Projects::Security::ConfigurationPresenter do
expect(feature['available']).to eq(true)
expect(feature['can_enable_by_merge_request']).to eq(true)
expect(feature['meta_info_path']).to be_nil
+ expect(feature['on_demand_available']).to eq(false)
end
context 'when checking features configured status' do
diff --git a/spec/presenters/snippet_blob_presenter_spec.rb b/spec/presenters/snippet_blob_presenter_spec.rb
index cdd02241fbf..5777d78608e 100644
--- a/spec/presenters/snippet_blob_presenter_spec.rb
+++ b/spec/presenters/snippet_blob_presenter_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe SnippetBlobPresenter do
let(:blob) { blob_at(file) }
it 'returns rich markdown content' do
- expect(subject).to include('file-content md')
+ expect(subject).to include('file-content js-markup-content md')
end
end
diff --git a/spec/rake_helper.rb b/spec/rake_helper.rb
index 0386fef5134..53bd36542b7 100644
--- a/spec/rake_helper.rb
+++ b/spec/rake_helper.rb
@@ -6,7 +6,7 @@ require 'rake'
RSpec.configure do |config|
config.include RakeHelpers
- config.before(:all) do
+ config.before(:all, type: :task) do
Rake.application.rake_require 'tasks/gitlab/helpers'
Rake::Task.define_task :environment
end
diff --git a/spec/requests/admin/abuse_reports_controller_spec.rb b/spec/requests/admin/abuse_reports_controller_spec.rb
index c443a441af8..c25fb18e5b8 100644
--- a/spec/requests/admin/abuse_reports_controller_spec.rb
+++ b/spec/requests/admin/abuse_reports_controller_spec.rb
@@ -30,16 +30,14 @@ RSpec.describe Admin::AbuseReportsController, type: :request, feature_category:
expect(assigns(:abuse_reports).first.closed?).to eq true
end
- context 'when abuse_reports_list flag is disabled' do
- before do
- stub_feature_flags(abuse_reports_list: false)
- end
+ it 'labels do not introduce N+1 queries' do
+ get admin_abuse_reports_path # warm up
- it 'returns all reports by default' do
- get admin_abuse_reports_path
+ control = ActiveRecord::QueryRecorder.new { get admin_abuse_reports_path }
- expect(assigns(:abuse_reports).count).to eq 2
- end
+ create_list(:abuse_report, 2)
+
+ expect { get admin_abuse_reports_path }.to issue_same_number_of_queries_as(control).ignoring_cached_queries
end
end
@@ -53,13 +51,62 @@ RSpec.describe Admin::AbuseReportsController, type: :request, feature_category:
end
end
- shared_examples 'moderates user' do
+ describe 'PUT #update' do
+ let_it_be(:report) { create(:abuse_report) }
+ let_it_be(:label1) { create(:abuse_report_label, title: 'Uno') }
+
+ let(:params) { { label_ids: [Gitlab::GlobalId.build(label1, id: label1.id).to_s] } }
+ let(:expected_params) { ActionController::Parameters.new(params).permit! }
+
+ subject(:request) { put admin_abuse_report_path(report, params) }
+
+ it 'invokes the Admin::AbuseReports::UpdateService' do
+ expect_next_instance_of(Admin::AbuseReports::UpdateService, report, admin, expected_params) do |service|
+ expect(service).to receive(:execute).and_call_original
+ end
+
+ request
+ end
+
+ context 'when the service response is a success' do
+ before do
+ allow_next_instance_of(Admin::AbuseReports::UpdateService, report, admin, expected_params) do |service|
+ allow(service).to receive(:execute).and_return(ServiceResponse.success)
+ end
+
+ request
+ end
+
+ it 'returns with a success status' do
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'when the service response is an error' do
+ let(:error_message) { 'Error updating abuse report' }
+
+ before do
+ allow_next_instance_of(Admin::AbuseReports::UpdateService, report, admin, expected_params) do |service|
+ allow(service).to receive(:execute).and_return(ServiceResponse.error(message: error_message))
+ end
+
+ request
+ end
+
+ it 'returns the service response message with a failed status' do
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(json_response['message']).to eq(error_message)
+ end
+ end
+ end
+
+ describe 'PUT #moderate_user' do
let(:report) { create(:abuse_report) }
let(:params) { { user_action: 'block_user', close: 'true', reason: 'spam', comment: 'obvious spam' } }
let(:expected_params) { ActionController::Parameters.new(params).permit! }
let(:message) { 'Service response' }
- subject(:request) { put path }
+ subject(:request) { put moderate_user_admin_abuse_report_path(report, params) }
it 'invokes the Admin::AbuseReports::ModerateUserService' do
expect_next_instance_of(Admin::AbuseReports::ModerateUserService, report, admin, expected_params) do |service|
@@ -100,18 +147,6 @@ RSpec.describe Admin::AbuseReportsController, type: :request, feature_category:
end
end
- describe 'PUT #update' do
- let(:path) { admin_abuse_report_path(report, params) }
-
- it_behaves_like 'moderates user'
- end
-
- describe 'PUT #moderate_user' do
- let(:path) { moderate_user_admin_abuse_report_path(report, params) }
-
- it_behaves_like 'moderates user'
- end
-
describe 'DELETE #destroy' do
let!(:report) { create(:abuse_report) }
let(:params) { {} }
diff --git a/spec/requests/admin/users_controller_spec.rb b/spec/requests/admin/users_controller_spec.rb
index 21cf8ab2c79..e525d615b50 100644
--- a/spec/requests/admin/users_controller_spec.rb
+++ b/spec/requests/admin/users_controller_spec.rb
@@ -10,6 +10,29 @@ RSpec.describe Admin::UsersController, :enable_admin_mode, feature_category: :us
sign_in(admin)
end
+ describe 'PATCH #update' do
+ let(:user) { create(:user) }
+
+ context "when admin changes user email" do
+ let(:new_email) { 'new-email@example.com' }
+
+ subject(:request) { patch admin_user_path(user), params: { user: { email: new_email } } }
+
+ it 'allows changing the user email', :aggregate_failures do
+ expect { request }
+ .to change { user.reload.email }.from(user.email).to(new_email)
+
+ expect(response).to redirect_to(admin_user_path(user))
+ expect(flash[:notice]).to eq('User was successfully updated.')
+ end
+
+ it 'does not email the user with confirmation_instructions' do
+ expect { request }
+ .not_to have_enqueued_mail(DeviseMailer, :confirmation_instructions)
+ end
+ end
+ end
+
describe 'PUT #block' do
context 'when request format is :json' do
subject(:request) { put block_admin_user_path(user, format: :json) }
diff --git a/spec/requests/api/bulk_imports_spec.rb b/spec/requests/api/bulk_imports_spec.rb
index fdbfbf052d0..8aad56c9fc3 100644
--- a/spec/requests/api/bulk_imports_spec.rb
+++ b/spec/requests/api/bulk_imports_spec.rb
@@ -254,11 +254,10 @@ RSpec.describe API::BulkImports, feature_category: :importers do
request
expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['error']).to include('entities[0][destination_namespace] must have a relative path ' \
- 'structure with no HTTP protocol characters, or leading or ' \
- 'trailing forward slashes. Path segments must not start or end ' \
- 'with a special character, and must not contain consecutive ' \
- 'special characters.')
+ expect(json_response['error']).to include('entities[0][destination_namespace] must be a relative path ' \
+ 'and not include protocol, sub-domain, or domain information. ' \
+ "For example, 'destination/full/path' not " \
+ "'https://example.com/destination/full/path'")
end
end
@@ -311,12 +310,11 @@ RSpec.describe API::BulkImports, feature_category: :importers do
}
end
- it 'returns blocked url message in the error' do
+ it 'returns blocked url message in the error', :aggregate_failures do
request
expect(response).to have_gitlab_http_status(:unprocessable_entity)
-
- expect(json_response['message']).to include("Url is blocked: Only allowed schemes are http, https")
+ expect(json_response['message']).to eq("URL is blocked: Only allowed schemes are http, https")
end
end
@@ -336,16 +334,16 @@ RSpec.describe API::BulkImports, feature_category: :importers do
}
end
- it 'returns blocked url error' do
+ it 'returns blocked url error', :aggregate_failures do
stub_request(:get, "http://gitlab.example/api/v4/#{source_entity_type}/#{source_entity_identifier}/export_relations/status?page=1&per_page=30&private_token=access_token")
- .to_return(status: 404, body: "", headers: {})
+ .to_return(status: 404, body: "{'error':'404 Not Found'}")
request
expect(response).to have_gitlab_http_status(:unprocessable_entity)
-
- expect(json_response['message']).to include("Group import disabled on source or destination instance. " \
- "Ask an administrator to enable it on both instances and try again.")
+ expect(json_response['message']).to eq(
+ "Unsuccessful response 404 from /api/v4/groups/full_path/export_relations/status. Body: {'error':'404 Not Found'}"
+ )
end
end
diff --git a/spec/requests/api/ci/jobs_spec.rb b/spec/requests/api/ci/jobs_spec.rb
index c7b7131a600..19ac673308b 100644
--- a/spec/requests/api/ci/jobs_spec.rb
+++ b/spec/requests/api/ci/jobs_spec.rb
@@ -463,6 +463,14 @@ RSpec.describe API::Ci::Jobs, feature_category: :continuous_integration do
it { expect(response).to have_gitlab_http_status(:bad_request) }
end
+
+ it_behaves_like 'an endpoint with keyset pagination' do
+ let_it_be(:another_build) { create(:ci_build, :success, :tags, project: project, pipeline: pipeline) }
+
+ let(:first_record) { project.builds.last }
+ let(:second_record) { project.builds.first }
+ let(:api_call) { api("/projects/#{project.id}/jobs", user) }
+ end
end
context 'unauthorized user' do
diff --git a/spec/requests/api/ci/runner/jobs_request_post_spec.rb b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
index ca57208eb1d..7f9c9a13311 100644
--- a/spec/requests/api/ci/runner/jobs_request_post_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
@@ -260,7 +260,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
expect(runner.reload.platform).to eq('darwin')
expect(json_response['id']).to eq(job.id)
expect(json_response['token']).to eq(job.token)
- expect(json_response['job_info']).to eq(expected_job_info)
+ expect(json_response['job_info']).to include(expected_job_info)
expect(json_response['git_info']).to eq(expected_git_info)
expect(json_response['image']).to eq(
{ 'name' => 'image:1.0', 'entrypoint' => '/bin/sh', 'ports' => [], 'pull_policy' => nil }
@@ -672,7 +672,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
expect(runner.reload.platform).to eq('darwin')
expect(json_response['id']).to eq(job.id)
expect(json_response['token']).to eq(job.token)
- expect(json_response['job_info']).to eq(expected_job_info)
+ expect(json_response['job_info']).to include(expected_job_info)
expect(json_response['git_info']).to eq(expected_git_info)
expect(json_response['artifacts']).to eq(expected_artifacts)
end
@@ -785,6 +785,63 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
end
end
end
+
+ describe 'time_in_queue_seconds support' do
+ let(:job) do
+ create(:ci_build, :pending, :queued, pipeline: pipeline,
+ name: 'spinach', stage: 'test', stage_idx: 0,
+ queued_at: 60.seconds.ago)
+ end
+
+ it 'presents the time_in_queue_seconds info in the payload' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['job_info']['time_in_queue_seconds']).to be >= 60.seconds
+ end
+ end
+
+ describe 'project_jobs_running_on_instance_runners_count support' do
+ context 'when runner is not instance_type' do
+ it 'presents the project_jobs_running_on_instance_runners_count info in the payload as +Inf' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['job_info']['project_jobs_running_on_instance_runners_count']).to eq('+Inf')
+ end
+ end
+
+ context 'when runner is instance_type' do
+ let(:project) { create(:project, namespace: group, shared_runners_enabled: true) }
+ let(:runner) { create(:ci_runner, :instance) }
+
+ context 'when less than Project::INSTANCE_RUNNER_RUNNING_JOBS_MAX_BUCKET running jobs assigned to an instance runner are on the list' do
+ it 'presents the project_jobs_running_on_instance_runners_count info in the payload as a correct number in a string format' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['job_info']['project_jobs_running_on_instance_runners_count']).to eq('0')
+ end
+ end
+
+ context 'when at least Project::INSTANCE_RUNNER_RUNNING_JOBS_MAX_BUCKET running jobs assigned to an instance runner are on the list' do
+ let(:other_runner) { create(:ci_runner, :instance) }
+
+ before do
+ stub_const('Project::INSTANCE_RUNNER_RUNNING_JOBS_MAX_BUCKET', 1)
+
+ create(:ci_running_build, runner: other_runner, runner_type: other_runner.runner_type, project: project)
+ end
+
+ it 'presents the project_jobs_running_on_instance_runners_count info in the payload as Project::INSTANCE_RUNNER_RUNNING_JOBS_MAX_BUCKET+' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['job_info']['project_jobs_running_on_instance_runners_count']).to eq('1+')
+ end
+ end
+ end
+ end
end
describe 'port support' do
diff --git a/spec/requests/api/commit_statuses_spec.rb b/spec/requests/api/commit_statuses_spec.rb
index 2f0e64cd4da..9247d9366b2 100644
--- a/spec/requests/api/commit_statuses_spec.rb
+++ b/spec/requests/api/commit_statuses_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::CommitStatuses, feature_category: :continuous_integration do
+RSpec.describe API::CommitStatuses, :clean_gitlab_redis_cache, feature_category: :continuous_integration do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:commit) { project.repository.commit }
let_it_be(:guest) { create_user(:guest) }
@@ -120,6 +120,7 @@ RSpec.describe API::CommitStatuses, feature_category: :continuous_integration do
it "does not return project commits" do
expect(response).to have_gitlab_http_status(:forbidden)
+ expect(json_response['message']).to eq('403 Forbidden')
end
end
@@ -139,7 +140,8 @@ RSpec.describe API::CommitStatuses, feature_category: :continuous_integration do
context 'developer user' do
context 'uses only required parameters' do
- %w[pending running success failed canceled].each do |status|
+ valid_statuses = %w[pending running success failed canceled]
+ valid_statuses.each do |status|
context "for #{status}" do
context 'when pipeline for sha does not exists' do
it 'creates commit status and sets pipeline iid' do
@@ -248,12 +250,13 @@ RSpec.describe API::CommitStatuses, feature_category: :continuous_integration do
end
end
- context 'transitions status from pending' do
+ context 'when status transitions from pending' do
before do
post api(post_url, developer), params: { state: 'pending' }
end
- %w[running success failed canceled].each do |status|
+ valid_statuses = %w[running success failed canceled]
+ valid_statuses.each do |status|
it "to #{status}" do
expect { post api(post_url, developer), params: { state: status } }.not_to change { CommitStatus.count }
@@ -366,6 +369,7 @@ RSpec.describe API::CommitStatuses, feature_category: :continuous_integration do
send_request
expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to eq("Cannot transition status via :run from :running (Reason(s): Status cannot transition via \"run\")")
commit_status = project.commit_statuses.find_by!(name: 'coverage')
@@ -440,6 +444,7 @@ RSpec.describe API::CommitStatuses, feature_category: :continuous_integration do
it 'does not create commit status' do
expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to eq(nil)
end
end
@@ -450,6 +455,7 @@ RSpec.describe API::CommitStatuses, feature_category: :continuous_integration do
it 'does not create commit status' do
expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to eq(nil)
end
end
@@ -464,6 +470,7 @@ RSpec.describe API::CommitStatuses, feature_category: :continuous_integration do
it 'does not create commit status' do
expect(response).to have_gitlab_http_status(:forbidden)
+ expect(json_response['message']).to eq('403 Forbidden')
end
end
@@ -485,15 +492,16 @@ RSpec.describe API::CommitStatuses, feature_category: :continuous_integration do
it 'returns not found error' do
expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 Commit Not Found')
end
end
context 'when target URL is an invalid address' do
before do
post api(post_url, developer), params: {
- state: 'pending',
- target_url: 'invalid url'
- }
+ state: 'pending',
+ target_url: 'invalid url'
+ }
end
it 'responds with bad request status and validation errors' do
@@ -506,9 +514,9 @@ RSpec.describe API::CommitStatuses, feature_category: :continuous_integration do
context 'when target URL is an unsupported scheme' do
before do
post api(post_url, developer), params: {
- state: 'pending',
- target_url: 'git://example.com'
- }
+ state: 'pending',
+ target_url: 'git://example.com'
+ }
end
it 'responds with bad request status and validation errors' do
@@ -562,6 +570,7 @@ RSpec.describe API::CommitStatuses, feature_category: :continuous_integration do
it 'does not create commit status' do
expect(response).to have_gitlab_http_status(:forbidden)
+ expect(json_response['message']).to eq('403 Forbidden')
end
end
@@ -572,6 +581,7 @@ RSpec.describe API::CommitStatuses, feature_category: :continuous_integration do
it 'does not create commit status' do
expect(response).to have_gitlab_http_status(:forbidden)
+ expect(json_response['message']).to eq('403 Forbidden')
end
end
diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb
index 687ce333ca5..8b9ac7cd588 100644
--- a/spec/requests/api/commits_spec.rb
+++ b/spec/requests/api/commits_spec.rb
@@ -589,7 +589,7 @@ RSpec.describe API::Commits, feature_category: :source_code_management do
let(:namespace) { project.namespace.reload }
let(:label) { 'counts.web_ide_commits' }
let(:context) do
- [Gitlab::Tracking::ServicePingContext.new(data_source: :redis, key_path: 'counts.web_ide_commits').to_context.to_json]
+ [Gitlab::Usage::MetricDefinition.context_for('counts.web_ide_commits').to_context.to_json]
end
end
end
diff --git a/spec/requests/api/discussions_spec.rb b/spec/requests/api/discussions_spec.rb
index a65dc6e0175..aebdcebbc5a 100644
--- a/spec/requests/api/discussions_spec.rb
+++ b/spec/requests/api/discussions_spec.rb
@@ -116,6 +116,17 @@ RSpec.describe API::Discussions, feature_category: :team_planning do
it_behaves_like 'diff discussions API', 'projects', 'merge_requests', 'iid'
it_behaves_like 'resolvable discussions API', 'projects', 'merge_requests', 'iid'
+ context "when position_type is file" do
+ it "creates a new diff note" do
+ position = diff_note.position.to_h.merge({ position_type: 'file' }).except(:ignore_whitespace_change)
+
+ post api("/projects/#{parent.id}/merge_requests/#{noteable['iid']}/discussions", user),
+ params: { body: 'hi!', position: position }
+
+ expect(response).to have_gitlab_http_status(:created)
+ end
+ end
+
context "when position is for a previous commit on the merge request" do
it "returns a 400 bad request error because the line_code is old" do
# SHA taken from an earlier commit listed in spec/factories/merge_requests.rb
diff --git a/spec/requests/api/feature_flags_spec.rb b/spec/requests/api/feature_flags_spec.rb
index 69e3633de57..4fb0dfbb070 100644
--- a/spec/requests/api/feature_flags_spec.rb
+++ b/spec/requests/api/feature_flags_spec.rb
@@ -111,7 +111,57 @@ RSpec.describe API::FeatureFlags, feature_category: :feature_flags do
'scopes' => [{
'id' => scope.id,
'environment_scope' => 'production'
- }]
+ }],
+ 'user_list' => nil
+ }]
+ }])
+ end
+ end
+
+ context 'with user_list strategy feature flags' do
+ let!(:feature_flag) do
+ create(:operations_feature_flag, :new_version_flag, project: project, name: 'feature1')
+ end
+
+ let!(:user_list) do
+ create(:operations_feature_flag_user_list, project: project)
+ end
+
+ let!(:strategy) do
+ create(:operations_strategy, :gitlab_userlist, user_list: user_list, feature_flag: feature_flag, name: 'gitlabUserList', parameters: {})
+ end
+
+ let!(:scope) do
+ create(:operations_scope, strategy: strategy, environment_scope: 'production')
+ end
+
+ it 'returns the feature flags', :aggregate_failures do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flags')
+ expect(json_response).to eq([{
+ 'name' => 'feature1',
+ 'description' => nil,
+ 'active' => true,
+ 'version' => 'new_version_flag',
+ 'updated_at' => feature_flag.updated_at.as_json,
+ 'created_at' => feature_flag.created_at.as_json,
+ 'scopes' => [],
+ 'strategies' => [{
+ 'id' => strategy.id,
+ 'name' => 'gitlabUserList',
+ 'parameters' => {},
+ 'scopes' => [{
+ 'id' => scope.id,
+ 'environment_scope' => 'production'
+ }],
+ 'user_list' => {
+ 'id' => user_list.id,
+ 'iid' => user_list.iid,
+ 'name' => user_list.name,
+ 'user_xids' => user_list.user_xids
+ }
}]
}])
end
@@ -162,7 +212,57 @@ RSpec.describe API::FeatureFlags, feature_category: :feature_flags do
'scopes' => [{
'id' => scope.id,
'environment_scope' => 'production'
- }]
+ }],
+ 'user_list' => nil
+ }]
+ })
+ end
+ end
+
+ context 'with user_list strategy feature flag' do
+ let!(:feature_flag) do
+ create(:operations_feature_flag, :new_version_flag, project: project, name: 'feature1')
+ end
+
+ let(:user_list) do
+ create(:operations_feature_flag_user_list, project: project)
+ end
+
+ let!(:strategy) do
+ create(:operations_strategy, :gitlab_userlist, user_list: user_list, feature_flag: feature_flag, name: 'gitlabUserList', parameters: {})
+ end
+
+ let!(:scope) do
+ create(:operations_scope, strategy: strategy, environment_scope: 'production')
+ end
+
+ it 'returns the feature flag', :aggregate_failures do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ expect(json_response).to eq({
+ 'name' => 'feature1',
+ 'description' => nil,
+ 'active' => true,
+ 'version' => 'new_version_flag',
+ 'updated_at' => feature_flag.updated_at.as_json,
+ 'created_at' => feature_flag.created_at.as_json,
+ 'scopes' => [],
+ 'strategies' => [{
+ 'id' => strategy.id,
+ 'name' => 'gitlabUserList',
+ 'parameters' => {},
+ 'scopes' => [{
+ 'id' => scope.id,
+ 'environment_scope' => 'production'
+ }],
+ 'user_list' => {
+ 'id' => user_list.id,
+ 'iid' => user_list.iid,
+ 'name' => user_list.name,
+ 'user_xids' => user_list.user_xids
+ }
}]
})
end
@@ -224,6 +324,10 @@ RSpec.describe API::FeatureFlags, feature_category: :feature_flags do
end
context 'when creating a version 2 feature flag' do
+ let(:user_list) do
+ create(:operations_feature_flag_user_list, project: project)
+ end
+
it 'creates a new feature flag' do
params = {
name: 'new-feature',
@@ -348,6 +452,32 @@ RSpec.describe API::FeatureFlags, feature_category: :feature_flags do
environment_scope: 'staging'
}])
end
+
+ it 'creates a new feature flag with user list strategy', :aggregate_failures do
+ params = {
+ name: 'new-feature',
+ version: 'new_version_flag',
+ strategies: [{
+ name: 'gitlabUserList',
+ parameters: {},
+ user_list_id: user_list.id
+ }]
+ }
+
+ post api("/projects/#{project.id}/feature_flags", user), params: params
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+
+ feature_flag = project.operations_feature_flags.last
+ expect(feature_flag.name).to eq(params[:name])
+ expect(feature_flag.version).to eq('new_version_flag')
+ expect(feature_flag.strategies.map { |s| s.slice(:name, :parameters).deep_symbolize_keys }).to eq([{
+ name: 'gitlabUserList',
+ parameters: {}
+ }])
+ expect(feature_flag.strategies.first.user_list).to eq(user_list)
+ end
end
context 'when given invalid parameters' do
@@ -369,6 +499,10 @@ RSpec.describe API::FeatureFlags, feature_category: :feature_flags do
project: project, active: true, name: 'feature1', description: 'old description')
end
+ let(:user_list) do
+ create(:operations_feature_flag_user_list, project: project)
+ end
+
it 'returns a 404 if the feature flag does not exist' do
params = { description: 'new description' }
@@ -537,6 +671,30 @@ RSpec.describe API::FeatureFlags, feature_category: :feature_flags do
}])
end
+ it 'updates an existing feature flag strategy to be gitlab user list strategy', :aggregate_failures do
+ strategy = create(:operations_strategy, feature_flag: feature_flag, name: 'default', parameters: {})
+ params = {
+ strategies: [{
+ id: strategy.id,
+ name: 'gitlabUserList',
+ user_list_id: user_list.id,
+ parameters: {}
+ }]
+ }
+
+ put api("/projects/#{project.id}/feature_flags/feature1", user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ result = feature_flag.reload.strategies.map { |s| s.slice(:id, :name, :parameters).deep_symbolize_keys }
+ expect(result).to eq([{
+ id: strategy.id,
+ name: 'gitlabUserList',
+ parameters: {}
+ }])
+ expect(feature_flag.strategies.first.user_list).to eq(user_list)
+ end
+
it 'adds a new gradual rollout strategy to a feature flag' do
strategy = create(:operations_strategy, feature_flag: feature_flag, name: 'default', parameters: {})
params = {
diff --git a/spec/requests/api/features_spec.rb b/spec/requests/api/features_spec.rb
index 2571e3b1e6a..d922947f3e9 100644
--- a/spec/requests/api/features_spec.rb
+++ b/spec/requests/api/features_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feature_flags do
+RSpec.describe API::Features, :clean_gitlab_redis_feature_flag, stub_feature_flags: false, feature_category: :feature_flags do
let_it_be(:user) { create(:user) }
let_it_be(:opted_out) { create(:user) }
let_it_be(:admin) { create(:admin) }
diff --git a/spec/requests/api/graphql/ci/jobs_spec.rb b/spec/requests/api/graphql/ci/jobs_spec.rb
index 756fcd8b7cd..ab2ebf134d7 100644
--- a/spec/requests/api/graphql/ci/jobs_spec.rb
+++ b/spec/requests/api/graphql/ci/jobs_spec.rb
@@ -139,7 +139,10 @@ RSpec.describe 'Query.project.pipeline', feature_category: :continuous_integrati
let(:pipeline) do
pipeline = create(:ci_pipeline, project: project, user: user)
stage = create(:ci_stage, project: project, pipeline: pipeline, name: 'first', position: 1)
- create(:ci_build, stage_id: stage.id, pipeline: pipeline, name: 'my test job', scheduling_type: :stage)
+ create(
+ :ci_build, pipeline: pipeline, name: 'my test job',
+ scheduling_type: :stage, stage_id: stage.id, stage_idx: stage.position
+ )
pipeline
end
@@ -180,10 +183,10 @@ RSpec.describe 'Query.project.pipeline', feature_category: :continuous_integrati
previousStageJobsOrNeeds {
nodes {
... on CiBuildNeed {
- #{all_graphql_fields_for('CiBuildNeed')}
+ name
}
... on CiJob {
- #{all_graphql_fields_for('CiJob', excluded: %w[aiFailureAnalysis])}
+ name
}
}
}
@@ -211,10 +214,12 @@ RSpec.describe 'Query.project.pipeline', feature_category: :continuous_integrati
before do
build_stage = create(:ci_stage, position: 2, name: 'build', project: project, pipeline: pipeline)
test_stage = create(:ci_stage, position: 3, name: 'test', project: project, pipeline: pipeline)
+ deploy_stage = create(:ci_stage, position: 4, name: 'deploy', project: project, pipeline: pipeline)
create(:ci_build, pipeline: pipeline, name: 'docker 1 2', scheduling_type: :stage, ci_stage: build_stage, stage_idx: build_stage.position)
create(:ci_build, pipeline: pipeline, name: 'docker 2 2', ci_stage: build_stage, stage_idx: build_stage.position, scheduling_type: :dag)
create(:ci_build, pipeline: pipeline, name: 'rspec 1 2', scheduling_type: :stage, ci_stage: test_stage, stage_idx: test_stage.position)
+ create(:ci_build, pipeline: pipeline, name: 'deploy', scheduling_type: :stage, ci_stage: deploy_stage, stage_idx: deploy_stage.position)
test_job = create(:ci_build, pipeline: pipeline, name: 'rspec 2 2', scheduling_type: :dag, ci_stage: test_stage, stage_idx: test_stage.position)
create(:ci_build_need, build: test_job, name: 'my test job')
@@ -255,6 +260,14 @@ RSpec.describe 'Query.project.pipeline', feature_category: :continuous_integrati
'previousStageJobsOrNeeds' => { 'nodes' => [
a_hash_including('name' => 'my test job')
] }
+ ),
+ a_hash_including(
+ 'name' => 'deploy',
+ 'needs' => { 'nodes' => [] },
+ 'previousStageJobsOrNeeds' => { 'nodes' => [
+ a_hash_including('name' => 'rspec 1 2'),
+ a_hash_including('name' => 'rspec 2 2')
+ ] }
)
)
end
@@ -613,3 +626,87 @@ RSpec.describe 'Query.project.pipeline', feature_category: :continuous_integrati
end
end
end
+
+RSpec.describe 'previousStageJobs', feature_category: :pipeline_composition do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+
+ let(:query) do
+ <<~QUERY
+ {
+ project(fullPath: "#{project.full_path}") {
+ pipeline(iid: "#{pipeline.iid}") {
+ stages {
+ nodes {
+ groups {
+ nodes {
+ jobs {
+ nodes {
+ name
+ previousStageJobs {
+ nodes {
+ name
+ downstreamPipeline {
+ id
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ QUERY
+ end
+
+ it 'does not produce N+1 queries', :request_store, :use_sql_query_cache do
+ user1 = create(:user)
+ user2 = create(:user)
+
+ create_stage_with_build_and_bridge('build', 0)
+ create_stage_with_build_and_bridge('test', 1)
+
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ post_graphql(query, current_user: user1)
+ end
+
+ expect(graphql_data_previous_stage_jobs).to eq(
+ 'build_build' => [],
+ 'test_build' => %w[build_build]
+ )
+
+ create_stage_with_build_and_bridge('deploy', 2)
+
+ expect do
+ post_graphql(query, current_user: user2)
+ end.to issue_same_number_of_queries_as(control)
+
+ expect(graphql_data_previous_stage_jobs).to eq(
+ 'build_build' => [],
+ 'test_build' => %w[build_build],
+ 'deploy_build' => %w[test_build]
+ )
+ end
+
+ def create_stage_with_build_and_bridge(stage_name, stage_position)
+ stage = create(:ci_stage, position: stage_position, name: "#{stage_name}_stage", project: project, pipeline: pipeline)
+
+ create(:ci_build, pipeline: pipeline, name: "#{stage_name}_build", ci_stage: stage, stage_idx: stage.position)
+ end
+
+ def graphql_data_previous_stage_jobs
+ stages = graphql_data.dig('project', 'pipeline', 'stages', 'nodes')
+ groups = stages.flat_map { |stage| stage.dig('groups', 'nodes') }
+ jobs = groups.flat_map { |group| group.dig('jobs', 'nodes') }
+
+ jobs.each_with_object({}) do |job, previous_stage_jobs|
+ previous_stage_jobs[job['name']] = job.dig('previousStageJobs', 'nodes').pluck('name')
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/ci/runner_spec.rb b/spec/requests/api/graphql/ci/runner_spec.rb
index 3cfb98c57fd..6f1eb77fa9b 100644
--- a/spec/requests/api/graphql/ci/runner_spec.rb
+++ b/spec/requests/api/graphql/ci/runner_spec.rb
@@ -2,9 +2,11 @@
require 'spec_helper'
-RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
+RSpec.describe 'Query.runner(id)', :freeze_time, feature_category: :runner_fleet do
include GraphqlHelpers
+ using RSpec::Parameterized::TableSyntax
+
let_it_be(:user) { create(:user, :admin) }
let_it_be(:another_admin) { create(:user, :admin) }
let_it_be_with_reload(:group) { create(:group) }
@@ -144,23 +146,6 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
)
expect(runner_data['tagList']).to match_array runner.tag_list
end
-
- it 'does not execute more queries per runner', :use_sql_query_cache, :aggregate_failures do
- # warm-up license cache and so on:
- personal_access_token = create(:personal_access_token, user: user)
- args = { current_user: user, token: { personal_access_token: personal_access_token } }
- post_graphql(query, **args)
- expect(graphql_data_at(:runner)).not_to be_nil
-
- personal_access_token = create(:personal_access_token, user: another_admin)
- args = { current_user: another_admin, token: { personal_access_token: personal_access_token } }
- control = ActiveRecord::QueryRecorder.new(skip_cached: false) { post_graphql(query, **args) }
-
- create(:ci_runner, :instance, version: '14.0.0', tag_list: %w[tag5 tag6], creator: another_admin)
- create(:ci_runner, :project, version: '14.0.1', projects: [project1], tag_list: %w[tag3 tag8], creator: another_admin)
-
- expect { post_graphql(query, **args) }.not_to exceed_all_query_limit(control)
- end
end
shared_examples 'retrieval with no admin url' do
@@ -228,7 +213,7 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
end
end
- context 'with build running', :freeze_time do
+ context 'with build running' do
let!(:pipeline) { create(:ci_pipeline, project: project1) }
let!(:runner_manager) do
create(:ci_runner_machine,
@@ -248,23 +233,25 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
end
describe 'for project runner' do
- describe 'locked' do
- using RSpec::Parameterized::TableSyntax
+ let_it_be_with_refind(:project_runner) do
+ create(:ci_runner, :project,
+ description: 'Runner 3',
+ contacted_at: 1.day.ago,
+ active: false,
+ locked: false,
+ version: 'adfe157',
+ revision: 'b',
+ ip_address: '10.10.10.10',
+ access_level: 1,
+ run_untagged: true)
+ end
+ describe 'locked' do
where(is_locked: [true, false])
with_them do
- let(:project_runner) do
- create(:ci_runner, :project,
- description: 'Runner 3',
- contacted_at: 1.day.ago,
- active: false,
- locked: is_locked,
- version: 'adfe157',
- revision: 'b',
- ip_address: '10.10.10.10',
- access_level: 1,
- run_untagged: true)
+ before do
+ project_runner.update!(locked: is_locked)
end
let(:query) do
@@ -357,6 +344,109 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
)
end
end
+
+ describe 'jobs' do
+ let(:query) do
+ %(
+ query {
+ runner(id: "#{project_runner.to_global_id}") { #{runner_query_fragment} }
+ }
+ )
+ end
+
+ context 'with a job from a non-owned project' do
+ let(:runner_query_fragment) do
+ %(
+ id
+ jobs {
+ nodes {
+ id status shortSha finishedAt duration queuedDuration tags webPath
+ project { id }
+ runner { id }
+ }
+ }
+ )
+ end
+
+ let_it_be(:owned_project_owner) { create(:user) }
+ let_it_be(:owned_project) { create(:project) }
+ let_it_be(:other_project) { create(:project) }
+ let_it_be(:project_runner) { create(:ci_runner, :project_type, projects: [other_project, owned_project]) }
+ let_it_be(:owned_project_pipeline) { create(:ci_pipeline, project: owned_project) }
+ let_it_be(:other_project_pipeline) { create(:ci_pipeline, project: other_project) }
+ let_it_be(:owned_build) do
+ create(:ci_build, :running, runner: project_runner, pipeline: owned_project_pipeline,
+ tag_list: %i[a b c], created_at: 1.hour.ago, started_at: 59.minutes.ago, finished_at: 30.minutes.ago)
+ end
+
+ let_it_be(:other_build) do
+ create(:ci_build, :success, runner: project_runner, pipeline: other_project_pipeline,
+ tag_list: %i[d e f], created_at: 30.minutes.ago, started_at: 19.minutes.ago, finished_at: 1.minute.ago)
+ end
+
+ before_all do
+ owned_project.add_owner(owned_project_owner)
+ end
+
+ it 'returns empty values for sensitive fields in non-owned jobs' do
+ post_graphql(query, current_user: owned_project_owner)
+
+ jobs_data = graphql_data_at(:runner, :jobs, :nodes)
+ expect(jobs_data).not_to be_nil
+ expect(jobs_data).to match([
+ a_graphql_entity_for(other_build,
+ status: other_build.status.upcase,
+ project: nil, tags: nil, web_path: nil,
+ runner: a_graphql_entity_for(project_runner),
+ short_sha: 'Unauthorized', finished_at: other_build.finished_at&.iso8601,
+ duration: a_value_within(0.001).of(other_build.duration),
+ queued_duration: a_value_within(0.001).of((other_build.started_at - other_build.queued_at).to_f)),
+ a_graphql_entity_for(owned_build,
+ status: owned_build.status.upcase,
+ project: a_graphql_entity_for(owned_project),
+ tags: owned_build.tag_list.map(&:to_s),
+ web_path: ::Gitlab::Routing.url_helpers.project_job_path(owned_project, owned_build),
+ runner: a_graphql_entity_for(project_runner),
+ short_sha: owned_build.short_sha,
+ finished_at: owned_build.finished_at&.iso8601,
+ duration: a_value_within(0.001).of(owned_build.duration),
+ queued_duration: a_value_within(0.001).of((owned_build.started_at - owned_build.queued_at).to_f))
+ ])
+ end
+ end
+ end
+
+ describe 'a query fetching all fields' do
+ let(:query) do
+ wrap_fields(query_graphql_path(query_path, all_graphql_fields_for('CiRunner')))
+ end
+
+ let(:query_path) do
+ [
+ [:runner, { id: project_runner.to_global_id.to_s }]
+ ]
+ end
+
+ it 'does not execute more queries per runner', :use_sql_query_cache, :aggregate_failures do
+ create(:ci_build, :failed, runner: project_runner)
+ create(:ci_runner_machine, runner: project_runner, version: '16.4.0')
+
+ # warm-up license cache and so on:
+ personal_access_token = create(:personal_access_token, user: user)
+ args = { current_user: user, token: { personal_access_token: personal_access_token } }
+ post_graphql(query, **args)
+ expect(graphql_data_at(:runner)).not_to be_nil
+
+ personal_access_token = create(:personal_access_token, user: another_admin)
+ args = { current_user: another_admin, token: { personal_access_token: personal_access_token } }
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) { post_graphql(query, **args) }
+
+ create(:ci_build, :failed, runner: project_runner)
+ create(:ci_runner_machine, runner: project_runner, version: '16.4.1')
+
+ expect { post_graphql(query, **args) }.not_to exceed_all_query_limit(control)
+ end
+ end
end
describe 'for inactive runner' do
@@ -501,8 +591,14 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
end
describe 'for runner with status' do
- let_it_be(:stale_runner) { create(:ci_runner, description: 'Stale runner 1', created_at: 3.months.ago) }
- let_it_be(:never_contacted_instance_runner) { create(:ci_runner, description: 'Missing runner 1', created_at: 1.month.ago, contacted_at: nil) }
+ let_it_be(:stale_runner) do
+ create(:ci_runner, description: 'Stale runner 1',
+ created_at: (3.months + 1.second).ago, contacted_at: (3.months + 1.second).ago)
+ end
+
+ let_it_be(:never_contacted_instance_runner) do
+ create(:ci_runner, description: 'Missing runner 1', created_at: 1.month.ago, contacted_at: nil)
+ end
let(:query) do
%(
@@ -918,8 +1014,6 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
end
context 'when requesting individual fields' do
- using RSpec::Parameterized::TableSyntax
-
where(:field) do
[
'detailedStatus { id detailsPath group icon text }',
diff --git a/spec/requests/api/graphql/ci/runner_web_url_edge_spec.rb b/spec/requests/api/graphql/ci/runner_web_url_edge_spec.rb
index e84a1ca4cc4..76e2dda4ce2 100644
--- a/spec/requests/api/graphql/ci/runner_web_url_edge_spec.rb
+++ b/spec/requests/api/graphql/ci/runner_web_url_edge_spec.rb
@@ -25,16 +25,22 @@ RSpec.describe 'RunnerWebUrlEdge', feature_category: :runner_fleet do
GQL
end
- before do
+ subject(:request) do
post_graphql(query, current_user: user, variables: { path: group.full_path })
end
context 'with an authorized user' do
let(:user) { create_default(:user, :admin) }
- it_behaves_like 'a working graphql query'
+ it_behaves_like 'a working graphql query' do
+ before do
+ request
+ end
+ end
it 'returns correct URLs' do
+ request
+
expect(edges_graphql_data).to match_array [
{
'editUrl' => Gitlab::Routing.url_helpers.edit_group_runner_url(group, group_runner),
@@ -47,10 +53,14 @@ RSpec.describe 'RunnerWebUrlEdge', feature_category: :runner_fleet do
context 'with an unauthorized user' do
let(:user) { create(:user) }
- it_behaves_like 'a working graphql query'
+ it 'returns nil runners and an error' do
+ request
- it 'returns no edges' do
- expect(edges_graphql_data).to be_empty
+ expect(graphql_data.dig('group', 'runners')).to be_nil
+ expect(graphql_errors).to contain_exactly(a_hash_including(
+ 'message' => a_string_including("you don't have permission to perform this action"),
+ 'path' => %w[group runners]
+ ))
end
end
end
diff --git a/spec/requests/api/graphql/ci/runners_spec.rb b/spec/requests/api/graphql/ci/runners_spec.rb
index 3f6d39435fd..c5571086700 100644
--- a/spec/requests/api/graphql/ci/runners_spec.rb
+++ b/spec/requests/api/graphql/ci/runners_spec.rb
@@ -72,10 +72,16 @@ RSpec.describe 'Query.runners', feature_category: :runner_fleet do
args = { current_user: admin2, token: { personal_access_token: personal_access_token } }
control = ActiveRecord::QueryRecorder.new { post_graphql(query, **args) }
- create(:ci_runner, :instance, version: '14.0.0', tag_list: %w[tag5 tag6], creator: admin2)
- create(:ci_runner, :project, version: '14.0.1', projects: [project], tag_list: %w[tag3 tag8],
+ runner2 = create(:ci_runner, :instance, version: '14.0.0', tag_list: %w[tag5 tag6], creator: admin2)
+ runner3 = create(:ci_runner, :project, version: '14.0.1', projects: [project], tag_list: %w[tag3 tag8],
creator: current_user)
+ create(:ci_build, :failed, runner: runner2)
+ create(:ci_runner_machine, runner: runner2, version: '16.4.1')
+
+ create(:ci_build, :failed, runner: runner3)
+ create(:ci_runner_machine, runner: runner3, version: '16.4.0')
+
expect { post_graphql(query, **args) }.not_to exceed_query_limit(control)
end
end
diff --git a/spec/requests/api/graphql/group/dependency_proxy_blobs_spec.rb b/spec/requests/api/graphql/group/dependency_proxy_blobs_spec.rb
index 961de84234c..869147f17b3 100644
--- a/spec/requests/api/graphql/group/dependency_proxy_blobs_spec.rb
+++ b/spec/requests/api/graphql/group/dependency_proxy_blobs_spec.rb
@@ -26,7 +26,6 @@ RSpec.describe 'getting dependency proxy blobs in a group', feature_category: :d
#{query_graphql_field('dependency_proxy_blobs', {}, dependency_proxy_blob_fields)}
dependencyProxyBlobCount
dependencyProxyTotalSize
- dependencyProxyTotalSizeInBytes
dependencyProxyTotalSizeBytes
GQL
end
@@ -44,7 +43,7 @@ RSpec.describe 'getting dependency proxy blobs in a group', feature_category: :d
let(:dependency_proxy_blobs_response) { graphql_data.dig('group', 'dependencyProxyBlobs', 'edges') }
let(:dependency_proxy_blob_count_response) { graphql_data.dig('group', 'dependencyProxyBlobCount') }
let(:dependency_proxy_total_size_response) { graphql_data.dig('group', 'dependencyProxyTotalSize') }
- let(:dependency_proxy_total_size_in_bytes_response) { graphql_data.dig('group', 'dependencyProxyTotalSizeInBytes') }
+ let(:dependency_proxy_total_size_bytes_response) { graphql_data.dig('group', 'dependencyProxyTotalSizeBytes') }
before do
stub_config(dependency_proxy: { enabled: true })
@@ -131,7 +130,7 @@ RSpec.describe 'getting dependency proxy blobs in a group', feature_category: :d
it 'returns the total size in bytes' do
subject
expected_size = blobs.inject(0) { |sum, blob| sum + blob.size }
- expect(dependency_proxy_total_size_in_bytes_response).to eq(expected_size)
+ expect(dependency_proxy_total_size_bytes_response.to_i).to eq(expected_size)
end
context 'with a giant size blob' do
diff --git a/spec/requests/api/graphql/group/work_item_spec.rb b/spec/requests/api/graphql/group/work_item_spec.rb
new file mode 100644
index 00000000000..9adb0acbf6b
--- /dev/null
+++ b/spec/requests/api/graphql/group/work_item_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'getting a single work item associated with a group', feature_category: :team_planning do
+ include GraphqlHelpers
+
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:reporter) { create(:user).tap { |user| group.add_reporter(user) } }
+
+ let_it_be(:group_work_item) do
+ create(
+ :work_item,
+ namespace: group,
+ author: reporter
+ )
+ end
+
+ let_it_be(:confidential_work_item) do
+ create(:work_item, :confidential, namespace: group, author: reporter)
+ end
+
+ let(:work_item_data) { graphql_data.dig('group', 'workItem') }
+ let(:query_group) { group }
+ let(:query_work_item) { group_work_item }
+ let(:params) { { iid: query_work_item.iid.to_s } }
+ let(:query) do
+ graphql_query_for(
+ 'group',
+ { 'fullPath' => query_group.full_path },
+ query_graphql_field('workItem', params, all_graphql_fields_for('workItems'.classify, max_depth: 2))
+ )
+ end
+
+ context 'when the user cannot read the work item' do
+ let(:current_user) { user }
+ let(:query_work_item) { confidential_work_item }
+
+ it 'does not return the work item' do
+ post_graphql(query, current_user: current_user)
+
+ expect(work_item_data).to be_nil
+ end
+ end
+
+ context 'when the user can read the work item' do
+ let(:current_user) { reporter }
+
+ it 'returns the work item' do
+ post_graphql(query, current_user: current_user)
+
+ expect(work_item_data).to include(
+ 'id' => query_work_item.to_gid.to_s,
+ 'iid' => query_work_item.iid.to_s
+ )
+ end
+
+ context 'when the namespace_level_work_items feature flag is disabled' do
+ before do
+ stub_feature_flags(namespace_level_work_items: false)
+ end
+
+ it 'does not return the work item' do
+ post_graphql(query, current_user: current_user)
+
+ expect(work_item_data).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/group/work_items_spec.rb b/spec/requests/api/graphql/group/work_items_spec.rb
index f6dad577b5e..ef96ef4754a 100644
--- a/spec/requests/api/graphql/group/work_items_spec.rb
+++ b/spec/requests/api/graphql/group/work_items_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe 'getting a work_item list for a group', feature_category: :team_p
let_it_be(:other_work_item) { create(:work_item) }
let(:work_items_data) { graphql_data['group']['workItems']['nodes'] }
- let(:work_item_filter_params) { {} }
+ let(:item_filter_params) { {} }
let(:current_user) { user }
let(:query_group) { group }
@@ -47,6 +47,28 @@ RSpec.describe 'getting a work_item list for a group', feature_category: :team_p
QUERY
end
+ it_behaves_like 'graphql work item list request spec' do
+ let_it_be(:container_build_params) { { namespace: group } }
+ let(:work_item_node_path) { %w[group workItems nodes] }
+
+ def post_query(request_user = current_user)
+ post_graphql(query, current_user: request_user)
+ end
+ end
+
+ context 'when filtering by search' do
+ let(:item_filter_params) { { search: 'search_term', in: [:DESCRIPTION] } }
+
+ # TODO: https://gitlab.com/gitlab-org/gitlab/-/work_items/393126
+ it 'returns an error since search is not implemented at the group level yet' do
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_errors).to contain_exactly(
+ hash_including('message' => 'Searching is not available for work items at the namespace level yet')
+ )
+ end
+ end
+
context 'when the user can not see confidential work_items' do
it_behaves_like 'a working graphql query' do
before do
@@ -66,12 +88,6 @@ RSpec.describe 'getting a work_item list for a group', feature_category: :team_p
context 'when the user can see confidential work_items' do
let(:current_user) { reporter }
- it_behaves_like 'a working graphql query' do
- before do
- post_graphql(query, current_user: current_user)
- end
- end
-
it 'returns also confidential work_items' do
post_graphql(query, current_user: current_user)
@@ -96,7 +112,7 @@ RSpec.describe 'getting a work_item list for a group', feature_category: :team_p
graphql_dig_at(work_items_data, :id)
end
- def query(params = work_item_filter_params)
+ def query(params = item_filter_params)
graphql_query_for(
'group',
{ 'fullPath' => query_group.full_path },
diff --git a/spec/requests/api/graphql/group_query_spec.rb b/spec/requests/api/graphql/group_query_spec.rb
index 6debe2d3d67..1dcbc44c587 100644
--- a/spec/requests/api/graphql/group_query_spec.rb
+++ b/spec/requests/api/graphql/group_query_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe 'getting group information', :with_license, feature_category: :gr
# similar to the API "GET /groups/:id"
describe "Query group(fullPath)" do
def group_query(group)
- fields = all_graphql_fields_for('Group')
+ fields = all_graphql_fields_for('Group', excluded: %w[runners])
# TODO: Set required timelogs args elsewhere https://gitlab.com/gitlab-org/gitlab/-/issues/325499
fields.selection['timelogs(startDate: "2021-03-01" endDate: "2021-03-30")'] = fields.selection.delete('timelogs')
diff --git a/spec/requests/api/graphql/groups_query_spec.rb b/spec/requests/api/graphql/groups_query_spec.rb
index 460cb40b68a..7310382553f 100644
--- a/spec/requests/api/graphql/groups_query_spec.rb
+++ b/spec/requests/api/graphql/groups_query_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'searching groups', :with_license, feature_category: :groups_and_
let(:fields) do
<<~FIELDS
nodes {
- #{all_graphql_fields_for('Group')}
+ #{all_graphql_fields_for('Group', excluded: %w[runners])}
}
FIELDS
end
diff --git a/spec/requests/api/graphql/jobs_query_spec.rb b/spec/requests/api/graphql/jobs_query_spec.rb
index 4248a03fa74..7607aeac6e0 100644
--- a/spec/requests/api/graphql/jobs_query_spec.rb
+++ b/spec/requests/api/graphql/jobs_query_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe 'getting job information', feature_category: :continuous_integrat
:jobs, {}, %(
count
nodes {
- #{all_graphql_fields_for(::Types::Ci::JobType, max_depth: 1, excluded: %w[aiFailureAnalysis])}
+ #{all_graphql_fields_for(::Types::Ci::JobType, max_depth: 1)}
})
)
end
diff --git a/spec/requests/api/graphql/merge_requests/codequality_reports_comparer_spec.rb b/spec/requests/api/graphql/merge_requests/codequality_reports_comparer_spec.rb
new file mode 100644
index 00000000000..2939e9307e9
--- /dev/null
+++ b/spec/requests/api/graphql/merge_requests/codequality_reports_comparer_spec.rb
@@ -0,0 +1,185 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Query.project.mergeRequest.codequalityReportsComparer', feature_category: :code_quality do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:merge_request) { create(:merge_request, :with_codequality_reports, source_project: project) }
+
+ let(:mock_report) do
+ {
+ status: :parsed,
+ data: {
+ status: 'failed',
+ new_errors: [
+ {
+ description: "Method `new_array` has 12 arguments (exceeds 4 allowed). Consider refactoring.",
+ fingerprint: "15cdb5c53afd42bc22f8ca366a08d547",
+ severity: "major",
+ file_path: "foo.rb",
+ line: 10,
+ engine_name: "structure"
+ },
+ {
+ description: "Method `backwards_array` has 12 arguments (exceeds 4 allowed). Consider refactoring.",
+ fingerprint: "f3bdc1e8c102ba5fbd9e7f6cda51c95e",
+ severity: "major",
+ file_path: "foo.rb",
+ line: 14,
+ engine_name: "structure"
+ },
+ {
+ description: "Avoid parameter lists longer than 5 parameters. [12/5]",
+ fingerprint: "ab5f8b935886b942d621399f5a2ca16e",
+ severity: "minor",
+ file_path: "foo.rb",
+ line: 14,
+ engine_name: "rubocop"
+ }
+ ],
+ resolved_errors: [],
+ existing_errors: [],
+ summary: {
+ total: 3,
+ resolved: 0,
+ errored: 3
+ }
+ }.deep_stringify_keys
+ }
+ end
+
+ let(:codequality_reports_comparer_fields) do
+ <<~QUERY
+ codequalityReportsComparer {
+ report {
+ status
+ newErrors {
+ description
+ fingerprint
+ severity
+ filePath
+ line
+ webUrl
+ engineName
+ }
+ resolvedErrors {
+ description
+ fingerprint
+ severity
+ filePath
+ line
+ webUrl
+ engineName
+ }
+ existingErrors {
+ description
+ fingerprint
+ severity
+ filePath
+ line
+ webUrl
+ engineName
+ }
+ summary {
+ errored
+ resolved
+ total
+ }
+ }
+ }
+ QUERY
+ end
+
+ let(:merge_request_fields) do
+ query_graphql_field(:merge_request, { iid: merge_request.iid.to_s }, codequality_reports_comparer_fields)
+ end
+
+ let(:query) { graphql_query_for(:project, { full_path: project.full_path }, merge_request_fields) }
+
+ subject(:result) { graphql_data_at(:project, :merge_request, :codequality_reports_comparer) }
+
+ before do
+ allow_next_found_instance_of(MergeRequest) do |merge_request|
+ allow(merge_request).to receive(:compare_codequality_reports).and_return(mock_report)
+ end
+ end
+
+ context 'when the user is not authorized to read the field' do
+ before do
+ post_graphql(query, current_user: user)
+ end
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when the user is authorized to read the field' do
+ before_all do
+ project.add_reporter(user)
+ end
+
+ before do
+ post_graphql(query, current_user: user)
+ end
+
+ context 'when sast_reports_in_inline_diff FF is disabled' do
+ before_all do
+ stub_feature_flags(sast_reports_in_inline_diff: false)
+ end
+
+ it 'returns null for codequality_reports_comparer field' do
+ expect(result).to be_nil
+ end
+ end
+
+ it 'returns expected data' do
+ expect(result).to match(
+ a_hash_including(
+ {
+ report: {
+ status: 'FAILED',
+ newErrors: [
+ {
+ description: 'Method `new_array` has 12 arguments (exceeds 4 allowed). Consider refactoring.',
+ fingerprint: '15cdb5c53afd42bc22f8ca366a08d547',
+ severity: 'MAJOR',
+ filePath: 'foo.rb',
+ line: 10,
+ webUrl: nil,
+ engineName: 'structure'
+ },
+ {
+ description: 'Method `backwards_array` has 12 arguments (exceeds 4 allowed). Consider refactoring.',
+ fingerprint: 'f3bdc1e8c102ba5fbd9e7f6cda51c95e',
+ severity: 'MAJOR',
+ filePath: 'foo.rb',
+ line: 14,
+ webUrl: nil,
+ engineName: 'structure'
+ },
+ {
+ description: 'Avoid parameter lists longer than 5 parameters. [12/5]',
+ fingerprint: 'ab5f8b935886b942d621399f5a2ca16e',
+ severity: 'MINOR',
+ filePath: 'foo.rb',
+ line: 14,
+ webUrl: nil,
+ engineName: 'rubocop'
+ }
+ ],
+ resolvedErrors: [],
+ existingErrors: [],
+ summary: {
+ errored: 3,
+ resolved: 0,
+ total: 3
+ }
+ }
+ }.deep_stringify_keys
+ )
+ )
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/admin/abuse_report_labels/create_spec.rb b/spec/requests/api/graphql/mutations/admin/abuse_report_labels/create_spec.rb
new file mode 100644
index 00000000000..2a20d96d9c8
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/admin/abuse_report_labels/create_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Admin::AbuseReportLabels::Create, feature_category: :insider_threat do
+ include GraphqlHelpers
+
+ let(:params) do
+ {
+ 'title' => 'foo',
+ 'color' => '#FF0000'
+ }
+ end
+
+ let(:mutation) { graphql_mutation(:abuse_report_label_create, params) }
+
+ subject { post_graphql_mutation(mutation, current_user: current_user) }
+
+ def mutation_response
+ graphql_mutation_response(:abuse_report_label_create)
+ end
+
+ context 'when the user does not have permission to create a label', :enable_admin_mode do
+ let_it_be(:current_user) { create(:user) }
+
+ it_behaves_like 'a mutation that returns a top-level access error'
+
+ it 'does not create the label' do
+ expect { subject }.not_to change { Admin::AbuseReportLabel.count }
+ end
+ end
+
+ context 'when the user has permission to create a label', :enable_admin_mode do
+ let_it_be(:current_user) { create(:admin) }
+
+ it 'creates the label' do
+ expect { subject }.to change { Admin::AbuseReportLabel.count }.to(1)
+
+ expect(mutation_response).to include('label' => a_hash_including(params))
+ end
+
+ context 'when there are errors' do
+ it 'does not create the label', :aggregate_failures do
+ create(:abuse_report_label, title: params['title'])
+
+ expect { subject }.not_to change { Label.count }
+
+ expect(mutation_response).to include({
+ 'label' => nil,
+ 'errors' => ['Title has already been taken']
+ })
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/ci/pipeline_schedule/create_spec.rb b/spec/requests/api/graphql/mutations/ci/pipeline_schedule/create_spec.rb
index b2fe2754198..0e49fc389c8 100644
--- a/spec/requests/api/graphql/mutations/ci/pipeline_schedule/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/ci/pipeline_schedule/create_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'PipelineSchedulecreate', feature_category: :continuous_integration do
include GraphqlHelpers
- let_it_be(:user) { create(:user) }
+ let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project, :public, :repository) }
let(:mutation) do
@@ -56,30 +56,21 @@ RSpec.describe 'PipelineSchedulecreate', feature_category: :continuous_integrati
let(:mutation_response) { graphql_mutation_response(:pipeline_schedule_create) }
context 'when unauthorized' do
- it 'returns an error' do
- post_graphql_mutation(mutation, current_user: user)
-
- expect(graphql_errors).not_to be_empty
- expect(graphql_errors[0]['message'])
- .to eq(
- "The resource that you are attempting to access does not exist " \
- "or you don't have permission to perform this action"
- )
- end
+ it_behaves_like 'a mutation on an unauthorized resource'
end
context 'when authorized' do
before_all do
- project.add_developer(user)
+ project.add_developer(current_user)
end
context 'when success' do
it do
- post_graphql_mutation(mutation, current_user: user)
+ post_graphql_mutation(mutation, current_user: current_user)
expect(response).to have_gitlab_http_status(:success)
- expect(mutation_response['pipelineSchedule']['owner']['id']).to eq(user.to_global_id.to_s)
+ expect(mutation_response['pipelineSchedule']['owner']['id']).to eq(current_user.to_global_id.to_s)
%w[description cron cronTimezone active].each do |key|
expect(mutation_response['pipelineSchedule'][key]).to eq(pipeline_schedule_parameters[key.to_sym])
@@ -90,7 +81,7 @@ RSpec.describe 'PipelineSchedulecreate', feature_category: :continuous_integrati
expect(mutation_response['pipelineSchedule']['variables']['nodes'][0]['key']).to eq('AAA')
expect(mutation_response['pipelineSchedule']['variables']['nodes'][0]['value']).to eq('AAA123')
- expect(mutation_response['pipelineSchedule']['owner']['id']).to eq(user.to_global_id.to_s)
+ expect(mutation_response['pipelineSchedule']['owner']['id']).to eq(current_user.to_global_id.to_s)
expect(mutation_response['errors']).to eq([])
end
@@ -110,7 +101,7 @@ RSpec.describe 'PipelineSchedulecreate', feature_category: :continuous_integrati
end
it do
- post_graphql_mutation(mutation, current_user: user)
+ post_graphql_mutation(mutation, current_user: current_user)
expect(response).to have_gitlab_http_status(:success)
@@ -134,7 +125,7 @@ RSpec.describe 'PipelineSchedulecreate', feature_category: :continuous_integrati
end
it 'returns error' do
- post_graphql_mutation(mutation, current_user: user)
+ post_graphql_mutation(mutation, current_user: current_user)
expect(response).to have_gitlab_http_status(:success)
diff --git a/spec/requests/api/graphql/mutations/ci/pipeline_schedule/delete_spec.rb b/spec/requests/api/graphql/mutations/ci/pipeline_schedule/delete_spec.rb
index e79395bb52c..7b1a21971df 100644
--- a/spec/requests/api/graphql/mutations/ci/pipeline_schedule/delete_spec.rb
+++ b/spec/requests/api/graphql/mutations/ci/pipeline_schedule/delete_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
RSpec.describe 'PipelineScheduleDelete', feature_category: :continuous_integration do
include GraphqlHelpers
- let_it_be(:user) { create(:user) }
+ let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
- let_it_be(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project, owner: user) }
+ let_it_be(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project, owner: current_user) }
let(:mutation) do
graphql_mutation(
@@ -23,26 +23,17 @@ RSpec.describe 'PipelineScheduleDelete', feature_category: :continuous_integrati
let(:mutation_response) { graphql_mutation_response(:pipeline_schedule_delete) }
context 'when unauthorized' do
- it 'returns an error' do
- post_graphql_mutation(mutation, current_user: create(:user))
-
- expect(graphql_errors).not_to be_empty
- expect(graphql_errors[0]['message'])
- .to eq(
- "The resource that you are attempting to access does not exist " \
- "or you don't have permission to perform this action"
- )
- end
+ it_behaves_like 'a mutation on an unauthorized resource'
end
context 'when authorized' do
before_all do
- project.add_maintainer(user)
+ project.add_maintainer(current_user)
end
context 'when success' do
it do
- post_graphql_mutation(mutation, current_user: user)
+ post_graphql_mutation(mutation, current_user: current_user)
expect(response).to have_gitlab_http_status(:success)
expect(mutation_response['errors']).to eq([])
@@ -58,7 +49,7 @@ RSpec.describe 'PipelineScheduleDelete', feature_category: :continuous_integrati
end
it do
- post_graphql_mutation(mutation, current_user: user)
+ post_graphql_mutation(mutation, current_user: current_user)
expect(response).to have_gitlab_http_status(:success)
@@ -70,7 +61,7 @@ RSpec.describe 'PipelineScheduleDelete', feature_category: :continuous_integrati
let(:pipeline_schedule_id) { 'gid://gitlab/Ci::PipelineSchedule/0' }
it do
- post_graphql_mutation(mutation, current_user: user)
+ post_graphql_mutation(mutation, current_user: current_user)
expect(graphql_errors).not_to be_empty
expect(graphql_errors[0]['message'])
diff --git a/spec/requests/api/graphql/mutations/ci/pipeline_schedule/play_spec.rb b/spec/requests/api/graphql/mutations/ci/pipeline_schedule/play_spec.rb
index 55ecf8f287e..28e8913d262 100644
--- a/spec/requests/api/graphql/mutations/ci/pipeline_schedule/play_spec.rb
+++ b/spec/requests/api/graphql/mutations/ci/pipeline_schedule/play_spec.rb
@@ -5,14 +5,14 @@ require 'spec_helper'
RSpec.describe 'PipelineSchedulePlay', feature_category: :continuous_integration do
include GraphqlHelpers
- let_it_be(:user) { create(:user) }
+ let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:pipeline_schedule) do
create(
:ci_pipeline_schedule,
:every_minute,
project: project,
- owner: user
+ owner: current_user
)
end
@@ -30,21 +30,12 @@ RSpec.describe 'PipelineSchedulePlay', feature_category: :continuous_integration
let(:mutation_response) { graphql_mutation_response(:pipeline_schedule_play) }
context 'when unauthorized' do
- it 'returns an error' do
- post_graphql_mutation(mutation, current_user: create(:user))
-
- expect(graphql_errors).not_to be_empty
- expect(graphql_errors[0]['message'])
- .to eq(
- "The resource that you are attempting to access does not exist " \
- "or you don't have permission to perform this action"
- )
- end
+ it_behaves_like 'a mutation on an unauthorized resource'
end
context 'when authorized', :sidekiq_inline do
before_all do
- project.add_maintainer(user)
+ project.add_maintainer(current_user)
pipeline_schedule.update_columns(next_run_at: 2.hours.ago)
end
@@ -54,7 +45,7 @@ RSpec.describe 'PipelineSchedulePlay', feature_category: :continuous_integration
it do
expect(Ci::CreatePipelineService).to receive_message_chain(:new, :execute).and_return(service_response)
- post_graphql_mutation(mutation, current_user: user)
+ post_graphql_mutation(mutation, current_user: current_user)
expect(mutation_response['pipelineSchedule']['id']).to include(pipeline_schedule.id.to_s)
new_next_run_at = DateTime.parse(mutation_response['pipelineSchedule']['nextRunAt'])
@@ -68,9 +59,9 @@ RSpec.describe 'PipelineSchedulePlay', feature_category: :continuous_integration
it do
expect(RunPipelineScheduleWorker)
.to receive(:perform_async)
- .with(pipeline_schedule.id, user.id).and_return(nil)
+ .with(pipeline_schedule.id, current_user.id).and_return(nil)
- post_graphql_mutation(mutation, current_user: user)
+ post_graphql_mutation(mutation, current_user: current_user)
expect(mutation_response['pipelineSchedule']).to be_nil
expect(mutation_response['errors']).to match_array(['Unable to schedule a pipeline to run immediately.'])
diff --git a/spec/requests/api/graphql/mutations/ci/pipeline_schedule/update_spec.rb b/spec/requests/api/graphql/mutations/ci/pipeline_schedule/update_spec.rb
index ec1595f393f..12f6e09913a 100644
--- a/spec/requests/api/graphql/mutations/ci/pipeline_schedule/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/ci/pipeline_schedule/update_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
RSpec.describe 'PipelineScheduleUpdate', feature_category: :continuous_integration do
include GraphqlHelpers
- let_it_be(:user) { create(:user) }
+ let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project, :public, :repository) }
- let_it_be(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project, owner: user) }
+ let_it_be(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project, owner: current_user) }
let_it_be(:variable_one) do
create(:ci_pipeline_schedule_variable, key: 'foo', value: 'foovalue', pipeline_schedule: pipeline_schedule)
@@ -51,21 +51,12 @@ RSpec.describe 'PipelineScheduleUpdate', feature_category: :continuous_integrati
let(:mutation_response) { graphql_mutation_response(:pipeline_schedule_update) }
context 'when unauthorized' do
- it 'returns an error' do
- post_graphql_mutation(mutation, current_user: create(:user))
-
- expect(graphql_errors).not_to be_empty
- expect(graphql_errors[0]['message'])
- .to eq(
- "The resource that you are attempting to access does not exist " \
- "or you don't have permission to perform this action"
- )
- end
+ it_behaves_like 'a mutation on an unauthorized resource'
end
context 'when authorized' do
before_all do
- project.add_developer(user)
+ project.add_developer(current_user)
end
context 'when success' do
@@ -83,7 +74,7 @@ RSpec.describe 'PipelineScheduleUpdate', feature_category: :continuous_integrati
end
it do
- post_graphql_mutation(mutation, current_user: user)
+ post_graphql_mutation(mutation, current_user: current_user)
expect(response).to have_gitlab_http_status(:success)
@@ -117,7 +108,7 @@ RSpec.describe 'PipelineScheduleUpdate', feature_category: :continuous_integrati
end
it 'processes variables correctly' do
- post_graphql_mutation(mutation, current_user: user)
+ post_graphql_mutation(mutation, current_user: current_user)
expect(response).to have_gitlab_http_status(:success)
@@ -145,7 +136,7 @@ RSpec.describe 'PipelineScheduleUpdate', feature_category: :continuous_integrati
end
it do
- post_graphql_mutation(mutation, current_user: user)
+ post_graphql_mutation(mutation, current_user: current_user)
expect(response).to have_gitlab_http_status(:success)
@@ -172,7 +163,7 @@ RSpec.describe 'PipelineScheduleUpdate', feature_category: :continuous_integrati
end
it 'returns error' do
- post_graphql_mutation(mutation, current_user: user)
+ post_graphql_mutation(mutation, current_user: current_user)
expect(response).to have_gitlab_http_status(:success)
diff --git a/spec/requests/api/graphql/mutations/ci/pipeline_trigger/create_spec.rb b/spec/requests/api/graphql/mutations/ci/pipeline_trigger/create_spec.rb
index 1af12d51e1e..2711bb9a4bd 100644
--- a/spec/requests/api/graphql/mutations/ci/pipeline_trigger/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/ci/pipeline_trigger/create_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'PipelineTriggerCreate', feature_category: :continuous_integration do
include GraphqlHelpers
- let_it_be(:user) { create(:user) }
+ let_it_be(:current_user) { create(:user) }
let_it_be_with_reload(:project) { create(:project) }
let(:mutation) { graphql_mutation(:pipeline_trigger_create, params) }
@@ -19,24 +19,15 @@ RSpec.describe 'PipelineTriggerCreate', feature_category: :continuous_integratio
}
end
- subject { post_graphql_mutation(mutation, current_user: user) }
+ subject { post_graphql_mutation(mutation, current_user: current_user) }
context 'when unauthorized' do
- it 'returns an error' do
- subject
-
- expect(graphql_errors).not_to be_empty
- expect(graphql_errors[0]['message'])
- .to eq(
- "The resource that you are attempting to access does not exist " \
- "or you don't have permission to perform this action"
- )
- end
+ it_behaves_like 'a mutation on an unauthorized resource'
end
context 'when authorized' do
before_all do
- project.add_owner(user)
+ project.add_owner(current_user)
end
context 'when the params are invalid' do
@@ -60,9 +51,9 @@ RSpec.describe 'PipelineTriggerCreate', feature_category: :continuous_integratio
expect(graphql_data_at(:pipeline_trigger_create, :pipeline_trigger)).to match a_hash_including(
'owner' => a_hash_including(
- 'id' => user.to_global_id.to_s,
- 'username' => user.username,
- 'name' => user.name
+ 'id' => current_user.to_global_id.to_s,
+ 'username' => current_user.username,
+ 'name' => current_user.name
),
'description' => description,
"canAccessProject" => true,
diff --git a/spec/requests/api/graphql/mutations/merge_requests/update_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/update_spec.rb
new file mode 100644
index 00000000000..48db23569b6
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/merge_requests/update_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Update of an existing merge request', feature_category: :code_review_workflow do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be_with_reload(:merge_request) { create(:merge_request, source_project: project) }
+
+ let(:input) { { 'iid' => merge_request.iid.to_s } }
+ let(:extra_params) { { project_path: project.full_path } }
+ let(:input_params) { input.merge(extra_params) }
+ let(:mutation) { graphql_mutation(:merge_request_update, input_params, nil, ['productAnalyticsState']) }
+ let(:mutation_response) { graphql_mutation_response(:merge_request_update) }
+
+ context 'when the user is not allowed to update the merge request' do
+ it_behaves_like 'a mutation that returns a top-level access error'
+ end
+
+ context 'when the user has permissions to update the merge request' do
+ before_all do
+ project.add_developer(current_user)
+ end
+
+ it_behaves_like 'updating time estimate' do
+ let(:resource) { merge_request }
+ let(:mutation_name) { 'mergeRequestUpdate' }
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb b/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb
index 0e55b6f2c9f..d05cc19de96 100644
--- a/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb
@@ -19,10 +19,6 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Create, feature_categ
graphql_mutation_response(:create_annotation)
end
- before do
- stub_feature_flags(remove_monitor_metrics: false)
- end
-
specify { expect(described_class).to require_graphql_authorizations(:admin_metrics_dashboard_annotation) }
context 'when annotation source is environment' do
@@ -38,18 +34,6 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Create, feature_categ
graphql_mutation(:create_annotation, variables)
end
- context 'when the user does not have permission' do
- before do
- project.add_reporter(current_user)
- end
-
- it 'does not create the annotation' do
- expect do
- post_graphql_mutation(mutation, current_user: current_user)
- end.not_to change { Metrics::Dashboard::Annotation.count }
- end
- end
-
context 'when the user has permission' do
before do
project.add_developer(current_user)
@@ -67,7 +51,8 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Create, feature_categ
graphql_mutation(:create_annotation, variables)
end
- it_behaves_like 'a mutation that returns top-level errors', errors: [described_class::ANNOTATION_SOURCE_ARGUMENT_ERROR]
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: [Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR]
end
context 'when environment_id is invalid' do
@@ -87,10 +72,6 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Create, feature_categ
end
context 'when metrics dashboard feature is unavailable' do
- before do
- stub_feature_flags(remove_monitor_metrics: true)
- end
-
it_behaves_like 'a mutation that returns top-level errors',
errors: [Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR]
end
@@ -127,19 +108,8 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Create, feature_categ
graphql_mutation(:create_annotation, variables)
end
- it_behaves_like 'a mutation that returns top-level errors', errors: [described_class::ANNOTATION_SOURCE_ARGUMENT_ERROR]
- end
- end
-
- context 'without permission' do
- before do
- project.add_guest(current_user)
- end
-
- it 'does not create the annotation' do
- expect do
- post_graphql_mutation(mutation, current_user: current_user)
- end.not_to change { Metrics::Dashboard::Annotation.count }
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: [Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR]
end
end
@@ -174,7 +144,8 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Create, feature_categ
graphql_mutation(:create_annotation, variables)
end
- it_behaves_like 'a mutation that returns top-level errors', errors: [described_class::ANNOTATION_SOURCE_ARGUMENT_ERROR]
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: [Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR]
end
[:environment_id, :cluster_id].each do |arg_name|
diff --git a/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/delete_spec.rb b/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/delete_spec.rb
index c81f6381398..6768998b31c 100644
--- a/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/delete_spec.rb
+++ b/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/delete_spec.rb
@@ -7,19 +7,14 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Delete, feature_categ
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project, :private, :repository) }
- let_it_be(:annotation) { create(:metrics_dashboard_annotation) }
- let(:variables) { { id: GitlabSchema.id_from_object(annotation).to_s } }
+ let(:variables) { { id: 'ids-dont-matter' } }
let(:mutation) { graphql_mutation(:delete_annotation, variables) }
def mutation_response
graphql_mutation_response(:delete_annotation)
end
- before do
- stub_feature_flags(remove_monitor_metrics: false)
- end
-
specify { expect(described_class).to require_graphql_authorizations(:admin_metrics_dashboard_annotation) }
context 'when the user has permission to delete the annotation' do
@@ -30,16 +25,11 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Delete, feature_categ
context 'with invalid params' do
let(:variables) { { id: GitlabSchema.id_from_object(project).to_s } }
- it_behaves_like 'a mutation that returns top-level errors' do
- let(:match_errors) { contain_exactly(include('invalid value for id')) }
- end
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: [Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR]
end
context 'when metrics dashboard feature is unavailable' do
- before do
- stub_feature_flags(remove_monitor_metrics: true)
- end
-
it_behaves_like 'a mutation that returns top-level errors',
errors: [Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR]
end
@@ -51,11 +41,5 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Delete, feature_categ
end
it_behaves_like 'a mutation that returns top-level errors', errors: [Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR]
-
- it 'does not delete the annotation' do
- expect do
- post_graphql_mutation(mutation, current_user: current_user)
- end.not_to change { Metrics::Dashboard::Annotation.count }
- end
end
end
diff --git a/spec/requests/api/graphql/mutations/work_items/linked_items/add_spec.rb b/spec/requests/api/graphql/mutations/work_items/linked_items/add_spec.rb
index f18e0e44905..f30b7d0ea73 100644
--- a/spec/requests/api/graphql/mutations/work_items/linked_items/add_spec.rb
+++ b/spec/requests/api/graphql/mutations/work_items/linked_items/add_spec.rb
@@ -104,18 +104,15 @@ RSpec.describe "Add linked items to a work item", feature_category: :portfolio_m
context 'when there are more than the max allowed items to link' do
let(:max_work_items) { Mutations::WorkItems::LinkedItems::Base::MAX_WORK_ITEMS }
- let(:error_msg) { "No more than #{max_work_items} work items can be linked at the same time." }
-
- before do
- max_work_items.times { |i| ids_to_link.push("gid://gitlab/WorkItem/#{i}") }
- end
+ let(:ids_to_link) { (0..max_work_items).map { |i| "gid://gitlab/WorkItem/#{i}" } }
+ let(:error_msg) { "No more than #{max_work_items} work items can be modified at the same time." }
it 'returns an error message' do
expect do
post_graphql_mutation(mutation, current_user: current_user)
end.not_to change { WorkItems::RelatedWorkItemLink.count }
- expect_graphql_errors_to_include("No more than #{max_work_items} work items can be linked at the same time.")
+ expect_graphql_errors_to_include(error_msg)
end
end
end
diff --git a/spec/requests/api/graphql/mutations/work_items/linked_items/remove_spec.rb b/spec/requests/api/graphql/mutations/work_items/linked_items/remove_spec.rb
new file mode 100644
index 00000000000..2ed4e1b4602
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/work_items/linked_items/remove_spec.rb
@@ -0,0 +1,120 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe "Remove items linked to a work item", feature_category: :portfolio_management do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:guest) { create(:user).tap { |user| project.add_guest(user) } }
+ let_it_be(:work_item) { create(:work_item, project: project) }
+ let_it_be(:related1) { create(:work_item, project: project) }
+ let_it_be(:related2) { create(:work_item, project: project) }
+ let_it_be(:link1) { create(:work_item_link, source: work_item, target: related1) }
+ let_it_be(:link2) { create(:work_item_link, source: work_item, target: related2) }
+
+ let(:mutation_response) { graphql_mutation_response(:work_item_remove_linked_items) }
+ let(:mutation) { graphql_mutation(:workItemRemoveLinkedItems, input, fields) }
+ let(:ids_to_unlink) { [related1.to_global_id.to_s, related2.to_global_id.to_s] }
+ let(:input) { { 'id' => work_item.to_global_id.to_s, 'workItemsIds' => ids_to_unlink } }
+
+ let(:fields) do
+ <<~FIELDS
+ workItem {
+ id
+ widgets {
+ type
+ ... on WorkItemWidgetLinkedItems {
+ linkedItems {
+ edges {
+ node {
+ linkType
+ workItem {
+ id
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ errors
+ message
+ FIELDS
+ end
+
+ context 'when the user is not allowed to read the work item' do
+ let(:current_user) { create(:user) }
+
+ it_behaves_like 'a mutation that returns a top-level access error'
+ end
+
+ context 'when user has permissions to read the work item' do
+ let(:current_user) { guest }
+
+ it 'unlinks the work items' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.to change { WorkItems::RelatedWorkItemLink.count }.by(-2)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['workItem']).to include('id' => work_item.to_global_id.to_s)
+ expect(mutation_response['message']).to eq("Successfully unlinked IDs: #{related1.id} and #{related2.id}.")
+ expect(mutation_response['workItem']['widgets']).to include(
+ {
+ 'linkedItems' => { 'edges' => [] }, 'type' => 'LINKED_ITEMS'
+ }
+ )
+ end
+
+ context 'when some items fail' do
+ let_it_be(:other_project) { create(:project, :private) }
+ let_it_be(:not_related) { create(:work_item, project: project) }
+ let_it_be(:no_access) { create(:work_item, project: other_project) }
+ let_it_be(:no_access_link) { create(:work_item_link, source: work_item, target: no_access) }
+
+ let(:ids_to_unlink) { [related1.to_global_id.to_s, not_related.to_global_id.to_s, no_access.to_global_id.to_s] }
+ let(:error_msg) do
+ "Successfully unlinked IDs: #{related1.id}. " \
+ "IDs with errors: #{no_access.id} could not be removed due to insufficient permissions, " \
+ "#{not_related.id} could not be removed due to not being linked."
+ end
+
+ it 'removes the valid item and includes failing ids in the response message' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.to change { WorkItems::RelatedWorkItemLink.count }.by(-1)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['message']).to eq(error_msg)
+ end
+ end
+
+ context 'when there are more than the max allowed items to unlink' do
+ let(:max_work_items) { Mutations::WorkItems::LinkedItems::Base::MAX_WORK_ITEMS }
+ let(:ids_to_unlink) { (0..max_work_items).map { |i| "gid://gitlab/WorkItem/#{i}" } }
+
+ it 'returns an error message' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.not_to change { WorkItems::RelatedWorkItemLink.count }
+
+ expect_graphql_errors_to_include("No more than #{max_work_items} work items can be modified at the same time.")
+ end
+ end
+
+ context 'when workItemsIds is empty' do
+ let(:ids_to_unlink) { [] }
+
+ it_behaves_like 'a mutation that returns top-level errors', errors: ['workItemsIds cannot be empty']
+ end
+
+ context 'when `linked_work_items` feature flag is disabled' do
+ before do
+ stub_feature_flags(linked_work_items: false)
+ end
+
+ it_behaves_like 'a mutation that returns a top-level access error'
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/work_items/update_spec.rb b/spec/requests/api/graphql/mutations/work_items/update_spec.rb
index cff21c10a5a..c7c68696888 100644
--- a/spec/requests/api/graphql/mutations/work_items/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/work_items/update_spec.rb
@@ -1431,6 +1431,20 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do
'User has not awarded emoji of type thumbsdown on the awardable'
end
end
+
+ context 'when toggling award emoji' do
+ let(:award_action) { 'TOGGLE' }
+
+ context 'when emoji award is present' do
+ let(:award_name) { 'thumbsup' }
+
+ it_behaves_like 'request that removes emoji'
+ end
+
+ context 'when emoji award is not present' do
+ it_behaves_like 'request that adds emoji'
+ end
+ end
end
end
diff --git a/spec/requests/api/graphql/organizations/organization_query_spec.rb b/spec/requests/api/graphql/organizations/organization_query_spec.rb
new file mode 100644
index 00000000000..d02158382eb
--- /dev/null
+++ b/spec/requests/api/graphql/organizations/organization_query_spec.rb
@@ -0,0 +1,178 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'getting organization information', feature_category: :cell do
+ include GraphqlHelpers
+
+ let(:query) { graphql_query_for(:organization, { id: organization.to_global_id }, organization_fields) }
+ let(:current_user) { user }
+ let(:groups) { graphql_data_at(:organization, :groups, :nodes) }
+ let(:organization_fields) do
+ <<~FIELDS
+ id
+ path
+ groups {
+ nodes {
+ id
+ }
+ }
+ FIELDS
+ end
+
+ let_it_be(:organization_user) { create(:organization_user) }
+ let_it_be(:organization) { organization_user.organization }
+ let_it_be(:user) { organization_user.user }
+ let_it_be(:public_group) { create(:group, name: 'public-group', organization: organization) }
+ let_it_be(:other_group) { create(:group, name: 'other-group', organization: organization) }
+ let_it_be(:outside_organization_group) { create(:group) }
+
+ let_it_be(:private_group) do
+ create(:group, :private, name: 'private-group', organization: organization)
+ end
+
+ let_it_be(:no_access_group_in_org) do
+ create(:group, :private, name: 'no-access', organization: organization)
+ end
+
+ before_all do
+ private_group.add_developer(user)
+ public_group.add_developer(user)
+ other_group.add_developer(user)
+ outside_organization_group.add_developer(user)
+ end
+
+ subject(:request_organization) { post_graphql(query, current_user: current_user) }
+
+ context 'when the user does not have access to the organization' do
+ let(:current_user) { create(:user) }
+
+ it 'returns the organization as all organizations are public' do
+ request_organization
+
+ expect(graphql_data_at(:organization, :id)).to eq(organization.to_global_id.to_s)
+ end
+ end
+
+ context 'when user has access to the organization' do
+ it_behaves_like 'a working graphql query' do
+ before do
+ request_organization
+ end
+ end
+
+ context 'when resolve_organization_groups feature flag is disabled' do
+ before do
+ stub_feature_flags(resolve_organization_groups: false)
+ end
+
+ it 'returns no groups' do
+ request_organization
+
+ expect(graphql_data_at(:organization)).not_to be_nil
+ expect(graphql_data_at(:organization, :groups, :nodes)).to be_empty
+ end
+ end
+
+ context 'when requesting organization user' do
+ let(:organization_fields) do
+ <<~FIELDS
+ organizationUsers {
+ nodes {
+ badges
+ id
+ user {
+ id
+ }
+ }
+ }
+ FIELDS
+ end
+
+ it 'returns correct organization user fields' do
+ request_organization
+
+ organization_user_node = graphql_data_at(:organization, :organizationUsers, :nodes).first
+ expected_attributes = {
+ "badges" => ["It's you!"],
+ "id" => organization_user.to_global_id.to_s,
+ "user" => { "id" => user.to_global_id.to_s }
+ }
+ expect(organization_user_node).to match(expected_attributes)
+ end
+
+ it 'avoids N+1 queries for all the fields' do
+ base_query_count = ActiveRecord::QueryRecorder.new { run_query }
+
+ organization_user_2 = create(:organization_user, organization: organization)
+ other_group.add_developer(organization_user_2.user)
+
+ expect { run_query }.not_to exceed_query_limit(base_query_count)
+ end
+
+ private
+
+ def run_query
+ run_with_clean_state(query, context: { current_user: current_user })
+ end
+ end
+
+ context 'with `search` argument' do
+ let(:search) { 'oth' }
+ let(:organization_fields) do
+ <<~FIELDS
+ id
+ path
+ groups(search: "#{search}") {
+ nodes {
+ id
+ name
+ }
+ }
+ FIELDS
+ end
+
+ it 'filters groups by name' do
+ request_organization
+
+ expect(groups).to contain_exactly(a_graphql_entity_for(other_group))
+ end
+ end
+
+ context 'with `sort` argument' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:authorized_groups) { [public_group, private_group, other_group] }
+
+ where(:field, :direction, :sorted_groups) do
+ 'id' | 'asc' | lazy { authorized_groups.sort_by(&:id) }
+ 'id' | 'desc' | lazy { authorized_groups.sort_by(&:id).reverse }
+ 'name' | 'asc' | lazy { authorized_groups.sort_by(&:name) }
+ 'name' | 'desc' | lazy { authorized_groups.sort_by(&:name).reverse }
+ 'path' | 'asc' | lazy { authorized_groups.sort_by(&:path) }
+ 'path' | 'desc' | lazy { authorized_groups.sort_by(&:path).reverse }
+ end
+
+ with_them do
+ let(:sort) { "#{field}_#{direction}".upcase }
+ let(:organization_fields) do
+ <<~FIELDS
+ id
+ path
+ groups(sort: #{sort}) {
+ nodes {
+ id
+ }
+ }
+ FIELDS
+ end
+
+ it 'sorts the groups' do
+ request_organization
+
+ expect(groups.pluck('id')).to eq(sorted_groups.map(&:to_global_id).map(&:to_s))
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/packages/package_spec.rb b/spec/requests/api/graphql/packages/package_spec.rb
index 7610a4aaac1..c8cef07c4ff 100644
--- a/spec/requests/api/graphql/packages/package_spec.rb
+++ b/spec/requests/api/graphql/packages/package_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe 'package details', feature_category: :package_registry do
end
let(:depth) { 3 }
- let(:excluded) { %w[metadata apiFuzzingCiConfiguration pipeline packageFiles] }
+ let(:excluded) { %w[metadata apiFuzzingCiConfiguration pipeline packageFiles runners] }
let(:metadata) { query_graphql_fragment('ComposerMetadata') }
let(:package_files) { all_graphql_fields_for('PackageFile') }
let(:package_global_id) { global_id_of(composer_package) }
diff --git a/spec/requests/api/graphql/project/merge_request_spec.rb b/spec/requests/api/graphql/project/merge_request_spec.rb
index 80c7258c05d..9ca5df95d30 100644
--- a/spec/requests/api/graphql/project/merge_request_spec.rb
+++ b/spec/requests/api/graphql/project/merge_request_spec.rb
@@ -21,8 +21,11 @@ RSpec.describe 'getting merge request information nested in a project', feature_
end
it_behaves_like 'a working graphql query' do
- # we exclude Project.pipeline because it needs arguments
- let(:mr_fields) { all_graphql_fields_for('MergeRequest', excluded: %w[jobs pipeline]) }
+ # we exclude Project.pipeline because it needs arguments,
+ # codequalityReportsComparer because no pipeline exists yet
+ # and runners because the user is not an admin and therefore has no access
+ let(:excluded) { %w[jobs pipeline runners codequalityReportsComparer] }
+ let(:mr_fields) { all_graphql_fields_for('MergeRequest', excluded: excluded) }
before do
post_graphql(query, current_user: current_user)
diff --git a/spec/requests/api/graphql/project/runners_spec.rb b/spec/requests/api/graphql/project/runners_spec.rb
index bee7ce2e372..68e95de49bc 100644
--- a/spec/requests/api/graphql/project/runners_spec.rb
+++ b/spec/requests/api/graphql/project/runners_spec.rb
@@ -28,6 +28,8 @@ RSpec.describe 'Project.runners', feature_category: :runner do
)
end
+ subject(:request) { post_graphql(query, current_user: user) }
+
context 'when the user is a project admin' do
before do
project.add_maintainer(user)
@@ -36,7 +38,7 @@ RSpec.describe 'Project.runners', feature_category: :runner do
let(:expected_ids) { [project_runner, group_runner, instance_runner].map { |g| g.to_global_id.to_s } }
it 'returns all runners available to project' do
- post_graphql(query, current_user: user)
+ request
expect(graphql_data_at(:project, :runners, :nodes).pluck('id')).to match_array(expected_ids)
end
@@ -47,10 +49,14 @@ RSpec.describe 'Project.runners', feature_category: :runner do
project.add_developer(user)
end
- it 'returns no runners' do
- post_graphql(query, current_user: user)
+ it 'returns nil runners and an error' do
+ request
- expect(graphql_data_at(:project, :runners, :nodes)).to be_empty
+ expect(graphql_data_at(:project, :runners)).to be_nil
+ expect(graphql_errors).to contain_exactly(a_hash_including(
+ 'message' => a_string_including("you don't have permission to perform this action"),
+ 'path' => %w[project runners]
+ ))
end
end
end
diff --git a/spec/requests/api/graphql/project/work_items_spec.rb b/spec/requests/api/graphql/project/work_items_spec.rb
index 4aba83dae92..d5d3d6c578f 100644
--- a/spec/requests/api/graphql/project/work_items_spec.rb
+++ b/spec/requests/api/graphql/project/work_items_spec.rb
@@ -30,16 +30,14 @@ RSpec.describe 'getting a work item list for a project', feature_category: :team
let_it_be(:confidential_item) { create(:work_item, confidential: true, project: project, title: 'item3') }
let_it_be(:other_item) { create(:work_item) }
- let(:items_data) { graphql_data['project']['workItems']['edges'] }
+ let(:items_data) { graphql_data['project']['workItems']['nodes'] }
let(:item_filter_params) { {} }
let(:fields) do
<<~QUERY
- edges {
- node {
+ nodes {
#{all_graphql_fields_for('workItems'.classify, max_depth: 2)}
}
- }
QUERY
end
@@ -69,6 +67,15 @@ RSpec.describe 'getting a work item list for a project', feature_category: :team
end
end
+ it_behaves_like 'graphql work item list request spec' do
+ let_it_be(:container_build_params) { { project: project } }
+ let(:work_item_node_path) { %w[project workItems nodes] }
+
+ def post_query(request_user = current_user)
+ post_graphql(query, current_user: request_user)
+ end
+ end
+
describe 'N + 1 queries' do
context 'when querying root fields' do
it_behaves_like 'work items resolver without N + 1 queries'
@@ -199,12 +206,6 @@ RSpec.describe 'getting a work item list for a project', feature_category: :team
end
end
- it_behaves_like 'a working graphql query' do
- before do
- post_graphql(query, current_user: current_user)
- end
- end
-
context 'when the user does not have access to the item' do
before do
project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE)
@@ -237,25 +238,12 @@ RSpec.describe 'getting a work item list for a project', feature_category: :team
context 'when filtering by search' do
it_behaves_like 'query with a search term' do
- let(:issuable_data) { items_data }
+ let(:ids) { item_ids }
let(:user) { current_user }
let_it_be(:issuable) { create(:work_item, project: project, description: 'bar') }
end
end
- context 'when filtering by author username' do
- let_it_be(:author) { create(:author) }
- let_it_be(:item_3) { create(:work_item, project: project, author: author) }
-
- let(:item_filter_params) { { author_username: item_3.author.username } }
-
- it 'returns correct results' do
- post_graphql(query, current_user: current_user)
-
- expect(item_ids).to match_array([item_3.to_global_id.to_s])
- end
- end
-
describe 'sorting and pagination' do
let(:data_path) { [:project, :work_items] }
@@ -415,7 +403,7 @@ RSpec.describe 'getting a work item list for a project', feature_category: :team
end
def item_ids
- graphql_dig_at(items_data, :node, :id)
+ graphql_dig_at(items_data, :id)
end
def query(params = item_filter_params)
diff --git a/spec/requests/api/graphql/project_query_spec.rb b/spec/requests/api/graphql/project_query_spec.rb
index 783e96861b1..2d9c6367676 100644
--- a/spec/requests/api/graphql/project_query_spec.rb
+++ b/spec/requests/api/graphql/project_query_spec.rb
@@ -56,15 +56,21 @@ RSpec.describe 'getting project information', feature_category: :groups_and_proj
expect { post_graphql(query, current_user: current_user) }.not_to exceed_query_limit(baseline)
end
- context 'when other project member is not authorized to see the full token' do
+ context 'when another project member or owner is not the token owner' do
before do
- project.add_maintainer(other_user)
+ project.add_owner(other_user)
post_graphql(query, current_user: other_user)
end
- it 'shows truncated token' do
- expect(graphql_data_at(:project, :pipeline_triggers,
- :nodes).first['token']).to eql pipeline_trigger.token[0, 4]
+ it 'is not authorized and shows truncated token' do
+ expect(graphql_data_at(:project, :pipeline_triggers, :nodes).first).to match({
+ 'id' => pipeline_trigger.to_global_id.to_s,
+ 'canAccessProject' => true,
+ 'description' => pipeline_trigger.description,
+ 'hasTokenExposed' => false,
+ 'lastUsed' => nil,
+ 'token' => pipeline_trigger.token[0, 4]
+ })
end
end
@@ -199,7 +205,7 @@ RSpec.describe 'getting project information', feature_category: :groups_and_proj
context 'when the project is a catalog resource' do
before do
- create(:catalog_resource, project: project)
+ create(:ci_catalog_resource, project: project)
end
it 'is true' do
diff --git a/spec/requests/api/graphql/work_item_spec.rb b/spec/requests/api/graphql/work_item_spec.rb
index fa354bc1f66..3691e023a53 100644
--- a/spec/requests/api/graphql/work_item_spec.rb
+++ b/spec/requests/api/graphql/work_item_spec.rb
@@ -70,7 +70,8 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do
'adminWorkItem' => true,
'adminParentLink' => true,
'setWorkItemMetadata' => true,
- 'createNote' => true
+ 'createNote' => true,
+ 'adminWorkItemLink' => true
},
'project' => hash_including('id' => project.to_gid.to_s, 'fullPath' => project.full_path)
)
@@ -541,13 +542,10 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do
end
describe 'linked items widget' do
- let_it_be(:related_item1) { create(:work_item, project: project) }
- let_it_be(:related_item2) { create(:work_item, project: project) }
- let_it_be(:related_item3) { create(:work_item) }
- let_it_be(:link1) { create(:work_item_link, source: work_item, target: related_item1, link_type: 'relates_to') }
- let_it_be(:link2) { create(:work_item_link, source: work_item, target: related_item2, link_type: 'relates_to') }
- let_it_be(:link3) { create(:work_item_link, source: work_item, target: related_item3, link_type: 'relates_to') }
-
+ let_it_be(:related_item) { create(:work_item, project: project) }
+ let_it_be(:blocked_item) { create(:work_item, project: project) }
+ let_it_be(:link1) { create(:work_item_link, source: work_item, target: related_item, link_type: 'relates_to') }
+ let_it_be(:link2) { create(:work_item_link, source: work_item, target: blocked_item, link_type: 'blocks') }
let(:work_item_fields) do
<<~GRAPHQL
id
@@ -580,12 +578,12 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do
hash_including(
'linkId' => link1.to_gid.to_s, 'linkType' => 'relates_to',
'linkCreatedAt' => link1.created_at.iso8601, 'linkUpdatedAt' => link1.updated_at.iso8601,
- 'workItem' => { 'id' => related_item1.to_gid.to_s }
+ 'workItem' => { 'id' => related_item.to_gid.to_s }
),
hash_including(
- 'linkId' => link2.to_gid.to_s, 'linkType' => 'relates_to',
+ 'linkId' => link2.to_gid.to_s, 'linkType' => 'blocks',
'linkCreatedAt' => link2.created_at.iso8601, 'linkUpdatedAt' => link2.updated_at.iso8601,
- 'workItem' => { 'id' => related_item2.to_gid.to_s }
+ 'workItem' => { 'id' => blocked_item.to_gid.to_s }
)
]
) }
@@ -594,6 +592,30 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do
)
end
+ context 'when filtering by link type' do
+ let(:work_item_fields) do
+ <<~GRAPHQL
+ widgets {
+ type
+ ... on WorkItemWidgetLinkedItems {
+ linkedItems(filter: RELATED) {
+ nodes {
+ linkType
+ }
+ }
+ }
+ }
+ GRAPHQL
+ end
+
+ it 'returns items with specified type' do
+ widget_data = work_item_data["widgets"].find { |widget| widget.key?("linkedItems") }["linkedItems"]
+
+ expect(widget_data["nodes"].size).to eq(1)
+ expect(widget_data.dig("nodes", 0, "linkType")).to eq('relates_to')
+ end
+ end
+
context 'when `linked_work_items` feature flag is disabled' do
before do
stub_feature_flags(linked_work_items: false)
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index 5296a8b3e93..7b1da1c691d 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -6,7 +6,6 @@ RSpec.describe API::Groups, feature_category: :groups_and_projects do
include GroupAPIHelpers
include UploadHelpers
include WorkhorseHelpers
- include KeysetPaginationHelpers
let_it_be(:user1) { create(:user, can_create_group: false) }
let_it_be(:user2) { create(:user) }
@@ -196,37 +195,10 @@ RSpec.describe API::Groups, feature_category: :groups_and_projects do
end
end
- context 'keyset pagination' do
- context 'on making requests with supported ordering structure' do
- it 'paginates the records correctly', :aggregate_failures do
- # first page
- get api('/groups'), params: { pagination: 'keyset', per_page: 1 }
-
- expect(response).to have_gitlab_http_status(:ok)
- records = json_response
- expect(records.size).to eq(1)
- expect(records.first['id']).to eq(group_1.id)
-
- params_for_next_page = pagination_params_from_next_url(response)
- expect(params_for_next_page).to include('cursor')
-
- get api('/groups'), params: params_for_next_page
-
- expect(response).to have_gitlab_http_status(:ok)
- records = Gitlab::Json.parse(response.body)
- expect(records.size).to eq(1)
- expect(records.first['id']).to eq(group_2.id)
- end
- end
-
- context 'on making requests with unsupported ordering structure' do
- it 'returns error', :aggregate_failures do
- get api('/groups'), params: { pagination: 'keyset', per_page: 1, order_by: 'path', sort: 'desc' }
-
- expect(response).to have_gitlab_http_status(:method_not_allowed)
- expect(json_response['error']).to eq('Keyset pagination is not yet available for this type of request')
- end
- end
+ it_behaves_like 'an endpoint with keyset pagination', invalid_order: 'path' do
+ let(:first_record) { group_1 }
+ let(:second_record) { group_2 }
+ let(:api_call) { api('/groups') }
end
end
end
diff --git a/spec/requests/api/internal/base_spec.rb b/spec/requests/api/internal/base_spec.rb
index fa35e367420..cf0cd9a2e85 100644
--- a/spec/requests/api/internal/base_spec.rb
+++ b/spec/requests/api/internal/base_spec.rb
@@ -881,7 +881,6 @@ RSpec.describe API::Internal::Base, feature_category: :system_access do
end
context "custom action" do
- let(:access_checker) { double(Gitlab::GitAccess) }
let(:payload) do
{
'action' => 'geo_proxy_to_primary',
@@ -898,7 +897,8 @@ RSpec.describe API::Internal::Base, feature_category: :system_access do
before do
project.add_guest(user)
- expect(Gitlab::GitAccess).to receive(:new).with(
+
+ expect_next_instance_of(Gitlab::GitAccess,
key,
project,
'ssh',
@@ -907,11 +907,12 @@ RSpec.describe API::Internal::Base, feature_category: :system_access do
repository_path: "#{project.full_path}.git",
redirected_path: nil
}
- ).and_return(access_checker)
- expect(access_checker).to receive(:check).with(
- 'git-receive-pack',
- 'd14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/master'
- ).and_return(custom_action_result)
+ ) do |access_checker|
+ expect(access_checker).to receive(:check).with(
+ 'git-receive-pack',
+ 'd14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/master'
+ ).and_return(custom_action_result)
+ end
end
context "git push" do
diff --git a/spec/requests/api/internal/kubernetes_spec.rb b/spec/requests/api/internal/kubernetes_spec.rb
index ec30840dfd8..1e8397773be 100644
--- a/spec/requests/api/internal/kubernetes_spec.rb
+++ b/spec/requests/api/internal/kubernetes_spec.rb
@@ -131,7 +131,8 @@ RSpec.describe API::Internal::Kubernetes, feature_category: :deployment_manageme
k8s_api_proxy_request: 5,
flux_git_push_notifications_total: 42,
k8s_api_proxy_requests_via_ci_access: 43,
- k8s_api_proxy_requests_via_user_access: 44
+ k8s_api_proxy_requests_via_user_access: 44,
+ k8s_api_proxy_requests_via_pat_access: 45
}
unique_counters = {
agent_users_using_ci_tunnel: [10, 999, 777, 10],
@@ -139,6 +140,8 @@ RSpec.describe API::Internal::Kubernetes, feature_category: :deployment_manageme
k8s_api_proxy_requests_unique_agents_via_ci_access: [10, 999, 777, 10],
k8s_api_proxy_requests_unique_users_via_user_access: [10, 999, 777, 10],
k8s_api_proxy_requests_unique_agents_via_user_access: [10, 999, 777, 10],
+ k8s_api_proxy_requests_unique_users_via_pat_access: [10, 999, 777, 10],
+ k8s_api_proxy_requests_unique_agents_via_pat_access: [10, 999, 777, 10],
flux_git_push_notified_unique_projects: [10, 999, 777, 10]
}
expected_counters = {
@@ -146,7 +149,8 @@ RSpec.describe API::Internal::Kubernetes, feature_category: :deployment_manageme
kubernetes_agent_k8s_api_proxy_request: request_count * counters[:k8s_api_proxy_request],
kubernetes_agent_flux_git_push_notifications_total: request_count * counters[:flux_git_push_notifications_total],
kubernetes_agent_k8s_api_proxy_requests_via_ci_access: request_count * counters[:k8s_api_proxy_requests_via_ci_access],
- kubernetes_agent_k8s_api_proxy_requests_via_user_access: request_count * counters[:k8s_api_proxy_requests_via_user_access]
+ kubernetes_agent_k8s_api_proxy_requests_via_user_access: request_count * counters[:k8s_api_proxy_requests_via_user_access],
+ kubernetes_agent_k8s_api_proxy_requests_via_pat_access: request_count * counters[:k8s_api_proxy_requests_via_pat_access]
}
request_count.times do
@@ -492,73 +496,125 @@ RSpec.describe API::Internal::Kubernetes, feature_category: :deployment_manageme
Clusters::Agents::Authorizations::UserAccess::RefreshService.new(agent, config: user_access_config).execute
end
- it 'returns 400 when cookie is invalid' do
- send_request(params: { agent_id: agent.id, access_type: 'session_cookie', access_key: '123', csrf_token: mask_token(new_token) })
+ context 'when the access type is access_token' do
+ let(:personal_access_token) { create(:personal_access_token, user: user, scopes: [Gitlab::Auth::K8S_PROXY_SCOPE]) }
- expect(response).to have_gitlab_http_status(:bad_request)
- end
+ it 'returns 200 when the user has access' do
+ deployment_project.add_member(user, :developer)
- it 'returns 401 when session is not found' do
- access_key = Gitlab::Kas::UserAccess.encrypt_public_session_id('abc')
- send_request(params: { agent_id: agent.id, access_type: 'session_cookie', access_key: access_key, csrf_token: mask_token(new_token) })
+ send_request(params: { agent_id: agent.id, access_type: 'personal_access_token', access_key: personal_access_token.token })
- expect(response).to have_gitlab_http_status(:unauthorized)
- end
+ expect(response).to have_gitlab_http_status(:success)
+ end
- it 'returns 401 when CSRF token does not match' do
- public_id = stub_user_session(user, new_token)
- access_key = Gitlab::Kas::UserAccess.encrypt_public_session_id(public_id)
- send_request(params: { agent_id: agent.id, access_type: 'session_cookie', access_key: access_key, csrf_token: mask_token(new_token) })
+ it 'returns 400 when the feature flag is disabled' do
+ deployment_project.add_member(user, :developer)
+ stub_feature_flags(k8s_proxy_pat: false)
- expect(response).to have_gitlab_http_status(:unauthorized)
- end
+ send_request(params: { agent_id: agent.id, access_type: 'personal_access_token', access_key: personal_access_token.token })
- it 'returns 404 for non-existent agent' do
- token = new_token
- public_id = stub_user_session(user, token)
- access_key = Gitlab::Kas::UserAccess.encrypt_public_session_id(public_id)
- send_request(params: { agent_id: non_existing_record_id, access_type: 'session_cookie', access_key: access_key, csrf_token: mask_token(token) })
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
- expect(response).to have_gitlab_http_status(:not_found)
- end
+ it 'returns 403 when user has no access' do
+ send_request(params: { agent_id: agent.id, access_type: 'personal_access_token', access_key: personal_access_token.token })
- it 'returns 403 when user has no access' do
- token = new_token
- public_id = stub_user_session(user, token)
- access_key = Gitlab::Kas::UserAccess.encrypt_public_session_id(public_id)
- send_request(params: { agent_id: agent.id, access_type: 'session_cookie', access_key: access_key, csrf_token: mask_token(token) })
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
- expect(response).to have_gitlab_http_status(:forbidden)
- end
+ it 'returns 403 when user has incorrect token scope' do
+ personal_access_token.update!(scopes: [Gitlab::Auth::READ_API_SCOPE])
+ deployment_project.add_member(user, :developer)
- it 'returns 200 when user has access' do
- deployment_project.add_member(user, :developer)
- token = new_token
- public_id = stub_user_session(user, token)
- access_key = Gitlab::Kas::UserAccess.encrypt_public_session_id(public_id)
- send_request(params: { agent_id: agent.id, access_type: 'session_cookie', access_key: access_key, csrf_token: mask_token(token) })
+ send_request(params: { agent_id: agent.id, access_type: 'personal_access_token', access_key: personal_access_token.token })
- expect(response).to have_gitlab_http_status(:success)
- end
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ it 'returns 403 when user has no access to requested agent' do
+ deployment_project.add_member(user, :developer)
+
+ send_request(params: { agent_id: another_agent.id, access_type: 'personal_access_token', access_key: personal_access_token.token })
- it 'returns 401 when user has valid KAS cookie and CSRF token but has no access to requested agent' do
- deployment_project.add_member(user, :developer)
- token = new_token
- public_id = stub_user_session(user, token)
- access_key = Gitlab::Kas::UserAccess.encrypt_public_session_id(public_id)
- send_request(params: { agent_id: another_agent.id, access_type: 'session_cookie', access_key: access_key, csrf_token: mask_token(token) })
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ it 'returns 404 for non-existent agent' do
+ send_request(params: { agent_id: non_existing_record_id, access_type: 'personal_access_token', access_key: personal_access_token.token })
- expect(response).to have_gitlab_http_status(:forbidden)
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
end
- it 'returns 401 when user id is not found in session' do
- deployment_project.add_member(user, :developer)
- token = new_token
- public_id = stub_user_session_with_no_user_id(user, token)
- access_key = Gitlab::Kas::UserAccess.encrypt_public_session_id(public_id)
- send_request(params: { agent_id: agent.id, access_type: 'session_cookie', access_key: access_key, csrf_token: mask_token(token) })
+ context 'when the access type is session_cookie' do
+ it 'returns 400 when cookie is invalid' do
+ send_request(params: { agent_id: agent.id, access_type: 'session_cookie', access_key: '123', csrf_token: mask_token(new_token) })
- expect(response).to have_gitlab_http_status(:unauthorized)
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'returns 401 when session is not found' do
+ access_key = Gitlab::Kas::UserAccess.encrypt_public_session_id('abc')
+ send_request(params: { agent_id: agent.id, access_type: 'session_cookie', access_key: access_key, csrf_token: mask_token(new_token) })
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+
+ it 'returns 401 when CSRF token does not match' do
+ public_id = stub_user_session(user, new_token)
+ access_key = Gitlab::Kas::UserAccess.encrypt_public_session_id(public_id)
+ send_request(params: { agent_id: agent.id, access_type: 'session_cookie', access_key: access_key, csrf_token: mask_token(new_token) })
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+
+ it 'returns 404 for non-existent agent' do
+ token = new_token
+ public_id = stub_user_session(user, token)
+ access_key = Gitlab::Kas::UserAccess.encrypt_public_session_id(public_id)
+ send_request(params: { agent_id: non_existing_record_id, access_type: 'session_cookie', access_key: access_key, csrf_token: mask_token(token) })
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns 403 when user has no access' do
+ token = new_token
+ public_id = stub_user_session(user, token)
+ access_key = Gitlab::Kas::UserAccess.encrypt_public_session_id(public_id)
+ send_request(params: { agent_id: agent.id, access_type: 'session_cookie', access_key: access_key, csrf_token: mask_token(token) })
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ it 'returns 200 when user has access' do
+ deployment_project.add_member(user, :developer)
+ token = new_token
+ public_id = stub_user_session(user, token)
+ access_key = Gitlab::Kas::UserAccess.encrypt_public_session_id(public_id)
+ send_request(params: { agent_id: agent.id, access_type: 'session_cookie', access_key: access_key, csrf_token: mask_token(token) })
+
+ expect(response).to have_gitlab_http_status(:success)
+ end
+
+ it 'returns 403 when user has valid KAS cookie and CSRF token but has no access to requested agent' do
+ deployment_project.add_member(user, :developer)
+ token = new_token
+ public_id = stub_user_session(user, token)
+ access_key = Gitlab::Kas::UserAccess.encrypt_public_session_id(public_id)
+ send_request(params: { agent_id: another_agent.id, access_type: 'session_cookie', access_key: access_key, csrf_token: mask_token(token) })
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ it 'returns 401 when user id is not found in session' do
+ deployment_project.add_member(user, :developer)
+ token = new_token
+ public_id = stub_user_session_with_no_user_id(user, token)
+ access_key = Gitlab::Kas::UserAccess.encrypt_public_session_id(public_id)
+ send_request(params: { agent_id: agent.id, access_type: 'session_cookie', access_key: access_key, csrf_token: mask_token(token) })
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
end
end
end
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 4edcd66e91a..d3f8aeb3e76 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -113,32 +113,12 @@ RSpec.describe API::MergeRequests, :aggregate_failures, feature_category: :sourc
end
context 'with merge status recheck projection' do
- context 'with batched_api_mergeability_checks FF on' do
- it 'checks mergeability asynchronously in batch', :sidekiq_inline do
- get(api(endpoint_path, user2), params: { with_merge_status_recheck: true })
-
- expect_successful_response_with_paginated_array
-
- expect(merge_request.reload.merge_status).to eq('can_be_merged')
- end
- end
-
- context 'with batched_api_mergeability_checks FF off' do
- before do
- stub_feature_flags(batched_api_mergeability_checks: false)
- end
-
- it 'checks mergeability asynchronously' do
- expect_next_instances_of(check_service_class, (1..2)) do |service|
- expect(service).not_to receive(:execute)
- expect(service).to receive(:async_execute).and_call_original
- end
+ it 'checks mergeability asynchronously in batch', :sidekiq_inline do
+ get(api(endpoint_path, user2), params: { with_merge_status_recheck: true })
- get(api(endpoint_path, user2), params: { with_merge_status_recheck: true })
+ expect_successful_response_with_paginated_array
- expect_successful_response_with_paginated_array
- expect(mr_entity['merge_status']).to eq('checking')
- end
+ expect(merge_request.reload.merge_status).to eq('can_be_merged')
end
end
diff --git a/spec/requests/api/metadata_spec.rb b/spec/requests/api/metadata_spec.rb
index e15186c48a5..b81fe3f51b5 100644
--- a/spec/requests/api/metadata_spec.rb
+++ b/spec/requests/api/metadata_spec.rb
@@ -41,6 +41,22 @@ RSpec.describe API::Metadata, feature_category: :shared do
end
end
+ context 'with ai_features scope' do
+ let(:scopes) { %i(ai_features) }
+
+ it 'returns the metadata information' do
+ get api(endpoint, personal_access_token: personal_access_token)
+
+ expect_metadata
+ end
+
+ it 'returns "200" response on head requests' do
+ head api(endpoint, personal_access_token: personal_access_token)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
context 'with read_user scope' do
let(:scopes) { %i(read_user) }
@@ -57,7 +73,7 @@ RSpec.describe API::Metadata, feature_category: :shared do
end
end
- context 'with neither api nor read_user scope' do
+ context 'with neither api, ai_features nor read_user scope' do
let(:scopes) { %i(read_repository) }
it 'returns authorization error' do
diff --git a/spec/requests/api/metrics/dashboard/annotations_spec.rb b/spec/requests/api/metrics/dashboard/annotations_spec.rb
index 6000fc2a6b7..0d4a112c527 100644
--- a/spec/requests/api/metrics/dashboard/annotations_spec.rb
+++ b/spec/requests/api/metrics/dashboard/annotations_spec.rb
@@ -10,13 +10,12 @@ RSpec.describe API::Metrics::Dashboard::Annotations, feature_category: :metrics
let(:dashboard) { 'config/prometheus/common_metrics.yml' }
let(:starting_at) { Time.now.iso8601 }
let(:ending_at) { 1.hour.from_now.iso8601 }
- let(:params) { attributes_for(:metrics_dashboard_annotation, environment: environment, starting_at: starting_at, ending_at: ending_at, dashboard_path: dashboard) }
+ let(:params) { { environment: environment, starting_at: starting_at, ending_at: ending_at, dashboard_path: dashboard, description: 'desc' } }
shared_examples 'POST /:source_type/:id/metrics_dashboard/annotations' do |source_type|
let(:url) { "/#{source_type.pluralize}/#{source.id}/metrics_dashboard/annotations" }
before do
- stub_feature_flags(remove_monitor_metrics: false)
project.add_developer(user)
end
diff --git a/spec/requests/api/metrics/user_starred_dashboards_spec.rb b/spec/requests/api/metrics/user_starred_dashboards_spec.rb
index bdeba777350..d42cec7af30 100644
--- a/spec/requests/api/metrics/user_starred_dashboards_spec.rb
+++ b/spec/requests/api/metrics/user_starred_dashboards_spec.rb
@@ -58,11 +58,6 @@ RSpec.describe API::Metrics::UserStarredDashboards, feature_category: :metrics d
end
describe 'DELETE /projects/:id/metrics/user_starred_dashboards' do
- let_it_be(:user_starred_dashboard_1) { create(:metrics_users_starred_dashboard, user: user, project: project, dashboard_path: dashboard) }
- let_it_be(:user_starred_dashboard_2) { create(:metrics_users_starred_dashboard, user: user, project: project) }
- let_it_be(:other_user_starred_dashboard) { create(:metrics_users_starred_dashboard, project: project) }
- let_it_be(:other_project_starred_dashboard) { create(:metrics_users_starred_dashboard, user: user) }
-
before do
project.add_reporter(user)
end
diff --git a/spec/requests/api/ml/mlflow/experiments_spec.rb b/spec/requests/api/ml/mlflow/experiments_spec.rb
index fc2e814752c..409b4529699 100644
--- a/spec/requests/api/ml/mlflow/experiments_spec.rb
+++ b/spec/requests/api/ml/mlflow/experiments_spec.rb
@@ -179,7 +179,7 @@ RSpec.describe API::Ml::Mlflow::Experiments, feature_category: :mlops do
end
it_behaves_like 'MLflow|shared error cases'
- it_behaves_like 'MLflow|Requires api scope'
+ it_behaves_like 'MLflow|Requires api scope and write permission'
end
end
@@ -203,7 +203,7 @@ RSpec.describe API::Ml::Mlflow::Experiments, feature_category: :mlops do
end
it_behaves_like 'MLflow|shared error cases'
- it_behaves_like 'MLflow|Requires api scope'
+ it_behaves_like 'MLflow|Requires api scope and write permission'
it_behaves_like 'MLflow|Bad Request on missing required', [:key, :value]
end
end
diff --git a/spec/requests/api/ml/mlflow/runs_spec.rb b/spec/requests/api/ml/mlflow/runs_spec.rb
index 45479666e9a..af04c387830 100644
--- a/spec/requests/api/ml/mlflow/runs_spec.rb
+++ b/spec/requests/api/ml/mlflow/runs_spec.rb
@@ -129,7 +129,7 @@ RSpec.describe API::Ml::Mlflow::Runs, feature_category: :mlops do
end
it_behaves_like 'MLflow|shared error cases'
- it_behaves_like 'MLflow|Requires api scope'
+ it_behaves_like 'MLflow|Requires api scope and write permission'
end
end
@@ -185,6 +185,132 @@ RSpec.describe API::Ml::Mlflow::Runs, feature_category: :mlops do
end
end
+ describe 'GET /projects/:id/ml/mlflow/api/2.0/mlflow/runs/search' do
+ let_it_be(:search_experiment) { create(:ml_experiments, user: nil, project: project) }
+ let_it_be(:first_candidate) do
+ create(:ml_candidates, experiment: search_experiment, name: 'c', user: nil).tap do |c|
+ c.metrics.create!(name: 'metric1', value: 0.3)
+ end
+ end
+
+ let_it_be(:second_candidate) do
+ create(:ml_candidates, experiment: search_experiment, name: 'a', user: nil).tap do |c|
+ c.metrics.create!(name: 'metric1', value: 0.2)
+ end
+ end
+
+ let_it_be(:third_candidate) do
+ create(:ml_candidates, experiment: search_experiment, name: 'b', user: nil).tap do |c|
+ c.metrics.create!(name: 'metric1', value: 0.6)
+ end
+ end
+
+ let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/runs/search" }
+ let(:order_by) { nil }
+ let(:default_params) do
+ {
+ 'max_results' => 2,
+ 'experiment_ids' => [search_experiment.iid],
+ 'order_by' => order_by
+ }
+ end
+
+ it 'searches runs for a project', :aggregate_failures do
+ is_expected.to have_gitlab_http_status(:ok)
+ is_expected.to match_response_schema('ml/search_runs')
+ end
+
+ describe 'pagination and ordering' do
+ RSpec.shared_examples 'a paginated search runs request with order' do
+ it 'paginates respecting the provided order by' do
+ first_page_runs = json_response['runs']
+ expect(first_page_runs.size).to eq(2)
+
+ expect(first_page_runs[0]['info']['run_id']).to eq(expected_order[0].eid)
+ expect(first_page_runs[1]['info']['run_id']).to eq(expected_order[1].eid)
+
+ params = default_params.merge(page_token: json_response['next_page_token'])
+
+ get api(route), params: params, headers: headers
+
+ second_page_response = Gitlab::Json.parse(response.body)
+ second_page_runs = second_page_response['runs']
+
+ expect(second_page_response['next_page_token']).to be_nil
+ expect(second_page_runs.size).to eq(1)
+ expect(second_page_runs[0]['info']['run_id']).to eq(expected_order[2].eid)
+ end
+ end
+
+ let(:default_order) { [third_candidate, second_candidate, first_candidate] }
+
+ context 'when ordering is not provided' do
+ let(:expected_order) { default_order }
+
+ it_behaves_like 'a paginated search runs request with order'
+ end
+
+ context 'when order by column is provided', 'and column exists' do
+ let(:order_by) { 'name ASC' }
+ let(:expected_order) { [second_candidate, third_candidate, first_candidate] }
+
+ it_behaves_like 'a paginated search runs request with order'
+ end
+
+ context 'when order by column is provided', 'and column does not exist' do
+ let(:order_by) { 'something DESC' }
+ let(:expected_order) { default_order }
+
+ it_behaves_like 'a paginated search runs request with order'
+ end
+
+ context 'when order by metric is provided', 'and metric exists' do
+ let(:order_by) { 'metrics.metric1' }
+ let(:expected_order) { [third_candidate, first_candidate, second_candidate] }
+
+ it_behaves_like 'a paginated search runs request with order'
+ end
+
+ context 'when order by metric is provided', 'and metric does not exist' do
+ let(:order_by) { 'metrics.something' }
+
+ it 'returns no results' do
+ expect(json_response['runs']).to be_empty
+ end
+ end
+
+ context 'when order by params is provided' do
+ let(:order_by) { 'params.something' }
+ let(:expected_order) { default_order }
+
+ it_behaves_like 'a paginated search runs request with order'
+ end
+ end
+
+ describe 'Error States' do
+ context 'when experiment_ids is not passed' do
+ let(:default_params) { {} }
+
+ it_behaves_like 'MLflow|Bad Request'
+ end
+
+ context 'when experiment_ids is empty' do
+ let(:default_params) { { 'experiment_ids' => [] } }
+
+ it_behaves_like 'MLflow|Not Found - Resource Does Not Exist'
+ end
+
+ context 'when experiment_ids is invalid' do
+ let(:default_params) { { 'experiment_ids' => [non_existing_record_id] } }
+
+ it_behaves_like 'MLflow|Not Found - Resource Does Not Exist'
+ end
+
+ it_behaves_like 'MLflow|shared error cases'
+ it_behaves_like 'MLflow|Requires read_api scope'
+ end
+ end
+
describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/runs/update' do
let(:default_params) { { run_id: candidate.eid.to_s, status: 'FAILED', end_time: Time.now.to_i } }
let(:request) { post api(route), params: params, headers: headers }
@@ -220,7 +346,7 @@ RSpec.describe API::Ml::Mlflow::Runs, feature_category: :mlops do
end
it_behaves_like 'MLflow|shared error cases'
- it_behaves_like 'MLflow|Requires api scope'
+ it_behaves_like 'MLflow|Requires api scope and write permission'
it_behaves_like 'MLflow|run_id param error cases'
end
end
@@ -238,7 +364,7 @@ RSpec.describe API::Ml::Mlflow::Runs, feature_category: :mlops do
describe 'Error Cases' do
it_behaves_like 'MLflow|shared error cases'
- it_behaves_like 'MLflow|Requires api scope'
+ it_behaves_like 'MLflow|Requires api scope and write permission'
it_behaves_like 'MLflow|run_id param error cases'
it_behaves_like 'MLflow|Bad Request on missing required', [:key, :value, :timestamp]
end
@@ -263,7 +389,7 @@ RSpec.describe API::Ml::Mlflow::Runs, feature_category: :mlops do
end
it_behaves_like 'MLflow|shared error cases'
- it_behaves_like 'MLflow|Requires api scope'
+ it_behaves_like 'MLflow|Requires api scope and write permission'
it_behaves_like 'MLflow|run_id param error cases'
it_behaves_like 'MLflow|Bad Request on missing required', [:key, :value]
end
@@ -288,7 +414,7 @@ RSpec.describe API::Ml::Mlflow::Runs, feature_category: :mlops do
end
it_behaves_like 'MLflow|shared error cases'
- it_behaves_like 'MLflow|Requires api scope'
+ it_behaves_like 'MLflow|Requires api scope and write permission'
it_behaves_like 'MLflow|run_id param error cases'
it_behaves_like 'MLflow|Bad Request on missing required', [:key, :value]
end
@@ -358,7 +484,7 @@ RSpec.describe API::Ml::Mlflow::Runs, feature_category: :mlops do
end
it_behaves_like 'MLflow|shared error cases'
- it_behaves_like 'MLflow|Requires api scope'
+ it_behaves_like 'MLflow|Requires api scope and write permission'
it_behaves_like 'MLflow|run_id param error cases'
end
end
diff --git a/spec/requests/api/npm_group_packages_spec.rb b/spec/requests/api/npm_group_packages_spec.rb
index fe0bf1d8b46..7fba75b0630 100644
--- a/spec/requests/api/npm_group_packages_spec.rb
+++ b/spec/requests/api/npm_group_packages_spec.rb
@@ -11,43 +11,12 @@ RSpec.describe API::NpmGroupPackages, feature_category: :package_registry do
let(:url) { api("/groups/#{group.id}/-/packages/npm/#{package_name}") }
it_behaves_like 'handling get metadata requests', scope: :group
-
- context 'with a duplicate package name in another project' do
+ it_behaves_like 'rejects invalid package names' do
subject { get(url) }
-
- before do
- group.add_developer(user)
- end
-
- let_it_be(:project2) { create(:project, :public, namespace: namespace) }
- let_it_be(:package2) do
- create(:npm_package,
- project: project2,
- name: "@#{group.path}/scoped_package",
- version: '1.2.0')
- end
-
- it_behaves_like 'rejects invalid package names'
-
- it 'includes all matching package versions in the response' do
- subject
-
- expect(json_response['versions'].keys).to match_array([package.version, package2.version])
- end
-
- context 'with the feature flag disabled' do
- before do
- stub_feature_flags(npm_allow_packages_in_multiple_projects: false)
- end
-
- it 'returns matching package versions from only one project' do
- subject
-
- expect(json_response['versions'].keys).to match_array([package2.version])
- end
- end
end
+ it_behaves_like 'handling get metadata requests for packages in multiple projects'
+
context 'with mixed group and project visibilities' do
subject { get(url, headers: headers) }
@@ -162,13 +131,13 @@ RSpec.describe API::NpmGroupPackages, feature_category: :package_registry do
end
end
- describe 'GET /api/v4/packages/npm/-/package/*package_name/dist-tags' do
+ describe 'GET /api/v4/groups/:id/-/packages/npm/-/package/*package_name/dist-tags' do
it_behaves_like 'handling get dist tags requests', scope: :group do
let(:url) { api("/groups/#{group.id}/-/packages/npm/-/package/#{package_name}/dist-tags") }
end
end
- describe 'PUT /api/v4/packages/npm/-/package/*package_name/dist-tags/:tag' do
+ describe 'PUT /api/v4/groups/:id/-/packages/npm/-/package/*package_name/dist-tags/:tag' do
it_behaves_like 'handling create dist tag requests', scope: :group do
let(:url) { api("/groups/#{group.id}/-/packages/npm/-/package/#{package_name}/dist-tags/#{tag_name}") }
end
@@ -183,7 +152,7 @@ RSpec.describe API::NpmGroupPackages, feature_category: :package_registry do
end
end
- describe 'DELETE /api/v4/packages/npm/-/package/*package_name/dist-tags/:tag' do
+ describe 'DELETE /api/v4/groups/:id/-/packages/npm/-/package/*package_name/dist-tags/:tag' do
it_behaves_like 'handling delete dist tag requests', scope: :group do
let(:url) { api("/groups/#{group.id}/-/packages/npm/-/package/#{package_name}/dist-tags/#{tag_name}") }
end
diff --git a/spec/requests/api/npm_instance_packages_spec.rb b/spec/requests/api/npm_instance_packages_spec.rb
index 4f965d86d66..7b74a052860 100644
--- a/spec/requests/api/npm_instance_packages_spec.rb
+++ b/spec/requests/api/npm_instance_packages_spec.rb
@@ -17,34 +17,7 @@ RSpec.describe API::NpmInstancePackages, feature_category: :package_registry do
it_behaves_like 'handling get metadata requests', scope: :instance
it_behaves_like 'rejects invalid package names'
-
- context 'with a duplicate package name in another project' do
- let_it_be(:project2) { create(:project, :public, namespace: namespace) }
- let_it_be(:package2) do
- create(:npm_package,
- project: project2,
- name: "@#{group.path}/scoped_package",
- version: '1.2.0')
- end
-
- it 'includes all matching package versions in the response' do
- subject
-
- expect(json_response['versions'].keys).to match_array([package.version, package2.version])
- end
-
- context 'with the feature flag disabled' do
- before do
- stub_feature_flags(npm_allow_packages_in_multiple_projects: false)
- end
-
- it 'returns matching package versions from only one project' do
- subject
-
- expect(json_response['versions'].keys).to match_array([package2.version])
- end
- end
- end
+ it_behaves_like 'handling get metadata requests for packages in multiple projects'
context 'when metadata cache exists' do
let_it_be(:npm_metadata_cache) { create(:npm_metadata_cache, package_name: package.name, project_id: project.id) }
diff --git a/spec/requests/api/nuget_project_packages_spec.rb b/spec/requests/api/nuget_project_packages_spec.rb
index da74409cd77..b55d992c1e4 100644
--- a/spec/requests/api/nuget_project_packages_spec.rb
+++ b/spec/requests/api/nuget_project_packages_spec.rb
@@ -34,6 +34,37 @@ RSpec.describe API::NugetProjectPackages, feature_category: :package_registry do
it_behaves_like 'returning response status', :ok
end
+ shared_examples 'nuget serialize odata package endpoint' do
+ subject { get api(url), params: params }
+
+ it { is_expected.to have_request_urgency(:low) }
+
+ it_behaves_like 'returning response status', :success
+
+ it 'returns a valid xml response and invokes OdataPackageEntryService' do
+ expect(Packages::Nuget::OdataPackageEntryService).to receive(:new).with(target, service_params).and_call_original
+
+ subject
+
+ expect(response.media_type).to eq('application/xml')
+ end
+
+ [nil, '', '%20', '..%2F..', '../..'].each do |value|
+ context "with invalid package name #{value}" do
+ let(:package_name) { value }
+
+ it_behaves_like 'returning response status', :bad_request
+ end
+ end
+
+ context 'with missing required params' do
+ let(:params) { {} }
+ let(:package_version) { nil }
+
+ it_behaves_like 'returning response status', :bad_request
+ end
+ end
+
describe 'GET /api/v4/projects/:id/packages/nuget' do
let(:url) { "/projects/#{target.id}/packages/nuget/index.json" }
@@ -228,6 +259,43 @@ RSpec.describe API::NugetProjectPackages, feature_category: :package_registry do
it_behaves_like 'rejects nuget access with invalid target id'
end
+ describe 'GET /api/v4/projects/:id/packages/nuget/v2/FindPackagesById()' do
+ it_behaves_like 'nuget serialize odata package endpoint' do
+ let(:url) { "/projects/#{target.id}/packages/nuget/v2/FindPackagesById()" }
+ let(:params) { { id: "'#{package_name}'" } }
+ let(:service_params) { { package_name: package_name } }
+ end
+ end
+
+ describe 'GET /api/v4/projects/:id/packages/nuget/v2/Packages()' do
+ it_behaves_like 'nuget serialize odata package endpoint' do
+ let(:url) { "/projects/#{target.id}/packages/nuget/v2/Packages()" }
+ let(:params) { { '$filter' => "(tolower(Id) eq '#{package_name&.downcase}')" } }
+ let(:service_params) { { package_name: package_name&.downcase } }
+ end
+ end
+
+ describe 'GET /api/v4/projects/:id/packages/nuget/v2/Packages(Id=\'*\',Version=\'*\')' do
+ let(:package_version) { '1.0.0' }
+ let(:url) { "/projects/#{target.id}/packages/nuget/v2/Packages(Id='#{package_name}',Version='#{package_version}')" }
+ let(:params) { {} }
+ let(:service_params) { { package_name: package_name, package_version: package_version } }
+
+ it_behaves_like 'nuget serialize odata package endpoint'
+
+ context 'with invalid package version' do
+ subject { get api(url) }
+
+ ['', '1', '1./2.3', '%20', '..%2F..', '../..'].each do |value|
+ context "with invalid package version #{value}" do
+ let(:package_version) { value }
+
+ it_behaves_like 'returning response status', :bad_request
+ end
+ end
+ end
+ end
+
describe 'PUT /api/v4/projects/:id/packages/nuget/authorize' do
it_behaves_like 'nuget authorize upload endpoint' do
let(:url) { "/projects/#{target.id}/packages/nuget/authorize" }
diff --git a/spec/requests/api/project_attributes.yml b/spec/requests/api/project_attributes.yml
index aa8568d4951..d95f96c25d6 100644
--- a/spec/requests/api/project_attributes.yml
+++ b/spec/requests/api/project_attributes.yml
@@ -44,6 +44,7 @@ itself: # project
- storage_version
- topic_list
- verification_checksum
+ - organization_id
remapped_attributes:
avatar: avatar_url
build_allow_git_fetch: build_git_strategy
@@ -93,6 +94,7 @@ ci_cd_settings:
- id
- project_id
- merge_trains_enabled
+ - merge_trains_skip_train_allowed
- merge_pipelines_enabled
- auto_rollback_enabled
- inbound_job_token_scope_enabled
@@ -167,6 +169,7 @@ project_setting:
- allow_pipeline_trigger_approve_deployment
- pages_unique_domain_enabled
- pages_unique_domain
+ - pages_multiple_versions_enabled
- runner_registration_enabled
- product_analytics_instrumentation_key
- jitsu_host
diff --git a/spec/requests/api/project_import_spec.rb b/spec/requests/api/project_import_spec.rb
index 4496e3aa7c3..49471b98eba 100644
--- a/spec/requests/api/project_import_spec.rb
+++ b/spec/requests/api/project_import_spec.rb
@@ -470,12 +470,14 @@ RSpec.describe API::ProjectImport, :aggregate_failures, feature_category: :impor
end
describe 'GET /projects/:id/import' do
- it 'public project accessible for an unauthenticated user' do
- project = create(:project, :public)
+ context 'with an unauthenticated user' do
+ it 'returns unauthorized response for public project import status' do
+ project = create(:project, :public)
- get api("/projects/#{project.id}/import", nil)
+ get api("/projects/#{project.id}/import", nil)
- expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
end
it 'returns the import status' do
diff --git a/spec/requests/api/project_packages_spec.rb b/spec/requests/api/project_packages_spec.rb
index 09991be998a..219c748c9a6 100644
--- a/spec/requests/api/project_packages_spec.rb
+++ b/spec/requests/api/project_packages_spec.rb
@@ -553,6 +553,12 @@ RSpec.describe API::ProjectPackages, feature_category: :package_registry do
let(:per_page) { 2 }
+ it_behaves_like 'an endpoint with keyset pagination' do
+ let(:first_record) { pipeline3 }
+ let(:second_record) { pipeline2 }
+ let(:api_call) { api(package_pipelines_url, user) }
+ end
+
context 'with no cursor supplied' do
subject { get api(package_pipelines_url, user), params: { per_page: per_page } }
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 601e8cb3081..e3e8df79a1d 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -713,7 +713,9 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
let!(:project9) { create(:project, :public, path: 'gitlab9') }
before do
- user.update!(starred_projects: [project5, project7, project8, project9])
+ [project5, project7, project8, project9].each do |project|
+ user.users_star_projects.create!(project_id: project.id)
+ end
end
context 'including owned filter' do
@@ -4657,8 +4659,8 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
end
before do
- user.update!(starred_projects: [public_project])
- private_user.update!(starred_projects: [public_project])
+ user.users_star_projects.create!(project_id: public_project.id)
+ private_user.users_star_projects.create!(project_id: public_project.id)
end
it 'returns not_found(404) for not existing project' do
diff --git a/spec/requests/api/search_spec.rb b/spec/requests/api/search_spec.rb
index 0feff90d088..6a57cf52466 100644
--- a/spec/requests/api/search_spec.rb
+++ b/spec/requests/api/search_spec.rb
@@ -473,6 +473,21 @@ RSpec.describe API::Search, :clean_gitlab_redis_rate_limiting, feature_category:
get api(endpoint, current_user), params: { scope: 'users', search: 'foo@bar.com' }
end
end
+
+ context 'when request exceeds the rate limit', :freeze_time, :clean_gitlab_redis_rate_limiting do
+ before do
+ stub_application_setting(search_rate_limit: 1)
+ end
+
+ it 'allows user whose username is in the allowlist' do
+ stub_application_setting(search_rate_limit_allowlist: [user.username])
+
+ get api(endpoint, user), params: { scope: 'users', search: 'foo@bar.com' }
+ get api(endpoint, user), params: { scope: 'users', search: 'foo@bar.com' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
end
describe "GET /groups/:id/search" do
@@ -658,6 +673,21 @@ RSpec.describe API::Search, :clean_gitlab_redis_rate_limiting, feature_category:
get api(endpoint, current_user), params: { scope: 'users', search: 'foo@bar.com' }
end
end
+
+ context 'when request exceeds the rate limit', :freeze_time, :clean_gitlab_redis_rate_limiting do
+ before do
+ stub_application_setting(search_rate_limit: 1)
+ end
+
+ it 'allows user whose username is in the allowlist' do
+ stub_application_setting(search_rate_limit_allowlist: [user.username])
+
+ get api(endpoint, user), params: { scope: 'users', search: 'foo@bar.com' }
+ get api(endpoint, user), params: { scope: 'users', search: 'foo@bar.com' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
end
end
@@ -1057,6 +1087,21 @@ RSpec.describe API::Search, :clean_gitlab_redis_rate_limiting, feature_category:
get api(endpoint, current_user), params: { scope: 'users', search: 'foo@bar.com' }
end
end
+
+ context 'when request exceeds the rate limit', :freeze_time, :clean_gitlab_redis_rate_limiting do
+ before do
+ stub_application_setting(search_rate_limit: 1)
+ end
+
+ it 'allows user whose username is in the allowlist' do
+ stub_application_setting(search_rate_limit_allowlist: [user.username])
+
+ get api(endpoint, user), params: { scope: 'users', search: 'foo@bar.com' }
+ get api(endpoint, user), params: { scope: 'users', search: 'foo@bar.com' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
end
end
end
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index 12af1fc1b79..ad52076523c 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -27,6 +27,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response['secret_detection_token_revocation_url']).to be_nil
expect(json_response['secret_detection_revocation_token_types_url']).to be_nil
expect(json_response['sourcegraph_public_only']).to be_truthy
+ expect(json_response['decompress_archive_file_timeout']).to eq(210)
expect(json_response['default_preferred_language']).to be_a String
expect(json_response['default_project_visibility']).to be_a String
expect(json_response['default_snippet_visibility']).to be_a String
@@ -153,6 +154,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
enforce_terms: true,
terms: 'Hello world!',
performance_bar_allowed_group_path: group.full_path,
+ decompress_archive_file_timeout: 60,
diff_max_patch_bytes: 300_000,
diff_max_files: 2000,
diff_max_lines: 50000,
@@ -234,6 +236,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response['enforce_terms']).to be(true)
expect(json_response['terms']).to eq('Hello world!')
expect(json_response['performance_bar_allowed_group_id']).to eq(group.id)
+ expect(json_response['decompress_archive_file_timeout']).to eq(60)
expect(json_response['diff_max_patch_bytes']).to eq(300_000)
expect(json_response['diff_max_files']).to eq(2000)
expect(json_response['diff_max_lines']).to eq(50000)
@@ -851,7 +854,8 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
sentry_enabled: true,
sentry_dsn: 'http://sentry.example.com',
sentry_clientside_dsn: 'http://sentry.example.com',
- sentry_environment: 'production'
+ sentry_environment: 'production',
+ sentry_clientside_traces_sample_rate: 0.25
}
end
diff --git a/spec/requests/api/usage_data_queries_spec.rb b/spec/requests/api/usage_data_queries_spec.rb
index 584b0f31a07..fdd186439a6 100644
--- a/spec/requests/api/usage_data_queries_spec.rb
+++ b/spec/requests/api/usage_data_queries_spec.rb
@@ -6,8 +6,8 @@ require 'rake_helper'
RSpec.describe API::UsageDataQueries, :aggregate_failures, feature_category: :service_ping do
include UsageDataHelpers
- let_it_be(:admin) { create(:user, admin: true) }
- let_it_be(:user) { create(:user) }
+ let!(:admin) { create(:user, admin: true) }
+ let!(:user) { create(:user) }
before do
stub_usage_data_connections
@@ -70,7 +70,7 @@ RSpec.describe API::UsageDataQueries, :aggregate_failures, feature_category: :se
end
end
- context 'when querying sql metrics' do
+ context 'when querying sql metrics', type: :task do
let(:file) { Rails.root.join('tmp', 'test', 'sql_metrics_queries.json') }
before do
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index 81881532240..5973649a9d7 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile do
include WorkhorseHelpers
+ include KeysetPaginationHelpers
let_it_be(:admin) { create(:admin) }
let_it_be(:user, reload: true) { create(:user, username: 'user.withdot') }
@@ -258,6 +259,48 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
end.not_to exceed_all_query_limit(control_count)
end
end
+
+ context 'when api_keyset_pagination_multi_order FF is enabled' do
+ before do
+ stub_feature_flags(api_keyset_pagination_multi_order: true)
+ end
+
+ it_behaves_like 'an endpoint with keyset pagination', invalid_order: nil do
+ let(:first_record) { user }
+ let(:second_record) { admin }
+ let(:api_call) { api(path, user) }
+ end
+
+ it 'still supports offset pagination when keyset pagination params are not provided' do
+ get api(path, user)
+
+ expect(response).to include_pagination_headers
+ end
+ end
+
+ context 'when api_keyset_pagination_multi_order FF is disabled' do
+ before do
+ stub_feature_flags(api_keyset_pagination_multi_order: false)
+ end
+
+ it 'paginates the records correctly using offset pagination' do
+ get api(path, user), params: { pagination: 'keyset', per_page: 1 }
+
+ params_for_next_page = pagination_params_from_next_url(response)
+ expect(response).to include_pagination_headers
+ expect(params_for_next_page).not_to include('cursor')
+ end
+
+      context 'when making requests with unsupported ordering structure' do
+        it 'does not return an error' do
+ get api(path, user),
+ params: { pagination: 'keyset', per_page: 1, order_by: 'created_at', sort: 'asc' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ end
+ end
+ end
end
end
@@ -494,7 +537,7 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
context "when admin" do
context 'exclude_internal param' do
- let_it_be(:internal_user) { User.alert_bot }
+ let_it_be(:internal_user) { Users::Internal.alert_bot }
it 'returns all users when it is not set' do
get api("/users?exclude_internal=false", admin)
@@ -3602,7 +3645,7 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
end
context 'for an internal user' do
- let(:user) { User.alert_bot }
+ let(:user) { Users::Internal.alert_bot }
it 'returns 403' do
deactivate
diff --git a/spec/requests/clusters/agents/dashboard_controller_spec.rb b/spec/requests/clusters/agents/dashboard_controller_spec.rb
new file mode 100644
index 00000000000..c3c16d9b385
--- /dev/null
+++ b/spec/requests/clusters/agents/dashboard_controller_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Clusters::Agents::DashboardController, feature_category: :deployment_management do
+ describe 'GET show' do
+ let_it_be(:organization) { create(:group) }
+ let_it_be(:agent_management_project) { create(:project, group: organization) }
+ let_it_be(:agent) { create(:cluster_agent, project: agent_management_project) }
+ let_it_be(:deployment_project) { create(:project, group: organization) }
+ let(:user) { create(:user) }
+ let(:stub_ff) { true }
+
+ before do
+ allow(::Gitlab::Kas).to receive(:enabled?).and_return(true)
+ end
+
+ context 'with authorized user' do
+ let!(:authorization) do
+ create(
+ :agent_user_access_project_authorization,
+ agent: agent,
+ project: deployment_project
+ )
+ end
+
+ before do
+ stub_feature_flags(k8s_dashboard: stub_ff)
+ deployment_project.add_member(user, :developer)
+ sign_in(user)
+ get kubernetes_dashboard_path(agent.id)
+ end
+
+ it 'sets the kas cookie' do
+ expect(
+ request.env['action_dispatch.cookies'][Gitlab::Kas::COOKIE_KEY]
+ ).to be_present
+ end
+
+    it 'returns ok' do
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ context 'with k8s_dashboard feature flag disabled' do
+ let(:stub_ff) { false }
+
+ it 'does not set the kas cookie' do
+ expect(
+ request.env['action_dispatch.cookies'][Gitlab::Kas::COOKIE_KEY]
+ ).not_to be_present
+ end
+
+ it 'returns not found' do
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'with unauthorized user' do
+ before do
+ sign_in(user)
+ get kubernetes_dashboard_path(agent.id)
+ end
+
+ it 'does not set the kas cookie' do
+ expect(
+ request.env['action_dispatch.cookies'][Gitlab::Kas::COOKIE_KEY]
+ ).not_to be_present
+ end
+
+ it 'returns not found' do
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+end
diff --git a/spec/requests/content_security_policy_spec.rb b/spec/requests/content_security_policy_spec.rb
index 3f0665f1ce5..3ce7e33d88a 100644
--- a/spec/requests/content_security_policy_spec.rb
+++ b/spec/requests/content_security_policy_spec.rb
@@ -7,6 +7,7 @@ require 'spec_helper'
# of testing in application_controller_spec.
RSpec.describe 'Content Security Policy', feature_category: :application_instrumentation do
let(:snowplow_host) { 'snowplow.example.com' }
+ let(:vite_origin) { "#{ViteRuby.instance.config.host}:#{ViteRuby.instance.config.port}" }
shared_examples 'snowplow is not in the CSP' do
it 'does not add the snowplow collector hostname to the CSP' do
@@ -46,5 +47,33 @@ RSpec.describe 'Content Security Policy', feature_category: :application_instrum
it_behaves_like 'snowplow is not in the CSP'
end
+
+ context 'when vite enabled during development',
+ quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/424334' do
+ before do
+ stub_rails_env('development')
+ stub_feature_flags(vite: true)
+
+ get explore_root_url
+ end
+
+ it 'adds vite csp' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers['Content-Security-Policy']).to include(vite_origin)
+ end
+ end
+
+ context 'when vite disabled' do
+ before do
+ stub_feature_flags(vite: false)
+
+ get explore_root_url
+ end
+
+ it "doesn't add vite csp" do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers['Content-Security-Policy']).not_to include(vite_origin)
+ end
+ end
end
end
diff --git a/spec/requests/groups/email_campaigns_controller_spec.rb b/spec/requests/groups/email_campaigns_controller_spec.rb
deleted file mode 100644
index b6e765eba37..00000000000
--- a/spec/requests/groups/email_campaigns_controller_spec.rb
+++ /dev/null
@@ -1,127 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Groups::EmailCampaignsController, feature_category: :navigation do
- using RSpec::Parameterized::TableSyntax
-
- describe 'GET #index', :snowplow do
- let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, group: group) }
- let_it_be(:user) { create(:user) }
-
- let(:track) { 'create' }
- let(:series) { '0' }
- let(:schema) { described_class::EMAIL_CAMPAIGNS_SCHEMA_URL }
- let(:subject_line_text) { Gitlab::Email::Message::InProductMarketing.for(track.to_sym).new(group: group, user: user, series: series.to_i).subject_line }
- let(:data) do
- {
- namespace_id: group.id,
- track: track.to_sym,
- series: series.to_i,
- subject_line: subject_line_text
- }
- end
-
- before do
- sign_in(user)
- group.add_developer(user)
- end
-
- subject do
- get group_email_campaigns_url(group, track: track, series: series)
- response
- end
-
- shared_examples 'track and redirect' do
- it 'redirects' do
- expect(subject).to have_gitlab_http_status(:redirect)
- end
-
- context 'on SaaS', :saas do
- it 'emits a snowplow event', :snowplow do
- subject
-
- expect_snowplow_event(
- category: described_class.name,
- action: 'click',
- context: [{
- schema: described_class::EMAIL_CAMPAIGNS_SCHEMA_URL,
- data: { namespace_id: group.id, series: series.to_i, subject_line: subject_line_text, track: track.to_s }
- }],
- user: user,
- namespace: group
- )
- end
-
- it 'does not save the cta_click' do
- expect(Users::InProductMarketingEmail).not_to receive(:save_cta_click)
-
- subject
- end
- end
-
- context 'when not on.com' do
- it 'saves the cta_click' do
- expect(Users::InProductMarketingEmail).to receive(:save_cta_click)
-
- subject
- end
-
- it 'does not track snowplow events' do
- subject
-
- expect_no_snowplow_event
- end
- end
- end
-
- shared_examples 'no track and 404' do
- it 'returns 404' do
- expect(subject).to have_gitlab_http_status(:not_found)
- end
-
- it 'does not emit a snowplow event', :snowplow do
- subject
-
- expect_no_snowplow_event
- end
- end
-
- describe 'track parameter' do
- context 'when valid' do
- where(track: Namespaces::InProductMarketingEmailsService::TRACKS.keys.without(:experience))
-
- with_them do
- it_behaves_like 'track and redirect'
- end
- end
-
- context 'when invalid' do
- where(track: [nil, 'xxxx'])
-
- with_them do
- it_behaves_like 'no track and 404'
- end
- end
- end
-
- describe 'series parameter' do
- context 'when valid' do
- where(series: (0..Namespaces::InProductMarketingEmailsService::TRACKS[:create][:interval_days].length - 1).to_a)
-
- with_them do
- it_behaves_like 'track and redirect'
- end
- end
-
- context 'when invalid' do
- where(series: [-1, nil, Namespaces::InProductMarketingEmailsService::TRACKS[:create][:interval_days].length])
-
- with_them do
- it_behaves_like 'no track and 404'
- end
- end
- end
- end
-end
diff --git a/spec/requests/groups/settings/access_tokens_controller_spec.rb b/spec/requests/groups/settings/access_tokens_controller_spec.rb
index 0204af8ea8e..8d386d8c1b7 100644
--- a/spec/requests/groups/settings/access_tokens_controller_spec.rb
+++ b/spec/requests/groups/settings/access_tokens_controller_spec.rb
@@ -112,5 +112,27 @@ RSpec.describe Groups::Settings::AccessTokensController, feature_category: :syst
expect(assigns(:active_access_tokens).to_json).to eq(active_access_tokens.to_json)
end
+
+ it 'sets available scopes' do
+ expect(assigns(:scopes)).to include(Gitlab::Auth::K8S_PROXY_SCOPE)
+ end
+
+ context 'with feature flag k8s_proxy_pat disabled' do
+ before do
+ stub_feature_flags(k8s_proxy_pat: false)
+ get group_settings_access_tokens_path(resource)
+ end
+
+ it 'includes details of the active group access tokens' do
+ active_access_tokens =
+ ::GroupAccessTokenSerializer.new.represent(resource_access_tokens.reverse, group: resource)
+
+ expect(assigns(:active_access_tokens).to_json).to eq(active_access_tokens.to_json)
+ end
+
+ it 'sets available scopes' do
+ expect(assigns(:scopes)).not_to include(Gitlab::Auth::K8S_PROXY_SCOPE)
+ end
+ end
end
end
diff --git a/spec/requests/groups/work_items_controller_spec.rb b/spec/requests/groups/work_items_controller_spec.rb
index c47b3f03ec1..e5dd88a5471 100644
--- a/spec/requests/groups/work_items_controller_spec.rb
+++ b/spec/requests/groups/work_items_controller_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe 'Group Level Work Items', feature_category: :team_planning do
let_it_be(:group) { create(:group, :private) }
- let_it_be(:project) { create(:project, group: group) }
let_it_be(:developer) { create(:user).tap { |u| group.add_developer(u) } }
describe 'GET /groups/:group/-/work_items' do
@@ -46,4 +45,47 @@ RSpec.describe 'Group Level Work Items', feature_category: :team_planning do
end
end
end
+
+ describe 'GET /groups/:group/-/work_items/:iid' do
+ let_it_be(:work_item) { create(:work_item, :group_level, namespace: group) }
+ let(:work_items_path) do
+ url_for(controller: 'groups/work_items', action: :show, group_id: group.full_path, iid: work_item.iid)
+ end
+
+ before do
+ sign_in(current_user)
+ end
+
+ context 'when the user can read the group' do
+ let(:current_user) { developer }
+
+ it 'renders index' do
+ get work_items_path
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ context 'when the namespace_level_work_items feature flag is disabled' do
+ before do
+ stub_feature_flags(namespace_level_work_items: false)
+ end
+
+ it 'returns not found' do
+ get work_items_path
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'when the user cannot read the group' do
+ let(:current_user) { create(:user) }
+
+ it 'returns not found' do
+ get work_items_path
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
end
diff --git a/spec/requests/openid_connect_spec.rb b/spec/requests/openid_connect_spec.rb
index 217241200ff..6573fe570db 100644
--- a/spec/requests/openid_connect_spec.rb
+++ b/spec/requests/openid_connect_spec.rb
@@ -273,7 +273,7 @@ RSpec.describe 'OpenID Connect requests', feature_category: :system_access do
let(:expected_scopes) do
%w[
admin_mode api read_user read_api read_repository write_repository sudo openid profile email
- read_observability write_observability create_runner
+ read_observability write_observability create_runner k8s_proxy ai_features
]
end
diff --git a/spec/requests/organizations/organizations_controller_spec.rb b/spec/requests/organizations/organizations_controller_spec.rb
index 788d740504a..953adb2cbf6 100644
--- a/spec/requests/organizations/organizations_controller_spec.rb
+++ b/spec/requests/organizations/organizations_controller_spec.rb
@@ -13,25 +13,30 @@ RSpec.describe Organizations::OrganizationsController, feature_category: :cell d
end
end
- shared_examples 'action disabled by `ui_for_organizations` feature flag' do
- before do
- stub_feature_flags(ui_for_organizations: false)
- end
-
- it 'renders 404' do
+ shared_examples 'redirects to sign in page' do
+ it 'redirects to sign in page' do
gitlab_request
- expect(response).to have_gitlab_http_status(:not_found)
+ expect(response).to redirect_to(new_user_session_path)
end
end
- shared_examples 'basic organization controller action' do
- context 'when the user is not logged in' do
- it_behaves_like 'successful response'
- it_behaves_like 'action disabled by `ui_for_organizations` feature flag'
+ shared_examples 'action disabled by `ui_for_organizations` feature flag' do
+ context 'when `ui_for_organizations` feature flag is disabled' do
+ before do
+ stub_feature_flags(ui_for_organizations: false)
+ end
+
+ it 'renders 404' do
+ gitlab_request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
end
+ end
- context 'when the user is logged in' do
+ shared_examples 'when the user is signed in' do
+ context 'when the user is signed in' do
before do
sign_in(user)
end
@@ -63,15 +68,52 @@ RSpec.describe Organizations::OrganizationsController, feature_category: :cell d
end
end
+ shared_examples 'controller action that requires authentication' do
+ context 'when the user is not signed in' do
+ it_behaves_like 'redirects to sign in page'
+
+ context 'when `ui_for_organizations` feature flag is disabled' do
+ before do
+ stub_feature_flags(ui_for_organizations: false)
+ end
+
+ it_behaves_like 'redirects to sign in page'
+ end
+ end
+
+ it_behaves_like 'when the user is signed in'
+ end
+
+ shared_examples 'controller action that does not require authentication' do
+ context 'when the user is not logged in' do
+ it_behaves_like 'successful response'
+ it_behaves_like 'action disabled by `ui_for_organizations` feature flag'
+ end
+
+ it_behaves_like 'when the user is signed in'
+ end
+
describe 'GET #show' do
subject(:gitlab_request) { get organization_path(organization) }
- it_behaves_like 'basic organization controller action'
+ it_behaves_like 'controller action that does not require authentication'
end
describe 'GET #groups_and_projects' do
subject(:gitlab_request) { get groups_and_projects_organization_path(organization) }
- it_behaves_like 'basic organization controller action'
+ it_behaves_like 'controller action that does not require authentication'
+ end
+
+ describe 'GET #new' do
+ subject(:gitlab_request) { get new_organization_path }
+
+ it_behaves_like 'controller action that requires authentication'
+ end
+
+ describe 'GET #index' do
+ subject(:gitlab_request) { get organizations_path }
+
+ it_behaves_like 'controller action that requires authentication'
end
end
diff --git a/spec/requests/projects/noteable_notes_spec.rb b/spec/requests/projects/noteable_notes_spec.rb
deleted file mode 100644
index a490e059680..00000000000
--- a/spec/requests/projects/noteable_notes_spec.rb
+++ /dev/null
@@ -1,78 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Project noteable notes', feature_category: :team_planning do
- describe '#index' do
- let_it_be(:merge_request) { create(:merge_request) }
-
- let(:etag_store) { Gitlab::EtagCaching::Store.new }
- let(:notes_path) { project_noteable_notes_path(project, target_type: merge_request.class.name.underscore, target_id: merge_request.id) }
- let(:project) { merge_request.project }
- let(:user) { project.first_owner }
-
- let(:response_etag) { response.headers['ETag'] }
- let(:stored_etag) { "W/\"#{etag_store.get(notes_path)}\"" }
-
- let(:default_headers) { { 'X-Last-Fetched-At' => 0 } }
-
- before do
- login_as(user)
- end
-
- it 'does not set a Gitlab::EtagCaching ETag if there is a note' do
- create(:note_on_merge_request, noteable: merge_request, project: merge_request.project)
-
- get notes_path, headers: default_headers
-
- expect(response).to have_gitlab_http_status(:ok)
-
- # Rack::ETag will set an etag based on the body digest, but that doesn't
- # interfere with notes pagination
- expect(response_etag).not_to eq(stored_etag)
- end
-
- it 'sets a Gitlab::EtagCaching ETag if there is no note' do
- get notes_path, headers: default_headers
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response_etag).to eq(stored_etag)
- end
-
- it "instruments cache hits correctly" do
- etag_store.touch(notes_path)
-
- expect(Gitlab::Metrics::RailsSlis.request_apdex).to(
- receive(:increment).with(
- labels: {
- request_urgency: :medium,
- feature_category: "team_planning",
- endpoint_id: "Projects::NotesController#index"
- },
- success: be_in([true, false])
- )
- )
- allow(ActiveSupport::Notifications).to receive(:instrument).and_call_original
-
- expect(ActiveSupport::Notifications).to(
- receive(:instrument).with(
- 'process_action.action_controller',
- a_hash_including(
- {
- request_urgency: :medium,
- target_duration_s: 0.5,
- metadata: a_hash_including({
- 'meta.feature_category' => 'team_planning',
- 'meta.caller_id' => "Projects::NotesController#index"
- })
- }
- )
- )
- )
-
- get notes_path, headers: default_headers.merge("if-none-match": stored_etag)
-
- expect(response).to have_gitlab_http_status(:not_modified)
- end
- end
-end
diff --git a/spec/requests/projects/settings/access_tokens_controller_spec.rb b/spec/requests/projects/settings/access_tokens_controller_spec.rb
index 666dc42bcab..b4cfa964ac8 100644
--- a/spec/requests/projects/settings/access_tokens_controller_spec.rb
+++ b/spec/requests/projects/settings/access_tokens_controller_spec.rb
@@ -113,5 +113,27 @@ RSpec.describe Projects::Settings::AccessTokensController, feature_category: :sy
expect(assigns(:active_access_tokens).to_json).to eq(active_access_tokens.to_json)
end
+
+ it 'sets available scopes' do
+ expect(assigns(:scopes)).to include(Gitlab::Auth::K8S_PROXY_SCOPE)
+ end
+
+ context 'with feature flag k8s_proxy_pat disabled' do
+ before do
+ stub_feature_flags(k8s_proxy_pat: false)
+ get project_settings_access_tokens_path(resource)
+ end
+
+ it 'includes details of the active project access tokens' do
+ active_access_tokens =
+ ::ProjectAccessTokenSerializer.new.represent(resource_access_tokens.reverse, project: resource)
+
+ expect(assigns(:active_access_tokens).to_json).to eq(active_access_tokens.to_json)
+ end
+
+ it 'sets available scopes' do
+ expect(assigns(:scopes)).not_to include(Gitlab::Auth::K8S_PROXY_SCOPE)
+ end
+ end
end
end
diff --git a/spec/requests/projects/tracing_controller_spec.rb b/spec/requests/projects/tracing_controller_spec.rb
deleted file mode 100644
index 8996ea7f8d6..00000000000
--- a/spec/requests/projects/tracing_controller_spec.rb
+++ /dev/null
@@ -1,104 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::TracingController, feature_category: :tracing do
- let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, group: group) }
- let_it_be(:user) { create(:user) }
- let(:path) { nil }
- let(:observability_tracing_ff) { true }
-
- subject do
- get path
- response
- end
-
- before do
- stub_feature_flags(observability_tracing: observability_tracing_ff)
- sign_in(user)
- end
-
- shared_examples 'tracing route request' do
- it_behaves_like 'observability csp policy' do
- before_all do
- project.add_developer(user)
- end
-
- let(:tested_path) { path }
- end
-
- context 'when user does not have permissions' do
- it 'returns 404' do
- expect(subject).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when user has permissions' do
- before_all do
- project.add_developer(user)
- end
-
- it 'returns 200' do
- expect(subject).to have_gitlab_http_status(:ok)
- end
-
- context 'when feature is disabled' do
- let(:observability_tracing_ff) { false }
-
- it 'returns 404' do
- expect(subject).to have_gitlab_http_status(:not_found)
- end
- end
- end
- end
-
- describe 'GET #index' do
- let(:path) { project_tracing_index_path(project) }
-
- it_behaves_like 'tracing route request'
-
- describe 'html response' do
- before_all do
- project.add_developer(user)
- end
-
- it 'renders the js-tracing element correctly' do
- element = Nokogiri::HTML.parse(subject.body).at_css('#js-tracing')
-
- expected_view_model = {
- tracingUrl: Gitlab::Observability.tracing_url(project),
- provisioningUrl: Gitlab::Observability.provisioning_url(project),
- oauthUrl: Gitlab::Observability.oauth_url
- }.to_json
- expect(element.attributes['data-view-model'].value).to eq(expected_view_model)
- end
- end
- end
-
- describe 'GET #show' do
- let(:path) { project_tracing_path(project, id: "test-trace-id") }
-
- it_behaves_like 'tracing route request'
-
- describe 'html response' do
- before_all do
- project.add_developer(user)
- end
-
- it 'renders the js-tracing element correctly' do
- element = Nokogiri::HTML.parse(subject.body).at_css('#js-tracing-details')
-
- expected_view_model = {
- tracingIndexUrl: project_tracing_index_path(project),
- traceId: 'test-trace-id',
- tracingUrl: Gitlab::Observability.tracing_url(project),
- provisioningUrl: Gitlab::Observability.provisioning_url(project),
- oauthUrl: Gitlab::Observability.oauth_url
- }.to_json
-
- expect(element.attributes['data-view-model'].value).to eq(expected_view_model)
- end
- end
- end
-end
diff --git a/spec/requests/rack_attack_global_spec.rb b/spec/requests/rack_attack_global_spec.rb
index 0dd8a15c3a4..3f5cd24f3dd 100644
--- a/spec/requests/rack_attack_global_spec.rb
+++ b/spec/requests/rack_attack_global_spec.rb
@@ -320,6 +320,120 @@ feature_category: :system_access do
end
end
+ describe 'protected paths for get' do
+ let(:request_method) { 'GET' }
+
+ context 'unauthenticated requests' do
+ let(:protected_path_for_get_request_that_does_not_require_authentication) do
+ '/users/sign_in'
+ end
+
+ def do_request
+ get protected_path_for_get_request_that_does_not_require_authentication
+ end
+
+ before do
+ settings_to_set[:throttle_protected_paths_requests_per_period] = requests_per_period # 1
+ settings_to_set[:throttle_protected_paths_period_in_seconds] = period_in_seconds # 10_000
+ settings_to_set[:protected_paths_for_get_request] = %w[/users/sign_in]
+ end
+
+ context 'when protected paths throttle is disabled' do
+ before do
+ settings_to_set[:throttle_protected_paths_enabled] = false
+ stub_application_setting(settings_to_set)
+ end
+
+ it 'allows requests over the rate limit' do
+ (1 + requests_per_period).times do
+ do_request
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
+
+ context 'when protected paths throttle is enabled' do
+ before do
+ settings_to_set[:throttle_protected_paths_enabled] = true
+ stub_application_setting(settings_to_set)
+ end
+
+ it 'rejects requests over the rate limit' do
+ requests_per_period.times do
+ do_request
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ expect_rejection { get protected_path_for_get_request_that_does_not_require_authentication }
+ end
+
+ it 'allows GET requests to unprotected paths over the rate limit' do
+ (1 + requests_per_period).times do
+ get '/api/graphql'
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ it_behaves_like 'tracking when dry-run mode is set' do
+ let(:throttle_name) { 'throttle_unauthenticated_get_protected_paths' }
+ end
+ end
+ end
+
+ context 'API requests authenticated with personal access token', :api do
+ let(:user) { create(:user) }
+ let(:token) { create(:personal_access_token, user: user) }
+ let(:other_user) { create(:user) }
+ let(:other_user_token) { create(:personal_access_token, user: other_user) }
+ let(:throttle_setting_prefix) { 'throttle_protected_paths' }
+ let(:api_partial_url) { '/user/emails' }
+
+ let(:protected_paths_for_get_request) do
+ [
+ '/api/v4/user/emails'
+ ]
+ end
+
+ before do
+ settings_to_set[:protected_paths_for_get_request] = protected_paths_for_get_request
+ stub_application_setting(settings_to_set)
+ end
+
+ context 'with the token in the query string' do
+ let(:request_args) { [api(api_partial_url, personal_access_token: token), {}] }
+ let(:other_user_request_args) { [api(api_partial_url, personal_access_token: other_user_token), {}] }
+
+ it_behaves_like 'rate-limited user based token-authenticated requests'
+ end
+
+ context 'with the token in the headers' do
+ let(:request_args) { api_get_args_with_token_headers(api_partial_url, personal_access_token_headers(token)) }
+ let(:other_user_request_args) { api_get_args_with_token_headers(api_partial_url, personal_access_token_headers(other_user_token)) }
+
+ it_behaves_like 'rate-limited user based token-authenticated requests'
+ end
+ end
+
+ describe 'web requests authenticated with regular login' do
+ let(:throttle_setting_prefix) { 'throttle_protected_paths' }
+ let(:user) { create(:user) }
+ let(:url_that_requires_authentication) { '/users/confirmation' }
+
+ let(:protected_paths_for_get_request) do
+ [
+ url_that_requires_authentication
+ ]
+ end
+
+ before do
+ settings_to_set[:protected_paths_for_get_request] = protected_paths_for_get_request
+ stub_application_setting(settings_to_set)
+ end
+
+ it_behaves_like 'rate-limited web authenticated requests'
+ end
+ end
+
describe 'Packages API' do
let(:request_method) { 'GET' }
diff --git a/spec/requests/search_controller_spec.rb b/spec/requests/search_controller_spec.rb
index 365b20ad4aa..37474aee1ee 100644
--- a/spec/requests/search_controller_spec.rb
+++ b/spec/requests/search_controller_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe SearchController, type: :request, feature_category: :global_searc
let_it_be(:projects) { create_list(:project, 5, :public, :repository, :wiki_repo) }
before do
+ stub_feature_flags(super_sidebar_nav_enrolled: false)
login_as(user)
end
diff --git a/spec/requests/sessions_spec.rb b/spec/requests/sessions_spec.rb
index 8e069427678..3428e607305 100644
--- a/spec/requests/sessions_spec.rb
+++ b/spec/requests/sessions_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
RSpec.describe 'Sessions', feature_category: :system_access do
include SessionHelpers
- context 'authentication', :allow_forgery_protection do
- let(:user) { create(:user) }
+ let(:user) { create(:user) }
+ context 'for authentication', :allow_forgery_protection do
it 'logout does not require a csrf token' do
login_as(user)
@@ -17,29 +17,36 @@ RSpec.describe 'Sessions', feature_category: :system_access do
end
end
- describe 'about_gitlab_active_user' do
- before do
- allow(::Gitlab).to receive(:com?).and_return(true)
- end
-
- let(:user) { create(:user) }
+ describe 'gitlab_user cookie', :saas do
+ let_it_be(:user) { create(:user) }
context 'when user signs in' do
it 'sets marketing cookie' do
post user_session_path(user: { login: user.username, password: user.password })
- expect(response.cookies['about_gitlab_active_user']).to be_present
+ expect(response.cookies['gitlab_user']).to be_present
end
end
context 'when user uses remember_me' do
it 'sets marketing cookie' do
post user_session_path(user: { login: user.username, password: user.password, remember_me: true })
- expect(response.cookies['about_gitlab_active_user']).to be_present
+ expect(response.cookies['gitlab_user']).to be_present
+ end
+ end
+
+ context 'when user has pending invitations' do
+ it 'accepts the invitations and stores a user location' do
+ create(:group_member, :invited, invite_email: user.email)
+ member = create(:group_member, :invited, invite_email: user.email)
+
+ post user_session_path(user: { login: user.username, password: user.password })
+
+ expect(response).to redirect_to(activity_group_path(member.source))
end
end
context 'when using two-factor authentication via OTP' do
- let(:user) { create(:user, :two_factor, :invalid) }
+ let_it_be(:user) { create(:user, :two_factor, :invalid) }
let(:user_params) { { login: user.username, password: user.password } }
def authenticate_2fa(otp_attempt:)
@@ -67,17 +74,6 @@ RSpec.describe 'Sessions', feature_category: :system_access do
end
end
- context 'when user signs out' do
- before do
- post user_session_path(user: { login: user.username, password: user.password })
- end
-
- it 'deletes marketing cookie' do
- post(destroy_user_session_path)
- expect(response.cookies['about_gitlab_active_user']).to be_nil
- end
- end
-
context 'when user is not using GitLab SaaS' do
before do
allow(::Gitlab).to receive(:com?).and_return(false)
@@ -85,7 +81,7 @@ RSpec.describe 'Sessions', feature_category: :system_access do
it 'does not set marketing cookie' do
post user_session_path(user: { login: user.username, password: user.password })
- expect(response.cookies['about_gitlab_active_user']).to be_nil
+ expect(response.cookies['gitlab_user']).to be_nil
end
end
end
diff --git a/spec/requests/users/namespace_visits_controller_spec.rb b/spec/requests/users/namespace_visits_controller_spec.rb
new file mode 100644
index 00000000000..eeeffcce67d
--- /dev/null
+++ b/spec/requests/users/namespace_visits_controller_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::NamespaceVisitsController, type: :request, feature_category: :navigation do
+ describe "POST /" do
+ let_it_be(:path) { track_namespace_visits_path }
+ let_it_be(:request_params) { nil }
+
+ subject(:request) { post path, params: request_params }
+
+ context "when user is not signed-in" do
+      it 'responds with a 302 redirect' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:redirect)
+ end
+ end
+
+ context "when user is signed-in" do
+ let_it_be(:user) { create(:user) }
+ let(:server_side_frecent_namespaces) { true }
+
+ before do
+ stub_feature_flags(server_side_frecent_namespaces: server_side_frecent_namespaces)
+ sign_in(user)
+ end
+
+ context "when the server_side_frecent_namespaces feature flag is disabled" do
+ let(:server_side_frecent_namespaces) { false }
+
+        it 'responds with a 404 not found' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context "when entity type is not provided" do
+ let_it_be(:request_params) { { id: '1' } }
+
+ it 'responds with a code 400' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context "when entity ID is not provided" do
+ let_it_be(:request_params) { { type: 'projects' } }
+
+ it 'responds with a code 400' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context "when entity type and ID are provided" do
+ let_it_be(:request_params) { { type: 'projects', id: 1 } }
+
+ it 'calls the worker and responds with a code 200' do
+ expect(Users::TrackNamespaceVisitsWorker).to receive(:perform_async)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/verifies_with_email_spec.rb b/spec/requests/verifies_with_email_spec.rb
index cc85ebc7ade..c8a0c0975a3 100644
--- a/spec/requests/verifies_with_email_spec.rb
+++ b/spec/requests/verifies_with_email_spec.rb
@@ -171,8 +171,8 @@ RSpec.describe 'VerifiesWithEmail', :clean_gitlab_redis_sessions, :clean_gitlab_
it 'adds a verification error message' do
expect(json_response)
- .to include('message' => (s_('IdentityVerification|The code is incorrect. '\
- 'Enter it again, or send a new code.')))
+ .to include('message' => s_('IdentityVerification|The code is incorrect. '\
+ 'Enter it again, or send a new code.'))
end
end
@@ -184,7 +184,7 @@ RSpec.describe 'VerifiesWithEmail', :clean_gitlab_redis_sessions, :clean_gitlab_
it 'adds a verification error message' do
expect(json_response)
- .to include('message' => (s_('IdentityVerification|The code has expired. Send a new code and try again.')))
+ .to include('message' => s_('IdentityVerification|The code has expired. Send a new code and try again.'))
end
end
diff --git a/spec/routing/organizations/organizations_controller_routing_spec.rb b/spec/routing/organizations/organizations_controller_routing_spec.rb
index 2b43f6d3afa..187553df2a1 100644
--- a/spec/routing/organizations/organizations_controller_routing_spec.rb
+++ b/spec/routing/organizations/organizations_controller_routing_spec.rb
@@ -10,6 +10,16 @@ RSpec.describe Organizations::OrganizationsController, :routing, feature_categor
.to route_to('organizations/organizations#show', organization_path: organization.path)
end
+ it 'routes to #new' do
+ expect(get("/-/organizations/new"))
+ .to route_to('organizations/organizations#new')
+ end
+
+ it 'routes to #index' do
+ expect(get("/-/organizations"))
+ .to route_to('organizations/organizations#index')
+ end
+
it 'routes to #groups_and_projects' do
expect(get("/-/organizations/#{organization.path}/groups_and_projects"))
.to route_to('organizations/organizations#groups_and_projects', organization_path: organization.path)
diff --git a/spec/rubocop/cop/capybara/testid_finders_spec.rb b/spec/rubocop/cop/capybara/testid_finders_spec.rb
new file mode 100644
index 00000000000..daedf5b8481
--- /dev/null
+++ b/spec/rubocop/cop/capybara/testid_finders_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+require 'rspec-parameterized'
+require_relative '../../../../rubocop/cop/capybara/testid_finders'
+
+RSpec.describe RuboCop::Cop::Capybara::TestidFinders, feature_category: :shared do
+ let(:source_file) { 'spec/features/foo_spec.rb' }
+
+ describe 'good examples' do
+ where(:code) do
+ [
+ "find_by_testid('some-testid')",
+ "find_by_testid('\#{testid}')",
+ "find('[data-testid=\"some-testid\"] > input')",
+ "find('[data-tracking=\"render\"]')",
+ "within_testid('some-testid')",
+ "within_testid('\#{testid}')",
+ "within('[data-testid=\"some-testid\"] > input')",
+ "within('[data-tracking=\"render\"]')"
+ ]
+ end
+
+ with_them do
+ it 'does not register an offense' do
+ expect_no_offenses(code)
+ end
+ end
+ end
+
+ describe 'bad examples' do
+ where(:code) do
+ [
+ "find('[data-testid=\"some-testid\"]')",
+ "find(\"[data-testid='some-testid']\")",
+ "within('[data-testid=\"some-testid\"]')",
+ "within(\"[data-testid='some-testid']\")"
+ ]
+ end
+
+ with_them do
+      it 'registers an offense' do
+ expect_offense(<<~CODE, node: code)
+ %{node}
+ ^{node} Prefer to use custom helper method[...]
+ CODE
+ end
+ end
+ end
+end
diff --git a/spec/rubocop/cop/lint/last_keyword_argument_spec.rb b/spec/rubocop/cop/lint/last_keyword_argument_spec.rb
deleted file mode 100644
index edd54a40b79..00000000000
--- a/spec/rubocop/cop/lint/last_keyword_argument_spec.rb
+++ /dev/null
@@ -1,168 +0,0 @@
-# frozen_string_literal: true
-
-require 'rubocop_spec_helper'
-require_relative '../../../../rubocop/cop/lint/last_keyword_argument'
-
-RSpec.describe RuboCop::Cop::Lint::LastKeywordArgument, :ruby27, feature_category: :shared do
- before do
- described_class.instance_variable_set(:@keyword_warnings, nil)
- allow(Dir).to receive(:glob).and_call_original
- allow(File).to receive(:read).and_call_original
- end
-
- context 'deprecation files does not exist' do
- before do
- allow(Dir).to receive(:glob).with(described_class::DEPRECATIONS_GLOB).and_return([])
- end
-
- it 'does not register an offense' do
- expect_no_offenses(<<~SOURCE)
- users.call(params)
- SOURCE
- end
- end
-
- context 'deprecation files does exist' do
- let(:create_spec_yaml) do
- <<~YAML
- ---
- test_mutations/boards/lists/create#resolve_with_proper_permissions_backlog_list_creates_one_and_only_one_backlog:
- - |
- DEPRECATION WARNING: /Users/tkuah/.rbenv/versions/2.7.2/lib/ruby/gems/2.7.0/gems/batch-loader-1.4.0/lib/batch_loader/graphql.rb:38: warning: Using the last argument as keyword parameters is deprecated; maybe ** should be added to the call
- /Users/tkuah/.rbenv/versions/2.7.2/lib/ruby/gems/2.7.0/gems/batch-loader-1.4.0/lib/batch_loader.rb:26: warning: The called method `batch' is defined here
- test_mutations/boards/lists/create#ready?_raises_an_error_if_required_arguments_are_missing:
- - |
- DEPRECATION WARNING: /Users/tkuah/code/ee-gdk/gitlab/create_service.rb:1: warning: Using the last argument as keyword parameters is deprecated; maybe ** should be added to the call
- /Users/tkuah/code/ee-gdk/gitlab/user.rb:17: warning: The called method `call' is defined here
- - |
- DEPRECATION WARNING: /Users/tkuah/code/ee-gdk/gitlab/other_warning_type.rb:1: warning: Some other warning type
- YAML
- end
-
- let(:projects_spec_yaml) do
- <<~YAML
- ---
- test_api/projects_get_/projects_when_unauthenticated_behaves_like_projects_response_returns_an_array_of_projects:
- - |
- DEPRECATION WARNING: /Users/tkuah/code/ee-gdk/gitlab/projects_spec.rb:1: warning: Using the last argument as keyword parameters is deprecated; maybe ** should be added to the call
- /Users/tkuah/code/ee-gdk/gitlab/lib/gitlab/project.rb:15: warning: The called method `initialize' is defined here
- - |
- DEPRECATION WARNING: /Users/tkuah/.rbenv/versions/2.7.2/lib/ruby/gems/2.7.0/gems/state_machines-activerecord-0.6.0/lib/state_machines/integrations/active_record.rb:511: warning: Using the last argument as keyword parameters is deprecated; maybe ** should be added to the call
- /Users/tkuah/.rbenv/versions/2.7.2/lib/ruby/gems/2.7.0/gems/activerecord-6.0.3.3/lib/active_record/suppressor.rb:43: warning: The called method `save' is defined here
- - |
- DEPRECATION WARNING: /Users/tkuah/.rbenv/versions/2.7.2/lib/ruby/gems/2.7.0/gems/rack-2.2.3/lib/rack/builder.rb:158: warning: Using the last argument as keyword parameters is deprecated; maybe ** should be added to the call
- /Users/tkuah/.rbenv/versions/2.7.2/lib/ruby/gems/2.7.0/gems/grape-1.4.0/lib/grape/middleware/error.rb:30: warning: The called method `initialize' is defined here
- YAML
- end
-
- before do
- allow(Dir).to receive(:glob).and_return(['deprecations/service/create_spec.yml', 'deprecations/api/projects_spec.yml'])
- allow(File).to receive(:read).with('deprecations/service/create_spec.yml').and_return(create_spec_yaml)
- allow(File).to receive(:read).with('deprecations/api/projects_spec.yml').and_return(projects_spec_yaml)
- end
-
- it 'registers an offense for last keyword warning' do
- expect_offense(<<~SOURCE, 'create_service.rb')
- users.call(params)
- ^^^^^^ Using the last argument as keyword parameters is deprecated
- SOURCE
-
- expect_correction(<<~SOURCE)
- users.call(**params)
- SOURCE
- end
-
- it 'does not register an offense for other warning types' do
- expect_no_offenses(<<~SOURCE, 'other_warning_type.rb')
- users.call(params)
- SOURCE
- end
-
- it 'registers an offense for the new method call' do
- expect_offense(<<~SOURCE, 'projects_spec.rb')
- Project.new(params)
- ^^^^^^ Using the last argument as keyword parameters is deprecated
- SOURCE
-
- expect_correction(<<~SOURCE)
- Project.new(**params)
- SOURCE
- end
-
- it 'registers an offense and corrects by converting hash to kwarg' do
- expect_offense(<<~SOURCE, 'create_service.rb')
- users.call(id, { a: :b, c: :d })
- ^^^^^^^^^^^^^^^^ Using the last argument as keyword parameters is deprecated
- SOURCE
-
- expect_correction(<<~SOURCE)
- users.call(id, a: :b, c: :d)
- SOURCE
- end
-
- it 'registers an offense on the last non-block argument' do
- expect_offense(<<~SOURCE, 'create_service.rb')
- users.call(id, params, &block)
- ^^^^^^ Using the last argument as keyword parameters is deprecated
- SOURCE
-
- expect_correction(<<~SOURCE)
- users.call(id, **params, &block)
- SOURCE
- end
-
- it 'does not register an offense if the only argument is a block argument' do
- expect_no_offenses(<<~SOURCE, 'create_service.rb')
- users.call(&block)
- SOURCE
- end
-
- it 'registers an offense and corrects by converting splat to double splat' do
- expect_offense(<<~SOURCE, 'create_service.rb')
- users.call(id, *params)
- ^^^^^^^ Using the last argument as keyword parameters is deprecated
- SOURCE
-
- expect_correction(<<~SOURCE)
- users.call(id, **params)
- SOURCE
- end
-
- it 'does not register an offense if already a kwarg', :aggregate_failures do
- expect_no_offenses(<<~SOURCE, 'create_service.rb')
- users.call(**params)
- SOURCE
-
- expect_no_offenses(<<~SOURCE, 'create_service.rb')
- users.call(id, a: :b, c: :d)
- SOURCE
- end
-
- it 'does not register an offense if the method name does not match' do
- expect_no_offenses(<<~SOURCE, 'create_service.rb')
- users.process(params)
- SOURCE
- end
-
- it 'does not register an offense if the line number does not match' do
- expect_no_offenses(<<~SOURCE, 'create_service.rb')
- users.process
- users.call(params)
- SOURCE
- end
-
- it 'does not register an offense if the filename does not match' do
- expect_no_offenses(<<~SOURCE, 'update_service.rb')
- users.call(params)
- SOURCE
- end
-
- context 'with Ruby 3.0', :ruby30 do
- it 'does not register an offense with known warning' do
- expect_no_offenses(<<~SOURCE, 'create_service.rb')
- users.call(params)
- SOURCE
- end
- end
- end
-end
diff --git a/spec/rubocop/cop/migration/versioned_migration_class_spec.rb b/spec/rubocop/cop/migration/versioned_migration_class_spec.rb
index 332b02078f4..b92d9d21498 100644
--- a/spec/rubocop/cop/migration/versioned_migration_class_spec.rb
+++ b/spec/rubocop/cop/migration/versioned_migration_class_spec.rb
@@ -82,6 +82,15 @@ RSpec.describe RuboCop::Cop::Migration::VersionedMigrationClass, feature_categor
end
RUBY
end
+
+ it 'excludes parentless classes defined inside the migration' do
+ expect_no_offenses(<<~RUBY)
+ class TestMigration < Gitlab::Database::Migration[2.1]
+ class TestClass
+ end
+ end
+ RUBY
+ end
end
end
end
diff --git a/spec/scripts/generate_message_to_run_e2e_pipeline_spec.rb b/spec/scripts/generate_message_to_run_e2e_pipeline_spec.rb
index 2747e1ec811..aa758e19dfa 100644
--- a/spec/scripts/generate_message_to_run_e2e_pipeline_spec.rb
+++ b/spec/scripts/generate_message_to_run_e2e_pipeline_spec.rb
@@ -235,7 +235,7 @@ RSpec.describe GenerateMessageToRunE2ePipeline, feature_category: :tooling do
Once done, apply the ✅ emoji on this comment.
- For any questions or help, reach out on the internal #quality Slack channel.
+ **Team members only:** for any questions or help, reach out on the internal `#quality` Slack channel.
<!-- Run e2e warning end -->
MARKDOWN
end
diff --git a/spec/scripts/trigger-build_spec.rb b/spec/scripts/trigger-build_spec.rb
index d3b520d385f..f46adb1a9f1 100644
--- a/spec/scripts/trigger-build_spec.rb
+++ b/spec/scripts/trigger-build_spec.rb
@@ -477,6 +477,18 @@ RSpec.describe Trigger, feature_category: :tooling do
end
end
+ describe "BRANCH_OPERATOR" do
+ before do
+ stub_env('CI_PROJECT_PATH', 'gitlab-org/cloud-native/gitlab-operator')
+ end
+
+ context 'when CI_PROJECT_PATH is gitlab-org/cloud-native/gitlab-operator' do
+ it 'sets BRANCH_OPERATOR to CI_COMMIT_REF_NAME' do
+ expect(subject.variables['BRANCH_OPERATOR']).to eq(env['CI_COMMIT_REF_NAME'])
+ end
+ end
+ end
+
describe "REVIEW_SLUG" do
before do
stub_env('CI_PROJECT_PATH', 'gitlab-org/gitlab-foss')
diff --git a/spec/serializers/activity_pub/activity_streams_serializer_spec.rb b/spec/serializers/activity_pub/activity_streams_serializer_spec.rb
new file mode 100644
index 00000000000..c74beba7a81
--- /dev/null
+++ b/spec/serializers/activity_pub/activity_streams_serializer_spec.rb
@@ -0,0 +1,157 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ActivityPub::ActivityStreamsSerializer, feature_category: :integrations do
+ let(:implementer_class) do
+ Class.new(described_class) do
+ include WithPagination
+ end
+ end
+
+ let(:entity_class) do
+ Class.new(Grape::Entity) do
+ expose :id do |*|
+ 'https://example.com/unique/url'
+ end
+
+ expose :type do |*|
+ 'Person'
+ end
+
+ expose :name do |*|
+ 'Alice'
+ end
+ end
+ end
+
+ shared_examples_for 'ActivityStreams document' do
+ it 'belongs to the ActivityStreams namespace' do
+ expect(subject['@context']).to eq 'https://www.w3.org/ns/activitystreams'
+ end
+
+ it 'has a unique identifier' do
+ expect(subject).to have_key 'id'
+ end
+
+ it 'has a type' do
+ expect(subject).to have_key 'type'
+ end
+ end
+
+ before do
+ implementer_class.entity entity_class
+ end
+
+ context 'when the serializer is not paginated' do
+ let(:resource) { build_stubbed(:release) }
+ let(:outbox_url) { 'https://example.com/unique/url/outbox' }
+
+ context 'with a valid represented entity' do
+ subject { implementer_class.new.represent(resource, outbox: outbox_url) }
+
+ it_behaves_like 'ActivityStreams document'
+
+ it 'exposes an outbox' do
+ expect(subject['outbox']).to eq 'https://example.com/unique/url/outbox'
+ end
+
+ it 'includes serialized data' do
+ expect(subject['name']).to eq 'Alice'
+ end
+ end
+
+ context 'when the represented entity provides no identifier' do
+ subject { implementer_class.new.represent(resource, outbox: outbox_url) }
+
+ before do
+ allow(entity_class).to receive(:represent).and_return({ type: 'Person' })
+ end
+
+ it 'raises an exception' do
+ expect { subject }.to raise_error(ActivityPub::ActivityStreamsSerializer::MissingIdentifierError)
+ end
+ end
+
+ context 'when the represented entity provides no type' do
+ subject { implementer_class.new.represent(resource, outbox: outbox_url) }
+
+ before do
+ allow(entity_class).to receive(:represent).and_return({ id: 'https://example.com/unique/url' })
+ end
+
+ it 'raises an exception' do
+ expect { subject }.to raise_error(ActivityPub::ActivityStreamsSerializer::MissingTypeError)
+ end
+ end
+
+ context 'when the caller provides no outbox parameter' do
+ subject { implementer_class.new.represent(resource) }
+
+ it 'raises an exception' do
+ expect { subject }.to raise_error(ActivityPub::ActivityStreamsSerializer::MissingOutboxError)
+ end
+ end
+ end
+
+ context 'when the serializer is paginated' do
+ let(:resources) { build_stubbed_list(:release, 3) }
+ let(:request) { ActionDispatch::Request.new(request_data) }
+ let(:response) { ActionDispatch::Response.new }
+ let(:url) { 'https://example.com/resource/url' }
+ let(:decorated) { implementer_class.new.with_pagination(request, response) }
+
+ before do
+ allow(resources).to receive(:page).and_return(resources)
+ allow(resources).to receive(:per).and_return(resources)
+ allow(resources).to receive(:current_page).and_return(2)
+ allow(resources).to receive(:total_pages).and_return(3)
+ allow(resources).to receive(:total_count).and_return(10)
+ allow(decorated.paginator).to receive(:paginate).and_return(resources)
+ end
+
+ context 'when no page parameter is provided' do
+ subject { decorated.represent(resources) }
+
+ let(:request_data) do
+ { "rack.url_scheme" => "https", "HTTP_HOST" => "example.com", "PATH_INFO" => '/resource/url' }
+ end
+
+ it_behaves_like 'ActivityStreams document'
+
+ it 'is an index document for the pagination' do
+ expect(subject['type']).to eq 'OrderedCollection'
+ end
+
+      it 'contains the total number of items' do
+ expect(subject['totalItems']).to eq 10
+ end
+
+ it 'contains links to first and last page' do
+ expect(subject['first']).to eq "#{url}?page=1"
+ expect(subject['last']).to eq "#{url}?page=3"
+ end
+ end
+
+ context 'when a page parameter is provided' do
+ subject { decorated.represent(resources) }
+
+ let(:request_data) do
+ { 'rack.url_scheme' => 'https', 'HTTP_HOST' => 'example.com', 'PATH_INFO' => '/resource/url',
+ 'QUERY_STRING' => 'page=2&per_page=1' }
+ end
+
+ it_behaves_like 'ActivityStreams document'
+
+ it 'is a page document' do
+ expect(subject['type']).to eq 'OrderedCollectionPage'
+ end
+
+ it 'contains navigation links' do
+ expect(subject['prev']).to be_present
+ expect(subject['next']).to be_present
+ expect(subject['partOf']).to be_present
+ end
+ end
+ end
+end
diff --git a/spec/serializers/activity_pub/project_entity_spec.rb b/spec/serializers/activity_pub/project_entity_spec.rb
new file mode 100644
index 00000000000..f273acace73
--- /dev/null
+++ b/spec/serializers/activity_pub/project_entity_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ActivityPub::ProjectEntity, feature_category: :groups_and_projects do
+ let(:project) { build_stubbed(:project, name: 'Fooify', path: 'fooify') }
+ let(:entity) { described_class.new(project) }
+
+ context 'as json' do
+ subject { entity.as_json }
+
+    it 'has the project page as id' do
+ expect(subject[:id]).to match(%r{/fooify$})
+ end
+
+ it 'is an Application actor' do
+ expect(subject[:type]).to eq 'Application'
+ end
+
+ it 'provides project name' do
+ expect(subject[:name]).to eq project.name
+ end
+
+ it 'provides a description of the project' do
+ expect(subject[:summary]).to eq project.description
+ end
+
+ it 'provides a url for web content' do
+ expect(subject[:url]).to match(%r{/fooify$})
+ end
+ end
+end
diff --git a/spec/serializers/activity_pub/release_entity_spec.rb b/spec/serializers/activity_pub/release_entity_spec.rb
new file mode 100644
index 00000000000..a473fbcc2bd
--- /dev/null
+++ b/spec/serializers/activity_pub/release_entity_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ActivityPub::ReleaseEntity, feature_category: :groups_and_projects do
+ let(:release) { build_stubbed(:release) }
+ let(:entity) { described_class.new(release, url: '/outbox') }
+
+ context 'as json' do
+ subject { entity.as_json }
+
+ it 'has tag as id' do
+ expect(subject[:id]).to match(/##{release.tag}$/)
+ end
+
+ it 'is a Create activity' do
+ expect(subject[:type]).to eq 'Create'
+ end
+
+ it 'is addressed to public' do
+ expect(subject[:to]).to eq 'https://www.w3.org/ns/activitystreams#Public'
+ end
+
+ it 'has an author' do
+ expect(subject[:actor]).to include(:id, :type, :name, :preferredUsername, :url)
+ end
+
+ it 'embeds the release as an Application actor' do
+ expect(subject[:object][:type]).to eq 'Application'
+ end
+
+ it 'provides release name' do
+ expect(subject[:object][:name]).to eq release.name
+ end
+
+ it 'provides release description' do
+ expect(subject[:object][:content]).to eq release.description
+ end
+
+ it 'provides a url for web content' do
+ expect(subject[:object][:url]).to include release.tag
+ end
+
+ it 'provides project data as context' do
+ expect(subject[:object][:context]).to include(:id, :type, :name, :summary, :url)
+ end
+ end
+end
diff --git a/spec/serializers/activity_pub/releases_actor_entity_spec.rb b/spec/serializers/activity_pub/releases_actor_entity_spec.rb
new file mode 100644
index 00000000000..fe388968867
--- /dev/null
+++ b/spec/serializers/activity_pub/releases_actor_entity_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ActivityPub::ReleasesActorEntity, feature_category: :groups_and_projects do
+ let(:project) { build_stubbed(:project, name: 'Fooify', path: 'fooify') }
+ let(:releases) { build_stubbed_list(:release, 3, project: project) }
+
+ let(:entity) { described_class.new(project) }
+
+ context 'as json' do
+ subject { entity.as_json }
+
+ it 'has releases page as id' do
+ expect(subject[:id]).to include "/fooify/-/releases"
+ end
+
+ it 'is an Application actor' do
+ expect(subject[:type]).to eq 'Application'
+ end
+
+ it 'has a recognizable username' do
+ expect(subject[:preferredUsername]).to include 'releases'
+ end
+
+ it 'has a recognizable full name' do
+ expect(subject[:name]).to eq 'Releases - Fooify'
+ end
+
+ it 'provides a description of the project' do
+ expect(subject[:content]).to eq project.description
+ end
+
+ it 'provides project data as context' do
+ expect(subject[:context]).to include(:id, :type, :name, :summary, :url)
+ expect(subject[:context][:id]).to match(%r{/fooify$})
+ end
+ end
+end
diff --git a/spec/serializers/activity_pub/releases_actor_serializer_spec.rb b/spec/serializers/activity_pub/releases_actor_serializer_spec.rb
new file mode 100644
index 00000000000..bc754eabe5c
--- /dev/null
+++ b/spec/serializers/activity_pub/releases_actor_serializer_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ActivityPub::ReleasesActorSerializer, feature_category: :groups_and_projects do
+ let(:project) { build_stubbed(:project, name: 'Fooify', path: 'fooify') }
+ let(:releases) { build_stubbed_list(:release, 3, project: project) }
+
+ context 'when there is a single object provided' do
+ subject { described_class.new.represent(project, outbox: '/outbox') }
+
+ it 'serializes the actor attributes' do
+ expect(subject).to include(:id, :type, :preferredUsername, :name, :content, :context)
+ end
+ end
+end
diff --git a/spec/serializers/activity_pub/releases_outbox_serializer_spec.rb b/spec/serializers/activity_pub/releases_outbox_serializer_spec.rb
new file mode 100644
index 00000000000..606b0130e0f
--- /dev/null
+++ b/spec/serializers/activity_pub/releases_outbox_serializer_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ActivityPub::ReleasesOutboxSerializer, feature_category: :groups_and_projects do
+ let(:decorated) { described_class.new.with_pagination(request, response) }
+
+ let(:project) { build_stubbed(:project, name: 'Fooify', path: 'fooify') }
+ let(:releases) { build_stubbed_list(:release, 3, project: project) }
+
+ before do
+ allow(releases).to receive(:page).and_return(releases)
+ allow(releases).to receive(:per).and_return(releases)
+ allow(releases).to receive(:current_page).and_return(1)
+ allow(releases).to receive(:total_pages).and_return(1)
+ allow(decorated.paginator).to receive(:paginate).and_return(releases)
+ end
+
+ context 'when there is a list of objects provided' do
+ subject { decorated.represent(releases, url: '/outbox') }
+
+ let(:request) { ActionDispatch::Request.new({ 'QUERY_STRING' => 'page=1' }) }
+ let(:response) { ActionDispatch::Response.new }
+
+ it 'is an OrderedCollectionPage document' do
+ expect(subject[:type]).to eq 'OrderedCollectionPage'
+ end
+
+ it 'serializes the releases' do
+ expect(subject[:orderedItems].count).to eq 3
+ expect(subject[:orderedItems][0]).to include(:id, :type, :to, :actor, :object)
+ end
+ end
+end
diff --git a/spec/serializers/activity_pub/user_entity_spec.rb b/spec/serializers/activity_pub/user_entity_spec.rb
new file mode 100644
index 00000000000..d9ab7a11ecf
--- /dev/null
+++ b/spec/serializers/activity_pub/user_entity_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ActivityPub::UserEntity, feature_category: :user_profile do
+ let(:user) { build_stubbed(:user, name: 'Alice', username: 'alice') }
+ let(:entity) { described_class.new(user) }
+
+ context 'as json' do
+ subject { entity.as_json }
+
+ it 'has user profile page as id' do
+ expect(subject[:id]).to match(%r{/alice$})
+ end
+
+ it 'is a Person actor' do
+ expect(subject[:type]).to eq 'Person'
+ end
+
+ it 'provides user name' do
+ expect(subject[:name]).to eq 'Alice'
+ end
+
+ it 'provides a url for web content' do
+ expect(subject[:url]).to match(%r{/alice$})
+ end
+ end
+end
diff --git a/spec/serializers/admin/abuse_report_details_entity_spec.rb b/spec/serializers/admin/abuse_report_details_entity_spec.rb
index 727716d76a4..bed9775ac8c 100644
--- a/spec/serializers/admin/abuse_report_details_entity_spec.rb
+++ b/spec/serializers/admin/abuse_report_details_entity_spec.rb
@@ -5,10 +5,11 @@ require 'spec_helper'
RSpec.describe Admin::AbuseReportDetailsEntity, feature_category: :insider_threat do
include Gitlab::Routing
- let(:report) { build_stubbed(:abuse_report) }
- let(:user) { report.user }
- let(:reporter) { report.reporter }
- let!(:other_report) { create(:abuse_report, user: user) } # rubocop:disable RSpec/FactoryBot/AvoidCreate
+ let_it_be(:report) { build_stubbed(:abuse_report) }
+ let_it_be(:user) { report.user }
+ let_it_be(:reporter) { report.reporter }
+ let_it_be(:past_report) { create_default(:abuse_report, :closed, user: user) }
+ let_it_be(:similar_open_report) { create_default(:abuse_report, user: user, category: report.category) }
let(:entity) do
described_class.new(report)
@@ -18,11 +19,10 @@ RSpec.describe Admin::AbuseReportDetailsEntity, feature_category: :insider_threa
subject(:entity_hash) { entity.as_json }
it 'exposes correct attributes' do
- expect(entity_hash.keys).to include(
+ expect(entity_hash.keys).to match_array([
:user,
- :reporter,
:report
- )
+ ])
end
it 'correctly exposes `user`', :aggregate_failures do
@@ -39,7 +39,8 @@ RSpec.describe Admin::AbuseReportDetailsEntity, feature_category: :insider_threa
:admin_path,
:plan,
:verification_state,
- :other_reports,
+ :past_closed_reports,
+ :similar_open_reports,
:most_used_ip,
:last_sign_in_ip,
:snippets_count,
@@ -53,11 +54,77 @@ RSpec.describe Admin::AbuseReportDetailsEntity, feature_category: :insider_threa
:credit_card
])
- expect(user_hash[:other_reports][0].keys).to match_array([
+ expect(user_hash[:past_closed_reports][0].keys).to match_array([
:created_at,
:category,
:report_path
])
+
+ similar_open_report_hash = user_hash[:similar_open_reports][0]
+ expect(similar_open_report_hash.keys).to match_array([
+ :id,
+ :global_id,
+ :status,
+ :message,
+ :reported_at,
+ :category,
+ :type,
+ :content,
+ :url,
+ :screenshot,
+ :update_path,
+ :moderate_user_path,
+ :reporter
+ ])
+
+ similar_reporter_hash = similar_open_report_hash[:reporter]
+ expect(similar_reporter_hash.keys).to match_array([
+ :name,
+ :username,
+ :avatar_url,
+ :path
+ ])
+ end
+
+ context 'when report is closed' do
+ let(:report) { build_stubbed(:abuse_report, :closed) }
+
+ it 'does not expose `user.similar_open_reports`' do
+ user_hash = entity_hash[:user]
+
+ expect(user_hash).not_to include(:similar_open_reports)
+ end
+ end
+
+ it 'correctly exposes `report`', :aggregate_failures do
+ report_hash = entity_hash[:report]
+
+ expect(report_hash.keys).to match_array([
+ :id,
+ :global_id,
+ :status,
+ :message,
+ :reported_at,
+ :category,
+ :type,
+ :content,
+ :url,
+ :screenshot,
+ :update_path,
+ :moderate_user_path,
+ :reporter
+ ])
+ end
+
+ it 'correctly exposes `reporter`' do
+ reporter_hash = entity_hash[:report][:reporter]
+
+ expect(reporter_hash.keys).to match_array([
+ :name,
+ :username,
+ :avatar_url,
+ :path
+ ])
end
describe 'users plan' do
@@ -110,33 +177,5 @@ RSpec.describe Admin::AbuseReportDetailsEntity, feature_category: :insider_threa
end
end
end
-
- it 'correctly exposes `reporter`' do
- reporter_hash = entity_hash[:reporter]
-
- expect(reporter_hash.keys).to match_array([
- :name,
- :username,
- :avatar_url,
- :path
- ])
- end
-
- it 'correctly exposes `report`' do
- report_hash = entity_hash[:report]
-
- expect(report_hash.keys).to match_array([
- :status,
- :message,
- :reported_at,
- :category,
- :type,
- :content,
- :url,
- :screenshot,
- :update_path,
- :moderate_user_path
- ])
- end
end
end
diff --git a/spec/serializers/admin/abuse_report_details_serializer_spec.rb b/spec/serializers/admin/abuse_report_details_serializer_spec.rb
index a42c56c0921..3bdd2e46ba3 100644
--- a/spec/serializers/admin/abuse_report_details_serializer_spec.rb
+++ b/spec/serializers/admin/abuse_report_details_serializer_spec.rb
@@ -9,11 +9,10 @@ RSpec.describe Admin::AbuseReportDetailsSerializer, feature_category: :insider_t
describe '#represent' do
it 'serializes an abuse report' do
- is_expected.to include(
+ is_expected.to match_array([
:user,
- :reporter,
:report
- )
+ ])
end
end
end
diff --git a/spec/serializers/admin/abuse_report_entity_spec.rb b/spec/serializers/admin/abuse_report_entity_spec.rb
index c7f57258f40..e84cfe73b96 100644
--- a/spec/serializers/admin/abuse_report_entity_spec.rb
+++ b/spec/serializers/admin/abuse_report_entity_spec.rb
@@ -15,15 +15,16 @@ RSpec.describe Admin::AbuseReportEntity, feature_category: :insider_threat do
subject(:entity_hash) { entity.as_json }
it 'exposes correct attributes' do
- expect(entity_hash.keys).to include(
+ expect(entity_hash.keys).to match_array([
:category,
:created_at,
:updated_at,
:count,
+ :labels,
:reported_user,
:reporter,
:report_path
- )
+ ])
end
it 'correctly exposes `reported user`' do
@@ -37,5 +38,15 @@ RSpec.describe Admin::AbuseReportEntity, feature_category: :insider_threat do
it 'correctly exposes :report_path' do
expect(entity_hash[:report_path]).to eq admin_abuse_report_path(abuse_report)
end
+
+ context 'when abuse_report_labels feature flag is disabled' do
+ before do
+ stub_feature_flags(abuse_report_labels: false)
+ end
+
+ it 'does not expose labels' do
+ expect(entity_hash.keys).not_to include(:labels)
+ end
+ end
end
end
diff --git a/spec/serializers/admin/reported_content_entity_spec.rb b/spec/serializers/admin/reported_content_entity_spec.rb
new file mode 100644
index 00000000000..0af16561005
--- /dev/null
+++ b/spec/serializers/admin/reported_content_entity_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Admin::ReportedContentEntity, feature_category: :insider_threat do
+ let_it_be(:report) { build_stubbed(:abuse_report) }
+
+ let(:entity) do
+ described_class.new(report)
+ end
+
+ describe '#as_json' do
+ subject(:entity_hash) { entity.as_json }
+
+ it 'exposes correct attributes' do
+ expect(entity_hash.keys).to match_array([
+ :id,
+ :global_id,
+ :status,
+ :message,
+ :reported_at,
+ :category,
+ :type,
+ :content,
+ :url,
+ :screenshot,
+ :reporter,
+ :update_path,
+ :moderate_user_path
+ ])
+ end
+
+ it 'includes correct value for global_id' do
+ allow(Gitlab::GlobalId).to receive(:build).with(report, { id: report.id }).and_return(:mock_global_id)
+
+ expect(entity_hash[:global_id]).to eq 'mock_global_id'
+ end
+
+ it 'correctly exposes `reporter`' do
+ reporter_hash = entity_hash[:reporter]
+
+ expect(reporter_hash.keys).to match_array([
+ :name,
+ :username,
+ :avatar_url,
+ :path
+ ])
+ end
+ end
+end
diff --git a/spec/serializers/build_details_entity_spec.rb b/spec/serializers/build_details_entity_spec.rb
index 86eaf160b38..a9d58b20861 100644
--- a/spec/serializers/build_details_entity_spec.rb
+++ b/spec/serializers/build_details_entity_spec.rb
@@ -298,5 +298,21 @@ RSpec.describe BuildDetailsEntity do
end
end
end
+
+ context 'when the build has annotations' do
+ let!(:build) { create(:ci_build) }
+ let!(:annotation) { create(:ci_job_annotation, job: build, name: 'external_links', data: [{ external_link: { label: 'URL', url: 'https://example.com/' } }]) }
+
+ it 'exposes job annotations' do
+ expect(subject[:annotations].count).to eq(1)
+ expect(subject[:annotations].first[:name]).to eq('external_links')
+ expect(subject[:annotations].first[:data]).to include(a_hash_including(
+ 'external_link' => a_hash_including(
+ 'label' => 'URL',
+ 'url' => 'https://example.com/'
+ )
+ ))
+ end
+ end
end
end
diff --git a/spec/serializers/ci/job_annotation_entity_spec.rb b/spec/serializers/ci/job_annotation_entity_spec.rb
new file mode 100644
index 00000000000..8aef6e8cce3
--- /dev/null
+++ b/spec/serializers/ci/job_annotation_entity_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::JobAnnotationEntity, feature_category: :build_artifacts do
+ let(:entity) { described_class.new(annotation) }
+
+ let(:job) { build(:ci_build) }
+ let(:annotation) do
+ build(:ci_job_annotation, job: job, name: 'external_links', data:
+ [{ external_link: { label: 'URL', url: 'https://example.com/' } }])
+ end
+
+ describe '#as_json' do
+ subject { entity.as_json }
+
+ it 'contains valid name' do
+ expect(subject[:name]).to eq 'external_links'
+ end
+
+ it 'contains external links' do
+ expect(subject[:data]).to include(a_hash_including(
+ 'external_link' => a_hash_including(
+ 'label' => 'URL',
+ 'url' => 'https://example.com/'
+ )
+ ))
+ end
+ end
+end
diff --git a/spec/serializers/codequality_degradation_entity_spec.rb b/spec/serializers/codequality_degradation_entity_spec.rb
index 32269e5475b..3d07564c5dc 100644
--- a/spec/serializers/codequality_degradation_entity_spec.rb
+++ b/spec/serializers/codequality_degradation_entity_spec.rb
@@ -2,18 +2,31 @@
require 'spec_helper'
-RSpec.describe CodequalityDegradationEntity do
+RSpec.describe CodequalityDegradationEntity, feature_category: :code_quality do
let(:entity) { described_class.new(codequality_degradation) }
describe '#as_json' do
subject { entity.as_json }
+ context 'when sast_reports_in_inline_diff is disabled' do
+ before do
+ stub_feature_flags(sast_reports_in_inline_diff: false)
+ end
+
+ let(:codequality_degradation) { build(:codequality_degradation_1) }
+
+ it 'does not contain fingerprint' do
+ expect(subject[:fingerprint]).to be_nil
+ end
+ end
+
context 'when codequality contains an error' do
context 'when line is included in location' do
let(:codequality_degradation) { build(:codequality_degradation_2) }
it 'contains correct codequality degradation details', :aggregate_failures do
expect(subject[:description]).to eq("Method `new_array` has 12 arguments (exceeds 4 allowed). Consider refactoring.")
+ expect(subject[:fingerprint]).to eq("f3bdc1e8c102ba5fbd9e7f6cda51c95e")
expect(subject[:severity]).to eq("major")
expect(subject[:file_path]).to eq("file_a.rb")
expect(subject[:line]).to eq(10)
@@ -27,6 +40,7 @@ RSpec.describe CodequalityDegradationEntity do
it 'contains correct codequality degradation details', :aggregate_failures do
expect(subject[:description]).to eq("Avoid parameter lists longer than 5 parameters. [12/5]")
+ expect(subject[:fingerprint]).to eq("ab5f8b935886b942d621399f5a2ca16e")
expect(subject[:severity]).to eq("minor")
expect(subject[:file_path]).to eq("file_b.rb")
expect(subject[:line]).to eq(10)
@@ -44,6 +58,7 @@ RSpec.describe CodequalityDegradationEntity do
it 'lowercases severity', :aggregate_failures do
expect(subject[:description]).to eq("Avoid parameter lists longer than 5 parameters. [12/5]")
+ expect(subject[:fingerprint]).to eq("ab5f8b935886b942d621399f5a2ca16e")
expect(subject[:severity]).to eq("minor")
expect(subject[:file_path]).to eq("file_b.rb")
expect(subject[:line]).to eq(10)
diff --git a/spec/serializers/codequality_reports_comparer_serializer_spec.rb b/spec/serializers/codequality_reports_comparer_serializer_spec.rb
index 50c8a69737c..7d6f49bf41d 100644
--- a/spec/serializers/codequality_reports_comparer_serializer_spec.rb
+++ b/spec/serializers/codequality_reports_comparer_serializer_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe CodequalityReportsComparerSerializer do
- let(:project) { double(:project) }
+RSpec.describe CodequalityReportsComparerSerializer, feature_category: :code_quality do
+ let(:project) { build_stubbed(:project) }
let(:serializer) { described_class.new(project: project).represent(comparer) }
let(:comparer) { Gitlab::Ci::Reports::CodequalityReportsComparer.new(base_report, head_report) }
let(:base_report) { Gitlab::Ci::Reports::CodequalityReports.new }
diff --git a/spec/serializers/deployment_entity_spec.rb b/spec/serializers/deployment_entity_spec.rb
index b0f3f328a4f..53f0e0c34e1 100644
--- a/spec/serializers/deployment_entity_spec.rb
+++ b/spec/serializers/deployment_entity_spec.rb
@@ -10,8 +10,11 @@ RSpec.describe DeploymentEntity do
let_it_be(:environment) { create(:environment, project: project) }
let_it_be_with_reload(:pipeline) { create(:ci_pipeline, project: project, user: user) }
let_it_be_with_reload(:build) { create(:ci_build, :manual, :environment_with_deployment_tier, pipeline: pipeline) }
+ let_it_be_with_reload(:bridge) do
+ create(:ci_bridge, :manual, :environment_with_deployment_tier, pipeline: pipeline, downstream: project)
+ end
- let_it_be_with_refind(:deployment) { create(:deployment, deployable: build, environment: environment) }
+ let!(:deployment) { create(:deployment, deployable: job, environment: environment, project: project) }
let(:request) { double('request') }
let(:entity) { described_class.new(deployment, request: request) }
@@ -28,22 +31,33 @@ RSpec.describe DeploymentEntity do
allow(request).to receive(:project).and_return(project)
end
- it 'exposes fields', :aggregate_failures do
- expect(subject).to include(:iid)
- expect(subject[:ref][:name]).to eq 'master'
- expect(subject).to include(:status)
- expect(subject).to include(:created_at)
- expect(subject).to include(:deployed_at)
- expect(subject).to include(:is_last)
- expect(subject).to include(:tier_in_yaml)
+ shared_examples_for 'exposes fields' do
+ it 'exposes fields', :aggregate_failures do
+ expect(subject).to include(:iid)
+ expect(subject[:ref][:name]).to eq 'master'
+ expect(subject).to include(:status)
+ expect(subject).to include(:created_at)
+ expect(subject).to include(:deployed_at)
+ expect(subject).to include(:is_last)
+ expect(subject).to include(:tier_in_yaml)
+ end
+ end
+
+ context 'when deployable is build job' do
+ let(:job) { build }
+
+ it_behaves_like 'exposes fields'
+ end
+
+ context 'when deployable is bridge job' do
+ let(:job) { bridge }
+
+ it_behaves_like 'exposes fields'
end
context 'when deployable is nil' do
let(:entity) { described_class.new(deployment, request: request, deployment_details: false) }
-
- before do
- deployment.update!(deployable: nil)
- end
+ let(:job) { nil }
it 'does not expose deployable entry' do
expect(subject).not_to include(:deployable)
@@ -51,15 +65,17 @@ RSpec.describe DeploymentEntity do
end
context 'when the pipeline has another manual action' do
- let_it_be(:other_build) do
- create(:ci_build, :manual, name: 'another deploy', pipeline: pipeline, environment: build.environment)
+ let!(:other_job) do
+ create(:ci_build, :manual, name: 'another deploy', pipeline: pipeline, environment: job.environment)
end
- let_it_be(:other_deployment) { create(:deployment, deployable: build, environment: environment) }
+ let!(:other_deployment) { create(:deployment, deployable: job, environment: environment) }
+
+ let(:job) { build }
it 'returns another manual action' do
- expect(subject[:manual_actions].count).to eq(1)
- expect(subject[:manual_actions].pluck(:name)).to match_array(['another deploy'])
+ expect(subject[:manual_actions].count).to eq(2)
+ expect(subject[:manual_actions].pluck(:name)).to match_array(['another deploy', 'bridge'])
end
context 'when user is a reporter' do
@@ -82,18 +98,22 @@ RSpec.describe DeploymentEntity do
end
describe 'scheduled_actions' do
- let(:build) { create(:ci_build, :success, pipeline: pipeline) }
-
- before do
- deployment.update!(deployable: build)
- end
+ let(:job) { create(:ci_build, :success, pipeline: pipeline) }
context 'when the same pipeline has a scheduled action' do
- let(:other_build) { create(:ci_build, :schedulable, :success, pipeline: pipeline, name: 'other build') }
- let!(:other_deployment) { create(:deployment, deployable: other_build, environment: environment) }
+ let(:other_job) { create(:ci_build, :schedulable, :success, pipeline: pipeline, name: 'other job') }
+ let!(:other_deployment) { create(:deployment, deployable: other_job, environment: environment) }
it 'returns other scheduled actions' do
- expect(subject[:scheduled_actions][0][:name]).to eq 'other build'
+ expect(subject[:scheduled_actions][0][:name]).to eq 'other job'
+ end
+
+ context 'when deployable is bridge job' do
+ let(:job) { create(:ci_bridge, :success, pipeline: pipeline) }
+
+ it 'returns nil' do
+ expect(subject[:scheduled_actions]).to be_nil
+ end
end
end
@@ -115,10 +135,6 @@ RSpec.describe DeploymentEntity do
end
describe 'playable_build' do
- before do
- deployment.update!(deployable: job)
- end
-
context 'when the deployment has a playable deployable' do
context 'when this job is build and ready to be played' do
let(:job) { create(:ci_build, :playable, :scheduled, pipeline: pipeline) }
@@ -161,6 +177,8 @@ RSpec.describe DeploymentEntity do
described_class.new(deployment, request: request, deployment_details: false)
end
+ let(:job) { build }
+
it 'does not serialize deployment details' do
expect(subject.with_indifferent_access)
.not_to include(:commit, :manual_actions, :scheduled_actions)
@@ -172,5 +190,16 @@ RSpec.describe DeploymentEntity do
.to eq(name: 'test', build_path: path)
end
end
+
+ context 'when deployable is bridge' do
+ let(:job) { bridge }
+
+ it 'only exposes deployable name and path' do
+ project_job_path(project, deployment.deployable).tap do |path|
+ expect(subject.fetch(:deployable))
+ .to eq(name: 'bridge', build_path: path)
+ end
+ end
+ end
end
end
diff --git a/spec/serializers/import/github_realtime_repo_entity_spec.rb b/spec/serializers/import/github_realtime_repo_entity_spec.rb
index 7f137366be2..bbaeb5c4ea8 100644
--- a/spec/serializers/import/github_realtime_repo_entity_spec.rb
+++ b/spec/serializers/import/github_realtime_repo_entity_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Import::GithubRealtimeRepoEntity, feature_category: :importers do
subject(:entity) { described_class.new(project) }
- let(:import_state) { instance_double(ProjectImportState, failed?: false, in_progress?: true) }
+ let(:import_state) { instance_double(ProjectImportState, failed?: false, completed?: true) }
let(:import_failures) { [instance_double(ImportFailure, exception_message: 'test error')] }
let(:project) do
instance_double(
@@ -27,7 +27,7 @@ RSpec.describe Import::GithubRealtimeRepoEntity, feature_category: :importers do
end
context 'when import state is failed' do
- let(:import_state) { instance_double(ProjectImportState, failed?: true, in_progress?: false) }
+ let(:import_state) { instance_double(ProjectImportState, failed?: true, completed?: true) }
it 'includes import_error' do
data = entity.as_json
diff --git a/spec/serializers/import/github_realtime_repo_serializer_spec.rb b/spec/serializers/import/github_realtime_repo_serializer_spec.rb
index b656132e332..825118d0f80 100644
--- a/spec/serializers/import/github_realtime_repo_serializer_spec.rb
+++ b/spec/serializers/import/github_realtime_repo_serializer_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Import::GithubRealtimeRepoSerializer, feature_category: :importer
end
describe '#represent' do
- let(:import_state) { instance_double(ProjectImportState, failed?: false, in_progress?: true) }
+ let(:import_state) { instance_double(ProjectImportState, failed?: false, completed?: false) }
let(:project) do
instance_double(
Project,
diff --git a/spec/serializers/profile/event_entity_spec.rb b/spec/serializers/profile/event_entity_spec.rb
index b1246e7e47d..3dade4210b3 100644
--- a/spec/serializers/profile/event_entity_spec.rb
+++ b/spec/serializers/profile/event_entity_spec.rb
@@ -153,6 +153,17 @@ RSpec.describe Profile::EventEntity, feature_category: :user_profile do
end
end
+ context 'without target' do
+ let(:event) do
+ build(:event, :destroyed, author: user, project: project, target_type: Milestone.to_s)
+ end
+
+ it 'only exposes target.type' do
+ expect(subject[:target][:type]).to eq(Milestone.to_s)
+ expect(subject[:target]).not_to include(:web_url)
+ end
+ end
+
context 'with resource parent' do
it 'exposes resource parent fields' do
resource_parent = event.resource_parent
diff --git a/spec/services/admin/abuse_report_labels/create_service_spec.rb b/spec/services/admin/abuse_report_labels/create_service_spec.rb
new file mode 100644
index 00000000000..168229d6ed9
--- /dev/null
+++ b/spec/services/admin/abuse_report_labels/create_service_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Admin::AbuseReportLabels::CreateService, feature_category: :insider_threat do
+ describe '#execute' do
+ let(:color) { 'red' }
+ let(:color_in_hex) { ::Gitlab::Color.of(color) }
+ let(:params) { { title: 'FancyLabel', color: color } }
+
+ subject(:execute) { described_class.new(params).execute }
+
+ shared_examples 'creates a label with the correct values' do
+ it 'creates a label with the correct values', :aggregate_failures do
+ expect { execute }.to change { Admin::AbuseReportLabel.count }.from(0).to(1)
+
+ label = Admin::AbuseReportLabel.last
+ expect(label.title).to eq params[:title]
+ expect(label.color).to eq color_in_hex
+ end
+
+ it 'returns the persisted label' do
+ result = execute
+ expect(result).to be_an_instance_of(Admin::AbuseReportLabel)
+ expect(result.persisted?).to eq true
+ end
+ end
+
+ it_behaves_like 'creates a label with the correct values'
+
+ context 'without color param' do
+ let(:params) { { title: 'FancyLabel' } }
+ let(:color_in_hex) { ::Gitlab::Color.of(Label::DEFAULT_COLOR) }
+
+ it_behaves_like 'creates a label with the correct values'
+ end
+
+ context 'with errors' do
+ let!(:existing_label) { create(:abuse_report_label, title: params[:title]) }
+
+ it 'does not create the label' do
+ expect { execute }.not_to change { Admin::AbuseReportLabel.count }
+ end
+
+ it 'returns the label with errors' do
+ label = execute
+ expect(label.errors.messages).to include({ title: ["has already been taken"] })
+ end
+ end
+ end
+end
diff --git a/spec/services/admin/abuse_reports/moderate_user_service_spec.rb b/spec/services/admin/abuse_reports/moderate_user_service_spec.rb
index 6e8a59f4e49..7e08db2b612 100644
--- a/spec/services/admin/abuse_reports/moderate_user_service_spec.rb
+++ b/spec/services/admin/abuse_reports/moderate_user_service_spec.rb
@@ -4,6 +4,10 @@ require 'spec_helper'
RSpec.describe Admin::AbuseReports::ModerateUserService, feature_category: :instance_resiliency do
let_it_be_with_reload(:abuse_report) { create(:abuse_report) }
+ let_it_be_with_reload(:similar_abuse_report) do
+ create(:abuse_report, user: abuse_report.user, category: abuse_report.category)
+ end
+
let(:action) { 'ban_user' }
let(:close) { true }
let(:reason) { 'spam' }
@@ -26,6 +30,12 @@ RSpec.describe Admin::AbuseReports::ModerateUserService, feature_category: :inst
it 'closes the report' do
expect { subject }.to change { abuse_report.closed? }.from(false).to(true)
end
+
+ context 'when similar open reports for the user exist' do
+ it 'closes the similar report' do
+ expect { subject }.to change { similar_abuse_report.reload.closed? }.from(false).to(true)
+ end
+ end
end
shared_examples 'does not close the report' do
@@ -33,6 +43,13 @@ RSpec.describe Admin::AbuseReports::ModerateUserService, feature_category: :inst
subject
expect(abuse_report.closed?).to be(false)
end
+
+ context 'when similar open reports for the user exist' do
+ it 'does not close the similar report' do
+ subject
+ expect(similar_abuse_report.reload.closed?).to be(false)
+ end
+ end
end
shared_examples 'does not record an event' do
diff --git a/spec/services/admin/abuse_reports/update_service_spec.rb b/spec/services/admin/abuse_reports/update_service_spec.rb
new file mode 100644
index 00000000000..e53b40979ec
--- /dev/null
+++ b/spec/services/admin/abuse_reports/update_service_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Admin::AbuseReports::UpdateService, feature_category: :instance_resiliency do
+ let_it_be(:current_user) { create(:admin) }
+ let_it_be(:abuse_report) { create(:abuse_report) }
+ let_it_be(:label) { create(:abuse_report_label) }
+
+ let(:params) { {} }
+ let(:service) { described_class.new(abuse_report, current_user, params) }
+
+ describe '#execute', :enable_admin_mode do
+ subject { service.execute }
+
+ shared_examples 'returns an error response' do |error|
+ it 'returns an error response' do
+ expect(subject).to be_error
+ expect(subject.message).to eq error
+ end
+ end
+
+ context 'with invalid parameters' do
+ describe 'invalid user' do
+ describe 'when no user is given' do
+ let_it_be(:current_user) { nil }
+
+ it_behaves_like 'returns an error response', 'Admin is required'
+ end
+
+ describe 'when given user is not an admin' do
+ let_it_be(:current_user) { create(:user) }
+
+ it_behaves_like 'returns an error response', 'Admin is required'
+ end
+ end
+
+ describe 'invalid label_ids' do
+ let(:params) { { label_ids: ['invalid_global_id', non_existing_record_id] } }
+
+ it 'does not update the abuse report' do
+ expect { subject }.not_to change { abuse_report.labels }
+ end
+
+ it { is_expected.to be_success }
+ end
+ end
+
+ describe 'with valid parameters' do
+ context 'when label_ids is empty' do
+ let(:params) { { label_ids: [] } }
+
+ context 'when abuse report has existing labels' do
+ before do
+ abuse_report.labels = [label]
+ end
+
+ it 'clears the abuse report labels' do
+ expect { subject }.to change { abuse_report.labels.count }.from(1).to(0)
+ end
+
+ it { is_expected.to be_success }
+ end
+
+ context 'when abuse report has no existing labels' do
+ it 'does not update the abuse report' do
+ expect { subject }.not_to change { abuse_report.labels }
+ end
+
+ it { is_expected.to be_success }
+ end
+ end
+
+ context 'when label_ids is not empty' do
+ let(:params) { { label_ids: [Gitlab::GlobalId.build(label, id: label.id).to_s] } }
+
+ it 'updates the abuse report' do
+ expect { subject }.to change { abuse_report.label_ids }.from([]).to([label.id])
+ end
+
+ it { is_expected.to be_success }
+ end
+ end
+ end
+end
diff --git a/spec/services/application_settings/update_service_spec.rb b/spec/services/application_settings/update_service_spec.rb
index 9d73a4a6cee..0b5ba1db9d4 100644
--- a/spec/services/application_settings/update_service_spec.rb
+++ b/spec/services/application_settings/update_service_spec.rb
@@ -2,10 +2,10 @@
require 'spec_helper'
-RSpec.describe ApplicationSettings::UpdateService do
+RSpec.describe ApplicationSettings::UpdateService, feature_category: :shared do
include ExternalAuthorizationServiceHelpers
- let(:application_settings) { create(:application_setting) }
+ let(:application_settings) { ::Gitlab::CurrentSettings.current_application_settings }
let(:admin) { create(:user, :admin) }
let(:params) { {} }
@@ -331,7 +331,8 @@ RSpec.describe ApplicationSettings::UpdateService do
throttle_protected_paths_enabled: 1,
throttle_protected_paths_period_in_seconds: 600,
throttle_protected_paths_requests_per_period: 100,
- protected_paths_raw: "/users/password\r\n/users/sign_in\r\n"
+ protected_paths_raw: "/users/password\r\n/users/sign_in\r\n",
+ protected_paths_for_get_request_raw: "/users/password\r\n/users/sign_up\r\n"
}
end
@@ -344,6 +345,7 @@ RSpec.describe ApplicationSettings::UpdateService do
expect(application_settings.throttle_protected_paths_period_in_seconds).to eq(600)
expect(application_settings.throttle_protected_paths_requests_per_period).to eq(100)
expect(application_settings.protected_paths).to eq(['/users/password', '/users/sign_in'])
+ expect(application_settings.protected_paths_for_get_request).to match_array(['/users/password', '/users/sign_up'])
end
end
diff --git a/spec/services/auto_merge/base_service_spec.rb b/spec/services/auto_merge/base_service_spec.rb
index d14470df9ee..be5b753f484 100644
--- a/spec/services/auto_merge/base_service_spec.rb
+++ b/spec/services/auto_merge/base_service_spec.rb
@@ -296,4 +296,9 @@ RSpec.describe AutoMerge::BaseService, feature_category: :code_review_workflow d
end
end
end
+
+ describe '#process' do
+ specify { expect(service).to respond_to :process }
+ specify { expect { service.process(nil) }.to raise_error NotImplementedError }
+ end
end
diff --git a/spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb b/spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb
deleted file mode 100644
index 9a74f5ca07a..00000000000
--- a/spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb
+++ /dev/null
@@ -1,176 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::CreatePipelineTrackersService, feature_category: :importers do
- describe '#execute!' do
- context 'when entity is group' do
- it 'creates trackers for group entity' do
- bulk_import = create(:bulk_import)
- entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
-
- described_class.new(entity).execute!
-
- expect(entity.trackers.to_a).to include(
- have_attributes(
- stage: 0, status_name: :created, relation: BulkImports::Groups::Pipelines::GroupPipeline.to_s
- ),
- have_attributes(
- stage: 1, status_name: :created, relation: BulkImports::Groups::Pipelines::GroupAttributesPipeline.to_s
- )
- )
- end
- end
-
- context 'when entity is project' do
- it 'creates trackers for project entity' do
- bulk_import = create(:bulk_import)
- entity = create(:bulk_import_entity, :project_entity, bulk_import: bulk_import)
-
- described_class.new(entity).execute!
-
- expect(entity.trackers.to_a).to include(
- have_attributes(
- stage: 0, status_name: :created, relation: BulkImports::Projects::Pipelines::ProjectPipeline.to_s
- ),
- have_attributes(
- stage: 1, status_name: :created, relation: BulkImports::Projects::Pipelines::RepositoryPipeline.to_s
- )
- )
- end
- end
-
- context 'when tracker configuration has a minimum version defined' do
- before do
- allow_next_instance_of(BulkImports::Groups::Stage) do |stage|
- allow(stage).to receive(:config).and_return(
- {
- pipeline1: { pipeline: 'PipelineClass1', stage: 0 },
- pipeline2: { pipeline: 'PipelineClass2', stage: 1, minimum_source_version: '14.10.0' },
- pipeline3: { pipeline: 'PipelineClass3', stage: 1, minimum_source_version: '15.0.0' },
- pipeline5: { pipeline: 'PipelineClass4', stage: 1, minimum_source_version: '15.1.0' },
- pipeline6: { pipeline: 'PipelineClass5', stage: 1, minimum_source_version: '16.0.0' }
- }
- )
- end
- end
-
- context 'when the source instance version is older than the tracker minimum version' do
- let_it_be(:bulk_import) { create(:bulk_import, source_version: '15.0.0') }
- let_it_be(:entity) { create(:bulk_import_entity, :group_entity, bulk_import: bulk_import) }
-
- it 'creates trackers as skipped if version requirement does not meet' do
- described_class.new(entity).execute!
-
- expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
- [:created, 'PipelineClass1'],
- [:created, 'PipelineClass2'],
- [:created, 'PipelineClass3'],
- [:skipped, 'PipelineClass4'],
- [:skipped, 'PipelineClass5']
- )
- end
-
- it 'logs an info message for the skipped pipelines' do
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger).to receive(:info).with({
- message: 'Pipeline skipped as source instance version not compatible with pipeline',
- bulk_import_entity_id: entity.id,
- bulk_import_id: entity.bulk_import_id,
- bulk_import_entity_type: entity.source_type,
- source_full_path: entity.source_full_path,
- importer: 'gitlab_migration',
- pipeline_name: 'PipelineClass4',
- minimum_source_version: '15.1.0',
- maximum_source_version: nil,
- source_version: '15.0.0'
- })
-
- expect(logger).to receive(:info).with({
- message: 'Pipeline skipped as source instance version not compatible with pipeline',
- bulk_import_entity_id: entity.id,
- bulk_import_id: entity.bulk_import_id,
- bulk_import_entity_type: entity.source_type,
- source_full_path: entity.source_full_path,
- importer: 'gitlab_migration',
- pipeline_name: 'PipelineClass5',
- minimum_source_version: '16.0.0',
- maximum_source_version: nil,
- source_version: '15.0.0'
- })
- end
-
- described_class.new(entity).execute!
- end
- end
-
- context 'when the source instance version is undefined' do
- it 'creates trackers as created' do
- bulk_import = create(:bulk_import, source_version: nil)
- entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
-
- described_class.new(entity).execute!
-
- expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
- [:created, 'PipelineClass1'],
- [:created, 'PipelineClass2'],
- [:created, 'PipelineClass3'],
- [:created, 'PipelineClass4'],
- [:created, 'PipelineClass5']
- )
- end
- end
- end
-
- context 'when tracker configuration has a maximum version defined' do
- before do
- allow_next_instance_of(BulkImports::Groups::Stage) do |stage|
- allow(stage).to receive(:config).and_return(
- {
- pipeline1: { pipeline: 'PipelineClass1', stage: 0 },
- pipeline2: { pipeline: 'PipelineClass2', stage: 1, maximum_source_version: '14.10.0' },
- pipeline3: { pipeline: 'PipelineClass3', stage: 1, maximum_source_version: '15.0.0' },
- pipeline5: { pipeline: 'PipelineClass4', stage: 1, maximum_source_version: '15.1.0' },
- pipeline6: { pipeline: 'PipelineClass5', stage: 1, maximum_source_version: '16.0.0' }
- }
- )
- end
- end
-
- context 'when the source instance version is older than the tracker maximum version' do
- it 'creates trackers as skipped if version requirement does not meet' do
- bulk_import = create(:bulk_import, source_version: '15.0.0')
- entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
-
- described_class.new(entity).execute!
-
- expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
- [:created, 'PipelineClass1'],
- [:skipped, 'PipelineClass2'],
- [:created, 'PipelineClass3'],
- [:created, 'PipelineClass4'],
- [:created, 'PipelineClass5']
- )
- end
- end
-
- context 'when the source instance version is a patch version' do
- it 'creates trackers with the same status as the non-patch source version' do
- bulk_import_1 = create(:bulk_import, source_version: '15.0.1')
- entity_1 = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import_1)
-
- bulk_import_2 = create(:bulk_import, source_version: '15.0.0')
- entity_2 = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import_2)
-
- described_class.new(entity_1).execute!
- described_class.new(entity_2).execute!
-
- trackers_1 = entity_1.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }
- trackers_2 = entity_2.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }
-
- expect(trackers_1).to eq(trackers_2)
- end
- end
- end
- end
-end
diff --git a/spec/services/bulk_imports/create_service_spec.rb b/spec/services/bulk_imports/create_service_spec.rb
index 93feab97f44..20872623802 100644
--- a/spec/services/bulk_imports/create_service_spec.rb
+++ b/spec/services/bulk_imports/create_service_spec.rb
@@ -62,6 +62,24 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do
end
end
+ # response when authorize_admin_project in the API endpoint fails
+ context 'when direct transfer status query returns a 403' do
+ it 'raises a ServiceResponse::Error' do
+ expect_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ expect(client).to receive(:validate_instance_version!).and_return(true)
+ expect(client).to receive(:get)
+ .with("/groups/full%2Fpath%2Fto%2Fgroup1/export_relations/status")
+ .and_raise(BulkImports::NetworkError, '403 Forbidden')
+ end
+
+ result = subject.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result).to be_error
+ expect(result.message).to eq("403 Forbidden")
+ end
+ end
+
context 'when direct transfer setting query returns a 404' do
it 'raises a ServiceResponse::Error' do
stub_request(:get, 'http://gitlab.example/api/v4/version?private_token=token').to_return(status: 404)
@@ -313,7 +331,7 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do
"Source full path must have a relative path structure with " \
"no HTTP protocol characters, or leading or trailing forward slashes. " \
"Path segments must not start or end with a special character, and " \
- "must not contain consecutive special characters.")
+ "must not contain consecutive special characters")
end
describe '#user-role' do
@@ -470,7 +488,7 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do
context 'when the source_full_path contains only integer characters' do
let(:query_string) { BulkImports::Projects::Graphql::GetProjectQuery.new(context: nil).to_s }
let(:graphql_response) do
- double(original_hash: { 'data' => { 'project' => { 'id' => entity_source_id } } }) # rubocop:disable RSpec/VerifiedDoubles
+ double(original_hash: { 'data' => { 'project' => { 'id' => entity_source_id } } }) # rubocop:disable RSpec/VerifiedDoubles
end
let(:params) do
diff --git a/spec/services/bulk_imports/file_download_service_spec.rb b/spec/services/bulk_imports/file_download_service_spec.rb
index c035eabf767..2197b0b4fac 100644
--- a/spec/services/bulk_imports/file_download_service_spec.rb
+++ b/spec/services/bulk_imports/file_download_service_spec.rb
@@ -82,8 +82,20 @@ RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
context 'when content-type is not valid' do
let(:content_type) { 'invalid' }
+ let(:import_logger) { instance_double(Gitlab::Import::Logger) }
+
+ before do
+ allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
+ allow(import_logger).to receive(:warn)
+ end
+
+ it 'logs and raises an error' do
+ expect(import_logger).to receive(:warn).once.with(
+ message: 'Invalid content type',
+ response_headers: headers,
+ importer: 'gitlab_migration'
+ )
- it 'raises an error' do
expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid content type')
end
end
diff --git a/spec/services/ci/components/fetch_service_spec.rb b/spec/services/ci/components/fetch_service_spec.rb
index 532098b3b20..21b7df19f4a 100644
--- a/spec/services/ci/components/fetch_service_spec.rb
+++ b/spec/services/ci/components/fetch_service_spec.rb
@@ -3,15 +3,35 @@
require 'spec_helper'
RSpec.describe Ci::Components::FetchService, feature_category: :pipeline_composition do
- let_it_be(:project) { create(:project, :repository, create_tag: 'v1.0') }
let_it_be(:user) { create(:user) }
let_it_be(:current_user) { user }
let_it_be(:current_host) { Gitlab.config.gitlab.host }
+ let_it_be(:content) do
+ <<~COMPONENT
+ job:
+ script: echo
+ COMPONENT
+ end
let(:service) do
described_class.new(address: address, current_user: current_user)
end
+ let_it_be(:project) do
+ project = create(
+ :project, :custom_repo,
+ files: {
+ 'template.yml' => content,
+ 'my-component/template.yml' => content,
+ 'my-dir/my-component/template.yml' => content
+ }
+ )
+
+ project.repository.add_tag(project.creator, 'v0.1', project.repository.commit.sha)
+
+ project
+ end
+
before do
project.add_developer(user)
end
@@ -22,19 +42,6 @@ RSpec.describe Ci::Components::FetchService, feature_category: :pipeline_composi
shared_examples 'an external component' do
shared_examples 'component address' do
context 'when content exists' do
- let(:sha) { project.commit(version).id }
-
- let(:content) do
- <<~COMPONENT
- job:
- script: echo
- COMPONENT
- end
-
- before do
- stub_project_blob(sha, component_yaml_path, content)
- end
-
it 'returns the content' do
expect(result).to be_success
expect(result.payload[:content]).to eq(content)
@@ -42,6 +49,8 @@ RSpec.describe Ci::Components::FetchService, feature_category: :pipeline_composi
end
context 'when content does not exist' do
+ let(:address) { "#{current_host}/#{component_path}@~version-does-not-exist" }
+
it 'returns an error' do
expect(result).to be_error
expect(result.reason).to eq(:content_not_found)
diff --git a/spec/services/ci/create_commit_status_service_spec.rb b/spec/services/ci/create_commit_status_service_spec.rb
new file mode 100644
index 00000000000..ec200e24c8f
--- /dev/null
+++ b/spec/services/ci/create_commit_status_service_spec.rb
@@ -0,0 +1,461 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::CreateCommitStatusService, :clean_gitlab_redis_cache, feature_category: :continuous_integration do
+ using RSpec::Parameterized::TableSyntax
+
+ subject(:response) { execute_service(params) }
+
+ let_it_be_with_refind(:project) { create(:project, :repository) }
+ let_it_be(:commit) { project.repository.commit }
+ let_it_be(:guest) { create_user(:guest) }
+ let_it_be(:reporter) { create_user(:reporter) }
+ let_it_be(:developer) { create_user(:developer) }
+
+ let(:user) { developer }
+ let(:sha) { commit.id }
+ let(:params) { { state: 'pending' } }
+ let(:job) { response.payload[:job] }
+
+ %w[pending running success failed canceled].each do |status|
+ context "for #{status}" do
+ let(:params) { { state: status } }
+
+ context 'when pipeline for sha does not exist' do
+ it 'creates commit status and sets pipeline iid' do
+ expect(response).to be_success
+ expect(job.sha).to eq(commit.id)
+ expect(job.status).to eq(status)
+ expect(job.name).to eq('default')
+ expect(job.ref).not_to be_empty
+ expect(job.target_url).to be_nil
+ expect(job.description).to be_nil
+ expect(job.pipeline_id).not_to be_nil
+
+ expect(CommitStatus.find(job.id)).to be_api_failure if status == 'failed'
+
+ expect(::Ci::Pipeline.last.iid).not_to be_nil
+ end
+ end
+ end
+ end
+
+ context 'when status transitions from pending' do
+ before do
+ execute_service(state: 'pending')
+ end
+
+ %w[running success failed canceled].each do |status|
+ context "for #{status}" do
+ let(:params) { { state: status } }
+
+ it "changes to #{status}" do
+ expect { response }
+ .to not_change { ::Ci::Pipeline.count }.from(1)
+ .and not_change { ::Ci::Stage.count }.from(1)
+ .and not_change { ::CommitStatus.count }.from(1)
+
+ expect(response).to be_success
+ expect(job.status).to eq(status)
+ end
+ end
+ end
+
+ context 'for invalid transition' do
+ let(:params) { { state: 'pending' } }
+
+ it 'returns bad request and error message' do
+ expect { response }
+ .to not_change { ::Ci::Pipeline.count }.from(1)
+ .and not_change { ::Ci::Stage.count }.from(1)
+ .and not_change { ::CommitStatus.count }.from(1)
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message).to eq(
+ "Cannot transition status via :enqueue from :pending (Reason(s): Status cannot transition via \"enqueue\")"
+ )
+ end
+ end
+ end
+
+ context 'with all optional parameters' do
+ context 'when creating a commit status' do
+ let(:params) do
+ {
+ sha: sha,
+ state: 'success',
+ context: 'coverage',
+ ref: 'master',
+ description: 'test',
+ coverage: 80.0,
+ target_url: 'http://gitlab.com/status'
+ }
+ end
+
+ it 'creates commit status' do
+ expect { response }
+ .to change { ::Ci::Pipeline.count }.by(1)
+ .and change { ::Ci::Stage.count }.by(1)
+ .and change { ::CommitStatus.count }.by(1)
+
+ expect(response).to be_success
+ expect(job.sha).to eq(commit.id)
+ expect(job.status).to eq('success')
+ expect(job.name).to eq('coverage')
+ expect(job.ref).to eq('master')
+ expect(job.coverage).to eq(80.0)
+ expect(job.description).to eq('test')
+ expect(job.target_url).to eq('http://gitlab.com/status')
+ end
+
+ context 'when merge request exists for given branch' do
+ let!(:merge_request) do
+ create(:merge_request, source_project: project, source_branch: 'master', target_branch: 'develop')
+ end
+
+ it 'sets head pipeline' do
+ expect { response }
+ .to change { ::Ci::Pipeline.count }.by(1)
+ .and change { ::Ci::Stage.count }.by(1)
+ .and change { ::CommitStatus.count }.by(1)
+
+ expect(response).to be_success
+ expect(merge_request.reload.head_pipeline).not_to be_nil
+ end
+ end
+ end
+
+ context 'when updating a commit status' do
+ let(:parameters) do
+ {
+ state: 'success',
+ name: 'coverage',
+ ref: 'master'
+ }
+ end
+
+ let(:updatable_optional_attributes) do
+ {
+ description: 'new description',
+ coverage: 90.0
+ }
+ end
+
+ let(:params) { parameters.merge(updatable_optional_attributes) }
+
+ # creating the initial commit status
+ before do
+ execute_service(
+ sha: sha,
+ state: 'running',
+ context: 'coverage',
+ ref: 'master',
+ description: 'coverage test',
+ coverage: 10.0,
+ target_url: 'http://gitlab.com/status'
+ )
+ end
+
+ it 'updates a commit status' do
+ expect { response }
+ .to not_change { ::Ci::Pipeline.count }.from(1)
+ .and not_change { ::Ci::Stage.count }.from(1)
+ .and not_change { ::CommitStatus.count }.from(1)
+
+ expect(response).to be_success
+ expect(job.sha).to eq(commit.id)
+ expect(job.status).to eq('success')
+ expect(job.name).to eq('coverage')
+ expect(job.ref).to eq('master')
+ expect(job.coverage).to eq(90.0)
+ expect(job.description).to eq('new description')
+ expect(job.target_url).to eq('http://gitlab.com/status')
+ end
+
+ context 'when the `state` parameter is sent with the same value' do
+ let(:parameters) do
+ {
+ sha: sha,
+ state: 'running',
+ name: 'coverage',
+ ref: 'master'
+ }
+ end
+
+ it 'does not update the commit status' do
+ expect { response }
+ .to not_change { ::Ci::Pipeline.count }.from(1)
+ .and not_change { ::Ci::Stage.count }.from(1)
+ .and not_change { ::CommitStatus.count }.from(1)
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message).to eq(
+ "Cannot transition status via :run from :running (Reason(s): Status cannot transition via \"run\")"
+ )
+
+ commit_status = project.commit_statuses.find_by!(name: 'coverage')
+
+ expect(commit_status.description).to eq('coverage test')
+ expect(commit_status.coverage).to eq(10.0)
+ end
+ end
+ end
+
+ context 'when a pipeline id is specified' do
+ let!(:first_pipeline) do
+ project.ci_pipelines.build(source: :push, sha: commit.id, ref: 'master', status: 'created').tap do |p|
+ p.ensure_project_iid! # Necessary to avoid cross-database modification error
+ p.save!
+ end
+ end
+
+ let!(:other_pipeline) do
+ project.ci_pipelines.build(source: :push, sha: commit.id, ref: 'master', status: 'created').tap do |p|
+ p.ensure_project_iid! # Necessary to avoid cross-database modification error
+ p.save!
+ end
+ end
+
+ let(:params) do
+ {
+ sha: sha,
+ pipeline_id: other_pipeline.id,
+ state: 'success',
+ ref: 'master'
+ }
+ end
+
+ it 'updates the correct pipeline', :sidekiq_might_not_need_inline do
+ expect { response }
+ .to not_change { ::Ci::Pipeline.count }.from(2)
+ .and change { ::Ci::Stage.count }.by(1)
+ .and change { ::CommitStatus.count }.by(1)
+
+ expect(first_pipeline.reload.status).to eq('created')
+ expect(other_pipeline.reload.status).to eq('success')
+ end
+ end
+ end
+
+ context 'when retrying a commit status' do
+ subject(:response) do
+ execute_service(state: 'failed', name: 'test', ref: 'master')
+
+ execute_service(state: 'success', name: 'test', ref: 'master')
+ end
+
+ it 'correctly posts a new commit status' do
+ expect { response }
+ .to change { ::Ci::Pipeline.count }.by(1)
+ .and change { ::Ci::Stage.count }.by(1)
+ .and change { ::CommitStatus.count }.by(2)
+
+ expect(response).to be_success
+ expect(job.sha).to eq(commit.id)
+ expect(job.status).to eq('success')
+ end
+
+ it 'retries the commit status', :sidekiq_might_not_need_inline do
+ response
+
+ expect(CommitStatus.count).to eq 2
+ expect(CommitStatus.first).to be_retried
+ expect(CommitStatus.last.pipeline).to be_success
+ end
+ end
+
+ context 'when status is invalid' do
+ let(:params) { { state: 'invalid' } }
+
+ it 'does not create commit status' do
+ expect { response }
+ .to change { ::Ci::Pipeline.count }.by(1)
+ .and change { ::Ci::Stage.count }.by(1)
+ .and not_change { ::CommitStatus.count }.from(0)
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message).to eq('invalid state')
+ end
+ end
+
+ context 'when a request is made without a state' do
+ let(:params) { {} }
+
+ it 'does not create commit status' do
+ expect { response }
+ .to not_change { ::Ci::Pipeline.count }.from(0)
+ .and not_change { ::Ci::Stage.count }.from(0)
+ .and not_change { ::CommitStatus.count }.from(0)
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message).to eq('State is required')
+ end
+ end
+
+ context 'when updating a protected ref' do
+ let(:params) { { state: 'running', ref: 'master' } }
+
+ before do
+ create(:protected_branch, project: project, name: 'master')
+ end
+
+ context 'with user as developer' do
+ let(:user) { developer }
+
+ it 'does not create commit status' do
+ expect { response }
+ .to change { ::Ci::Pipeline.count }.by(1)
+ .and not_change { ::Ci::Stage.count }.from(0)
+ .and not_change { ::CommitStatus.count }.from(0)
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:forbidden)
+ expect(response.message).to eq('403 Forbidden')
+ end
+ end
+
+ context 'with user as maintainer' do
+ let(:user) { create_user(:maintainer) }
+
+ it 'creates commit status' do
+ expect { response }
+ .to change { ::Ci::Pipeline.count }.by(1)
+ .and change { ::Ci::Stage.count }.by(1)
+ .and change { ::CommitStatus.count }.by(1)
+
+ expect(response).to be_success
+ end
+ end
+ end
+
+ context 'when commit SHA is invalid' do
+ let(:sha) { 'invalid_sha' }
+ let(:params) { { state: 'running', sha: sha } }
+
+ it 'returns not found error' do
+ expect { response }
+ .to not_change { ::Ci::Pipeline.count }.from(0)
+ .and not_change { ::Ci::Stage.count }.from(0)
+ .and not_change { ::CommitStatus.count }.from(0)
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:not_found)
+ expect(response.message).to eq('404 Commit Not Found')
+ end
+ end
+
+ context 'when target URL is an invalid address' do
+ let(:params) { { state: 'pending', target_url: 'invalid url' } }
+
+ it 'responds with bad request status and validation errors' do
+ expect { response }
+ .to change { ::Ci::Pipeline.count }.by(1)
+ .and change { ::Ci::Stage.count }.by(1)
+ .and not_change { ::CommitStatus.count }.from(0)
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message[:target_url])
+ .to include 'is blocked: Only allowed schemes are http, https'
+ end
+ end
+
+ context 'when target URL is an unsupported scheme' do
+ let(:params) { { state: 'pending', target_url: 'git://example.com' } }
+
+ it 'responds with bad request status and validation errors' do
+ expect { response }
+ .to change { ::Ci::Pipeline.count }.by(1)
+ .and change { ::Ci::Stage.count }.by(1)
+ .and not_change { ::CommitStatus.count }.from(0)
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message[:target_url])
+ .to include 'is blocked: Only allowed schemes are http, https'
+ end
+ end
+
+ context 'when trying to update a status of a different type' do
+ let!(:pipeline) { create(:ci_pipeline, project: project, sha: sha, ref: 'ref') }
+ let!(:ci_build) { create(:ci_build, pipeline: pipeline, name: 'test-job') }
+ let(:params) { { state: 'pending', name: 'test-job' } }
+
+ before do
+ execute_service(params)
+ end
+
+ it 'responds with bad request status and validation errors' do
+ expect { response }
+ .to not_change { ::Ci::Pipeline.count }.from(1)
+ .and not_change { ::Ci::Stage.count }.from(2)
+ .and not_change { ::CommitStatus.count }.from(1)
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message[:name])
+ .to include 'has already been taken'
+ end
+ end
+
+ context 'with partitions', :ci_partitionable do
+ let(:current_partition_id) { ci_testing_partition_id }
+ let(:params) { { state: 'running' } }
+
+ before do
+ allow(Ci::Pipeline)
+ .to receive(:current_partition_value) { current_partition_id }
+ end
+
+ it 'creates records in the current partition' do
+ expect { response }
+ .to change { ::Ci::Pipeline.count }.by(1)
+ .and change { ::Ci::Stage.count }.by(1)
+ .and change { ::CommitStatus.count }.by(1)
+
+ expect(response).to be_success
+
+ status = CommitStatus.find(job.id)
+
+ expect(status.partition_id).to eq(current_partition_id)
+ expect(status.pipeline.partition_id).to eq(current_partition_id)
+ end
+ end
+
+ context 'for a race condition' do
+ let(:licenses_snyk_params) { { state: 'running', name: 'licenses', description: 'testing' } }
+ let(:security_snyk_params) { { state: 'running', name: 'security', description: 'testing' } }
+ let(:snyk_params_list) { [licenses_snyk_params, security_snyk_params] }
+
+ it 'creates one pipeline and two jobs (one for licenses, one for security)' do
+ expect do
+ snyk_params_list.map do |snyk_params|
+ Thread.new do
+ response = execute_service(snyk_params)
+ expect(response).to be_success
+ end
+ end.each(&:join)
+ end
+ .to change { ::Ci::Pipeline.count }.by(1)
+ .and change { ::Ci::Stage.count }.by(1)
+ .and change { ::CommitStatus.count }.by(2)
+ end
+ end
+
+ def create_user(access_level_trait)
+ user = create(:user)
+ create(:project_member, access_level_trait, user: user, project: project)
+ user
+ end
+
+ def execute_service(params = self.params)
+ described_class
+ .new(project, user, params)
+ .execute(optional_commit_status_params: params.slice(*%i[target_url description coverage]))
+ end
+end
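
The new service spec above leans on composed block matchers (`change { ... }.by(n).and not_change { ... }.from(0)`) to assert every side effect of one call in a single expectation. A minimal, standalone sketch of that pattern, using a hypothetical Counter class rather than the GitLab models:

# Standalone illustration only; Counter is hypothetical and not part of the GitLab code base.
require 'rspec/autorun'

RSpec::Matchers.define_negated_matcher :not_change, :change

Counter = Struct.new(:value) do
  def bump!
    self.value += 1
  end
end

RSpec.describe Counter do
  it 'changes one counter while leaving another untouched' do
    a = described_class.new(0)
    b = described_class.new(0)

    expect { a.bump! }
      .to change { a.value }.by(1)
      .and not_change { b.value }.from(0)
  end
end
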
diff --git a/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb b/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
index e6bdb2a3fc6..07bc3aa28cf 100644
--- a/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
+++ b/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
@@ -21,6 +21,23 @@ RSpec.describe Ci::CreatePipelineService, '#execute', :yaml_processor_feature_fl
create_gitlab_ci_yml(downstream_project, downstream_config)
end
+ it_behaves_like 'creating a pipeline with environment keyword' do
+ let(:execute_service) { service.execute(:push) }
+ let(:upstream_config) { config }
+ let(:expected_deployable_class) { Ci::Bridge }
+ let(:expected_deployment_status) { 'running' }
+ let(:expected_job_status) { 'running' }
+ let(:downstream_config) { YAML.dump({ deploy: { script: 'deploy' } }) }
+ let(:base_config) do
+ {
+ trigger: {
+ project: downstream_project.full_path,
+ strategy: 'depend'
+ }
+ }
+ end
+ end
+
context 'with resource group', :aggregate_failures do
let(:upstream_config) do
<<~YAML
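
The `it_behaves_like 'creating a pipeline with environment keyword'` blocks added in this and the following files parameterize a shared example group purely through `let` overrides; the shared example's own body is not part of this diff. A tiny standalone sketch of the mechanism, with hypothetical names:

require 'rspec/autorun'

RSpec.shared_examples 'reporting the expected status' do
  # expected_job_status and base_config are supplied by the including context.
  it 'sees the overriding let definitions' do
    expect(expected_job_status).to eq(base_config[:status])
  end
end

RSpec.describe 'a pipeline-like spec' do
  it_behaves_like 'reporting the expected status' do
    let(:expected_job_status) { 'running' }
    let(:base_config) { { status: 'running' } }
  end
end
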
diff --git a/spec/services/ci/create_pipeline_service/environment_spec.rb b/spec/services/ci/create_pipeline_service/environment_spec.rb
index 96e54af43cd..e900f4ba10c 100644
--- a/spec/services/ci/create_pipeline_service/environment_spec.rb
+++ b/spec/services/ci/create_pipeline_service/environment_spec.rb
@@ -14,6 +14,26 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
project.add_developer(developer)
end
+ it_behaves_like 'creating a pipeline with environment keyword' do
+ let!(:project) { create(:project, :repository) }
+ let(:execute_service) { service.execute(:push) }
+ let(:expected_deployable_class) { Ci::Build }
+ let(:expected_deployment_status) { 'created' }
+ let(:expected_job_status) { 'pending' }
+ let(:expected_tag_names) { %w[hello] }
+ let(:base_config) do
+ {
+ script: 'deploy',
+ tags: ['hello']
+ }
+ end
+
+ before do
+ project.add_developer(developer) # rubocop:disable RSpec/BeforeAllRoleAssignment
+ project.repository.create_file(developer, '.gitlab-ci.yml', config, branch_name: 'master', message: 'test')
+ end
+ end
+
describe '#execute' do
subject { service.execute(:push).payload }
@@ -104,6 +124,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
expect(pipeline).to be_created_successfully
expect(Environment.find_by_name('test/deploy/2')).to be_persisted
expect(pipeline.builds.size).to eq(1)
+ # Clearing cache of BatchLoader in `build.persisted_environment` for fetching fresh data.
+ BatchLoader::Executor.clear_current
expect(build.persisted_environment.name).to eq('test/deploy/2')
expect(build.name).to eq('deploy-review-app-2')
expect(build.environment).to eq('test/$CI_JOB_STAGE/2')
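
The `BatchLoader::Executor.clear_current` call added above drops the per-request batch-loader cache so `build.persisted_environment` is re-read instead of served from memoized data. A rough standalone sketch of that caching behaviour with the batch-loader gem (hypothetical lookup table; assumes the gem's default per-executor cache):

require 'batch_loader'

STORE = { 1 => 'test/deploy/1' } # hypothetical backing data

def environment_name(id)
  BatchLoader.for(id).batch do |ids, loader|
    ids.each { |i| loader.call(i, STORE[i]) }
  end
end

environment_name(1).to_s            # => "test/deploy/1", now cached for this executor
STORE[1] = 'test/deploy/2'
environment_name(1).to_s            # still the cached "test/deploy/1"

BatchLoader::Executor.clear_current # drop the cache, as the spec does
environment_name(1).to_s            # => "test/deploy/2" after the cache is cleared
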
diff --git a/spec/services/ci/create_pipeline_service/logger_spec.rb b/spec/services/ci/create_pipeline_service/logger_spec.rb
index 6a1987fcc7c..6b4a1809d9a 100644
--- a/spec/services/ci/create_pipeline_service/logger_spec.rb
+++ b/spec/services/ci/create_pipeline_service/logger_spec.rb
@@ -139,74 +139,5 @@ RSpec.describe Ci::CreatePipelineService, # rubocop: disable RSpec/FilePath
expect(pipeline).to be_created_successfully
end
end
-
- describe 'pipeline includes count' do
- before do
- stub_const('Gitlab::Ci::Config::External::Context::TEMP_MAX_INCLUDES', 2)
- end
-
- context 'when the includes count exceeds the maximum' do
- before do
- allow_next_instance_of(Ci::Pipeline) do |pipeline|
- allow(pipeline).to receive(:config_metadata)
- .and_return({ includes: [{ file: 1 }, { file: 2 }, { file: 3 }] })
- end
- end
-
- it 'creates a log entry' do
- expect(Gitlab::AppJsonLogger)
- .to receive(:info)
- .with(a_hash_including({ 'pipeline_includes_count' => 3 }))
- .and_call_original
-
- expect(pipeline).to be_created_successfully
- end
- end
-
- context 'when the includes count does not exceed the maximum' do
- before do
- allow_next_instance_of(Ci::Pipeline) do |pipeline|
- allow(pipeline).to receive(:config_metadata)
- .and_return({ includes: [{ file: 1 }, { file: 2 }] })
- end
- end
-
- it 'does not create a log entry but it collects the data' do
- expect(Gitlab::AppJsonLogger).not_to receive(:info)
- expect(pipeline).to be_created_successfully
-
- expect(service.logger.observations_hash)
- .to match(a_hash_including({ 'pipeline_includes_count' => 2 }))
- end
- end
-
- context 'when the includes data is nil' do
- before do
- allow_next_instance_of(Ci::Pipeline) do |pipeline|
- allow(pipeline).to receive(:config_metadata)
- .and_return({})
- end
- end
-
- it 'does not create a log entry' do
- expect(Gitlab::AppJsonLogger).not_to receive(:info)
- expect(pipeline).to be_created_successfully
- end
- end
-
- context 'when the pipeline config_metadata is nil' do
- before do
- allow_next_instance_of(Ci::Pipeline) do |pipeline|
- allow(pipeline).to receive(:config_metadata)
- .and_return(nil)
- end
- end
-
- it 'does not create a log entry but it collects the data' do
- expect(Gitlab::AppJsonLogger).not_to receive(:info)
- expect(pipeline).to be_created_successfully
- end
- end
- end
end
end
diff --git a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
index e644273df9a..65180ac055f 100644
--- a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
+++ b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
@@ -20,7 +20,29 @@ RSpec.describe Ci::CreatePipelineService, '#execute', :yaml_processor_feature_fl
before do
project.add_developer(user)
- stub_ci_pipeline_yaml_file(config)
+ end
+
+ it_behaves_like 'creating a pipeline with environment keyword' do
+ let!(:project) { create(:project, :repository) }
+ let(:execute_service) { service.execute(:push) }
+ let(:expected_deployable_class) { Ci::Bridge }
+ let(:expected_deployment_status) { 'running' }
+ let(:expected_job_status) { 'running' }
+ let(:child_config) { YAML.dump({ deploy: { script: 'deploy' } }) }
+ let(:base_config) do
+ {
+ trigger: {
+ include: [{ local: '.child.yml' }],
+ strategy: 'depend'
+ }
+ }
+ end
+
+ before do
+ project.add_developer(user)
+ project.repository.create_file(user, '.gitlab-ci.yml', config, branch_name: 'master', message: 'ok')
+ project.repository.create_file(user, '.child.yml', child_config, branch_name: 'master', message: 'ok')
+ end
end
shared_examples 'successful creation' do
@@ -67,6 +89,10 @@ RSpec.describe Ci::CreatePipelineService, '#execute', :yaml_processor_feature_fl
YAML
end
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
it_behaves_like 'successful creation' do
let(:expected_bridge_options) do
{
@@ -158,6 +184,10 @@ RSpec.describe Ci::CreatePipelineService, '#execute', :yaml_processor_feature_fl
end
describe 'child pipeline triggers' do
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
context 'when YAML is valid' do
let(:config) do
<<~YAML
diff --git a/spec/services/ci/create_pipeline_service/rules_spec.rb b/spec/services/ci/create_pipeline_service/rules_spec.rb
index a81d1487fab..05fa3cfeba3 100644
--- a/spec/services/ci/create_pipeline_service/rules_spec.rb
+++ b/spec/services/ci/create_pipeline_service/rules_spec.rb
@@ -298,6 +298,46 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
end
end
+ context 'with CI_ENVIRONMENT_* predefined variables' do
+ let(:config) do
+ <<-EOY
+ deploy:
+ script: "deploy"
+ environment:
+ name: review/$CI_COMMIT_REF_NAME
+ deployment_tier: development
+ url: https://gitlab.com
+ rules:
+ - if: $CI_ENVIRONMENT_NAME =~ /^review\// && $CI_ENVIRONMENT_ACTION == "start" && $CI_ENVIRONMENT_TIER == "development" && $CI_ENVIRONMENT_URL == "https://gitlab.com"
+
+ teardown:
+ script: "teardown"
+ environment:
+ name: review/$CI_COMMIT_REF_NAME
+ deployment_tier: development
+ url: https://gitlab.com
+ action: stop
+ rules:
+ - if: $CI_ENVIRONMENT_NAME =~ /^review\// && $CI_ENVIRONMENT_ACTION == "stop" && $CI_ENVIRONMENT_TIER == "development" && $CI_ENVIRONMENT_URL == "https://gitlab.com"
+ when: manual
+ EOY
+ end
+
+ it 'assigns correct attributes to the jobs' do
+ expect(pipeline).to be_persisted
+
+ BatchLoader::Executor.clear_current
+
+ expect(build_names).to contain_exactly('deploy', 'teardown')
+ expect(find_job('deploy').when).to eq('on_success')
+ expect(find_job('teardown').when).to eq('manual')
+ expect(find_job('deploy').allow_failure).to eq(false)
+ expect(find_job('teardown').allow_failure).to eq(false)
+ expect(find_job('deploy').actual_persisted_environment.name).to eq('review/master')
+ expect(find_job('teardown').actual_persisted_environment.name).to eq('review/master')
+ end
+ end
+
context 'with simple if: clauses' do
let(:config) do
<<-EOY
diff --git a/spec/services/ci/create_pipeline_service/variables_spec.rb b/spec/services/ci/create_pipeline_service/variables_spec.rb
index aac9a0c9c2d..3039ffb2751 100644
--- a/spec/services/ci/create_pipeline_service/variables_spec.rb
+++ b/spec/services/ci/create_pipeline_service/variables_spec.rb
@@ -90,6 +90,39 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
)
end
end
+
+ context 'when trigger variables have CI_ENVIRONMENT_* predefined variables' do
+ let(:config) do
+ <<-YAML
+ child:
+ variables:
+ UPSTREAM_ENVIRONMENT_NAME: $CI_ENVIRONMENT_NAME
+ UPSTREAM_ENVIRONMENT_TIER: $CI_ENVIRONMENT_TIER
+ UPSTREAM_ENVIRONMENT_URL: $CI_ENVIRONMENT_URL
+ UPSTREAM_ENVIRONMENT_ACTION: $CI_ENVIRONMENT_ACTION
+ environment:
+ name: review/$CI_COMMIT_REF_NAME
+ deployment_tier: testing
+ url: https://gitlab.com
+ action: start
+ trigger:
+ include: child.yml
+ YAML
+ end
+
+ let(:child) { find_job('child') }
+
+ it 'creates the pipeline with a trigger job that has downstream_variables' do
+ expect(pipeline).to be_created_successfully
+
+ expect(child.downstream_variables).to include(
+ { key: 'UPSTREAM_ENVIRONMENT_NAME', value: 'review/master' },
+ { key: 'UPSTREAM_ENVIRONMENT_TIER', value: 'testing' },
+ { key: 'UPSTREAM_ENVIRONMENT_URL', value: 'https://gitlab.com' },
+ { key: 'UPSTREAM_ENVIRONMENT_ACTION', value: 'start' }
+ )
+ end
+ end
end
private
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index a28ede89cee..a28dd9e7a55 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -748,131 +748,6 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
end
end
- context 'with environment' do
- before do
- config = YAML.dump(
- deploy: {
- environment: { name: "review/$CI_COMMIT_REF_NAME" },
- script: 'ls',
- tags: ['hello']
- })
-
- stub_ci_pipeline_yaml_file(config)
- end
-
- it 'creates the environment with tags', :sidekiq_inline do
- result = execute_service.payload
-
- expect(result).to be_persisted
- expect(Environment.find_by(name: "review/master")).to be_present
- expect(result.builds.first.tag_list).to contain_exactly('hello')
- expect(result.builds.first.deployment).to be_persisted
- expect(result.builds.first.deployment.deployable).to be_a(Ci::Build)
- end
- end
-
- context 'with environment with auto_stop_in' do
- before do
- config = YAML.dump(
- deploy: {
- environment: { name: "review/$CI_COMMIT_REF_NAME", auto_stop_in: '1 day' },
- script: 'ls'
- })
-
- stub_ci_pipeline_yaml_file(config)
- end
-
- it 'creates the environment with auto stop in' do
- result = execute_service.payload
-
- expect(result).to be_persisted
- expect(result.builds.first.options[:environment][:auto_stop_in]).to eq('1 day')
- end
- end
-
- context 'with environment name including persisted variables' do
- before do
- config = YAML.dump(
- deploy: {
- environment: { name: "review/id1$CI_PIPELINE_ID/id2$CI_JOB_ID" },
- script: 'ls'
- }
- )
-
- stub_ci_pipeline_yaml_file(config)
- end
-
- it 'skips persisted variables in environment name' do
- result = execute_service.payload
-
- expect(result).to be_persisted
- expect(Environment.find_by(name: "review/id1/id2")).to be_present
- end
- end
-
- context 'environment with Kubernetes configuration' do
- let(:kubernetes_namespace) { 'custom-namespace' }
-
- before do
- config = YAML.dump(
- deploy: {
- environment: {
- name: "environment-name",
- kubernetes: { namespace: kubernetes_namespace }
- },
- script: 'ls'
- }
- )
-
- stub_ci_pipeline_yaml_file(config)
- end
-
- it 'stores the requested namespace' do
- result = execute_service.payload
- build = result.builds.first
-
- expect(result).to be_persisted
- expect(build.options.dig(:environment, :kubernetes, :namespace)).to eq(kubernetes_namespace)
- end
- end
-
- context 'when environment with invalid name' do
- before do
- config = YAML.dump(deploy: { environment: { name: 'name,with,commas' }, script: 'ls' })
- stub_ci_pipeline_yaml_file(config)
- end
-
- it 'does not create an environment' do
- expect do
- result = execute_service.payload
-
- expect(result).to be_persisted
- end.not_to change { Environment.count }
- end
- end
-
- context 'when environment with duplicate names' do
- let(:ci_yaml) do
- {
- deploy: { environment: { name: 'production' }, script: 'ls' },
- deploy_2: { environment: { name: 'production' }, script: 'ls' }
- }
- end
-
- before do
- stub_ci_pipeline_yaml_file(YAML.dump(ci_yaml))
- end
-
- it 'creates a pipeline with the environment', :sidekiq_inline do
- result = execute_service.payload
-
- expect(result).to be_persisted
- expect(Environment.find_by(name: 'production')).to be_present
- expect(result.builds.first.deployment).to be_persisted
- expect(result.builds.first.deployment.deployable).to be_a(Ci::Build)
- end
- end
-
context 'when builds with auto-retries are configured' do
let(:pipeline) { execute_service.payload }
let(:rspec_job) { pipeline.builds.find_by(name: 'rspec') }
@@ -1294,55 +1169,6 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
end
end
- context 'when pipeline has a job with environment' do
- let(:pipeline) { execute_service.payload }
-
- before do
- stub_ci_pipeline_yaml_file(YAML.dump(config))
- end
-
- context 'when environment name is valid' do
- let(:config) do
- {
- review_app: {
- script: 'deploy',
- environment: {
- name: 'review/${CI_COMMIT_REF_NAME}',
- url: 'http://${CI_COMMIT_REF_SLUG}-staging.example.com'
- }
- }
- }
- end
-
- it 'has a job with environment', :sidekiq_inline do
- expect(pipeline.builds.count).to eq(1)
- expect(pipeline.builds.first.persisted_environment.name).to eq('review/master')
- expect(pipeline.builds.first.persisted_environment.name).to eq('review/master')
- expect(pipeline.builds.first.deployment).to be_created
- end
- end
-
- context 'when environment name is invalid' do
- let(:config) do
- {
- 'job:deploy-to-test-site': {
- script: 'deploy',
- environment: {
- name: '${CI_JOB_NAME}',
- url: 'https://$APP_URL'
- }
- }
- }
- end
-
- it 'has a job without environment' do
- expect(pipeline.builds.count).to eq(1)
- expect(pipeline.builds.first.persisted_environment).to be_nil
- expect(pipeline.builds.first.deployment).to be_nil
- end
- end
- end
-
describe 'Pipeline for external pull requests' do
let(:response) do
execute_service(
diff --git a/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb b/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
index 82a8e425cd0..fffac0fd64b 100644
--- a/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
+++ b/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
@@ -35,20 +35,6 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
expect(build_statuses(pipeline)).to contain_exactly('pending')
expect(build_statuses(old_pipeline)).to contain_exactly('pending')
end
-
- context 'with lower_interval_for_canceling_redundant_pipelines disabled' do
- before do
- stub_feature_flags(lower_interval_for_canceling_redundant_pipelines: false)
- end
-
- it 'cancels pipelines created more than 3 days ago' do
- execute
-
- expect(build_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'canceled')
- expect(build_statuses(pipeline)).to contain_exactly('pending')
- expect(build_statuses(old_pipeline)).to contain_exactly('canceled')
- end
- end
end
end
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 61fec82c688..83bae16a30e 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -26,7 +26,9 @@ module Ci
end
it 'result is valid if replica has caught up', :aggregate_failures do
- expect(ApplicationRecord.sticking).to receive(:all_caught_up?).with(:runner, runner.id) { true }
+ expect(ApplicationRecord.sticking).to receive(:find_caught_up_replica)
+ .with(:runner, runner.id, use_primary_on_failure: false)
+ .and_return(true)
expect { execute }.not_to change { Ci::RunnerManagerBuild.count }.from(0)
expect(execute).to be_valid
@@ -35,8 +37,9 @@ module Ci
end
it 'result is invalid if replica has not caught up', :aggregate_failures do
- expect(ApplicationRecord.sticking).to receive(:all_caught_up?)
- .with(:runner, shared_runner.id) { false }
+ expect(ApplicationRecord.sticking).to receive(:find_caught_up_replica)
+ .with(:runner, shared_runner.id, use_primary_on_failure: false)
+ .and_return(false)
expect(subject).not_to be_valid
expect(subject.build).to be_nil
@@ -948,8 +951,8 @@ module Ci
let(:runner) { create(:ci_runner, :instance, tag_list: %w(tag1 tag2)) }
let(:expected_shared_runner) { true }
let(:expected_shard) { ::Gitlab::Ci::Queue::Metrics::DEFAULT_METRICS_SHARD }
- let(:expected_jobs_running_for_project_first_job) { 0 }
- let(:expected_jobs_running_for_project_third_job) { 2 }
+ let(:expected_jobs_running_for_project_first_job) { '0' }
+ let(:expected_jobs_running_for_project_third_job) { '2' }
it_behaves_like 'metrics collector'
@@ -969,7 +972,7 @@ module Ci
context 'when max running jobs bucket size is exceeded' do
before do
- stub_const('Gitlab::Ci::Queue::Metrics::JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET', 1)
+ stub_const('Project::INSTANCE_RUNNER_RUNNING_JOBS_MAX_BUCKET', 1)
end
let(:expected_jobs_running_for_project_third_job) { '1+' }
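
The changed expectations above ('0', '2', and '1+' once `Project::INSTANCE_RUNNER_RUNNING_JOBS_MAX_BUCKET` is stubbed to 1) suggest the running-jobs metric is now labelled with a capped string bucket. A hypothetical condensation of that labelling, not taken from the GitLab code:

def running_jobs_bucket(count, max_bucket)
  count > max_bucket ? "#{max_bucket}+" : count.to_s
end

running_jobs_bucket(0, 5) # => "0"
running_jobs_bucket(2, 5) # => "2"
running_jobs_bucket(2, 1) # => "1+"
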
diff --git a/spec/services/ci/runners/set_runner_associated_projects_service_spec.rb b/spec/services/ci/runners/set_runner_associated_projects_service_spec.rb
index d952fca25a5..8d612174a0b 100644
--- a/spec/services/ci/runners/set_runner_associated_projects_service_spec.rb
+++ b/spec/services/ci/runners/set_runner_associated_projects_service_spec.rb
@@ -97,7 +97,7 @@ RSpec.describe ::Ci::Runners::SetRunnerAssociatedProjectsService, '#execute', fe
end
expect(execute).to be_error
- expect(runner.reload.projects).to eq(original_projects)
+ expect(runner.reload.projects.order(:id)).to eq(original_projects)
end
end
@@ -117,7 +117,7 @@ RSpec.describe ::Ci::Runners::SetRunnerAssociatedProjectsService, '#execute', fe
it 'returns error response and rolls back transaction' do
expect(execute).to be_error
expect(execute.errors).to contain_exactly('user is not authorized to add runners to project')
- expect(runner.reload.projects).to eq(original_projects)
+ expect(runner.reload.projects.order(:id)).to eq(original_projects)
end
end
end
diff --git a/spec/services/concerns/rate_limited_service_spec.rb b/spec/services/concerns/rate_limited_service_spec.rb
index 2172c756ecf..2cfc6692f23 100644
--- a/spec/services/concerns/rate_limited_service_spec.rb
+++ b/spec/services/concerns/rate_limited_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe RateLimitedService, feature_category: :rate_limiting do
let(:key) { :issues_create }
let(:scope) { [:container, :current_user] }
- let(:opts) { { scope: scope, users_allowlist: -> { [User.support_bot.username] } } }
+ let(:opts) { { scope: scope, users_allowlist: -> { [Users::Internal.support_bot.username] } } }
let(:rate_limiter) { ::Gitlab::ApplicationRateLimiter }
describe 'RateLimitedError' do
diff --git a/spec/services/concerns/services/return_service_responses_spec.rb b/spec/services/concerns/services/return_service_responses_spec.rb
new file mode 100644
index 00000000000..3589b952e87
--- /dev/null
+++ b/spec/services/concerns/services/return_service_responses_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Services::ReturnServiceResponses, feature_category: :rate_limiting do
+ subject(:object) { Class.new { include Services::ReturnServiceResponses }.new }
+
+ let(:message) { 'a delivering message' }
+ let(:payload) { 'string payload' }
+
+ describe '#success' do
+ it 'returns a ServiceResponse instance' do
+ response = object.success(payload)
+ expect(response).to be_an(ServiceResponse)
+ expect(response).to be_success
+ expect(response.message).to be_nil
+ expect(response.payload).to eq(payload)
+ expect(response.http_status).to eq(:ok)
+ end
+ end
+
+ describe '#error' do
+ it 'returns a ServiceResponse instance' do
+ response = object.error(message, :not_found, pass_back: payload)
+ expect(response).to be_an(ServiceResponse)
+ expect(response).to be_error
+ expect(response.message).to eq(message)
+ expect(response.payload).to eq(payload)
+ expect(response.http_status).to eq(:not_found)
+ end
+ end
+end
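
The new spec pins down the contract of `Services::ReturnServiceResponses`: `success` wraps a payload in a successful ServiceResponse with an :ok status, while `error` carries a message, an HTTP status, and a pass-back payload. The concern's implementation is not shown in this diff; a hypothetical version consistent with these examples might look like:

# Hypothetical sketch only; the real concern lives elsewhere in the application code.
module Services
  module ReturnServiceResponses
    def success(payload)
      ServiceResponse.success(payload: payload)
    end

    def error(message, http_status, pass_back: {})
      ServiceResponse.error(message: message, http_status: http_status, payload: pass_back)
    end
  end
end
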
diff --git a/spec/services/deployments/update_environment_service_spec.rb b/spec/services/deployments/update_environment_service_spec.rb
index 0a93e300eb6..79bf0d972d4 100644
--- a/spec/services/deployments/update_environment_service_spec.rb
+++ b/spec/services/deployments/update_environment_service_spec.rb
@@ -79,6 +79,27 @@ RSpec.describe Deployments::UpdateEnvironmentService, feature_category: :continu
expect(subject.execute).to eq(deployment)
end
+ context 'when deployable is a bridge job' do
+ let(:job) do
+ create(:ci_bridge,
+ :with_deployment,
+ pipeline: pipeline,
+ ref: 'master',
+ tag: false,
+ environment: environment_name,
+ options: { environment: options },
+ project: project)
+ end
+
+ it 'creates ref' do
+ expect_any_instance_of(Repository)
+ .to receive(:create_ref)
+ .with(deployment.sha, "refs/environments/production/deployments/#{deployment.iid}")
+
+ service.execute
+ end
+ end
+
context 'when start action is defined' do
let(:options) { { name: 'production', action: 'start' } }
diff --git a/spec/services/design_management/delete_designs_service_spec.rb b/spec/services/design_management/delete_designs_service_spec.rb
index 22570a14443..b6a80cf26cc 100644
--- a/spec/services/design_management/delete_designs_service_spec.rb
+++ b/spec/services/design_management/delete_designs_service_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe DesignManagement::DeleteDesignsService, feature_category: :design
subject(:service) { described_class.new(project, user, issue: issue, designs: designs) }
- # Defined as a method so that the reponse is not cached. We also construct
+ # Defined as a method so that the response is not cached. We also construct
# a new service executor each time to avoid the intermediate cached values
# it constructs during its execution.
def run_service(delenda = nil)
@@ -173,8 +173,10 @@ RSpec.describe DesignManagement::DeleteDesignsService, feature_category: :design
run_service
end
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_REMOVED }
+ it_behaves_like 'internal event tracking' do
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_REMOVED }
+ let(:namespace) { project.namespace }
+
subject(:service_action) { run_service }
end
end
diff --git a/spec/services/design_management/save_designs_service_spec.rb b/spec/services/design_management/save_designs_service_spec.rb
index ea53fcc3b12..8e5065184ca 100644
--- a/spec/services/design_management/save_designs_service_spec.rb
+++ b/spec/services/design_management/save_designs_service_spec.rb
@@ -11,9 +11,7 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
let(:project) { issue.project }
let(:user) { developer }
let(:files) { [rails_sample] }
- let(:design_repository) do
- ::Gitlab::GlRepository::DESIGN.repository_resolver.call(project)
- end
+ let(:design_repository) { project.find_or_create_design_management_repository.repository }
let(:rails_sample_name) { 'rails_sample.jpg' }
let(:rails_sample) { sample_image(rails_sample_name) }
@@ -43,9 +41,7 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
design_files = files_to_upload || files
design_files.each(&:rewind)
- service = described_class.new(project, user,
- issue: issue,
- files: design_files)
+ service = described_class.new(project, user, issue: issue, files: design_files)
service.execute
end
@@ -123,8 +119,9 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
)
end
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_ADDED }
+ it_behaves_like 'internal event tracking' do
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_ADDED }
+ let(:namespace) { project.namespace }
subject(:service_action) { run_service }
end
@@ -221,8 +218,9 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
run_service
end
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_MODIFIED }
+ it_behaves_like 'internal event tracking' do
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_MODIFIED }
+ let(:namespace) { project.namespace }
subject(:service_action) { run_service }
end
@@ -390,9 +388,12 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
before do
path = File.join(build(:design, issue: issue, filename: filename).full_path)
design_repository.create_if_not_exists
- design_repository.create_file(user, path, 'something fake',
- branch_name: project.default_branch_or_main,
- message: 'Somehow created without being tracked in db')
+ design_repository.create_file(
+ user,
+ path, 'something fake',
+ branch_name: project.default_branch_or_main,
+ message: 'Somehow created without being tracked in db'
+ )
end
it 'creates the design and a new version for it' do
diff --git a/spec/services/discussions/resolve_service_spec.rb b/spec/services/discussions/resolve_service_spec.rb
index a6e1bad30ce..88ef390bb02 100644
--- a/spec/services/discussions/resolve_service_spec.rb
+++ b/spec/services/discussions/resolve_service_spec.rb
@@ -94,9 +94,11 @@ RSpec.describe Discussions::ResolveService, feature_category: :code_review_workf
it 'raises an argument error if discussions do not belong to the same noteable' do
other_merge_request = create(:merge_request)
- other_discussion = create(:diff_note_on_merge_request,
- noteable: other_merge_request,
- project: other_merge_request.source_project).to_discussion
+ other_discussion = create(
+ :diff_note_on_merge_request,
+ noteable: other_merge_request,
+ project: other_merge_request.source_project
+ ).to_discussion
expect do
described_class.new(project, user, one_or_more_discussions: [discussion, other_discussion])
end.to raise_error(
diff --git a/spec/services/draft_notes/publish_service_spec.rb b/spec/services/draft_notes/publish_service_spec.rb
index dab06637c1a..48959baeaa5 100644
--- a/spec/services/draft_notes/publish_service_spec.rb
+++ b/spec/services/draft_notes/publish_service_spec.rb
@@ -292,9 +292,12 @@ RSpec.describe DraftNotes::PublishService, feature_category: :code_review_workfl
other_user = create(:user)
project.add_developer(other_user)
- create(:draft_note, merge_request: merge_request,
- author: user,
- note: "thanks\n/assign #{other_user.to_reference}")
+ create(
+ :draft_note,
+ merge_request: merge_request,
+ author: user,
+ note: "thanks\n/assign #{other_user.to_reference}"
+ )
expect { publish }.to change { DraftNote.count }.by(-1).and change { Note.count }.by(2)
expect(merge_request.reload.assignees).to match_array([other_user])
diff --git a/spec/services/environments/stop_service_spec.rb b/spec/services/environments/stop_service_spec.rb
index 6e3b36b5636..04116c5238f 100644
--- a/spec/services/environments/stop_service_spec.rb
+++ b/spec/services/environments/stop_service_spec.rb
@@ -135,8 +135,7 @@ RSpec.describe Environments::StopService, feature_category: :continuous_delivery
context 'when branch for stop action is protected' do
before do
project.add_developer(user)
- create(:protected_branch, :no_one_can_push,
- name: 'master', project: project)
+ create(:protected_branch, :no_one_can_push, name: 'master', project: project)
end
it 'does not stop environment' do
diff --git a/spec/services/environments/stop_stale_service_spec.rb b/spec/services/environments/stop_stale_service_spec.rb
index 46d770c30cc..0aa5659f81d 100644
--- a/spec/services/environments/stop_stale_service_spec.rb
+++ b/spec/services/environments/stop_stale_service_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe Environments::StopStaleService,
- :clean_gitlab_redis_shared_state,
- :sidekiq_inline,
- feature_category: :continuous_delivery do
+ :clean_gitlab_redis_shared_state,
+ :sidekiq_inline,
+ feature_category: :continuous_delivery do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
diff --git a/spec/services/files/delete_service_spec.rb b/spec/services/files/delete_service_spec.rb
index dd99e5f9742..982e1bda5ac 100644
--- a/spec/services/files/delete_service_spec.rb
+++ b/spec/services/files/delete_service_spec.rb
@@ -56,9 +56,10 @@ RSpec.describe Files::DeleteService, feature_category: :source_code_management d
let(:last_commit_sha) { Gitlab::Git::Commit.last_for_path(project.repository, project.default_branch, file_path).parent_id }
it "returns a hash with the correct error message and a :error status" do
- expect { subject.execute }
- .to raise_error(Files::UpdateService::FileChangedError,
- "You are attempting to delete a file that has been previously updated.")
+ expect { subject.execute }.to raise_error(
+ Files::UpdateService::FileChangedError,
+ "You are attempting to delete a file that has been previously updated."
+ )
end
end
diff --git a/spec/services/files/update_service_spec.rb b/spec/services/files/update_service_spec.rb
index 6a9f9d6b86f..be424d5cb0d 100644
--- a/spec/services/files/update_service_spec.rb
+++ b/spec/services/files/update_service_spec.rb
@@ -36,8 +36,10 @@ RSpec.describe Files::UpdateService, feature_category: :source_code_management d
it "returns a hash with the correct error message and a :error status" do
expect { subject.execute }
- .to raise_error(Files::UpdateService::FileChangedError,
- "You are attempting to update a file that has changed since you started editing it.")
+ .to raise_error(
+ Files::UpdateService::FileChangedError,
+ "You are attempting to update a file that has changed since you started editing it."
+ )
end
end
diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb
index 5e43426b9dd..74f1f4bc7ac 100644
--- a/spec/services/git/branch_push_service_spec.rb
+++ b/spec/services/git/branch_push_service_spec.rb
@@ -81,18 +81,18 @@ RSpec.describe Git::BranchPushService, :use_clean_rails_redis_caching, services:
end
it 'creates a pipeline with the right parameters' do
- expect(Ci::CreatePipelineService)
- .to receive(:new)
- .with(project,
- user,
- {
- before: oldrev,
- after: newrev,
- ref: ref,
- checkout_sha: SeedRepo::Commit::ID,
- variables_attributes: [],
- push_options: {}
- }).and_call_original
+ expect(Ci::CreatePipelineService).to receive(:new).with(
+ project,
+ user,
+ {
+ before: oldrev,
+ after: newrev,
+ ref: ref,
+ checkout_sha: SeedRepo::Commit::ID,
+ variables_attributes: [],
+ push_options: {}
+ }
+ ).and_call_original
subject
end
diff --git a/spec/services/google_cloud/create_cloudsql_instance_service_spec.rb b/spec/services/google_cloud/create_cloudsql_instance_service_spec.rb
index 4f2e0bea623..c31e76170d5 100644
--- a/spec/services/google_cloud/create_cloudsql_instance_service_spec.rb
+++ b/spec/services/google_cloud/create_cloudsql_instance_service_spec.rb
@@ -28,13 +28,14 @@ RSpec.describe GoogleCloud::CreateCloudsqlInstanceService, feature_category: :de
it 'triggers creation of a cloudsql instance' do
expect_next_instance_of(GoogleApi::CloudPlatform::Client) do |client|
expected_instance_name = "gitlab-#{project.id}-postgres-8000-test-env-42"
- expect(client).to receive(:create_cloudsql_instance)
- .with(gcp_project_id,
- expected_instance_name,
- String,
- database_version,
- 'us-east1',
- tier)
+ expect(client).to receive(:create_cloudsql_instance).with(
+ gcp_project_id,
+ expected_instance_name,
+ String,
+ database_version,
+ 'us-east1',
+ tier
+ )
end
result = service.execute
@@ -74,13 +75,14 @@ RSpec.describe GoogleCloud::CreateCloudsqlInstanceService, feature_category: :de
it 'uses defined region' do
expect_next_instance_of(GoogleApi::CloudPlatform::Client) do |client|
- expect(client).to receive(:create_cloudsql_instance)
- .with(gcp_project_id,
- String,
- String,
- database_version,
- 'user-defined-region',
- tier)
+ expect(client).to receive(:create_cloudsql_instance).with(
+ gcp_project_id,
+ String,
+ String,
+ database_version,
+ 'user-defined-region',
+ tier
+ )
end
service.execute
diff --git a/spec/services/google_cloud/fetch_google_ip_list_service_spec.rb b/spec/services/google_cloud/fetch_google_ip_list_service_spec.rb
index e5f06824b9f..f8d5ba99bf6 100644
--- a/spec/services/google_cloud/fetch_google_ip_list_service_spec.rb
+++ b/spec/services/google_cloud/fetch_google_ip_list_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe GoogleCloud::FetchGoogleIpListService, :use_clean_rails_memory_store_caching,
-:clean_gitlab_redis_rate_limiting, feature_category: :build_artifacts do
+ :clean_gitlab_redis_rate_limiting, feature_category: :build_artifacts do
include StubRequests
let(:google_cloud_ips) { File.read(Rails.root.join('spec/fixtures/cdn/google_cloud.json')) }
diff --git a/spec/services/google_cloud/generate_pipeline_service_spec.rb b/spec/services/google_cloud/generate_pipeline_service_spec.rb
index b363b7b17b6..26a1ccb7e3b 100644
--- a/spec/services/google_cloud/generate_pipeline_service_spec.rb
+++ b/spec/services/google_cloud/generate_pipeline_service_spec.rb
@@ -84,11 +84,13 @@ test-java:
stage: test
script: mvn clean test
EOF
- project.repository.create_file(maintainer,
- file_name,
- file_content,
- message: 'Pipeline with three stages and two jobs',
- branch_name: project.default_branch)
+ project.repository.create_file(
+ maintainer,
+ file_name,
+ file_content,
+ message: 'Pipeline with three stages and two jobs',
+ branch_name: project.default_branch
+ )
end
it 'introduces a `deploy` stage and includes the deploy-to-cloud-run job' do
@@ -138,11 +140,13 @@ test-java:
stage: test
script: mvn clean test
EOF
- project.repository.create_file(maintainer,
- file_name,
- file_content,
- message: 'Pipeline with three stages and two jobs',
- branch_name: project.default_branch)
+ project.repository.create_file(
+ maintainer,
+ file_name,
+ file_content,
+ message: 'Pipeline with three stages and two jobs',
+ branch_name: project.default_branch
+ )
end
it 'includes the deploy-to-cloud-run job' do
@@ -178,11 +182,13 @@ stages:
include:
local: 'some-pipeline.yml'
EOF
- project.repository.create_file(maintainer,
- file_name,
- file_content,
- message: 'Pipeline with three stages and two jobs',
- branch_name: project.default_branch)
+ project.repository.create_file(
+ maintainer,
+ file_name,
+ file_content,
+ message: 'Pipeline with three stages and two jobs',
+ branch_name: project.default_branch
+ )
end
it 'includes the deploy-to-cloud-run job' do
@@ -309,11 +315,13 @@ stages:
include:
local: 'some-pipeline.yml'
EOF
- project.repository.create_file(maintainer,
- file_name,
- file_content,
- message: 'Pipeline with three stages and two jobs',
- branch_name: project.default_branch)
+ project.repository.create_file(
+ maintainer,
+ file_name,
+ file_content,
+ message: 'Pipeline with three stages and two jobs',
+ branch_name: project.default_branch
+ )
end
it 'includes the vision ai pipeline' do
diff --git a/spec/services/google_cloud/get_cloudsql_instances_service_spec.rb b/spec/services/google_cloud/get_cloudsql_instances_service_spec.rb
index ed41d0fd487..cd2ad00ac3f 100644
--- a/spec/services/google_cloud/get_cloudsql_instances_service_spec.rb
+++ b/spec/services/google_cloud/get_cloudsql_instances_service_spec.rb
@@ -39,24 +39,26 @@ RSpec.describe GoogleCloud::GetCloudsqlInstancesService, feature_category: :depl
end
it 'result is grouped by environment', :aggregate_failures do
- expect(service.execute).to contain_exactly({
- ref: '*',
- gcp_project: 'value-GCP_PROJECT_ID-*',
- instance_name: 'value-GCP_CLOUDSQL_INSTANCE_NAME-*',
- version: 'value-GCP_CLOUDSQL_VERSION-*'
- },
- {
- ref: 'STG',
- gcp_project: 'value-GCP_PROJECT_ID-STG',
- instance_name: 'value-GCP_CLOUDSQL_INSTANCE_NAME-STG',
- version: 'value-GCP_CLOUDSQL_VERSION-STG'
- },
- {
- ref: 'PRD',
- gcp_project: 'value-GCP_PROJECT_ID-PRD',
- instance_name: 'value-GCP_CLOUDSQL_INSTANCE_NAME-PRD',
- version: 'value-GCP_CLOUDSQL_VERSION-PRD'
- })
+ expect(service.execute).to contain_exactly(
+ {
+ ref: '*',
+ gcp_project: 'value-GCP_PROJECT_ID-*',
+ instance_name: 'value-GCP_CLOUDSQL_INSTANCE_NAME-*',
+ version: 'value-GCP_CLOUDSQL_VERSION-*'
+ },
+ {
+ ref: 'STG',
+ gcp_project: 'value-GCP_PROJECT_ID-STG',
+ instance_name: 'value-GCP_CLOUDSQL_INSTANCE_NAME-STG',
+ version: 'value-GCP_CLOUDSQL_VERSION-STG'
+ },
+ {
+ ref: 'PRD',
+ gcp_project: 'value-GCP_PROJECT_ID-PRD',
+ instance_name: 'value-GCP_CLOUDSQL_INSTANCE_NAME-PRD',
+ version: 'value-GCP_CLOUDSQL_VERSION-PRD'
+ }
+ )
end
end
end
diff --git a/spec/services/gpg_keys/destroy_service_spec.rb b/spec/services/gpg_keys/destroy_service_spec.rb
index b9aa3e351c9..85c1fc2893b 100644
--- a/spec/services/gpg_keys/destroy_service_spec.rb
+++ b/spec/services/gpg_keys/destroy_service_spec.rb
@@ -2,14 +2,28 @@
require 'spec_helper'
-RSpec.describe GpgKeys::DestroyService do
- let(:user) { create(:user) }
+RSpec.describe GpgKeys::DestroyService, feature_category: :source_code_management do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:gpg_key) { create(:gpg_key) }
subject { described_class.new(user) }
it 'destroys the GPG key' do
- gpg_key = create(:gpg_key)
-
expect { subject.execute(gpg_key) }.to change(GpgKey, :count).by(-1)
end
+
+ it 'nullifies the related signatures in batches' do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+
+ first_signature = create(:gpg_signature, gpg_key: gpg_key)
+ second_signature = create(:gpg_signature, gpg_key: gpg_key)
+ third_signature = create(:gpg_signature, gpg_key: create(:another_gpg_key))
+
+ control = ActiveRecord::QueryRecorder.new { subject.execute(gpg_key) }
+ expect(control.count).to eq(5)
+
+ expect(first_signature.reload.gpg_key).to be_nil
+ expect(second_signature.reload.gpg_key).to be_nil
+ expect(third_signature.reload.gpg_key).not_to be_nil
+ end
end
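
The new example stubs `BATCH_SIZE` to 1 and records five queries while nullifying two signatures, which points at a batched nullification loop (two select-and-update rounds plus one final empty select). A hypothetical sketch consistent with that query count, assuming a `gpg_signatures` association; the real GpgKeys::DestroyService is not part of this diff:

def nullify_signatures(gpg_key, batch_size)
  loop do
    ids = gpg_key.gpg_signatures.limit(batch_size).ids # 1 SELECT per round
    break if ids.empty?

    GpgSignature.where(id: ids).update_all(gpg_key_id: nil) # 1 UPDATE per round
  end
end
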
diff --git a/spec/services/groups/destroy_service_spec.rb b/spec/services/groups/destroy_service_spec.rb
index 929f7d5b4e3..ebdce07d03c 100644
--- a/spec/services/groups/destroy_service_spec.rb
+++ b/spec/services/groups/destroy_service_spec.rb
@@ -44,8 +44,7 @@ RSpec.describe Groups::DestroyService, feature_category: :groups_and_projects do
destroy_group(group, user, async)
expect(
- Users::GhostUserMigration.where(user: bot,
- initiator_user: user)
+ Users::GhostUserMigration.where(user: bot, initiator_user: user)
).to be_exists
end
end
@@ -70,10 +69,6 @@ RSpec.describe Groups::DestroyService, feature_category: :groups_and_projects do
end
it 'verifies that paths have been deleted' do
- Gitlab::GitalyClient::NamespaceService.allow do
- expect(Gitlab::GitalyClient::NamespaceService.new(project.repository_storage)
- .exists?(group.path)).to be_falsey
- end
expect(removed_repo).not_to exist
end
end
@@ -101,10 +96,6 @@ RSpec.describe Groups::DestroyService, feature_category: :groups_and_projects do
end
it 'verifies original paths and projects still exist' do
- Gitlab::GitalyClient::NamespaceService.allow do
- expect(Gitlab::GitalyClient::NamespaceService.new(project.repository_storage)
- .exists?(group.path)).to be_truthy
- end
expect(removed_repo).not_to exist
expect(Project.unscoped.count).to eq(1)
expect(Group.unscoped.count).to eq(2)
diff --git a/spec/services/groups/group_links/create_service_spec.rb b/spec/services/groups/group_links/create_service_spec.rb
index 9ba664212b8..913d72eff9c 100644
--- a/spec/services/groups/group_links/create_service_spec.rb
+++ b/spec/services/groups/group_links/create_service_spec.rb
@@ -55,8 +55,7 @@ RSpec.describe Groups::GroupLinks::CreateService, '#execute', feature_category:
context 'when sharing outside the hierarchy is disabled' do
let_it_be_with_refind(:group_parent) do
- create(:group,
- namespace_settings: create(:namespace_settings, prevent_sharing_groups_outside_hierarchy: true))
+ create(:group, namespace_settings: create(:namespace_settings, prevent_sharing_groups_outside_hierarchy: true))
end
it_behaves_like 'not shareable'
diff --git a/spec/services/groups/update_service_spec.rb b/spec/services/groups/update_service_spec.rb
index 861728f00c6..5e37f33e4f2 100644
--- a/spec/services/groups/update_service_spec.rb
+++ b/spec/services/groups/update_service_spec.rb
@@ -264,18 +264,6 @@ RSpec.describe Groups::UpdateService, feature_category: :groups_and_projects do
it_behaves_like 'not allowing a path update'
it_behaves_like 'allowing an update', on: :name
-
- context 'when npm_package_registry_fix_group_path_validation is disabled' do
- before do
- stub_feature_flags(npm_package_registry_fix_group_path_validation: false)
- expect_next_instance_of(::Groups::UpdateService) do |service|
- expect(service).to receive(:valid_path_change_with_npm_packages?).and_call_original
- end
- end
-
- it_behaves_like 'not allowing a path update'
- it_behaves_like 'allowing an update', on: :name
- end
end
context 'updating the subgroup' do
@@ -283,18 +271,6 @@ RSpec.describe Groups::UpdateService, feature_category: :groups_and_projects do
it_behaves_like 'allowing an update', on: :path
it_behaves_like 'allowing an update', on: :name
-
- context 'when npm_package_registry_fix_group_path_validation is disabled' do
- before do
- stub_feature_flags(npm_package_registry_fix_group_path_validation: false)
- expect_next_instance_of(::Groups::UpdateService) do |service|
- expect(service).to receive(:valid_path_change_with_npm_packages?).and_call_original
- end
- end
-
- it_behaves_like 'not allowing a path update'
- it_behaves_like 'allowing an update', on: :name
- end
end
end
@@ -306,18 +282,6 @@ RSpec.describe Groups::UpdateService, feature_category: :groups_and_projects do
it_behaves_like 'allowing an update', on: :path
it_behaves_like 'allowing an update', on: :name
-
- context 'when npm_package_registry_fix_group_path_validation is disabled' do
- before do
- stub_feature_flags(npm_package_registry_fix_group_path_validation: false)
- expect_next_instance_of(::Groups::UpdateService) do |service|
- expect(service).to receive(:valid_path_change_with_npm_packages?).and_call_original
- end
- end
-
- it_behaves_like 'not allowing a path update'
- it_behaves_like 'allowing an update', on: :name
- end
end
context 'updating the subgroup' do
@@ -325,18 +289,6 @@ RSpec.describe Groups::UpdateService, feature_category: :groups_and_projects do
it_behaves_like 'allowing an update', on: :path
it_behaves_like 'allowing an update', on: :name
-
- context 'when npm_package_registry_fix_group_path_validation is disabled' do
- before do
- stub_feature_flags(npm_package_registry_fix_group_path_validation: false)
- expect_next_instance_of(::Groups::UpdateService) do |service|
- expect(service).to receive(:valid_path_change_with_npm_packages?).and_call_original
- end
- end
-
- it_behaves_like 'not allowing a path update'
- it_behaves_like 'allowing an update', on: :name
- end
end
end
@@ -348,18 +300,6 @@ RSpec.describe Groups::UpdateService, feature_category: :groups_and_projects do
it_behaves_like 'allowing an update', on: :path
it_behaves_like 'allowing an update', on: :name
-
- context 'when npm_package_registry_fix_group_path_validation is disabled' do
- before do
- stub_feature_flags(npm_package_registry_fix_group_path_validation: false)
- expect_next_instance_of(::Groups::UpdateService) do |service|
- expect(service).to receive(:valid_path_change_with_npm_packages?).and_call_original
- end
- end
-
- it_behaves_like 'not allowing a path update'
- it_behaves_like 'allowing an update', on: :name
- end
end
context 'updating the subgroup' do
@@ -367,18 +307,6 @@ RSpec.describe Groups::UpdateService, feature_category: :groups_and_projects do
it_behaves_like 'allowing an update', on: :path
it_behaves_like 'allowing an update', on: :name
-
- context 'when npm_package_registry_fix_group_path_validation is disabled' do
- before do
- stub_feature_flags(npm_package_registry_fix_group_path_validation: false)
- expect_next_instance_of(::Groups::UpdateService) do |service|
- expect(service).to receive(:valid_path_change_with_npm_packages?).and_call_original
- end
- end
-
- it_behaves_like 'not allowing a path update'
- it_behaves_like 'allowing an update', on: :name
- end
end
end
end
diff --git a/spec/services/import_export_clean_up_service_spec.rb b/spec/services/import_export_clean_up_service_spec.rb
index 7b638b4948b..138dee3d975 100644
--- a/spec/services/import_export_clean_up_service_spec.rb
+++ b/spec/services/import_export_clean_up_service_spec.rb
@@ -53,9 +53,11 @@ RSpec.describe ImportExportCleanUpService, feature_category: :importers do
context 'with uploader exports' do
it 'removes old files and logs' do
- upload = create(:import_export_upload,
- updated_at: 2.days.ago,
- export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz'))
+ upload = create(
+ :import_export_upload,
+ updated_at: 2.days.ago,
+ export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz')
+ )
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger)
@@ -73,9 +75,11 @@ RSpec.describe ImportExportCleanUpService, feature_category: :importers do
end
it 'does not remove new files or logs' do
- upload = create(:import_export_upload,
- updated_at: 1.hour.ago,
- export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz'))
+ upload = create(
+ :import_export_upload,
+ updated_at: 1.hour.ago,
+ export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz')
+ )
expect(Gitlab::Import::Logger).not_to receive(:new)
diff --git a/spec/services/incident_management/incidents/create_service_spec.rb b/spec/services/incident_management/incidents/create_service_spec.rb
index e6ded379434..d0f9d414044 100644
--- a/spec/services/incident_management/incidents/create_service_spec.rb
+++ b/spec/services/incident_management/incidents/create_service_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe IncidentManagement::Incidents::CreateService, feature_category: :incident_management do
let_it_be(:project) { create(:project) }
- let_it_be(:user) { User.alert_bot }
+ let_it_be(:user) { Users::Internal.alert_bot }
let(:description) { 'Incident description' }
diff --git a/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb b/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb
index caa5ee495b7..5bbca91cdcd 100644
--- a/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb
+++ b/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe IncidentManagement::PagerDuty::CreateIncidentIssueService, feature_category: :incident_management do
let_it_be(:project, reload: true) { create(:project) }
- let_it_be(:user) { User.alert_bot }
+ let_it_be(:user) { Users::Internal.alert_bot }
let(:webhook_payload) { Gitlab::Json.parse(fixture_file('pager_duty/webhook_incident_trigger.json')) }
let(:parsed_payload) { ::PagerDuty::WebhookPayloadParser.call(webhook_payload) }
@@ -29,7 +29,7 @@ RSpec.describe IncidentManagement::PagerDuty::CreateIncidentIssueService, featur
end
it 'the issue author is Alert bot' do
- expect(execute.payload[:issue].author).to eq(User.alert_bot)
+ expect(execute.payload[:issue].author).to eq(Users::Internal.alert_bot)
end
it 'issue has a correct title' do
diff --git a/spec/services/issuable/process_assignees_spec.rb b/spec/services/issuable/process_assignees_spec.rb
index 2751267c08b..fac7ef9ce77 100644
--- a/spec/services/issuable/process_assignees_spec.rb
+++ b/spec/services/issuable/process_assignees_spec.rb
@@ -5,75 +5,89 @@ require 'spec_helper'
RSpec.describe Issuable::ProcessAssignees, feature_category: :team_planning do
describe '#execute' do
it 'returns assignee_ids when add_assignee_ids and remove_assignee_ids are not specified' do
- process = described_class.new(assignee_ids: %w(5 7 9),
- add_assignee_ids: nil,
- remove_assignee_ids: nil,
- existing_assignee_ids: %w(1 3 9),
- extra_assignee_ids: %w(2 5 12))
+ process = described_class.new(
+ assignee_ids: %w(5 7 9),
+ add_assignee_ids: nil,
+ remove_assignee_ids: nil,
+ existing_assignee_ids: %w(1 3 9),
+ extra_assignee_ids: %w(2 5 12)
+ )
result = process.execute
expect(result).to contain_exactly(5, 7, 9)
end
it 'combines other ids when assignee_ids is nil' do
- process = described_class.new(assignee_ids: nil,
- add_assignee_ids: nil,
- remove_assignee_ids: nil,
- existing_assignee_ids: %w(1 3 11),
- extra_assignee_ids: %w(2 5 12))
+ process = described_class.new(
+ assignee_ids: nil,
+ add_assignee_ids: nil,
+ remove_assignee_ids: nil,
+ existing_assignee_ids: %w(1 3 11),
+ extra_assignee_ids: %w(2 5 12)
+ )
result = process.execute
expect(result).to contain_exactly(1, 2, 3, 5, 11, 12)
end
it 'combines other ids when both add_assignee_ids and remove_assignee_ids are not empty' do
- process = described_class.new(assignee_ids: %w(5 7 9),
- add_assignee_ids: %w(2 4 6),
- remove_assignee_ids: %w(4 7 11),
- existing_assignee_ids: %w(1 3 11),
- extra_assignee_ids: %w(2 5 12))
+ process = described_class.new(
+ assignee_ids: %w(5 7 9),
+ add_assignee_ids: %w(2 4 6),
+ remove_assignee_ids: %w(4 7 11),
+ existing_assignee_ids: %w(1 3 11),
+ extra_assignee_ids: %w(2 5 12)
+ )
result = process.execute
expect(result).to contain_exactly(1, 2, 3, 5, 6, 12)
end
it 'combines other ids when remove_assignee_ids is not empty' do
- process = described_class.new(assignee_ids: %w(5 7 9),
- add_assignee_ids: nil,
- remove_assignee_ids: %w(4 7 11),
- existing_assignee_ids: %w(1 3 11),
- extra_assignee_ids: %w(2 5 12))
+ process = described_class.new(
+ assignee_ids: %w(5 7 9),
+ add_assignee_ids: nil,
+ remove_assignee_ids: %w(4 7 11),
+ existing_assignee_ids: %w(1 3 11),
+ extra_assignee_ids: %w(2 5 12)
+ )
result = process.execute
expect(result).to contain_exactly(1, 2, 3, 5, 12)
end
it 'combines other ids when add_assignee_ids is not empty' do
- process = described_class.new(assignee_ids: %w(5 7 9),
- add_assignee_ids: %w(2 4 6),
- remove_assignee_ids: nil,
- existing_assignee_ids: %w(1 3 11),
- extra_assignee_ids: %w(2 5 12))
+ process = described_class.new(
+ assignee_ids: %w(5 7 9),
+ add_assignee_ids: %w(2 4 6),
+ remove_assignee_ids: nil,
+ existing_assignee_ids: %w(1 3 11),
+ extra_assignee_ids: %w(2 5 12)
+ )
result = process.execute
expect(result).to contain_exactly(1, 2, 4, 3, 5, 6, 11, 12)
end
it 'combines ids when existing_assignee_ids and extra_assignee_ids are omitted' do
- process = described_class.new(assignee_ids: %w(5 7 9),
- add_assignee_ids: %w(2 4 6),
- remove_assignee_ids: %w(4 7 11))
+ process = described_class.new(
+ assignee_ids: %w(5 7 9),
+ add_assignee_ids: %w(2 4 6),
+ remove_assignee_ids: %w(4 7 11)
+ )
result = process.execute
expect(result.sort).to eq([2, 6].sort)
end
it 'handles mixed string and integer arrays' do
- process = described_class.new(assignee_ids: %w(5 7 9),
- add_assignee_ids: [2, 4, 6],
- remove_assignee_ids: %w(4 7 11),
- existing_assignee_ids: [1, 3, 11],
- extra_assignee_ids: %w(2 5 12))
+ process = described_class.new(
+ assignee_ids: %w(5 7 9),
+ add_assignee_ids: [2, 4, 6],
+ remove_assignee_ids: %w(4 7 11),
+ existing_assignee_ids: [1, 3, 11],
+ extra_assignee_ids: %w(2 5 12)
+ )
result = process.execute
expect(result).to contain_exactly(1, 2, 3, 5, 6, 12)
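
Taken together, the reformatted examples describe the resolution rule: when explicit add/remove lists are given the result is (existing ∪ extra ∪ add) minus remove; otherwise assignee_ids wins when present, with existing ∪ extra as the fallback, and every id is cast to an integer. A hypothetical condensation of that rule, not the real Issuable::ProcessAssignees:

def resolve_assignees(assignee_ids:, add_assignee_ids: nil, remove_assignee_ids: nil,
                      existing_assignee_ids: nil, extra_assignee_ids: nil)
  ints   = ->(ids) { Array(ids).map(&:to_i) }
  add    = ints.call(add_assignee_ids)
  remove = ints.call(remove_assignee_ids)

  if add.empty? && remove.empty?
    assignee_ids ? ints.call(assignee_ids) : ints.call(existing_assignee_ids) | ints.call(extra_assignee_ids)
  else
    (ints.call(existing_assignee_ids) | ints.call(extra_assignee_ids) | add) - remove
  end
end
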
diff --git a/spec/services/issue_links/destroy_service_spec.rb b/spec/services/issue_links/destroy_service_spec.rb
index 5c4814f5ad1..c367b0157cb 100644
--- a/spec/services/issue_links/destroy_service_spec.rb
+++ b/spec/services/issue_links/destroy_service_spec.rb
@@ -5,25 +5,22 @@ require 'spec_helper'
RSpec.describe IssueLinks::DestroyService, feature_category: :team_planning do
describe '#execute' do
let_it_be(:project) { create(:project_empty_repo, :private) }
- let_it_be(:user) { create(:user) }
+ let_it_be(:reporter) { create(:user).tap { |user| project.add_reporter(user) } }
let_it_be(:issue_a) { create(:issue, project: project) }
let_it_be(:issue_b) { create(:issue, project: project) }
let!(:issuable_link) { create(:issue_link, source: issue_a, target: issue_b) }
+ let(:user) { reporter }
subject { described_class.new(issuable_link, user).execute }
it_behaves_like 'a destroyable issuable link'
context 'when target is an incident' do
- before do
- project.add_reporter(user)
- end
-
let(:issue_b) { create(:incident, project: project) }
it_behaves_like 'an incident management tracked event', :incident_management_incident_unrelate do
- let(:current_user) { user }
+ let(:current_user) { reporter }
end
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
diff --git a/spec/services/issue_links/list_service_spec.rb b/spec/services/issue_links/list_service_spec.rb
index bfb6127ed56..b5cc8c4dcdc 100644
--- a/spec/services/issue_links/list_service_spec.rb
+++ b/spec/services/issue_links/list_service_spec.rb
@@ -21,18 +21,15 @@ RSpec.describe IssueLinks::ListService, feature_category: :team_planning do
let(:issue_d) { create :issue, project: project }
let!(:issue_link_c) do
- create(:issue_link, source: issue_d,
- target: issue)
+ create(:issue_link, source: issue_d, target: issue)
end
let!(:issue_link_b) do
- create(:issue_link, source: issue,
- target: issue_c)
+ create(:issue_link, source: issue, target: issue_c)
end
let!(:issue_link_a) do
- create(:issue_link, source: issue,
- target: issue_b)
+ create(:issue_link, source: issue, target: issue_b)
end
it 'ensures no N+1 queries are made' do
@@ -53,26 +50,32 @@ RSpec.describe IssueLinks::ListService, feature_category: :team_planning do
it 'returns related issues JSON' do
expect(subject.size).to eq(3)
- expect(subject).to include(include(id: issue_b.id,
- title: issue_b.title,
- state: issue_b.state,
- reference: issue_b.to_reference(project),
- path: "/#{project.full_path}/-/issues/#{issue_b.iid}",
- relation_path: "/#{project.full_path}/-/issues/#{issue.iid}/links/#{issue_link_a.id}"))
-
- expect(subject).to include(include(id: issue_c.id,
- title: issue_c.title,
- state: issue_c.state,
- reference: issue_c.to_reference(project),
- path: "/#{project.full_path}/-/issues/#{issue_c.iid}",
- relation_path: "/#{project.full_path}/-/issues/#{issue.iid}/links/#{issue_link_b.id}"))
-
- expect(subject).to include(include(id: issue_d.id,
- title: issue_d.title,
- state: issue_d.state,
- reference: issue_d.to_reference(project),
- path: "/#{project.full_path}/-/issues/#{issue_d.iid}",
- relation_path: "/#{project.full_path}/-/issues/#{issue.iid}/links/#{issue_link_c.id}"))
+ expect(subject).to include(include(
+ id: issue_b.id,
+ title: issue_b.title,
+ state: issue_b.state,
+ reference: issue_b.to_reference(project),
+ path: "/#{project.full_path}/-/issues/#{issue_b.iid}",
+ relation_path: "/#{project.full_path}/-/issues/#{issue.iid}/links/#{issue_link_a.id}"
+ ))
+
+ expect(subject).to include(include(
+ id: issue_c.id,
+ title: issue_c.title,
+ state: issue_c.state,
+ reference: issue_c.to_reference(project),
+ path: "/#{project.full_path}/-/issues/#{issue_c.iid}",
+ relation_path: "/#{project.full_path}/-/issues/#{issue.iid}/links/#{issue_link_b.id}"
+ ))
+
+ expect(subject).to include(include(
+ id: issue_d.id,
+ title: issue_d.title,
+ state: issue_d.state,
+ reference: issue_d.to_reference(project),
+ path: "/#{project.full_path}/-/issues/#{issue_d.iid}",
+ relation_path: "/#{project.full_path}/-/issues/#{issue.iid}/links/#{issue_link_c.id}"
+ ))
end
end
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index 47925236a74..dabbd4bfa84 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -321,7 +321,7 @@ RSpec.describe Issues::CloseService, feature_category: :team_planning do
alert = create(:alert_management_alert, issue: issue, project: project)
expect(SystemNoteService).to receive(:change_alert_status)
- .with(alert, User.alert_bot, " because #{user.to_reference} closed incident #{issue.to_reference(project)}")
+ .with(alert, Users::Internal.alert_bot, " because #{user.to_reference} closed incident #{issue.to_reference(project)}")
close_issue
@@ -356,7 +356,7 @@ RSpec.describe Issues::CloseService, feature_category: :team_planning do
alerts.each do |alert|
expect(SystemNoteService).to receive(:change_alert_status)
- .with(alert, User.alert_bot, " because #{user.to_reference} closed incident #{issue.to_reference(project)}")
+ .with(alert, Users::Internal.alert_bot, " because #{user.to_reference} closed incident #{issue.to_reference(project)}")
end
close_issue
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index 2daba8e359d..7cd2cd8f564 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -577,8 +577,10 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
context "when issuable feature is private" do
before do
- project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE,
- merge_requests_access_level: ProjectFeature::PRIVATE)
+ project.project_feature.update!(
+ issues_access_level: ProjectFeature::PRIVATE,
+ merge_requests_access_level: ProjectFeature::PRIVATE
+ )
end
levels = [Gitlab::VisibilityLevel::INTERNAL, Gitlab::VisibilityLevel::PUBLIC]
@@ -680,7 +682,7 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
end
context 'with alert bot author' do
- let_it_be(:user) { User.alert_bot }
+ let_it_be(:user) { Users::Internal.alert_bot }
let_it_be(:label) { create(:label, project: project) }
let(:opts) do
diff --git a/spec/services/issues/export_csv_service_spec.rb b/spec/services/issues/export_csv_service_spec.rb
index 1ac64c0301d..31eaa72255d 100644
--- a/spec/services/issues/export_csv_service_spec.rb
+++ b/spec/services/issues/export_csv_service_spec.rb
@@ -43,18 +43,20 @@ RSpec.describe Issues::ExportCsvService, :with_license, feature_category: :team_
# so create these first.
issue.timelogs.create!(time_spent: 360, user: user)
issue.timelogs.create!(time_spent: 200, user: user)
- issue.update!(milestone: milestone,
- assignees: [user],
- description: 'Issue with details',
- state: :opened,
- due_date: DateTime.new(2014, 3, 2),
- created_at: DateTime.new(2015, 4, 3, 2, 1, 0),
- updated_at: DateTime.new(2016, 5, 4, 3, 2, 1),
- closed_at: DateTime.new(2017, 6, 5, 4, 3, 2),
- weight: 4,
- discussion_locked: true,
- labels: [feature_label, idea_label],
- time_estimate: 72000)
+ issue.update!(
+ milestone: milestone,
+ assignees: [user],
+ description: 'Issue with details',
+ state: :opened,
+ due_date: DateTime.new(2014, 3, 2),
+ created_at: DateTime.new(2015, 4, 3, 2, 1, 0),
+ updated_at: DateTime.new(2016, 5, 4, 3, 2, 1),
+ closed_at: DateTime.new(2017, 6, 5, 4, 3, 2),
+ weight: 4,
+ discussion_locked: true,
+ labels: [feature_label, idea_label],
+ time_estimate: 72000
+ )
end
shared_examples 'exports CSVs for issues' do
@@ -158,9 +160,9 @@ RSpec.describe Issues::ExportCsvService, :with_license, feature_category: :team_
context 'with issues filtered by labels and project' do
subject do
described_class.new(
- IssuesFinder.new(user,
- project_id: project.id,
- label_name: %w(Idea Feature)).execute, project)
+ IssuesFinder.new(user, project_id: project.id, label_name: %w(Idea Feature)).execute,
+ project
+ )
end
it 'returns only filtered objects' do
diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb
index 12924df3200..55f912fb703 100644
--- a/spec/services/issues/move_service_spec.rb
+++ b/spec/services/issues/move_service_spec.rb
@@ -390,8 +390,7 @@ RSpec.describe Issues::MoveService, feature_category: :team_planning do
let(:moved_to_issue) { create(:issue) }
let(:old_issue) do
- create(:issue, project: old_project, author: author,
- moved_to: moved_to_issue)
+ create(:issue, project: old_project, author: author, moved_to: moved_to_issue)
end
it { expect { move }.to raise_error(StandardError, /permissions/) }
diff --git a/spec/services/issues/resolve_discussions_spec.rb b/spec/services/issues/resolve_discussions_spec.rb
index c2111bffdda..ea4ad0440ec 100644
--- a/spec/services/issues/resolve_discussions_spec.rb
+++ b/spec/services/issues/resolve_discussions_spec.rb
@@ -60,10 +60,14 @@ RSpec.describe Issues::ResolveDiscussions, feature_category: :team_planning do
end
it "contains all discussions when only a merge request is passed" do
- second_discussion = Discussion.new([create(:diff_note_on_merge_request,
- noteable: merge_request,
- project: merge_request.target_project,
- line_number: 15)])
+ second_discussion = Discussion.new([
+ create(
+ :diff_note_on_merge_request,
+ noteable: merge_request,
+ project: merge_request.target_project,
+ line_number: 15
+ )
+ ])
service = DummyService.new(
container: project,
current_user: user,
@@ -77,11 +81,15 @@ RSpec.describe Issues::ResolveDiscussions, feature_category: :team_planning do
end
it "contains only unresolved discussions" do
- _second_discussion = Discussion.new([create(:diff_note_on_merge_request, :resolved,
- noteable: merge_request,
- project: merge_request.target_project,
- line_number: 15
- )])
+ _second_discussion = Discussion.new([
+ create(
+ :diff_note_on_merge_request,
+ :resolved,
+ noteable: merge_request,
+ project: merge_request.target_project,
+ line_number: 15
+ )
+ ])
service = DummyService.new(
container: project,
current_user: user,
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index c677dc0315c..eb9fe2b4ed7 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -15,11 +15,14 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning
let_it_be(:milestone) { create(:milestone, project: project) }
let(:issue) do
- create(:issue, title: 'Old title',
- description: "for #{user2.to_reference}",
- assignee_ids: [user3.id],
- project: project,
- author: create(:user))
+ create(
+ :issue,
+ title: 'Old title',
+ description: "for #{user2.to_reference}",
+ assignee_ids: [user3.id],
+ project: project,
+ author: create(:user)
+ )
end
before_all do
diff --git a/spec/services/labels/available_labels_service_spec.rb b/spec/services/labels/available_labels_service_spec.rb
index c9f75283c75..2b398210034 100644
--- a/spec/services/labels/available_labels_service_spec.rb
+++ b/spec/services/labels/available_labels_service_spec.rb
@@ -108,36 +108,24 @@ RSpec.describe Labels::AvailableLabelsService, feature_category: :team_planning
end
end
- describe '#filter_locked_labels_ids_in_param' do
- let(:label_ids) { labels.map(&:id).push(non_existing_record_id) }
+ describe '#filter_locked_label_ids' do
+ let(:label_ids) { labels.map(&:id) }
context 'when parent is a project' do
- it 'returns only locked label ids' do
- result = described_class.new(user, project, ids: label_ids).filter_locked_labels_ids_in_param(:ids)
+ it 'returns only relevant label ids' do
+ result = described_class.new(user, project, ids: label_ids).filter_locked_label_ids(label_ids)
expect(result).to match_array([project_label_locked.id, group_label_locked.id])
end
-
- it 'returns labels in preserved order' do
- result = described_class.new(user, project, ids: label_ids.reverse).filter_locked_labels_ids_in_param(:ids)
-
- expect(result).to eq([group_label_locked.id, project_label_locked.id])
- end
end
context 'when parent is a group' do
- it 'returns only locked label ids' do
- result = described_class.new(user, group, ids: label_ids).filter_locked_labels_ids_in_param(:ids)
+ it 'returns only relevant label ids' do
+ result = described_class.new(user, group, ids: label_ids).filter_locked_label_ids(label_ids)
expect(result).to match_array([group_label_locked.id])
end
end
-
- it 'accepts a single id parameter' do
- result = described_class.new(user, project, label_id: project_label_locked.id).filter_locked_labels_ids_in_param(:label_id)
-
- expect(result).to match_array([project_label_locked.id])
- end
end
describe '#available_labels' do
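For reference, the renamed method exercised above now receives the ids directly instead of reading them from a named param; a minimal usage sketch based on the calls in these examples:

service    = Labels::AvailableLabelsService.new(user, project, ids: label_ids)
locked_ids = service.filter_locked_label_ids(label_ids)
# => only the ids of locked labels that are visible from the given parent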
diff --git a/spec/services/labels/update_service_spec.rb b/spec/services/labels/update_service_spec.rb
index 9a8868dac10..61e229e3138 100644
--- a/spec/services/labels/update_service_spec.rb
+++ b/spec/services/labels/update_service_spec.rb
@@ -99,6 +99,14 @@ RSpec.describe Labels::UpdateService, feature_category: :team_planning do
expect(label.reload.lock_on_merge).to be_truthy
end
+ it 'does not allow lock_on_merge to be unset' do
+ label_locked = Labels::CreateService.new(title: 'Initial', lock_on_merge: true).execute(project: project)
+ label = described_class.new(title: 'test', lock_on_merge: false).execute(label_locked)
+
+ expect(label.reload.lock_on_merge).to be_truthy
+ expect(label.reload.title).to eq 'test'
+ end
+
it 'does not allow setting lock_on_merge for templates' do
template_label = Labels::CreateService.new(title: 'Initial').execute(template: true)
label = described_class.new(params).execute(template_label)
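The new example asserts that lock_on_merge is effectively write-once: once set it cannot be cleared, while other attributes still update. A minimal sketch of the parameter filtering this implies (the method name and params shape are assumptions, not shown in this diff):

# Illustrative only: drop any attempt to unset lock_on_merge before updating.
def filtered_update_params(label, params)
  params = params.dup
  params.delete(:lock_on_merge) if label.lock_on_merge?
  params
end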
diff --git a/spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb b/spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb
index 86f528d1ea7..d9982b664e5 100644
--- a/spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb
+++ b/spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb
@@ -88,10 +88,11 @@ RSpec.describe LooseForeignKeys::BatchCleanerService, feature_category: :databas
expect(loose_fk_child_table_1.count).to eq(4)
expect(loose_fk_child_table_2.count).to eq(4)
- described_class.new(parent_table: '_test_loose_fk_parent_table',
- loose_foreign_key_definitions: loose_foreign_key_definitions,
- deleted_parent_records: LooseForeignKeys::DeletedRecord.load_batch_for_table('public._test_loose_fk_parent_table', 100)
- ).execute
+ described_class.new(
+ parent_table: '_test_loose_fk_parent_table',
+ loose_foreign_key_definitions: loose_foreign_key_definitions,
+ deleted_parent_records: LooseForeignKeys::DeletedRecord.load_batch_for_table('public._test_loose_fk_parent_table', 100)
+ ).execute
end
it 'cleans up the child records' do
@@ -125,11 +126,12 @@ RSpec.describe LooseForeignKeys::BatchCleanerService, feature_category: :databas
let(:deleted_records_incremented_counter) { Gitlab::Metrics.registry.get(:loose_foreign_key_incremented_deleted_records) }
let(:cleaner) do
- described_class.new(parent_table: '_test_loose_fk_parent_table',
- loose_foreign_key_definitions: loose_foreign_key_definitions,
- deleted_parent_records: LooseForeignKeys::DeletedRecord.load_batch_for_table('public._test_loose_fk_parent_table', 100),
- modification_tracker: modification_tracker
- )
+ described_class.new(
+ parent_table: '_test_loose_fk_parent_table',
+ loose_foreign_key_definitions: loose_foreign_key_definitions,
+ deleted_parent_records: LooseForeignKeys::DeletedRecord.load_batch_for_table('public._test_loose_fk_parent_table', 100),
+ modification_tracker: modification_tracker
+ )
end
before do
diff --git a/spec/services/loose_foreign_keys/process_deleted_records_service_spec.rb b/spec/services/loose_foreign_keys/process_deleted_records_service_spec.rb
index b59339b24b4..20a193e3b01 100644
--- a/spec/services/loose_foreign_keys/process_deleted_records_service_spec.rb
+++ b/spec/services/loose_foreign_keys/process_deleted_records_service_spec.rb
@@ -162,7 +162,9 @@ RSpec.describe LooseForeignKeys::ProcessDeletedRecordsService, feature_category:
end
before do
- stub_const('LooseForeignKeys::ModificationTracker::MAX_DELETES', 2)
+ allow_next_instance_of(LooseForeignKeys::ModificationTracker) do |instance|
+ allow(instance).to receive(:max_deletes).and_return(2)
+ end
stub_const('LooseForeignKeys::CleanerService::DELETE_LIMIT', 1)
end
diff --git a/spec/services/members/invitation_reminder_email_service_spec.rb b/spec/services/members/invitation_reminder_email_service_spec.rb
index 2b72a4919b4..a3c2e994c2e 100644
--- a/spec/services/members/invitation_reminder_email_service_spec.rb
+++ b/spec/services/members/invitation_reminder_email_service_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Members::InvitationReminderEmailService, feature_category: :group
with_them do
# Create an invitation today with an expiration date from 0 to 10 days in the future or without an expiration date
# We chose 10 days here, because we fetch invitations that were created at most 10 days ago.
- (0..10).each do |day|
+ 11.times do |day|
it 'sends an invitation reminder only on the expected days' do
next if day > (expires_at_days || 10) # We don't need to test after the invitation has already expired
diff --git a/spec/services/merge_requests/approval_service_spec.rb b/spec/services/merge_requests/approval_service_spec.rb
index 6140021c8d2..81fc5661032 100644
--- a/spec/services/merge_requests/approval_service_spec.rb
+++ b/spec/services/merge_requests/approval_service_spec.rb
@@ -78,6 +78,51 @@ RSpec.describe MergeRequests::ApprovalService, feature_category: :code_review_wo
service.execute(merge_request)
end
+ context 'when generating a patch_id_sha' do
+ it 'records a value' do
+ service.execute(merge_request)
+
+ expect(merge_request.approvals.last.patch_id_sha).not_to be_nil
+ end
+
+ context 'when base_sha is nil' do
+ it 'records patch_id_sha as nil' do
+ expect_next_instance_of(Gitlab::Diff::DiffRefs) do |diff_ref|
+ expect(diff_ref).to receive(:base_sha).at_least(:once).and_return(nil)
+ end
+
+ service.execute(merge_request)
+
+ expect(merge_request.approvals.last.patch_id_sha).to be_nil
+ end
+ end
+
+ context 'when head_sha is nil' do
+ it 'records patch_id_sha as nil' do
+ expect_next_instance_of(Gitlab::Diff::DiffRefs) do |diff_ref|
+ expect(diff_ref).to receive(:head_sha).at_least(:once).and_return(nil)
+ end
+
+ service.execute(merge_request)
+
+ expect(merge_request.approvals.last.patch_id_sha).to be_nil
+ end
+ end
+
+ context 'when base_sha and head_sha match' do
+ it 'records patch_id_sha as nil' do
+ expect_next_instance_of(Gitlab::Diff::DiffRefs) do |diff_ref|
+ expect(diff_ref).to receive(:base_sha).at_least(:once).and_return("abc123")
+ expect(diff_ref).to receive(:head_sha).at_least(:once).and_return("abc123")
+ end
+
+ service.execute(merge_request)
+
+ expect(merge_request.approvals.last.patch_id_sha).to be_nil
+ end
+ end
+ end
+
it 'publishes MergeRequests::ApprovedEvent' do
expect { service.execute(merge_request) }
.to publish_event(MergeRequests::ApprovedEvent)
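The added contexts pin down when an approval records a patch_id_sha: only when both base_sha and head_sha exist and differ. A minimal sketch of that guard, assuming a repository helper along the lines of get_patch_id (the helper name is an assumption here, not confirmed by this diff):

# Illustrative only: the nil/skip behaviour asserted by the contexts above.
def patch_id_sha_for(merge_request)
  diff_refs = merge_request.diff_refs
  base_sha  = diff_refs&.base_sha
  head_sha  = diff_refs&.head_sha

  return if base_sha.nil? || head_sha.nil? # nothing to diff against
  return if base_sha == head_sha           # empty diff, no patch id

  merge_request.project.repository.get_patch_id(base_sha, head_sha)
end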
diff --git a/spec/services/merge_requests/base_service_spec.rb b/spec/services/merge_requests/base_service_spec.rb
index 1ca4bfe622c..6a8758c8684 100644
--- a/spec/services/merge_requests/base_service_spec.rb
+++ b/spec/services/merge_requests/base_service_spec.rb
@@ -141,4 +141,32 @@ RSpec.describe MergeRequests::BaseService, feature_category: :code_review_workfl
describe '#constructor_container_arg' do
it { expect(described_class.constructor_container_arg("some-value")).to eq({ project: "some-value" }) }
end
+
+ describe '#inspect' do
+ context 'when #merge_request is defined' do
+ let(:klass) do
+ Class.new(described_class) do
+ def merge_request
+ params[:merge_request]
+ end
+ end
+ end
+
+ let(:params) { {} }
+
+ subject do
+ klass
+ .new(project: nil, current_user: nil, params: params)
+ .inspect
+ end
+
+ it { is_expected.to eq "#<#{klass}>" }
+
+ context 'when merge request is present' do
+ let(:params) { { merge_request: build(:merge_request) } }
+
+ it { is_expected.to eq "#<#{klass} #{params[:merge_request].to_reference(full: true)}>" }
+ end
+ end
+ end
end
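The new #inspect examples fix the expected output for services that expose a merge_request: the bare class name when none is set, and the full MR reference when one is. A sketch of an implementation consistent with those expectations (illustrative, not necessarily the shipped code):

def inspect
  return "#<#{self.class}>" unless respond_to?(:merge_request) && merge_request

  "#<#{self.class} #{merge_request.to_reference(full: true)}>"
end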
diff --git a/spec/services/merge_requests/create_ref_service_spec.rb b/spec/services/merge_requests/create_ref_service_spec.rb
index 85ac651c1fa..5f7b9430416 100644
--- a/spec/services/merge_requests/create_ref_service_spec.rb
+++ b/spec/services/merge_requests/create_ref_service_spec.rb
@@ -6,13 +6,14 @@ RSpec.describe MergeRequests::CreateRefService, feature_category: :merge_trains
using RSpec::Parameterized::TableSyntax
describe '#execute' do
- let_it_be(:project) { create(:project, :empty_repo) }
+ let_it_be_with_reload(:project) { create(:project, :empty_repo) }
let_it_be(:user) { project.creator }
let_it_be(:first_parent_ref) { project.default_branch_or_main }
let_it_be(:source_branch) { 'branch' }
let(:target_ref) { "refs/merge-requests/#{merge_request.iid}/train" }
let(:source_sha) { project.commit(source_branch).sha }
let(:squash) { false }
+ let(:default_commit_message) { merge_request.default_merge_commit_message(user: user) }
let(:merge_request) do
create(
@@ -84,36 +85,40 @@ RSpec.describe MergeRequests::CreateRefService, feature_category: :merge_trains
)
end
- it 'writes the merged result into target_ref', :aggregate_failures do
- expect(result[:status]).to eq :success
- expect(result[:commit_sha]).to eq(project.repository.commit(target_ref).sha)
- expect(result[:source_sha]).to eq(project.repository.commit(target_ref).parents[1].sha)
- expect(result[:target_sha]).to eq(project.repository.commit(first_parent_ref).sha)
- expect(project.repository.commits(target_ref, limit: 10, order: 'topo').map(&:message)).to(
- match(
- [
- a_string_matching(/Merge branch '#{source_branch}' into '#{first_parent_ref}'/),
- 'Feature branch commit 2',
- 'Feature branch commit 1',
- 'Base parent commit 2',
- 'Base parent commit 1'
- ]
+ shared_examples_for 'writing with a merge commit' do
+ it 'merges with a merge commit', :aggregate_failures do
+ expect(result[:status]).to eq :success
+ expect(result[:commit_sha]).to eq(project.repository.commit(target_ref).sha)
+ expect(result[:source_sha]).to eq(project.repository.commit(source_branch).sha)
+ expect(result[:target_sha]).to eq(project.repository.commit(first_parent_ref).sha)
+ expect(result[:merge_commit_sha]).to be_present
+ expect(result[:squash_commit_sha]).not_to be_present
+ expect(project.repository.commits(target_ref, limit: 10, order: 'topo').map(&:message)).to(
+ match(
+ [
+ expected_merge_commit,
+ 'Feature branch commit 2',
+ 'Feature branch commit 1',
+ 'Base parent commit 2',
+ 'Base parent commit 1'
+ ]
+ )
)
- )
+ end
end
- context 'when squash is requested' do
- let(:squash) { true }
-
+ shared_examples_for 'writing with a squash and merge commit' do
it 'writes the squashed result', :aggregate_failures do
expect(result[:status]).to eq :success
expect(result[:commit_sha]).to eq(project.repository.commit(target_ref).sha)
- expect(result[:source_sha]).to eq(project.repository.commit(target_ref).parents[1].sha)
+ expect(result[:source_sha]).to eq(project.repository.commit(source_branch).sha)
expect(result[:target_sha]).to eq(project.repository.commit(first_parent_ref).sha)
+ expect(result[:merge_commit_sha]).to be_present
+ expect(result[:squash_commit_sha]).to be_present
expect(project.repository.commits(target_ref, limit: 10, order: 'topo').map(&:message)).to(
match(
[
- a_string_matching(/Merge branch '#{source_branch}' into '#{first_parent_ref}'/),
+ expected_merge_commit,
"#{merge_request.title}\n",
'Base parent commit 2',
'Base parent commit 1'
@@ -123,23 +128,18 @@ RSpec.describe MergeRequests::CreateRefService, feature_category: :merge_trains
end
end
- context 'when semi-linear merges are enabled' do
- before do
- project.merge_method = :rebase_merge
- project.save!
- end
-
- it 'writes the semi-linear merged result', :aggregate_failures do
+ shared_examples_for 'writing with a squash and no merge commit' do
+ it 'writes the squashed result without a merge commit', :aggregate_failures do
expect(result[:status]).to eq :success
expect(result[:commit_sha]).to eq(project.repository.commit(target_ref).sha)
- expect(result[:source_sha]).to eq(project.repository.commit(target_ref).parents[1].sha)
+ expect(result[:source_sha]).to eq(project.repository.commit(source_branch).sha)
expect(result[:target_sha]).to eq(project.repository.commit(first_parent_ref).sha)
+ expect(result[:merge_commit_sha]).not_to be_present
+ expect(result[:squash_commit_sha]).to be_present
expect(project.repository.commits(target_ref, limit: 10, order: 'topo').map(&:message)).to(
match(
[
- a_string_matching(/Merge branch '#{source_branch}' into '#{first_parent_ref}'/),
- 'Feature branch commit 2',
- 'Feature branch commit 1',
+ "#{merge_request.title}\n",
'Base parent commit 2',
'Base parent commit 1'
]
@@ -148,17 +148,14 @@ RSpec.describe MergeRequests::CreateRefService, feature_category: :merge_trains
end
end
- context 'when fast-forward merges are enabled' do
- before do
- project.merge_method = :ff
- project.save!
- end
-
+ shared_examples_for 'writing without a merge commit' do
it 'writes the rebased merged result', :aggregate_failures do
expect(result[:status]).to eq :success
expect(result[:commit_sha]).to eq(project.repository.commit(target_ref).sha)
- expect(result[:source_sha]).to eq(project.repository.commit(target_ref).sha)
+ expect(result[:source_sha]).to eq(project.repository.commit(source_branch).sha)
expect(result[:target_sha]).to eq(project.repository.commit(first_parent_ref).sha)
+ expect(result[:merge_commit_sha]).not_to be_present
+ expect(result[:squash_commit_sha]).not_to be_present
expect(project.repository.commits(target_ref, limit: 10, order: 'topo').map(&:message)).to(
eq(
[
@@ -171,6 +168,114 @@ RSpec.describe MergeRequests::CreateRefService, feature_category: :merge_trains
)
end
end
+
+ shared_examples 'merge commits without squash' do
+ context 'with a custom template' do
+ let(:expected_merge_commit) { 'This is the merge commit' } # could also be default_commit_message
+
+ before do
+ project.project_setting.update!(merge_commit_template: expected_merge_commit)
+ end
+
+ it_behaves_like 'writing with a merge commit'
+ end
+
+ context 'with no custom template' do
+ let(:expected_merge_commit) { default_commit_message }
+
+ before do
+ project.project_setting.update!(merge_commit_template: nil)
+ end
+
+ it_behaves_like 'writing with a merge commit'
+ end
+ end
+
+ shared_examples 'merge commits with squash' do
+ context 'when squash set' do
+ let(:squash) { true }
+ let(:expected_merge_commit) { merge_request.default_merge_commit_message(user: user) }
+
+ before do
+ project.project_setting.update!(merge_commit_template: 'This is the merge commit')
+ end
+
+ it_behaves_like 'writing with a squash and merge commit'
+ end
+ end
+
+ context 'when the merge commit message is provided at time of merge' do
+ let(:expected_merge_commit) { 'something custom' }
+
+ before do
+ merge_request.merge_params['commit_message'] = expected_merge_commit
+ end
+
+ it 'writes the merged result', :aggregate_failures do
+ expect(result[:status]).to eq :success
+ expect(project.repository.commits(target_ref, limit: 1, order: 'topo').map(&:message)).to(
+ match([expected_merge_commit])
+ )
+ end
+
+ context 'when squash set' do
+ let(:squash) { true }
+
+ it_behaves_like 'writing with a squash and merge commit'
+ end
+ end
+
+ context 'when merge commit strategy' do
+ include_examples 'merge commits without squash'
+ include_examples 'merge commits with squash'
+ end
+
+ context 'when semi-linear merge strategy' do
+ before do
+ project.merge_method = :rebase_merge
+ project.save!
+ end
+
+ include_examples 'merge commits without squash'
+ include_examples 'merge commits with squash'
+
+ context 'when the target ref changes between rebase and merge' do
+ # this tests internal handling of expected_old_oid
+
+ it 'returns an error', :aggregate_failures do
+ expect_next_instance_of(described_class) do |instance|
+ original = instance.method(:maybe_merge!)
+
+ expect(instance).to receive(:maybe_merge!) do |*args|
+ # Corrupt target_ref before the merge, simulating a race with
+ # another instance of the service for the same MR. source_sha is
+ # just an arbitrary valid commit that differs from what was just
+ # written.
+ project.repository.write_ref(target_ref, source_sha)
+ original.call(*args)
+ end
+ end
+
+ expect(result[:status]).to eq :error
+ expect(result[:message]).to eq "9:Could not update #{target_ref}. Please refresh and try again."
+ end
+ end
+ end
+
+ context 'when fast-forward merge strategy' do
+ before do
+ project.merge_method = :ff
+ project.save!
+ end
+
+ it_behaves_like 'writing without a merge commit'
+
+ context 'when squash set' do
+ let(:squash) { true }
+
+ it_behaves_like 'writing with a squash and no merge commit'
+ end
+ end
end
end
end
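Taken together, the shared examples above pin down which commit SHAs each merge method should produce on the train ref. An illustrative summary of that matrix as data (keys mirror the result hash used in the expectations; this is not service code):

# merge method (+ squash flag) => which SHAs the result is expected to carry
EXPECTED_RESULT_KEYS = {
  [:merge, false]        => { merge_commit_sha: true,  squash_commit_sha: false },
  [:merge, true]         => { merge_commit_sha: true,  squash_commit_sha: true  },
  [:rebase_merge, false] => { merge_commit_sha: true,  squash_commit_sha: false },
  [:rebase_merge, true]  => { merge_commit_sha: true,  squash_commit_sha: true  },
  [:ff, false]           => { merge_commit_sha: false, squash_commit_sha: false },
  [:ff, true]            => { merge_commit_sha: false, squash_commit_sha: true  }
}.freeze
# In every case result[:source_sha] now points at the source branch head and
# result[:target_sha] at the head of first_parent_ref.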
diff --git a/spec/services/merge_requests/ff_merge_service_spec.rb b/spec/services/merge_requests/ff_merge_service_spec.rb
deleted file mode 100644
index c48ed19e40d..00000000000
--- a/spec/services/merge_requests/ff_merge_service_spec.rb
+++ /dev/null
@@ -1,144 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe MergeRequests::FfMergeService, feature_category: :code_review_workflow do
- let(:user) { create(:user) }
- let(:user2) { create(:user) }
- let(:merge_request) do
- create(
- :merge_request,
- source_branch: 'flatten-dir',
- target_branch: 'improve/awesome',
- assignees: [user2],
- author: create(:user)
- )
- end
-
- let(:project) { merge_request.project }
- let(:valid_merge_params) { { sha: merge_request.diff_head_sha } }
-
- before do
- stub_feature_flags(refactor_merge_service: false)
- project.add_maintainer(user)
- project.add_developer(user2)
- end
-
- describe '#execute' do
- context 'valid params' do
- let(:service) { described_class.new(project: project, current_user: user, params: valid_merge_params) }
-
- def execute_ff_merge
- perform_enqueued_jobs do
- service.execute(merge_request)
- end
- end
-
- before do
- allow(service).to receive(:execute_hooks)
- end
-
- it "does not create merge commit" do
- execute_ff_merge
-
- source_branch_sha = merge_request.source_project.repository.commit(merge_request.source_branch).sha
- target_branch_sha = merge_request.target_project.repository.commit(merge_request.target_branch).sha
-
- expect(source_branch_sha).to eq(target_branch_sha)
- end
-
- it 'keeps the merge request valid' do
- expect { execute_ff_merge }
- .not_to change { merge_request.valid? }
- end
-
- it 'updates the merge request to merged' do
- expect { execute_ff_merge }
- .to change { merge_request.merged? }
- .from(false)
- .to(true)
- end
-
- it 'sends email to user2 about merge of new merge_request' do
- execute_ff_merge
-
- email = ActionMailer::Base.deliveries.last
- expect(email.to.first).to eq(user2.email)
- expect(email.subject).to include(merge_request.title)
- end
-
- it 'creates resource event about merge_request merge' do
- execute_ff_merge
-
- event = merge_request.resource_state_events.last
- expect(event.state).to eq('merged')
- end
-
- it 'does not update squash_commit_sha if it is not a squash' do
- expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
-
- expect { execute_ff_merge }.not_to change { merge_request.squash_commit_sha }
- expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.in_progress_merge_commit_sha).to be_nil
- end
-
- it 'updates squash_commit_sha if it is a squash' do
- expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
-
- merge_request.update!(squash: true)
-
- expect { execute_ff_merge }
- .to change { merge_request.squash_commit_sha }
- .from(nil)
-
- expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.in_progress_merge_commit_sha).to be_nil
- end
- end
-
- context 'error handling' do
- let(:service) { described_class.new(project: project, current_user: user, params: valid_merge_params.merge(commit_message: 'Awesome message')) }
-
- before do
- allow(Gitlab::AppLogger).to receive(:error)
- end
-
- it 'logs and saves error if there is an exception' do
- error_message = 'error message'
-
- allow(service).to receive(:repository).and_raise("error message")
- allow(service).to receive(:execute_hooks)
-
- service.execute(merge_request)
-
- expect(Gitlab::AppLogger).to have_received(:error)
- .with(hash_including(message: a_string_matching(error_message)))
- end
-
- it 'logs and saves error if there is an PreReceiveError exception' do
- error_message = 'error message'
- raw_message = 'The truth is out there'
-
- pre_receive_error = Gitlab::Git::PreReceiveError.new(raw_message, fallback_message: error_message)
- allow(service).to receive(:repository).and_raise(pre_receive_error)
- allow(service).to receive(:execute_hooks)
-
- service.execute(merge_request)
-
- expect(merge_request.merge_error).to include(error_message)
- expect(Gitlab::AppLogger).to have_received(:error)
- .with(hash_including(message: a_string_matching(error_message)))
- end
-
- it 'does not update squash_commit_sha if squash merge is not successful' do
- merge_request.update!(squash: true)
-
- expect(project.repository.raw).to receive(:ff_merge) do
- raise 'Merge error'
- end
-
- expect { service.execute(merge_request) }.not_to change { merge_request.squash_commit_sha }
- end
- end
- end
-end
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index 1faa1fd3644..6e34f4362c1 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -8,437 +8,506 @@ RSpec.describe MergeRequests::MergeService, feature_category: :code_review_workf
let_it_be(:user) { create(:user) }
let_it_be(:user2) { create(:user) }
- where(:ff_refactor_merge_service_enabled) { [true, false] }
+ let(:merge_request) { create(:merge_request, :simple, author: user2, assignees: [user2]) }
+ let(:project) { merge_request.project }
- with_them do
- let(:merge_request) { create(:merge_request, :simple, author: user2, assignees: [user2]) }
- let(:project) { merge_request.project }
-
- before do
- stub_feature_flags(refactor_merge_service: ff_refactor_merge_service_enabled)
+ before do
+ project.add_maintainer(user)
+ project.add_developer(user2)
+ end
- project.add_maintainer(user)
- project.add_developer(user2)
+ describe '#execute' do
+ let(:service) { described_class.new(project: project, current_user: user, params: merge_params) }
+ let(:merge_params) do
+ { commit_message: 'Awesome message', sha: merge_request.diff_head_sha }
end
- describe '#execute' do
- let(:service) { described_class.new(project: project, current_user: user, params: merge_params) }
- let(:merge_params) do
- { commit_message: 'Awesome message', sha: merge_request.diff_head_sha }
- end
+ let(:lease_key) { "merge_requests_merge_service:#{merge_request.id}" }
+ let!(:lease) { stub_exclusive_lease(lease_key) }
- let(:lease_key) { "merge_requests_merge_service:#{merge_request.id}" }
- let!(:lease) { stub_exclusive_lease(lease_key) }
+ shared_examples 'with valid params' do
+ before do
+ allow(service).to receive(:execute_hooks)
+ expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
- shared_examples 'with valid params' do
- before do
- allow(service).to receive(:execute_hooks)
- expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
-
- perform_enqueued_jobs do
- service.execute(merge_request)
- end
+ perform_enqueued_jobs do
+ service.execute(merge_request)
end
+ end
- it { expect(merge_request).to be_valid }
- it { expect(merge_request).to be_merged }
+ it { expect(merge_request).to be_valid }
+ it { expect(merge_request).to be_merged }
- it 'does not update squash_commit_sha if it is not a squash' do
- expect(merge_request.squash_commit_sha).to be_nil
- end
+ it 'does not update squash_commit_sha if it is not a squash' do
+ expect(merge_request.squash_commit_sha).to be_nil
+ end
- it 'sends email to user2 about merge of new merge_request' do
- email = ActionMailer::Base.deliveries.last
- expect(email.to.first).to eq(user2.email)
- expect(email.subject).to include(merge_request.title)
- end
+ it 'sends email to user2 about merge of new merge_request' do
+ email = ActionMailer::Base.deliveries.last
+ expect(email.to.first).to eq(user2.email)
+ expect(email.subject).to include(merge_request.title)
+ end
- context 'note creation' do
- it 'creates resource state event about merge_request merge' do
- event = merge_request.resource_state_events.last
- expect(event.state).to eq('merged')
- end
+ context 'note creation' do
+ it 'creates resource state event about merge_request merge' do
+ event = merge_request.resource_state_events.last
+ expect(event.state).to eq('merged')
end
end
+ end
- shared_examples 'squashing' do
- # A merge request with 5 commits
- let(:merge_request) do
- create(
- :merge_request,
- :simple,
- author: user2,
- assignees: [user2],
- squash: true,
- source_branch: 'improve/awesome',
- target_branch: 'fix'
- )
- end
+ shared_examples 'squashing' do
+ # A merge request with 5 commits
+ let(:merge_request) do
+ create(
+ :merge_request,
+ :simple,
+ author: user2,
+ assignees: [user2],
+ squash: true,
+ source_branch: 'improve/awesome',
+ target_branch: 'fix'
+ )
+ end
- let(:merge_params) do
- { commit_message: 'Merge commit message',
- squash_commit_message: 'Squash commit message',
- sha: merge_request.diff_head_sha }
- end
+ let(:merge_params) do
+ { commit_message: 'Merge commit message',
+ squash_commit_message: 'Squash commit message',
+ sha: merge_request.diff_head_sha }
+ end
- before do
- allow(service).to receive(:execute_hooks)
- expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
+ before do
+ allow(service).to receive(:execute_hooks)
+ expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
- perform_enqueued_jobs do
- service.execute(merge_request)
- end
+ perform_enqueued_jobs do
+ service.execute(merge_request)
end
+ end
- it 'merges the merge request with squashed commits' do
- expect(merge_request).to be_merged
+ it 'merges the merge request with squashed commits' do
+ expect(merge_request).to be_merged
- merge_commit = merge_request.merge_commit
- squash_commit = merge_request.merge_commit.parents.last
+ merge_commit = merge_request.merge_commit
+ squash_commit = merge_request.merge_commit.parents.last
- expect(merge_commit.message).to eq('Merge commit message')
- expect(squash_commit.message).to eq("Squash commit message\n")
- end
+ expect(merge_commit.message).to eq('Merge commit message')
+ expect(squash_commit.message).to eq("Squash commit message\n")
+ end
- it 'persists squash_commit_sha' do
- squash_commit = merge_request.merge_commit.parents.last
+ it 'persists squash_commit_sha' do
+ squash_commit = merge_request.merge_commit.parents.last
- expect(merge_request.squash_commit_sha).to eq(squash_commit.id)
- end
+ expect(merge_request.squash_commit_sha).to eq(squash_commit.id)
end
+ end
- context 'when merge strategy is merge commit' do
- it 'persists merge_commit_sha and nullifies in_progress_merge_commit_sha' do
- service.execute(merge_request)
+ context 'when merge strategy is merge commit' do
+ it 'persists merge_commit_sha and merged_commit_sha and nullifies in_progress_merge_commit_sha' do
+ service.execute(merge_request)
- expect(merge_request.merge_commit_sha).not_to be_nil
- expect(merge_request.in_progress_merge_commit_sha).to be_nil
- end
+ expect(merge_request.merge_commit_sha).not_to be_nil
+ expect(merge_request.merged_commit_sha).to eq merge_request.merge_commit_sha
+ expect(merge_request.in_progress_merge_commit_sha).to be_nil
+ end
- it_behaves_like 'with valid params'
+ it_behaves_like 'with valid params'
- it_behaves_like 'squashing'
+ it_behaves_like 'squashing'
+ end
+
+ context 'when merge strategy is fast forward' do
+ before do
+ project.update!(merge_requests_ff_only_enabled: true)
end
- context 'when merge strategy is fast forward' do
- before do
- project.update!(merge_requests_ff_only_enabled: true)
- end
+ let(:merge_request) do
+ create(
+ :merge_request,
+ source_branch: 'flatten-dir',
+ target_branch: 'improve/awesome',
+ assignees: [user2],
+ author: create(:user)
+ )
+ end
- let(:merge_request) do
- create(
- :merge_request,
- source_branch: 'flatten-dir',
- target_branch: 'improve/awesome',
- assignees: [user2],
- author: create(:user)
- )
- end
+ it 'does not create merge_commit_sha, but persists merged_commit_sha and nullifies in_progress_merge_commit_sha' do
+ service.execute(merge_request)
- it 'does not create merge_commit_sha and nullifies in_progress_merge_commit_sha' do
- service.execute(merge_request)
+ expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.merged_commit_sha).not_to be_nil
+ expect(merge_request.merged_commit_sha).to eq merge_request.diff_head_sha
+ expect(merge_request.in_progress_merge_commit_sha).to be_nil
+ end
- expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.in_progress_merge_commit_sha).to be_nil
- end
+ it_behaves_like 'with valid params'
- it_behaves_like 'with valid params'
+ it 'updates squash_commit_sha and merged_commit_sha if it is a squash' do
+ expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
- it 'updates squash_commit_sha if it is a squash' do
- expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
+ merge_request.update!(squash: true)
- merge_request.update!(squash: true)
+ expect { service.execute(merge_request) }
+ .to change { merge_request.squash_commit_sha }
+ .from(nil)
- expect { service.execute(merge_request) }
- .to change { merge_request.squash_commit_sha }
- .from(nil)
+ expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.merged_commit_sha).to eq merge_request.squash_commit_sha
+ expect(merge_request.in_progress_merge_commit_sha).to be_nil
+ end
+ end
- expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.in_progress_merge_commit_sha).to be_nil
- end
+ context 'running the service once' do
+ let(:ref) { merge_request.to_reference(full: true) }
+ let(:jid) { SecureRandom.hex }
+
+ let(:messages) do
+ [
+ /#{ref} - Git merge started on JID #{jid}/,
+ /#{ref} - Git merge finished on JID #{jid}/,
+ /#{ref} - Post merge started on JID #{jid}/,
+ /#{ref} - Post merge finished on JID #{jid}/,
+ /#{ref} - Merge process finished on JID #{jid}/
+ ]
+ end
+
+ before do
+ merge_request.update!(merge_jid: jid)
+ ::Gitlab::ApplicationContext.push(caller_id: 'MergeWorker')
end
- context 'running the service once' do
- let(:ref) { merge_request.to_reference(full: true) }
- let(:jid) { SecureRandom.hex }
+ it 'logs status messages' do
+ allow(Gitlab::AppLogger).to receive(:info).and_call_original
- let(:messages) do
- [
- /#{ref} - Git merge started on JID #{jid}/,
- /#{ref} - Git merge finished on JID #{jid}/,
- /#{ref} - Post merge started on JID #{jid}/,
- /#{ref} - Post merge finished on JID #{jid}/,
- /#{ref} - Merge process finished on JID #{jid}/
- ]
+ messages.each do |message|
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ hash_including(
+ 'meta.caller_id' => 'MergeWorker',
+ message: message,
+ merge_request_info: ref
+ )
+ ).and_call_original
end
- before do
- merge_request.update!(merge_jid: jid)
- ::Gitlab::ApplicationContext.push(caller_id: 'MergeWorker')
- end
+ service.execute(merge_request)
+ end
+ end
- it 'logs status messages' do
- allow(Gitlab::AppLogger).to receive(:info).and_call_original
+ context 'running the service multiple times' do
+ it 'is idempotent' do
+ 2.times { service.execute(merge_request) }
- messages.each do |message|
- expect(Gitlab::AppLogger).to receive(:info).with(
- hash_including(
- 'meta.caller_id' => 'MergeWorker',
- message: message,
- merge_request_info: ref
- )
- ).and_call_original
- end
+ expect(merge_request.merge_error).to be_falsey
+ expect(merge_request).to be_valid
+ expect(merge_request).to be_merged
- service.execute(merge_request)
- end
+ commit_messages = project.repository.commits('master', limit: 2).map(&:message)
+ expect(commit_messages.uniq.size).to eq(2)
+ expect(merge_request.in_progress_merge_commit_sha).to be_nil
end
+ end
- context 'running the service multiple time' do
- it 'is idempotent' do
- 2.times { service.execute(merge_request) }
+ context 'when an invalid sha is passed' do
+ let(:merge_request) do
+ create(
+ :merge_request,
+ :simple,
+ author: user2,
+ assignees: [user2],
+ squash: true,
+ source_branch: 'improve/awesome',
+ target_branch: 'fix'
+ )
+ end
- expect(merge_request.merge_error).to be_falsey
- expect(merge_request).to be_valid
- expect(merge_request).to be_merged
+ let(:merge_params) do
+ { sha: merge_request.commits.second.sha }
+ end
- commit_messages = project.repository.commits('master', limit: 2).map(&:message)
- expect(commit_messages.uniq.size).to eq(2)
- expect(merge_request.in_progress_merge_commit_sha).to be_nil
- end
+ it 'does not merge the MR' do
+ service.execute(merge_request)
+
+ expect(merge_request).not_to be_merged
+ expect(merge_request.merge_error).to match(/Branch has been updated/)
end
+ end
- context 'when an invalid sha is passed' do
- let(:merge_request) do
- create(
- :merge_request,
- :simple,
- author: user2,
- assignees: [user2],
- squash: true,
- source_branch: 'improve/awesome',
- target_branch: 'fix'
- )
- end
+ context 'when the `sha` param is missing' do
+ let(:merge_params) { {} }
- let(:merge_params) do
- { sha: merge_request.commits.second.sha }
- end
+ it 'returns the error' do
+ merge_error = 'Branch has been updated since the merge was requested. '\
+ 'Please review the changes.'
- it 'does not merge the MR' do
- service.execute(merge_request)
+ expect { service.execute(merge_request) }
+ .to change { merge_request.merge_error }
+ .from(nil).to(merge_error)
+ end
+ end
- expect(merge_request).not_to be_merged
- expect(merge_request.merge_error).to match(/Branch has been updated/)
- end
+ context 'closes related issues' do
+ before do
+ allow(project).to receive(:default_branch).and_return(merge_request.target_branch)
end
- context 'when the `sha` param is missing' do
- let(:merge_params) { {} }
+ it 'closes GitLab issue tracker issues', :sidekiq_inline do
+ issue = create :issue, project: project
+ commit = double('commit', safe_message: "Fixes #{issue.to_reference}", date: Time.current, authored_date: Time.current)
+ allow(merge_request).to receive(:commits).and_return([commit])
+ merge_request.cache_merge_request_closes_issues!
- it 'returns the error' do
- merge_error = 'Branch has been updated since the merge was requested. '\
- 'Please review the changes.'
+ service.execute(merge_request)
- expect { service.execute(merge_request) }
- .to change { merge_request.merge_error }
- .from(nil).to(merge_error)
- end
+ expect(issue.reload.closed?).to be_truthy
end
- context 'closes related issues' do
+ context 'with Jira integration' do
+ include JiraIntegrationHelpers
+
+ let(:jira_tracker) { project.create_jira_integration }
+ let(:jira_issue) { ExternalIssue.new('JIRA-123', project) }
+ let(:commit) { double('commit', safe_message: "Fixes #{jira_issue.to_reference}") }
+
before do
- allow(project).to receive(:default_branch).and_return(merge_request.target_branch)
+ stub_jira_integration_test
+ project.update!(has_external_issue_tracker: true)
+ jira_integration_settings
+ stub_jira_urls(jira_issue.id)
+ allow(merge_request).to receive(:commits).and_return([commit])
end
- it 'closes GitLab issue tracker issues', :sidekiq_inline do
- issue = create :issue, project: project
- commit = double('commit', safe_message: "Fixes #{issue.to_reference}", date: Time.current, authored_date: Time.current)
+ it 'closes issues on Jira issue tracker' do
+ jira_issue = ExternalIssue.new('JIRA-123', project)
+ stub_jira_urls(jira_issue)
+ commit = double('commit', safe_message: "Fixes #{jira_issue.to_reference}")
allow(merge_request).to receive(:commits).and_return([commit])
- merge_request.cache_merge_request_closes_issues!
- service.execute(merge_request)
+ expect_any_instance_of(Integrations::Jira).to receive(:close_issue).with(merge_request, jira_issue, user).once
- expect(issue.reload.closed?).to be_truthy
+ service.execute(merge_request)
end
- context 'with Jira integration' do
- include JiraIntegrationHelpers
-
- let(:jira_tracker) { project.create_jira_integration }
- let(:jira_issue) { ExternalIssue.new('JIRA-123', project) }
- let(:commit) { double('commit', safe_message: "Fixes #{jira_issue.to_reference}") }
-
- before do
- stub_jira_integration_test
- project.update!(has_external_issue_tracker: true)
- jira_integration_settings
- stub_jira_urls(jira_issue.id)
- allow(merge_request).to receive(:commits).and_return([commit])
- end
-
- it 'closes issues on Jira issue tracker' do
- jira_issue = ExternalIssue.new('JIRA-123', project)
+ context 'wrong issue markdown' do
+ it 'does not close issues on Jira issue tracker' do
+ jira_issue = ExternalIssue.new('#JIRA-123', project)
stub_jira_urls(jira_issue)
commit = double('commit', safe_message: "Fixes #{jira_issue.to_reference}")
allow(merge_request).to receive(:commits).and_return([commit])
- expect_any_instance_of(Integrations::Jira).to receive(:close_issue).with(merge_request, jira_issue, user).once
+ expect_any_instance_of(Integrations::Jira).not_to receive(:close_issue)
service.execute(merge_request)
end
+ end
+ end
+ end
- context 'wrong issue markdown' do
- it 'does not close issues on Jira issue tracker' do
- jira_issue = ExternalIssue.new('#JIRA-123', project)
- stub_jira_urls(jira_issue)
- commit = double('commit', safe_message: "Fixes #{jira_issue.to_reference}")
- allow(merge_request).to receive(:commits).and_return([commit])
+ context 'closes related todos' do
+ let(:merge_request) { create(:merge_request, assignees: [user], author: user) }
+ let(:project) { merge_request.project }
- expect_any_instance_of(Integrations::Jira).not_to receive(:close_issue)
+ let!(:todo) do
+ create(:todo, :assigned,
+ project: project,
+ author: user,
+ user: user,
+ target: merge_request)
+ end
- service.execute(merge_request)
- end
- end
+ before do
+ allow(service).to receive(:execute_hooks)
+
+ perform_enqueued_jobs do
+ service.execute(merge_request)
+ todo.reload
end
end
- context 'closes related todos' do
- let(:merge_request) { create(:merge_request, assignees: [user], author: user) }
- let(:project) { merge_request.project }
+ it { expect(todo).to be_done }
+ end
- let!(:todo) do
- create(:todo, :assigned,
- project: project,
- author: user,
- user: user,
- target: merge_request)
+ context 'source branch removal' do
+ context 'when the source branch is protected' do
+ let(:service) do
+ described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => true))
end
before do
- allow(service).to receive(:execute_hooks)
-
- perform_enqueued_jobs do
- service.execute(merge_request)
- todo.reload
- end
+ create(:protected_branch, project: project, name: merge_request.source_branch)
end
- it { expect(todo).to be_done }
+ it 'does not delete the source branch' do
+ expect(::Branches::DeleteService).not_to receive(:new)
+
+ service.execute(merge_request)
+ end
end
- context 'source branch removal' do
- context 'when the source branch is protected' do
- let(:service) do
- described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => true))
- end
+ context 'when the source branch is the default branch' do
+ let(:service) do
+ described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => true))
+ end
+ before do
+ allow(project).to receive(:root_ref?).with(merge_request.source_branch).and_return(true)
+ end
+
+ it 'does not delete the source branch' do
+ expect(::Branches::DeleteService).not_to receive(:new)
+ service.execute(merge_request)
+ end
+ end
+
+ context 'when the source branch can be removed' do
+ context 'when MR author set the source branch to be removed' do
before do
- create(:protected_branch, project: project, name: merge_request.source_branch)
+ merge_request.update_attribute(:merge_params, { 'force_remove_source_branch' => '1' })
end
- it 'does not delete the source branch' do
- expect(::Branches::DeleteService).not_to receive(:new)
+ # Not a real use case. When a merger merges an MR, the merge param 'should_remove_source_branch' is defined
+ it 'removes the source branch using the author user' do
+ expect(::MergeRequests::DeleteSourceBranchWorker).to receive(:perform_async).with(merge_request.id, merge_request.source_branch_sha, merge_request.author.id)
service.execute(merge_request)
+
+ expect(merge_request.reload.should_remove_source_branch?).to be nil
+ end
+
+ context 'when the merger set the source branch not to be removed' do
+ let(:service) { described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => false)) }
+
+ it 'does not delete the source branch' do
+ expect(::MergeRequests::DeleteSourceBranchWorker).not_to receive(:perform_async)
+
+ service.execute(merge_request)
+
+ expect(merge_request.reload.should_remove_source_branch?).to be false
+ end
end
end
- context 'when the source branch is the default branch' do
+ context 'when MR merger set the source branch to be removed' do
let(:service) do
described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => true))
end
- before do
- allow(project).to receive(:root_ref?).with(merge_request.source_branch).and_return(true)
- end
+ it 'removes the source branch using the current user' do
+ expect(::MergeRequests::DeleteSourceBranchWorker).to receive(:perform_async).with(merge_request.id, merge_request.source_branch_sha, user.id)
- it 'does not delete the source branch' do
- expect(::Branches::DeleteService).not_to receive(:new)
service.execute(merge_request)
+
+ expect(merge_request.reload.should_remove_source_branch?).to be true
end
end
+ end
+ end
- context 'when the source branch can be removed' do
- context 'when MR author set the source branch to be removed' do
- before do
- merge_request.update_attribute(:merge_params, { 'force_remove_source_branch' => '1' })
- end
+ context 'error handling' do
+ before do
+ allow(Gitlab::AppLogger).to receive(:error)
+ end
- # Not a real use case. When a merger merges a MR , merge param 'should_remove_source_branch' is defined
- it 'removes the source branch using the author user' do
- expect(::MergeRequests::DeleteSourceBranchWorker).to receive(:perform_async).with(merge_request.id, merge_request.source_branch_sha, merge_request.author.id)
+ context 'when source is missing' do
+ it 'logs and saves error' do
+ allow(merge_request).to receive(:diff_head_sha) { nil }
- service.execute(merge_request)
+ error_message = 'No source for merge'
- expect(merge_request.reload.should_remove_source_branch?).to be nil
- end
+ service.execute(merge_request)
+
+ expect(merge_request.merge_error).to eq(error_message)
+ expect(Gitlab::AppLogger).to have_received(:error).with(
+ hash_including(
+ merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(error_message)
+ )
+ )
+ end
+ end
- context 'when the merger set the source branch not to be removed' do
- let(:service) { described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => false)) }
+ it 'logs and saves error if there is an exception' do
+ error_message = 'error message'
- it 'does not delete the source branch' do
- expect(::MergeRequests::DeleteSourceBranchWorker).not_to receive(:perform_async)
+ allow_next_instance_of(MergeRequests::MergeStrategies::FromSourceBranch) do |strategy|
+ allow(strategy).to receive(:execute_git_merge!).and_raise(error_message)
+ end
- service.execute(merge_request)
+ service.execute(merge_request)
- expect(merge_request.reload.should_remove_source_branch?).to be false
- end
- end
- end
+ expect(merge_request.merge_error).to eq(described_class::GENERIC_ERROR_MESSAGE)
+ expect(Gitlab::AppLogger).to have_received(:error).with(
+ hash_including(
+ merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(error_message)
+ )
+ )
+ end
- context 'when MR merger set the source branch to be removed' do
- let(:service) do
- described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => true))
- end
+ it 'logs and saves error if user is not authorized' do
+ stub_exclusive_lease
- it 'removes the source branch using the current user' do
- expect(::MergeRequests::DeleteSourceBranchWorker).to receive(:perform_async).with(merge_request.id, merge_request.source_branch_sha, user.id)
+ unauthorized_user = create(:user)
+ project.add_reporter(unauthorized_user)
- service.execute(merge_request)
+ service = described_class.new(project: project, current_user: unauthorized_user)
- expect(merge_request.reload.should_remove_source_branch?).to be true
- end
- end
- end
+ service.execute(merge_request)
+
+ expect(merge_request.merge_error)
+ .to eq('You are not allowed to merge this merge request')
end
- context 'error handling' do
- before do
- allow(Gitlab::AppLogger).to receive(:error)
+ it 'logs and saves error if there is a PreReceiveError exception' do
+ error_message = 'error message'
+
+ allow_next_instance_of(MergeRequests::MergeStrategies::FromSourceBranch) do |strategy|
+ allow(strategy).to receive(:execute_git_merge!).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{error_message}")
end
- context 'when source is missing' do
- it 'logs and saves error' do
- allow(merge_request).to receive(:diff_head_sha) { nil }
+ service.execute(merge_request)
- error_message = 'No source for merge'
+ expect(merge_request.merge_error).to include('Something went wrong during merge pre-receive hook')
+ expect(Gitlab::AppLogger).to have_received(:error).with(
+ hash_including(
+ merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(error_message)
+ )
+ )
+ end
- service.execute(merge_request)
+ it 'logs and saves error if commit is not created' do
+ allow_any_instance_of(Repository).to receive(:merge).and_return(false)
+ allow(service).to receive(:execute_hooks)
- expect(merge_request.merge_error).to eq(error_message)
- expect(Gitlab::AppLogger).to have_received(:error).with(
- hash_including(
- merge_request_info: merge_request.to_reference(full: true),
- message: a_string_matching(error_message)
- )
- )
- end
- end
+ service.execute(merge_request)
- it 'logs and saves error if there is an exception' do
- error_message = 'error message'
+ expect(merge_request).to be_open
+ expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.merge_error).to include(described_class::GENERIC_ERROR_MESSAGE)
+ expect(Gitlab::AppLogger).to have_received(:error).with(
+ hash_including(
+ merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(described_class::GENERIC_ERROR_MESSAGE)
+ )
+ )
+ end
- allow_next_instance_of(MergeRequests::MergeStrategies::FromSourceBranch) do |strategy|
- allow(strategy).to receive(:execute_git_merge!).and_raise(error_message)
- end
- # we can remove these allows upon refactor_merge_service cleanup
- allow(service).to receive(:repository).and_raise(error_message)
- allow(service).to receive(:execute_hooks)
+ context 'when squashing is required' do
+ before do
+ merge_request.update!(source_branch: 'master', target_branch: 'feature')
+ merge_request.target_project.project_setting.squash_always!
+ end
+
+ it 'raises an error if squashing is not done' do
+ error_message = 'requires squashing commits'
service.execute(merge_request)
- expect(merge_request.merge_error).to eq(described_class::GENERIC_ERROR_MESSAGE)
+ expect(merge_request).to be_open
+
+ expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.squash_commit_sha).to be_nil
+ expect(merge_request.merge_error).to include(error_message)
expect(Gitlab::AppLogger).to have_received(:error).with(
hash_including(
merge_request_info: merge_request.to_reference(full: true),
@@ -446,34 +515,25 @@ RSpec.describe MergeRequests::MergeService, feature_category: :code_review_workf
)
)
end
+ end
- it 'logs and saves error if user is not authorized' do
- stub_exclusive_lease
-
- unauthorized_user = create(:user)
- project.add_reporter(unauthorized_user)
-
- service = described_class.new(project: project, current_user: unauthorized_user)
-
- service.execute(merge_request)
-
- expect(merge_request.merge_error)
- .to eq('You are not allowed to merge this merge request')
+ context 'when squashing' do
+ before do
+ merge_request.update!(source_branch: 'master', target_branch: 'feature')
end
- it 'logs and saves error if there is an PreReceiveError exception' do
- error_message = 'error message'
+ it 'logs and saves error if there is an error when squashing' do
+ error_message = 'Squashing failed: Squash the commits locally, resolve any conflicts, then push the branch.'
- allow_next_instance_of(MergeRequests::MergeStrategies::FromSourceBranch) do |strategy|
- allow(strategy).to receive(:execute_git_merge!).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{error_message}")
- end
- # we can remove these allows upon refactor_merge_service cleanup
- allow(service).to receive(:repository).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{error_message}")
- allow(service).to receive(:execute_hooks)
+ allow_any_instance_of(MergeRequests::SquashService).to receive(:squash!).and_return(nil)
+ merge_request.update!(squash: true)
service.execute(merge_request)
- expect(merge_request.merge_error).to include('Something went wrong during merge pre-receive hook')
+ expect(merge_request).to be_open
+ expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.squash_commit_sha).to be_nil
+ expect(merge_request.merge_error).to include(error_message)
expect(Gitlab::AppLogger).to have_received(:error).with(
hash_including(
merge_request_info: merge_request.to_reference(full: true),
@@ -482,65 +542,69 @@ RSpec.describe MergeRequests::MergeService, feature_category: :code_review_workf
)
end
- it 'logs and saves error if commit is not created' do
- allow_any_instance_of(Repository).to receive(:merge).and_return(false)
- allow(service).to receive(:execute_hooks)
+ it 'logs and saves error if there is a PreReceiveError exception' do
+ error_message = 'error message'
+
+ allow_next_instance_of(MergeRequests::MergeStrategies::FromSourceBranch) do |strategy|
+ allow(strategy).to receive(:execute_git_merge!).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{error_message}")
+ end
+ merge_request.update!(squash: true)
service.execute(merge_request)
expect(merge_request).to be_open
expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.merge_error).to include(described_class::GENERIC_ERROR_MESSAGE)
+ expect(merge_request.squash_commit_sha).to be_nil
+ expect(merge_request.merge_error).to include('Something went wrong during merge pre-receive hook')
expect(Gitlab::AppLogger).to have_received(:error).with(
hash_including(
merge_request_info: merge_request.to_reference(full: true),
- message: a_string_matching(described_class::GENERIC_ERROR_MESSAGE)
+ message: a_string_matching(error_message)
)
)
end
- context 'when squashing is required' do
+ context 'when fast-forward merge is not allowed' do
before do
- merge_request.update!(source_branch: 'master', target_branch: 'feature')
- merge_request.target_project.project_setting.squash_always!
+ allow_any_instance_of(Repository).to receive(:ancestor?).and_return(nil)
end
- it 'raises an error if squashing is not done' do
- error_message = 'requires squashing commits'
+ %w(semi-linear ff).each do |merge_method|
+ it "logs and saves error if merge is #{merge_method} only" do
+ merge_method = 'rebase_merge' if merge_method == 'semi-linear'
+ merge_request.project.update!(merge_method: merge_method)
+ error_message = 'Only fast-forward merge is allowed for your project. Please update your source branch'
+ allow(service).to receive(:execute_hooks)
+ expect(lease).to receive(:cancel)
- service.execute(merge_request)
-
- expect(merge_request).to be_open
+ service.execute(merge_request)
- expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.squash_commit_sha).to be_nil
- expect(merge_request.merge_error).to include(error_message)
- expect(Gitlab::AppLogger).to have_received(:error).with(
- hash_including(
- merge_request_info: merge_request.to_reference(full: true),
- message: a_string_matching(error_message)
+ expect(merge_request).to be_open
+ expect(merge_request.merge_commit_sha).to be_nil
+ expect(merge_request.squash_commit_sha).to be_nil
+ expect(merge_request.merge_error).to include(error_message)
+ expect(Gitlab::AppLogger).to have_received(:error).with(
+ hash_including(
+ merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(error_message)
+ )
)
- )
+ end
end
end
+ end
- context 'when squashing' do
+ context 'when not mergeable' do
+ let!(:error_message) { 'Merge request is not mergeable' }
+
+ context 'with failing CI' do
before do
- merge_request.update!(source_branch: 'master', target_branch: 'feature')
+ allow(merge_request).to receive(:mergeable_ci_state?) { false }
end
- it 'logs and saves error if there is an error when squashing' do
- error_message = 'Squashing failed: Squash the commits locally, resolve any conflicts, then push the branch.'
-
- allow_any_instance_of(MergeRequests::SquashService).to receive(:squash!).and_return(nil)
- merge_request.update!(squash: true)
-
+ it 'logs and saves error' do
service.execute(merge_request)
- expect(merge_request).to be_open
- expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.squash_commit_sha).to be_nil
- expect(merge_request.merge_error).to include(error_message)
expect(Gitlab::AppLogger).to have_received(:error).with(
hash_including(
merge_request_info: merge_request.to_reference(full: true),
@@ -548,24 +612,16 @@ RSpec.describe MergeRequests::MergeService, feature_category: :code_review_workf
)
)
end
+ end
- it 'logs and saves error if there is an PreReceiveError exception' do
- error_message = 'error message'
-
- allow_next_instance_of(MergeRequests::MergeStrategies::FromSourceBranch) do |strategy|
- allow(strategy).to receive(:execute_git_merge!).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{error_message}")
- end
- # we can remove these allows upon refactor_merge_service cleanup
- allow(service).to receive(:repository).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{error_message}")
- allow(service).to receive(:execute_hooks)
- merge_request.update!(squash: true)
+ context 'with unresolved discussions' do
+ before do
+ allow(merge_request).to receive(:mergeable_discussions_state?) { false }
+ end
+ it 'logs and saves error' do
service.execute(merge_request)
- expect(merge_request).to be_open
- expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.squash_commit_sha).to be_nil
- expect(merge_request.merge_error).to include('Something went wrong during merge pre-receive hook')
expect(Gitlab::AppLogger).to have_received(:error).with(
hash_including(
merge_request_info: merge_request.to_reference(full: true),
@@ -574,102 +630,35 @@ RSpec.describe MergeRequests::MergeService, feature_category: :code_review_workf
)
end
- context 'when fast-forward merge is not allowed' do
- before do
- allow_any_instance_of(Repository).to receive(:ancestor?).and_return(nil)
- end
-
- %w(semi-linear ff).each do |merge_method|
- it "logs and saves error if merge is #{merge_method} only" do
- merge_method = 'rebase_merge' if merge_method == 'semi-linear'
- merge_request.project.update!(merge_method: merge_method)
- error_message = 'Only fast-forward merge is allowed for your project. Please update your source branch'
- allow(service).to receive(:execute_hooks)
- expect(lease).to receive(:cancel)
-
- service.execute(merge_request)
-
- expect(merge_request).to be_open
- expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.squash_commit_sha).to be_nil
- expect(merge_request.merge_error).to include(error_message)
- expect(Gitlab::AppLogger).to have_received(:error).with(
- hash_including(
- merge_request_info: merge_request.to_reference(full: true),
- message: a_string_matching(error_message)
- )
- )
- end
- end
- end
- end
-
- context 'when not mergeable' do
- let!(:error_message) { 'Merge request is not mergeable' }
-
- context 'with failing CI' do
- before do
- allow(merge_request).to receive(:mergeable_ci_state?) { false }
- end
-
- it 'logs and saves error' do
- service.execute(merge_request)
-
- expect(Gitlab::AppLogger).to have_received(:error).with(
- hash_including(
- merge_request_info: merge_request.to_reference(full: true),
- message: a_string_matching(error_message)
- )
- )
- end
- end
-
- context 'with unresolved discussions' do
- before do
- allow(merge_request).to receive(:mergeable_discussions_state?) { false }
- end
-
- it 'logs and saves error' do
- service.execute(merge_request)
-
- expect(Gitlab::AppLogger).to have_received(:error).with(
- hash_including(
- merge_request_info: merge_request.to_reference(full: true),
- message: a_string_matching(error_message)
- )
- )
- end
-
- context 'when passing `skip_discussions_check: true` as `options` parameter' do
- it 'merges the merge request' do
- service.execute(merge_request, skip_discussions_check: true)
+ context 'when passing `skip_discussions_check: true` as `options` parameter' do
+ it 'merges the merge request' do
+ service.execute(merge_request, skip_discussions_check: true)
- expect(merge_request).to be_valid
- expect(merge_request).to be_merged
- end
+ expect(merge_request).to be_valid
+ expect(merge_request).to be_merged
end
end
end
+ end
- context 'when passing `check_mergeability_retry_lease: true` as `options` parameter' do
- it 'call mergeable? with check_mergeability_retry_lease' do
- expect(merge_request).to receive(:mergeable?).with(hash_including(check_mergeability_retry_lease: true)).and_call_original
+ context 'when passing `check_mergeability_retry_lease: true` as `options` parameter' do
+ it 'calls mergeable? with check_mergeability_retry_lease' do
+ expect(merge_request).to receive(:mergeable?).with(hash_including(check_mergeability_retry_lease: true)).and_call_original
- service.execute(merge_request, check_mergeability_retry_lease: true)
- end
+ service.execute(merge_request, check_mergeability_retry_lease: true)
end
end
+ end
- context 'when the other sidekiq worker has already been running' do
- before do
- stub_exclusive_lease_taken(lease_key)
- end
+ context 'when another sidekiq worker is already running' do
+ before do
+ stub_exclusive_lease_taken(lease_key)
+ end
- it 'does not execute service' do
- expect(service).not_to receive(:commit)
+ it 'does not execute service' do
+ expect(service).not_to receive(:commit)
- service.execute(merge_request)
- end
+ service.execute(merge_request)
end
end
end
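
The error-handling examples above all assert the same shape: the merge is delegated to a strategy object, and any failure ends up in merge_request.merge_error plus a structured Gitlab::AppLogger.error call. A minimal sketch of that shape, assuming only the class and method names used in the spec (the bodies and the constant's value are illustrative, not GitLab's actual MergeService):

# Hypothetical sketch of the behaviour the error-handling examples assert.
# Only the names (FromSourceBranch#execute_git_merge!, GENERIC_ERROR_MESSAGE,
# Gitlab::AppLogger, merge_error) come from the spec; the bodies are assumed.
class SketchMergeService
  GENERIC_ERROR_MESSAGE = 'Something went wrong during merge' # placeholder value

  def initialize(merge_request, strategy, logger: Gitlab::AppLogger)
    @merge_request = merge_request
    @strategy = strategy
    @logger = logger
  end

  def execute
    @strategy.execute_git_merge!
  rescue Gitlab::Git::PreReceiveError => e
    record_error('Something went wrong during merge pre-receive hook', e.message)
  rescue StandardError => e
    record_error(GENERIC_ERROR_MESSAGE, e.message)
  end

  private

  # Store a user-facing message on the merge request and log the raw details,
  # which is what the specs assert via merge_error and Gitlab::AppLogger.
  def record_error(user_message, log_message)
    @merge_request.update!(merge_error: user_message)
    @logger.error(
      merge_request_info: @merge_request.to_reference(full: true),
      message: log_message
    )
  end
end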
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index 06932af26dc..d5b7b56ccdd 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -913,7 +913,7 @@ RSpec.describe MergeRequests::RefreshService, feature_category: :code_review_wor
subject { service.execute(oldrev, newrev, 'refs/heads/merge-commit-analyze-before') }
context 'feature enabled' do
- it "updates merge requests' merge_commits" do
+ it "updates merge requests' merge_commit and merged_commit values", :aggregate_failures do
expect(Gitlab::BranchPushMergeCommitAnalyzer).to receive(:new).and_wrap_original do |original_method, commits|
expect(commits.map(&:id)).to eq(%w{646ece5cfed840eca0a4feb21bcd6a81bb19bda3 29284d9bcc350bcae005872d0be6edd016e2efb5 5f82584f0a907f3b30cfce5bb8df371454a90051 8a994512e8c8f0dfcf22bb16df6e876be7a61036 689600b91aabec706e657e38ea706ece1ee8268f db46a1c5a5e474aa169b6cdb7a522d891bc4c5f9})
@@ -927,6 +927,11 @@ RSpec.describe MergeRequests::RefreshService, feature_category: :code_review_wor
expect(merge_request.merge_commit.id).to eq('646ece5cfed840eca0a4feb21bcd6a81bb19bda3')
expect(merge_request_side_branch.merge_commit.id).to eq('29284d9bcc350bcae005872d0be6edd016e2efb5')
+ # we need to use read_attribute to bypass the overridden
+ # #merged_commit_sha method, which contains a fallback to
+ # #merge_commit_sha
+ expect(merge_request.read_attribute(:merged_commit_sha)).to eq('646ece5cfed840eca0a4feb21bcd6a81bb19bda3')
+ expect(merge_request_side_branch.read_attribute(:merged_commit_sha)).to eq('29284d9bcc350bcae005872d0be6edd016e2efb5')
end
end
end
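
The read_attribute calls in the hunk above only make sense against an overridden reader; a minimal sketch of the kind of override the comment describes, with an assumed fallback body (illustrative, not the model's real code):

# Hypothetical sketch: an attribute reader overridden to fall back to another
# column. With an override like this, merge_request.merged_commit_sha cannot
# distinguish a populated column from the fallback, which is why the spec
# reads the raw value via read_attribute(:merged_commit_sha).
class MergeRequest < ApplicationRecord
  def merged_commit_sha
    super || merge_commit_sha
  end
end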
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index 2f6db13a041..72e41f7b814 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -1301,41 +1301,39 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re
end
context 'updating labels' do
- let(:label_a) { label }
- let(:label_b) { create(:label, title: 'b', project: project) }
- let(:label_c) { create(:label, title: 'c', project: project) }
- let(:label_locked) { create(:label, title: 'locked', project: project, lock_on_merge: true) }
- let(:issuable) { merge_request }
+ context 'when merge request is not merged' do
+ let(:label_a) { label }
+ let(:label_b) { create(:label, title: 'b', project: project) }
+ let(:label_c) { create(:label, title: 'c', project: project) }
+ let(:label_locked) { create(:label, title: 'locked', project: project, lock_on_merge: true) }
+ let(:issuable) { merge_request }
- it_behaves_like 'updating issuable labels'
- it_behaves_like 'keeps issuable labels sorted after update'
- it_behaves_like 'broadcasting issuable labels updates'
+ it_behaves_like 'updating issuable labels'
+ it_behaves_like 'keeps issuable labels sorted after update'
+ it_behaves_like 'broadcasting issuable labels updates'
+ end
context 'when merge request has been merged' do
- context 'when remove_label_ids contains a locked label' do
- let(:params) { { remove_label_ids: [label_locked.id] } }
+ let(:label_a) { create(:label, title: 'a', project: project, lock_on_merge: true) }
+ let(:label_b) { create(:label, title: 'b', project: project, lock_on_merge: true) }
+ let(:label_c) { create(:label, title: 'c', project: project, lock_on_merge: true) }
+ let(:label_unlocked) { create(:label, title: 'unlocked', project: project) }
+ let(:issuable) { merge_request }
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(enforce_locked_labels_on_merge: false)
- end
+ before do
+ merge_request.update!(state: 'merged')
+ end
- it 'removes locked labels' do
- merge_request.update!(state: 'merged', labels: [label_a, label_locked])
- update_issuable(params)
+ it_behaves_like 'updating merged MR with locked labels'
- expect(merge_request.label_ids).to contain_exactly(label_a.id)
- end
- end
-
- context 'when feature flag is enabled' do
- it 'does not remove locked labels' do
- merge_request.update!(state: 'merged', labels: [label_a, label_locked])
- update_issuable(params)
+ context 'when feature flag is disabled' do
+ let(:label_locked) { create(:label, title: 'locked', project: project, lock_on_merge: true) }
- expect(merge_request.label_ids).to contain_exactly(label_a.id, label_locked.id)
- end
+ before do
+ stub_feature_flags(enforce_locked_labels_on_merge: false)
end
+
+ it_behaves_like 'updating issuable labels'
end
end
diff --git a/spec/services/metrics/global_metrics_update_service_spec.rb b/spec/services/metrics/global_metrics_update_service_spec.rb
deleted file mode 100644
index 38c7f9282d9..00000000000
--- a/spec/services/metrics/global_metrics_update_service_spec.rb
+++ /dev/null
@@ -1,14 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::GlobalMetricsUpdateService, :prometheus, feature_category: :metrics do
- describe '#execute' do
- it 'sets gitlab_maintenance_mode gauge metric' do
- metric = subject.maintenance_mode_metric
- expect(Gitlab).to receive(:maintenance_mode?).and_return(true)
-
- expect { subject.execute }.to change { metric.get }.from(0).to(1)
- end
- end
-end
diff --git a/spec/services/metrics/sample_metrics_service_spec.rb b/spec/services/metrics/sample_metrics_service_spec.rb
deleted file mode 100644
index 3442b4303db..00000000000
--- a/spec/services/metrics/sample_metrics_service_spec.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::SampleMetricsService, feature_category: :metrics do
- describe 'query' do
- let(:range_start) { '2019-12-02T23:31:45.000Z' }
- let(:range_end) { '2019-12-03T00:01:45.000Z' }
-
- subject { described_class.new(identifier, range_start: range_start, range_end: range_end).query }
-
- context 'when the file is not found' do
- let(:identifier) { nil }
-
- it { is_expected.to be_nil }
- end
-
- context 'when the file is found' do
- let(:identifier) { 'sample_metric_query_result' }
- let(:source) { File.join(Rails.root, 'spec/fixtures/gitlab/sample_metrics', "#{identifier}.yml") }
- let(:destination) { File.join(Rails.root, Metrics::SampleMetricsService::DIRECTORY, "#{identifier}.yml") }
-
- around do |example|
- FileUtils.mkdir_p(Metrics::SampleMetricsService::DIRECTORY)
- FileUtils.cp(source, destination)
-
- example.run
- ensure
- FileUtils.rm(destination)
- end
-
- subject { described_class.new(identifier, range_start: range_start, range_end: range_end).query }
-
- it 'loads data from the sample file correctly' do
- expect(subject).to eq(YAML.load_file(source)[30])
- end
- end
-
- context 'when the identifier is for a path outside of sample_metrics' do
- let(:identifier) { '../config/secrets' }
-
- it { is_expected.to be_nil }
- end
- end
-end
diff --git a/spec/services/namespaces/in_product_marketing_emails_service_spec.rb b/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
deleted file mode 100644
index 8a2ecd5c3e0..00000000000
--- a/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
+++ /dev/null
@@ -1,216 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Namespaces::InProductMarketingEmailsService, '#execute', feature_category: :purchase do
- subject(:execute_service) { described_class.new(track, interval).execute }
-
- let(:track) { :create }
- let(:interval) { 1 }
-
- let(:frozen_time) { Time.zone.parse('23 Mar 2021 10:14:40 UTC') }
- let(:previous_action_completed_at) { frozen_time - 2.days }
- let(:current_action_completed_at) { nil }
- let(:user_can_perform_current_track_action) { true }
- let(:actions_completed) { { created_at: previous_action_completed_at, git_write_at: current_action_completed_at } }
-
- let_it_be(:group) { create(:group) }
- let_it_be(:user) { create(:user, email_opted_in: true) }
-
- before do
- travel_to(frozen_time)
- create(:onboarding_progress, namespace: group, **actions_completed)
- group.add_developer(user)
- allow(Ability).to receive(:allowed?).with(user, anything, anything).and_return(user_can_perform_current_track_action)
- allow(Notify).to receive(:in_product_marketing_email).and_return(double(deliver_later: nil))
- end
-
- RSpec::Matchers.define :send_in_product_marketing_email do |*args|
- match do
- expect(Notify).to have_received(:in_product_marketing_email).with(*args).once
- end
-
- match_when_negated do
- expect(Notify).not_to have_received(:in_product_marketing_email)
- end
- end
-
- context 'for each track and series with the right conditions' do
- using RSpec::Parameterized::TableSyntax
-
- where(:track, :interval, :actions_completed) do
- :create | 1 | { created_at: frozen_time - 2.days }
- :create | 5 | { created_at: frozen_time - 6.days }
- :create | 10 | { created_at: frozen_time - 11.days }
- :team_short | 1 | { created_at: frozen_time - 2.days, git_write_at: frozen_time - 2.days }
- :trial_short | 2 | { created_at: frozen_time - 3.days, git_write_at: frozen_time - 3.days }
- :admin_verify | 3 | { created_at: frozen_time - 4.days, git_write_at: frozen_time - 4.days }
- :verify | 4 | { created_at: frozen_time - 5.days, git_write_at: frozen_time - 5.days }
- :verify | 8 | { created_at: frozen_time - 9.days, git_write_at: frozen_time - 9.days }
- :verify | 13 | { created_at: frozen_time - 14.days, git_write_at: frozen_time - 14.days }
- :trial | 1 | { created_at: frozen_time - 2.days, git_write_at: frozen_time - 2.days, pipeline_created_at: frozen_time - 2.days }
- :trial | 5 | { created_at: frozen_time - 6.days, git_write_at: frozen_time - 6.days, pipeline_created_at: frozen_time - 6.days }
- :trial | 10 | { created_at: frozen_time - 11.days, git_write_at: frozen_time - 11.days, pipeline_created_at: frozen_time - 11.days }
- :team | 1 | { created_at: frozen_time - 2.days, git_write_at: frozen_time - 2.days, pipeline_created_at: frozen_time - 2.days, trial_started_at: frozen_time - 2.days }
- :team | 5 | { created_at: frozen_time - 6.days, git_write_at: frozen_time - 6.days, pipeline_created_at: frozen_time - 6.days, trial_started_at: frozen_time - 6.days }
- :team | 10 | { created_at: frozen_time - 11.days, git_write_at: frozen_time - 11.days, pipeline_created_at: frozen_time - 11.days, trial_started_at: frozen_time - 11.days }
- end
-
- with_them do
- it { is_expected.to send_in_product_marketing_email(user.id, group.id, track, described_class::TRACKS[track][:interval_days].index(interval)) }
- end
- end
-
- context 'when initialized with a different track' do
- let(:track) { :team_short }
-
- it { is_expected.not_to send_in_product_marketing_email }
-
- context 'when the previous track actions have been completed' do
- let(:current_action_completed_at) { frozen_time - 2.days }
-
- it { is_expected.to send_in_product_marketing_email(user.id, group.id, track, 0) }
- end
- end
-
- context 'when initialized with a different interval' do
- let(:interval) { 5 }
-
- it { is_expected.not_to send_in_product_marketing_email }
-
- context 'when the previous track action was completed within the intervals range' do
- let(:previous_action_completed_at) { frozen_time - 6.days }
-
- it { is_expected.to send_in_product_marketing_email(user.id, group.id, :create, 1) }
- end
- end
-
- context 'when the previous track action is not yet completed' do
- let(:previous_action_completed_at) { nil }
-
- it { is_expected.not_to send_in_product_marketing_email }
- end
-
- context 'when the previous track action is completed outside the intervals range' do
- let(:previous_action_completed_at) { frozen_time - 3.days }
-
- it { is_expected.not_to send_in_product_marketing_email }
- end
-
- context 'when the current track action is completed' do
- let(:current_action_completed_at) { frozen_time }
-
- it { is_expected.not_to send_in_product_marketing_email }
- end
-
- context "when the user cannot perform the current track's action" do
- let(:user_can_perform_current_track_action) { false }
-
- it { is_expected.not_to send_in_product_marketing_email }
- end
-
- context 'when the user has not opted into marketing emails' do
- let(:user) { create(:user, email_opted_in: false) }
-
- it { is_expected.not_to send_in_product_marketing_email }
- end
-
- describe 'do not send emails twice' do
- subject { described_class.send_for_all_tracks_and_intervals }
-
- let(:user) { create(:user, email_opted_in: true) }
-
- context 'when user already got a specific email' do
- before do
- create(:in_product_marketing_email, user: user, track: track, series: 0)
- end
-
- it { is_expected.not_to send_in_product_marketing_email(user.id, anything, track, 0) }
- end
-
- context 'when user already got sent the whole track' do
- before do
- 0.upto(2) do |series|
- create(:in_product_marketing_email, user: user, track: track, series: series)
- end
- end
-
- it 'does not send any of the emails anymore', :aggregate_failures do
- 0.upto(2) do |series|
- expect(subject).not_to send_in_product_marketing_email(user.id, anything, track, series)
- end
- end
- end
-
- context 'when user is in two groups' do
- let(:other_group) { create(:group) }
-
- before do
- other_group.add_developer(user)
- end
-
- context 'when both groups would get the same email' do
- before do
- create(:onboarding_progress, namespace: other_group, **actions_completed)
- end
-
- it 'does not send the same email twice' do
- subject
-
- expect(Notify).to have_received(:in_product_marketing_email).with(user.id, anything, :create, 0).once
- end
- end
-
- context 'when other group gets a different email' do
- before do
- create(:onboarding_progress, namespace: other_group, created_at: previous_action_completed_at, git_write_at: frozen_time - 2.days)
- end
-
- it 'sends both emails' do
- subject
-
- expect(Notify).to have_received(:in_product_marketing_email).with(user.id, group.id, :create, 0)
- expect(Notify).to have_received(:in_product_marketing_email).with(user.id, other_group.id, :team_short, 0)
- end
- end
- end
- end
-
- it 'records sent emails' do
- expect { subject }.to change { Users::InProductMarketingEmail.count }.by(1)
-
- expect(
- Users::InProductMarketingEmail.where(
- user: user,
- track: Users::InProductMarketingEmail::ACTIVE_TRACKS[:create],
- series: 0
- )
- ).to exist
- end
-
- context 'when invoked with a non existing track' do
- let(:track) { :foo }
-
- before do
- stub_const("#{described_class}::TRACKS", { bar: {} })
- end
-
- it { expect { subject }.to raise_error(ArgumentError, 'Track foo not defined') }
- end
-
- context 'when group is a sub-group' do
- let(:root_group) { create(:group) }
- let(:group) { create(:group) }
-
- before do
- group.parent = root_group
- group.save!
-
- allow(Ability).to receive(:allowed?).and_call_original
- end
-
- it 'does not raise an exception' do
- expect { execute_service }.not_to raise_error
- end
- end
-end
diff --git a/spec/services/note_summary_spec.rb b/spec/services/note_summary_spec.rb
index 1cbbb68205d..f3233ef925e 100644
--- a/spec/services/note_summary_spec.rb
+++ b/spec/services/note_summary_spec.rb
@@ -24,8 +24,13 @@ RSpec.describe NoteSummary, feature_category: :code_review_workflow do
describe '#note' do
it 'returns note hash' do
freeze_time do
- expect(create_note_summary.note).to eq(noteable: noteable, project: project, author: user, note: 'note',
- created_at: Time.current)
+ expect(create_note_summary.note).to eq(
+ noteable: noteable,
+ project: project,
+ author: user,
+ note: 'note',
+ created_at: Time.current
+ )
end
end
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index 22509885c92..b5eb5f8037a 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -180,8 +180,9 @@ RSpec.describe Notes::CreateService, feature_category: :team_planning do
execute_create_service
end
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_ADDED }
+ it_behaves_like 'internal event tracking' do
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_ADDED }
+ let(:namespace) { project.namespace }
subject(:service_action) { execute_create_service }
end
end
diff --git a/spec/services/notes/destroy_service_spec.rb b/spec/services/notes/destroy_service_spec.rb
index 396e23351c9..54782774b4e 100644
--- a/spec/services/notes/destroy_service_spec.rb
+++ b/spec/services/notes/destroy_service_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe Notes::DestroyService, feature_category: :team_planning do
end
describe 'comment removed event tracking', :snowplow do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_REMOVED }
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_REMOVED }
let(:note) { create(:note, project: project, noteable: issue) }
let(:service_action) { described_class.new(project, user).execute(note) }
@@ -39,11 +39,12 @@ RSpec.describe Notes::DestroyService, feature_category: :team_planning do
expect do
service_action
end.to change {
- counter.unique_events(event_names: property, start_date: Date.today.beginning_of_week, end_date: 1.week.from_now)
+ counter.unique_events(event_names: action, start_date: Date.today.beginning_of_week, end_date: 1.week.from_now)
}.by(1)
end
- it_behaves_like 'issue_edit snowplow tracking' do
+ it_behaves_like 'internal event tracking' do
+ let(:namespace) { project.namespace }
subject(:execute_service_action) { service_action }
end
end
diff --git a/spec/services/notes/quick_actions_service_spec.rb b/spec/services/notes/quick_actions_service_spec.rb
index 0065fd639b8..b6e29299fdd 100644
--- a/spec/services/notes/quick_actions_service_spec.rb
+++ b/spec/services/notes/quick_actions_service_spec.rb
@@ -188,6 +188,45 @@ RSpec.describe Notes::QuickActionsService, feature_category: :team_planning do
end
end
+ describe '/confidential' do
+ let_it_be_with_reload(:noteable) { create(:work_item, :issue, project: project) }
+ let_it_be(:note_text) { '/confidential' }
+ let_it_be(:note) { create(:note, noteable: noteable, project: project, note: note_text) }
+
+ context 'when work item does not have children' do
+ it 'leaves the note empty' do
+ expect(execute(note)).to be_empty
+ end
+
+ it 'marks work item as confidential' do
+ expect { execute(note) }.to change { noteable.reload.confidential }.from(false).to(true)
+ end
+ end
+
+ context 'when work item has children' do
+ before do
+ create(:parent_link, work_item: task, work_item_parent: noteable)
+ end
+
+ context 'when children are not confidential' do
+ let(:task) { create(:work_item, :task, project: project) }
+
+ it 'does not mark parent work item as confidential' do
+ expect { execute(note) }.to not_change { noteable.reload.confidential }.from(false)
+ expect(noteable.errors[:base]).to include('A confidential work item cannot have a parent that already has non-confidential children.')
+ end
+ end
+
+ context 'when children are confidential' do
+ let(:task) { create(:work_item, :confidential, :task, project: project) }
+
+ it 'marks parent work item as confidential' do
+ expect { execute(note) }.to change { noteable.reload.confidential }.from(false).to(true)
+ end
+ end
+ end
+ end
+
describe 'note with command & text' do
describe '/close, /label, /assign & /milestone' do
let(:note_text) do
diff --git a/spec/services/notes/update_service_spec.rb b/spec/services/notes/update_service_spec.rb
index e109bfbcd0b..8389db000b8 100644
--- a/spec/services/notes/update_service_spec.rb
+++ b/spec/services/notes/update_service_spec.rb
@@ -75,6 +75,13 @@ RSpec.describe Notes::UpdateService, feature_category: :team_planning do
update_note({})
end
+ it_behaves_like 'internal event tracking' do
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_EDITED }
+ let(:namespace) { project.namespace }
+
+ subject(:service_action) { update_note(note: 'new text') }
+ end
+
it 'tracks issue usage data', :clean_gitlab_redis_shared_state do
counter = Gitlab::UsageDataCounters::HLLRedisCounter
@@ -85,11 +92,6 @@ RSpec.describe Notes::UpdateService, feature_category: :team_planning do
update_note(note: 'new text')
end.to change { counter.unique_events(event_names: event, start_date: Date.today.beginning_of_week, end_date: 1.week.from_now) }.by(1)
end
-
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_EDITED }
- subject(:service_action) { update_note(note: 'new text') }
- end
end
context 'when note text was changed' do
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 028c3ea6610..40597c30c4a 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -324,7 +324,7 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
end
describe "never emails the ghost user" do
- let(:key_options) { { user: User.ghost } }
+ let(:key_options) { { user: Users::Internal.ghost } }
it "does not send email to key owner" do
expect { subject }.not_to have_enqueued_email(key.id, mail: "new_ssh_key_email")
@@ -345,7 +345,7 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
end
describe "never emails the ghost user" do
- let(:key_options) { { user: User.ghost } }
+ let(:key_options) { { user: Users::Internal.ghost } }
it "does not send email to key owner" do
expect { subject }.not_to have_enqueued_email(key.id, mail: "new_gpg_key_email")
@@ -376,6 +376,74 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
end
end
+ describe '#resource_access_tokens_about_to_expire' do
+ let_it_be(:project_bot) { create(:user, :project_bot) }
+ let_it_be(:expiring_token) { create(:personal_access_token, user: project_bot, expires_at: 5.days.from_now) }
+
+ let_it_be(:owner1) { create(:user) }
+ let_it_be(:owner2) { create(:user) }
+
+ subject(:notification_service) do
+ notification.resource_access_tokens_about_to_expire(project_bot, [expiring_token.name])
+ end
+
+ context 'when the resource is a group' do
+ let(:group) { create(:group) }
+
+ before do
+ group.add_owner(owner1)
+ group.add_owner(owner2)
+ group.add_reporter(project_bot)
+ end
+
+ it 'sends emails to the group owners' do
+ expect { notification_service }.to(
+ have_enqueued_email(
+ owner1,
+ project_bot.resource_bot_resource,
+ [expiring_token.name],
+ mail: "resource_access_tokens_about_to_expire_email"
+ ).and(
+ have_enqueued_email(
+ owner2,
+ project_bot.resource_bot_resource,
+ [expiring_token.name],
+ mail: "resource_access_tokens_about_to_expire_email"
+ )
+ )
+ )
+ end
+ end
+
+ context 'when the resource is a project' do
+ let(:project) { create(:project) }
+
+ before do
+ project.add_maintainer(owner1)
+ project.add_maintainer(owner2)
+ project.add_reporter(project_bot)
+ end
+
+ it 'sends emails to the project maintainers' do
+ expect { notification_service }.to(
+ have_enqueued_email(
+ owner1,
+ project_bot.resource_bot_resource,
+ [expiring_token.name],
+ mail: "resource_access_tokens_about_to_expire_email"
+ ).and(
+ have_enqueued_email(
+ owner2,
+ project_bot.resource_bot_resource,
+ [expiring_token.name],
+ mail: "resource_access_tokens_about_to_expire_email"
+ )
+ )
+ )
+ end
+ end
+ end
+
describe '#access_token_about_to_expire' do
let_it_be(:user) { create(:user) }
let_it_be(:pat) { create(:personal_access_token, user: user, expires_at: 5.days.from_now) }
@@ -534,7 +602,7 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
let(:subject) { described_class.new }
let(:mailer) { double(deliver_later: true) }
- let(:issue) { create(:issue, author: User.support_bot) }
+ let(:issue) { create(:issue, author: Users::Internal.support_bot) }
let(:project) { issue.project }
let(:note) { create(:note, noteable: issue, project: project) }
@@ -576,6 +644,14 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
end
it_behaves_like 'notification with exact metric events', 1
+
+ context 'when service desk is disabled' do
+ before do
+ project.update!(service_desk_enabled: false)
+ end
+
+ it_behaves_like 'no participants are notified'
+ end
end
context 'do exist and note is confidential' do
@@ -1211,9 +1287,11 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
let_it_be(:member_and_not_mentioned) { create(:user, developer_projects: [project]) }
let_it_be(:non_member_and_mentioned) { create(:user) }
let_it_be(:note) do
- create(:diff_note_on_design,
- noteable: design,
- note: "Hello #{member_and_mentioned.to_reference}, G'day #{non_member_and_mentioned.to_reference}")
+ create(
+ :diff_note_on_design,
+ noteable: design,
+ note: "Hello #{member_and_mentioned.to_reference}, G'day #{non_member_and_mentioned.to_reference}"
+ )
end
let_it_be(:note_2) do
@@ -3498,12 +3576,14 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
let(:commit) { project.commit }
def create_pipeline(user, status)
- create(:ci_pipeline, status,
- project: project,
- user: user,
- ref: 'refs/heads/master',
- sha: commit.id,
- before_sha: '00000000')
+ create(
+ :ci_pipeline, status,
+ project: project,
+ user: user,
+ ref: 'refs/heads/master',
+ sha: commit.id,
+ before_sha: '00000000'
+ )
end
before_all do
@@ -4012,12 +4092,14 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
project.add_maintainer(reviewer)
merge_request.assignees.each { |assignee| project.add_maintainer(assignee) }
- create(:diff_note_on_merge_request,
- project: project,
- noteable: merge_request,
- author: reviewer,
- review: review,
- note: "cc @mention")
+ create(
+ :diff_note_on_merge_request,
+ project: project,
+ noteable: merge_request,
+ author: reviewer,
+ review: review,
+ note: "cc @mention"
+ )
end
it 'sends emails' do
@@ -4059,10 +4141,18 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
subject { notification.inactive_project_deletion_warning(project, deletion_date) }
it "sends email to project owners and maintainers" do
- expect { subject }.to have_enqueued_email(project, maintainer, deletion_date,
- mail: "inactive_project_deletion_warning_email")
- expect { subject }.not_to have_enqueued_email(project, developer, deletion_date,
- mail: "inactive_project_deletion_warning_email")
+ expect { subject }.to have_enqueued_email(
+ project,
+ maintainer,
+ deletion_date,
+ mail: "inactive_project_deletion_warning_email"
+ )
+ expect { subject }.not_to have_enqueued_email(
+ project,
+ developer,
+ deletion_date,
+ mail: "inactive_project_deletion_warning_email"
+ )
end
end
diff --git a/spec/services/packages/ml_model/create_package_file_service_spec.rb b/spec/services/packages/ml_model/create_package_file_service_spec.rb
index 32754279e17..30a6bedd07b 100644
--- a/spec/services/packages/ml_model/create_package_file_service_spec.rb
+++ b/spec/services/packages/ml_model/create_package_file_service_spec.rb
@@ -38,12 +38,12 @@ RSpec.describe Packages::MlModel::CreatePackageFileService, feature_category: :m
it 'creates package file', :aggregate_failures do
expect { execute_service }
- .to change { project.packages.ml_model.count }.by(1)
+ .to change { Packages::MlModel::Package.count }.by(1)
.and change { Packages::PackageFile.count }.by(1)
.and change { Packages::PackageFileBuildInfo.count }.by(0)
.and change { Ml::ModelVersion.count }.by(1)
- new_model = project.packages.ml_model.last
+ new_model = Packages::MlModel::Package.last
package_file = new_model.package_files.last
new_model_version = Ml::ModelVersion.last
diff --git a/spec/services/packages/npm/generate_metadata_service_spec.rb b/spec/services/packages/npm/generate_metadata_service_spec.rb
index fdd0ab0ccee..d8e794405e6 100644
--- a/spec/services/packages/npm/generate_metadata_service_spec.rb
+++ b/spec/services/packages/npm/generate_metadata_service_spec.rb
@@ -70,6 +70,17 @@ RSpec.describe ::Packages::Npm::GenerateMetadataService, feature_category: :pack
it { expect(subject.dig(package2.version, dependency_type)).to be nil }
end
+
+ context 'when generating dependencies' do
+ let(:packages) { ::Packages::Package.where(id: package1.id) }
+
+ it 'loads grouped dependency links', :aggregate_failures do
+ expect(::Packages::DependencyLink).to receive(:dependency_ids_grouped_by_type).and_call_original
+ expect(::Packages::Package).not_to receive(:including_dependency_links)
+
+ subject
+ end
+ end
end
context 'for metadatum' do
diff --git a/spec/services/packages/nuget/check_duplicates_service_spec.rb b/spec/services/packages/nuget/check_duplicates_service_spec.rb
new file mode 100644
index 00000000000..9675aa5f5e2
--- /dev/null
+++ b/spec/services/packages/nuget/check_duplicates_service_spec.rb
@@ -0,0 +1,155 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Nuget::CheckDuplicatesService, feature_category: :package_registry do
+ include PackagesManagerApiSpecHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:file_name) { 'package.nupkg' }
+
+ let(:params) do
+ {
+ file_name: file_name,
+ file: temp_file(file_name)
+ }
+ end
+
+ let(:service) { described_class.new(project, user, params) }
+
+ describe '#execute' do
+ subject(:execute) { service.execute }
+
+ shared_examples 'returning error' do |reason:, message:|
+ it 'returns an error' do
+ response = execute
+
+ expect(response.status).to eq(:error)
+ expect(response.reason).to eq(reason)
+ expect(response.message).to eq(message)
+ end
+ end
+
+ shared_examples 'returning success' do
+ it 'returns success' do
+ response = execute
+
+ expect(response.status).to eq(:success)
+ end
+ end
+
+ shared_examples 'handling duplicates disallowed when package exists' do
+ it_behaves_like 'returning error', reason: :conflict,
+ message: 'A package with the same name and version already exists'
+
+ context 'with nuget_duplicate_exception_regex' do
+ before do
+ package_settings.update_column(:nuget_duplicate_exception_regex, ".*#{existing_package.name.last(3)}.*")
+ end
+
+ it_behaves_like 'returning success'
+ end
+ end
+
+ context 'with existing package' do
+ let_it_be(:existing_package) { create(:nuget_package, :with_metadatum, project: project, version: '1.7.15.0') }
+ let_it_be(:metadata) do
+ {
+ package_name: existing_package.name,
+ package_version: existing_package.version,
+ authors: 'authors',
+ description: 'description'
+ }
+ end
+
+ context 'when nuget duplicates are allowed' do
+ before do
+ allow_next_instance_of(Namespace::PackageSetting) do |instance|
+ allow(instance).to receive(:nuget_duplicates_allowed?).and_return(true)
+ end
+ end
+
+ it_behaves_like 'returning success'
+ end
+
+ context 'when nuget duplicates are not allowed' do
+ let!(:package_settings) do
+ create(:namespace_package_setting, :group, namespace: project.namespace, nuget_duplicates_allowed: false)
+ end
+
+ context 'when package file is in object storage' do
+ let(:params) { super().merge(remote_url: 'https://example.com') }
+
+ before do
+ allow_next_instance_of(::Packages::Nuget::ExtractRemoteMetadataFileService) do |instance|
+ allow(instance).to receive(:execute)
+ .and_return(ServiceResponse.success(payload: Nokogiri::XML::Document.new))
+ end
+ allow_next_instance_of(::Packages::Nuget::ExtractMetadataContentService) do |instance|
+ allow(instance).to receive(:execute).and_return(ServiceResponse.success(payload: metadata))
+ end
+ end
+
+ it_behaves_like 'handling duplicates disallowed when package exists'
+
+ context 'when ExtractRemoteMetadataFileService raises ExtractionError' do
+ before do
+ allow_next_instance_of(::Packages::Nuget::ExtractRemoteMetadataFileService) do |instance|
+ allow(instance).to receive(:execute).and_raise(
+ ::Packages::Nuget::ExtractRemoteMetadataFileService::ExtractionError, 'nuspec file not found'
+ )
+ end
+ end
+
+ it_behaves_like 'returning error', reason: :bad_request, message: 'nuspec file not found'
+ end
+
+ context 'when version is normalized' do
+ let(:metadata) { super().merge(package_version: '1.7.15') }
+
+ it_behaves_like 'handling duplicates disallowed when package exists'
+ end
+ end
+
+ context 'when package file is on disk' do
+ before do
+ allow_next_instance_of(::Packages::Nuget::MetadataExtractionService) do |instance|
+ allow(instance).to receive(:execute).and_return(ServiceResponse.success(payload: metadata))
+ end
+ end
+
+ it_behaves_like 'handling duplicates disallowed when package exists'
+ end
+ end
+ end
+
+ context 'with non existing package' do
+ let_it_be(:metadata) do
+ { package_name: 'foo', package_version: '1.0.0', authors: 'author', description: 'description' }
+ end
+
+ before do
+ allow_next_instance_of(::Packages::Nuget::MetadataExtractionService) do |instance|
+ allow(instance).to receive(:execute).and_return(ServiceResponse.success(payload: metadata))
+ end
+ end
+
+ context 'when nuget duplicates are allowed' do
+ let_it_be(:package_settings) do
+ create(:namespace_package_setting, :group, namespace: project.namespace, nuget_duplicates_allowed: true)
+ end
+
+ it_behaves_like 'returning success'
+ end
+
+ context 'when nuget duplicates are not allowed' do
+ let_it_be(:package_settings) do
+ create(:namespace_package_setting, :group, namespace: project.namespace, nuget_duplicates_allowed: false)
+ end
+
+ it_behaves_like 'returning success'
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/nuget/extract_metadata_file_service_spec.rb b/spec/services/packages/nuget/extract_metadata_file_service_spec.rb
index 412c22fe8de..57b08f8773c 100644
--- a/spec/services/packages/nuget/extract_metadata_file_service_spec.rb
+++ b/spec/services/packages/nuget/extract_metadata_file_service_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe Packages::Nuget::ExtractMetadataFileService, feature_category: :package_registry do
- let_it_be(:package_file) { create(:nuget_package).package_files.first }
+ let_it_be_with_reload(:package_file) { create(:nuget_package).package_files.first }
- let(:service) { described_class.new(package_file.id) }
+ let(:service) { described_class.new(package_file) }
describe '#execute' do
subject { service.execute }
@@ -14,7 +14,7 @@ RSpec.describe Packages::Nuget::ExtractMetadataFileService, feature_category: :p
it { expect { subject }.to raise_error(described_class::ExtractionError, error_message) }
end
- context 'with valid package file id' do
+ context 'with valid package file' do
expected_metadata = <<~XML.squish
<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd">
<metadata>
@@ -39,8 +39,8 @@ RSpec.describe Packages::Nuget::ExtractMetadataFileService, feature_category: :p
end
end
- context 'with invalid package file id' do
- let(:package_file) { instance_double('Packages::PackageFile', id: 555) }
+ context 'with invalid package file' do
+ let(:package_file) { nil }
it_behaves_like 'raises an error', 'invalid package file'
end
@@ -53,7 +53,7 @@ RSpec.describe Packages::Nuget::ExtractMetadataFileService, feature_category: :p
it_behaves_like 'raises an error', 'invalid package file'
end
- context 'with a 0 byte package file id' do
+ context 'with a 0 byte package file' do
before do
allow_next_instance_of(Packages::PackageFileUploader) do |instance|
allow(instance).to receive(:size).and_return(0)
@@ -76,7 +76,7 @@ RSpec.describe Packages::Nuget::ExtractMetadataFileService, feature_category: :p
context 'with a too big nuspec file' do
before do
allow_next_instance_of(Zip::File) do |instance|
- allow(instance).to receive(:glob).and_return([instance_double('File', size: 6.megabytes)])
+ allow(instance).to receive(:glob).and_return([instance_double(File, size: 6.megabytes)])
end
end
diff --git a/spec/services/packages/nuget/extract_remote_metadata_file_service_spec.rb b/spec/services/packages/nuget/extract_remote_metadata_file_service_spec.rb
new file mode 100644
index 00000000000..b5aff6e7588
--- /dev/null
+++ b/spec/services/packages/nuget/extract_remote_metadata_file_service_spec.rb
@@ -0,0 +1,126 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Nuget::ExtractRemoteMetadataFileService, feature_category: :package_registry do
+ let_it_be(:remote_url) { 'http://example.com/package.nupkg' }
+ let_it_be(:nupkg_filepath) { 'packages/nuget/package.nupkg' }
+
+ describe '#execute' do
+ subject(:service) { described_class.new(remote_url) }
+
+ context 'when the remote URL is blank' do
+ let(:remote_url) { '' }
+
+ it { expect { service.execute }.to raise_error(described_class::ExtractionError, 'invalid file url') }
+ end
+
+ context 'when the package file is corrupted' do
+ before do
+ allow(Gitlab::HTTP).to receive(:get).with(remote_url, stream_body: true, allow_object_storage: true)
+ .and_yield('corrupted data')
+ end
+
+ it { expect { service.execute }.to raise_error(described_class::ExtractionError, 'nuspec file not found') }
+ end
+
+ context 'when reaching the maximum received fragments' do
+ before do
+ allow(Gitlab::HTTP).to receive(:get).with(remote_url, stream_body: true, allow_object_storage: true)
+ .and_yield('Fragment 1').and_yield('Fragment 2').and_yield('Fragment 3').and_yield('Fragment 4')
+ .and_yield('Fragment 5').and_yield(fixture_file(nupkg_filepath))
+ end
+
+ it { expect { service.execute }.to raise_error(described_class::ExtractionError, 'nuspec file not found') }
+ end
+
+ context 'when nuspec file is too big' do
+ before do
+ allow(Gitlab::HTTP).to receive(:get).with(remote_url, stream_body: true, allow_object_storage: true)
+ .and_yield(fixture_file(nupkg_filepath))
+ allow_next_instance_of(Zip::Entry) do |instance|
+ allow(instance).to receive(:size).and_return(6.megabytes)
+ end
+ end
+
+ it { expect { service.execute }.to raise_error(described_class::ExtractionError, 'nuspec file too big') }
+ end
+
+ context 'when nuspec file is fragmented' do
+ let_it_be(:nuspec_path) { expand_fixture_path('packages/nuget/with_metadata.nuspec') }
+ let_it_be(:tmp_zip) { Tempfile.new('nuget_zip') }
+ let_it_be(:zipped_nuspec) { zip_nuspec_file(nuspec_path, tmp_zip.path).get_raw_input_stream.read }
+ let_it_be(:fragments) { zipped_nuspec.chars.each_slice(zipped_nuspec.size / 2).map(&:join) }
+
+ before do
+ allow(Gitlab::HTTP).to receive(:get).with(remote_url, stream_body: true, allow_object_storage: true)
+ .and_yield(fragments[0]).and_yield(fragments[1])
+ end
+
+ after do
+ tmp_zip.unlink
+ end
+
+ it 'ignores the Zip::DecompressionError and constructs the nuspec file from the fragments' do
+ response = service.execute
+
+ expect(response).to be_success
+ expect(response.payload).to include('<id>DummyProject.WithMetadata</id>')
+ .and include('<version>1.2.3</version>')
+ end
+ end
+
+ context 'when the remote URL is valid' do
+ let(:fragments) { fixture_file(nupkg_filepath).chars.each_slice(1.kilobyte).map(&:join) }
+
+ before do
+ allow(Gitlab::HTTP).to receive(:get).with(remote_url, stream_body: true, allow_object_storage: true)
+ .and_yield(fragments[0]).and_yield(fragments[1]).and_yield(fragments[2]).and_yield(fragments[3])
+ end
+
+ it 'returns a success response with the nuspec file content' do
+ response = service.execute
+
+ expect(response).to be_success
+ expect(response.payload).to include('<id>DummyProject.DummyPackage</id>')
+ .and include('<version>1.0.0</version>')
+ end
+ end
+
+ context 'with a corrupted nupkg file with a wrong entry size' do
+ before do
+ allow(Gitlab::HTTP).to receive(:get).with(remote_url, stream_body: true, allow_object_storage: true)
+ .and_yield(fixture_file(nupkg_filepath))
+ allow_next_instance_of(Zip::Entry) do |instance|
+ allow(instance).to receive(:extract).and_raise(Zip::EntrySizeError)
+ end
+ end
+
+ it {
+ expect do
+ service.execute
+ end.to raise_error(described_class::ExtractionError, /nuspec file has the wrong entry size/)
+ }
+ end
+
+ context 'with a Zip::Error exception' do
+ before do
+ allow(Gitlab::HTTP).to receive(:get).with(remote_url, stream_body: true, allow_object_storage: true)
+ .and_yield(fixture_file(nupkg_filepath))
+ allow(Zip::InputStream).to receive(:open).and_raise(Zip::Error)
+ end
+
+ it {
+ expect do
+ service.execute
+ end.to raise_error(described_class::ExtractionError, /Error opening zip stream/)
+ }
+ end
+ end
+
+ def zip_nuspec_file(nuspec_path, zip_path)
+ Zip::File.open(zip_path, Zip::File::CREATE) do |zipfile|
+ zipfile.add('package.nuspec', nuspec_path)
+ end
+ end
+end
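
A minimal sketch of the streaming technique these examples exercise: fragments of the remote .nupkg are buffered and, after each one, the code tries to pull a *.nuspec entry out of the partial archive, treating decompression errors as "not enough bytes yet". Only Gitlab::HTTP.get with stream_body and the rubyzip classes named in the spec are taken from the document; the method names, fragment limit, and error strings below are assumptions, not the service's real internals:

require 'zip'
require 'stringio'

# Illustrative only; not Packages::Nuget::ExtractRemoteMetadataFileService.
def extract_nuspec_from_stream(url, max_fragments: 5)
  buffer = +''
  fragments = 0

  Gitlab::HTTP.get(url, stream_body: true, allow_object_storage: true) do |fragment|
    buffer << fragment
    fragments += 1
    raise 'nuspec file not found' if fragments > max_fragments

    nuspec = try_read_nuspec(buffer)
    return nuspec if nuspec
  end

  raise 'nuspec file not found'
end

# Attempt to read the first *.nuspec entry from the bytes received so far.
# An incomplete archive raises zip errors, which simply mean "keep streaming".
def try_read_nuspec(buffer)
  Zip::InputStream.open(StringIO.new(buffer)) do |zip|
    while (entry = zip.get_next_entry)
      return zip.read if File.fnmatch?('*.nuspec', entry.name)
    end
  end
  nil
rescue Zip::DecompressionError, Zip::Error
  nil
end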
diff --git a/spec/services/packages/nuget/metadata_extraction_service_spec.rb b/spec/services/packages/nuget/metadata_extraction_service_spec.rb
index c8c06414830..ea7557b6d64 100644
--- a/spec/services/packages/nuget/metadata_extraction_service_spec.rb
+++ b/spec/services/packages/nuget/metadata_extraction_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Packages::Nuget::MetadataExtractionService, feature_category: :package_registry do
let_it_be(:package_file) { create(:nuget_package).package_files.first }
- subject { described_class.new(package_file.id) }
+ subject { described_class.new(package_file) }
describe '#execute' do
let(:nuspec_file_content) do
@@ -49,7 +49,7 @@ RSpec.describe Packages::Nuget::MetadataExtractionService, feature_category: :pa
end
it 'calls the necessary services and executes the metadata extraction' do
- expect(::Packages::Nuget::ExtractMetadataFileService).to receive(:new).with(package_file.id) do
+ expect(::Packages::Nuget::ExtractMetadataFileService).to receive(:new).with(package_file) do
double.tap do |service|
expect(service).to receive(:execute).and_return(double(payload: nuspec_file_content))
end
diff --git a/spec/services/packages/nuget/odata_package_entry_service_spec.rb b/spec/services/packages/nuget/odata_package_entry_service_spec.rb
new file mode 100644
index 00000000000..d4c47538ce2
--- /dev/null
+++ b/spec/services/packages/nuget/odata_package_entry_service_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Nuget::OdataPackageEntryService, feature_category: :package_registry do
+ let_it_be(:project) { build_stubbed(:project) }
+ let_it_be(:params) { { package_name: 'dummy', package_version: '1.0.0' } }
+ let(:doc) { Nokogiri::XML(subject.payload) }
+
+ subject { described_class.new(project, params).execute }
+
+ describe '#execute' do
+ shared_examples 'returning a package entry with the correct attributes' do |pkg_version, content_url_pkg_version|
+ it 'returns a package entry with the correct attributes' do
+ expect(doc.root.name).to eq('entry')
+ expect(doc_node('id').text).to include(
+ id_url(project.id, params[:package_name], pkg_version)
+ )
+ expect(doc_node('title').text).to eq(params[:package_name])
+ expect(doc_node('content').attr('src')).to include(
+ content_url(project.id, params[:package_name], content_url_pkg_version)
+ )
+ expect(doc_node('Version').text).to eq(pkg_version)
+ end
+ end
+
+ context 'when package_version is present' do
+ it 'returns a success ServiceResponse' do
+ expect(subject).to be_success
+ end
+
+ it_behaves_like 'returning a package entry with the correct attributes', '1.0.0', '1.0.0'
+ end
+
+ context 'when package_version is nil' do
+ let(:params) { { package_name: 'dummy', package_version: nil } }
+
+ it 'returns a success ServiceResponse' do
+ expect(subject).to be_success
+ end
+
+ it_behaves_like 'returning a package entry with the correct attributes',
+ described_class::SEMVER_LATEST_VERSION_PLACEHOLDER, described_class::LATEST_VERSION_FOR_V2_DOWNLOAD_ENDPOINT
+ end
+
+ context 'when package_version is 0.0.0-latest-version' do
+ let(:params) { { package_name: 'dummy', package_version: described_class::SEMVER_LATEST_VERSION_PLACEHOLDER } }
+
+ it 'returns a success ServiceResponse' do
+ expect(subject).to be_success
+ end
+
+ it_behaves_like 'returning a package entry with the correct attributes',
+ described_class::SEMVER_LATEST_VERSION_PLACEHOLDER, described_class::LATEST_VERSION_FOR_V2_DOWNLOAD_ENDPOINT
+ end
+ end
+
+ def doc_node(name)
+ doc.css('*').detect { |el| el.name == name }
+ end
+
+ def id_url(id, package_name, package_version)
+ "api/v4/projects/#{id}/packages/nuget/v2/Packages(Id='#{package_name}',Version='#{package_version}')"
+ end
+
+ def content_url(id, package_name, package_version)
+ "api/v4/projects/#{id}/packages/nuget/v2/download/#{package_name}/#{package_version}"
+ end
+end
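The new spec asserts on the service payload by parsing it with Nokogiri and matching elements by local name. A minimal sketch of that pattern, assuming an Atom-style <entry> payload (the XML below is illustrative, not the service's actual output):

    require 'nokogiri'

    payload = <<~XML
      <entry xmlns="http://www.w3.org/2005/Atom">
        <id>api/v4/projects/1/packages/nuget/v2/Packages(Id='dummy',Version='1.0.0')</id>
        <title>dummy</title>
        <content src="api/v4/projects/1/packages/nuget/v2/download/dummy/1.0.0"/>
      </entry>
    XML

    doc = Nokogiri::XML(payload)
    # Match by element name, ignoring namespaces, as the spec's doc_node helper does.
    title   = doc.css('*').detect { |el| el.name == 'title' }
    content = doc.css('*').detect { |el| el.name == 'content' }

    title.text          # => "dummy"
    content.attr('src') # => "api/v4/projects/1/packages/nuget/v2/download/dummy/1.0.0"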
diff --git a/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb b/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb
index b18f62c1c28..e1cce2c87eb 100644
--- a/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb
+++ b/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb
@@ -21,9 +21,9 @@ RSpec.describe Pages::MigrateLegacyStorageToDeploymentService, feature_category:
project.mark_pages_as_deployed
expect(project.pages_metadatum.reload.deployed).to eq(true)
- expect(service.execute).to(
- eq(status: :success,
- message: "Archive not created. Missing public directory in #{project.pages_path}? Marked project as not deployed")
+ expect(service.execute).to eq(
+ status: :success,
+ message: "Archive not created. Missing public directory in #{project.pages_path}? Marked project as not deployed"
)
expect(project.pages_metadatum.reload.deployed).to eq(false)
@@ -35,9 +35,9 @@ RSpec.describe Pages::MigrateLegacyStorageToDeploymentService, feature_category:
project.mark_pages_as_deployed
expect(project.pages_metadatum.reload.deployed).to eq(true)
- expect(service.execute).to(
- eq(status: :success,
- message: "Archive not created. Missing public directory in #{project.pages_path}? Marked project as not deployed")
+ expect(service.execute).to eq(
+ status: :success,
+ message: "Archive not created. Missing public directory in #{project.pages_path}? Marked project as not deployed"
)
expect(project.pages_metadatum.reload.deployed).to eq(true)
@@ -49,9 +49,9 @@ RSpec.describe Pages::MigrateLegacyStorageToDeploymentService, feature_category:
expect(project.pages_metadatum.reload.deployed).to eq(true)
- expect(service.execute).to(
- eq(status: :error,
- message: "Archive not created. Missing public directory in #{project.pages_path}")
+ expect(service.execute).to eq(
+ status: :error,
+ message: "Archive not created. Missing public directory in #{project.pages_path}"
)
expect(project.pages_metadatum.reload.deployed).to eq(true)
@@ -60,9 +60,9 @@ RSpec.describe Pages::MigrateLegacyStorageToDeploymentService, feature_category:
it 'removes pages archive when it cannot save the deployment' do
archive = fixture_file_upload("spec/fixtures/pages.zip")
expect_next_instance_of(::Pages::ZipDirectoryService) do |zip_service|
- expect(zip_service).to receive(:execute).and_return(status: :success,
- archive_path: archive.path,
- entries_count: 3)
+ expect(zip_service).to receive(:execute).and_return(
+ status: :success, archive_path: archive.path, entries_count: 3
+ )
end
expect_next_instance_of(PagesDeployment) do |deployment|
diff --git a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
index 2377fbcf003..7f8992e8bbc 100644
--- a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
+++ b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
@@ -132,8 +132,7 @@ RSpec.describe PagesDomains::ObtainLetsEncryptCertificateService, feature_catego
ef.create_extension("basicConstraints", "CA:TRUE", true),
ef.create_extension("subjectKeyIdentifier", "hash")
]
- cert.add_extension ef.create_extension("authorityKeyIdentifier",
- "keyid:always,issuer:always")
+ cert.add_extension ef.create_extension("authorityKeyIdentifier", "keyid:always,issuer:always")
cert.sign key, OpenSSL::Digest.new('SHA256')
diff --git a/spec/services/preview_markdown_service_spec.rb b/spec/services/preview_markdown_service_spec.rb
index 6fa44310ae5..f6aca9970c8 100644
--- a/spec/services/preview_markdown_service_spec.rb
+++ b/spec/services/preview_markdown_service_spec.rb
@@ -28,9 +28,11 @@ RSpec.describe PreviewMarkdownService, feature_category: :team_planning do
let(:text) { "```suggestion\nfoo\n```" }
let(:params) do
- suggestion_params.merge(text: text,
- target_type: 'MergeRequest',
- target_id: merge_request.iid)
+ suggestion_params.merge(
+ text: text,
+ target_type: 'MergeRequest',
+ target_id: merge_request.iid
+ )
end
let(:service) { described_class.new(project, user, params) }
@@ -52,15 +54,16 @@ RSpec.describe PreviewMarkdownService, feature_category: :team_planning do
end
it 'returns suggestions referenced in text' do
- position = Gitlab::Diff::Position.new(new_path: path,
- new_line: line,
- diff_refs: diff_refs)
+ position = Gitlab::Diff::Position.new(new_path: path, new_line: line, diff_refs: diff_refs)
expect(Gitlab::Diff::SuggestionsParser)
.to receive(:parse)
- .with(text, position: position,
- project: merge_request.project,
- supports_suggestion: true)
+ .with(
+ text,
+ position: position,
+ project: merge_request.project,
+ supports_suggestion: true
+ )
.and_call_original
result = service.execute
diff --git a/spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb b/spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb
index ecabaa28119..0d7d1254428 100644
--- a/spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb
@@ -48,7 +48,24 @@ RSpec.describe Projects::ContainerRepository::Gitlab::DeleteTagsService, feature
stub_delete_reference_requests('A' => 500, 'Ba' => 500)
end
- it { is_expected.to eq(status: :error, message: 'could not delete tags') }
+ it { is_expected.to eq(status: :error, message: "could not delete tags: #{tags.join(', ')}") }
+
+ context 'when deleting a large list of tags fails' do
+ let(:tags) { Array.new(135) { |i| "tag#{i}" } }
+ let(:container_repository) { instance_double(ContainerRepository) }
+
+ before do
+ allow(ContainerRepository).to receive(:find).with(repository).and_return(container_repository)
+ tags.each do |tag|
+ stub_delete_reference_requests(tag => 500)
+ end
+ allow(container_repository).to receive(:delete_tag_by_name).and_return(false)
+ end
+
+ it 'truncates the log message' do
+ expect(subject).to eq(status: :error, message: "could not delete tags: #{tags.join(', ')}".truncate(1000))
+ end
+ end
end
end
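The updated expectation leans on ActiveSupport's String#truncate to cap the error message at 1000 characters. A small standalone sketch of that behaviour (the tag names are made up):

    require 'active_support/core_ext/string/filters'

    tags = Array.new(300) { |i| "tag#{i}" }
    message = "could not delete tags: #{tags.join(', ')}"

    message.length                # => 2311, well over the cap
    message.truncate(1000).length # => 1000, with the tail replaced by "..."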
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index 683e438eb08..ce7e5188c7b 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -156,7 +156,7 @@ RSpec.describe Projects::CreateService, '#execute', feature_category: :groups_an
project = create_project(bot_user, opts)
expect(project.errors.errors.length).to eq 1
- expect(project.errors.messages[:namespace].first).to eq(("is not valid"))
+ expect(project.errors.messages[:namespace].first).to eq("is not valid")
end
end
end
@@ -640,6 +640,17 @@ RSpec.describe Projects::CreateService, '#execute', feature_category: :groups_an
expect(project.project_namespace).to be_in_sync_with_project(project)
end
+ it 'fails when the repository cannot be created' do
+ expect_next_instance_of(Project) do |instance|
+ expect(instance).to receive(:create_repository).and_return(false)
+ end
+
+ project = create_project(user, opts)
+ expect(project).not_to be_persisted
+ expect(project.errors.messages).to have_key(:base)
+ expect(project.errors.messages[:base].first).to match('Failed to create repository')
+ end
+
context 'when another repository already exists on disk' do
let(:opts) do
{
@@ -1158,4 +1169,17 @@ RSpec.describe Projects::CreateService, '#execute', feature_category: :groups_an
expect_not_disabled_features(project, exclude: [:repository, :builds, :merge_requests])
end
end
+
+ it 'adds pages unique domain', feature_category: :pages do
+ stub_pages_setting(enabled: true)
+
+ expect(Gitlab::Pages)
+ .to receive(:add_unique_domain_to)
+ .and_call_original
+
+ project = create_project(user, opts)
+
+ expect(project.project_setting.pages_unique_domain_enabled).to eq(true)
+ expect(project.project_setting.pages_unique_domain).to be_present
+ end
end
diff --git a/spec/services/projects/import_service_spec.rb b/spec/services/projects/import_service_spec.rb
index 97a3b338069..16b9d2618ca 100644
--- a/spec/services/projects/import_service_spec.rb
+++ b/spec/services/projects/import_service_spec.rb
@@ -163,72 +163,100 @@ RSpec.describe Projects::ImportService, feature_category: :importers do
context 'when importer does not support refmap' do
it 'succeeds if repository import is successful' do
- expect(project.repository).to receive(:import_repository).and_return(true)
- expect_next_instance_of(Gitlab::BitbucketImport::Importer) do |importer|
+ expect_next_instance_of(Gitlab::BitbucketImport::ParallelImporter) do |importer|
expect(importer).to receive(:execute).and_return(true)
end
- expect_next_instance_of(Projects::LfsPointers::LfsImportService) do |service|
- expect(service).to receive(:execute).and_return(status: :success)
- end
-
result = subject.execute
expect(result[:status]).to eq :success
end
it 'fails if repository import fails' do
- expect(project.repository)
- .to receive(:import_repository)
- .with('https://bitbucket.org/vim/vim.git', resolved_address: '')
- .and_raise(Gitlab::Git::CommandError, 'Failed to import the repository /a/b/c')
+ expect_next_instance_of(Gitlab::BitbucketImport::ParallelImporter) do |importer|
+ expect(importer).to receive(:execute)
+ .and_raise(Gitlab::Git::CommandError, 'Failed to import the repository /a/b/c')
+ end
result = subject.execute
expect(result[:status]).to eq :error
expect(result[:message]).to eq "Error importing repository #{project.safe_import_url} into #{project.full_path} - Failed to import the repository [FILTERED]"
end
- end
- context 'when lfs import fails' do
- it 'logs the error' do
- error_message = 'error message'
+ context 'when bitbucket_parallel_importer feature flag is disabled' do
+ before do
+ stub_feature_flags(bitbucket_parallel_importer: false)
+ end
- expect(project.repository).to receive(:import_repository).and_return(true)
+ it 'succeeds if repository import is successful' do
+ expect(project.repository).to receive(:import_repository).and_return(true)
+ expect_next_instance_of(Gitlab::BitbucketImport::Importer) do |importer|
+ expect(importer).to receive(:execute).and_return(true)
+ end
- expect_next_instance_of(Gitlab::BitbucketImport::Importer) do |importer|
- expect(importer).to receive(:execute).and_return(true)
+ expect_next_instance_of(Projects::LfsPointers::LfsImportService) do |service|
+ expect(service).to receive(:execute).and_return(status: :success)
+ end
+
+ result = subject.execute
+
+ expect(result[:status]).to eq :success
end
- expect_next_instance_of(Projects::LfsPointers::LfsImportService) do |service|
- expect(service).to receive(:execute).and_return(status: :error, message: error_message)
+ it 'fails if repository import fails' do
+ expect(project.repository)
+ .to receive(:import_repository)
+ .with('https://bitbucket.org/vim/vim.git', resolved_address: '')
+ .and_raise(Gitlab::Git::CommandError, 'Failed to import the repository /a/b/c')
+
+ result = subject.execute
+
+ expect(result[:status]).to eq :error
+ expect(result[:message]).to eq "Error importing repository #{project.safe_import_url} into #{project.full_path} - Failed to import the repository [FILTERED]"
end
- expect(Gitlab::AppLogger).to receive(:error).with("The Lfs import process failed. #{error_message}")
+ context 'when lfs import fails' do
+ it 'logs the error' do
+ error_message = 'error message'
- subject.execute
- end
- end
+ expect(project.repository).to receive(:import_repository).and_return(true)
- context 'when repository import scheduled' do
- before do
- expect(project.repository).to receive(:import_repository).and_return(true)
- allow(subject).to receive(:import_data)
- end
+ expect_next_instance_of(Gitlab::BitbucketImport::Importer) do |importer|
+ expect(importer).to receive(:execute).and_return(true)
+ end
- it 'downloads lfs objects if lfs_enabled is enabled for project' do
- allow(project).to receive(:lfs_enabled?).and_return(true)
+ expect_next_instance_of(Projects::LfsPointers::LfsImportService) do |service|
+ expect(service).to receive(:execute).and_return(status: :error, message: error_message)
+ end
- expect_any_instance_of(Projects::LfsPointers::LfsImportService).to receive(:execute)
+ expect(Gitlab::AppLogger).to receive(:error).with("The Lfs import process failed. #{error_message}")
- subject.execute
- end
+ subject.execute
+ end
+ end
- it 'does not download lfs objects if lfs_enabled is not enabled for project' do
- allow(project).to receive(:lfs_enabled?).and_return(false)
- expect_any_instance_of(Projects::LfsPointers::LfsImportService).not_to receive(:execute)
+ context 'when repository import scheduled' do
+ before do
+ expect(project.repository).to receive(:import_repository).and_return(true)
+ allow(subject).to receive(:import_data)
+ end
- subject.execute
+ it 'downloads lfs objects if lfs_enabled is enabled for project' do
+ allow(project).to receive(:lfs_enabled?).and_return(true)
+
+ expect_any_instance_of(Projects::LfsPointers::LfsImportService).to receive(:execute)
+
+ subject.execute
+ end
+
+ it 'does not download lfs objects if lfs_enabled is not enabled for project' do
+ allow(project).to receive(:lfs_enabled?).and_return(false)
+ expect_any_instance_of(Projects::LfsPointers::LfsImportService).not_to receive(:execute)
+
+ subject.execute
+ end
+ end
end
end
end
diff --git a/spec/services/projects/in_product_marketing_campaign_emails_service_spec.rb b/spec/services/projects/in_product_marketing_campaign_emails_service_spec.rb
index 4ad6fd0edff..fab8cafd1a0 100644
--- a/spec/services/projects/in_product_marketing_campaign_emails_service_spec.rb
+++ b/spec/services/projects/in_product_marketing_campaign_emails_service_spec.rb
@@ -4,33 +4,34 @@ require 'spec_helper'
RSpec.describe Projects::InProductMarketingCampaignEmailsService, feature_category: :experimentation_adoption do
describe '#execute' do
- let(:user) { create(:user, email_opted_in: true) }
+ let(:user) { create(:user) }
let(:project) { create(:project) }
let(:campaign) { Users::InProductMarketingEmail::BUILD_IOS_APP_GUIDE }
+ before do
+ allow(Notify)
+ .to receive(:build_ios_app_guide_email)
+ .and_return(instance_double(ActionMailer::MessageDelivery, deliver_later: true))
+ end
+
subject(:execute) do
described_class.new(project, campaign).execute
end
context 'users can receive marketing emails' do
- let(:owner) { create(:user, email_opted_in: true) }
- let(:maintainer) { create(:user, email_opted_in: true) }
- let(:developer) { create(:user, email_opted_in: true) }
+ let(:maintainer) { create(:user) }
+ let(:developer) { create(:user) }
before do
- project.add_owner(owner)
project.add_developer(developer)
project.add_maintainer(maintainer)
end
it 'sends the email to all project members with access_level >= Developer', :aggregate_failures do
- double = instance_double(ActionMailer::MessageDelivery, deliver_later: true)
-
- [owner, maintainer, developer].each do |user|
+ [project.owner, maintainer, developer].each do |user|
email = user.notification_email_or_default
- expect(Notify).to receive(:build_ios_app_guide_email).with(email) { double }
- expect(double).to receive(:deliver_later)
+ expect(Notify).to receive(:build_ios_app_guide_email).with(email)
end
execute
@@ -39,7 +40,7 @@ RSpec.describe Projects::InProductMarketingCampaignEmailsService, feature_catego
it 'records sent emails', :aggregate_failures do
expect { execute }.to change { Users::InProductMarketingEmail.count }.from(0).to(3)
- [owner, maintainer, developer].each do |user|
+ [project.owner, maintainer, developer].each do |user|
expect(
Users::InProductMarketingEmail.where(
user: user,
@@ -58,15 +59,24 @@ RSpec.describe Projects::InProductMarketingCampaignEmailsService, feature_catego
end
end
- shared_examples 'does nothing' do
- it 'does not send the email' do
+ shared_examples 'does not send the email' do
+ it do
email = user.notification_email_or_default
expect(Notify).not_to receive(:build_ios_app_guide_email).with(email)
execute
end
+ end
+
+ shared_examples 'does not create a record of the sent email' do
+ it do
+ execute
+
+ expect(
+ Users::InProductMarketingEmail.where(
+ user: user,
+ campaign: campaign
+ )
+ ).not_to exist
- it 'does not create a record of the sent email' do
- expect { execute }.not_to change { Users::InProductMarketingEmail.count }
end
end
@@ -94,12 +104,6 @@ RSpec.describe Projects::InProductMarketingCampaignEmailsService, feature_catego
execute
end
end
-
- context 'when user is not opted in to receive marketing emails' do
- let(:user) { create(:user, email_opted_in: false) }
-
- it_behaves_like 'does nothing'
- end
end
context 'when campaign email has already been sent to the user' do
@@ -108,7 +112,7 @@ RSpec.describe Projects::InProductMarketingCampaignEmailsService, feature_catego
create(:in_product_marketing_email, :campaign, user: user, campaign: campaign)
end
- it_behaves_like 'does nothing'
+ it_behaves_like 'does not send the email'
end
context "when user is a reporter" do
@@ -116,7 +120,8 @@ RSpec.describe Projects::InProductMarketingCampaignEmailsService, feature_catego
project.add_reporter(user)
end
- it_behaves_like 'does nothing'
+ it_behaves_like 'does not send the email'
+ it_behaves_like 'does not create a record of the sent email'
end
context "when user is a guest" do
@@ -124,7 +129,8 @@ RSpec.describe Projects::InProductMarketingCampaignEmailsService, feature_catego
project.add_guest(user)
end
- it_behaves_like 'does nothing'
+ it_behaves_like 'does not send the email'
+ it_behaves_like 'does not create a record of the sent email'
end
end
end
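A minimal sketch of the verified-double pattern this diff consolidates into a before block, meant to run inside an RSpec example; Notify.build_ios_app_guide_email comes from the spec above, and the stubbed return value only needs to answer deliver_later:

    delivery = instance_double(ActionMailer::MessageDelivery, deliver_later: true)
    allow(Notify).to receive(:build_ios_app_guide_email).and_return(delivery)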
diff --git a/spec/services/projects/prometheus/alerts/notify_service_spec.rb b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
index 73932887cd9..77e7be6cc0a 100644
--- a/spec/services/projects/prometheus/alerts/notify_service_spec.rb
+++ b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
@@ -86,9 +86,9 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService, feature_category: :i
end
context 'with simultaneous manual configuration' do
- let_it_be(:integration) { create(:alert_management_prometheus_integration, :legacy, project: project) }
+ let_it_be(:alerting_setting) { create(:project_alerting_setting, :with_http_integration, project: project) }
let_it_be(:old_prometheus_integration) { create(:prometheus_integration, project: project) }
- let_it_be(:alerting_setting) { create(:project_alerting_setting, project: project, token: integration.token) }
+ let_it_be(:integration) { project.alert_management_http_integrations.last! }
subject { service.execute(integration.token, integration) }
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index a113f3506e1..6c767876d05 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -20,13 +20,45 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
let(:custom_root_file_metadata) { "spec/fixtures/pages_with_custom_root.zip.meta" }
let(:metadata) { fixture_file_upload(metadata_filename) if File.exist?(metadata_filename) }
- subject { described_class.new(project, build) }
+ subject(:service) { described_class.new(project, build) }
+
+ RSpec.shared_examples 'pages size limit is' do |size_limit|
+ context "when size is below the limit" do
+ before do
+ allow(metadata).to receive(:total_size).and_return(size_limit - 1.megabyte)
+ allow(metadata).to receive(:entries).and_return([])
+ end
+
+ it 'updates pages correctly' do
+ subject.execute
+
+ deploy_status = GenericCommitStatus.last
+ expect(deploy_status.description).not_to be_present
+ expect(project.pages_metadatum).to be_deployed
+ end
+ end
+
+ context "when size is above the limit" do
+ before do
+ allow(metadata).to receive(:total_size).and_return(size_limit + 1.megabyte)
+ allow(metadata).to receive(:entries).and_return([])
+ end
+
+ it 'limits the maximum size of gitlab pages' do
+ subject.execute
+
+ deploy_status = GenericCommitStatus.last
+ expect(deploy_status.description).to match(/artifacts for pages are too large/)
+ expect(deploy_status).to be_script_failure
+ end
+ end
+ end
context 'when a deploy stage already exists', :aggregate_failures do
let!(:stage) { create(:ci_stage, name: 'deploy', pipeline: pipeline) }
it 'assigns the deploy stage' do
- expect { subject.execute }
+ expect { service.execute }
.to change(GenericCommitStatus, :count).by(1)
.and change(Ci::Stage.where(name: 'deploy'), :count).by(0)
@@ -41,7 +73,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
context 'when a deploy stage does not exist' do
it 'assigns the deploy stage' do
- expect { subject.execute }
+ expect { service.execute }
.to change(GenericCommitStatus, :count).by(1)
.and change(Ci::Stage.where(name: 'deploy'), :count).by(1)
@@ -64,7 +96,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
end
it "doesn't delete artifacts after deploying" do
- expect(execute).to eq(:success)
+ expect(service.execute[:status]).to eq(:success)
expect(project.pages_metadatum).to be_deployed
expect(build.artifacts?).to eq(true)
@@ -72,7 +104,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
it 'succeeds' do
expect(project.pages_deployed?).to be_falsey
- expect(execute).to eq(:success)
+ expect(service.execute[:status]).to eq(:success)
expect(project.pages_metadatum).to be_deployed
expect(project.pages_deployed?).to be_truthy
end
@@ -84,12 +116,12 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
root_namespace_id: project.root_namespace.id
}
- expect { subject.execute }.to publish_event(Pages::PageDeployedEvent).with(expected_data)
+ expect { service.execute }.to publish_event(Pages::PageDeployedEvent).with(expected_data)
end
it 'creates pages_deployment and saves it in the metadata' do
expect do
- expect(execute).to eq(:success)
+ expect(service.execute[:status]).to eq(:success)
end.to change { project.pages_deployments.count }.by(1)
deployment = project.pages_deployments.last
@@ -108,7 +140,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
project.reload
expect do
- expect(execute).to eq(:success)
+ expect(service.execute[:status]).to eq(:success)
end.to change { project.pages_deployments.count }.by(1)
expect(project.pages_metadatum.reload.pages_deployment).to eq(project.pages_deployments.last)
@@ -127,12 +159,12 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
)
)
- execute
+ service.execute
end
it 'removes older deployments', :sidekiq_inline do
expect do
- execute
+ service.execute
end.not_to change { PagesDeployment.count } # it creates one and deletes one
expect(PagesDeployment.find_by_id(old_deployment.id)).to be_nil
@@ -144,7 +176,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
let(:metadata_filename) { empty_metadata_filename }
it 'returns an error' do
- expect(execute).not_to eq(:success)
+ expect(service.execute[:status]).not_to eq(:success)
expect(GenericCommitStatus.last.description).to eq("Error: You need to either include a `public/` folder in your artifacts, or specify which one to use for Pages using `publish` in `.gitlab-ci.yml`")
end
@@ -158,7 +190,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
let(:options) { { publish: 'foo' } }
it 'creates pages_deployment and saves it in the metadata' do
- expect(execute).to eq(:success)
+ expect(service.execute[:status]).to eq(:success)
deployment = project.pages_deployments.last
expect(deployment.root_directory).to eq(options[:publish])
@@ -169,7 +201,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
let(:options) { { publish: 'bar' } }
it 'returns an error' do
- expect(execute).not_to eq(:success)
+ expect(service.execute[:status]).not_to eq(:success)
expect(GenericCommitStatus.last.description).to eq("Error: You need to either include a `public/` folder in your artifacts, or specify which one to use for Pages using `publish` in `.gitlab-ci.yml`")
end
@@ -181,7 +213,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
let(:metadata_filename) { "spec/fixtures/pages.zip.meta" }
it 'returns an error' do
- expect(execute).not_to eq(:success)
+ expect(service.execute[:status]).not_to eq(:success)
expect(GenericCommitStatus.last.description).to eq("Error: You need to either include a `public/` folder in your artifacts, or specify which one to use for Pages using `publish` in `.gitlab-ci.yml`")
end
@@ -190,13 +222,13 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
it 'limits pages size' do
stub_application_setting(max_pages_size: 1)
- expect(execute).not_to eq(:success)
+ expect(service.execute[:status]).not_to eq(:success)
end
it 'limits pages file count' do
create(:plan_limits, :default_plan, pages_file_entries: 2)
- expect(execute).not_to eq(:success)
+ expect(service.execute[:status]).not_to eq(:success)
expect(GenericCommitStatus.last.description).to eq("pages site contains 3 file entries, while limit is set to 2")
end
@@ -209,9 +241,11 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
end
it 'raises an error' do
- expect { execute }.to raise_error(SocketError)
+ expect { service.execute }.to raise_error(SocketError)
build.reload
+
+ deploy_status = GenericCommitStatus.last
expect(deploy_status).to be_failed
expect(project.pages_metadatum).not_to be_deployed
end
@@ -223,9 +257,11 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
end
it 'does not raise an error as failed job' do
- execute
+ service.execute
build.reload
+
+ deploy_status = GenericCommitStatus.last
expect(deploy_status).to be_failed
expect(project.pages_metadatum).not_to be_deployed
end
@@ -234,7 +270,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
context 'with background jobs running', :sidekiq_inline do
it 'succeeds' do
expect(project.pages_deployed?).to be_falsey
- expect(execute).to eq(:success)
+ expect(service.execute[:status]).to eq(:success)
end
end
@@ -246,41 +282,43 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
end
end
- shared_examples 'successfully deploys' do
- it 'succeeds' do
- expect do
- expect(execute).to eq(:success)
- end.to change { project.pages_deployments.count }.by(1)
+ it 'creates a new pages deployment and marks it as deployed' do
+ expect do
+ expect(service.execute[:status]).to eq(:success)
+ end.to change { project.pages_deployments.count }.by(1)
- deployment = project.pages_deployments.last
- expect(deployment.ci_build_id).to eq(build.id)
- end
+ deployment = project.pages_deployments.last
+ expect(deployment.ci_build_id).to eq(build.id)
end
- include_examples 'successfully deploys'
-
context 'when old deployment present' do
+ let!(:old_build) { create(:ci_build, name: 'pages', pipeline: old_pipeline, ref: 'HEAD') }
+ let!(:old_deployment) { create(:pages_deployment, ci_build: old_build, project: project) }
+
before do
- old_build = create(:ci_build, pipeline: old_pipeline, ref: 'HEAD')
- old_deployment = create(:pages_deployment, ci_build: old_build, project: project)
project.update_pages_deployment!(old_deployment)
end
- include_examples 'successfully deploys'
+ it 'deactivates old deployments' do
+ expect(service.execute[:status]).to eq(:success)
+
+ expect(old_deployment.reload.deleted_at).not_to be_nil
+ end
end
context 'when newer deployment present' do
before do
new_pipeline = create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha)
- new_build = create(:ci_build, pipeline: new_pipeline, ref: 'HEAD')
+ new_build = create(:ci_build, name: 'pages', pipeline: new_pipeline, ref: 'HEAD')
new_deployment = create(:pages_deployment, ci_build: new_build, project: project)
project.update_pages_deployment!(new_deployment)
end
it 'fails with outdated reference message' do
- expect(execute).to eq(:error)
+ expect(service.execute[:status]).to eq(:error)
expect(project.reload.pages_metadatum).not_to be_deployed
+ deploy_status = GenericCommitStatus.last
expect(deploy_status).to be_failed
expect(deploy_status.description).to eq('build SHA is outdated for this ref')
end
@@ -294,7 +332,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
.and_return(file.size + 1)
end
- expect(execute).not_to eq(:success)
+ expect(service.execute[:status]).not_to eq(:success)
expect(GenericCommitStatus.last.description).to eq('The uploaded artifact size does not match the expected value')
project.pages_metadatum.reload
@@ -318,18 +356,18 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
it 'fails with exception raised' do
expect do
- execute
+ service.execute
end.to raise_error("Validation failed: File sha256 can't be blank")
end
end
it 'fails if no artifacts' do
- expect(execute).not_to eq(:success)
+ expect(service.execute[:status]).not_to eq(:success)
end
it 'fails for invalid archive' do
create(:ci_job_artifact, :archive, file: invalid_file, job: build)
- expect(execute).not_to eq(:success)
+ expect(service.execute[:status]).not_to eq(:success)
end
describe 'maximum pages artifacts size' do
@@ -383,21 +421,13 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
end
it 'marks older pages:deploy jobs retried' do
- expect(execute).to eq(:success)
+ expect(service.execute[:status]).to eq(:success)
expect(older_deploy_job.reload).to be_retried
+
+ deploy_status = GenericCommitStatus.last
expect(deploy_status.ci_stage).to eq(stage)
expect(deploy_status.stage_idx).to eq(stage.position)
end
end
-
- private
-
- def deploy_status
- GenericCommitStatus.where(name: 'pages:deploy').last
- end
-
- def execute
- subject.execute[:status]
- end
end
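The RSpec.shared_examples 'pages size limit is' block added at the top of this file takes the limit as a block argument, so callers pass it through it_behaves_like. A hypothetical inclusion (the 1.megabyte value is illustrative, not taken from the spec):

    describe 'maximum pages artifacts size' do
      it_behaves_like 'pages size limit is', 1.megabyte
    end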
diff --git a/spec/services/projects/update_repository_storage_service_spec.rb b/spec/services/projects/update_repository_storage_service_spec.rb
index d3972009d38..b30c1d30044 100644
--- a/spec/services/projects/update_repository_storage_service_spec.rb
+++ b/spec/services/projects/update_repository_storage_service_spec.rb
@@ -229,6 +229,27 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour
end
end
+ context 'when new shard has a repository pool without the root project' do
+ let!(:new_pool_repository) { create(:pool_repository, :ready, shard: shard_to, disk_path: pool_repository.disk_path) }
+
+ before do
+ pool_repository.update!(source_project: nil)
+ new_pool_repository.update!(source_project: nil)
+ end
+
+ it 'connects project to it' do
+ result = subject.execute
+ expect(result).to be_success
+
+ project.reload.cleanup
+
+ project_pool_repository = project.pool_repository
+
+ expect(project_pool_repository).to eq(new_pool_repository)
+ expect(object_pool_double).to have_received(:link).with(project.repository.raw)
+ end
+ end
+
context 'when repository does not exist' do
let(:project) { create(:project) }
let(:checksum) { nil }
@@ -266,6 +287,32 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour
end
end
+ context 'when project belongs to the repository pool without a root project' do
+ let!(:pool_repository) { create(:pool_repository, :ready, shard: shard_from) }
+
+ before do
+ pool_repository.update!(source_project: nil)
+ project.update!(pool_repository: pool_repository)
+ end
+
+ it 'creates a new repository pool without a root project and connects project to it' do
+ result = subject.execute
+ expect(result).to be_success
+
+ project.reload.cleanup
+
+ new_pool_repository = project.pool_repository
+
+ expect(new_pool_repository).not_to eq(pool_repository)
+ expect(new_pool_repository.shard).to eq(shard_second_storage)
+ expect(new_pool_repository.state).to eq('ready')
+ expect(new_pool_repository.source_project).to eq(nil)
+ expect(new_pool_repository.disk_path).to eq(pool_repository.disk_path)
+
+ expect(object_pool_double).to have_received(:link).with(project.repository.raw)
+ end
+ end
+
context 'when object pool checksum does not match' do
let(:new_object_pool_checksum) { 'not_match' }
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index d9090b87514..195cfe78b3f 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -791,72 +791,26 @@ RSpec.describe Projects::UpdateService, feature_category: :groups_and_projects d
end
describe 'when updating pages unique domain', feature_category: :pages do
- let(:group) { create(:group, path: 'group') }
- let(:project) { create(:project, path: 'project', group: group) }
-
- it 'updates project pages unique domain' do
- expect do
- update_project(project, user, project_setting_attributes: {
- pages_unique_domain_enabled: true
- })
- end.to change { project.project_setting.pages_unique_domain_enabled }
-
- expect(project.project_setting.pages_unique_domain_enabled).to eq true
- expect(project.project_setting.pages_unique_domain).to match %r{project-group-\w+}
- end
-
- it 'does not changes unique domain when it already exists' do
- project.project_setting.update!(
- pages_unique_domain_enabled: false,
- pages_unique_domain: 'unique-domain'
- )
-
- expect do
- update_project(project, user, project_setting_attributes: {
- pages_unique_domain_enabled: true
- })
- end.to change { project.project_setting.pages_unique_domain_enabled }
-
- expect(project.project_setting.pages_unique_domain_enabled).to eq true
- expect(project.project_setting.pages_unique_domain).to eq 'unique-domain'
+ before do
+ stub_pages_setting(enabled: true)
end
- it 'does not changes unique domain when it disabling unique domain' do
- project.project_setting.update!(
- pages_unique_domain_enabled: true,
- pages_unique_domain: 'unique-domain'
- )
-
- expect do
- update_project(project, user, project_setting_attributes: {
- pages_unique_domain_enabled: false
- })
- end.not_to change { project.project_setting.pages_unique_domain }
+ context 'when turning it on' do
+ it 'adds pages unique domain' do
+ expect(Gitlab::Pages).to receive(:add_unique_domain_to)
- expect(project.project_setting.pages_unique_domain_enabled).to eq false
- expect(project.project_setting.pages_unique_domain).to eq 'unique-domain'
+ expect { update_project(project, user, project_setting_attributes: { pages_unique_domain_enabled: true }) }
+ .to change { project.project_setting.pages_unique_domain_enabled }
+ .from(false).to(true)
+ end
end
- context 'when there is another project with the unique domain' do
- it 'fails pages unique domain already exists' do
- create(
- :project_setting,
- pages_unique_domain_enabled: true,
- pages_unique_domain: 'unique-domain'
- )
-
- allow(Gitlab::Pages::RandomDomain)
- .to receive(:generate)
- .and_return('unique-domain')
+ context 'when turning it off' do
+ it 'does not add pages unique domain' do
+ expect(Gitlab::Pages).not_to receive(:add_unique_domain_to)
- result = update_project(project, user, project_setting_attributes: {
- pages_unique_domain_enabled: true
- })
-
- expect(result).to eq(
- status: :error,
- message: 'Project setting pages unique domain has already been taken'
- )
+ expect { update_project(project, user, project_setting_attributes: { pages_unique_domain_enabled: false }) }
+ .not_to change { project.project_setting.pages_unique_domain_enabled }
end
end
end
diff --git a/spec/services/protected_branches/api_service_spec.rb b/spec/services/protected_branches/api_service_spec.rb
index f7f5f451a49..d7717f08efb 100644
--- a/spec/services/protected_branches/api_service_spec.rb
+++ b/spec/services/protected_branches/api_service_spec.rb
@@ -6,10 +6,12 @@ RSpec.describe ProtectedBranches::ApiService, feature_category: :compliance_mana
shared_examples 'execute with entity' do
it 'creates a protected branch with prefilled defaults' do
expect(::ProtectedBranches::CreateService).to receive(:new).with(
- entity, user, hash_including(
- push_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }],
- merge_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }]
- )
+ entity,
+ user,
+ hash_including(
+ push_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }],
+ merge_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }]
+ )
).and_call_original
expect(described_class.new(entity, user, { name: 'new name' }).create).to be_valid
@@ -17,10 +19,12 @@ RSpec.describe ProtectedBranches::ApiService, feature_category: :compliance_mana
it 'updates a protected branch without prefilled defaults' do
expect(::ProtectedBranches::UpdateService).to receive(:new).with(
- entity, user, hash_including(
- push_access_levels_attributes: [],
- merge_access_levels_attributes: []
- )
+ entity,
+ user,
+ hash_including(
+ push_access_levels_attributes: [],
+ merge_access_levels_attributes: []
+ )
).and_call_original
expect do
diff --git a/spec/services/push_event_payload_service_spec.rb b/spec/services/push_event_payload_service_spec.rb
index 50da5ca9b24..999b71ff754 100644
--- a/spec/services/push_event_payload_service_spec.rb
+++ b/spec/services/push_event_payload_service_spec.rb
@@ -190,9 +190,7 @@ RSpec.describe PushEventPayloadService, feature_category: :source_code_managemen
end
it 'returns :removed when removing an existing ref' do
- service = described_class.new(event,
- before: '123',
- after: Gitlab::Git::BLANK_SHA)
+ service = described_class.new(event, before: '123', after: Gitlab::Git::BLANK_SHA)
expect(service.action).to eq(:removed)
end
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 30a3c212ba5..5e7fb8397e3 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -29,9 +29,11 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
end
before do
- stub_licensed_features(multiple_issue_assignees: false,
- multiple_merge_request_reviewers: false,
- multiple_merge_request_assignees: false)
+ stub_licensed_features(
+ multiple_issue_assignees: false,
+ multiple_merge_request_reviewers: false,
+ multiple_merge_request_assignees: false
+ )
end
describe '#execute' do
@@ -1394,6 +1396,11 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
let(:issuable) { merge_request }
end
+ it_behaves_like 'subscribe command' do
+ let(:content) { '/subscribe' }
+ let(:issuable) { work_item }
+ end
+
it_behaves_like 'unsubscribe command' do
let(:content) { '/unsubscribe' }
let(:issuable) { issue }
@@ -1404,6 +1411,11 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
let(:issuable) { merge_request }
end
+ it_behaves_like 'unsubscribe command' do
+ let(:content) { '/unsubscribe' }
+ let(:issuable) { work_item }
+ end
+
it_behaves_like 'failed command', 'Could not apply due command.' do
let(:content) { '/due 2016-08-28' }
let(:issuable) { merge_request }
@@ -1860,11 +1872,21 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
let(:issuable) { merge_request }
end
+ it_behaves_like 'award command' do
+ let(:content) { '/award :100:' }
+ let(:issuable) { work_item }
+ end
+
context 'ignores command with no argument' do
it_behaves_like 'failed command' do
let(:content) { '/award' }
let(:issuable) { issue }
end
+
+ it_behaves_like 'failed command' do
+ let(:content) { '/award' }
+ let(:issuable) { work_item }
+ end
end
context 'ignores non-existing / invalid emojis' do
@@ -1877,6 +1899,11 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
let(:content) { '/award :lorem_ipsum:' }
let(:issuable) { issue }
end
+
+ it_behaves_like 'failed command' do
+ let(:content) { '/award :lorem_ipsum:' }
+ let(:issuable) { work_item }
+ end
end
context 'if issuable is a Commit' do
diff --git a/spec/services/releases/create_service_spec.rb b/spec/services/releases/create_service_spec.rb
index ca5dd912e77..0170c3abcaf 100644
--- a/spec/services/releases/create_service_spec.rb
+++ b/spec/services/releases/create_service_spec.rb
@@ -57,7 +57,7 @@ RSpec.describe Releases::CreateService, feature_category: :continuous_integratio
context 'when project is a catalog resource' do
let(:ref) { 'master' }
- let!(:catalog_resource) { create(:catalog_resource, project: project) }
+ let!(:ci_catalog_resource) { create(:ci_catalog_resource, project: project) }
context 'and it is valid' do
let_it_be(:project) { create(:project, :repository, description: 'our components') }
diff --git a/spec/services/releases/destroy_service_spec.rb b/spec/services/releases/destroy_service_spec.rb
index 953490ac379..2b6e96a781e 100644
--- a/spec/services/releases/destroy_service_spec.rb
+++ b/spec/services/releases/destroy_service_spec.rb
@@ -28,6 +28,26 @@ RSpec.describe Releases::DestroyService, feature_category: :release_orchestratio
it 'returns the destroyed object' do
is_expected.to include(status: :success, release: release)
end
+
+ context 'when the release is for a catalog resource' do
+ let!(:catalog_resource) { create(:ci_catalog_resource, project: project, state: 'published') }
+ let!(:version) { create(:ci_catalog_resource_version, catalog_resource: catalog_resource, release: release) }
+
+ it 'does not update the catalog resources if there are still releases' do
+ second_release = create(:release, project: project, tag: 'v1.2.0')
+ create(:ci_catalog_resource_version, catalog_resource: catalog_resource, release: second_release)
+
+ subject
+
+ expect(catalog_resource.reload.state).to eq('published')
+ end
+
+ it 'updates the catalog resource if there are no more releases' do
+ subject
+
+ expect(catalog_resource.reload.state).to eq('draft')
+ end
+ end
end
context 'when tag does not exist in the repository' do
@@ -42,9 +62,7 @@ RSpec.describe Releases::DestroyService, feature_category: :release_orchestratio
let!(:release) {}
it 'returns an error' do
- is_expected.to include(status: :error,
- message: 'Release does not exist',
- http_status: 404)
+ is_expected.to include(status: :error, message: 'Release does not exist', http_status: 404)
end
end
@@ -52,9 +70,7 @@ RSpec.describe Releases::DestroyService, feature_category: :release_orchestratio
let(:user) { repoter }
it 'returns an error' do
- is_expected.to include(status: :error,
- message: 'Access Denied',
- http_status: 403)
+ is_expected.to include(status: :error, message: 'Access Denied', http_status: 403)
end
end
diff --git a/spec/services/resource_access_tokens/revoke_service_spec.rb b/spec/services/resource_access_tokens/revoke_service_spec.rb
index c00146961e3..060697cd1df 100644
--- a/spec/services/resource_access_tokens/revoke_service_spec.rb
+++ b/spec/services/resource_access_tokens/revoke_service_spec.rb
@@ -33,8 +33,7 @@ RSpec.describe ResourceAccessTokens::RevokeService, feature_category: :system_ac
subject
expect(
- Users::GhostUserMigration.where(user: resource_bot,
- initiator_user: user)
+ Users::GhostUserMigration.where(user: resource_bot, initiator_user: user)
).to be_exists
end
diff --git a/spec/services/resource_events/change_labels_service_spec.rb b/spec/services/resource_events/change_labels_service_spec.rb
index 8393ce78df8..28b345f8191 100644
--- a/spec/services/resource_events/change_labels_service_spec.rb
+++ b/spec/services/resource_events/change_labels_service_spec.rb
@@ -49,10 +49,8 @@ RSpec.describe ResourceEvents::ChangeLabelsService, feature_category: :team_plan
expect(event.action).to eq(action)
end
- it 'expires resource note etag cache' do
- expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(
- "/#{resource.project.namespace.to_param}/#{resource.project.to_param}/noteable/issue/#{resource.id}/notes"
- )
+ it 'broadcasts resource note change' do
+ expect(resource).to receive(:broadcast_notes_changed)
described_class.new(resource, author).execute(added_labels: [labels[0]])
end
@@ -126,9 +124,11 @@ RSpec.describe ResourceEvents::ChangeLabelsService, feature_category: :team_plan
change_labels
end
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_LABEL_CHANGED }
+ it_behaves_like 'internal event tracking' do
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_LABEL_CHANGED }
let(:user) { author }
+ let(:namespace) { project.namespace }
+
subject(:service_action) { change_labels }
end
end
diff --git a/spec/services/resource_events/merge_into_notes_service_spec.rb b/spec/services/resource_events/merge_into_notes_service_spec.rb
index 6eb6780d704..39dd2508728 100644
--- a/spec/services/resource_events/merge_into_notes_service_spec.rb
+++ b/spec/services/resource_events/merge_into_notes_service_spec.rb
@@ -61,8 +61,7 @@ RSpec.describe ResourceEvents::MergeIntoNotesService, feature_category: :team_pl
create_event(created_at: 4.days.ago)
event = create_event(created_at: 1.day.ago)
- notes = described_class.new(resource, user,
- last_fetched_at: 2.days.ago).execute
+ notes = described_class.new(resource, user, last_fetched_at: 2.days.ago).execute
expect(notes.count).to eq 1
expect(notes.first.discussion_id).to eq event.reload.discussion_id
diff --git a/spec/services/security/ci_configuration/dependency_scanning_create_service_spec.rb b/spec/services/security/ci_configuration/dependency_scanning_create_service_spec.rb
index 7ac2249642a..abd412097d2 100644
--- a/spec/services/security/ci_configuration/dependency_scanning_create_service_spec.rb
+++ b/spec/services/security/ci_configuration/dependency_scanning_create_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Security::CiConfiguration::DependencyScanningCreateService, :snowplow,
- feature_category: :software_composition_analysis do
+ feature_category: :software_composition_analysis do
subject(:result) { described_class.new(project, user).execute }
let(:branch_name) { 'set-dependency-scanning-config-1' }
diff --git a/spec/services/security/merge_reports_service_spec.rb b/spec/services/security/merge_reports_service_spec.rb
index 809d0b27c20..c141bbe5b5a 100644
--- a/spec/services/security/merge_reports_service_spec.rb
+++ b/spec/services/security/merge_reports_service_spec.rb
@@ -16,78 +16,87 @@ RSpec.describe Security::MergeReportsService, '#execute', feature_category: :cod
let(:identifier_wasc) { build(:ci_reports_security_identifier, external_id: '13', external_type: 'wasc') }
let(:finding_id_1) do
- build(:ci_reports_security_finding,
- identifiers: [identifier_1_primary, identifier_1_cve],
- scanner: scanner_1,
- severity: :low
- )
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_1_primary, identifier_1_cve],
+ scanner: scanner_1,
+ severity: :low
+ )
end
let(:finding_id_1_extra) do
- build(:ci_reports_security_finding,
- identifiers: [identifier_1_primary, identifier_1_cve],
- scanner: scanner_1,
- severity: :low
- )
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_1_primary, identifier_1_cve],
+ scanner: scanner_1,
+ severity: :low
+ )
end
let(:finding_id_2_loc_1) do
- build(:ci_reports_security_finding,
- identifiers: [identifier_2_primary, identifier_2_cve],
- location: build(:ci_reports_security_locations_sast, start_line: 32, end_line: 34),
- scanner: scanner_2,
- severity: :medium
- )
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_2_primary, identifier_2_cve],
+ location: build(:ci_reports_security_locations_sast, start_line: 32, end_line: 34),
+ scanner: scanner_2,
+ severity: :medium
+ )
end
let(:finding_id_2_loc_1_extra) do
- build(:ci_reports_security_finding,
- identifiers: [identifier_2_primary, identifier_2_cve],
- location: build(:ci_reports_security_locations_sast, start_line: 32, end_line: 34),
- scanner: scanner_2,
- severity: :medium
- )
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_2_primary, identifier_2_cve],
+ location: build(:ci_reports_security_locations_sast, start_line: 32, end_line: 34),
+ scanner: scanner_2,
+ severity: :medium
+ )
end
let(:finding_id_2_loc_2) do
- build(:ci_reports_security_finding,
- identifiers: [identifier_2_primary, identifier_2_cve],
- location: build(:ci_reports_security_locations_sast, start_line: 42, end_line: 44),
- scanner: scanner_2,
- severity: :medium
- )
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_2_primary, identifier_2_cve],
+ location: build(:ci_reports_security_locations_sast, start_line: 42, end_line: 44),
+ scanner: scanner_2,
+ severity: :medium
+ )
end
let(:finding_cwe_1) do
- build(:ci_reports_security_finding,
- identifiers: [identifier_cwe],
- scanner: scanner_3,
- severity: :high
- )
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_cwe],
+ scanner: scanner_3,
+ severity: :high
+ )
end
let(:finding_cwe_2) do
- build(:ci_reports_security_finding,
- identifiers: [identifier_cwe],
- scanner: scanner_1,
- severity: :critical
- )
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_cwe],
+ scanner: scanner_1,
+ severity: :critical
+ )
end
let(:finding_wasc_1) do
- build(:ci_reports_security_finding,
- identifiers: [identifier_wasc],
- scanner: scanner_1,
- severity: :medium
- )
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_wasc],
+ scanner: scanner_1,
+ severity: :medium
+ )
end
let(:finding_wasc_2) do
- build(:ci_reports_security_finding,
- identifiers: [identifier_wasc],
- scanner: scanner_2,
- severity: :critical
- )
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_wasc],
+ scanner: scanner_2,
+ severity: :critical
+ )
end
let(:report_1_findings) { [finding_id_1, finding_id_2_loc_1, finding_id_2_loc_1_extra, finding_cwe_2, finding_wasc_1] }
diff --git a/spec/services/service_desk/custom_email_verifications/create_service_spec.rb b/spec/services/service_desk/custom_email_verifications/create_service_spec.rb
index fceb6fc78b4..0046213e0b2 100644
--- a/spec/services/service_desk/custom_email_verifications/create_service_spec.rb
+++ b/spec/services/service_desk/custom_email_verifications/create_service_spec.rb
@@ -14,6 +14,12 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::CreateService, feature_cat
let(:service) { described_class.new(project: project, current_user: user) }
+ let(:error_feature_flag_disabled) { 'Feature flag service_desk_custom_email is not enabled' }
+ let(:error_user_not_authorized) { s_('ServiceDesk|User cannot manage project.') }
+ let(:error_settings_missing) { s_('ServiceDesk|Service Desk setting missing') }
+ let(:expected_error_message) { error_settings_missing }
+ let(:logger_params) { { category: 'custom_email_verification' } }
+
before do
allow(message_delivery).to receive(:deliver_later)
allow(Notify).to receive(:service_desk_verification_triggered_email).and_return(message_delivery)
@@ -29,6 +35,10 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::CreateService, feature_cat
expect(service).to receive(:setup_and_deliver_verification_email).exactly(0).times
expect(Notify).to receive(:service_desk_verification_triggered_email).exactly(0).times
+ expect(Gitlab::AppLogger).to receive(:warn).with(logger_params.merge(
+ error_message: expected_error_message
+ )).once
+
response = service.execute
expect(response).to be_error
@@ -48,6 +58,10 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::CreateService, feature_cat
# Correct amount of result notification emails were sent
expect(Notify).to receive(:service_desk_verification_result_email).exactly(project.owners.size + 1).times
+ expect(Gitlab::AppLogger).to receive(:info).with(logger_params.merge(
+ error_message: error_identifier.to_s
+ )).once
+
response = service.execute
expect(response).to be_error
@@ -67,6 +81,8 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::CreateService, feature_cat
it_behaves_like 'a verification process that exits early'
context 'when feature flag :service_desk_custom_email is disabled' do
+ let(:expected_error_message) { error_feature_flag_disabled }
+
before do
stub_feature_flags(service_desk_custom_email: false)
end
@@ -77,12 +93,17 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::CreateService, feature_cat
context 'when service desk setting exists' do
let(:settings) { create(:service_desk_setting, project: project, custom_email: 'user@example.com') }
let(:service) { described_class.new(project: settings.project, current_user: user) }
+ let(:expected_error_message) { error_user_not_authorized }
it 'aborts verification process and exits early', :aggregate_failures do
# Because we exit early it should not send any verification or notification emails
expect(service).to receive(:setup_and_deliver_verification_email).exactly(0).times
expect(Notify).to receive(:service_desk_verification_triggered_email).exactly(0).times
+ expect(Gitlab::AppLogger).to receive(:warn).with(logger_params.merge(
+ error_message: expected_error_message
+ )).once
+
response = service.execute
settings.reload
@@ -105,6 +126,8 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::CreateService, feature_cat
# Check whether the correct amount of notification emails were sent
expect(Notify).to receive(:service_desk_verification_triggered_email).exactly(project.owners.size + 1).times
+ expect(Gitlab::AppLogger).to receive(:info).with(logger_params).once
+
response = service.execute
settings.reload
diff --git a/spec/services/service_desk/custom_email_verifications/update_service_spec.rb b/spec/services/service_desk/custom_email_verifications/update_service_spec.rb
index f1e683c0185..d882cd8635a 100644
--- a/spec/services/service_desk/custom_email_verifications/update_service_spec.rb
+++ b/spec/services/service_desk/custom_email_verifications/update_service_spec.rb
@@ -14,6 +14,16 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_cat
let(:message_delivery) { instance_double(ActionMailer::MessageDelivery) }
let(:service) { described_class.new(project: settings.project, params: { mail: mail_object }) }
+ let(:error_feature_flag_disabled) { 'Feature flag service_desk_custom_email is not enabled' }
+ let(:error_parameter_missing) { s_('ServiceDesk|Service Desk setting or verification object missing') }
+ let(:error_already_finished) { s_('ServiceDesk|Custom email address has already been verified.') }
+ let(:error_already_failed) do
+ s_('ServiceDesk|Custom email address verification has already been processed and failed.')
+ end
+
+ let(:expected_error_message) { error_parameter_missing }
+ let(:logger_params) { { category: 'custom_email_verification' } }
+
before do
allow(message_delivery).to receive(:deliver_later)
allow(Notify).to receive(:service_desk_verification_result_email).and_return(message_delivery)
@@ -23,6 +33,10 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_cat
it 'refuses to verify and sends result emails' do
expect(Notify).to receive(:service_desk_verification_result_email).twice
+ expect(Gitlab::AppLogger).to receive(:info).with(logger_params.merge(
+ error_message: expected_error_identifier.to_s
+ )).once
+
response = described_class.new(project: settings.project, params: { mail: mail_object }).execute
settings.reset
@@ -41,6 +55,10 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_cat
it 'exits early' do
expect(Notify).to receive(:service_desk_verification_result_email).exactly(0).times
+ expect(Gitlab::AppLogger).to receive(:warn).with(logger_params.merge(
+ error_message: expected_error_message
+ )).once
+
response = service.execute
settings.reset
@@ -55,6 +73,10 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_cat
it 'exits early' do
expect(Notify).to receive(:service_desk_verification_result_email).exactly(0).times
+ expect(Gitlab::AppLogger).to receive(:warn).with(logger_params.merge(
+ error_message: expected_error_message
+ )).once
+
response = service.execute
settings.reset
@@ -64,6 +86,8 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_cat
end
context 'when feature flag :service_desk_custom_email is disabled' do
+ let(:expected_error_message) { error_feature_flag_disabled }
+
before do
stub_feature_flags(service_desk_custom_email: false)
end
@@ -71,6 +95,10 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_cat
it 'exits early' do
expect(Notify).to receive(:service_desk_verification_result_email).exactly(0).times
+ expect(Gitlab::AppLogger).to receive(:warn).with(logger_params.merge(
+ error_message: expected_error_message
+ )).once
+
response = service.execute
expect(response).to be_error
@@ -85,6 +113,8 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_cat
it_behaves_like 'a failing verification process', 'mail_not_received_within_timeframe'
context 'when already verified' do
+ let(:expected_error_message) { error_already_finished }
+
before do
verification.mark_as_finished!
end
@@ -93,6 +123,8 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_cat
end
context 'when we already have an error' do
+ let(:expected_error_message) { error_already_failed }
+
before do
verification.mark_as_failed!(:smtp_host_issue)
end
@@ -112,6 +144,8 @@ RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_cat
it 'verifies and sends result emails' do
expect(Notify).to receive(:service_desk_verification_result_email).twice
+ expect(Gitlab::AppLogger).to receive(:info).with(logger_params).once
+
response = service.execute
settings.reset
diff --git a/spec/services/service_desk/custom_emails/create_service_spec.rb b/spec/services/service_desk/custom_emails/create_service_spec.rb
index 0d9582ba235..2029c9a0c3f 100644
--- a/spec/services/service_desk/custom_emails/create_service_spec.rb
+++ b/spec/services/service_desk/custom_emails/create_service_spec.rb
@@ -17,9 +17,14 @@ RSpec.describe ServiceDesk::CustomEmails::CreateService, feature_category: :serv
let(:params) { {} }
let(:message_delivery) { instance_double(ActionMailer::MessageDelivery) }
let(:message) { instance_double(Mail::Message) }
+ let(:logger_params) { { category: 'custom_email' } }
shared_examples 'a service that exits with error' do
it 'exits early' do
+ expect(Gitlab::AppLogger).to receive(:warn).with(logger_params.merge(
+ error_message: expected_error_message
+ )).once
+
response = service.execute
expect(response).to be_error
@@ -29,6 +34,10 @@ RSpec.describe ServiceDesk::CustomEmails::CreateService, feature_category: :serv
shared_examples 'a failing service that does not create records' do
it 'exits with error and does not create records' do
+ expect(Gitlab::AppLogger).to receive(:warn).with(logger_params.merge(
+ error_message: expected_error_message
+ )).once
+
response = service.execute
project.reset
@@ -148,6 +157,10 @@ RSpec.describe ServiceDesk::CustomEmails::CreateService, feature_category: :serv
end
it 'creates all records returns a successful response' do
+ # Because we also log in ServiceDesk::CustomEmailVerifications::CreateService
+ expect(Gitlab::AppLogger).to receive(:info).with({ category: 'custom_email_verification' }).once
+ expect(Gitlab::AppLogger).to receive(:info).with(logger_params).once
+
response = service.execute
project.reset
diff --git a/spec/services/service_desk/custom_emails/destroy_service_spec.rb b/spec/services/service_desk/custom_emails/destroy_service_spec.rb
index f5a22e26865..7f53a941d4e 100644
--- a/spec/services/service_desk/custom_emails/destroy_service_spec.rb
+++ b/spec/services/service_desk/custom_emails/destroy_service_spec.rb
@@ -12,9 +12,14 @@ RSpec.describe ServiceDesk::CustomEmails::DestroyService, feature_category: :ser
let(:error_user_not_authorized) { s_('ServiceDesk|User cannot manage project.') }
let(:error_does_not_exist) { s_('ServiceDesk|Custom email does not exist') }
let(:expected_error_message) { nil }
+ let(:logger_params) { { category: 'custom_email' } }
shared_examples 'a service that exits with error' do
it 'exits early' do
+ expect(Gitlab::AppLogger).to receive(:warn).with(logger_params.merge(
+ error_message: expected_error_message
+ )).once
+
response = service.execute
expect(response).to be_error
@@ -24,6 +29,8 @@ RSpec.describe ServiceDesk::CustomEmails::DestroyService, feature_category: :ser
shared_examples 'a successful service that destroys all custom email records' do
it 'ensures no custom email records exist' do
+ expect(Gitlab::AppLogger).to receive(:info).with(logger_params).once
+
project.reset
response = service.execute
diff --git a/spec/services/service_desk_settings/update_service_spec.rb b/spec/services/service_desk_settings/update_service_spec.rb
index ff564963677..27978225bcf 100644
--- a/spec/services/service_desk_settings/update_service_spec.rb
+++ b/spec/services/service_desk_settings/update_service_spec.rb
@@ -3,7 +3,12 @@ require 'spec_helper'
RSpec.describe ServiceDeskSettings::UpdateService, feature_category: :service_desk do
describe '#execute' do
- let_it_be(:settings) { create(:service_desk_setting, outgoing_name: 'original name') }
+ let_it_be(:settings) do
+ create(:service_desk_setting, outgoing_name: 'original name', custom_email: 'user@example.com')
+ end
+
+ let_it_be(:credential) { create(:service_desk_custom_email_credential, project: settings.project) }
+ let_it_be(:verification) { create(:service_desk_custom_email_verification, :finished, project: settings.project) }
let_it_be(:user) { create(:user) }
context 'with valid params' do
@@ -16,6 +21,24 @@ RSpec.describe ServiceDeskSettings::UpdateService, feature_category: :service_de
expect(settings.reload.outgoing_name).to eq 'some name'
expect(settings.reload.project_key).to eq 'foo'
end
+
+ context 'with custom email verification in finished state' do
+ let(:params) { { custom_email_enabled: true } }
+
+ before do
+ allow(Gitlab::AppLogger).to receive(:info)
+ end
+
+ it 'allows to enable custom email' do
+ settings.project.reset
+
+ response = described_class.new(settings.project, user, params).execute
+
+ expect(response).to be_success
+ expect(settings.reset.custom_email_enabled).to be true
+ expect(Gitlab::AppLogger).to have_received(:info).with({ category: 'custom_email' })
+ end
+ end
end
context 'when project_key is an empty string' do
diff --git a/spec/services/spam/spam_action_service_spec.rb b/spec/services/spam/spam_action_service_spec.rb
index fc86ecfe7f2..4133609d9ae 100644
--- a/spec/services/spam/spam_action_service_spec.rb
+++ b/spec/services/spam/spam_action_service_spec.rb
@@ -222,6 +222,9 @@ RSpec.describe Spam::SpamActionService, feature_category: :instance_resiliency d
end
context 'spam verdict service advises to block the user' do
+ # create a fresh user to ensure it is in the unbanned state
+ let(:user) { create(:user) }
+
before do
allow(fake_verdict_service).to receive(:execute).and_return(BLOCK_USER)
end
@@ -234,6 +237,14 @@ RSpec.describe Spam::SpamActionService, feature_category: :instance_resiliency d
expect(response.message).to match(expected_service_check_response_message)
expect(target).to be_spam
end
+
+ it 'bans the user' do
+ subject
+
+ custom_attribute = user.custom_attributes.by_key(UserCustomAttribute::AUTO_BANNED_BY_SPAM_LOG_ID).first
+ expect(custom_attribute.value).to eq(target.spam_log.id.to_s)
+ expect(user).to be_banned
+ end
end
context 'when spam verdict service conditionally allows' do
diff --git a/spec/services/system_notes/alert_management_service_spec.rb b/spec/services/system_notes/alert_management_service_spec.rb
index 1e3be24b05f..ac5682a35bb 100644
--- a/spec/services/system_notes/alert_management_service_spec.rb
+++ b/spec/services/system_notes/alert_management_service_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe ::SystemNotes::AlertManagementService, feature_category: :groups_
subject { described_class.new(noteable: noteable, project: project).create_new_alert('Some Service') }
it_behaves_like 'a system note' do
- let(:author) { User.alert_bot }
+ let(:author) { Users::Internal.alert_bot }
let(:action) { 'new_alert_added' }
end
@@ -62,7 +62,7 @@ RSpec.describe ::SystemNotes::AlertManagementService, feature_category: :groups_
subject { described_class.new(noteable: noteable, project: project).log_resolving_alert('Some Service') }
it_behaves_like 'a system note' do
- let(:author) { User.alert_bot }
+ let(:author) { Users::Internal.alert_bot }
let(:action) { 'new_alert_added' }
end
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index af660a9b72e..4a795f2db20 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -28,6 +28,14 @@ RSpec.describe ::SystemNotes::IssuablesService, feature_category: :team_planning
expect(subject.note).to eq "marked this issue as related to #{noteable_ref.to_reference(project)}"
end
end
+
+ context 'with work items' do
+ let_it_be(:noteable) { create(:work_item, :task, project: project) }
+
+ it 'sets the note text with the correct work item type' do
+ expect(subject.note).to eq "marked this task as related to #{noteable_ref.to_reference(project)}"
+ end
+ end
end
describe '#unrelate_issuable' do
@@ -686,9 +694,10 @@ RSpec.describe ::SystemNotes::IssuablesService, feature_category: :team_planning
subject
end
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_CLONED }
+ it_behaves_like 'internal event tracking' do
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_CLONED }
let(:user) { author }
+ let(:namespace) { project.namespace }
end
end
end
diff --git a/spec/services/system_notes/time_tracking_service_spec.rb b/spec/services/system_notes/time_tracking_service_spec.rb
index a3793880ff1..52b99a6976d 100644
--- a/spec/services/system_notes/time_tracking_service_spec.rb
+++ b/spec/services/system_notes/time_tracking_service_spec.rb
@@ -118,10 +118,10 @@ RSpec.describe ::SystemNotes::TimeTrackingService, feature_category: :team_plann
subject
end
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DUE_DATE_CHANGED }
+ it_behaves_like 'internal event tracking' do
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DUE_DATE_CHANGED }
let(:user) { author }
- subject(:service_action) { note }
+ let(:namespace) { project.namespace }
end
context 'when only start_date is added' do
@@ -231,10 +231,10 @@ RSpec.describe ::SystemNotes::TimeTrackingService, feature_category: :team_plann
subject
end
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TIME_ESTIMATE_CHANGED }
+ it_behaves_like 'internal event tracking' do
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TIME_ESTIMATE_CHANGED }
let(:user) { author }
- let(:service_action) { subject }
+ let(:namespace) { project.namespace }
end
end
@@ -363,13 +363,10 @@ RSpec.describe ::SystemNotes::TimeTrackingService, feature_category: :team_plann
subject
end
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TIME_SPENT_CHANGED }
+ it_behaves_like 'internal event tracking' do
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TIME_SPENT_CHANGED }
let(:user) { author }
- let(:service_action) do
- spend_time!(277200)
- subject
- end
+ let(:namespace) { project.namespace }
end
end
diff --git a/spec/services/users/authorized_build_service_spec.rb b/spec/services/users/authorized_build_service_spec.rb
index 7eed6833cba..a96854f33d9 100644
--- a/spec/services/users/authorized_build_service_spec.rb
+++ b/spec/services/users/authorized_build_service_spec.rb
@@ -12,5 +12,13 @@ RSpec.describe Users::AuthorizedBuildService, feature_category: :user_management
it_behaves_like 'common user build items'
it_behaves_like 'current user not admin build items'
+
+ context 'for additional authorized build allowed params' do
+ before do
+ params.merge!(external: true)
+ end
+
+ it { expect(user).to be_external }
+ end
end
end
diff --git a/spec/services/users/build_service_spec.rb b/spec/services/users/build_service_spec.rb
index f3236d40412..5f1949adc32 100644
--- a/spec/services/users/build_service_spec.rb
+++ b/spec/services/users/build_service_spec.rb
@@ -16,6 +16,57 @@ RSpec.describe Users::BuildService, feature_category: :user_management do
it_behaves_like 'common user build items'
it_behaves_like 'current user not admin build items'
+
+ context 'with "user_default_external" application setting' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:user_default_external, :external, :email, :user_default_internal_regex, :result) do
+ true | nil | 'fl@example.com' | nil | true
+ true | true | 'fl@example.com' | nil | true
+ true | false | 'fl@example.com' | nil | true # admin difference
+
+ true | nil | 'fl@example.com' | '' | true
+ true | true | 'fl@example.com' | '' | true
+ true | false | 'fl@example.com' | '' | true # admin difference
+
+ true | nil | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
+ true | true | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false # admin difference
+ true | false | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
+
+ true | nil | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true
+ true | true | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true
+ true | false | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true # admin difference
+
+ false | nil | 'fl@example.com' | nil | false
+ false | true | 'fl@example.com' | nil | false # admin difference
+ false | false | 'fl@example.com' | nil | false
+
+ false | nil | 'fl@example.com' | '' | false
+ false | true | 'fl@example.com' | '' | false # admin difference
+ false | false | 'fl@example.com' | '' | false
+
+ false | nil | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
+ false | true | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false # admin difference
+ false | false | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
+
+ false | nil | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | false
+ false | true | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | false # admin difference
+ false | false | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | false
+ end
+
+ with_them do
+ before do
+ stub_application_setting(user_default_external: user_default_external)
+ stub_application_setting(user_default_internal_regex: user_default_internal_regex)
+
+ params.merge!({ external: external, email: email }.compact)
+ end
+
+ it 'sets the value of Gitlab::CurrentSettings.user_default_external' do
+ expect(user.external).to eq(result)
+ end
+ end
+ end
end
context 'with non admin current_user' do
diff --git a/spec/services/users/migrate_records_to_ghost_user_service_spec.rb b/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
index 36b2730a2de..d6fb7a2954d 100644
--- a/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
+++ b/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
@@ -100,7 +100,11 @@ RSpec.describe Users::MigrateRecordsToGhostUserService, feature_category: :user_
context "when the awardable already has an award emoji of the same name assigned to the ghost user" do
let(:awardable) { create(:issue) }
- let!(:existing_award_emoji) { create(:award_emoji, user: User.ghost, name: "thumbsup", awardable: awardable) }
+
+ let!(:existing_award_emoji) do
+ create(:award_emoji, user: Users::Internal.ghost, name: "thumbsup", awardable: awardable)
+ end
+
let!(:award_emoji) { create(:award_emoji, user: user, name: "thumbsup", awardable: awardable) }
it "migrates the award emoji regardless" do
@@ -108,7 +112,7 @@ RSpec.describe Users::MigrateRecordsToGhostUserService, feature_category: :user_
migrated_record = AwardEmoji.find_by_id(award_emoji.id)
- expect(migrated_record.user).to eq(User.ghost)
+ expect(migrated_record.user).to eq(Users::Internal.ghost)
end
it "does not leave the migrated award emoji in an invalid state" do
@@ -322,7 +326,7 @@ RSpec.describe Users::MigrateRecordsToGhostUserService, feature_category: :user_
service.execute
expect(gitlab_shell.repository_exists?(repo.shard_name, "#{repo.disk_path}.git")).to be(true)
- expect(User.ghost.snippets).to include(repo.snippet)
+ expect(Users::Internal.ghost.snippets).to include(repo.snippet)
end
context 'when an error is raised deleting snippets' do
diff --git a/spec/services/users/upsert_credit_card_validation_service_spec.rb b/spec/services/users/upsert_credit_card_validation_service_spec.rb
index ebd2502398d..4e23b51cae2 100644
--- a/spec/services/users/upsert_credit_card_validation_service_spec.rb
+++ b/spec/services/users/upsert_credit_card_validation_service_spec.rb
@@ -101,6 +101,14 @@ RSpec.describe Users::UpsertCreditCardValidationService, feature_category: :user
end
context 'when unexpected exception happen' do
+ let(:exception) { StandardError.new }
+
+ before do
+ allow_next_instance_of(::Users::CreditCardValidation) do |instance|
+ allow(instance).to receive(:save).and_raise(exception)
+ end
+ end
+
it 'tracks the exception and returns an error' do
logged_params = {
credit_card_validated_at: credit_card_validated_time,
@@ -111,8 +119,7 @@ RSpec.describe Users::UpsertCreditCardValidationService, feature_category: :user
user_id: user_id
}
- expect(::Users::CreditCardValidation).to receive(:upsert).and_raise(e = StandardError.new('My exception!'))
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(e, class: described_class.to_s, params: logged_params)
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(exception, class: described_class.to_s, params: logged_params)
result = service.execute
diff --git a/spec/services/work_items/related_work_item_links/destroy_service_spec.rb b/spec/services/work_items/related_work_item_links/destroy_service_spec.rb
new file mode 100644
index 00000000000..39381078c45
--- /dev/null
+++ b/spec/services/work_items/related_work_item_links/destroy_service_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::RelatedWorkItemLinks::DestroyService, feature_category: :portfolio_management do
+ describe '#execute' do
+ let_it_be(:project) { create(:project_empty_repo, :private) }
+ let_it_be(:other_project) { create(:project_empty_repo, :private) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:source) { create(:work_item, project: project) }
+ let_it_be(:linked_item1) { create(:work_item, project: project) }
+ let_it_be(:linked_item2) { create(:work_item, project: project) }
+ let_it_be(:no_access_item) { create(:work_item, project: other_project) }
+ let_it_be(:not_linked_item) { create(:work_item, project: project) }
+
+ let_it_be(:link1) { create(:work_item_link, source: source, target: linked_item1) }
+ let_it_be(:link2) { create(:work_item_link, source: source, target: linked_item2) }
+ let_it_be(:link3) { create(:work_item_link, source: source, target: no_access_item) }
+
+ let(:ids_to_remove) { [linked_item1.id, linked_item2.id, no_access_item.id, not_linked_item.id] }
+
+ subject(:destroy_links) { described_class.new(source, user, { item_ids: ids_to_remove }).execute }
+
+ context 'when user can `admin_work_item_link` for the work item' do
+ before_all do
+ project.add_guest(user)
+ end
+
+ it 'removes existing linked items with access' do
+ expect { destroy_links }.to change { WorkItems::RelatedWorkItemLink.count }.by(-2)
+ end
+
+ it 'creates notes for the source and target of each removed link' do
+ [linked_item1, linked_item2].each do |item|
+ expect(SystemNoteService).to receive(:unrelate_issuable).with(source, item, user)
+ expect(SystemNoteService).to receive(:unrelate_issuable).with(item, source, user)
+ end
+
+ destroy_links
+ end
+
+ it 'returns correct response message' do
+ message = "Successfully unlinked IDs: #{linked_item1.id} and #{linked_item2.id}. IDs with errors: " \
+ "#{no_access_item.id} could not be removed due to insufficient permissions, " \
+ "#{not_linked_item.id} could not be removed due to not being linked."
+
+ is_expected.to eq(
+ status: :success,
+ message: message,
+ items_removed: [linked_item1.id, linked_item2.id],
+ items_with_errors: [no_access_item.id]
+ )
+ end
+
+ context 'when all items fail' do
+ let(:ids_to_remove) { [no_access_item.id] }
+ let(:params) { { item_ids: [no_access_item.id] } }
+ let(:error_msg) { "IDs with errors: #{ids_to_remove[0]} could not be removed due to insufficient permissions." }
+
+ it 'returns an error response' do
+ expect { destroy_links }.not_to change { WorkItems::RelatedWorkItemLink.count }
+
+ is_expected.to eq(status: :error, message: error_msg)
+ end
+ end
+
+ context 'when item_ids is empty' do
+ let(:ids_to_remove) { [] }
+
+ it 'returns error response' do
+ is_expected.to eq(message: 'No work item IDs provided.', status: :error, http_status: 409)
+ end
+ end
+ end
+
+ context 'when user cannot `admin_work_item_link` for the work item' do
+ it 'returns error response' do
+ is_expected.to eq(message: 'No work item found.', status: :error, http_status: 403)
+ end
+ end
+ end
+end
diff --git a/spec/services/work_items/update_service_spec.rb b/spec/services/work_items/update_service_spec.rb
index 8e19650d980..38e5d4dc153 100644
--- a/spec/services/work_items/update_service_spec.rb
+++ b/spec/services/work_items/update_service_spec.rb
@@ -76,9 +76,10 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
expect(update_work_item[:status]).to eq(:success)
end
- it_behaves_like 'issue_edit snowplow tracking' do
- let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TITLE_CHANGED }
+ it_behaves_like 'internal event tracking' do
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_TITLE_CHANGED }
let(:user) { current_user }
+ let(:namespace) { project.namespace }
subject(:service_action) { update_work_item[:status] }
end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index d7ceab1289e..f53e930f529 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -24,6 +24,7 @@ CrystalballEnv.start!
ENV["RAILS_ENV"] = 'test'
ENV["IN_MEMORY_APPLICATION_SETTINGS"] = 'true'
ENV["RSPEC_ALLOW_INVALID_URLS"] = 'true'
+ENV['USE_CI_BUILDS_ROUTING_TABLE'] = 'true'
require_relative '../config/environment'
@@ -178,10 +179,12 @@ RSpec.configure do |config|
config.include Devise::Test::IntegrationHelpers, type: :feature
config.include Devise::Test::IntegrationHelpers, type: :request
config.include LoginHelpers, type: :feature
+ config.include SignUpHelpers, type: :feature
config.include SearchHelpers, type: :feature
config.include WaitHelpers, type: :feature
config.include WaitForRequests, type: :feature
config.include Features::DomHelpers, type: :feature
+ config.include Features::HighlightContentHelper, type: :feature
config.include EmailHelpers, :mailer, type: :mailer
config.include Warden::Test::Helpers, type: :request
config.include Gitlab::Routing, type: :routing
@@ -210,6 +213,7 @@ RSpec.configure do |config|
config.include RequestUrgencyMatcher, type: :request
config.include Capybara::RSpecMatchers, type: :request
config.include PendingDirectUploadHelpers, :direct_uploads
+ config.include LabelsHelper, type: :feature
config.include_context 'when rendered has no HTML escapes', type: :view
@@ -294,16 +298,6 @@ RSpec.configure do |config|
stub_feature_flags(ci_queueing_disaster_recovery_disable_fair_scheduling: false)
stub_feature_flags(ci_queueing_disaster_recovery_disable_quota: false)
- # Only a few percent of users will be "enrolled" into the new nav with this flag.
- # Having it enabled globally would make it impossible to test the current nav.
- # https://gitlab.com/gitlab-org/gitlab/-/issues/420121
- stub_feature_flags(super_sidebar_nav_enrolled: false)
-
- # The anonymous super-sidebar is under heavy development and enabling the flag
- # globally leads to a lot of errors. This issue is for fixing all test to work with the
- # new nav: https://gitlab.com/gitlab-org/gitlab/-/issues/420119
- stub_feature_flags(super_sidebar_logged_out: false)
-
# It's disabled in specs because we don't support certain features which
# cause spec failures.
stub_feature_flags(gitlab_error_tracking: false)
@@ -349,6 +343,8 @@ RSpec.configure do |config|
# Postgres is the primary data source, and ClickHouse only when enabled in certain cases.
stub_feature_flags(clickhouse_data_collection: false)
+ stub_feature_flags(vite: false)
+
allow(Gitlab::GitalyClient).to receive(:can_use_disk?).and_return(enable_rugged)
else
unstub_all_feature_flags
@@ -398,8 +394,8 @@ RSpec.configure do |config|
end
config.around(:example, :quarantine) do |example|
- # Skip tests in quarantine unless we explicitly focus on them.
- example.run if config.inclusion_filter[:quarantine]
+ # Skip tests in quarantine unless we explicitly focus on them or we are not running in CI
+ example.run if config.inclusion_filter[:quarantine] || !ENV['CI']
end
config.around(:example, :request_store) do |example|
@@ -491,6 +487,34 @@ RSpec.configure do |config|
config.before(:each, :js) do
allow_any_instance_of(VersionCheck).to receive(:response).and_return({ "severity" => "success" })
end
+
+ [:migration, :delete].each do |spec_type|
+ message = <<~STRING
+ We detected an open transaction before running the example. This is not allowed with specs that rely on a table
+ deletion strategy like those marked as `:#{spec_type}`.
+
+ A common scenario for this is using `test-prof` methods in your specs. `let_it_be` and `before_all` methods open
+ a transaction before all the specs in a context are run, and this is not compatible with this type of spec.
+ Consider replacing these methods with `let!` and `before(:all)`.
+
+ For more information see
+ https://docs.gitlab.com/ee/development/testing_guide/best_practices.html#testprof-in-migration-specs
+ STRING
+
+ config.around(:each, spec_type) do |example|
+ self.class.use_transactional_tests = false
+
+ if DbCleaner.all_connection_classes.any? { |klass| klass.connection.transaction_open? }
+ raise message
+ end
+
+ example.run
+
+ delete_from_all_tables!(except: deletion_except_tables)
+
+ self.class.use_transactional_tests = true
+ end
+ end
end
# Disabled because it's causing N+1 queries.
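The open-transaction guard added above points at test-prof helpers; a minimal sketch of the suggested replacement in a `:migration` or `:delete` spec follows (the record name is illustrative, not from this diff):

  # let_it_be opens a transaction before all examples and trips the guard above:
  # let_it_be(:project) { create(:project) }

  # A plain let! creates the record per example without a wrapping transaction:
  let!(:project) { create(:project) }

  # Likewise, before(:all) can replace before_all for per-context setup.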
diff --git a/spec/support/before_all_adapter.rb b/spec/support/before_all_adapter.rb
index f4946ff271f..35846fcecb8 100644
--- a/spec/support/before_all_adapter.rb
+++ b/spec/support/before_all_adapter.rb
@@ -25,20 +25,9 @@ module TestProfBeforeAllAdapter
end
end
- # This class is required so we can disable transactions on migration specs
- module NoTransactionAdapter
- def self.begin_transaction; end
-
- def self.rollback_transaction; end
- end
-
def self.default_adapter
MultipleDatabaseAdapter
end
-
- def self.no_transaction_adapter
- NoTransactionAdapter
- end
end
TestProf::BeforeAll.adapter = ::TestProfBeforeAllAdapter.default_adapter
diff --git a/spec/support/capybara.rb b/spec/support/capybara.rb
index 392743fda4a..65abbe12621 100644
--- a/spec/support/capybara.rb
+++ b/spec/support/capybara.rb
@@ -54,11 +54,7 @@ Capybara.register_server :puma_via_workhorse do |app, port, host, **options|
# In cases of multiple installations of chromedriver, prioritize the version installed by SeleniumManager
# selenium-manager doesn't work with Linux arm64 yet:
# https://github.com/SeleniumHQ/selenium/issues/11357
- if RUBY_PLATFORM.include?('x86_64-linux') ||
- # Rosetta is required on macOS because the selenium-manager
- # binaries (https://github.com/SeleniumHQ/selenium/tree/trunk/common/manager/macos)
- # are only compiled for macOS x86.
- (RUBY_PLATFORM.include?('darwin') && system('/usr/bin/pgrep -q oahd'))
+ if RUBY_PLATFORM.include?('x86_64-linux') || RUBY_PLATFORM.include?('darwin')
chrome_options = Selenium::WebDriver::Chrome::Options.chrome
chromedriver_path = File.dirname(Selenium::WebDriver::SeleniumManager.driver_path(chrome_options))
ENV['PATH'] = "#{chromedriver_path}:#{ENV['PATH']}" # rubocop:disable RSpec/EnvAssignment
diff --git a/spec/support/capybara_wait_for_all_requests.rb b/spec/support/capybara_wait_for_all_requests.rb
index 36b63619b08..6f272474cf6 100644
--- a/spec/support/capybara_wait_for_all_requests.rb
+++ b/spec/support/capybara_wait_for_all_requests.rb
@@ -9,9 +9,11 @@ module Capybara
include CapybaraHelpers
include WaitForRequests
- def visit(visit_uri)
+ def visit(visit_uri, &block)
super
+ yield if block
+
wait_for_all_requests
end
end
@@ -24,24 +26,26 @@ module Capybara
include CapybaraHelpers
include WaitForRequests
- module WaitForAllRequestsAfterClickButton
+ module WaitForRequestsAfterClickButton
def click_button(locator = nil, **options)
super
- wait_for_all_requests
+ wait_for_requests
end
end
- module WaitForAllRequestsAfterClickLink
- def click_link(locator = nil, **options)
+ module WaitForRequestsAfterClickLink
+ def click_link(locator = nil, **options, &block)
super
- wait_for_all_requests
+ yield if block
+
+ wait_for_requests
end
end
- prepend WaitForAllRequestsAfterClickButton
- prepend WaitForAllRequestsAfterClickLink
+ prepend WaitForRequestsAfterClickButton
+ prepend WaitForRequestsAfterClickLink
end
end
end
diff --git a/spec/support/database/auto_explain.rb b/spec/support/database/auto_explain.rb
index 108d88e37b9..799457034a1 100644
--- a/spec/support/database/auto_explain.rb
+++ b/spec/support/database/auto_explain.rb
@@ -115,11 +115,16 @@ module AutoExplain
private
def record_auto_explain?(connection)
- ENV['CI'] \
- && ENV['CI_MERGE_REQUEST_LABELS']&.include?('pipeline:record-queries') \
- && ENV['CI_JOB_NAME_SLUG'] != 'db-migrate-non-superuser' \
- && connection.database_version.to_s[0..1].to_i >= 14 \
- && connection.select_one('SHOW is_superuser')['is_superuser'] == 'on'
+ return false unless ENV['CI']
+ return false if ENV['CI_JOB_NAME_SLUG'] == 'db-migrate-non-superuser'
+ return false if connection.database_version.to_s[0..1].to_i < 14
+ return false if connection.select_one('SHOW is_superuser')['is_superuser'] != 'on'
+
+ # This condition matches the pipeline rules for if-merge-request-labels-record-queries
+ return true if ENV['CI_MERGE_REQUEST_LABELS']&.include?('pipeline:record-queries')
+
+ # This condition matches the pipeline rules for if-default-branch-refs
+ ENV['CI_COMMIT_REF_NAME'] == ENV['CI_DEFAULT_BRANCH'] && !ENV['CI_MERGE_REQUEST_IID']
end
end
end
diff --git a/spec/support/database/click_house/hooks.rb b/spec/support/database/click_house/hooks.rb
index 27abd19dc3f..b970d3daf84 100644
--- a/spec/support/database/click_house/hooks.rb
+++ b/spec/support/database/click_house/hooks.rb
@@ -4,7 +4,13 @@
class ClickHouseTestRunner
def truncate_tables
ClickHouse::Client.configuration.databases.each_key do |db|
- tables_for(db).each do |table|
+ # Select tables with at least one row
+ query = tables_for(db).map do |table|
+ "(SELECT '#{table}' AS table FROM #{table} LIMIT 1)"
+ end.join(' UNION ALL ')
+
+ tables_with_data = ClickHouse::Client.select(query, db).pluck('table')
+ tables_with_data.each do |table|
ClickHouse::Client.execute("TRUNCATE TABLE #{table}", db)
end
end
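For illustration, with two hypothetical tables the query assembled above limits truncation to tables that actually contain rows:

  # Hypothetical table names, for illustration only:
  # tables_for(db) => ["events", "audit_events"]
  query = "(SELECT 'events' AS table FROM events LIMIT 1) UNION ALL " \
          "(SELECT 'audit_events' AS table FROM audit_events LIMIT 1)"
  # Only tables that return a row end up in tables_with_data and get truncated.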
diff --git a/spec/support/database/prevent_cross_database_modification.rb b/spec/support/database/prevent_cross_database_modification.rb
index 77fa7feacd4..02572d011f7 100644
--- a/spec/support/database/prevent_cross_database_modification.rb
+++ b/spec/support/database/prevent_cross_database_modification.rb
@@ -1,15 +1,53 @@
# frozen_string_literal: true
-module PreventCrossDatabaseModificationSpecHelpers
- delegate :with_cross_database_modification_prevented,
- :allow_cross_database_modification_within_transaction,
- to: :'::Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification'
+module Database
+ module PreventCrossDatabaseModificationSpecHelpers
+ delegate :with_cross_database_modification_prevented,
+ :allow_cross_database_modification_within_transaction,
+ to: :'::Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification'
+ end
+
+ module AllowCrossDatabaseFactoryBotBuilt
+ extend ActiveSupport::Concern
+
+ attr_accessor :factory_bot_built
+
+ prepended do
+ around_create :_test_ignore_table_in_transaction, prepend: true, if: :factory_bot_built?
+
+ def _test_ignore_table_in_transaction(&blk)
+ Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification.temporary_ignore_tables_in_transaction(
+ [self.class.table_name], url: 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/130277', &blk
+ )
+ end
+ end
+
+ def factory_bot_built?
+ return false unless Rails.env.test?
+
+ !!factory_bot_built
+ end
+
+ private
+
+ def ignore_cross_database_tables_if_factory_bot(tables, &blk)
+ return super unless factory_bot_built?
+
+ Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification.temporary_ignore_tables_in_transaction(
+ tables,
+ url: 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/130277',
+ &blk
+ )
+ end
+ end
end
+ActiveRecord::Base.prepend(Database::AllowCrossDatabaseFactoryBotBuilt)
+
CROSS_DB_MODIFICATION_ALLOW_LIST = Set.new(YAML.load_file(File.join(__dir__, 'cross-database-modification-allowlist.yml'))).freeze
RSpec.configure do |config|
- config.include(PreventCrossDatabaseModificationSpecHelpers)
+ config.include(Database::PreventCrossDatabaseModificationSpecHelpers)
# By default allow cross-modifications as we want to observe only transactions
# within a specific block of execution which is defined by `before(:each)` and `after(:each)`
diff --git a/spec/support/database_cleaner.rb b/spec/support/database_cleaner.rb
index 7bd1f0c5dfa..1ffc1cc25fd 100644
--- a/spec/support/database_cleaner.rb
+++ b/spec/support/database_cleaner.rb
@@ -12,14 +12,4 @@ RSpec.configure do |config|
setup_database_cleaner
DatabaseCleaner.clean_with(:deletion)
end
-
- config.around(:each, :delete) do |example|
- self.class.use_transactional_tests = false
-
- example.run
-
- delete_from_all_tables!(except: deletion_except_tables)
-
- self.class.use_transactional_tests = true
- end
end
diff --git a/spec/support/db_cleaner.rb b/spec/support/db_cleaner.rb
index 17b4270fa20..3131a22a20b 100644
--- a/spec/support/db_cleaner.rb
+++ b/spec/support/db_cleaner.rb
@@ -73,7 +73,10 @@ module DbCleaner
end
end
+ disable_ddl_was = Feature.enabled?(:disallow_database_ddl_feature_flags, type: :ops)
+ stub_feature_flags(disallow_database_ddl_feature_flags: false)
Gitlab::Database::Partitioning.sync_partitions_ignore_db_error
+ stub_feature_flags(disallow_database_ddl_feature_flags: disable_ddl_was)
puts "Databases re-creation done in #{Gitlab::Metrics::System.monotonic_time - start}"
end
diff --git a/spec/support/factory_bot.rb b/spec/support/factory_bot.rb
index 6faa2db3330..d30098a5cc0 100644
--- a/spec/support/factory_bot.rb
+++ b/spec/support/factory_bot.rb
@@ -1,5 +1,19 @@
# frozen_string_literal: true
+FactoryBot.define do
+ after(:build) do |object, _|
+ next unless object.respond_to?(:factory_bot_built=)
+
+ object.factory_bot_built = true
+ end
+
+ before(:create) do |object, _|
+ next unless object.respond_to?(:factory_bot_built=)
+
+ object.factory_bot_built = false
+ end
+end
+
FactoryBot::SyntaxRunner.class_eval do
include RSpec::Mocks::ExampleMethods
include StubMethodCalls
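Together with the AllowCrossDatabaseFactoryBotBuilt concern in spec/support/database/prevent_cross_database_modification.rb above, these callbacks mean the cross-database check temporarily ignores the table of records that were only built by FactoryBot; a minimal sketch of the flow, using a hypothetical factory name:

  record = FactoryBot.build(:ci_build)   # after(:build)  => record.factory_bot_built == true
  record.save!                           # around_create temporarily ignores ci_builds in the
                                         # cross-database modification analysis

  record = FactoryBot.create(:ci_build)  # before(:create) resets the flag, so directly
                                         # created records are analysed as usual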
diff --git a/spec/support/finder_collection_allowlist.yml b/spec/support/finder_collection_allowlist.yml
index 5de8e8cdca2..860045c6ce6 100644
--- a/spec/support/finder_collection_allowlist.yml
+++ b/spec/support/finder_collection_allowlist.yml
@@ -7,6 +7,7 @@
- Namespaces::FreeUserCap::UsersFinder # Reason: There is no need to have anything else besides the count
- Groups::EnvironmentScopesFinder # Reason: There is no need to have anything else besides the simple structure with the scope name
- Security::RelatedPipelinesFinder # Reason: There is no need to have anything else besides the IDs of pipelines
+- Llm::ExtraResourceFinder # Reason: The finder does not deal with DB-backed resources for now.
# Temporary excludes (aka TODOs)
# For example:
@@ -61,11 +62,6 @@
- Security::PipelineVulnerabilitiesFinder
- Security::ScanExecutionPoliciesFinder
- Security::ScanResultPoliciesFinder
-- Security::TrainingProviders::BaseUrlFinder
-- Security::TrainingUrlsFinder
-- Security::TrainingProviders::KontraUrlFinder
-- Security::TrainingProviders::SecureCodeWarriorUrlFinder
-- Security::TrainingProviders::SecureFlagUrlFinder
- SentryIssueFinder
- ServerlessDomainFinder
- TagsFinder
diff --git a/spec/support/gitlab_stubs/gitlab_ci_dast_includes.yml b/spec/support/gitlab_stubs/gitlab_ci_dast_includes.yml
index 583d44c452e..097cbf5b0c8 100644
--- a/spec/support/gitlab_stubs/gitlab_ci_dast_includes.yml
+++ b/spec/support/gitlab_stubs/gitlab_ci_dast_includes.yml
@@ -1,3 +1,6 @@
+stages:
+ - dast
+
dast:
stage: dast
image:
@@ -7,4 +10,6 @@ dast:
allow_failure: true
dast_configuration:
site_profile: "site_profile_name_included"
- scanner_profile: "scanner_profile_name_included"
\ No newline at end of file
+ scanner_profile: "scanner_profile_name_included"
+ script:
+ - echo "Runs DAST!"
diff --git a/spec/support/helpers/database/duplicate_indexes.rb b/spec/support/helpers/database/duplicate_indexes.rb
new file mode 100644
index 00000000000..0ad8ee1e055
--- /dev/null
+++ b/spec/support/helpers/database/duplicate_indexes.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+module Database
+ class DuplicateIndexes
+ attr_accessor :table_name, :indexes
+
+ BTREE_INDEX_STRUCT = Struct.new(:name, :columns, :unique)
+
+ def initialize(table_name, indexes)
+ @table_name = table_name
+ @indexes = indexes
+ end
+
+ def duplicate_indexes
+ ret = {}
+
+ btree_indexes.each do |btree_index|
+ matching_indexes = matching_indexes_for(btree_index)
+ next unless matching_indexes.any?
+
+ ret[btree_index] = matching_indexes
+ end
+
+ ret
+ end
+
+ def self.btree_index_struct(index)
+ BTREE_INDEX_STRUCT.new(
+ index.name,
+ Array.wrap(index.columns).map do |column|
+ # https://apidock.com/rails/ActiveRecord/ConnectionAdapters/PostgreSQL/SchemaStatements/indexes
+ # asc is the default order
+ column_order = index.orders.is_a?(Symbol) ? index.orders : (index.orders[column] || :asc)
+ { name: column, order: column_order }
+ end,
+ index.unique
+ )
+ end
+
+ private
+
+ def btree_indexes
+ return @btree_indexes if @btree_indexes
+
+ # We only scan non-conditional btree indexes
+ @btree_indexes = indexes.select do |index|
+ index.using == :btree && index.where.nil? && index.opclasses.blank?
+ end
+
+ @btree_indexes = @btree_indexes.map { |index| self.class.btree_index_struct(index) }
+ end
+
+ def matching_indexes_for(btree_index)
+ all_matching_indexes = []
+
+ # When comparing btree_index with other_index, btree_index is the index that can have more columns
+ # than the other_index.
+ (1..btree_index.columns.length).each do |subset_length|
+ columns = btree_index.columns.first(subset_length)
+ matching_indexes = btree_indexes.reject { |other_index| other_index == btree_index }.select do |other_index|
+ other_index.columns == columns
+ end
+
+ # For now we ignore other indexes that are UNIQUE and have a matching column subset of
+ # the btree_index columns, as UNIQUE indexes are still needed to enforce uniqueness
+ # constraints on a subset of the columns.
+ matching_indexes = matching_indexes.reject do |other_index|
+ (other_index.unique && (other_index.columns.length < btree_index.columns.length))
+ end
+
+ all_matching_indexes += matching_indexes
+ end
+
+ all_matching_indexes
+ end
+ end
+end
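A minimal usage sketch for the helper above (table name and output handling are illustrative):

  indexes = ApplicationRecord.connection.indexes('users')
  Database::DuplicateIndexes.new('users', indexes).duplicate_indexes.each do |index, covered_by|
    puts "#{index.name} is covered by #{covered_by.map(&:name).join(', ')}"
  end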
diff --git a/spec/support/helpers/database/duplicate_indexes.yml b/spec/support/helpers/database/duplicate_indexes.yml
new file mode 100644
index 00000000000..02efdabd70b
--- /dev/null
+++ b/spec/support/helpers/database/duplicate_indexes.yml
@@ -0,0 +1,265 @@
+---
+# It maps table_name to {index1: array_of_duplicate_indexes, index2: array_of_duplicate_indexes, ... }
+abuse_reports:
+ idx_abuse_reports_user_id_status_and_category:
+ - index_abuse_reports_on_user_id
+alert_management_http_integrations:
+ index_http_integrations_on_project_and_endpoint:
+ - index_alert_management_http_integrations_on_project_id
+analytics_cycle_analytics_group_stages:
+ index_group_stages_on_group_id_group_value_stream_id_and_name:
+ - index_analytics_ca_group_stages_on_group_id
+approval_project_rules_users:
+ index_approval_project_rules_users_1:
+ - index_approval_project_rules_users_on_approval_project_rule_id
+approvals:
+ index_approvals_on_merge_request_id_and_created_at:
+ - index_approvals_on_merge_request_id
+board_group_recent_visits:
+ index_board_group_recent_visits_on_user_group_and_board:
+ - index_board_group_recent_visits_on_user_id
+board_project_recent_visits:
+ index_board_project_recent_visits_on_user_project_and_board:
+ - index_board_project_recent_visits_on_user_id
+board_user_preferences:
+ index_board_user_preferences_on_user_id_and_board_id:
+ - index_board_user_preferences_on_user_id
+boards_epic_board_recent_visits:
+ index_epic_board_recent_visits_on_user_group_and_board:
+ - index_boards_epic_board_recent_visits_on_user_id
+boards_epic_user_preferences:
+ index_boards_epic_user_preferences_on_board_user_epic_unique:
+ - index_boards_epic_user_preferences_on_board_id
+bulk_import_batch_trackers:
+ i_bulk_import_trackers_id_batch_number:
+ - index_bulk_import_batch_trackers_on_tracker_id
+bulk_import_export_batches:
+ i_bulk_import_export_batches_id_batch_number:
+ - index_bulk_import_export_batches_on_export_id
+ci_job_artifacts:
+ index_ci_job_artifacts_on_id_project_id_and_created_at:
+ - index_ci_job_artifacts_on_project_id
+ index_ci_job_artifacts_on_id_project_id_and_file_type:
+ - index_ci_job_artifacts_on_project_id
+ index_ci_job_artifacts_on_project_id_and_id:
+ - index_ci_job_artifacts_on_project_id
+ci_pipeline_artifacts:
+ index_ci_pipeline_artifacts_on_pipeline_id_and_file_type:
+ - index_ci_pipeline_artifacts_on_pipeline_id
+ci_stages:
+ index_ci_stages_on_pipeline_id_and_name:
+ - index_ci_stages_on_pipeline_id
+ index_ci_stages_on_pipeline_id_and_position:
+ - index_ci_stages_on_pipeline_id
+ index_ci_stages_on_pipeline_id_convert_to_bigint_and_name:
+ - index_ci_stages_on_pipeline_id_convert_to_bigint
+ index_ci_stages_on_pipeline_id_convert_to_bigint_and_position:
+ - index_ci_stages_on_pipeline_id_convert_to_bigint
+dast_site_tokens:
+ index_dast_site_token_on_project_id_and_url:
+ - index_dast_site_tokens_on_project_id
+design_management_designs:
+ index_design_management_designs_on_iid_and_project_id:
+ - index_design_management_designs_on_project_id
+design_management_designs_versions:
+ design_management_designs_versions_uniqueness:
+ - index_design_management_designs_versions_on_design_id
+error_tracking_errors:
+ index_et_errors_on_project_id_and_status_and_id:
+ - index_error_tracking_errors_on_project_id
+ index_et_errors_on_project_id_and_status_events_count_id_desc:
+ - index_error_tracking_errors_on_project_id
+ index_et_errors_on_project_id_and_status_first_seen_at_id_desc:
+ - index_error_tracking_errors_on_project_id
+ index_et_errors_on_project_id_and_status_last_seen_at_id_desc:
+ - index_error_tracking_errors_on_project_id
+geo_node_namespace_links:
+ index_geo_node_namespace_links_on_geo_node_id_and_namespace_id:
+ - index_geo_node_namespace_links_on_geo_node_id
+in_product_marketing_emails:
+ index_in_product_marketing_emails_on_user_campaign:
+ - index_in_product_marketing_emails_on_user_id
+ index_in_product_marketing_emails_on_user_track_series:
+ - index_in_product_marketing_emails_on_user_id
+incident_management_oncall_participants:
+ index_inc_mgmnt_oncall_participants_on_user_id_and_rotation_id:
+ - index_inc_mgmnt_oncall_participants_on_oncall_user_id
+incident_management_oncall_schedules:
+ index_im_oncall_schedules_on_project_id_and_iid:
+ - index_incident_management_oncall_schedules_on_project_id
+instance_audit_events_streaming_headers:
+ idx_instance_external_audit_event_destination_id_key_uniq:
+ - idx_headers_instance_external_audit_event_destination_id
+issue_links:
+ index_issue_links_on_source_id_and_target_id:
+ - index_issue_links_on_source_id
+issues:
+ index_issues_on_author_id_and_id_and_created_at:
+ - index_issues_on_author_id
+jira_connect_subscriptions:
+ idx_jira_connect_subscriptions_on_installation_id_namespace_id:
+ - idx_jira_connect_subscriptions_on_installation_id
+list_user_preferences:
+ index_list_user_preferences_on_user_id_and_list_id:
+ - index_list_user_preferences_on_user_id
+member_tasks:
+ index_member_tasks_on_member_id_and_project_id:
+ - index_member_tasks_on_member_id
+members:
+ index_members_on_member_namespace_id_compound:
+ - index_members_on_member_namespace_id
+merge_request_assignees:
+ index_merge_request_assignees_on_merge_request_id_and_user_id:
+ - index_merge_request_assignees_on_merge_request_id
+merge_request_metrics:
+ index_mr_metrics_on_target_project_id_merged_at_nulls_last:
+ - index_merge_request_metrics_on_target_project_id
+merge_requests:
+ index_merge_requests_on_author_id_and_created_at:
+ - index_merge_requests_on_author_id
+ index_merge_requests_on_author_id_and_id:
+ - index_merge_requests_on_author_id
+ index_merge_requests_on_author_id_and_target_project_id:
+ - index_merge_requests_on_author_id
+ml_candidate_params:
+ index_ml_candidate_params_on_candidate_id_on_name:
+ - index_ml_candidate_params_on_candidate_id
+ml_candidates:
+ index_ml_candidates_on_project_id_on_internal_id:
+ - index_ml_candidates_on_project_id
+ml_model_versions:
+ index_ml_model_versions_on_project_id_and_model_id_and_version:
+ - index_ml_model_versions_on_project_id
+ml_models:
+ index_ml_models_on_project_id_and_name:
+ - index_ml_models_on_project_id
+p_ci_runner_machine_builds:
+ index_p_ci_runner_machine_builds_on_runner_machine_id:
+ - index_ci_runner_machine_builds_on_runner_machine_id
+packages_debian_group_distributions:
+ uniq_pkgs_debian_group_distributions_group_id_and_codename:
+ - index_packages_debian_group_distributions_on_group_id
+ uniq_pkgs_debian_group_distributions_group_id_and_suite:
+ - index_packages_debian_group_distributions_on_group_id
+packages_debian_project_distributions:
+ uniq_pkgs_debian_project_distributions_project_id_and_codename:
+ - index_packages_debian_project_distributions_on_project_id
+ uniq_pkgs_debian_project_distributions_project_id_and_suite:
+ - index_packages_debian_project_distributions_on_project_id
+packages_tags:
+ index_packages_tags_on_package_id_and_updated_at:
+ - index_packages_tags_on_package_id
+pages_domains:
+ index_pages_domains_on_project_id_and_enabled_until:
+ - index_pages_domains_on_project_id
+ index_pages_domains_on_verified_at_and_enabled_until:
+ - index_pages_domains_on_verified_at
+personal_access_tokens:
+ index_pat_on_user_id_and_expires_at:
+ - index_personal_access_tokens_on_user_id
+pm_affected_packages:
+ i_affected_packages_unique_for_upsert:
+ - index_pm_affected_packages_on_pm_advisory_id
+pm_package_version_licenses:
+ i_pm_package_version_licenses_join_ids:
+ - index_pm_package_version_licenses_on_pm_package_version_id
+pm_package_versions:
+ i_pm_package_versions_on_package_id_and_version:
+ - index_pm_package_versions_on_pm_package_id
+project_compliance_standards_adherence:
+ u_project_compliance_standards_adherence_for_reporting:
+ - index_project_compliance_standards_adherence_on_project_id
+project_relation_exports:
+ index_project_export_job_relation:
+ - index_project_relation_exports_on_project_export_job_id
+project_repositories:
+ index_project_repositories_on_shard_id_and_project_id:
+ - index_project_repositories_on_shard_id
+project_topics:
+ index_project_topics_on_project_id_and_topic_id:
+ - index_project_topics_on_project_id
+projects:
+ index_projects_api_path_id_desc:
+ - index_on_projects_path
+ index_projects_on_path_and_id:
+ - index_on_projects_path
+protected_environments:
+ index_protected_environments_on_project_id_and_name:
+ - index_protected_environments_on_project_id
+protected_tags:
+ index_protected_tags_on_project_id_and_name:
+ - index_protected_tags_on_project_id
+related_epic_links:
+ index_related_epic_links_on_source_id_and_target_id:
+ - index_related_epic_links_on_source_id
+requirements_management_test_reports:
+ idx_test_reports_on_issue_id_created_at_and_id:
+ - index_requirements_management_test_reports_on_issue_id
+sbom_component_versions:
+ index_sbom_component_versions_on_component_id_and_version:
+ - index_sbom_component_versions_on_component_id
+sbom_occurrences:
+ index_sbom_occurrences_for_input_file_path_search:
+ - index_sbom_occurrences_on_project_id_component_id
+ - index_sbom_occurrences_on_project_id
+ idx_sbom_occurrences_on_project_id_and_source_id:
+ - index_sbom_occurrences_on_project_id
+ index_sbom_occurrences_on_project_id_and_id:
+ - index_sbom_occurrences_on_project_id
+ index_sbom_occurrences_on_project_id_component_id:
+ - index_sbom_occurrences_on_project_id
+ index_sbom_occurrences_on_project_id_and_component_id_and_id:
+ - index_sbom_occurrences_on_project_id_component_id
+ - index_sbom_occurrences_on_project_id
+ index_sbom_occurrences_on_project_id_and_package_manager:
+ - index_sbom_occurrences_on_project_id
+scan_result_policies:
+ index_scan_result_policies_on_position_in_configuration:
+ - index_scan_result_policies_on_policy_configuration_id
+search_namespace_index_assignments:
+ index_search_namespace_index_assignments_uniqueness_index_type:
+ - index_search_namespace_index_assignments_on_namespace_id
+ index_search_namespace_index_assignments_uniqueness_on_index_id:
+ - index_search_namespace_index_assignments_on_namespace_id
+sprints:
+ sequence_is_unique_per_iterations_cadence_id:
+ - index_sprints_iterations_cadence_id
+taggings:
+ taggings_idx:
+ - index_taggings_on_tag_id
+term_agreements:
+ term_agreements_unique_index:
+ - index_term_agreements_on_user_id
+todos:
+ index_todos_on_author_id_and_created_at:
+ - index_todos_on_author_id
+user_callouts:
+ index_user_callouts_on_user_id_and_feature_name:
+ - index_user_callouts_on_user_id
+users:
+ index_users_on_state_and_user_type:
+ - index_users_on_state
+vulnerabilities:
+ index_vulnerabilities_project_id_state_severity_default_branch:
+ - index_vulnerabilities_on_project_id_and_state_and_severity
+vulnerability_external_issue_links:
+ idx_vulnerability_ext_issue_links_on_vulne_id_and_ext_issue:
+ - index_vulnerability_external_issue_links_on_vulnerability_id
+vulnerability_finding_links:
+ finding_link_name_url_idx:
+ - finding_links_on_vulnerability_occurrence_id
+vulnerability_finding_signatures:
+ idx_vuln_signatures_uniqueness_signature_sha:
+ - index_vulnerability_finding_signatures_on_finding_id
+vulnerability_flags:
+ index_vulnerability_flags_on_unique_columns:
+ - index_vulnerability_flags_on_vulnerability_occurrence_id
+web_hook_logs:
+ index_web_hook_logs_on_web_hook_id_and_created_at:
+ - index_web_hook_logs_part_on_web_hook_id
+web_hooks:
+ index_web_hooks_on_project_id_recent_failures:
+ - index_web_hooks_on_project_id
+work_item_hierarchy_restrictions:
+ index_work_item_hierarchy_restrictions_on_parent_and_child:
+ - index_work_item_hierarchy_restrictions_on_parent_type_id
diff --git a/spec/support/helpers/features/admin_users_helpers.rb b/spec/support/helpers/features/admin_users_helpers.rb
index 9a87ccf113a..b4477537a40 100644
--- a/spec/support/helpers/features/admin_users_helpers.rb
+++ b/spec/support/helpers/features/admin_users_helpers.rb
@@ -4,7 +4,7 @@ module Features
module AdminUsersHelpers
def click_user_dropdown_toggle(user_id)
page.within("[data-testid='user-actions-#{user_id}']") do
- find("[data-testid='dropdown-toggle']").click
+ find("[data-testid='user-actions-dropdown-toggle']").click
end
end
diff --git a/spec/support/helpers/features/highlight_content_helper.rb b/spec/support/helpers/features/highlight_content_helper.rb
new file mode 100644
index 00000000000..f55dd213061
--- /dev/null
+++ b/spec/support/helpers/features/highlight_content_helper.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+# This helper allows you to reliably highlight text within a given Element by
+# simulating mouse actions.
+#
+module Features
+ module HighlightContentHelper
+ def highlight_content(node)
+ height = node.native.rect.height
+ width = node.native.rect.width
+ page.driver.browser.action
+ .move_to(node.native, -(width / 2), -(height / 2))
+ .click_and_hold
+ .move_by(width, height)
+ .release
+ .perform
+ end
+ end
+end
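A minimal usage sketch in a feature spec (the selector is hypothetical):

  node = find('[data-testid="blob-content"]') # any Capybara node works
  highlight_content(node)                     # drags from the top-left to the bottom-right corner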
diff --git a/spec/support/helpers/features/runners_helpers.rb b/spec/support/helpers/features/runners_helpers.rb
index dbd1edade8c..7c3618ee799 100644
--- a/spec/support/helpers/features/runners_helpers.rb
+++ b/spec/support/helpers/features/runners_helpers.rb
@@ -23,11 +23,13 @@ module Features
def input_filtered_search_keys(search_term)
focus_filtered_search
- page.find(search_bar_selector).find('input').send_keys(search_term)
- # blur input
- find('body').click
+ page.within(search_bar_selector) do
+ send_keys(search_term)
+ send_keys(:enter)
+
+ click_on 'Search'
+ end
- page.click_on 'Search'
wait_for_requests
end
diff --git a/spec/support/helpers/filtered_search_helpers.rb b/spec/support/helpers/filtered_search_helpers.rb
index 60638eb06cd..abd5d78e836 100644
--- a/spec/support/helpers/filtered_search_helpers.rb
+++ b/spec/support/helpers/filtered_search_helpers.rb
@@ -155,7 +155,7 @@ module FilteredSearchHelpers
end
def default_placeholder
- 'Search or filter results...'
+ 'Search or filter results…'
end
def get_filtered_search_placeholder
diff --git a/spec/support/helpers/loose_foreign_keys_helper.rb b/spec/support/helpers/loose_foreign_keys_helper.rb
new file mode 100644
index 00000000000..c83c60d72ed
--- /dev/null
+++ b/spec/support/helpers/loose_foreign_keys_helper.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+# Helper to process deletions of associated records created via loose foreign keys
+
+module LooseForeignKeysHelper
+ def process_loose_foreign_key_deletions(record:)
+ LooseForeignKeys::DeletedRecord.using_connection(record.connection) do
+ LooseForeignKeys::ProcessDeletedRecordsService.new(connection: record.connection).execute
+ end
+ end
+end
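A minimal usage sketch (the record choice is illustrative; the record only supplies the connection the deletions run on):

  project = create(:project)
  pipeline = create(:ci_pipeline, project: project)

  project.destroy!
  process_loose_foreign_key_deletions(record: pipeline) # processes pending loose-foreign-key deletions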
diff --git a/spec/support/helpers/sign_up_helpers.rb b/spec/support/helpers/sign_up_helpers.rb
new file mode 100644
index 00000000000..6259467232c
--- /dev/null
+++ b/spec/support/helpers/sign_up_helpers.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+module SignUpHelpers
+ def fill_in_sign_up_form(new_user, submit_button_text = 'Register')
+ fill_in 'new_user_first_name', with: new_user.first_name
+ fill_in 'new_user_last_name', with: new_user.last_name
+ fill_in 'new_user_username', with: new_user.username
+ fill_in 'new_user_email', with: new_user.email
+ fill_in 'new_user_password', with: new_user.password
+
+ wait_for_all_requests
+
+ expect_username_to_be_validated
+
+ yield if block_given?
+
+ click_button submit_button_text
+ end
+
+ private
+
+ def expect_username_to_be_validated
+ expect(page).to have_selector('[data-testid="new-user-username-field"].gl-field-success-outline')
+ end
+end
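A minimal usage sketch in a registration feature spec (the optional block runs just before submitting; the extra checkbox is an assumption, not part of this diff):

  new_user = build(:user)

  fill_in_sign_up_form(new_user) do
    check 'I accept the Terms of Service' # hypothetical extra field
  end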
diff --git a/spec/support/helpers/stub_gitlab_calls.rb b/spec/support/helpers/stub_gitlab_calls.rb
index 6d0e97b0a75..c02ffe07159 100644
--- a/spec/support/helpers/stub_gitlab_calls.rb
+++ b/spec/support/helpers/stub_gitlab_calls.rb
@@ -23,13 +23,17 @@ module StubGitlabCalls
end
def stub_ci_pipeline_yaml_file(ci_yaml_content)
- allow_any_instance_of(Gitlab::Ci::ProjectConfig::Repository)
- .to receive(:file_in_repository?)
- .and_return(ci_yaml_content.present?)
+ blob = instance_double(Blob, empty?: ci_yaml_content.blank?, data: ci_yaml_content)
+ allow(blob).to receive(:load_all_data!)
allow_any_instance_of(Repository)
- .to receive(:gitlab_ci_yml_for)
- .and_return(ci_yaml_content)
+ .to receive(:blob_at)
+ .and_call_original
+
+ allow_any_instance_of(Repository)
+ .to receive(:blob_at)
+ .with(String, '.gitlab-ci.yml')
+ .and_return(blob)
# Ensure we don't hit auto-devops when config not found in repository
unless ci_yaml_content
diff --git a/spec/support/helpers/x509_helpers.rb b/spec/support/helpers/x509_helpers.rb
index 1dc8b1d4845..aa5c360d953 100644
--- a/spec/support/helpers/x509_helpers.rb
+++ b/spec/support/helpers/x509_helpers.rb
@@ -173,6 +173,10 @@ module X509Helpers
Time.at(1561027326)
end
+ def signed_tag_time
+ Time.at(1574261780)
+ end
+
def signed_tag_signature
<<~SIGNATURE
-----BEGIN SIGNED MESSAGE-----
@@ -337,6 +341,10 @@ module X509Helpers
'r.meier@siemens.com'
end
+ def tag_email
+ 'dmitriy.zaporozhets@gmail.com'
+ end
+
def certificate_issuer
'CN=Siemens Issuing CA EE Auth 2016,OU=Siemens Trust Center,serialNumber=ZZZZZZA2,O=Siemens,L=Muenchen,ST=Bayern,C=DE'
end
@@ -357,4 +365,177 @@ module X509Helpers
['r.meier@siemens.com']
end
end
+
+ module User2
+ extend self
+
+ def commit
+ '440bf5b2b499a90d9adcbebe3752f8c6f245a1aa'
+ end
+
+ def path
+ 'gitlab-test'
+ end
+
+ def trust_cert
+ <<~TRUSTCERTIFICATE
+ -----BEGIN CERTIFICATE-----
+ MIICGjCCAaGgAwIBAgIUALnViVfnU0brJasmRkHrn/UnfaQwCgYIKoZIzj0EAwMw
+ KjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0y
+ MjA0MTMyMDA2MTVaFw0zMTEwMDUxMzU2NThaMDcxFTATBgNVBAoTDHNpZ3N0b3Jl
+ LmRldjEeMBwGA1UEAxMVc2lnc3RvcmUtaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0C
+ AQYFK4EEACIDYgAE8RVS/ysH+NOvuDZyPIZtilgUF9NlarYpAd9HP1vBBH1U5CV7
+ 7LSS7s0ZiH4nE7Hv7ptS6LvvR/STk798LVgMzLlJ4HeIfF3tHSaexLcYpSASr1kS
+ 0N/RgBJz/9jWCiXno3sweTAOBgNVHQ8BAf8EBAMCAQYwEwYDVR0lBAwwCgYIKwYB
+ BQUHAwMwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU39Ppz1YkEZb5qNjp
+ KFWixi4YZD8wHwYDVR0jBBgwFoAUWMAeX5FFpWapesyQoZMi0CrFxfowCgYIKoZI
+ zj0EAwMDZwAwZAIwPCsQK4DYiZYDPIaDi5HFKnfxXx6ASSVmERfsynYBiX2X6SJR
+ nZU84/9DZdnFvvxmAjBOt6QpBlc4J/0DxvkTCqpclvziL6BCCPnjdlIB3Pu3BxsP
+ mygUY7Ii2zbdCdliiow=
+ -----END CERTIFICATE-----
+ TRUSTCERTIFICATE
+ end
+
+ def signed_commit_signature
+ <<~SIGNATURE
+ -----BEGIN SIGNED MESSAGE-----
+ MIIEOQYJKoZIhvcNAQcCoIIEKjCCBCYCAQExDTALBglghkgBZQMEAgEwCwYJKoZI
+ hvcNAQcBoIIC2jCCAtYwggJdoAMCAQICFC5R9EXk+ljFhyCs4urRxmCuvQNAMAoG
+ CCqGSM49BAMDMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2ln
+ c3RvcmUtaW50ZXJtZWRpYXRlMB4XDTIzMDgxOTE3NTgwNVoXDTIzMDgxOTE4MDgw
+ NVowADBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABBGajWb10Rt36IMxtJmjRDa7
+ 5O6YCLhVq9+LNJSAx2M7p6netqW7W+lwym4z1Y1gXLdGHBshrbx/yr6Trhh2TCej
+ ggF8MIIBeDAOBgNVHQ8BAf8EBAMCB4AwEwYDVR0lBAwwCgYIKwYBBQUHAwMwHQYD
+ VR0OBBYEFBttEjGzNppCqA4tlZY4oaxkdmQbMB8GA1UdIwQYMBaAFN/T6c9WJBGW
+ +ajY6ShVosYuGGQ/MCUGA1UdEQEB/wQbMBmBF2dpdGxhYmdwZ3Rlc3RAZ21haWwu
+ Y29tMCwGCisGAQQBg78wAQEEHmh0dHBzOi8vZ2l0aHViLmNvbS9sb2dpbi9vYXV0
+ aDAuBgorBgEEAYO/MAEIBCAMHmh0dHBzOi8vZ2l0aHViLmNvbS9sb2dpbi9vYXV0
+ aDCBiwYKKwYBBAHWeQIEAgR9BHsAeQB3AN09MGrGxxEyYxkeHJlnNwKiSl643jyt
+ /4eKcoAvKe6OAAABig7ydOsAAAQDAEgwRgIhAMqJnFLAspeqfbK/gA/7zjceyExq
+ QN7qDXWKRLS01rTvAiEAp/uBShQb9tVa3P3fYVAMiXydvr5dqCpNiuudZiuYq0Yw
+ CgYIKoZIzj0EAwMDZwAwZAIwWKXYyP5FvbfhvfLkV0tN887ax1eg7TmF1Tzkugag
+ cLJ5MzK3xYNcUO/3AxO3H/b8AjBD9DF6R4kFO4cXoqnpsk2FTUeSPiUJ+0x2PDFG
+ gQZvoMWz7CnwjXml8XDEKNpYoPkxggElMIIBIQIBATBPMDcxFTATBgNVBAoTDHNp
+ Z3N0b3JlLmRldjEeMBwGA1UEAxMVc2lnc3RvcmUtaW50ZXJtZWRpYXRlAhQuUfRF
+ 5PpYxYcgrOLq0cZgrr0DQDALBglghkgBZQMEAgGgaTAYBgkqhkiG9w0BCQMxCwYJ
+ KoZIhvcNAQcBMBwGCSqGSIb3DQEJBTEPFw0yMzA4MTkxNzU4MDVaMC8GCSqGSIb3
+ DQEJBDEiBCB4B7DeGk22WmBseJzjjRJcQsyYxu0PNDAFXq55uJ7MSzAKBggqhkjO
+ PQQDAgRHMEUCIQCNegIrK6m1xyGuu4lw06l22VQsmO74/k3H236jCFF+bAIgAX1N
+ rxBFWnjWboZmAV1NuduTD/YToShK6iRmJ/NpILA=
+ -----END SIGNED MESSAGE-----
+ SIGNATURE
+ end
+
+ def signed_commit_base_data
+ <<~SIGNEDDATA
+ tree 7d5ee08cadaa161d731c56a9265feef130143b07
+ parent 4b4918a572fa86f9771e5ba40fbd48e1eb03e2c6
+ author Mona Lisa <gitlabgpgtest@gmail.com> 1692467872 +0000
+ committer Mona Lisa <gitlabgpgtest@gmail.com> 1692467872 +0000
+
+ Sigstore Signed Commit
+ SIGNEDDATA
+ end
+
+ def signed_commit_time
+ Time.at(1692467872)
+ end
+
+ def signed_tag_time
+ Time.at(1692467872)
+ end
+
+ def signed_tag_signature
+ <<~SIGNATURE
+ -----BEGIN SIGNED MESSAGE-----
+ MIIEOgYJKoZIhvcNAQcCoIIEKzCCBCcCAQExDTALBglghkgBZQMEAgEwCwYJKoZI
+ hvcNAQcBoIIC2zCCAtcwggJdoAMCAQICFB5qFHBSNfcJDZecnHK5/tleuX3yMAoG
+ CCqGSM49BAMDMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2ln
+ c3RvcmUtaW50ZXJtZWRpYXRlMB4XDTIzMDgxOTE3NTgzM1oXDTIzMDgxOTE4MDgz
+ M1owADBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABKJtbdL88PM8lE21CuyDYlZm
+ 0xZYCThoXZSGmULrgE5+hfroCIbLswOi5i6TyB8j4CCe0Jxeu94Jn+76SXF+lbej
+ ggF8MIIBeDAOBgNVHQ8BAf8EBAMCB4AwEwYDVR0lBAwwCgYIKwYBBQUHAwMwHQYD
+ VR0OBBYEFBkU3IBENVJYeyK9b56vbGGrjPwYMB8GA1UdIwQYMBaAFN/T6c9WJBGW
+ +ajY6ShVosYuGGQ/MCUGA1UdEQEB/wQbMBmBF2dpdGxhYmdwZ3Rlc3RAZ21haWwu
+ Y29tMCwGCisGAQQBg78wAQEEHmh0dHBzOi8vZ2l0aHViLmNvbS9sb2dpbi9vYXV0
+ aDAuBgorBgEEAYO/MAEIBCAMHmh0dHBzOi8vZ2l0aHViLmNvbS9sb2dpbi9vYXV0
+ aDCBiwYKKwYBBAHWeQIEAgR9BHsAeQB3AN09MGrGxxEyYxkeHJlnNwKiSl643jyt
+ /4eKcoAvKe6OAAABig7y4tYAAAQDAEgwRgIhAMUjWh8ayhjWDI3faFah3Du/7IuY
+ xzbUXaPQnCyUbvwwAiEAwHgWv8fmKMudbVu37Nbq/c1cdnQqDK9Y2UGtlmzaLrYw
+ CgYIKoZIzj0EAwMDaAAwZQIwZTKZlS4HNJH48km3pxG95JTbldSBhvFlrpIEVRUd
+ TEK6uGQJmpIm1WYQjbJbiVS8AjEA+2NoAdMuRpa2k13HUfWQEMtzQcxZMMNB7Yux
+ 9ZIADOlFp701ujtFSZAXgqGL3FYKMYIBJTCCASECAQEwTzA3MRUwEwYDVQQKEwxz
+ aWdzdG9yZS5kZXYxHjAcBgNVBAMTFXNpZ3N0b3JlLWludGVybWVkaWF0ZQIUHmoU
+ cFI19wkNl5yccrn+2V65ffIwCwYJYIZIAWUDBAIBoGkwGAYJKoZIhvcNAQkDMQsG
+ CSqGSIb3DQEHATAcBgkqhkiG9w0BCQUxDxcNMjMwODE5MTc1ODMzWjAvBgkqhkiG
+ 9w0BCQQxIgQgwpYCAlbS6KnfgxQD3SATWUbdUssLaBWkHwTkmtCye4wwCgYIKoZI
+ zj0EAwIERzBFAiB8y5bGhWJvWCHQyma7oF038ZPLzXmsDJyJffJHoAb6XAIhAOW3
+ gxuYuJAKP86B1fY0vYCZHF8vU6SZAcE6teSDowwq
+ -----END SIGNED MESSAGE-----
+ SIGNATURE
+ end
+
+ def signed_tag_base_data
+ <<~SIGNEDDATA
+ object 440bf5b2b499a90d9adcbebe3752f8c6f245a1aa
+ type commit
+ tag v1.1.2
+ tagger Mona Lisa <gitlabgpgtest@gmail.com> 1692467901 +0000
+
+ Sigstore Signed Tag
+ SIGNEDDATA
+ end
+
+ def certificate_serial
+ 264441215000592123389532407734419590292801651520
+ end
+
+ def tag_certificate_serial
+ 173635382582380059990335547381753891120957980146
+ end
+
+ def certificate_subject_key_identifier
+ '1B:6D:12:31:B3:36:9A:42:A8:0E:2D:95:96:38:A1:AC:64:76:64:1B'
+ end
+
+ def tag_certificate_subject_key_identifier
+ '19:14:DC:80:44:35:52:58:7B:22:BD:6F:9E:AF:6C:61:AB:8C:FC:18'
+ end
+
+ def issuer_subject_key_identifier
+ 'DF:D3:E9:CF:56:24:11:96:F9:A8:D8:E9:28:55:A2:C6:2E:18:64:3F'
+ end
+
+ def tag_issuer_subject_key_identifier
+ 'DF:D3:E9:CF:56:24:11:96:F9:A8:D8:E9:28:55:A2:C6:2E:18:64:3F'
+ end
+
+ def certificate_email
+ 'gitlabgpgtest@gmail.com'
+ end
+
+ def tag_email
+ 'gitlabgpgtest@gmail.com'
+ end
+
+ def certificate_issuer
+ 'CN=sigstore-intermediate,O=sigstore.dev'
+ end
+
+ def tag_certificate_issuer
+ 'CN=sigstore-intermediate,O=sigstore.dev'
+ end
+
+ def certificate_subject
+ ''
+ end
+
+ def names
+ ['Mona Lisa']
+ end
+
+ def emails
+ ['gitlabgpgtest@gmail.com']
+ end
+ end
end
diff --git a/spec/support/matchers/pagination_matcher.rb b/spec/support/matchers/pagination_matcher.rb
index a3e9c3b8474..beaba84d78c 100644
--- a/spec/support/matchers/pagination_matcher.rb
+++ b/spec/support/matchers/pagination_matcher.rb
@@ -11,3 +11,13 @@ RSpec::Matchers.define :include_limited_pagination_headers do |expected|
expect(actual.headers).to include('X-Per-Page', 'X-Page', 'X-Next-Page', 'X-Prev-Page', 'Link')
end
end
+
+RSpec::Matchers.define :include_keyset_url_params do |expected|
+ include KeysetPaginationHelpers
+
+ match do |actual|
+ params_for_next_page = pagination_params_from_next_url(actual)
+
+ expect(params_for_next_page).to include('cursor')
+ end
+end
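A short usage sketch for the new matcher, assuming KeysetPaginationHelpers expects the full response object and that the endpoint below supports keyset pagination (the endpoint itself is an assumption):

it 'exposes a keyset cursor for the next page' do
  get api('/groups', user), params: { pagination: 'keyset', per_page: 1 }

  expect(response).to have_gitlab_http_status(:ok)
  # Passes when the "next" URL in the Link header carries a `cursor` param.
  expect(response).to include_keyset_url_params
end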
diff --git a/spec/support/migration.rb b/spec/support/migration.rb
index b1e75d9c9e2..fc8a4bb12fb 100644
--- a/spec/support/migration.rb
+++ b/spec/support/migration.rb
@@ -20,21 +20,11 @@ RSpec.configure do |config|
Gitlab::CurrentSettings.clear_in_memory_application_settings!
end
- config.prepend_before(:all, :migration) do
- TestProf::BeforeAll.adapter = ::TestProfBeforeAllAdapter.no_transaction_adapter
- end
-
- config.append_after(:all, :migration) do
- TestProf::BeforeAll.adapter = ::TestProfBeforeAllAdapter.default_adapter
- end
-
config.append_after(:context, :migration) do
recreate_databases_and_seed_if_needed || ensure_schema_and_empty_tables
end
config.around(:each, :migration) do |example|
- self.class.use_transactional_tests = false
-
migration_schema = example.metadata[:migration]
migration_schema = :gitlab_main if migration_schema == true
base_model = Gitlab::Database.schemas_to_base_models.fetch(migration_schema).first
@@ -52,10 +42,6 @@ RSpec.configure do |config|
else
example.run
end
-
- delete_from_all_tables!(except: deletion_except_tables)
-
- self.class.use_transactional_tests = true
end
# Each example may call `migrate!`, so we must ensure we are migrated down every time
diff --git a/spec/support/multiple_databases.rb b/spec/support/multiple_databases.rb
index 616cf00269c..1c556858018 100644
--- a/spec/support/multiple_databases.rb
+++ b/spec/support/multiple_databases.rb
@@ -7,8 +7,6 @@ RSpec.configure do |config|
# at startup, but that generates its own
# `Gitlab::Database::Reflection` so the result is not memoized by
# callers of `ApplicationRecord.database.version`, such as
- # `Gitlab::Database::AsWithMaterialized.materialized_supported?`.
- # TODO This can be removed once https://gitlab.com/gitlab-org/gitlab/-/issues/325639 is completed.
[ApplicationRecord, ::Ci::ApplicationRecord].each { |record| record.database.version }
config.around(:each, :reestablished_active_record_base) do |example|
diff --git a/spec/support/protected_branch_helpers.rb b/spec/support/protected_branch_helpers.rb
index 576275e9d1d..db5118d6f88 100644
--- a/spec/support/protected_branch_helpers.rb
+++ b/spec/support/protected_branch_helpers.rb
@@ -34,7 +34,7 @@ module ProtectedBranchHelpers
select_input.click
wait_for_requests
- within('.dropdown.show .dropdown-menu', &block)
+ within('.dropdown .dropdown-menu.show', &block)
# Enhanced select is used in EE, therefore an extra click is needed.
select_input.click if select_input['aria-expanded'] == 'true'
diff --git a/spec/support/rspec.rb b/spec/support/rspec.rb
index 4479e679d67..7f3aa55fb1d 100644
--- a/spec/support/rspec.rb
+++ b/spec/support/rspec.rb
@@ -20,10 +20,18 @@ RSpec.configure do |config|
config.example_status_persistence_file_path = ENV.fetch('RSPEC_LAST_RUN_RESULTS_FILE', './spec/examples.txt')
# Makes diffs show entire non-truncated values.
- config.before(:each, :unlimited_max_formatted_output_length) do
+ config.around(:each, :unlimited_max_formatted_output_length) do |example|
+ old_max_formatted_output_length = RSpec::Support::ObjectFormatter.default_instance.max_formatted_output_length
+
config.expect_with :rspec do |c|
c.max_formatted_output_length = nil
end
+
+ example.run
+
+ config.expect_with :rspec do |c|
+ c.max_formatted_output_length = old_max_formatted_output_length
+ end
end
unless ENV['CI']
diff --git a/spec/support/rspec_order_todo.yml b/spec/support/rspec_order_todo.yml
index f52f843e56a..298f4006c3b 100644
--- a/spec/support/rspec_order_todo.yml
+++ b/spec/support/rspec_order_todo.yml
@@ -19,7 +19,6 @@
- './ee/spec/controllers/admin/elasticsearch_controller_spec.rb'
- './ee/spec/controllers/admin/emails_controller_spec.rb'
- './ee/spec/controllers/admin/geo/nodes_controller_spec.rb'
-- './ee/spec/controllers/admin/geo/projects_controller_spec.rb'
- './ee/spec/controllers/admin/geo/settings_controller_spec.rb'
- './ee/spec/controllers/admin/groups_controller_spec.rb'
- './ee/spec/controllers/admin/impersonations_controller_spec.rb'
@@ -166,7 +165,6 @@
- './ee/spec/controllers/security/vulnerabilities_controller_spec.rb'
- './ee/spec/controllers/sitemap_controller_spec.rb'
- './ee/spec/controllers/subscriptions_controller_spec.rb'
-- './ee/spec/controllers/trial_registrations_controller_spec.rb'
- './ee/spec/controllers/users_controller_spec.rb'
- './ee/spec/db/production/license_spec.rb'
- './ee/spec/elastic_integration/global_search_spec.rb'
@@ -461,7 +459,6 @@
- './ee/spec/features/read_only_spec.rb'
- './ee/spec/features/registrations/combined_registration_spec.rb'
- './ee/spec/features/registrations/one_trust_spec.rb'
-- './ee/spec/features/registrations/welcome_spec.rb'
- './ee/spec/features/search/elastic/global_search_spec.rb'
- './ee/spec/features/search/elastic/group_search_spec.rb'
- './ee/spec/features/search/elastic/project_search_spec.rb'
@@ -526,7 +523,6 @@
- './ee/spec/finders/epics_finder_spec.rb'
- './ee/spec/finders/geo/ci_secure_file_registry_finder_spec.rb'
- './ee/spec/finders/geo/container_repository_registry_finder_spec.rb'
-- './ee/spec/finders/geo/design_registry_finder_spec.rb'
- './ee/spec/finders/geo/group_wiki_repository_registry_finder_spec.rb'
- './ee/spec/finders/geo/lfs_object_registry_finder_spec.rb'
- './ee/spec/finders/geo/merge_request_diff_registry_finder_spec.rb'
@@ -594,7 +590,6 @@
- './ee/spec/graphql/api/vulnerabilities_spec.rb'
- './ee/spec/graphql/ee/mutations/boards/issues/issue_move_list_spec.rb'
- './ee/spec/graphql/ee/mutations/boards/lists/create_spec.rb'
-- './ee/spec/graphql/ee/mutations/ci/project_ci_cd_settings_update_spec.rb'
- './ee/spec/graphql/ee/mutations/ci/runner/update_spec.rb'
- './ee/spec/graphql/ee/mutations/concerns/mutations/resolves_issuable_spec.rb'
- './ee/spec/graphql/ee/resolvers/board_list_issues_resolver_spec.rb'
@@ -999,7 +994,6 @@
- './ee/spec/helpers/security_helper_spec.rb'
- './ee/spec/helpers/subscriptions_helper_spec.rb'
- './ee/spec/helpers/timeboxes_helper_spec.rb'
-- './ee/spec/helpers/trial_registrations/reassurances_helper_spec.rb'
- './ee/spec/helpers/trial_status_widget_helper_spec.rb'
- './ee/spec/helpers/users_helper_spec.rb'
- './ee/spec/helpers/vulnerabilities_helper_spec.rb'
@@ -1147,7 +1141,6 @@
- './ee/spec/lib/ee/gitlab/group_search_results_spec.rb'
- './ee/spec/lib/ee/gitlab/hook_data/group_member_builder_spec.rb'
- './ee/spec/lib/ee/gitlab/hook_data/issue_builder_spec.rb'
-- './ee/spec/lib/ee/gitlab/hook_data/user_builder_spec.rb'
- './ee/spec/lib/ee/gitlab/import_export/after_export_strategies/custom_template_export_import_strategy_spec.rb'
- './ee/spec/lib/ee/gitlab/import_export/group/tree_restorer_spec.rb'
- './ee/spec/lib/ee/gitlab/import_export/group/tree_saver_spec.rb'
@@ -1395,7 +1388,6 @@
- './ee/spec/lib/gitlab/geo/log_cursor/daemon_spec.rb'
- './ee/spec/lib/gitlab/geo/log_cursor/event_logs_spec.rb'
- './ee/spec/lib/gitlab/geo/log_cursor/events/cache_invalidation_event_spec.rb'
-- './ee/spec/lib/gitlab/geo/log_cursor/events/design_repository_updated_event_spec.rb'
- './ee/spec/lib/gitlab/geo/log_cursor/events/event_spec.rb'
- './ee/spec/lib/gitlab/geo/log_cursor/events/hashed_storage_attachments_event_spec.rb'
- './ee/spec/lib/gitlab/geo/log_cursor/events/hashed_storage_migrated_event_spec.rb'
@@ -1758,7 +1750,6 @@
- './ee/spec/models/geo/ci_secure_file_registry_spec.rb'
- './ee/spec/models/geo/container_repository_registry_spec.rb'
- './ee/spec/models/geo/deleted_project_spec.rb'
-- './ee/spec/models/geo/design_registry_spec.rb'
- './ee/spec/models/geo/event_log_spec.rb'
- './ee/spec/models/geo/event_log_state_spec.rb'
- './ee/spec/models/geo/every_geo_event_spec.rb'
@@ -2750,7 +2741,6 @@
- './ee/spec/services/geo/wiki_sync_service_spec.rb'
- './ee/spec/services/gitlab_subscriptions/activate_service_spec.rb'
- './ee/spec/services/gitlab_subscriptions/check_future_renewal_service_spec.rb'
-- './ee/spec/services/gitlab_subscriptions/create_hand_raise_lead_service_spec.rb'
- './ee/spec/services/gitlab_subscriptions/create_service_spec.rb'
- './ee/spec/services/gitlab_subscriptions/create_trial_or_lead_service_spec.rb'
- './ee/spec/services/gitlab_subscriptions/fetch_purchase_eligible_namespaces_service_spec.rb'
@@ -2828,7 +2818,6 @@
- './ee/spec/services/milestones/destroy_service_spec.rb'
- './ee/spec/services/milestones/promote_service_spec.rb'
- './ee/spec/services/milestones/update_service_spec.rb'
-- './ee/spec/services/namespaces/in_product_marketing_emails_service_spec.rb'
- './ee/spec/services/namespaces/storage/email_notification_service_spec.rb'
- './ee/spec/services/path_locks/lock_service_spec.rb'
- './ee/spec/services/path_locks/unlock_service_spec.rb'
@@ -3057,7 +3046,6 @@
- './ee/spec/views/profiles/preferences/show.html.haml_spec.rb'
- './ee/spec/views/projects/edit.html.haml_spec.rb'
- './ee/spec/views/projects/issues/show.html.haml_spec.rb'
-- './ee/spec/views/projects/_merge_request_status_checks_settings.html.haml_spec.rb'
- './ee/spec/views/projects/on_demand_scans/index.html.haml_spec.rb'
- './ee/spec/views/projects/security/corpus_management/show.html.haml_spec.rb'
- './ee/spec/views/projects/security/dast_profiles/show.html.haml_spec.rb'
@@ -3068,6 +3056,7 @@
- './ee/spec/views/projects/security/discover/show.html.haml_spec.rb'
- './ee/spec/views/projects/security/policies/index.html.haml_spec.rb'
- './ee/spec/views/projects/security/sast_configuration/show.html.haml_spec.rb'
+- './ee/spec/views/projects/settings/merge_requests/_merge_request_status_checks_settings.html.haml_spec.rb'
- './ee/spec/views/projects/settings/subscriptions/_index.html.haml_spec.rb'
- './ee/spec/views/registrations/groups/new.html.haml_spec.rb'
- './ee/spec/views/shared/billings/_billing_plan_actions.html.haml_spec.rb'
@@ -3133,7 +3122,6 @@
- './ee/spec/workers/ee/arkose/blocked_users_report_worker_spec.rb'
- './ee/spec/workers/ee/ci/build_finished_worker_spec.rb'
- './ee/spec/workers/ee/issuable_export_csv_worker_spec.rb'
-- './ee/spec/workers/ee/namespaces/in_product_marketing_emails_worker_spec.rb'
- './ee/spec/workers/ee/namespaces/root_statistics_worker_spec.rb'
- './ee/spec/workers/ee/projects/inactive_projects_deletion_cron_worker_spec.rb'
- './ee/spec/workers/ee/repository_check/batch_worker_spec.rb'
@@ -8579,7 +8567,6 @@
- './spec/requests/groups/crm/contacts_controller_spec.rb'
- './spec/requests/groups/crm/organizations_controller_spec.rb'
- './spec/requests/groups/deploy_tokens_controller_spec.rb'
-- './spec/requests/groups/email_campaigns_controller_spec.rb'
- './spec/requests/groups/harbor/artifacts_controller_spec.rb'
- './spec/requests/groups/harbor/repositories_controller_spec.rb'
- './spec/requests/groups/harbor/tags_controller_spec.rb'
@@ -9323,7 +9310,6 @@
- './spec/services/milestones/transfer_service_spec.rb'
- './spec/services/milestones/update_service_spec.rb'
- './spec/services/namespace_settings/update_service_spec.rb'
-- './spec/services/namespaces/in_product_marketing_emails_service_spec.rb'
- './spec/services/namespaces/package_settings/update_service_spec.rb'
- './spec/services/namespaces/statistics_refresher_service_spec.rb'
- './spec/services/notes/build_service_spec.rb'
@@ -10126,7 +10112,6 @@
- './spec/workers/metrics/dashboard/schedule_annotations_prune_worker_spec.rb'
- './spec/workers/metrics/dashboard/sync_dashboards_worker_spec.rb'
- './spec/workers/migrate_external_diffs_worker_spec.rb'
-- './spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb'
- './spec/workers/namespaces/process_sync_events_worker_spec.rb'
- './spec/workers/namespaces/prune_aggregation_schedules_worker_spec.rb'
- './spec/workers/namespaces/root_statistics_worker_spec.rb'
diff --git a/spec/support/shared_contexts/dependency_proxy_shared_context.rb b/spec/support/shared_contexts/dependency_proxy_shared_context.rb
new file mode 100644
index 00000000000..02625722a8c
--- /dev/null
+++ b/spec/support/shared_contexts/dependency_proxy_shared_context.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'with a server running the dependency proxy' do
+ def run_server(handler)
+ default_server = Capybara.server
+
+ Capybara.server = Capybara.servers[:puma]
+ server = Capybara::Server.new(handler)
+ server.boot
+ server
+ ensure
+ Capybara.server = default_server
+ end
+end
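A hedged sketch of a spec using this context; the Rack handler is a stand-in upstream, not GitLab code.

require 'net/http'

RSpec.describe 'Dependency proxy pull-through', :js do
  include_context 'with a server running the dependency proxy'

  let(:upstream) do
    # Minimal Rack app pretending to be the upstream registry.
    ->(_env) { [200, { 'Content-Type' => 'text/plain' }, ['ok']] }
  end

  it 'boots the stub upstream on a real port' do
    server = run_server(upstream)

    expect(Net::HTTP.get(URI("http://#{server.host}:#{server.port}/"))).to eq('ok')
  end
end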
diff --git a/spec/support/shared_contexts/email_shared_context.rb b/spec/support/shared_contexts/email_shared_context.rb
index 8b4c1c1e243..3098fc3dc5e 100644
--- a/spec/support/shared_contexts/email_shared_context.rb
+++ b/spec/support/shared_contexts/email_shared_context.rb
@@ -191,7 +191,7 @@ RSpec.shared_examples 'note handler shared examples' do |forwardable|
context 'when the service desk' do
let(:project) { create(:project, :public, service_desk_enabled: true) }
- let(:support_bot) { User.support_bot }
+ let(:support_bot) { Users::Internal.support_bot }
let(:noteable) { create(:issue, project: project, author: support_bot, title: 'service desk issue') }
let!(:note) { create(:note, project: project, noteable: noteable) }
let(:email_raw) { with_quick_actions }
diff --git a/spec/support/shared_contexts/features/integrations/group_integrations_shared_context.rb b/spec/support/shared_contexts/features/integrations/group_integrations_shared_context.rb
index 5996fcc6593..befc231f04f 100644
--- a/spec/support/shared_contexts/features/integrations/group_integrations_shared_context.rb
+++ b/spec/support/shared_contexts/features/integrations/group_integrations_shared_context.rb
@@ -22,7 +22,7 @@ RSpec.shared_context 'group integration activation' do
visit_group_integrations
within('#content-body') do
- click_link(name)
+ click_link(name, match: :first)
end
end
end
diff --git a/spec/support/shared_contexts/features/integrations/instance_integrations_shared_context.rb b/spec/support/shared_contexts/features/integrations/instance_integrations_shared_context.rb
index 3b02db994a3..c740917cec4 100644
--- a/spec/support/shared_contexts/features/integrations/instance_integrations_shared_context.rb
+++ b/spec/support/shared_contexts/features/integrations/instance_integrations_shared_context.rb
@@ -18,7 +18,7 @@ RSpec.shared_context 'instance integration activation' do
visit_instance_integrations
within('#content-body') do
- click_link(name)
+ click_link(name, match: :first)
end
end
end
diff --git a/spec/support/shared_contexts/features/integrations/project_integrations_shared_context.rb b/spec/support/shared_contexts/features/integrations/project_integrations_shared_context.rb
index a9b9a5246e6..c3da9435e05 100644
--- a/spec/support/shared_contexts/features/integrations/project_integrations_shared_context.rb
+++ b/spec/support/shared_contexts/features/integrations/project_integrations_shared_context.rb
@@ -4,7 +4,7 @@ RSpec.shared_context 'project integration activation' do
include_context 'with integration activation'
let_it_be(:project) { create(:project) }
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { create(:user, :no_super_sidebar) }
before do
project.add_maintainer(user)
@@ -19,7 +19,7 @@ RSpec.shared_context 'project integration activation' do
visit_project_integrations
within('#content-body') do
- click_link(name)
+ click_link(name, match: :first)
end
end
diff --git a/spec/support/shared_contexts/finders/users_finder_shared_contexts.rb b/spec/support/shared_contexts/finders/users_finder_shared_contexts.rb
index ef1c01f72f9..b89b1aabd87 100644
--- a/spec/support/shared_contexts/finders/users_finder_shared_contexts.rb
+++ b/spec/support/shared_contexts/finders/users_finder_shared_contexts.rb
@@ -8,5 +8,5 @@ RSpec.shared_context 'UsersFinder#execute filter by project context' do
let_it_be(:external_user) { create(:user, :external) }
let_it_be(:unconfirmed_user) { create(:user, confirmed_at: nil) }
let_it_be(:omniauth_user) { create(:omniauth_user, provider: 'twitter', extern_uid: '123456') }
- let_it_be(:internal_user) { User.alert_bot.tap { |u| u.confirm } }
+ let_it_be(:internal_user) { Users::Internal.alert_bot.tap { |u| u.confirm } }
end
diff --git a/spec/support/shared_contexts/lib/gitlab/database/load_balancing/wal_tracking_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/database/load_balancing/wal_tracking_shared_context.rb
index cbbd3754108..46d6a1fbac0 100644
--- a/spec/support/shared_contexts/lib/gitlab/database/load_balancing/wal_tracking_shared_context.rb
+++ b/spec/support/shared_contexts/lib/gitlab/database/load_balancing/wal_tracking_shared_context.rb
@@ -62,7 +62,13 @@ RSpec.shared_context 'when tracking WAL location reference' do
def stub_replica_available!(available)
::Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
- allow(lb).to receive(:select_up_to_date_host).with(current_location).and_return(available)
+ result = if available
+ ::Gitlab::Database::LoadBalancing::LoadBalancer::ANY_CAUGHT_UP
+ else
+ ::Gitlab::Database::LoadBalancing::LoadBalancer::NONE_CAUGHT_UP
+ end
+
+ allow(lb).to receive(:select_up_to_date_host).with(current_location).and_return(result)
end
end
end
diff --git a/spec/support/shared_contexts/navbar_structure_context.rb b/spec/support/shared_contexts/navbar_structure_context.rb
index 112b90029b8..a09319b4980 100644
--- a/spec/support/shared_contexts/navbar_structure_context.rb
+++ b/spec/support/shared_contexts/navbar_structure_context.rb
@@ -82,7 +82,6 @@ RSpec.shared_context 'project navbar structure' do
{
nav_item: _('Monitor'),
nav_sub_items: [
- _('Tracing'),
_('Error Tracking'),
_('Alerts'),
_('Incidents')
@@ -165,15 +164,6 @@ RSpec.shared_context 'group navbar structure' do
}
end
- let(:ci_cd_nav_item) do
- {
- nav_item: _('CI/CD'),
- nav_sub_items: [
- s_('Runners|Runners')
- ]
- }
- end
-
let(:issues_nav_items) do
[
_('List'),
@@ -207,6 +197,12 @@ RSpec.shared_context 'group navbar structure' do
},
(security_and_compliance_nav_item if Gitlab.ee?),
{
+ nav_item: _('CI/CD'),
+ nav_sub_items: [
+ s_('Runners|Runners')
+ ]
+ },
+ {
nav_item: _('Kubernetes'),
nav_sub_items: []
},
@@ -231,6 +227,10 @@ RSpec.shared_context 'dashboard navbar structure' do
nav_sub_items: []
},
{
+ nav_item: _('Organizations'),
+ nav_sub_items: []
+ },
+ {
nav_item: _("Issues"),
nav_sub_items: []
},
diff --git a/spec/support/shared_contexts/policies/group_policy_shared_context.rb b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
index 07a4cbdb534..70b48322efd 100644
--- a/spec/support/shared_contexts/policies/group_policy_shared_context.rb
+++ b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
@@ -58,6 +58,7 @@ RSpec.shared_context 'GroupPolicy context' do
destroy_upload
admin_achievement
award_achievement
+ read_group_runners
]
end
@@ -73,7 +74,6 @@ RSpec.shared_context 'GroupPolicy context' do
create_subgroup
read_statistics
update_default_branch_protection
- read_group_runners
register_group_runners
read_billing
edit_billing
diff --git a/spec/support/shared_contexts/requests/api/npm_packages_shared_context.rb b/spec/support/shared_contexts/requests/api/npm_packages_shared_context.rb
index 36103b94542..cf5ac849f63 100644
--- a/spec/support/shared_contexts/requests/api/npm_packages_shared_context.rb
+++ b/spec/support/shared_contexts/requests/api/npm_packages_shared_context.rb
@@ -5,7 +5,7 @@ RSpec.shared_context 'npm api setup' do
include HttpBasicAuthHelpers
let_it_be(:user, reload: true) { create(:user) }
- let_it_be(:group) { create(:group, name: 'test-group') }
+ let_it_be(:group, reload: true) { create(:group, name: 'test-group') }
let_it_be(:namespace) { group }
let_it_be(:project, reload: true) { create(:project, :public, namespace: namespace) }
let_it_be(:package, reload: true) { create(:npm_package, project: project, name: "@#{group.path}/scoped_package", version: '1.2.3') }
diff --git a/spec/support/shared_examples/channels/noteable/notes_channel_shared_examples.rb b/spec/support/shared_examples/channels/noteable/notes_channel_shared_examples.rb
index cb7001a9faf..21bba14f3e6 100644
--- a/spec/support/shared_examples/channels/noteable/notes_channel_shared_examples.rb
+++ b/spec/support/shared_examples/channels/noteable/notes_channel_shared_examples.rb
@@ -15,16 +15,4 @@ RSpec.shared_examples 'handle subscription based on user access' do
expect(subscription).to be_rejected
end
-
- context 'when action_cable_notes is disabled' do
- before do
- stub_feature_flags(action_cable_notes: false)
- end
-
- it 'rejects the subscription' do
- subscribe(subscribe_params)
-
- expect(subscription).to be_rejected
- end
- end
end
diff --git a/spec/support/shared_examples/ci/create_pipeline_service_environment_shared_examples.rb b/spec/support/shared_examples/ci/create_pipeline_service_environment_shared_examples.rb
new file mode 100644
index 00000000000..77a352a8326
--- /dev/null
+++ b/spec/support/shared_examples/ci/create_pipeline_service_environment_shared_examples.rb
@@ -0,0 +1,166 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'creating a pipeline with environment keyword' do
+ context 'with environment' do
+ let(:config) do
+ YAML.dump(
+ deploy: {
+ environment: { name: "review/$CI_COMMIT_REF_NAME" },
+ **base_config
+ })
+ end
+
+ it 'creates the environment', :sidekiq_inline do
+ result = execute_service.payload
+
+ expect(result).to be_persisted
+ expect(Environment.find_by(name: "review/master")).to be_present
+ expect(result.all_jobs.first.deployment).to be_persisted
+ expect(result.all_jobs.first.deployment.deployable).to be_a(expected_deployable_class)
+ end
+
+ it 'sets tags when build job' do
+ skip unless expected_deployable_class == Ci::Build
+
+ result = execute_service.payload
+
+ expect(result).to be_persisted
+ expect(result.all_jobs.first.tag_list).to match_array(expected_tag_names)
+ end
+ end
+
+ context 'with environment with auto_stop_in' do
+ let(:config) do
+ YAML.dump(
+ deploy: {
+ environment: { name: "review/$CI_COMMIT_REF_NAME", auto_stop_in: '1 day' },
+ **base_config
+ })
+ end
+
+ it 'creates the environment with auto stop in' do
+ result = execute_service.payload
+
+ expect(result).to be_persisted
+ expect(result.all_jobs.first.options[:environment][:auto_stop_in]).to eq('1 day')
+ end
+ end
+
+ context 'with environment name including persisted variables' do
+ let(:config) do
+ YAML.dump(
+ deploy: {
+ environment: { name: "review/id1$CI_PIPELINE_ID/id2$CI_JOB_ID" },
+ **base_config
+ }
+ )
+ end
+
+ it 'skips persisted variables in environment name' do
+ result = execute_service.payload
+
+ expect(result).to be_persisted
+ expect(Environment.find_by(name: "review/id1/id2")).to be_present
+ end
+ end
+
+ context 'when environment with Kubernetes configuration' do
+ let(:kubernetes_namespace) { 'custom-namespace' }
+ let(:config) do
+ YAML.dump(
+ deploy: {
+ environment: {
+ name: "environment-name",
+ kubernetes: { namespace: kubernetes_namespace }
+ },
+ **base_config
+ }
+ )
+ end
+
+ it 'stores the requested namespace' do
+ result = execute_service.payload
+ job = result.all_jobs.first
+
+ expect(result).to be_persisted
+ expect(job.options.dig(:environment, :kubernetes, :namespace)).to eq(kubernetes_namespace)
+ end
+ end
+
+ context 'when environment with invalid name' do
+ let(:config) do
+ YAML.dump(deploy: { environment: { name: 'name,with,commas' }, **base_config })
+ end
+
+ it 'does not create an environment' do
+ expect do
+ result = execute_service.payload
+
+ expect(result).to be_persisted
+ end.not_to change { Environment.count }
+ end
+ end
+
+ context 'when environment with duplicate names' do
+ let(:config) do
+ YAML.dump({
+ deploy: { environment: { name: 'production' }, **base_config },
+ deploy_2: { environment: { name: 'production' }, **base_config }
+ })
+ end
+
+ it 'creates a pipeline with the environment', :sidekiq_inline do
+ result = execute_service.payload
+
+ expect(result).to be_persisted
+ expect(Environment.find_by(name: 'production')).to be_present
+ expect(result.all_jobs.first.deployment).to be_persisted
+ expect(result.all_jobs.first.deployment.deployable).to be_a(expected_deployable_class)
+ end
+ end
+
+ context 'when pipeline has a job with environment' do
+ let(:pipeline) { execute_service.payload }
+
+ context 'when environment name is valid' do
+ let(:config) do
+ YAML.dump({
+ review_app: {
+ environment: {
+ name: 'review/${CI_COMMIT_REF_NAME}',
+ url: 'http://${CI_COMMIT_REF_SLUG}-staging.example.com'
+ },
+ **base_config
+ }
+ })
+ end
+
+ it 'has a job with environment', :sidekiq_inline do
+ expect(pipeline.all_jobs.count).to eq(1)
+ expect(pipeline.all_jobs.first.persisted_environment.name).to eq('review/master')
+ expect(pipeline.all_jobs.first.deployment.status).to eq(expected_deployment_status)
+ expect(pipeline.all_jobs.first.status).to eq(expected_job_status)
+ end
+ end
+
+ context 'when environment name is invalid' do
+ let(:config) do
+ YAML.dump({
+ 'job:deploy-to-test-site': {
+ environment: {
+ name: '${CI_JOB_NAME}',
+ url: 'https://$APP_URL'
+ },
+ **base_config
+ }
+ })
+ end
+
+ it 'has a job without environment' do
+ expect(pipeline.all_jobs.count).to eq(1)
+ expect(pipeline.all_jobs.first.persisted_environment).to be_nil
+ expect(pipeline.all_jobs.first.deployment).to be_nil
+ end
+ end
+ end
+end
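A sketch of how a Ci::CreatePipelineService spec might include these shared examples. The lets below are the names the examples reference; the concrete values are assumptions.

RSpec.describe Ci::CreatePipelineService do
  let_it_be(:project) { create(:project, :repository) }
  let_it_be(:user) { project.first_owner }

  let(:base_config) { { script: 'deploy', tags: ['deploy-runner'] } }
  let(:expected_deployable_class) { Ci::Build }
  let(:expected_tag_names) { %w[deploy-runner] }
  let(:expected_deployment_status) { 'created' }
  let(:expected_job_status) { 'pending' }

  let(:execute_service) do
    described_class.new(project, user, ref: 'master').execute(:push)
  end

  before do
    stub_ci_pipeline_yaml_file(config) # `config` is defined by each shared context
  end

  it_behaves_like 'creating a pipeline with environment keyword'
end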
diff --git a/spec/support/shared_examples/ci/deployable_shared_examples.rb b/spec/support/shared_examples/ci/deployable_shared_examples.rb
index b51a8fa20e2..4f43d38e604 100644
--- a/spec/support/shared_examples/ci/deployable_shared_examples.rb
+++ b/spec/support/shared_examples/ci/deployable_shared_examples.rb
@@ -96,7 +96,7 @@ RSpec.shared_examples 'a deployable job' do
ActiveRecord::QueryRecorder.new { subject }
end
- index_for_build = recorded.log.index { |l| l.include?("UPDATE #{Ci::Build.quoted_table_name}") }
+ index_for_build = recorded.log.index { |l| l.include?("UPDATE #{described_class.quoted_table_name}") }
index_for_deployment = recorded.log.index { |l| l.include?("UPDATE \"deployments\"") }
expect(index_for_build).to be < index_for_deployment
@@ -259,7 +259,7 @@ RSpec.shared_examples 'a deployable job' do
describe '#environment_tier_from_options' do
subject { job.environment_tier_from_options }
- let(:job) { Ci::Build.new(options: options) }
+ let(:job) { described_class.new(options: options) }
let(:options) { { environment: { deployment_tier: 'production' } } }
it { is_expected.to eq('production') }
@@ -276,7 +276,7 @@ RSpec.shared_examples 'a deployable job' do
let(:options) { { environment: { deployment_tier: 'production' } } }
let!(:environment) { create(:environment, name: 'production', tier: 'development', project: project) }
- let(:job) { Ci::Build.new(options: options, environment: 'production', project: project) }
+ let(:job) { described_class.new(options: options, environment: 'production', project: project) }
it { is_expected.to eq('production') }
@@ -295,6 +295,52 @@ RSpec.shared_examples 'a deployable job' do
end
end
+ describe '#environment_url' do
+ subject { job.environment_url }
+
+ let!(:job) { create(factory_type, :with_deployment, :deploy_to_production, pipeline: pipeline) }
+
+ it { is_expected.to eq('http://prd.example.com/$CI_JOB_NAME') }
+
+ context 'when options does not include url' do
+ before do
+ job.update!(options: { environment: { url: nil } })
+ job.persisted_environment.update!(external_url: 'http://prd.example.com/$CI_JOB_NAME')
+ end
+
+ it 'fetches from the persisted environment' do
+ expect(job.persisted_environment).to receive(:external_url).once.and_call_original
+
+ is_expected.to eq('http://prd.example.com/$CI_JOB_NAME')
+ end
+
+ context 'when persisted environment is absent' do
+ before do
+ job.clear_memoization(:persisted_environment)
+ job.persisted_environment = nil
+ end
+
+ it { is_expected.to be_nil }
+ end
+ end
+ end
+
+ describe '#environment_slug' do
+ subject { job.environment_slug }
+
+ let!(:job) { create(factory_type, :with_deployment, :start_review_app, pipeline: pipeline) }
+
+ it { is_expected.to eq('review-master-8dyme2') }
+
+ context 'when persisted environment is absent' do
+ let!(:job) { create(factory_type, :start_review_app, pipeline: pipeline) }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
describe 'environment' do
describe '#has_environment_keyword?' do
subject { job.has_environment_keyword? }
@@ -536,10 +582,6 @@ RSpec.shared_examples 'a deployable job' do
end
describe '#deployment_status' do
- before do
- allow_any_instance_of(Ci::Build).to receive(:create_deployment) # rubocop:disable RSpec/AnyInstanceOf
- end
-
context 'when job is a last deployment' do
let(:job) { create(factory_type, :success, environment: 'production', pipeline: pipeline) }
let(:environment) { create(:environment, name: 'production', project: job.project) }
diff --git a/spec/support/shared_examples/config/metrics/every_metric_definition_shared_examples.rb b/spec/support/shared_examples/config/metrics/every_metric_definition_shared_examples.rb
index e61c884cd2b..14d0ac81250 100644
--- a/spec/support/shared_examples/config/metrics/every_metric_definition_shared_examples.rb
+++ b/spec/support/shared_examples/config/metrics/every_metric_definition_shared_examples.rb
@@ -121,8 +121,7 @@ RSpec.shared_examples 'every metric definition' do
let(:ignored_classes) do
[
Gitlab::Usage::Metrics::Instrumentations::IssuesWithAlertManagementAlertsMetric,
- Gitlab::Usage::Metrics::Instrumentations::IssuesWithPrometheusAlertEvents,
- Gitlab::Usage::Metrics::Instrumentations::IssuesWithSelfManagedPrometheusAlertEvents
+ Gitlab::Usage::Metrics::Instrumentations::IssuesWithPrometheusAlertEvents
].freeze
end
diff --git a/spec/support/shared_examples/controllers/concerns/web_hooks/integrations_hook_log_actions_shared_examples.rb b/spec/support/shared_examples/controllers/concerns/web_hooks/integrations_hook_log_actions_shared_examples.rb
index 56a5dcb10b2..cb4e68122d9 100644
--- a/spec/support/shared_examples/controllers/concerns/web_hooks/integrations_hook_log_actions_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/concerns/web_hooks/integrations_hook_log_actions_shared_examples.rb
@@ -43,5 +43,14 @@ RSpec.shared_examples WebHooks::HookLogActions do
expect(response).to have_gitlab_http_status(:not_found)
end
+
+ it 'redirects back with a warning if the hook log url is outdated' do
+ web_hook_log.update!(url_hash: 'some_other_value')
+
+ post retry_path, headers: { 'REFERER' => show_path }
+
+ expect(response).to redirect_to(show_path)
+ expect(flash[:warning]).to eq(_('The hook URL has changed, and this log entry cannot be retried'))
+ end
end
end
diff --git a/spec/support/shared_examples/controllers/issuable_notes_filter_shared_examples.rb b/spec/support/shared_examples/controllers/issuable_notes_filter_shared_examples.rb
index a4eb6a839c0..bd9c2582d2f 100644
--- a/spec/support/shared_examples/controllers/issuable_notes_filter_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/issuable_notes_filter_shared_examples.rb
@@ -18,23 +18,6 @@ RSpec.shared_examples 'issuable notes filter' do
expect(UserPreference.count).to eq(1)
end
- it 'expires notes e-tag cache for issuable if filter changed' do
- notes_filter = UserPreference::NOTES_FILTERS[:only_comments]
-
- expect_any_instance_of(issuable.class).to receive(:expire_note_etag_cache)
-
- get :discussions, params: params.merge(notes_filter: notes_filter)
- end
-
- it 'does not expires notes e-tag cache for issuable if filter did not change' do
- notes_filter = UserPreference::NOTES_FILTERS[:only_comments]
- user.set_notes_filter(notes_filter, issuable)
-
- expect_any_instance_of(issuable.class).not_to receive(:expire_note_etag_cache)
-
- get :discussions, params: params.merge(notes_filter: notes_filter)
- end
-
it 'does not set notes filter when database is in read-only mode' do
allow(Gitlab::Database).to receive(:read_only?).and_return(true)
notes_filter = UserPreference::NOTES_FILTERS[:only_comments]
diff --git a/spec/support/shared_examples/controllers/labels_controller_shared_examples.rb b/spec/support/shared_examples/controllers/labels_controller_shared_examples.rb
new file mode 100644
index 00000000000..aea552f6ac7
--- /dev/null
+++ b/spec/support/shared_examples/controllers/labels_controller_shared_examples.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'lock_on_merge when editing labels' do
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(enforce_locked_labels_on_merge: false)
+ visit edit_label_path_unlocked
+ end
+
+ it 'does not display the checkbox/help text' do
+ expect(page).not_to have_content(_('Lock label after a merge request is merged'))
+ expect(page).not_to have_content(label_lock_on_merge_help_text)
+ end
+ end
+
+ it 'updates lock_on_merge' do
+ expect(page).to have_content(_('Lock label after a merge request is merged'))
+ expect(page).to have_content(label_lock_on_merge_help_text)
+
+ check(_('Lock label after a merge request is merged'))
+ click_button 'Save changes'
+
+ expect(label_unlocked.reload.lock_on_merge).to be_truthy
+ end
+
+ it 'checkbox is disabled if lock_on_merge already set' do
+ visit edit_label_path_locked
+
+ expect(page.find('#label_lock_on_merge')).to be_disabled
+ end
+end
+
+RSpec.shared_examples 'lock_on_merge when creating labels' do
+ it 'is not supported when creating a label' do
+ expect(page).not_to have_content(_('Lock label after a merge request is merged'))
+ expect(page).not_to have_content(label_lock_on_merge_help_text)
+ end
+end
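A hedged sketch of a project-labels feature spec including these examples; the help-text copy is an assumed string, and the paths show one way an including spec might satisfy the referenced lets.

RSpec.describe 'Edit project labels', :js do
  let_it_be(:project) { create(:project) }
  let_it_be(:user) { create(:user) }
  let_it_be(:label_unlocked) { create(:label, project: project, lock_on_merge: false) }
  let_it_be(:label_locked) { create(:label, project: project, lock_on_merge: true) }

  let(:label_lock_on_merge_help_text) { 'Locked labels cannot be removed after merge' } # assumed copy
  let(:edit_label_path_unlocked) { edit_project_label_path(project, label_unlocked) }
  let(:edit_label_path_locked) { edit_project_label_path(project, label_locked) }

  before_all do
    project.add_maintainer(user)
  end

  before do
    sign_in(user)
    visit edit_label_path_unlocked
  end

  it_behaves_like 'lock_on_merge when editing labels'
end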
diff --git a/spec/support/shared_examples/controllers/search_rate_limit_shared_examples.rb b/spec/support/shared_examples/controllers/search_rate_limit_shared_examples.rb
new file mode 100644
index 00000000000..aefcdc70082
--- /dev/null
+++ b/spec/support/shared_examples/controllers/search_rate_limit_shared_examples.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+# Requires a context containing:
+# - user
+# - params
+
+RSpec.shared_examples 'search request exceeding rate limit' do
+ include_examples 'rate limited endpoint', rate_limit_key: :search_rate_limit
+
+ it 'allows user in allow-list to search without applying rate limit', :freeze_time,
+ :clean_gitlab_redis_rate_limiting do
+ allow(Gitlab::ApplicationRateLimiter).to receive(:threshold).with(:search_rate_limit).and_return(1)
+
+ stub_application_setting(search_rate_limit_allowlist: [current_user.username])
+
+ request
+ request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+end
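A sketch of a request spec including the shared examples above. Beyond the documented `user` and `params`, the `current_user` and `request` definitions follow the convention the nested 'rate limited endpoint' examples rely on; treating them as required here is an assumption.

RSpec.describe 'Search', type: :request do
  let_it_be(:user) { create(:user) }

  let(:current_user) { user }
  let(:params) { { search: 'anything', scope: 'projects' } }

  before do
    login_as(current_user)
  end

  def request
    get search_path, params: params
  end

  it_behaves_like 'search request exceeding rate limit'
end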
diff --git a/spec/support/shared_examples/controllers/snowplow_event_tracking_examples.rb b/spec/support/shared_examples/controllers/snowplow_event_tracking_examples.rb
index 3d3b619451d..29d6f202498 100644
--- a/spec/support/shared_examples/controllers/snowplow_event_tracking_examples.rb
+++ b/spec/support/shared_examples/controllers/snowplow_event_tracking_examples.rb
@@ -57,7 +57,7 @@ RSpec.shared_examples 'Snowplow event tracking with Redis context' do |overrides
it_behaves_like 'Snowplow event tracking', overrides: overrides do
let(:context) do
key_path = try(:label) || action
- [Gitlab::Tracking::ServicePingContext.new(data_source: :redis, key_path: key_path).to_context.to_json]
+ [Gitlab::Usage::MetricDefinition.context_for(key_path).to_context.to_json]
end
end
end
diff --git a/spec/support/shared_examples/features/2fa_shared_examples.rb b/spec/support/shared_examples/features/2fa_shared_examples.rb
index 6c4e98c9989..f50874b6b05 100644
--- a/spec/support/shared_examples/features/2fa_shared_examples.rb
+++ b/spec/support/shared_examples/features/2fa_shared_examples.rb
@@ -14,7 +14,7 @@ RSpec.shared_examples 'hardware device for 2fa' do |device_type|
end
describe "registration" do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
before do
gitlab_sign_in(user)
@@ -67,7 +67,7 @@ RSpec.shared_examples 'hardware device for 2fa' do |device_type|
end
describe 'fallback code authentication' do
- let(:user) { create(:user) }
+ let(:user) { create(:user, :no_super_sidebar) }
before do
# Register and logout
diff --git a/spec/support/shared_examples/features/content_editor_shared_examples.rb b/spec/support/shared_examples/features/content_editor_shared_examples.rb
index fff8ef915eb..3e81f969462 100644
--- a/spec/support/shared_examples/features/content_editor_shared_examples.rb
+++ b/spec/support/shared_examples/features/content_editor_shared_examples.rb
@@ -244,7 +244,8 @@ RSpec.shared_examples 'edits content using the content editor' do |params = { wi
end
end
- it 'expands the link, updates the link attributes and text if text is updated' do
+ it 'expands the link, updates the link attributes and text if text is updated',
+ quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/419684' do
page.within '[data-testid="link-bubble-menu"]' do
fill_in 'link-text', with: 'new text'
fill_in 'link-href', with: 'https://about.gitlab.com'
diff --git a/spec/support/shared_examples/features/project_features_apply_to_issuables_shared_examples.rb b/spec/support/shared_examples/features/project_features_apply_to_issuables_shared_examples.rb
index d410653ca43..58bf461c733 100644
--- a/spec/support/shared_examples/features/project_features_apply_to_issuables_shared_examples.rb
+++ b/spec/support/shared_examples/features/project_features_apply_to_issuables_shared_examples.rb
@@ -4,8 +4,8 @@ RSpec.shared_examples 'project features apply to issuables' do |klass|
let(:described_class) { klass }
let(:group) { create(:group) }
- let(:user_in_group) { create(:group_member, :developer, user: create(:user), group: group ).user }
- let(:user_outside_group) { create(:user) }
+ let(:user_in_group) { create(:group_member, :developer, user: create(:user, :no_super_sidebar), group: group ).user }
+ let(:user_outside_group) { create(:user, :no_super_sidebar) }
let(:project) { create(:project, :public, project_args) }
diff --git a/spec/support/shared_examples/features/protected_branches_access_control_ce_shared_examples.rb b/spec/support/shared_examples/features/protected_branches_access_control_ce_shared_examples.rb
index fb882ef8a23..fc717fbac20 100644
--- a/spec/support/shared_examples/features/protected_branches_access_control_ce_shared_examples.rb
+++ b/spec/support/shared_examples/features/protected_branches_access_control_ce_shared_examples.rb
@@ -2,6 +2,7 @@
RSpec.shared_examples "protected branches > access control > CE" do
let(:no_one) { ProtectedRef::AccessLevel.humanize(::Gitlab::Access::NO_ACCESS) }
+ let_it_be(:edit_form) { '.js-protected-branch-edit-form' }
ProtectedRef::AccessLevel.human_access_levels.each do |(access_type_id, access_type_name)|
it "allows creating protected branches that #{access_type_name} can push to" do
@@ -30,7 +31,8 @@ RSpec.shared_examples "protected branches > access control > CE" do
expect(ProtectedBranch.last.merge_access_levels.map(&:access_level)).to eq([access_type_id])
end
- it "allows updating protected branches so that #{access_type_name} can push to them" do
+ it "allows updating protected branches so that #{access_type_name} can push to them",
+ quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/425080' do
visit project_protected_branches_path(project)
show_add_form
@@ -41,18 +43,14 @@ RSpec.shared_examples "protected branches > access control > CE" do
expect(ProtectedBranch.count).to eq(1)
- within(".protected-branches-list") do
- within_select(".js-allowed-to-push") do
- click_on(access_type_name)
- end
- end
-
+ set_allowed_to('push', access_type_name, form: edit_form)
wait_for_requests
expect(ProtectedBranch.last.push_access_levels.map(&:access_level)).to include(access_type_id)
end
- it "allows updating protected branches so that #{access_type_name} can merge to them" do
+ it "allows updating protected branches so that #{access_type_name} can merge to them",
+ quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/425080' do
visit project_protected_branches_path(project)
show_add_form
@@ -63,12 +61,7 @@ RSpec.shared_examples "protected branches > access control > CE" do
expect(ProtectedBranch.count).to eq(1)
- within(".protected-branches-list") do
- within_select(".js-allowed-to-merge") do
- click_on(access_type_name)
- end
- end
-
+ set_allowed_to('merge', access_type_name, form: edit_form)
wait_for_requests
expect(ProtectedBranch.last.merge_access_levels.map(&:access_level)).to include(access_type_id)
diff --git a/spec/support/shared_examples/features/protected_tags_with_deploy_keys_examples.rb b/spec/support/shared_examples/features/protected_tags_with_deploy_keys_examples.rb
index 703ba5b018a..2b7147fa4b4 100644
--- a/spec/support/shared_examples/features/protected_tags_with_deploy_keys_examples.rb
+++ b/spec/support/shared_examples/features/protected_tags_with_deploy_keys_examples.rb
@@ -19,7 +19,7 @@ RSpec.shared_examples 'Deploy keys with protected tags' do
find(".js-allowed-to-create").click
wait_for_requests
- within('[data-testid="allowed-to-create-dropdown"]') do
+ within('.dropdown-menu') do
dropdown_headers = page.all('.dropdown-header').map(&:text)
expect(dropdown_headers).to contain_exactly(*all_dropdown_sections)
@@ -53,7 +53,7 @@ RSpec.shared_examples 'Deploy keys with protected tags' do
find(".js-allowed-to-create").click
wait_for_requests
- within('[data-testid="allowed-to-create-dropdown"]') do
+ within('.dropdown-menu') do
dropdown_headers = page.all('.dropdown-header').map(&:text)
expect(dropdown_headers).to contain_exactly(*dropdown_sections_minus_deploy_keys)
diff --git a/spec/support/shared_examples/features/runners_shared_examples.rb b/spec/support/shared_examples/features/runners_shared_examples.rb
index 0c043f48c5f..861c205337a 100644
--- a/spec/support/shared_examples/features/runners_shared_examples.rb
+++ b/spec/support/shared_examples/features/runners_shared_examples.rb
@@ -94,6 +94,17 @@ RSpec.shared_examples 'shows runner in list' do
end
end
+RSpec.shared_examples 'shows runner details from list' do
+ it 'shows runner details page' do
+ click_link("##{runner.id} (#{runner.short_sha})")
+
+ expect(current_url).to include(runner_page_path)
+
+ expect(page).to have_selector 'h1', text: "##{runner.id} (#{runner.short_sha})"
+ expect(page).to have_content "#{s_('Runners|Description')} #{runner.description}"
+ end
+end
+
RSpec.shared_examples 'pauses, resumes and deletes a runner' do
include Spec::Support::Helpers::ModalHelpers
@@ -191,6 +202,13 @@ RSpec.shared_examples 'shows runner jobs tab' do
end
end
+RSpec.shared_examples 'shows locked field' do
+ it 'shows locked checkbox with description', :js do
+ expect(page).to have_selector('input[type="checkbox"][name="locked"]')
+ expect(page).to have_content(_('Lock to current projects'))
+ end
+end
+
RSpec.shared_examples 'submits edit runner form' do
it 'breadcrumb contains runner id and token' do
page.within '[data-testid="breadcrumb-links"]' do
diff --git a/spec/support/shared_examples/features/sidebar/sidebar_labels_shared_examples.rb b/spec/support/shared_examples/features/sidebar/sidebar_labels_shared_examples.rb
index 8ebec19a884..c2d144bef3b 100644
--- a/spec/support/shared_examples/features/sidebar/sidebar_labels_shared_examples.rb
+++ b/spec/support/shared_examples/features/sidebar/sidebar_labels_shared_examples.rb
@@ -86,7 +86,7 @@ RSpec.shared_examples 'labels sidebar widget' do
context 'creating a label', :js do
before do
page.within(labels_widget) do
- page.find('[data-testid="create-label-button"]').click
+ click_button 'Create project label'
end
end
@@ -96,12 +96,11 @@ RSpec.shared_examples 'labels sidebar widget' do
end
end
- it 'creates new label', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/391240' do
+ it 'creates new label' do
page.within(labels_widget) do
fill_in 'Name new label', with: 'wontfix'
- page.find('.suggest-colors a', match: :first).click
- page.find('button', text: 'Create').click
- wait_for_requests
+ click_link 'Magenta-pink'
+ click_button 'Create'
expect(page).to have_content 'wontfix'
end
diff --git a/spec/support/shared_examples/features/snippets_shared_examples.rb b/spec/support/shared_examples/features/snippets_shared_examples.rb
index bf870b3ce66..383f81d048f 100644
--- a/spec/support/shared_examples/features/snippets_shared_examples.rb
+++ b/spec/support/shared_examples/features/snippets_shared_examples.rb
@@ -52,7 +52,7 @@ RSpec.shared_examples 'tabs with counts' do
end
RSpec.shared_examples 'does not show New Snippet button' do
- let(:user) { create(:user, :external) }
+ let(:user) { create(:user, :external, :no_super_sidebar) }
specify do
sign_in(user)
diff --git a/spec/support/shared_examples/features/variable_list_pagination_shared_examples.rb b/spec/support/shared_examples/features/variable_list_pagination_shared_examples.rb
index 0b0c9edcb42..c1057671699 100644
--- a/spec/support/shared_examples/features/variable_list_pagination_shared_examples.rb
+++ b/spec/support/shared_examples/features/variable_list_pagination_shared_examples.rb
@@ -41,15 +41,15 @@ RSpec.shared_examples 'variable list pagination' do |variable_type|
it 'sorts variables alphabetically in ASC and DESC order' do
page.within('[data-testid="ci-variable-table"]') do
- expect(find('.js-ci-variable-row:nth-child(1) td[data-label="Key"]').text).to eq(variable.key)
- expect(find('.js-ci-variable-row:nth-child(20) td[data-label="Key"]').text).to eq('test_key_8')
+ expect(find('.js-ci-variable-row:nth-child(1) td[data-label="Key"]')).to have_content(variable.key)
+ expect(find('.js-ci-variable-row:nth-child(20) td[data-label="Key"]')).to have_content('test_key_8')
end
click_button 'Next'
wait_for_requests
page.within('[data-testid="ci-variable-table"]') do
- expect(find('.js-ci-variable-row:nth-child(1) td[data-label="Key"]').text).to eq('test_key_9')
+ expect(find('.js-ci-variable-row:nth-child(1) td[data-label="Key"]')).to have_content('test_key_9')
end
page.within('[data-testid="ci-variable-table"]') do
@@ -59,8 +59,8 @@ RSpec.shared_examples 'variable list pagination' do |variable_type|
wait_for_requests
page.within('[data-testid="ci-variable-table"]') do
- expect(find('.js-ci-variable-row:nth-child(1) td[data-label="Key"]').text).to eq('test_key_9')
- expect(find('.js-ci-variable-row:nth-child(20) td[data-label="Key"]').text).to eq('test_key_0')
+ expect(find('.js-ci-variable-row:nth-child(1) td[data-label="Key"]')).to have_content('test_key_9')
+ expect(find('.js-ci-variable-row:nth-child(20) td[data-label="Key"]')).to have_content('test_key_0')
end
end
end
diff --git a/spec/support/shared_examples/features/variable_list_shared_examples.rb b/spec/support/shared_examples/features/variable_list_shared_examples.rb
index 3a91b798bbd..5951d3e781b 100644
--- a/spec/support/shared_examples/features/variable_list_shared_examples.rb
+++ b/spec/support/shared_examples/features/variable_list_shared_examples.rb
@@ -3,7 +3,7 @@
RSpec.shared_examples 'variable list' do
it 'shows a list of variables' do
page.within('[data-testid="ci-variable-table"]') do
- expect(find('.js-ci-variable-row:nth-child(1) td[data-label="Key"]').text).to eq(variable.key)
+ expect(find('.js-ci-variable-row:nth-child(1) td[data-label="Key"]')).to have_content(variable.key)
end
end
@@ -17,7 +17,7 @@ RSpec.shared_examples 'variable list' do
wait_for_requests
page.within('[data-testid="ci-variable-table"]') do
- expect(find('.js-ci-variable-row:nth-child(1) td[data-label="Key"]').text).to eq('key')
+ expect(find('.js-ci-variable-row:nth-child(1) td[data-label="Key"]')).to have_content('key')
end
end
@@ -31,8 +31,8 @@ RSpec.shared_examples 'variable list' do
wait_for_requests
page.within('[data-testid="ci-variable-table"]') do
- expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Key')}']").text).to eq('key')
- expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Attributes')}']")).to have_content(s_('CiVariables|Protected'))
+ expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Key')}']")).to have_content('key')
+ expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Key')}']")).to have_content(s_('CiVariables|Protected'))
end
end
@@ -46,25 +46,25 @@ RSpec.shared_examples 'variable list' do
wait_for_requests
page.within('[data-testid="ci-variable-table"]') do
- expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Key')}']").text).to eq('key')
- expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Attributes')}']")).not_to have_content(s_('CiVariables|Masked'))
+ expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Key')}']")).to have_content('key')
+ expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Key')}']")).not_to have_content(s_('CiVariables|Masked'))
end
end
it 'reveals and hides variables' do
page.within('[data-testid="ci-variable-table"]') do
- expect(first('.js-ci-variable-row td[data-label="Key"]').text).to eq(variable.key)
+ expect(first('.js-ci-variable-row td[data-label="Key"]')).to have_content(variable.key)
expect(page).to have_content('*' * 5)
click_button('Reveal value')
- expect(first('.js-ci-variable-row td[data-label="Key"]').text).to eq(variable.key)
- expect(first('.js-ci-variable-row td[data-label="Value"]').text).to eq(variable.value)
+ expect(first('.js-ci-variable-row td[data-label="Key"]')).to have_content(variable.key)
+ expect(first('.js-ci-variable-row td[data-label="Value"]')).to have_content(variable.value)
expect(page).not_to have_content('*' * 5)
click_button('Hide value')
- expect(first('.js-ci-variable-row td[data-label="Key"]').text).to eq(variable.key)
+ expect(first('.js-ci-variable-row td[data-label="Key"]')).to have_content(variable.key)
expect(page).to have_content('*' * 5)
end
end
@@ -98,7 +98,7 @@ RSpec.shared_examples 'variable list' do
wait_for_requests
- expect(first('.js-ci-variable-row td[data-label="Key"]').text).to eq('new_key')
+ expect(first('.js-ci-variable-row td[data-label="Key"]')).to have_content('new_key')
end
it 'edits a variable to be unmasked' do
@@ -116,8 +116,8 @@ RSpec.shared_examples 'variable list' do
wait_for_requests
page.within('[data-testid="ci-variable-table"]') do
- expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Attributes')}']")).to have_content(s_('CiVariables|Protected'))
- expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Attributes')}']")).not_to have_content(s_('CiVariables|Masked'))
+ expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Key')}']")).to have_content(s_('CiVariables|Protected'))
+ expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Key')}']")).not_to have_content(s_('CiVariables|Masked'))
end
end
@@ -145,7 +145,7 @@ RSpec.shared_examples 'variable list' do
end
page.within('[data-testid="ci-variable-table"]') do
- expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Attributes')}']")).to have_content(s_('CiVariables|Masked'))
+ expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Key')}']")).to have_content(s_('CiVariables|Masked'))
end
end
@@ -234,9 +234,9 @@ RSpec.shared_examples 'variable list' do
# expect to find 3 rows of variables in alphabetical order
expect(page).to have_selector('.js-ci-variable-row', count: 3)
rows = all('.js-ci-variable-row')
- expect(rows[0].find('td[data-label="Key"]').text).to eq('ckey')
- expect(rows[1].find('td[data-label="Key"]').text).to eq('test_key')
- expect(rows[2].find('td[data-label="Key"]').text).to eq('zkey')
+ expect(rows[0].find('td[data-label="Key"]')).to have_content('ckey')
+ expect(rows[1].find('td[data-label="Key"]')).to have_content('test_key')
+ expect(rows[2].find('td[data-label="Key"]')).to have_content('zkey')
end
context 'defaults to the application setting' do
diff --git a/spec/support/shared_examples/features/work_items_shared_examples.rb b/spec/support/shared_examples/features/work_items_shared_examples.rb
index d3863c9a675..18e0cfdad00 100644
--- a/spec/support/shared_examples/features/work_items_shared_examples.rb
+++ b/spec/support/shared_examples/features/work_items_shared_examples.rb
@@ -179,6 +179,45 @@ RSpec.shared_examples 'work items assignees' do
expect(work_item.reload.assignees).to include(user)
end
+
+ it 'successfully assigns the current user by clicking `Assign myself` button' do
+ find('[data-testid="work-item-assignees-input"]').hover
+ find('[data-testid="assign-self"]').click
+ wait_for_requests
+
+ expect(work_item.reload.assignees).to include(user)
+ end
+
+ it 'successfully removes all users on clear all button click' do
+ find('[data-testid="work-item-assignees-input"]').hover
+ find('[data-testid="assign-self"]').click
+ wait_for_requests
+
+ expect(work_item.reload.assignees).to include(user)
+
+ find('[data-testid="work-item-assignees-input"]').click
+ find('[data-testid="clear-all-button"]').click
+ find("body").click
+ wait_for_requests
+
+ expect(work_item.reload.assignees).not_to include(user)
+ end
+
+ it 'successfully removes user on clicking badge cross button' do
+ find('[data-testid="work-item-assignees-input"]').hover
+ find('[data-testid="assign-self"]').click
+ wait_for_requests
+
+ expect(work_item.reload.assignees).to include(user)
+
+ within('[data-testid="work-item-assignees-input"]') do
+ find('[data-testid="close-icon"]').click
+ end
+ find("body").click
+ wait_for_requests
+
+ expect(work_item.reload.assignees).not_to include(user)
+ end
end
RSpec.shared_examples 'work items labels' do
diff --git a/spec/support/shared_examples/finders/issues_finder_shared_examples.rb b/spec/support/shared_examples/finders/issues_finder_shared_examples.rb
index 19001abcbe2..ed8feebf1f6 100644
--- a/spec/support/shared_examples/finders/issues_finder_shared_examples.rb
+++ b/spec/support/shared_examples/finders/issues_finder_shared_examples.rb
@@ -1406,7 +1406,7 @@ RSpec.shared_examples 'issues or work items finder' do |factory, execute_context
it 'returns true' do
expect(finder.use_cte_for_search?).to be_truthy
expect(finder.execute.to_sql)
- .to match(/^WITH "issues" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/)
+ .to match(/^WITH "issues" AS MATERIALIZED/)
end
end
@@ -1416,7 +1416,7 @@ RSpec.shared_examples 'issues or work items finder' do |factory, execute_context
it 'returns true' do
expect(finder.use_cte_for_search?).to be_truthy
expect(finder.execute.to_sql)
- .to match(/^WITH "issues" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/)
+ .to match(/^WITH "issues" AS MATERIALIZED/)
end
end
@@ -1426,7 +1426,7 @@ RSpec.shared_examples 'issues or work items finder' do |factory, execute_context
it 'returns true' do
expect(finder.use_cte_for_search?).to be_truthy
expect(finder.execute.to_sql)
- .to match(/^WITH "issues" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/)
+ .to match(/^WITH "issues" AS MATERIALIZED/)
end
end
@@ -1436,7 +1436,7 @@ RSpec.shared_examples 'issues or work items finder' do |factory, execute_context
it 'returns true' do
expect(finder.use_cte_for_search?).to be_truthy
expect(finder.execute.to_sql)
- .to match(/^WITH "issues" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/)
+ .to match(/^WITH "issues" AS MATERIALIZED/)
end
end
end
diff --git a/spec/support/shared_examples/graphql/mutations/update_time_estimate_shared_examples.rb b/spec/support/shared_examples/graphql/mutations/update_time_estimate_shared_examples.rb
index d6d360bb413..a69b56c3d58 100644
--- a/spec/support/shared_examples/graphql/mutations/update_time_estimate_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/mutations/update_time_estimate_shared_examples.rb
@@ -6,11 +6,25 @@ RSpec.shared_examples 'updating time estimate' do
let(:input_params) { input.merge(extra_params).merge({ timeEstimate: time_estimate }) }
+ before do
+ resource.update!(time_estimate: 1800)
+ end
+
+ context 'when time estimate is not provided' do
+ let(:input_params) { input.merge(extra_params).except(:timeEstimate) }
+
+ it 'does not update' do
+ expect { post_graphql_mutation(mutation, current_user: current_user) }
+ .not_to change { resource.reload.time_estimate }
+ end
+ end
+
context 'when time estimate is not a valid numerical value' do
let(:time_estimate) { '-3.5d' }
it 'does not update' do
- expect { post_graphql_mutation(mutation, current_user: current_user) }.not_to change { resource.time_estimate }
+ expect { post_graphql_mutation(mutation, current_user: current_user) }
+ .not_to change { resource.reload.time_estimate }
end
it 'returns error' do
@@ -24,7 +38,8 @@ RSpec.shared_examples 'updating time estimate' do
let(:time_estimate) { 'nonsense' }
it 'does not update' do
- expect { post_graphql_mutation(mutation, current_user: current_user) }.not_to change { resource.time_estimate }
+ expect { post_graphql_mutation(mutation, current_user: current_user) }
+ .not_to change { resource.reload.time_estimate }
end
it 'returns error' do
@@ -47,6 +62,7 @@ RSpec.shared_examples 'updating time estimate' do
'1h' | 3600
'0h' | 0
'-0h' | 0
+ nil | 0
end
with_them do
diff --git a/spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb b/spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb
index b346f35bdc9..2c94f21e144 100644
--- a/spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb
@@ -61,14 +61,6 @@ RSpec.shared_examples 'Gitlab-style deprecations' do
expect(deprecable.deprecation_reason).to eq('This was renamed. Deprecated in 1.10.')
end
- it 'supports named reasons: alpha' do
- deprecable = subject(deprecated: { milestone: '1.10', reason: :alpha })
-
- expect(deprecable.deprecation_reason).to eq(
- 'This feature is an Experiment. It can be changed or removed at any time. Introduced in 1.10.'
- )
- end
-
it 'supports :alpha' do
deprecable = subject(alpha: { milestone: '1.10' })
@@ -82,7 +74,7 @@ RSpec.shared_examples 'Gitlab-style deprecations' do
subject(alpha: { milestone: '1.10' }, deprecated: { milestone: '1.10', reason: 'my reason' } )
end.to raise_error(
ArgumentError,
- eq("`experiment` and `deprecated` arguments cannot be passed at the same time")
+ eq("`alpha` and `deprecated` arguments cannot be passed at the same time")
)
end
diff --git a/spec/support/shared_examples/harbor/artifacts_controller_shared_examples.rb b/spec/support/shared_examples/harbor/artifacts_controller_shared_examples.rb
index 85fcd426e3d..16e25bf96dd 100644
--- a/spec/support/shared_examples/harbor/artifacts_controller_shared_examples.rb
+++ b/spec/support/shared_examples/harbor/artifacts_controller_shared_examples.rb
@@ -87,17 +87,7 @@ RSpec.shared_examples 'a harbor artifacts controller' do |args|
get harbor_artifact_url(container, repository_id), headers: json_header
end
- context 'with harbor registry feature flag enabled' do
- it_behaves_like 'responds with 200 status with json'
- end
-
- context 'with harbor registry feature flag disabled' do
- before do
- stub_feature_flags(harbor_registry_integration: false)
- end
-
- it_behaves_like 'responds with 404 status'
- end
+ it_behaves_like 'responds with 200 status with json'
context 'with anonymous user' do
before do
diff --git a/spec/support/shared_examples/harbor/repositories_controller_shared_examples.rb b/spec/support/shared_examples/harbor/repositories_controller_shared_examples.rb
index b35595a10b2..a0d47d1a2d1 100644
--- a/spec/support/shared_examples/harbor/repositories_controller_shared_examples.rb
+++ b/spec/support/shared_examples/harbor/repositories_controller_shared_examples.rb
@@ -87,17 +87,7 @@ RSpec.shared_examples 'a harbor repositories controller' do |args|
get harbor_repository_url(container)
end
- context 'with harbor registry feature flag enabled' do
- it_behaves_like 'responds with 200 status with html'
- end
-
- context 'with harbor registry feature flag disabled' do
- before do
- stub_feature_flags(harbor_registry_integration: false)
- end
-
- it_behaves_like 'responds with 404 status'
- end
+ it_behaves_like 'responds with 200 status with html'
context 'with anonymous user' do
before do
@@ -121,17 +111,7 @@ RSpec.shared_examples 'a harbor repositories controller' do |args|
get harbor_repository_url(container), headers: json_header
end
- context 'with harbor registry feature flag enabled' do
- it_behaves_like 'responds with 200 status with json'
- end
-
- context 'with harbor registry feature flag disabled' do
- before do
- stub_feature_flags(harbor_registry_integration: false)
- end
-
- it_behaves_like 'responds with 404 status'
- end
+ it_behaves_like 'responds with 200 status with json'
context 'with valid params' do
context 'with valid page params' do
diff --git a/spec/support/shared_examples/harbor/tags_controller_shared_examples.rb b/spec/support/shared_examples/harbor/tags_controller_shared_examples.rb
index 46fea7fdff6..aee728295de 100644
--- a/spec/support/shared_examples/harbor/tags_controller_shared_examples.rb
+++ b/spec/support/shared_examples/harbor/tags_controller_shared_examples.rb
@@ -76,17 +76,7 @@ RSpec.shared_examples 'a harbor tags controller' do |args|
headers: json_header)
end
- context 'with harbor registry feature flag enabled' do
- it_behaves_like 'responds with 200 status with json'
- end
-
- context 'with harbor registry feature flag disabled' do
- before do
- stub_feature_flags(harbor_registry_integration: false)
- end
-
- it_behaves_like 'responds with 404 status'
- end
+ it_behaves_like 'responds with 200 status with json'
context 'with anonymous user' do
before do
diff --git a/spec/support/shared_examples/lib/api/ai_workhorse_shared_examples.rb b/spec/support/shared_examples/lib/api/ai_workhorse_shared_examples.rb
deleted file mode 100644
index d4fe45a91a0..00000000000
--- a/spec/support/shared_examples/lib/api/ai_workhorse_shared_examples.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_examples 'behind AI related feature flags' do |provider_flag|
- context "when #{provider_flag} is disabled" do
- before do
- stub_feature_flags(provider_flag => false)
- end
-
- it 'responds as not found' do
- post api(url, current_user), params: input_params
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when ai_experimentation_api is disabled' do
- before do
- stub_feature_flags(ai_experimentation_api: false)
- end
-
- it 'responds as not found' do
- post api(url, current_user), params: input_params
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-end
-
-RSpec.shared_examples 'delegates AI request to Workhorse' do
- it 'responds with Workhorse send-url headers' do
- post api(url, current_user), params: input_params
-
- expect(response.body).to eq('""')
- expect(response).to have_gitlab_http_status(:ok)
-
- send_url_prefix, encoded_data = response.headers['Gitlab-Workhorse-Send-Data'].split(':')
- data = Gitlab::Json.parse(Base64.urlsafe_decode64(encoded_data))
-
- expect(send_url_prefix).to eq('send-url')
- expect(data).to eq({
- 'AllowRedirects' => false,
- 'Method' => 'POST'
- }.merge(expected_params))
- end
-end
diff --git a/spec/support/shared_examples/lib/gitlab/bitbucket_import/object_import_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/bitbucket_import/object_import_shared_examples.rb
new file mode 100644
index 00000000000..3dbe43d822f
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/bitbucket_import/object_import_shared_examples.rb
@@ -0,0 +1,100 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples Gitlab::BitbucketImport::ObjectImporter do
+ include AfterNextHelpers
+
+ describe '.sidekiq_retries_exhausted' do
+ let(:job) { { 'args' => [1, {}, 'key'], 'jid' => 'jid' } }
+
+ it 'notifies the waiter' do
+ expect(Gitlab::JobWaiter).to receive(:notify).with('key', 'jid')
+
+ described_class.sidekiq_retries_exhausted_block.call(job, StandardError.new)
+ end
+ end
+
+ describe '#perform' do
+ let_it_be(:import_started_project) { create(:project, :import_started) }
+
+ let(:project_id) { import_started_project.id }
+ let(:waiter_key) { 'key' }
+
+ shared_examples 'notifies the waiter' do
+ specify do
+ allow_next(worker.importer_class).to receive(:execute)
+
+ expect(Gitlab::JobWaiter).to receive(:notify).with(waiter_key, anything)
+
+ worker.perform(project_id, {}, waiter_key)
+ end
+ end
+
+ context 'when project does not exist' do
+ let(:project_id) { non_existing_record_id }
+
+ it_behaves_like 'notifies the waiter'
+ end
+
+ context 'when project has import started' do
+ let_it_be(:project) do
+ create(:project, :import_started, import_data_attributes: {
+ data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
+ credentials: { 'token' => 'token' }
+ })
+ end
+
+ let(:project_id) { project.id }
+
+ it 'calls the importer' do
+ expect(Gitlab::BitbucketImport::Logger).to receive(:info).twice
+ expect_next(worker.importer_class, project, kind_of(Hash)).to receive(:execute)
+
+ worker.perform(project_id, {}, waiter_key)
+ end
+
+ it_behaves_like 'notifies the waiter'
+
+ context 'when the importer raises an ActiveRecord::RecordInvalid error' do
+ before do
+ allow_next(worker.importer_class).to receive(:execute).and_raise(ActiveRecord::RecordInvalid)
+ end
+
+ it 'tracks the error' do
+ expect(Gitlab::Import::ImportFailureService).to receive(:track).once
+
+ worker.perform(project_id, {}, waiter_key)
+ end
+ end
+
+ context 'when the importer raises a StandardError' do
+ before do
+ allow_next(worker.importer_class).to receive(:execute).and_raise(StandardError)
+ end
+
+ it 'tracks the error and raises the error' do
+ expect(Gitlab::Import::ImportFailureService).to receive(:track).once
+
+ expect { worker.perform(project_id, {}, waiter_key) }.to raise_error(StandardError)
+ end
+ end
+ end
+
+ context 'when project import has been cancelled' do
+ let_it_be(:project_id) { create(:project, :import_canceled).id }
+
+ it 'does not call the importer' do
+ expect_next(worker.importer_class).not_to receive(:execute)
+
+ worker.perform(project_id, {}, waiter_key)
+ end
+
+ it_behaves_like 'notifies the waiter'
+ end
+ end
+
+ describe '#importer_class' do
+ it 'does not raise a NotImplementedError' do
+ expect(worker.importer_class).not_to be_nil
+ end
+ end
+end
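A minimal consumer sketch for the shared example above, assuming a hypothetical worker class; the shared example only requires that the including spec expose `worker` and that the worker implement `importer_class` and `perform`:

  # Hypothetical consumer spec; the worker class name is a placeholder.
  RSpec.describe Gitlab::BitbucketImport::ImportIssueWorker, feature_category: :importers do
    subject(:worker) { described_class.new }

    it_behaves_like Gitlab::BitbucketImport::ObjectImporter
  end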
diff --git a/spec/support/shared_examples/lib/gitlab/bitbucket_import/stage_methods_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/bitbucket_import/stage_methods_shared_examples.rb
new file mode 100644
index 00000000000..f128aa92a53
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/bitbucket_import/stage_methods_shared_examples.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples Gitlab::BitbucketImport::StageMethods do
+ describe '.sidekiq_retries_exhausted' do
+ let(:job) { { 'args' => [project.id] } }
+
+ it 'tracks the import failure' do
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track).with(
+ project_id: project.id,
+ exception: StandardError.new,
+ fail_import: true
+ )
+
+ described_class.sidekiq_retries_exhausted_block.call(job, StandardError.new)
+ end
+ end
+
+ describe '.perform' do
+ let(:worker) { described_class.new }
+
+ it 'executes the import' do
+ expect(worker).to receive(:import).with(project).once
+ expect(Gitlab::BitbucketImport::Logger).to receive(:info).twice
+
+ worker.perform(project.id)
+ end
+ end
+end
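A hedged usage sketch for the stage-methods contract, assuming a hypothetical stage worker; the including spec must define `project`, and the worker must respond to `import`:

  # Hypothetical consumer spec; the worker class name is a placeholder.
  RSpec.describe Gitlab::BitbucketImport::Stage::ImportIssuesWorker, feature_category: :importers do
    let_it_be(:project) { create(:project, :import_started) }

    it_behaves_like Gitlab::BitbucketImport::StageMethods
  end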
diff --git a/spec/support/shared_examples/lib/gitlab/bitbucket_server_import/object_import_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/bitbucket_server_import/object_import_shared_examples.rb
index ec2ae0b8a73..4eae8632467 100644
--- a/spec/support/shared_examples/lib/gitlab/bitbucket_server_import/object_import_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/bitbucket_server_import/object_import_shared_examples.rb
@@ -7,7 +7,7 @@ RSpec.shared_examples Gitlab::BitbucketServerImport::ObjectImporter do
let(:job) { { 'args' => [1, {}, 'key'], 'jid' => 'jid' } }
it 'notifies the waiter' do
- expect(Gitlab::JobWaiter).to receive(:notify).with('key', 'jid')
+ expect(Gitlab::JobWaiter).to receive(:notify).with('key', 'jid', ttl: Gitlab::Import::JOB_WAITER_TTL)
described_class.sidekiq_retries_exhausted_block.call(job, StandardError.new)
end
@@ -23,7 +23,7 @@ RSpec.shared_examples Gitlab::BitbucketServerImport::ObjectImporter do
specify do
allow_next(worker.importer_class).to receive(:execute)
- expect(Gitlab::JobWaiter).to receive(:notify).with(waiter_key, anything)
+ expect(Gitlab::JobWaiter).to receive(:notify).with(waiter_key, anything, ttl: Gitlab::Import::JOB_WAITER_TTL)
worker.perform(project_id, {}, waiter_key)
end
diff --git a/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
index 10f58748698..7cfab5c8295 100644
--- a/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
@@ -35,8 +35,8 @@ RSpec.shared_examples 'common trace features' do
stub_feature_flags(gitlab_ci_archived_trace_consistent_reads: trace.job.project)
end
- it 'calls ::Ci::Build.sticking.unstick_or_continue_sticking' do
- expect(::Ci::Build.sticking).to receive(:unstick_or_continue_sticking)
+ it 'calls ::Ci::Build.sticking.find_caught_up_replica' do
+ expect(::Ci::Build.sticking).to receive(:find_caught_up_replica)
.with(described_class::LOAD_BALANCING_STICKING_NAMESPACE, trace.job.id)
.and_call_original
@@ -49,8 +49,8 @@ RSpec.shared_examples 'common trace features' do
stub_feature_flags(gitlab_ci_archived_trace_consistent_reads: false)
end
- it 'does not call ::Ci::Build.sticking.unstick_or_continue_sticking' do
- expect(::Ci::Build.sticking).not_to receive(:unstick_or_continue_sticking)
+ it 'does not call ::Ci::Build.sticking.find_caught_up_replica' do
+ expect(::Ci::Build.sticking).not_to receive(:find_caught_up_replica)
trace.read { |stream| stream }
end
diff --git a/spec/support/shared_examples/lib/gitlab/database/cte_materialized_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/database/cte_materialized_shared_examples.rb
index df795723874..b80a51a1fc6 100644
--- a/spec/support/shared_examples/lib/gitlab/database/cte_materialized_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/database/cte_materialized_shared_examples.rb
@@ -4,40 +4,17 @@ RSpec.shared_examples 'CTE with MATERIALIZED keyword examples' do
describe 'adding MATERIALIZE to the CTE' do
let(:options) { {} }
- before do
- # Clear the cached value before the test
- Gitlab::Database::AsWithMaterialized.clear_memoization(:materialized_supported)
- end
-
- context 'when PG version is <12' do
- it 'does not add MATERIALIZE keyword' do
- allow(ApplicationRecord.database).to receive(:version).and_return('11.1')
+ it 'adds MATERIALIZE keyword' do
+ allow(ApplicationRecord.database).to receive(:version).and_return('12.1')
- expect(query).to include(expected_query_block_without_materialized)
- end
+ expect(query).to include(expected_query_block_with_materialized)
end
- context 'when PG version is >=12' do
- it 'adds MATERIALIZE keyword' do
- allow(ApplicationRecord.database).to receive(:version).and_return('12.1')
-
- expect(query).to include(expected_query_block_with_materialized)
- end
+ context 'when materialized is disabled' do
+ let(:options) { { materialized: false } }
- context 'when version is higher than 12' do
- it 'adds MATERIALIZE keyword' do
- allow(ApplicationRecord.database).to receive(:version).and_return('15.1')
-
- expect(query).to include(expected_query_block_with_materialized)
- end
- end
-
- context 'when materialized is disabled' do
- let(:options) { { materialized: false } }
-
- it 'does not add MATERIALIZE keyword' do
- expect(query).to include(expected_query_block_without_materialized)
- end
+ it 'does not add MATERIALIZE keyword' do
+ expect(query).to include(expected_query_block_without_materialized)
end
end
end
diff --git a/spec/support/shared_examples/lib/gitlab/import/advance_stage_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/import/advance_stage_shared_examples.rb
new file mode 100644
index 00000000000..0fef5269ab6
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/import/advance_stage_shared_examples.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples Gitlab::Import::AdvanceStage do |factory:|
+ let_it_be(:project) { create(:project) }
+ let_it_be_with_reload(:import_state) { create(factory, :started, project: project, jid: '123') }
+ let(:worker) { described_class.new }
+ let(:next_stage) { :finish }
+
+ describe '#perform', :clean_gitlab_redis_shared_state do
+ context 'when the project no longer exists' do
+ it 'does not perform any work' do
+ expect(worker).not_to receive(:wait_for_jobs)
+
+ worker.perform(non_existing_record_id, { '123' => 2 }, next_stage)
+ end
+ end
+
+ context 'when there are remaining jobs' do
+ it 'reschedules itself' do
+ expect(worker)
+ .to receive(:wait_for_jobs)
+ .with({ '123' => 2 })
+ .and_return({ '123' => 1 })
+
+ expect(described_class)
+ .to receive(:perform_in)
+ .with(described_class::INTERVAL, project.id, { '123' => 1 }, next_stage)
+
+ worker.perform(project.id, { '123' => 2 }, next_stage)
+ end
+
+ context 'when the project import is not running' do
+ before do
+ import_state.update_column(:status, :failed)
+ end
+
+ it 'does not perform any work' do
+ expect(worker).not_to receive(:wait_for_jobs)
+ expect(described_class).not_to receive(:perform_in)
+
+ worker.perform(project.id, { '123' => 2 }, next_stage)
+ end
+
+ it 'clears the JobWaiter cache' do
+ expect(Gitlab::JobWaiter).to receive(:delete_key).with('123')
+
+ worker.perform(project.id, { '123' => 2 }, next_stage)
+ end
+ end
+ end
+
+ context 'when there are no remaining jobs' do
+ before do
+ allow(worker)
+ .to receive(:wait_for_jobs)
+ .with({ '123' => 2 })
+ .and_return({})
+ end
+
+ it 'schedules the next stage' do
+ next_worker = described_class::STAGES[next_stage]
+
+ expect_next_found_instance_of(import_state.class) do |state|
+ expect(state).to receive(:refresh_jid_expiration)
+ end
+
+ expect(next_worker).to receive(:perform_async).with(project.id)
+
+ worker.perform(project.id, { '123' => 2 }, next_stage)
+ end
+
+ it 'raises KeyError when the stage name is invalid' do
+ expect { worker.perform(project.id, { '123' => 2 }, :kittens) }
+ .to raise_error(KeyError)
+ end
+ end
+ end
+
+ describe '#wait_for_jobs' do
+ it 'waits for jobs to complete and returns a new pair of keys to wait for' do
+ waiter1 = instance_double("Gitlab::JobWaiter", jobs_remaining: 1, key: '123')
+ waiter2 = instance_double("Gitlab::JobWaiter", jobs_remaining: 0, key: '456')
+
+ expect(Gitlab::JobWaiter)
+ .to receive(:new)
+ .ordered
+ .with(2, '123')
+ .and_return(waiter1)
+
+ expect(Gitlab::JobWaiter)
+ .to receive(:new)
+ .ordered
+ .with(1, '456')
+ .and_return(waiter2)
+
+ expect(waiter1)
+ .to receive(:wait)
+ .with(described_class::BLOCKING_WAIT_TIME)
+
+ expect(waiter2)
+ .to receive(:wait)
+ .with(described_class::BLOCKING_WAIT_TIME)
+
+ new_waiters = worker.wait_for_jobs({ '123' => 2, '456' => 1 })
+
+ expect(new_waiters).to eq({ '123' => 1 })
+ end
+ end
+end
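A minimal sketch of how a worker spec might include this shared example; the worker class name is illustrative, and the `factory:` argument is assumed to be an import-state factory such as `:import_state`:

  # Hypothetical consumer spec; class and factory names are placeholders.
  RSpec.describe Gitlab::BitbucketImport::AdvanceStageWorker, feature_category: :importers do
    it_behaves_like Gitlab::Import::AdvanceStage, factory: :import_state
  end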
diff --git a/spec/support/shared_examples/lib/gitlab/repo_type_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/repo_type_shared_examples.rb
index c2898513424..025f0d5c7ea 100644
--- a/spec/support/shared_examples/lib/gitlab/repo_type_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/repo_type_shared_examples.rb
@@ -15,7 +15,7 @@ RSpec.shared_examples 'a repo type' do
describe '#repository_for' do
it 'finds the repository for the repo type' do
- expect(described_class.repository_for(expected_repository_resolver)).to eq(expected_repository)
+ expect(described_class.repository_for(expected_container)).to eq(expected_repository)
end
it 'returns nil when container is nil' do
diff --git a/spec/support/shared_examples/lib/gitlab/search_archived_filter_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/search_archived_filter_shared_examples.rb
index 6b296d0e78a..ac72b31d5a4 100644
--- a/spec/support/shared_examples/lib/gitlab/search_archived_filter_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/search_archived_filter_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-RSpec.shared_examples 'search results filtered by archived' do |feature_flag_name|
+RSpec.shared_examples 'search results filtered by archived' do |feature_flag_name, migration_name|
context 'when filter not provided (all behavior)' do
let(:filters) { {} }
@@ -28,16 +28,33 @@ RSpec.shared_examples 'search results filtered by archived' do |feature_flag_nam
end
end
- context "when the #{feature_flag_name} feature flag is disabled" do
- let(:filters) { {} }
+ if feature_flag_name.present?
+ context "when the #{feature_flag_name} feature flag is disabled" do
+ let(:filters) { {} }
+
+ before do
+ stub_feature_flags("#{feature_flag_name}": false)
+ end
- before do
- stub_feature_flags("#{feature_flag_name}": false)
+ it 'returns archived and unarchived results' do
+ expect(results.objects(scope)).to include unarchived_result
+ expect(results.objects(scope)).to include archived_result
+ end
end
+ end
- it 'returns archived and unarchived results' do
- expect(results.objects(scope)).to include unarchived_result
- expect(results.objects(scope)).to include archived_result
+ if migration_name.present?
+ context "when the #{migration_name} is not completed" do
+ let(:filters) { {} }
+
+ before do
+ set_elasticsearch_migration_to(migration_name.to_s, including: false)
+ end
+
+ it 'returns archived and unarchived results' do
+ expect(results.objects(scope)).to include unarchived_result
+ expect(results.objects(scope)).to include archived_result
+ end
end
end
end
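With the extra parameter, an including spec can name the Elasticsearch migration that gates the archived filter as well as (or instead of) a feature flag; a hedged sketch with placeholder argument values:

  # Both arguments are illustrative placeholders, not values from this diff.
  it_behaves_like 'search results filtered by archived',
    'search_issues_hide_archived_projects',
    :backfill_archived_on_issues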
diff --git a/spec/support/shared_examples/lib/gitlab/usage_data_counters/issuable_activity_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/usage_data_counters/issuable_activity_shared_examples.rb
deleted file mode 100644
index 9dc18555340..00000000000
--- a/spec/support/shared_examples/lib/gitlab/usage_data_counters/issuable_activity_shared_examples.rb
+++ /dev/null
@@ -1,87 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_examples 'tracked issuable events' do
- before do
- stub_application_setting(usage_ping_enabled: true)
- end
-
- def count_unique(date_from: Date.today.beginning_of_week, date_to: 1.week.from_now)
- Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(event_names: action, start_date: date_from, end_date: date_to)
- end
-
- specify do
- aggregate_failures do
- expect(track_action({ author: user1 }.merge(track_params))).to be_truthy
- expect(track_action({ author: user1 }.merge(track_params))).to be_truthy
- expect(track_action({ author: user2 }.merge(track_params))).to be_truthy
- expect(count_unique).to eq(2)
- end
- end
-
- it 'does not track edit actions if author is not present' do
- expect(track_action({ author: nil }.merge(track_params))).to be_nil
- end
-end
-
-RSpec.shared_examples 'tracked issuable snowplow and service ping events for given event params' do
- it_behaves_like 'tracked issuable events'
-
- it 'emits snowplow event' do
- track_action({ author: user1 }.merge(track_params))
-
- expect_snowplow_event(**{ category: category, action: event_action, user: user1 }.merge(event_params))
- end
-end
-
-RSpec.shared_examples 'tracked issuable internal event for given event params' do
- it_behaves_like 'tracked issuable events'
-
- it_behaves_like 'internal event tracking' do
- subject(:track_event) { track_action({ author: user1 }.merge(track_params)) }
-
- let(:user) { user1 }
- let(:namespace) { project&.namespace }
- end
-end
-
-RSpec.shared_examples 'tracked issuable internal event with project' do
- it_behaves_like 'tracked issuable internal event for given event params' do
- let(:track_params) { original_params || { project: project } }
- end
-end
-
-RSpec.shared_examples 'tracked issuable snowplow and service ping events with project' do
- it_behaves_like 'tracked issuable snowplow and service ping events for given event params' do
- let(:context) do
- Gitlab::Tracking::ServicePingContext
- .new(data_source: :redis_hll, event: event_property)
- .to_h
- end
-
- let(:track_params) { original_params || { project: project } }
- let(:event_params) { { project: project }.merge(label: event_label, property: event_property, namespace: project.namespace, context: [context]) }
- end
-end
-
-RSpec.shared_examples 'tracked issuable snowplow and service ping events with namespace' do
- it_behaves_like 'tracked issuable snowplow and service ping events for given event params' do
- let(:context) do
- Gitlab::Tracking::ServicePingContext
- .new(data_source: :redis_hll, event: event_property)
- .to_h
- end
-
- let(:track_params) { { namespace: namespace } }
- let(:event_params) { track_params.merge(label: event_label, property: event_property, context: [context]) }
- end
-end
-
-RSpec.shared_examples 'does not track with namespace when feature flag is disabled' do |feature_flag|
- context "when feature flag #{feature_flag} is disabled" do
- it 'does not track action' do
- stub_feature_flags(feature_flag => false)
-
- expect(track_action(author: user1, namespace: namespace)).to be_nil
- end
- end
-end
diff --git a/spec/support/shared_examples/lib/menus_shared_examples.rb b/spec/support/shared_examples/lib/menus_shared_examples.rb
index 0aa98517444..575f48c43e0 100644
--- a/spec/support/shared_examples/lib/menus_shared_examples.rb
+++ b/spec/support/shared_examples/lib/menus_shared_examples.rb
@@ -62,6 +62,13 @@ RSpec.shared_examples_for 'not serializable as super_sidebar_menu_args' do
end
end
+RSpec.shared_examples_for 'a panel instantiable by the anonymous user' do
+ it do
+ context.instance_variable_set(:@current_user, nil)
+ expect(described_class.new(context)).to be_a(described_class)
+ end
+end
+
RSpec.shared_examples_for 'a panel with uniquely identifiable menu items' do
let(:menu_items) do
subject.instance_variable_get(:@menus)
diff --git a/spec/support/shared_examples/lib/sidebars/user_profile/user_profile_menus_shared_examples.rb b/spec/support/shared_examples/lib/sidebars/user_profile/user_profile_menus_shared_examples.rb
index 5e8aebb4f29..6a43b9b4300 100644
--- a/spec/support/shared_examples/lib/sidebars/user_profile/user_profile_menus_shared_examples.rb
+++ b/spec/support/shared_examples/lib/sidebars/user_profile/user_profile_menus_shared_examples.rb
@@ -1,6 +1,10 @@
# frozen_string_literal: true
-RSpec.shared_examples 'User profile menu' do |title:, icon:, active_route:|
+RSpec.shared_examples 'User profile menu' do |
+ icon:, active_route:, avatar_shape: 'rect', expect_avatar: false, entity_id: nil,
+ # A nil title will fall back to user.name.
+ title: nil
+|
let_it_be(:current_user) { build(:user) }
let_it_be(:user) { build(:user) }
@@ -17,13 +21,19 @@ RSpec.shared_examples 'User profile menu' do |title:, icon:, active_route:|
end
it 'renders the correct title' do
- expect(subject.title).to eq title
+ expect(subject.title).to eq(title || user.name)
end
it 'renders the correct icon' do
expect(subject.sprite_icon).to eq icon
end
+ it 'renders the correct avatar' do
+ expect(subject.avatar).to eq(expect_avatar ? user.avatar_url : nil)
+ expect(subject.avatar_shape).to eq(avatar_shape)
+ expect(subject.entity_id).to eq(entity_id)
+ end
+
it 'defines correct active route' do
expect(subject.active_routes[:path]).to be active_route
end
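An illustrative include showing the new keyword arguments; the icon, route, and avatar settings below are placeholders, and omitting `title:` now falls back to the user's name:

  # Placeholder values for illustration only.
  it_behaves_like 'User profile menu',
    icon: 'overview',
    active_route: 'users#show',
    avatar_shape: 'circle',
    expect_avatar: true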
diff --git a/spec/support/shared_examples/loose_foreign_keys/have_loose_foreign_key.rb b/spec/support/shared_examples/loose_foreign_keys/have_loose_foreign_key.rb
index 5f59d43ad19..179bbc8734d 100644
--- a/spec/support/shared_examples/loose_foreign_keys/have_loose_foreign_key.rb
+++ b/spec/support/shared_examples/loose_foreign_keys/have_loose_foreign_key.rb
@@ -55,6 +55,8 @@ RSpec.shared_examples 'it has loose foreign keys' do
end
RSpec.shared_examples 'cleanup by a loose foreign key' do
+ include LooseForeignKeysHelper
+
let(:foreign_key_definition) do
foreign_keys_for_parent = Gitlab::Database::LooseForeignKeys.definitions_by_table[parent.class.table_name]
foreign_keys_for_parent.find { |definition| definition.from_table == model.class.table_name }
@@ -75,9 +77,7 @@ RSpec.shared_examples 'cleanup by a loose foreign key' do
expect(find_model).to be_present
- LooseForeignKeys::DeletedRecord.using_connection(parent.connection) do
- LooseForeignKeys::ProcessDeletedRecordsService.new(connection: parent.connection).execute
- end
+ process_loose_foreign_key_deletions(record: parent)
if foreign_key_definition.on_delete.eql?(:async_delete)
expect(find_model).not_to be_present
diff --git a/spec/support/shared_examples/mailers/notify_shared_examples.rb b/spec/support/shared_examples/mailers/notify_shared_examples.rb
index cf1ab7697ab..987060d73b9 100644
--- a/spec/support/shared_examples/mailers/notify_shared_examples.rb
+++ b/spec/support/shared_examples/mailers/notify_shared_examples.rb
@@ -54,6 +54,14 @@ RSpec.shared_examples 'an email with X-GitLab headers containing IDs' do
expect(subject.header["X-GitLab-#{model.class.name}-IID"]).to eq nil
end
end
+
+ it 'has X-GitLab-*-State header if model has state defined' do
+ if model.respond_to?(:state)
+ is_expected.to have_header "X-GitLab-#{model.class.name}-State", model.state.to_s
+ else
+ expect(subject.header["X-GitLab-#{model.class.name}-State"]).to eq nil
+ end
+ end
end
RSpec.shared_examples 'an email with X-GitLab headers containing project details' do
diff --git a/spec/support/shared_examples/migrations/add_work_item_widget_shared_examples.rb b/spec/support/shared_examples/migrations/add_work_item_widget_shared_examples.rb
index fdb31fa5d9d..37c338a7712 100644
--- a/spec/support/shared_examples/migrations/add_work_item_widget_shared_examples.rb
+++ b/spec/support/shared_examples/migrations/add_work_item_widget_shared_examples.rb
@@ -32,3 +32,110 @@ RSpec.shared_examples 'migration that adds widget to work items definitions' do
end
end
end
+
+# Shared examples for testing migration that adds a single widget to a work item type
+#
+# It expects the following variables
+# - `target_type_enum_value`: Int, enum value for the target work item type, typically defined in the migration
+# as a constant
+# - `target_type`: Symbol, the target type's name
+# - `additional_types`: Hash (optional), name of work item types and their corresponding enum value that are defined
+# at the time the migration was created but are missing from `base_types`.
+# - `widgets_for_type`: Hash, name of the widgets included in the target type with their corresponding enum value
+RSpec.shared_examples 'migration that adds a widget to a work item type' do
+ include MigrationHelpers::WorkItemTypesHelper
+
+ let(:work_item_types) { table(:work_item_types) }
+ let(:work_item_widget_definitions) { table(:work_item_widget_definitions) }
+ let(:additional_base_types) { try(:additional_types) || {} }
+ let(:base_types) do
+ {
+ issue: 0,
+ incident: 1,
+ test_case: 2,
+ requirement: 3,
+ task: 4,
+ objective: 5,
+ key_result: 6,
+ epic: 7
+ }.merge!(additional_base_types)
+ end
+
+ after(:all) do
+ # Make sure base types are recreated after running the migration
+ # because migration specs are not run in a transaction
+ reset_work_item_types
+ end
+
+ before do
+ # Database needs to be in a similar state as when the migration was created
+ reset_db_state_prior_to_migration
+ end
+
+ describe '#up' do
+ it "adds widget to work item type", :aggregate_failures do
+ expect do
+ migrate!
+ end.to change { work_item_widget_definitions.count }.by(1)
+
+ work_item_type = work_item_types.find_by(namespace_id: nil, base_type: target_type_enum_value)
+ created_widget = work_item_widget_definitions.last
+
+ expect(created_widget).to have_attributes(
+ widget_type: described_class::WIDGET_ENUM_VALUE,
+ name: described_class::WIDGET_NAME,
+ work_item_type_id: work_item_type.id
+ )
+ end
+
+ context 'when type does not exist' do
+ it 'skips creating the new widget definition' do
+ work_item_types.where(namespace_id: nil, base_type: base_types[target_type]).delete_all
+
+ expect do
+ migrate!
+ end.to not_change(work_item_widget_definitions, :count)
+ end
+ end
+ end
+
+ describe '#down' do
+ it "removes widget from work item type" do
+ migrate!
+
+ expect { schema_migrate_down! }.to change { work_item_widget_definitions.count }.by(-1)
+ end
+ end
+
+ def reset_db_state_prior_to_migration
+ work_item_types.delete_all
+
+ base_types.each do |type_sym, type_enum|
+ create_work_item_type!(type_sym.to_s.titleize, type_enum)
+ end
+
+ target_type_record = work_item_types.find_by_name(target_type.to_s.titleize)
+
+ widgets = widgets_for_type.map do |widget_name_value, widget_enum_value|
+ {
+ work_item_type_id: target_type_record.id,
+ name: widget_name_value,
+ widget_type: widget_enum_value
+ }
+ end
+
+ # Creating all widgets for the type so the state in the DB is as close as possible to the actual state
+ work_item_widget_definitions.upsert_all(
+ widgets,
+ unique_by: :index_work_item_widget_definitions_on_default_witype_and_name
+ )
+ end
+
+ def create_work_item_type!(type_name, type_enum_value)
+ work_item_types.create!(
+ name: type_name,
+ namespace_id: nil,
+ base_type: type_enum_value
+ )
+ end
+end
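A hypothetical migration spec wiring up the variables listed in the comment above; the migration class, enum values, and widget list are placeholders:

  # Hypothetical migration spec; all names and enum values are placeholders.
  require 'spec_helper'
  require_migration!

  RSpec.describe AddCustomWidgetToTask, feature_category: :team_planning do
    it_behaves_like 'migration that adds a widget to a work item type' do
      let(:target_type_enum_value) { 4 }
      let(:target_type) { :task }
      let(:widgets_for_type) do
        { 'Assignees' => 0, 'Description' => 1, 'Hierarchy' => 4 }
      end
    end
  end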
diff --git a/spec/support/shared_examples/models/concerns/linkable_items_shared_examples.rb b/spec/support/shared_examples/models/concerns/linkable_items_shared_examples.rb
index efd27a051fe..eb37fe66c11 100644
--- a/spec/support/shared_examples/models/concerns/linkable_items_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/linkable_items_shared_examples.rb
@@ -78,5 +78,16 @@ RSpec.shared_examples 'includes LinkableItem concern' do
expect(described_class.for_items(item, item2)).to contain_exactly(target_link)
end
end
+
+ describe '.for_source_and_target' do
+ let_it_be(:item3) { create(:work_item, project: project) }
+ let_it_be(:link1) { create(link_factory, source: item, target: item1) }
+ let_it_be(:link2) { create(link_factory, source: item, target: item2) }
+ let_it_be(:link3) { create(link_factory, source: item, target: item3) }
+
+ it 'includes links for provided source and target' do
+ expect(described_class.for_source_and_target(item, [item1, item2])).to contain_exactly(link1, link2)
+ end
+ end
end
end
diff --git a/spec/support/shared_examples/models/group_shared_examples.rb b/spec/support/shared_examples/models/group_shared_examples.rb
index 9f3359ba4ab..6397e7a87d7 100644
--- a/spec/support/shared_examples/models/group_shared_examples.rb
+++ b/spec/support/shared_examples/models/group_shared_examples.rb
@@ -3,7 +3,6 @@
RSpec.shared_examples 'checks self and root ancestor feature flag' do
let_it_be(:root_group) { create(:group) }
let_it_be(:group) { create(:group, parent: root_group) }
- let_it_be(:project) { create(:project, group: group) }
subject { group.public_send(feature_flag_method) }
@@ -41,3 +40,47 @@ RSpec.shared_examples 'checks self and root ancestor feature flag' do
it { is_expected.to be_truthy }
end
end
+
+RSpec.shared_examples 'checks self (project) and root ancestor feature flag' do
+ let_it_be(:root_group) { create(:group) }
+ let_it_be(:group) { create(:group, parent: root_group) }
+ let_it_be(:project) { create(:project, group: group) }
+
+ subject { project.public_send(feature_flag_method) }
+
+ context 'when FF is enabled for the root group' do
+ before do
+ stub_feature_flags(feature_flag => root_group)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when FF is enabled for the group' do
+ before do
+ stub_feature_flags(feature_flag => group)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when FF is enabled for the project' do
+ before do
+ stub_feature_flags(feature_flag => project)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when FF is disabled globally' do
+ before do
+ stub_feature_flags(feature_flag => false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when FF is enabled globally' do
+ it { is_expected.to be_truthy }
+ end
+end
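An illustrative include for the new project-level variant; the flag and method names are placeholders supplied by the including model spec:

  # Placeholder flag and method names for illustration only.
  it_behaves_like 'checks self (project) and root ancestor feature flag' do
    let(:feature_flag) { :some_project_feature_flag }
    let(:feature_flag_method) { :some_project_feature_flag_enabled? }
  end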
diff --git a/spec/support/shared_examples/models/members_notifications_shared_example.rb b/spec/support/shared_examples/models/members_notifications_shared_example.rb
index 329cb812a08..5c783b5cfa7 100644
--- a/spec/support/shared_examples/models/members_notifications_shared_example.rb
+++ b/spec/support/shared_examples/models/members_notifications_shared_example.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
RSpec.shared_examples 'members notifications' do |entity_type|
+ let_it_be(:user) { create(:user) }
+
let(:notification_service) { double('NotificationService').as_null_object }
before do
@@ -8,7 +10,7 @@ RSpec.shared_examples 'members notifications' do |entity_type|
end
describe "#after_create" do
- let(:member) { build(:"#{entity_type}_member", "#{entity_type}": create(entity_type.to_s)) }
+ let(:member) { build(:"#{entity_type}_member", "#{entity_type}": create(entity_type.to_s), user: user) }
it "sends email to user" do
expect(notification_service).to receive(:"new_#{entity_type}_member").with(member)
@@ -35,7 +37,9 @@ RSpec.shared_examples 'members notifications' do |entity_type|
describe '#after_commit' do
context 'on creation of a member requesting access' do
- let(:member) { build(:"#{entity_type}_member", :access_request, "#{entity_type}": create(entity_type.to_s)) }
+ let(:member) do
+ build(:"#{entity_type}_member", :access_request, "#{entity_type}": create(entity_type.to_s), user: user)
+ end
it "calls NotificationService.new_access_request" do
expect(notification_service).to receive(:new_access_request).with(member)
diff --git a/spec/support/shared_examples/models/users/pages_visits_shared_examples.rb b/spec/support/shared_examples/models/users/pages_visits_shared_examples.rb
new file mode 100644
index 00000000000..0b3e8516d25
--- /dev/null
+++ b/spec/support/shared_examples/models/users/pages_visits_shared_examples.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'namespace visits model' do
+ it { is_expected.to validate_presence_of(:entity_id) }
+ it { is_expected.to validate_presence_of(:user_id) }
+ it { is_expected.to validate_presence_of(:visited_at) }
+
+ describe '#visited_around?' do
+ context 'when the checked time matches a recent visit' do
+ [-15.minutes, 15.minutes].each do |time_diff|
+ it 'returns true' do
+ expect(described_class.visited_around?(entity_id: entity.id, user_id: user.id,
+ time: base_time + time_diff)).to be(true)
+ end
+ end
+ end
+
+ context 'when the checked time does not match a recent visit' do
+ [-16.minutes, 16.minutes].each do |time_diff|
+ it 'returns false' do
+ expect(described_class.visited_around?(entity_id: entity.id, user_id: user.id,
+ time: base_time + time_diff)).to be(false)
+ end
+ end
+ end
+ end
+end
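A hedged consumer sketch; the model class, factory, and the `base_time` anchor used by `visited_around?` are assumptions about the including spec, not taken from this diff:

  # Hypothetical consumer spec; model and factory names are placeholders.
  RSpec.describe Users::ProjectVisit, feature_category: :navigation do
    let_it_be(:base_time) { Time.current }
    let_it_be(:user) { create(:user) }
    let_it_be(:entity) { create(:project) }

    before_all do
      described_class.create!(entity_id: entity.id, user_id: user.id, visited_at: base_time)
    end

    it_behaves_like 'namespace visits model'
  end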
diff --git a/spec/support/shared_examples/redis/redis_shared_examples.rb b/spec/support/shared_examples/redis/redis_shared_examples.rb
index 9224e01b1fe..23ec4a632b7 100644
--- a/spec/support/shared_examples/redis/redis_shared_examples.rb
+++ b/spec/support/shared_examples/redis/redis_shared_examples.rb
@@ -365,6 +365,90 @@ RSpec.shared_examples "redis_shared_examples" do
end
end
+ describe "#parse_client_tls_options" do
+ let(:dummy_certificate) { OpenSSL::X509::Certificate.new }
+ let(:dummy_key) { OpenSSL::PKey::RSA.new }
+ let(:resque_yaml_config_without_tls) { { url: 'redis://localhost:6379' } }
+ let(:resque_yaml_config_with_tls) do
+ {
+ url: 'rediss://localhost:6380',
+ ssl_params: {
+ cert_file: '/tmp/client.crt',
+ key_file: '/tmp/client.key'
+ }
+ }
+ end
+
+ let(:parsed_config_with_tls) do
+ {
+ url: 'rediss://localhost:6380',
+ ssl_params: {
+ cert: dummy_certificate,
+ key: dummy_key
+ }
+ }
+ end
+
+ before do
+ allow(::File).to receive(:exist?).and_call_original
+ allow(::File).to receive(:read).and_call_original
+ end
+
+ context 'when configuration does not have TLS related options' do
+ it 'returns the configuration as-is' do
+ expect(subject.send(:parse_client_tls_options,
+ resque_yaml_config_without_tls)).to eq(resque_yaml_config_without_tls)
+ end
+ end
+
+ context 'when specified certificate file does not exist' do
+ before do
+ allow(::File).to receive(:exist?).with("/tmp/client.crt").and_return(false)
+ allow(::File).to receive(:exist?).with("/tmp/client.key").and_return(true)
+ end
+
+ it 'raises error about missing certificate file' do
+ expect do
+ subject.send(:parse_client_tls_options,
+ resque_yaml_config_with_tls)
+ end.to raise_error(Gitlab::Redis::Wrapper::InvalidPathError,
+ "Certificate file /tmp/client.crt specified in in `resque.yml` does not exist.")
+ end
+ end
+
+ context 'when specified key file does not exist' do
+ before do
+ allow(::File).to receive(:exist?).with("/tmp/client.crt").and_return(true)
+ allow(::File).to receive(:read).with("/tmp/client.crt").and_return("DUMMY_CERTIFICATE")
+ allow(OpenSSL::X509::Certificate).to receive(:new).with("DUMMY_CERTIFICATE").and_return(dummy_certificate)
+ allow(::File).to receive(:exist?).with("/tmp/client.key").and_return(false)
+ end
+
+ it 'raises error about missing key file' do
+ expect do
+ subject.send(:parse_client_tls_options,
+ resque_yaml_config_with_tls)
+ end.to raise_error(Gitlab::Redis::Wrapper::InvalidPathError,
+ "Key file /tmp/client.key specified in in `resque.yml` does not exist.")
+ end
+ end
+
+ context 'when configuration has valid TLS related options' do
+ before do
+ allow(::File).to receive(:exist?).with("/tmp/client.crt").and_return(true)
+ allow(::File).to receive(:exist?).with("/tmp/client.key").and_return(true)
+ allow(::File).to receive(:read).with("/tmp/client.crt").and_return("DUMMY_CERTIFICATE")
+ allow(::File).to receive(:read).with("/tmp/client.key").and_return("DUMMY_KEY")
+ allow(OpenSSL::X509::Certificate).to receive(:new).with("DUMMY_CERTIFICATE").and_return(dummy_certificate)
+ allow(OpenSSL::PKey).to receive(:read).with("DUMMY_KEY").and_return(dummy_key)
+ end
+
+ it "converts cert_file and key_file appropriately" do
+ expect(subject.send(:parse_client_tls_options, resque_yaml_config_with_tls)).to eq(parsed_config_with_tls)
+ end
+ end
+ end
+
describe '#fetch_config' do
before do
FileUtils.mkdir_p(File.join(rails_root, 'config'))
diff --git a/spec/support/shared_examples/requests/access_tokens_controller_shared_examples.rb b/spec/support/shared_examples/requests/access_tokens_controller_shared_examples.rb
index 74dbec063e0..625f16824b4 100644
--- a/spec/support/shared_examples/requests/access_tokens_controller_shared_examples.rb
+++ b/spec/support/shared_examples/requests/access_tokens_controller_shared_examples.rb
@@ -72,7 +72,7 @@ RSpec.shared_examples 'GET access tokens are paginated and ordered' do
first_token = assigns(:active_access_tokens).first.as_json
expect(first_token['name']).to eq("Token1")
- expect(first_token['expires_at']).to eq(expires_1_day_from_now.strftime("%Y-%m-%d"))
+ expect(first_token['expires_at']).to eq(expires_1_day_from_now.iso8601)
end
it "orders tokens on id in case token has same expires_at" do
@@ -82,11 +82,11 @@ RSpec.shared_examples 'GET access tokens are paginated and ordered' do
first_token = assigns(:active_access_tokens).first.as_json
expect(first_token['name']).to eq("Token3")
- expect(first_token['expires_at']).to eq(expires_1_day_from_now.strftime("%Y-%m-%d"))
+ expect(first_token['expires_at']).to eq(expires_1_day_from_now.iso8601)
second_token = assigns(:active_access_tokens).second.as_json
expect(second_token['name']).to eq("Token1")
- expect(second_token['expires_at']).to eq(expires_1_day_from_now.strftime("%Y-%m-%d"))
+ expect(second_token['expires_at']).to eq(expires_1_day_from_now.iso8601)
end
end
diff --git a/spec/support/shared_examples/requests/api/graphql/issue_list_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/issue_list_shared_examples.rb
index 6eceb7c350d..04f340fef37 100644
--- a/spec/support/shared_examples/requests/api/graphql/issue_list_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/graphql/issue_list_shared_examples.rb
@@ -518,6 +518,39 @@ RSpec.shared_examples 'graphql issue list request spec' do
end
end
+ context 'when fetching external participants' do
+ before_all do
+ issue_a.update!(external_author: 'user@example.com')
+ end
+
+ let(:fields) do
+ <<~QUERY
+ nodes {
+ id
+ externalAuthor
+ }
+ QUERY
+ end
+
+ it 'returns the email address' do
+ post_query
+
+ emails = issues_data.pluck('externalAuthor').compact
+ expect(emails).to contain_exactly('user@example.com')
+ end
+
+ context 'when user does not have access to view emails' do
+ let(:current_user) { external_user }
+
+ it 'obfuscates the email address' do
+ post_query
+
+ emails = issues_data.pluck('externalAuthor').compact
+ expect(emails).to contain_exactly("us*****@e*****.c**")
+ end
+ end
+ end
+
context 'when fetching escalation status' do
let_it_be(:escalation_status) { create(:incident_management_issuable_escalation_status, issue: issue_a) }
let_it_be(:incident_type) { WorkItems::Type.default_by_type(:incident) }
diff --git a/spec/support/shared_examples/requests/api/graphql/work_item_list_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/work_item_list_shared_examples.rb
new file mode 100644
index 00000000000..a9c422c8f2d
--- /dev/null
+++ b/spec/support/shared_examples/requests/api/graphql/work_item_list_shared_examples.rb
@@ -0,0 +1,98 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'graphql work item list request spec' do
+ let(:work_item_ids) { graphql_dig_at(work_item_data, :id) }
+
+ it_behaves_like 'a working graphql query' do
+ before do
+ post_query
+ end
+ end
+
+ describe 'filters' do
+ before do
+ post_query
+ end
+
+ context 'when filtering by author username' do
+ let(:author) { create(:author) }
+ let(:authored_work_item) { create(:work_item, author: author, **container_build_params) }
+
+ let(:item_filter_params) { { author_username: authored_work_item.author.username } }
+
+ it 'returns correct results' do
+ expect(work_item_ids).to contain_exactly(authored_work_item.to_global_id.to_s)
+ end
+ end
+
+ context 'when filtering by state' do
+ let_it_be(:opened_work_item) { create(:work_item, :opened, **container_build_params) }
+ let_it_be(:closed_work_item) { create(:work_item, :closed, **container_build_params) }
+
+ context 'when filtering by state opened' do
+ let(:item_filter_params) { { state: :opened } }
+
+ it 'filters by state' do
+ expect(work_item_ids).to include(opened_work_item.to_global_id.to_s)
+ expect(work_item_ids).not_to include(closed_work_item.to_global_id.to_s)
+ end
+ end
+
+ context 'when filtering by state closed' do
+ let(:item_filter_params) { { state: :closed } }
+
+ it 'filters by state' do
+ expect(work_item_ids).not_to include(opened_work_item.to_global_id.to_s)
+ expect(work_item_ids).to include(closed_work_item.to_global_id.to_s)
+ end
+ end
+ end
+
+ context 'when filtering by type' do
+ let_it_be(:issue_work_item) { create(:work_item, :issue, **container_build_params) }
+ let_it_be(:task_work_item) { create(:work_item, :task, **container_build_params) }
+
+ context 'when filtering by issue type' do
+ let(:item_filter_params) { { types: [:ISSUE] } }
+
+ it 'filters by type' do
+ expect(work_item_ids).to include(issue_work_item.to_global_id.to_s)
+ expect(work_item_ids).not_to include(task_work_item.to_global_id.to_s)
+ end
+ end
+
+ context 'when filtering by task type' do
+ let(:item_filter_params) { { types: [:TASK] } }
+
+ it 'filters by type' do
+ expect(work_item_ids).not_to include(issue_work_item.to_global_id.to_s)
+ expect(work_item_ids).to include(task_work_item.to_global_id.to_s)
+ end
+ end
+ end
+
+ context 'when filtering by iid' do
+ let_it_be(:work_item_by_iid) { create(:work_item, **container_build_params) }
+
+ context 'when using the iid filter' do
+ let(:item_filter_params) { { iid: work_item_by_iid.iid.to_s } }
+
+ it 'returns only items by the given iid' do
+ expect(work_item_ids).to contain_exactly(work_item_by_iid.to_global_id.to_s)
+ end
+ end
+
+ context 'when using the iids filter' do
+ let(:item_filter_params) { { iids: [work_item_by_iid.iid.to_s] } }
+
+ it 'returns only items by the given iid' do
+ expect(work_item_ids).to contain_exactly(work_item_by_iid.to_global_id.to_s)
+ end
+ end
+ end
+ end
+
+ def work_item_data
+ graphql_data.dig(*work_item_node_path)
+ end
+end
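The shared example assumes the including request spec supplies `post_query`, `container_build_params`, `item_filter_params`, and `work_item_node_path`; a hedged sketch of that contract (query construction elided, names illustrative):

  # Illustrative contract only; `query`, `project`, and `current_user`
  # come from the including request spec.
  it_behaves_like 'graphql work item list request spec' do
    let(:container_build_params) { { project: project } }
    let(:item_filter_params) { {} }
    let(:work_item_node_path) { %w[project workItems nodes] }

    def post_query
      post_graphql(query, current_user: current_user)
    end
  end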
diff --git a/spec/support/shared_examples/requests/api/ml/mlflow/mlflow_shared_examples.rb b/spec/support/shared_examples/requests/api/ml/mlflow/mlflow_shared_examples.rb
index f2c38d70508..00e50b07909 100644
--- a/spec/support/shared_examples/requests/api/ml/mlflow/mlflow_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/ml/mlflow/mlflow_shared_examples.rb
@@ -8,12 +8,25 @@ RSpec.shared_examples 'MLflow|Not Found - Resource Does Not Exist' do
end
end
-RSpec.shared_examples 'MLflow|Requires api scope' do
+RSpec.shared_examples 'MLflow|Requires api scope and write permission' do
context 'when user has access but token has wrong scope' do
let(:access_token) { tokens[:read] }
it { is_expected.to have_gitlab_http_status(:forbidden) }
end
+
+ context 'when user has access but is not allowed to write' do
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?)
+ .with(current_user, :write_model_experiments, project)
+ .and_return(false)
+ end
+
+ it "is Unauthorized" do
+ is_expected.to have_gitlab_http_status(:unauthorized)
+ end
+ end
end
RSpec.shared_examples 'MLflow|Requires read_api scope' do
diff --git a/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb
index 6b6bf375827..5f043cdd996 100644
--- a/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb
@@ -280,7 +280,10 @@ RSpec.shared_examples 'handling get metadata requests' do |scope: :project|
end
end
- status = :not_found if scope == :group && params[:package_name_type] == :non_existing && !params[:request_forward]
+ if (scope == :group && params[:package_name_type] == :non_existing) &&
+ (!params[:request_forward] || (!params[:auth] && params[:request_forward] && params[:visibility] != :public))
+ status = :not_found
+ end
# Check the error message for :not_found
example_name = 'returning response status with error' if status == :not_found
@@ -873,3 +876,67 @@ RSpec.shared_examples 'rejects invalid package names' do
expect(Gitlab::Json.parse(response.body)).to eq({ 'error' => 'package_name should be a valid file path' })
end
end
+
+RSpec.shared_examples 'handling get metadata requests for packages in multiple projects' do
+ let_it_be(:project2) { create(:project, namespace: namespace) }
+ let_it_be(:package2) do
+ create(:npm_package,
+ project: project2,
+ name: "@#{group.path}/scoped_package",
+ version: '1.2.0')
+ end
+
+ let(:headers) { build_token_auth_header(personal_access_token.token) }
+
+ subject { get(url, headers: headers) }
+
+ before_all do
+ project.update!(visibility: 'private')
+
+ group.add_guest(user)
+ project.add_reporter(user)
+ project2.add_reporter(user)
+ end
+
+ it 'includes all matching package versions in the response' do
+ subject
+
+ expect(json_response['versions'].keys).to match_array([package.version, package2.version])
+ end
+
+ context 'with the feature flag disabled' do
+ before do
+ stub_feature_flags(npm_allow_packages_in_multiple_projects: false)
+ end
+
+ it 'returns matching package versions from only one project' do
+ subject
+
+ expect(json_response['versions'].keys).to match_array([package2.version])
+ end
+ end
+
+ context 'with limited access to the project with the last package version' do
+ before_all do
+ project2.add_guest(user)
+ end
+
+ it 'includes matching package versions from authorized projects in the response' do
+ subject
+
+ expect(json_response['versions'].keys).to contain_exactly(package.version)
+ end
+ end
+
+ context 'with limited access to the project with the first package version' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'includes matching package versions from authorized projects in the response' do
+ subject
+
+ expect(json_response['versions'].keys).to contain_exactly(package2.version)
+ end
+ end
+end
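
The shared example added above expects the including request spec to define url, package, user, group, namespace and personal_access_token. A minimal usage sketch under those assumptions follows; the endpoint path and factory wiring are illustrative only, not part of the commit:

# Hypothetical usage sketch -- the endpoint path and spec wiring are assumptions.
RSpec.describe 'npm package metadata endpoint', feature_category: :package_registry do
  let_it_be(:user) { create(:user) }
  let_it_be(:group) { create(:group) }
  let_it_be(:namespace) { group }
  let_it_be(:project) { create(:project, namespace: namespace) }
  let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
  let_it_be(:package) do
    create(:npm_package, project: project, name: "@#{group.path}/scoped_package", version: '1.1.0')
  end

  let(:url) { api("/packages/npm/#{package.name}") }

  it_behaves_like 'handling get metadata requests for packages in multiple projects'
end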
diff --git a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
index 2e66bae26ba..1be99040ae5 100644
--- a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
@@ -373,14 +373,6 @@ RSpec.shared_examples 'process nuget download content request' do |user_type, st
end
it_behaves_like 'bumping the package last downloaded at field'
-
- context 'when nuget_normalized_version feature flag is disabled' do
- before do
- stub_feature_flags(nuget_normalized_version: false)
- end
-
- it_behaves_like 'returning response status', :not_found
- end
end
end
end
@@ -710,4 +702,38 @@ RSpec.shared_examples 'nuget upload endpoint' do |symbol_package: false|
it_behaves_like 'returning response status', :forbidden
end
+
+ context 'when package duplicates are not allowed' do
+ let(:headers) { basic_auth_header(deploy_token.username, deploy_token.token).merge(workhorse_headers) }
+ let_it_be(:existing_package) { create(:nuget_package, project: project) }
+ let_it_be(:metadata) { { package_name: existing_package.name, package_version: existing_package.version } }
+ let_it_be(:package_settings) do
+ create(:namespace_package_setting, :group, namespace: project.namespace, nuget_duplicates_allowed: false)
+ end
+
+ before do
+ allow_next_instance_of(::Packages::Nuget::MetadataExtractionService) do |instance|
+ allow(instance).to receive(:execute).and_return(ServiceResponse.success(payload: metadata))
+ end
+ end
+
+ it_behaves_like 'returning response status', :conflict unless symbol_package
+ it_behaves_like 'returning response status', :created if symbol_package
+
+ context 'when exception_regex is set' do
+ before do
+ package_settings.update_column(:nuget_duplicate_exception_regex, ".*#{existing_package.name.last(3)}.*")
+ end
+
+ it_behaves_like 'returning response status', :created
+ end
+
+ context 'when nuget_duplicates_option feature flag is disabled' do
+ before do
+ stub_feature_flags(nuget_duplicates_option: false)
+ end
+
+ it_behaves_like 'returning response status', :created
+ end
+ end
end
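
For reference, the 'nuget upload endpoint' shared example is parameterised on symbol_package (defaulting to false), which is why the duplicate-handling context above asserts :conflict for regular packages and :created for symbol packages. An illustrative inclusion, not taken from the commit:

# Hypothetical usage sketch.
it_behaves_like 'nuget upload endpoint'                        # regular package, duplicates blocked => :conflict
it_behaves_like 'nuget upload endpoint', symbol_package: true  # symbol package => :created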
diff --git a/spec/support/shared_examples/requests/api_keyset_pagination_shared_examples.rb b/spec/support/shared_examples/requests/api_keyset_pagination_shared_examples.rb
new file mode 100644
index 00000000000..85db36013dd
--- /dev/null
+++ b/spec/support/shared_examples/requests/api_keyset_pagination_shared_examples.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'an endpoint with keyset pagination' do |invalid_order: 'name', invalid_sort: 'asc'|
+ include KeysetPaginationHelpers
+
+ let(:keyset_params) { { pagination: 'keyset', per_page: 1 } }
+ let(:additional_params) { {} }
+
+ subject do
+ get api_call, params: keyset_params.merge(additional_params)
+ response
+ end
+
+ context 'on making requests with supported ordering structure' do
+ it 'includes keyset url params in the url response' do
+ is_expected.to have_gitlab_http_status(:ok)
+ is_expected.to include_keyset_url_params
+ end
+
+ it 'does not include pagination headers' do
+ is_expected.to have_gitlab_http_status(:ok)
+ is_expected.not_to include_pagination_headers
+ end
+
+ it 'paginates the records correctly', :aggregate_failures do
+ is_expected.to have_gitlab_http_status(:ok)
+ records = json_response
+ expect(records.size).to eq(1)
+ expect(records.first['id']).to eq(first_record.id)
+
+ get api_call, params: pagination_params_from_next_url(response)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ records = Gitlab::Json.parse(response.body)
+ expect(records.size).to eq(1)
+ expect(records.first['id']).to eq(second_record.id)
+ end
+ end
+
+ context 'on making requests with unsupported ordering structure' do
+ let(:additional_params) { { order_by: invalid_order, sort: invalid_sort } }
+
+ if invalid_order
+ it 'returns error', :aggregate_failures do
+ is_expected.to have_gitlab_http_status(:method_not_allowed)
+ expect(json_response['error']).to eq('Keyset pagination is not yet available for this type of request')
+ end
+ end
+ end
+end
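
The new shared example leaves api_call, first_record and second_record to the including API spec, with the records expected in keyset order. A minimal sketch, assuming a projects-style endpoint purely for illustration:

# Hypothetical usage sketch -- the endpoint and factories are assumptions.
it_behaves_like 'an endpoint with keyset pagination' do
  let_it_be(:user) { create(:user) }
  let_it_be(:first_record) { create(:project, :public) }   # expected on the first keyset page
  let_it_be(:second_record) { create(:project, :public) }  # expected via the next-page URL

  let(:api_call) { api('/projects', user) }
end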
diff --git a/spec/support/shared_examples/requests/rack_attack_shared_examples.rb b/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
index dafa324b3c6..48d3e438322 100644
--- a/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
+++ b/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
@@ -148,6 +148,12 @@ RSpec.shared_examples 'rate-limited token requests' do
expect(response).not_to have_gitlab_http_status(:too_many_requests)
end
+ matched = throttle_types[throttle_setting_prefix]
+
+ if request_method == 'GET' && throttle_setting_prefix == 'throttle_protected_paths'
+ matched = 'throttle_authenticated_get_protected_paths_api'
+ end
+
arguments = a_hash_including({
message: 'Rack_Attack',
status: 429,
@@ -155,7 +161,7 @@ RSpec.shared_examples 'rate-limited token requests' do
remote_ip: '127.0.0.1',
request_method: request_method,
path: request_args.first,
- matched: throttle_types[throttle_setting_prefix]
+ matched: matched
}.merge(log_data))
expect(Gitlab::AuthLogger).to receive(:error).with(arguments).once
@@ -166,7 +172,14 @@ RSpec.shared_examples 'rate-limited token requests' do
end
it_behaves_like 'tracking when dry-run mode is set' do
- let(:throttle_name) { throttle_types[throttle_setting_prefix] }
+ let(:throttle_name) do
+ name = throttle_types[throttle_setting_prefix]
+ if request_method == 'GET' && throttle_setting_prefix == 'throttle_protected_paths'
+ name = 'throttle_authenticated_get_protected_paths_api'
+ end
+
+ name
+ end
def do_request
make_request(request_args)
@@ -315,7 +328,13 @@ RSpec.shared_examples 'rate-limited web authenticated requests' do
expect(response).not_to have_gitlab_http_status(:too_many_requests)
end
- arguments = a_hash_including({
+ matched = throttle_types[throttle_setting_prefix]
+
+ if request_method == 'GET' && throttle_setting_prefix == 'throttle_protected_paths'
+ matched = 'throttle_authenticated_get_protected_paths_web'
+ end
+
+ arguments = a_hash_including(
message: 'Rack_Attack',
status: 429,
env: :throttle,
@@ -324,15 +343,22 @@ RSpec.shared_examples 'rate-limited web authenticated requests' do
path: url_that_requires_authentication,
user_id: user.id,
'meta.user' => user.username,
- matched: throttle_types[throttle_setting_prefix]
- })
+ matched: matched
+ )
expect(Gitlab::AuthLogger).to receive(:error).with(arguments).once
expect { request_authenticated_web_url }.not_to exceed_query_limit(control_count)
end
it_behaves_like 'tracking when dry-run mode is set' do
- let(:throttle_name) { throttle_types[throttle_setting_prefix] }
+ let(:throttle_name) do
+ name = throttle_types[throttle_setting_prefix]
+ if request_method == 'GET' && throttle_setting_prefix == 'throttle_protected_paths'
+ name = 'throttle_authenticated_get_protected_paths_web'
+ end
+
+ name
+ end
def do_request
request_authenticated_web_url
diff --git a/spec/support/shared_examples/services/incident_shared_examples.rb b/spec/support/shared_examples/services/incident_shared_examples.rb
index db2b448f567..94467ad53fa 100644
--- a/spec/support/shared_examples/services/incident_shared_examples.rb
+++ b/spec/support/shared_examples/services/incident_shared_examples.rb
@@ -40,7 +40,7 @@ end
RSpec.shared_examples 'incident management label service' do
let_it_be(:project) { create(:project, :private) }
- let_it_be(:user) { User.alert_bot }
+ let_it_be(:user) { Users::Internal.alert_bot }
let(:service) { described_class.new(project, user) }
subject(:execute) { service.execute }
diff --git a/spec/support/shared_examples/services/issuable/issuable_update_service_shared_examples.rb b/spec/support/shared_examples/services/issuable/issuable_update_service_shared_examples.rb
index 3f95d6060ea..9624f7a4450 100644
--- a/spec/support/shared_examples/services/issuable/issuable_update_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/issuable/issuable_update_service_shared_examples.rb
@@ -145,6 +145,77 @@ RSpec.shared_examples 'updating issuable labels' do
end
end
+RSpec.shared_examples 'updating merged MR with locked labels' do
+ context 'when add_label_ids and label_ids are passed' do
+ let(:params) { { label_ids: [label_a.id], add_label_ids: [label_c.id] } }
+
+ it 'replaces unlocked labels with the ones in label_ids and adds those in add_label_ids' do
+ issuable.update!(labels: [label_b, label_unlocked])
+ update_issuable(params)
+
+ expect(issuable.label_ids).to contain_exactly(label_a.id, label_b.id, label_c.id)
+ end
+ end
+
+ context 'when remove_label_ids and label_ids are passed' do
+ let(:params) { { label_ids: [label_a.id, label_b.id, label_c.id], remove_label_ids: [label_a.id] } }
+
+ it 'replaces unlocked labels with the ones in label_ids and does not remove the locked label in remove_label_ids' do
+ issuable.update!(labels: [label_a, label_c, label_unlocked])
+ update_issuable(params)
+
+ expect(issuable.label_ids).to contain_exactly(label_a.id, label_b.id, label_c.id)
+ end
+ end
+
+ context 'when add_label_ids and remove_label_ids are passed' do
+ let(:params) { { add_label_ids: [label_c.id], remove_label_ids: [label_a.id, label_unlocked.id] } }
+
+ before do
+ issuable.update!(labels: [label_a, label_unlocked])
+ update_issuable(params)
+ end
+
+ it 'adds the passed labels' do
+ expect(issuable.label_ids).to include(label_c.id)
+ end
+
+ it 'removes the passed unlocked labels' do
+ expect(issuable.label_ids).to include(label_a.id)
+ expect(issuable.label_ids).not_to include(label_unlocked.id)
+ end
+ end
+
+ context 'when same id is passed as add_label_ids and remove_label_ids' do
+ let(:params) { { add_label_ids: [label_a.id], remove_label_ids: [label_a.id] } }
+
+ context 'for a label assigned to an issue' do
+ it 'does not remove the label' do
+ issuable.update!(labels: [label_a])
+ update_issuable(params)
+
+ expect(issuable.label_ids).to contain_exactly(label_a.id)
+ end
+ end
+
+ context 'for a label not assigned to an issue' do
+ it 'does not add the label' do
+ expect(issuable.label_ids).to be_empty
+ end
+ end
+ end
+
+ context 'when duplicate label titles are given' do
+ let(:params) { { labels: [label_c.title, label_c.title] } }
+
+ it 'assigns the label once' do
+ update_issuable(params)
+
+ expect(issuable.labels).to contain_exactly(label_c)
+ end
+ end
+end
+
RSpec.shared_examples 'keeps issuable labels sorted after update' do
before do
update_issuable(label_ids: [label_b.id])
diff --git a/spec/support/shared_examples/services/issuable_links/destroyable_issuable_links_shared_examples.rb b/spec/support/shared_examples/services/issuable_links/destroyable_issuable_links_shared_examples.rb
index 1532e870dcc..b955b71a6bb 100644
--- a/spec/support/shared_examples/services/issuable_links/destroyable_issuable_links_shared_examples.rb
+++ b/spec/support/shared_examples/services/issuable_links/destroyable_issuable_links_shared_examples.rb
@@ -1,13 +1,7 @@
# frozen_string_literal: true
-RSpec.shared_examples 'a destroyable issuable link' do |required_role: :reporter|
+RSpec.shared_examples 'a destroyable issuable link' do
context 'when successfully removes an issuable link' do
- before do
- [issuable_link.target, issuable_link.source].each do |issuable|
- issuable.resource_parent.try(:"add_#{required_role}", user)
- end
- end
-
it 'removes related issue' do
expect { subject }.to change { issuable_link.class.count }.by(-1)
end
@@ -28,6 +22,9 @@ RSpec.shared_examples 'a destroyable issuable link' do |required_role: :reporter
end
context 'when failing to remove an issuable link' do
+ let_it_be(:non_member) { create(:user) }
+ let(:user) { non_member }
+
it 'does not remove relation' do
expect { subject }.not_to change { issuable_link.class.count }.from(1)
end
diff --git a/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb b/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb
deleted file mode 100644
index 9b2e038a331..00000000000
--- a/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb
+++ /dev/null
@@ -1,193 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_examples 'misconfigured dashboard service response' do |status_code, message = nil|
- it 'returns an appropriate message and status code', :aggregate_failures do
- result = service_call
-
- expect(result.keys).to contain_exactly(:message, :http_status, :status)
- expect(result[:status]).to eq(:error)
- expect(result[:http_status]).to eq(status_code)
- expect(result[:message]).to eq(message) if message
- end
-end
-
-RSpec.shared_examples 'valid dashboard service response for schema' do
- it 'returns a json representation of the dashboard' do
- result = service_call
-
- expect(result.keys).to contain_exactly(:dashboard, :status)
- expect(result[:status]).to eq(:success)
-
- schema_path = Rails.root.join('spec/fixtures', dashboard_schema)
- validator = JSONSchemer.schema(schema_path)
- expect(validator.valid?(result[:dashboard].with_indifferent_access)).to be true
- end
-end
-
-RSpec.shared_examples 'valid dashboard service response' do
- let(:dashboard_schema) { 'lib/gitlab/metrics/dashboard/schemas/dashboard.json' }
-
- it_behaves_like 'valid dashboard service response for schema'
-end
-
-RSpec.shared_examples 'caches the unprocessed dashboard for subsequent calls' do
- specify do
- expect_next_instance_of(::Gitlab::Config::Loader::Yaml) do |loader|
- expect(loader).to receive(:load_raw!).once.and_call_original
- end
-
- described_class.new(*service_params).get_dashboard
- described_class.new(*service_params).get_dashboard
- end
-end
-
-# This spec is applicable for predefined/out-of-the-box dashboard services.
-RSpec.shared_examples 'refreshes cache when dashboard_version is changed' do
- specify do
- allow_next_instance_of(described_class) do |service|
- allow(service).to receive(:dashboard_version).and_return('1', '2')
- end
-
- expect_file_read(Rails.root.join(described_class::DASHBOARD_PATH)).twice.and_call_original
-
- service = described_class.new(*service_params)
-
- service.get_dashboard
- service.get_dashboard
- end
-end
-
-# This spec is applicable for predefined/out-of-the-box dashboard services.
-# This shared_example requires the following variables to be defined:
-# dashboard_path: Relative path to the dashboard, ex: 'config/prometheus/common_metrics.yml'
-# dashboard_version: The version string used in the cache_key.
-RSpec.shared_examples 'dashboard_version contains SHA256 hash of dashboard file content' do
- specify do
- dashboard = File.read(Rails.root.join(dashboard_path))
- expect(dashboard_version).to eq(Digest::SHA256.hexdigest(dashboard))
- end
-end
-
-RSpec.shared_examples 'valid embedded dashboard service response' do
- let(:dashboard_schema) { 'lib/gitlab/metrics/dashboard/schemas/embedded_dashboard.json' }
-
- it_behaves_like 'valid dashboard service response for schema'
-end
-
-RSpec.shared_examples 'raises error for users with insufficient permissions' do
- context 'when the user does not have sufficient access' do
- let(:user) { build(:user) }
-
- it_behaves_like 'misconfigured dashboard service response', :unauthorized
- end
-
- context 'when the user is anonymous' do
- let(:user) { nil }
-
- it_behaves_like 'misconfigured dashboard service response', :unauthorized
- end
-end
-
-RSpec.shared_examples 'valid dashboard cloning process' do |dashboard_template, sequence|
- context "dashboard template: #{dashboard_template}" do
- let(:dashboard) { dashboard_template }
- let(:dashboard_attrs) do
- {
- commit_message: commit_message,
- branch_name: branch,
- start_branch: project.default_branch,
- encoding: 'text',
- file_path: ".gitlab/dashboards/#{file_name}",
- file_content: file_content_hash.to_yaml
- }
- end
-
- it 'delegates commit creation to Files::CreateService', :aggregate_failures do
- service_instance = instance_double(::Files::CreateService)
- allow(::Gitlab::Metrics::Dashboard::Processor).to receive(:new).and_return(double(process: file_content_hash))
- expect(::Files::CreateService).to receive(:new).with(project, user, dashboard_attrs).and_return(service_instance)
- expect(service_instance).to receive(:execute).and_return(status: :success)
-
- service_call
- end
-
- context 'user has defined custom metrics' do
- it 'uses external service to includes them into new file content', :aggregate_failures do
- service_instance = double(::Gitlab::Metrics::Dashboard::Processor)
- expect(::Gitlab::Metrics::Dashboard::Processor).to receive(:new).with(project, file_content_hash, sequence, {}).and_return(service_instance)
- expect(service_instance).to receive(:process).and_return(file_content_hash)
- expect(::Files::CreateService).to receive(:new).with(project, user, dashboard_attrs).and_return(double(execute: { status: :success }))
-
- service_call
- end
- end
- end
-end
-
-RSpec.shared_examples 'misconfigured dashboard service response with stepable' do |status_code, message = nil|
- it 'returns an appropriate message and status code', :aggregate_failures do
- result = service_call
-
- expect(result.keys).to contain_exactly(:message, :http_status, :status, :last_step)
- expect(result[:status]).to eq(:error)
- expect(result[:http_status]).to eq(status_code)
- expect(result[:message]).to eq(message) if message
- end
-end
-
-RSpec.shared_examples 'updates gitlab_metrics_dashboard_processing_time_ms metric' do
- specify :prometheus do
- service_call
- metric = subject.send(:processing_time_metric)
- labels = subject.send(:processing_time_metric_labels)
-
- expect(metric.get(labels)).to be > 0
- end
-end
-
-RSpec.shared_examples '#raw_dashboard raises error if dashboard loading fails' do
- context 'when yaml is too large' do
- before do
- allow_next_instance_of(::Gitlab::Config::Loader::Yaml) do |loader|
- allow(loader).to receive(:load_raw!)
- .and_raise(Gitlab::Config::Loader::Yaml::DataTooLargeError, 'The parsed YAML is too big')
- end
- end
-
- it 'raises error' do
- expect { subject.raw_dashboard }.to raise_error(
- Gitlab::Metrics::Dashboard::Errors::LayoutError,
- 'The parsed YAML is too big'
- )
- end
- end
-
- context 'when yaml loader returns error' do
- before do
- allow_next_instance_of(::Gitlab::Config::Loader::Yaml) do |loader|
- allow(loader).to receive(:load_raw!)
- .and_raise(Gitlab::Config::Loader::FormatError, 'Invalid configuration format')
- end
- end
-
- it 'raises error' do
- expect { subject.raw_dashboard }.to raise_error(
- Gitlab::Metrics::Dashboard::Errors::LayoutError,
- 'Invalid yaml'
- )
- end
- end
-
- context 'when yaml is not a hash' do
- before do
- allow_next_instance_of(::Gitlab::Config::Loader::Yaml) do |loader|
- allow(loader).to receive(:load_raw!)
- .and_raise(Gitlab::Config::Loader::Yaml::NotHashError, 'Invalid configuration format')
- end
- end
-
- it 'returns nil' do
- expect(subject.raw_dashboard).to eq({})
- end
- end
-end
diff --git a/spec/support/shared_examples/services/migrate_to_ghost_user_service_shared_examples.rb b/spec/support/shared_examples/services/migrate_to_ghost_user_service_shared_examples.rb
index e77d73d1c72..621fb99afe5 100644
--- a/spec/support/shared_examples/services/migrate_to_ghost_user_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/migrate_to_ghost_user_service_shared_examples.rb
@@ -38,7 +38,7 @@ RSpec.shared_examples "migrating a deleted user's associated records to the ghos
migrated_record = record_class.find_by_id(record.id)
migrated_fields.each do |field|
- expect(migrated_record.public_send(field)).to eq(User.ghost)
+ expect(migrated_record.public_send(field)).to eq(Users::Internal.ghost)
end
end
@@ -47,7 +47,7 @@ RSpec.shared_examples "migrating a deleted user's associated records to the ghos
migrated_record = record_class.find_by_id(record.id)
- check_user = always_ghost ? User.ghost : user
+ check_user = always_ghost ? Users::Internal.ghost : user
migrated_fields.each do |field|
expect(migrated_record.public_send(field)).to eq(check_user)
diff --git a/spec/support/shared_examples/services/pages_size_limit_shared_examples.rb b/spec/support/shared_examples/services/pages_size_limit_shared_examples.rb
deleted file mode 100644
index d9e906ebb75..00000000000
--- a/spec/support/shared_examples/services/pages_size_limit_shared_examples.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_examples 'pages size limit is' do |size_limit|
- context "when size is below the limit" do
- before do
- allow(metadata).to receive(:total_size).and_return(size_limit - 1.megabyte)
- allow(metadata).to receive(:entries).and_return([])
- end
-
- it 'updates pages correctly' do
- subject.execute
-
- expect(deploy_status.description).not_to be_present
- expect(project.pages_metadatum).to be_deployed
- end
- end
-
- context "when size is above the limit" do
- before do
- allow(metadata).to receive(:total_size).and_return(size_limit + 1.megabyte)
- allow(metadata).to receive(:entries).and_return([])
- end
-
- it 'limits the maximum size of gitlab pages' do
- subject.execute
-
- expect(deploy_status.description)
- .to match(/artifacts for pages are too large/)
- expect(deploy_status).to be_script_failure
- end
- end
-end
diff --git a/spec/support/shared_examples/services/protected_branches_shared_examples.rb b/spec/support/shared_examples/services/protected_branches_shared_examples.rb
new file mode 100644
index 00000000000..ce607a6b956
--- /dev/null
+++ b/spec/support/shared_examples/services/protected_branches_shared_examples.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'with scan result policy blocking protected branches' do
+ before do
+ create(
+ :scan_result_policy_read,
+ :blocking_protected_branches,
+ project: project)
+
+ stub_licensed_features(security_orchestration_policies: true)
+ end
+end
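
A sketch of pulling the new shared context into a spec; the described service and the project let are assumptions made for illustration:

# Hypothetical usage sketch -- the described class is illustrative only.
RSpec.describe ProtectedBranches::DestroyService, feature_category: :source_code_management do
  let_it_be(:project) { create(:project, :repository) }

  include_context 'with scan result policy blocking protected branches'

  # examples asserting that the service refuses to modify protected branches go here
end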
diff --git a/spec/support/shared_examples/services/users/build_service_shared_examples.rb b/spec/support/shared_examples/services/users/build_service_shared_examples.rb
index e448f2f874b..45ebc837e6d 100644
--- a/spec/support/shared_examples/services/users/build_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/users/build_service_shared_examples.rb
@@ -33,57 +33,6 @@ RSpec.shared_examples 'common user build items' do
end
RSpec.shared_examples_for 'current user not admin build items' do
- using RSpec::Parameterized::TableSyntax
-
- context 'with "user_default_external" application setting' do
- where(:user_default_external, :external, :email, :user_default_internal_regex, :result) do
- true | nil | 'fl@example.com' | nil | true
- true | true | 'fl@example.com' | nil | true
- true | false | 'fl@example.com' | nil | true # admin difference
-
- true | nil | 'fl@example.com' | '' | true
- true | true | 'fl@example.com' | '' | true
- true | false | 'fl@example.com' | '' | true # admin difference
-
- true | nil | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
- true | true | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false # admin difference
- true | false | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
-
- true | nil | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true
- true | true | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true
- true | false | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true # admin difference
-
- false | nil | 'fl@example.com' | nil | false
- false | true | 'fl@example.com' | nil | false # admin difference
- false | false | 'fl@example.com' | nil | false
-
- false | nil | 'fl@example.com' | '' | false
- false | true | 'fl@example.com' | '' | false # admin difference
- false | false | 'fl@example.com' | '' | false
-
- false | nil | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
- false | true | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false # admin difference
- false | false | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
-
- false | nil | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | false
- false | true | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | false # admin difference
- false | false | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | false
- end
-
- with_them do
- before do
- stub_application_setting(user_default_external: user_default_external)
- stub_application_setting(user_default_internal_regex: user_default_internal_regex)
-
- params.merge!({ external: external, email: email }.compact)
- end
-
- it 'sets the value of Gitlab::CurrentSettings.user_default_external' do
- expect(user.external).to eq(result)
- end
- end
- end
-
context 'when "email_confirmation_setting" application setting is set to `hard`' do
before do
stub_application_setting_enum('email_confirmation_setting', 'hard')
diff --git a/spec/support/shared_examples/users/migrate_records_to_ghost_user_service_shared_examples.rb b/spec/support/shared_examples/users/migrate_records_to_ghost_user_service_shared_examples.rb
index eb03f0888b9..5d24aa10453 100644
--- a/spec/support/shared_examples/users/migrate_records_to_ghost_user_service_shared_examples.rb
+++ b/spec/support/shared_examples/users/migrate_records_to_ghost_user_service_shared_examples.rb
@@ -32,7 +32,7 @@ RSpec.shared_examples 'migrating records to the ghost user' do |record_class, fi
migrated_record = record_class.find_by_id(record.id)
migrated_fields.each do |field|
- expect(migrated_record.public_send(field)).to eq(User.ghost)
+ expect(migrated_record.public_send(field)).to eq(Users::Internal.ghost)
end
end
end
diff --git a/spec/support/shared_examples/users/pages_visits_shared_examples.rb b/spec/support/shared_examples/users/pages_visits_shared_examples.rb
new file mode 100644
index 00000000000..1a613fa0aed
--- /dev/null
+++ b/spec/support/shared_examples/users/pages_visits_shared_examples.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'namespace visits tracking worker' do
+ let_it_be(:base_time) { DateTime.now }
+
+ context 'when params are provided' do
+ before do
+ worker.perform(entity_type, entity.id, user.id, base_time)
+ end
+
+ include_examples 'an idempotent worker' do
+ let(:job_args) { [entity_type, entity.id, user.id, base_time] }
+
+ it 'tracks the entity visit' do
+ latest_visit = model.last
+
+ expect(model.count).to be(1)
+ expect(latest_visit[:entity_id]).to be(entity.id)
+ expect(latest_visit.user_id).to be(user.id)
+ end
+ end
+
+ context 'when a visit occurs within 15 minutes of a previously tracked one' do
+ [-15.minutes, 15.minutes].each do |time_diff|
+ it 'does not track the visit' do
+ worker.perform(entity_type, entity.id, user.id, base_time + time_diff)
+
+ expect(model.count).to be(1)
+ end
+ end
+ end
+
+ context 'when a visit occurs more than 15 minutes away from a previously tracked one' do
+ [-16.minutes, 16.minutes].each do |time_diff|
+ it 'tracks the visit' do
+ worker.perform(entity_type, entity.id, user.id, base_time + time_diff)
+
+ expect(model.count).to be > 1
+ end
+ end
+ end
+ end
+
+ context 'when user is missing' do
+ before do
+ worker.perform(entity_type, entity.id, nil, base_time)
+ end
+
+ it 'does not do anything' do
+ expect(model.count).to be(0)
+ end
+ end
+
+ context 'when entity is missing' do
+ before do
+ worker.perform(entity_type, nil, user.id, base_time)
+ end
+
+ it 'does not do anything' do
+ expect(model.count).to be(0)
+ end
+ end
+end
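
The 'namespace visits tracking worker' example relies on the including spec to provide worker, entity_type, entity, user and model. A minimal sketch under assumed names; the worker class and visit model shown here are placeholders:

# Hypothetical usage sketch -- class and model names are assumptions.
RSpec.describe Users::TrackNamespaceVisitsWorker, feature_category: :navigation do
  let_it_be(:user) { create(:user) }
  let_it_be(:entity) { create(:group) }

  let(:worker) { described_class.new }
  let(:entity_type) { 'groups' }
  let(:model) { Users::GroupVisit }

  it_behaves_like 'namespace visits tracking worker'
end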
diff --git a/spec/support/shared_examples/workers/background_migration_worker_shared_examples.rb b/spec/support/shared_examples/workers/background_migration_worker_shared_examples.rb
index 8ecb04bfdd6..1ea5eb6fd2e 100644
--- a/spec/support/shared_examples/workers/background_migration_worker_shared_examples.rb
+++ b/spec/support/shared_examples/workers/background_migration_worker_shared_examples.rb
@@ -1,6 +1,10 @@
# frozen_string_literal: true
RSpec.shared_examples 'it runs background migration jobs' do |tracking_database|
+ before do
+ stub_feature_flags(disallow_database_ddl_feature_flags: false)
+ end
+
describe 'defining the job attributes' do
it 'defines the data_consistency as always' do
expect(described_class.get_data_consistency).to eq(:always)
@@ -74,6 +78,38 @@ RSpec.shared_examples 'it runs background migration jobs' do |tracking_database|
end
end
+ context 'when disallow_database_ddl_feature_flags feature flag is enabled' do
+ before do
+ stub_feature_flags(disallow_database_ddl_feature_flags: true)
+ end
+
+ it 'does not perform the job, reschedules it in the future, and logs a message' do
+ expect(worker).not_to receive(:perform_with_connection)
+
+ expect(Sidekiq.logger).to receive(:info) do |payload|
+ expect(payload[:class]).to eq(described_class.name)
+ expect(payload[:database]).to eq(tracking_database)
+ expect(payload[:message]).to match(/skipping execution, migration rescheduled/)
+ end
+
+ lease_attempts = 3
+ delay = described_class::BACKGROUND_MIGRATIONS_DELAY
+ job_args = [10, 20]
+
+ freeze_time do
+ worker.perform('Foo', job_args, lease_attempts)
+
+ job = described_class.jobs.find { |job| job['args'] == ['Foo', job_args, lease_attempts] }
+ expect(job).to be, "Expected the job to be rescheduled with (#{job_args}, #{lease_attempts}), but it was not."
+
+ expected_time = delay.to_i + Time.now.to_i
+ expect(job['at']).to eq(expected_time),
+ "Expected the job to be rescheduled in #{expected_time} seconds, " \
+ "but it was rescheduled in #{job['at']} seconds."
+ end
+ end
+ end
+
context 'when execute_background_migrations feature flag is enabled' do
before do
stub_feature_flags(execute_background_migrations: true)
diff --git a/spec/support/shared_examples/workers/batched_background_migration_execution_worker_shared_example.rb b/spec/support/shared_examples/workers/batched_background_migration_execution_worker_shared_example.rb
index 8fdd59d1d8c..cf488a4d753 100644
--- a/spec/support/shared_examples/workers/batched_background_migration_execution_worker_shared_example.rb
+++ b/spec/support/shared_examples/workers/batched_background_migration_execution_worker_shared_example.rb
@@ -3,6 +3,10 @@
RSpec.shared_examples 'batched background migrations execution worker' do
include ExclusiveLeaseHelpers
+ before do
+ stub_feature_flags(disallow_database_ddl_feature_flags: false)
+ end
+
it 'is a limited capacity worker' do
expect(described_class.new).to be_a(LimitedCapacity::Worker)
end
@@ -100,6 +104,23 @@ RSpec.shared_examples 'batched background migrations execution worker' do
end
end
+ context 'when the disallow_database_ddl_feature_flags feature flag is enabled' do
+ let(:migration) do
+ create(:batched_background_migration, :active, interval: job_interval, table_name: table_name)
+ end
+
+ before do
+ stub_feature_flags(disallow_database_ddl_feature_flags: true)
+ end
+
+ it 'does nothing' do
+ expect(Gitlab::Database::BackgroundMigration::BatchedMigration).not_to receive(:find_executable)
+ expect(worker).not_to receive(:run_migration_job)
+
+ worker.perform_work(database_name, migration.id)
+ end
+ end
+
context 'when the feature flag is enabled' do
before do
stub_feature_flags(execute_batched_migrations_on_schedule: true)
diff --git a/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb b/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
index e7385f9abb6..003b8d07819 100644
--- a/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
+++ b/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
@@ -3,6 +3,10 @@
RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_database, table_name|
include ExclusiveLeaseHelpers
+ before do
+ stub_feature_flags(disallow_database_ddl_feature_flags: false)
+ end
+
describe 'defining the job attributes' do
it 'defines the data_consistency as always' do
expect(described_class.get_data_consistency).to eq(:always)
@@ -51,6 +55,12 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
expect(described_class.enabled?).to be_falsey
end
+
+ it 'returns false when disallow_database_ddl_feature_flags feature flag is enabled' do
+ stub_feature_flags(disallow_database_ddl_feature_flags: true)
+
+ expect(described_class.enabled?).to be_falsey
+ end
end
describe '#perform' do
@@ -116,6 +126,18 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
end
end
+ context 'when the disallow_database_ddl_feature_flags feature flag is enabled' do
+ before do
+ stub_feature_flags(disallow_database_ddl_feature_flags: true)
+ end
+
+ it 'does nothing' do
+ expect(worker).not_to receive(:queue_migrations_for_execution)
+
+ worker.perform
+ end
+ end
+
context 'when the execute_batched_migrations_on_schedule feature flag is enabled' do
let(:base_model) { Gitlab::Database.database_base_models[tracking_database] }
let(:connection) { base_model.connection }
diff --git a/spec/support/sidekiq.rb b/spec/support/sidekiq.rb
index dc475b92c0b..b25f39c5e74 100644
--- a/spec/support/sidekiq.rb
+++ b/spec/support/sidekiq.rb
@@ -5,9 +5,11 @@ RSpec.configure do |config|
# We need to cleanup the queues before running jobs in specs because the
# middleware might have written to redis
redis_queues_cleanup!
+ redis_queues_metadata_cleanup!
Sidekiq::Testing.inline!(&block)
ensure
redis_queues_cleanup!
+ redis_queues_metadata_cleanup!
end
# As we'll review the examples with this tag, we should either:
diff --git a/spec/support_specs/capybara_wait_for_all_requests_spec.rb b/spec/support_specs/capybara_wait_for_all_requests_spec.rb
index ddd4be6c644..64b89ba0b56 100644
--- a/spec/support_specs/capybara_wait_for_all_requests_spec.rb
+++ b/spec/support_specs/capybara_wait_for_all_requests_spec.rb
@@ -23,37 +23,37 @@ RSpec.describe 'capybara_wait_for_all_requests', feature_category: :tooling do #
end
end
- context 'for Capybara::Node::Actions::WaitForAllRequestsAfterClickButton' do
+ context 'for Capybara::Node::Actions::WaitForRequestsAfterClickButton' do
let(:node) do
Class.new do
def click_button(locator = nil, **_options)
locator
end
- prepend Capybara::Node::Actions::WaitForAllRequestsAfterClickButton
+ prepend Capybara::Node::Actions::WaitForRequestsAfterClickButton
end.new
end
- it 'waits for all requests after a click button' do
- expect(node).to receive(:wait_for_all_requests)
+ it 'waits for requests after a click button' do
+ expect(node).to receive(:wait_for_requests)
node.click_button
end
end
- context 'for Capybara::Node::Actions::WaitForAllRequestsAfterClickLink' do
+ context 'for Capybara::Node::Actions::WaitForRequestsAfterClickLink' do
let(:node) do
Class.new do
def click_link(locator = nil, **_options)
locator
end
- prepend Capybara::Node::Actions::WaitForAllRequestsAfterClickLink
+ prepend Capybara::Node::Actions::WaitForRequestsAfterClickLink
end.new
end
- it 'waits for all requests after a click link' do
- expect(node).to receive(:wait_for_all_requests)
+ it 'waits for requests after a click link' do
+ expect(node).to receive(:wait_for_requests)
node.click_link
end
diff --git a/spec/support_specs/database/duplicate_indexes_spec.rb b/spec/support_specs/database/duplicate_indexes_spec.rb
new file mode 100644
index 00000000000..bbcb80f0593
--- /dev/null
+++ b/spec/support_specs/database/duplicate_indexes_spec.rb
@@ -0,0 +1,108 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Database::DuplicateIndexes, feature_category: :database do
+ index_class = ActiveRecord::ConnectionAdapters::IndexDefinition
+ let(:default_index_options) { { using: :btree, orders: {}, unique: false, opclasses: {}, where: nil } }
+
+ let(:table_name) { 'foobar' }
+ let(:index1) { instance_double(index_class, default_index_options.merge(name: 'index1', columns: %w[user_id])) }
+ let(:index1_copy) { instance_double(index_class, default_index_options.merge(name: 'index1b', columns: %w[user_id])) }
+ let(:index2) { instance_double(index_class, default_index_options.merge(name: 'index2', columns: %w[project_id])) }
+ let(:index3) do
+ instance_double(index_class, default_index_options.merge(name: 'index3', columns: %w[user_id project_id]))
+ end
+
+ let(:index3_inverse) do
+ instance_double(index_class, default_index_options.merge(name: 'index3_inverse', columns: %w[project_id user_id]))
+ end
+
+ let(:index1_unique) do
+ instance_double(index_class, default_index_options.merge(name: 'index1_unique', columns: %w[user_id], unique: true))
+ end
+
+ let(:index1_desc) do
+ instance_double(
+ index_class,
+ default_index_options.merge(name: 'index1', columns: %w[user_id], orders: { user_id: 'desc' })
+ )
+ end
+
+ let(:index3_with_where) do
+ instance_double(
+ index_class,
+ default_index_options.merge(name: 'index3_with_where', columns: %w[user_id project_id], where: "id > 100")
+ )
+ end
+
+ subject(:duplicate_indexes) do
+ described_class.new(table_name, indexes).duplicate_indexes
+ end
+
+ context 'when there are no duplicate indexes' do
+ let(:indexes) { [index1, index2] }
+
+ it { expect(duplicate_indexes).to be_empty }
+ end
+
+ context 'when overlapping indexes' do
+ let(:indexes) { [index1, index3] }
+
+ it 'detects a duplicate index between index1 and index3' do
+ expected_duplicate_indexes = { index_struct(index3) => [index_struct(index1)] }
+
+ expect(duplicate_indexes).to eq(expected_duplicate_indexes)
+ end
+ end
+
+ context 'when the indexes have the inverse order of columns' do
+ let(:indexes) { [index3, index3_inverse] }
+
+ it 'does not detect duplicate indexes between index3 and index3_inverse' do
+ expect(duplicate_indexes).to eq({})
+ end
+ end
+
+ # For now we ignore other indexes that are UNIQUE and have a matching columns subset of
+ # the btree_index columns, as UNIQUE indexes are still needed to enforce uniqueness
+ # constraints on subset of the columns.
+ context 'when the index with matching sub-columns is unique' do
+ let(:indexes) { [index3, index1_unique] }
+
+ it 'does not detect duplicate indexes between index3 and index1_unique' do
+ expect(duplicate_indexes).to eq({})
+ end
+ end
+
+ context 'when one of the indexes is a conditional index' do
+ let(:indexes) { [index3, index3_with_where] }
+
+ it 'does not detect duplicate indexes between index3 and index3_with_where' do
+ expect(duplicate_indexes).to eq({})
+ end
+ end
+
+ context 'when identical indexes' do
+ let(:indexes) { [index1, index1_copy] }
+
+ it 'detects duplicate indexes between index1 and index1_copy' do
+ expected_duplicate_indexes = {
+ index_struct(index1) => [index_struct(index1_copy)],
+ index_struct(index1_copy) => [index_struct(index1)]
+ }
+
+ expect(duplicate_indexes).to eq(expected_duplicate_indexes)
+ end
+ end
+
+ context 'when indexes have the same columns but a different sort order' do
+ let(:indexes) { [index1, index1_desc] }
+
+ it { expect(duplicate_indexes).to be_empty }
+ end
+
+ def index_struct(index)
+ Database::DuplicateIndexes.btree_index_struct(index)
+ end
+end
diff --git a/spec/support_specs/database/multiple_databases_helpers_spec.rb b/spec/support_specs/database/multiple_databases_helpers_spec.rb
index 2577b64f214..0191a759605 100644
--- a/spec/support_specs/database/multiple_databases_helpers_spec.rb
+++ b/spec/support_specs/database/multiple_databases_helpers_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'Database::MultipleDatabasesHelpers' do
let(:query) do
<<~SQL
- WITH cte AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported} (SELECT 1) SELECT 1;
+ WITH cte AS MATERIALIZED (SELECT 1) SELECT 1;
SQL
end
diff --git a/spec/support_specs/helpers/redis_commands/recorder_spec.rb b/spec/support_specs/helpers/redis_commands/recorder_spec.rb
index f41624d8dcc..ef46db5e29e 100644
--- a/spec/support_specs/helpers/redis_commands/recorder_spec.rb
+++ b/spec/support_specs/helpers/redis_commands/recorder_spec.rb
@@ -8,12 +8,6 @@ RSpec.describe RedisCommands::Recorder, :use_clean_rails_redis_caching do
let(:cache) { Rails.cache }
let(:pattern) { nil }
- before do
- # do not need to test for positive case since this is testing
- # a spec support class
- stub_feature_flags(use_primary_and_secondary_stores_for_cache: false)
- end
-
describe '#initialize' do
context 'with a block' do
it 'records Redis commands' do
diff --git a/spec/tasks/gitlab/audit_event_types/check_docs_task_spec.rb b/spec/tasks/gitlab/audit_event_types/check_docs_task_spec.rb
index 5dd7599696b..b492289e99e 100644
--- a/spec/tasks/gitlab/audit_event_types/check_docs_task_spec.rb
+++ b/spec/tasks/gitlab/audit_event_types/check_docs_task_spec.rb
@@ -1,13 +1,13 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'rake_helper'
require_relative '../../../../lib/tasks/gitlab/audit_event_types/check_docs_task'
require_relative '../../../../lib/tasks/gitlab/audit_event_types/compile_docs_task'
RSpec.describe Tasks::Gitlab::AuditEventTypes::CheckDocsTask, feature_category: :audit_events do
- let_it_be(:docs_dir) { Rails.root.join("tmp/tests/doc/administration/audit_event_streaming") }
- let_it_be(:docs_path) { Rails.root.join(docs_dir, 'audit_event_types.md') }
- let_it_be(:template_erb_path) { Rails.root.join("tooling/audit_events/docs/templates/audit_event_types.md.erb") }
+ let(:docs_dir) { Rails.root.join("tmp/tests/doc/administration/audit_event_streaming") }
+ let(:docs_path) { Rails.root.join(docs_dir, 'audit_event_types.md') }
+ let(:template_erb_path) { Rails.root.join("tooling/audit_events/docs/templates/audit_event_types.md.erb") }
subject(:check_docs_task) { described_class.new(docs_dir, docs_path, template_erb_path) }
@@ -37,7 +37,7 @@ RSpec.describe Tasks::Gitlab::AuditEventTypes::CheckDocsTask, feature_category:
end
context 'when an existing audit event type is removed' do
- let_it_be(:updated_definition) { Gitlab::Audit::Type::Definition.definitions.except(:feature_flag_created) }
+ let(:updated_definition) { Gitlab::Audit::Type::Definition.definitions.except(:feature_flag_created) }
it 'raises an error' do
expect(Gitlab::Audit::Type::Definition).to receive(:definitions).and_return(updated_definition)
@@ -50,7 +50,7 @@ RSpec.describe Tasks::Gitlab::AuditEventTypes::CheckDocsTask, feature_category:
end
context 'when an existing audit event type is updated' do
- let_it_be(:updated_definition) { Gitlab::Audit::Type::Definition.definitions }
+ let(:updated_definition) { Gitlab::Audit::Type::Definition.definitions }
it 'raises an error' do
updated_definition[:feature_flag_created].attributes[:streamed] = false
diff --git a/spec/tasks/gitlab/audit_event_types/compile_docs_task_spec.rb b/spec/tasks/gitlab/audit_event_types/compile_docs_task_spec.rb
index a881d17d3b8..0ee85b1283b 100644
--- a/spec/tasks/gitlab/audit_event_types/compile_docs_task_spec.rb
+++ b/spec/tasks/gitlab/audit_event_types/compile_docs_task_spec.rb
@@ -1,12 +1,12 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'rake_helper'
require_relative '../../../../lib/tasks/gitlab/audit_event_types/compile_docs_task'
RSpec.describe Tasks::Gitlab::AuditEventTypes::CompileDocsTask, feature_category: :audit_events do
- let_it_be(:docs_dir) { Rails.root.join("tmp/tests/doc/administration/audit_event_streaming") }
- let_it_be(:docs_path) { Rails.root.join(docs_dir, 'audit_event_types.md') }
- let_it_be(:template_erb_path) { Rails.root.join("tooling/audit_events/docs/templates/audit_event_types.md.erb") }
+ let(:docs_dir) { Rails.root.join("tmp/tests/doc/administration/audit_event_streaming") }
+ let(:docs_path) { Rails.root.join(docs_dir, 'audit_event_types.md') }
+ let(:template_erb_path) { Rails.root.join("tooling/audit_events/docs/templates/audit_event_types.md.erb") }
subject(:compile_docs_task) { described_class.new(docs_dir, docs_path, template_erb_path) }
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index 04634af12a8..fda27d5827f 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
%w[db repositories]
end
- before(:all) do # rubocop:disable RSpec/BeforeAll
+ before(:all) do
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/gitlab/backup'
Rake.application.rake_require 'tasks/gitlab/shell'
@@ -222,7 +222,7 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
# We only need a backup of the repositories for this test
stub_env('SKIP', 'db,uploads,builds,artifacts,lfs,terraform_state,registry')
- create(:project, :repository)
+ create(:project_with_design, :repository)
end
it 'removes stale data' do
@@ -241,7 +241,7 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
end
context 'when the backup is restored' do
- let!(:included_project) { create(:project, :repository) }
+ let!(:included_project) { create(:project_with_design, :repository) }
let!(:original_checksum) { included_project.repository.checksum }
before do
@@ -286,7 +286,7 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
allow(Ci::ApplicationRecord.connection).to receive(:reconnect!)
end
- let!(:project) { create(:project, :repository) }
+ let!(:project) { create(:project_with_design, :repository) }
context 'with specific backup tasks' do
before do
@@ -463,9 +463,9 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
shared_examples 'includes repositories in all repository storages' do
specify :aggregate_failures do
- project_a = create(:project, :repository)
+ project_a = create(:project_with_design, :repository)
project_snippet_a = create(:project_snippet, :repository, project: project_a, author: project_a.first_owner)
- project_b = create(:project, :repository, repository_storage: second_storage_name)
+ project_b = create(:project_with_design, :repository, repository_storage: second_storage_name)
project_snippet_b = create(
:project_snippet,
:repository,
@@ -474,7 +474,6 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
repository_storage: second_storage_name
)
create(:wiki_page, container: project_a)
- create(:design, :with_file, issue: create(:issue, project: project_a))
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
@@ -517,9 +516,9 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
end
it 'includes repositories in default repository storage', :aggregate_failures do
- project_a = create(:project, :repository)
+ project_a = create(:project_with_design, :repository)
project_snippet_a = create(:project_snippet, :repository, project: project_a, author: project_a.first_owner)
- project_b = create(:project, :repository, repository_storage: second_storage_name)
+ project_b = create(:project_with_design, :repository, repository_storage: second_storage_name)
project_snippet_b = create(
:project_snippet,
:repository,
@@ -528,7 +527,6 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
repository_storage: second_storage_name
)
create(:wiki_page, container: project_a)
- create(:design, :with_file, issue: create(:issue, project: project_a))
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
@@ -564,7 +562,7 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
# We only need a backup of the repositories for this test
stub_env('SKIP', 'db,uploads,builds,artifacts,lfs,terraform_state,registry')
- create(:project, :repository)
+ create(:project_with_design, :repository)
end
it 'passes through concurrency environment variables' do
@@ -602,7 +600,7 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
before do
stub_env('SKIP', 'an-unknown-type,repositories,uploads,anotherunknowntype')
- create(:project, :repository)
+ create(:project_with_design, :repository)
end
it "does not contain repositories and uploads" do
@@ -660,7 +658,7 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
before do
stub_env('SKIP', 'tar')
- create(:project, :repository)
+ create(:project_with_design, :repository)
end
it 'created files with backup content and no tar archive' do
diff --git a/spec/tasks/gitlab/ci_secure_files/check_rake_spec.rb b/spec/tasks/gitlab/ci_secure_files/check_rake_spec.rb
index b3bd6be8fde..fc9aae3597e 100644
--- a/spec/tasks/gitlab/ci_secure_files/check_rake_spec.rb
+++ b/spec/tasks/gitlab/ci_secure_files/check_rake_spec.rb
@@ -4,7 +4,7 @@ require 'rake_helper'
RSpec.describe 'gitlab:ci_secure_files', factory_default: :keep, feature_category: :mobile_devops do
describe 'check' do
- let_it_be(:project) { create_default(:project).freeze }
+ let!(:project) { create_default(:project).freeze }
let!(:secure_file) { create(:ci_secure_file) }
before do
diff --git a/spec/tasks/gitlab/ci_secure_files/migrate_rake_spec.rb b/spec/tasks/gitlab/ci_secure_files/migrate_rake_spec.rb
index 37ae0d694eb..f3856969a6e 100644
--- a/spec/tasks/gitlab/ci_secure_files/migrate_rake_spec.rb
+++ b/spec/tasks/gitlab/ci_secure_files/migrate_rake_spec.rb
@@ -3,12 +3,12 @@
require 'rake_helper'
RSpec.describe 'gitlab:ci_secure_files', feature_category: :mobile_devops do
- let_it_be(:local_file) { create(:ci_secure_file) }
+ let!(:local_file) { create(:ci_secure_file) }
let(:logger) { instance_double(Logger) }
let(:helper) { double }
- before_all do
+ before(:all) do
Rake.application.rake_require 'tasks/gitlab/ci_secure_files/migrate'
end
diff --git a/spec/tasks/gitlab/container_registry_rake_spec.rb b/spec/tasks/gitlab/container_registry_rake_spec.rb
index f4bd8560cd0..d0c728bf36d 100644
--- a/spec/tasks/gitlab/container_registry_rake_spec.rb
+++ b/spec/tasks/gitlab/container_registry_rake_spec.rb
@@ -3,9 +3,9 @@
require 'rake_helper'
RSpec.describe 'gitlab:container_registry namespace rake tasks', :silence_stdout do
- let_it_be(:api_url) { 'http://registry.gitlab' }
+ let(:api_url) { 'http://registry.gitlab' }
- before_all do
+ before(:all) do
Rake.application.rake_require 'tasks/gitlab/container_registry'
end
diff --git a/spec/tasks/gitlab/db/cells/bump_cell_sequences_rake_spec.rb b/spec/tasks/gitlab/db/cells/bump_cell_sequences_rake_spec.rb
index a5dd7c0ff09..a1725d6fed7 100644
--- a/spec/tasks/gitlab/db/cells/bump_cell_sequences_rake_spec.rb
+++ b/spec/tasks/gitlab/db/cells/bump_cell_sequences_rake_spec.rb
@@ -4,7 +4,7 @@ require 'rake_helper'
RSpec.describe 'gitlab:db:cells:bump_cell_sequences', :silence_stdout,
:suppress_gitlab_schemas_validate_connection, feature_category: :cell, query_analyzers: false do
- before_all do
+ before(:all) do
Rake.application.rake_require 'tasks/gitlab/db/cells/bump_cell_sequences'
# empty task as env is already loaded
diff --git a/spec/tasks/gitlab/db/decomposition/connection_status_rake_spec.rb b/spec/tasks/gitlab/db/decomposition/connection_status_rake_spec.rb
index 5116ee5663e..352e3d944fc 100644
--- a/spec/tasks/gitlab/db/decomposition/connection_status_rake_spec.rb
+++ b/spec/tasks/gitlab/db/decomposition/connection_status_rake_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'gitlab:db:decomposition:connection_status', feature_category: :c
subject { run_rake_task('gitlab:db:decomposition:connection_status') }
- before_all do
+ before(:all) do
Rake.application.rake_require 'tasks/gitlab/db/decomposition/connection_status'
end
diff --git a/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb b/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb
index f923d09bdaa..3d4b977644f 100644
--- a/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb
+++ b/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb
@@ -4,7 +4,7 @@ require 'rake_helper'
RSpec.describe 'gitlab:db:decomposition:rollback:bump_ci_sequences', :silence_stdout,
:suppress_gitlab_schemas_validate_connection, feature_category: :cell, query_analyzers: false do
- before_all do
+ before(:all) do
Rake.application.rake_require 'tasks/gitlab/db/decomposition/rollback/bump_ci_sequences'
# empty task as env is already loaded
diff --git a/spec/tasks/gitlab/db/lock_writes_rake_spec.rb b/spec/tasks/gitlab/db/lock_writes_rake_spec.rb
index 069f5dc7d84..5baf13b9847 100644
--- a/spec/tasks/gitlab/db/lock_writes_rake_spec.rb
+++ b/spec/tasks/gitlab/db/lock_writes_rake_spec.rb
@@ -3,7 +3,7 @@
require 'rake_helper'
RSpec.describe 'gitlab:db:lock_writes', :reestablished_active_record_base, feature_category: :cell do
- before_all do
+ before(:all) do
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/seed_fu'
Rake.application.rake_require 'tasks/gitlab/db/validate_config'
diff --git a/spec/tasks/gitlab/db/migration_fix_15_11_rake_spec.rb b/spec/tasks/gitlab/db/migration_fix_15_11_rake_spec.rb
index 41d77d6efc7..9a101921b68 100644
--- a/spec/tasks/gitlab/db/migration_fix_15_11_rake_spec.rb
+++ b/spec/tasks/gitlab/db/migration_fix_15_11_rake_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'migration_fix_15_11', :reestablished_active_record_base, feature
let(:target_init_schema) { '20220314184009' }
let(:earlier_init_schema) { '20210101010101' }
- before_all do
+ before(:all) do
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/gitlab/db/migration_fix_15_11'
diff --git a/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb b/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb
index 78d2bcba8a2..518acfc5d81 100644
--- a/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb
+++ b/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe 'gitlab:db:truncate_legacy_tables', :silence_stdout, :reestablish
let(:test_gitlab_main_table) { '_test_gitlab_main_table' }
let(:test_gitlab_ci_table) { '_test_gitlab_ci_table' }
- before_all do
+ before(:all) do
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/seed_fu'
Rake.application.rake_require 'tasks/gitlab/db/validate_config'
diff --git a/spec/tasks/gitlab/db/validate_config_rake_spec.rb b/spec/tasks/gitlab/db/validate_config_rake_spec.rb
index e2e1cf249f0..e58667578b2 100644
--- a/spec/tasks/gitlab/db/validate_config_rake_spec.rb
+++ b/spec/tasks/gitlab/db/validate_config_rake_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'gitlab:db:validate_config', :silence_stdout, :suppress_gitlab_sc
# which would not be cleaned either by `DbCleaner`
self.use_transactional_tests = false
- before_all do
+ before(:all) do
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/seed_fu'
Rake.application.rake_require 'tasks/gitlab/db/validate_config'
diff --git a/spec/tasks/gitlab/db_rake_spec.rb b/spec/tasks/gitlab/db_rake_spec.rb
index 344429dc6ec..c35c162c99a 100644
--- a/spec/tasks/gitlab/db_rake_spec.rb
+++ b/spec/tasks/gitlab/db_rake_spec.rb
@@ -1,10 +1,9 @@
# frozen_string_literal: true
-require 'spec_helper'
-require 'rake'
+require 'rake_helper'
RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_category: :database do
- before(:all) do # rubocop:disable RSpec/BeforeAll
+ before(:all) do
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/seed_fu'
Rake.application.rake_require 'tasks/gitlab/db'
@@ -18,6 +17,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor
allow(Rake::Task['db:migrate']).to receive(:invoke).and_return(true)
allow(Rake::Task['db:schema:load']).to receive(:invoke).and_return(true)
allow(Rake::Task['db:seed_fu']).to receive(:invoke).and_return(true)
+ stub_feature_flags(disallow_database_ddl_feature_flags: false)
end
describe 'mark_migration_complete' do
@@ -551,9 +551,9 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor
end
describe 'clean_structure_sql' do
- let_it_be(:clean_rake_task) { 'gitlab:db:clean_structure_sql' }
- let_it_be(:test_task_name) { 'gitlab:db:_test_multiple_structure_cleans' }
- let_it_be(:input) { 'this is structure data' }
+ let(:clean_rake_task) { 'gitlab:db:clean_structure_sql' }
+ let(:test_task_name) { 'gitlab:db:_test_multiple_structure_cleans' }
+ let(:input) { 'this is structure data' }
let(:output) { StringIO.new }
@@ -882,6 +882,16 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor
end
end
+ context 'when database ddl feature flag is enabled' do
+ it 'is a no-op' do
+ stub_feature_flags(disallow_database_ddl_feature_flags: true)
+
+ expect(Gitlab::Database::AsyncIndexes).not_to receive(:execute_pending_actions!)
+
+ expect { run_rake_task('gitlab:db:execute_async_index_operations:main') }.to raise_error(SystemExit)
+ end
+ end
+
context 'with geo configured' do
before do
skip_unless_geo_configured
@@ -956,6 +966,16 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor
end
end
+ context 'when database ddl feature flag is enabled' do
+ it 'is a no-op' do
+ stub_feature_flags(disallow_database_ddl_feature_flags: true)
+
+ expect(Gitlab::Database::AsyncConstraints).not_to receive(:validate_pending_entries!)
+
+ expect { run_rake_task('gitlab:db:validate_async_constraints:main') }.to raise_error(SystemExit)
+ end
+ end
+
context 'with geo configured' do
before do
skip_unless_geo_configured
diff --git a/spec/tasks/gitlab/dependency_proxy/migrate_rake_spec.rb b/spec/tasks/gitlab/dependency_proxy/migrate_rake_spec.rb
index e1504a8aaf5..0bda879bd7c 100644
--- a/spec/tasks/gitlab/dependency_proxy/migrate_rake_spec.rb
+++ b/spec/tasks/gitlab/dependency_proxy/migrate_rake_spec.rb
@@ -3,7 +3,7 @@
require 'rake_helper'
RSpec.describe 'gitlab:dependency_proxy namespace rake task', :silence_stdout do
- before_all do
+ before(:all) do
Rake.application.rake_require 'tasks/gitlab/dependency_proxy/migrate'
end
diff --git a/spec/tasks/gitlab/generate_sample_prometheus_data_rake_spec.rb b/spec/tasks/gitlab/generate_sample_prometheus_data_rake_spec.rb
deleted file mode 100644
index 67bf512c6da..00000000000
--- a/spec/tasks/gitlab/generate_sample_prometheus_data_rake_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-require 'rake_helper'
-
-RSpec.describe 'gitlab:generate_sample_prometheus_data rake task', :silence_stdout do
- let(:cluster) { create(:cluster, :provided_by_user, :project) }
- let(:environment) { create(:environment, project: cluster.project) }
- let(:sample_query_file) { File.join(Rails.root, Metrics::SampleMetricsService::DIRECTORY, 'test_query_result.yml') }
- let!(:metric) { create(:prometheus_metric, project: cluster.project, identifier: 'test_query_result') }
-
- around do |example|
- example.run
- ensure
- FileUtils.rm(sample_query_file)
- end
-
- it 'creates the file correctly' do
- Rake.application.rake_require 'tasks/gitlab/generate_sample_prometheus_data'
- allow(Environment).to receive(:find).and_return(environment)
- allow(environment).to receive_message_chain(:prometheus_adapter, :prometheus_client, :query_range) { sample_query_result[30] }
- run_rake_task('gitlab:generate_sample_prometheus_data', [environment.id])
-
- expect(File.exist?(sample_query_file)).to be true
-
- query_file_content = YAML.load_file(sample_query_file)
-
- expect(query_file_content).to eq(sample_query_result)
- end
-end
-
-def sample_query_result
- file = File.join(Rails.root, 'spec/fixtures/gitlab/sample_metrics', 'sample_metric_query_result.yml')
- YAML.load_file(File.expand_path(file, __dir__))
-end
diff --git a/spec/tasks/gitlab/gitaly_rake_spec.rb b/spec/tasks/gitlab/gitaly_rake_spec.rb
index 7eca2773cf2..5a395c8f6ef 100644
--- a/spec/tasks/gitlab/gitaly_rake_spec.rb
+++ b/spec/tasks/gitlab/gitaly_rake_spec.rb
@@ -3,7 +3,7 @@
require 'rake_helper'
RSpec.describe 'gitlab:gitaly namespace rake task', :silence_stdout do
- before_all do
+ before(:all) do
Rake.application.rake_require 'tasks/gitlab/gitaly'
end
diff --git a/spec/tasks/gitlab/lfs/migrate_rake_spec.rb b/spec/tasks/gitlab/lfs/migrate_rake_spec.rb
index 09c95783867..cbc39c6b093 100644
--- a/spec/tasks/gitlab/lfs/migrate_rake_spec.rb
+++ b/spec/tasks/gitlab/lfs/migrate_rake_spec.rb
@@ -3,7 +3,7 @@
require 'rake_helper'
RSpec.describe 'gitlab:lfs namespace rake task', :silence_stdout do
- before_all do
+ before(:all) do
Rake.application.rake_require 'tasks/gitlab/lfs/migrate'
end
diff --git a/spec/tasks/gitlab/packages/migrate_rake_spec.rb b/spec/tasks/gitlab/packages/migrate_rake_spec.rb
index be69990a745..cdc817cdf38 100644
--- a/spec/tasks/gitlab/packages/migrate_rake_spec.rb
+++ b/spec/tasks/gitlab/packages/migrate_rake_spec.rb
@@ -3,7 +3,7 @@
require 'rake_helper'
RSpec.describe 'gitlab:packages namespace rake task', :silence_stdout do
- before_all do
+ before(:all) do
Rake.application.rake_require 'tasks/gitlab/packages/migrate'
end
diff --git a/spec/tasks/gitlab/password_rake_spec.rb b/spec/tasks/gitlab/password_rake_spec.rb
index 5d5e5af2536..21a6dc102e6 100644
--- a/spec/tasks/gitlab/password_rake_spec.rb
+++ b/spec/tasks/gitlab/password_rake_spec.rb
@@ -3,8 +3,8 @@
require 'rake_helper'
RSpec.describe 'gitlab:password rake tasks', :silence_stdout do
- let_it_be(:user_1) { create(:user, username: 'foobar', password: User.random_password) }
- let_it_be(:password) { User.random_password }
+ let!(:user_1) { create(:user, username: 'foobar', password: User.random_password) }
+ let(:password) { User.random_password }
def stub_username(username)
allow(Gitlab::TaskHelpers).to receive(:prompt).with('Enter username: ').and_return(username)
diff --git a/spec/tasks/gitlab/refresh_project_statistics_build_artifacts_size_rake_spec.rb b/spec/tasks/gitlab/refresh_project_statistics_build_artifacts_size_rake_spec.rb
index f0fc3c501c5..60c0d80223e 100644
--- a/spec/tasks/gitlab/refresh_project_statistics_build_artifacts_size_rake_spec.rb
+++ b/spec/tasks/gitlab/refresh_project_statistics_build_artifacts_size_rake_spec.rb
@@ -6,9 +6,9 @@ RSpec.describe 'gitlab:refresh_project_statistics_build_artifacts_size rake task
let(:rake_task) { 'gitlab:refresh_project_statistics_build_artifacts_size' }
describe 'enqueuing build artifacts size statistics refresh for given list of project IDs' do
- let_it_be(:project_1) { create(:project) }
- let_it_be(:project_2) { create(:project) }
- let_it_be(:project_3) { create(:project) }
+ let!(:project_1) { create(:project) }
+ let!(:project_2) { create(:project) }
+ let!(:project_3) { create(:project) }
let(:csv_body) do
<<~BODY
diff --git a/spec/tasks/gitlab/snippets_rake_spec.rb b/spec/tasks/gitlab/snippets_rake_spec.rb
index f0ba5ac2d92..231c2dae006 100644
--- a/spec/tasks/gitlab/snippets_rake_spec.rb
+++ b/spec/tasks/gitlab/snippets_rake_spec.rb
@@ -3,13 +3,13 @@
require 'rake_helper'
RSpec.describe 'gitlab:snippets namespace rake task', :silence_stdout do
- let_it_be(:user) { create(:user) }
- let_it_be(:migrated) { create(:personal_snippet, :repository, author: user) }
+ let!(:user) { create(:user) }
+ let!(:migrated) { create(:personal_snippet, :repository, author: user) }
let(:non_migrated) { create_list(:personal_snippet, 3, author: user) }
let(:non_migrated_ids) { non_migrated.pluck(:id) }
- before_all do
+ before(:all) do
Rake.application.rake_require 'tasks/gitlab/snippets'
end
diff --git a/spec/tasks/gitlab/terraform/migrate_rake_spec.rb b/spec/tasks/gitlab/terraform/migrate_rake_spec.rb
index 0547d351065..3797c01a9cb 100644
--- a/spec/tasks/gitlab/terraform/migrate_rake_spec.rb
+++ b/spec/tasks/gitlab/terraform/migrate_rake_spec.rb
@@ -3,12 +3,12 @@
require 'rake_helper'
RSpec.describe 'gitlab:terraform_states', :silence_stdout do
- let_it_be(:version) { create(:terraform_state_version) }
+ let!(:version) { create(:terraform_state_version) }
let(:logger) { instance_double(Logger) }
let(:helper) { double }
- before_all do
+ before(:all) do
Rake.application.rake_require 'tasks/gitlab/terraform/migrate'
end
diff --git a/spec/tasks/gitlab/web_hook_rake_spec.rb b/spec/tasks/gitlab/web_hook_rake_spec.rb
index cb6a6e72ab1..6ad65f55142 100644
--- a/spec/tasks/gitlab/web_hook_rake_spec.rb
+++ b/spec/tasks/gitlab/web_hook_rake_spec.rb
@@ -3,10 +3,10 @@
require 'rake_helper'
RSpec.describe 'gitlab:web_hook namespace rake tasks', :silence_stdout do
- let_it_be(:group, refind: true) { create(:group) }
- let_it_be(:project1, reload: true) { create(:project, namespace: group) }
- let_it_be(:project2, reload: true) { create(:project, namespace: group) }
- let_it_be(:other_group_project, reload: true) { create(:project) }
+ let!(:group) { create(:group) }
+ let!(:project1) { create(:project, namespace: group) }
+ let!(:project2) { create(:project, namespace: group) }
+ let!(:other_group_project) { create(:project) }
let(:url) { 'http://example.com' }
let(:hook_urls) { (project1.hooks + project2.hooks).map(&:url) }
diff --git a/spec/tasks/gitlab/workhorse_rake_spec.rb b/spec/tasks/gitlab/workhorse_rake_spec.rb
index 17f3133ecdc..e87bef9f01f 100644
--- a/spec/tasks/gitlab/workhorse_rake_spec.rb
+++ b/spec/tasks/gitlab/workhorse_rake_spec.rb
@@ -3,7 +3,7 @@
require 'rake_helper'
RSpec.describe 'gitlab:workhorse namespace rake task', :silence_stdout, feature_category: :source_code_management do
- before_all do
+ before(:all) do
Rake.application.rake_require 'tasks/gitlab/workhorse'
end
diff --git a/spec/tasks/gitlab/x509/update_rake_spec.rb b/spec/tasks/gitlab/x509/update_rake_spec.rb
index abf8316d978..118b0b2b960 100644
--- a/spec/tasks/gitlab/x509/update_rake_spec.rb
+++ b/spec/tasks/gitlab/x509/update_rake_spec.rb
@@ -3,7 +3,7 @@
require 'rake_helper'
RSpec.describe 'gitlab:x509 namespace rake task', :silence_stdout do
- before_all do
+ before(:all) do
Rake.application.rake_require 'tasks/gitlab/x509/update'
end
diff --git a/spec/tasks/migrate/schema_check_rake_spec.rb b/spec/tasks/migrate/schema_check_rake_spec.rb
index 5afad752982..4d0f59295a6 100644
--- a/spec/tasks/migrate/schema_check_rake_spec.rb
+++ b/spec/tasks/migrate/schema_check_rake_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'schema_version_check rake task', :silence_stdout do
include StubENV
let(:valid_schema_version) { 20211004170422 }
- before_all do
+ before(:all) do
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/migrate/schema_check'
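The rake task specs above share one setup pattern: task definitions are loaded once per example group with before(:all), and each example invokes the task through the shared run_rake_task helper from rake_helper. A minimal sketch of that pattern, assuming those helpers as they appear in the diffs (the 'gitlab:example' task name and its task file are illustrative placeholders):

# Sketch of the rake spec layout used above; 'gitlab:example' is a placeholder,
# run_rake_task and :silence_stdout come from rake_helper as in the specs.
require 'rake_helper'

RSpec.describe 'gitlab:example rake task', :silence_stdout do
  before(:all) do
    # Load the task definitions once for the whole example group.
    Rake.application.rake_require 'tasks/gitlab/example'
  end

  it 'runs without raising' do
    # run_rake_task re-enables and invokes the task for each example.
    expect { run_rake_task('gitlab:example') }.not_to raise_error
  end
end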
diff --git a/spec/tooling/danger/clickhouse_spec.rb b/spec/tooling/danger/clickhouse_spec.rb
new file mode 100644
index 00000000000..ad2f0b4a827
--- /dev/null
+++ b/spec/tooling/danger/clickhouse_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'rspec-parameterized'
+require 'gitlab-dangerfiles'
+require 'danger'
+require 'danger/plugins/internal/helper'
+require 'gitlab/dangerfiles/spec_helper'
+
+require_relative '../../../tooling/danger/clickhouse'
+
+RSpec.describe Tooling::Danger::Clickhouse, feature_category: :tooling do
+ include_context "with dangerfile"
+
+ let(:fake_danger) { DangerSpecHelper.fake_danger.include(described_class) }
+ let(:migration_files) do
+ %w[
+ db/click_house/20220901010203_add_widgets_table.rb
+ db/click_house/20220909010203_add_properties_column.rb
+ db/click_house/20220910010203_drop_tools_table.rb
+ db/click_house/20220912010203_add_index_to_widgets_table.rb
+ ]
+ end
+
+ subject(:clickhouse) { fake_danger.new(helper: fake_helper) }
+
+ describe '#changes' do
+ using RSpec::Parameterized::TableSyntax
+
+ where do
+ {
+ 'with click_house gem changes' => {
+ modified_files: %w[gems/click_house-client/lib/click_house/client.rb],
+ changes_by_category: {
+ database: [],
+ clickhouse: %w[gems/click_house-client/lib/click_house/client.rb]
+ },
+ impacted_files: %w[gems/click_house-client/lib/click_house/client.rb]
+ },
+ 'with clickhouse data changes' => {
+ modified_files: %w[db/clickhouse/20230720114001_add_magic_table_migration.rb],
+ changes_by_category: {
+ database: [],
+ clickhouse: %w[db/clickhouse/20230720114001_add_magic_table_migration.rb]
+ },
+ impacted_files: %w[db/clickhouse/20230720114001_add_magic_table_migration.rb]
+ },
+ 'with clickhouse app changes' => {
+ modified_files: %w[lib/click_house/query_builder.rb],
+ changes_by_category: {
+ database: [],
+ clickhouse: %w[lib/click_house/query_builder.rb]
+ },
+ impacted_files: %w[lib/click_house/query_builder.rb]
+ }
+ }
+ end
+
+ with_them do
+ before do
+ allow(fake_helper).to receive(:modified_files).and_return(modified_files)
+ allow(fake_helper).to receive(:all_changed_files).and_return(modified_files)
+ allow(fake_helper).to receive(:changes_by_category).and_return(changes_by_category)
+ end
+
+ it 'returns only clickhouse changes' do
+ expect(clickhouse.changes).to match impacted_files
+ end
+ end
+ end
+end
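The new Danger spec above drives its cases through rspec-parameterized: the where block returns a hash of case name to parameters, each key becomes a let, and with_them runs the example once per entry. A minimal, self-contained sketch of that construct (the arithmetic example is illustrative and unrelated to the plugin):

# Hedged sketch of the where/with_them table form used in the spec above.
require 'rspec-parameterized'

RSpec.describe 'parameterized table form' do
  using RSpec::Parameterized::TableSyntax

  where do
    {
      'adds small numbers' => { a: 1, b: 2, expected_sum: 3 },
      'adds negatives' => { a: -1, b: -2, expected_sum: -3 }
    }
  end

  with_them do
    it 'sums the inputs' do
      expect(a + b).to eq(expected_sum)
    end
  end
end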
diff --git a/spec/tooling/danger/ignored_model_columns_spec.rb b/spec/tooling/danger/ignored_model_columns_spec.rb
new file mode 100644
index 00000000000..737b6cce077
--- /dev/null
+++ b/spec/tooling/danger/ignored_model_columns_spec.rb
@@ -0,0 +1,145 @@
+# frozen_string_literal: true
+
+require 'danger'
+require 'gitlab/dangerfiles/spec_helper'
+
+require_relative '../../../tooling/danger/ignored_model_columns'
+require_relative '../../../tooling/danger/project_helper'
+
+RSpec.describe Tooling::Danger::IgnoredModelColumns, feature_category: :tooling do
+ subject(:ignored_model_columns) { fake_danger.new(helper: fake_helper) }
+
+ let(:fake_danger) { DangerSpecHelper.fake_danger.include(described_class) }
+ let(:fake_project_helper) { instance_double(Tooling::Danger::ProjectHelper) }
+ let(:comment) { described_class::COMMENT.chomp }
+ let(:file_diff) do
+ File.read(File.expand_path("../fixtures/#{fixture}", __dir__)).split("\n")
+ end
+
+ include_context "with dangerfile"
+
+ describe '#add_comment_for_ignored_model_columns' do
+ let(:file_lines) { file_diff.map { |line| line.delete_prefix('+').delete_prefix('-') } }
+
+ before do
+ allow(ignored_model_columns).to receive(:project_helper).and_return(fake_project_helper)
+ allow(ignored_model_columns.project_helper).to receive(:file_lines).and_return(file_lines)
+ allow(ignored_model_columns.helper).to receive(:all_changed_files).and_return([filename])
+ allow(ignored_model_columns.helper).to receive(:changed_lines).with(filename).and_return(file_diff)
+ end
+
+ context 'when table column is renamed in a regular migration' do
+ let(:filename) { 'db/migrate/rename_my_column_migration.rb' }
+ let(:fixture) { 'rename_column_migration.txt' }
+ let(:matching_lines) { [7, 11, 15, 19, 23, 27, 31, 35, 39] }
+
+ it 'adds comment at the correct line' do
+ matching_lines.each do |line_number|
+ expect(ignored_model_columns).to receive(:markdown).with("\n#{comment}", file: filename, line: line_number)
+ end
+
+ ignored_model_columns.add_comment_for_ignored_model_columns
+ end
+ end
+
+ context 'when table column is renamed in a post migration' do
+ let(:filename) { 'db/post_migrate/remove_column_migration.rb' }
+ let(:fixture) { 'remove_column_migration.txt' }
+ let(:matching_lines) { [7, 8, 16, 24, 32, 40, 48, 56, 64, 72] }
+
+ it 'adds comment at the correct line' do
+ matching_lines.each do |line_number|
+ expect(ignored_model_columns).to receive(:markdown).with("\n#{comment}", file: filename, line: line_number)
+ end
+
+ ignored_model_columns.add_comment_for_ignored_model_columns
+ end
+ end
+
+ context 'when table cleanup is performed in a post migration' do
+ let(:filename) { 'db/post_migrate/cleanup_conversion_big_int_migration.rb' }
+ let(:fixture) { 'cleanup_conversion_migration.txt' }
+ let(:matching_lines) { [7, 11, 15, 19, 23, 27, 31, 35, 39] }
+
+ it 'adds comment at the correct line' do
+ matching_lines.each do |line_number|
+ expect(ignored_model_columns).to receive(:markdown).with("\n#{comment}", file: filename, line: line_number)
+ end
+
+ ignored_model_columns.add_comment_for_ignored_model_columns
+ end
+ end
+
+ context 'when a regular migration does not rename table column' do
+ let(:filename) { 'db/migrate/my_migration.rb' }
+ let(:file_diff) do
+ [
+ "+ undo_cleanup_concurrent_column_rename(:my_table, :old_column, :new_column)",
+ "- cleanup_concurrent_column_rename(:my_table, :new_column, :old_column)"
+ ]
+ end
+
+ let(:file_lines) do
+ [
+ ' def up',
+ ' undo_cleanup_concurrent_column_rename(:my_table, :old_column, :new_column)',
+ ' end'
+ ]
+ end
+
+ it 'does not add comment' do
+ expect(ignored_model_columns).not_to receive(:markdown)
+
+ ignored_model_columns.add_comment_for_ignored_model_columns
+ end
+ end
+
+ context 'when a post migration does not remove table column' do
+ let(:filename) { 'db/migrate/my_migration.rb' }
+ let(:file_diff) do
+ [
+ "+ add_column(:my_table, :my_column, :type)",
+ "- remove_column(:my_table, :my_column)"
+ ]
+ end
+
+ let(:file_lines) do
+ [
+ ' def up',
+ ' add_column(:my_table, :my_column, :type)',
+ ' end'
+ ]
+ end
+
+ it 'does not add comment' do
+ expect(ignored_model_columns).not_to receive(:markdown)
+
+ ignored_model_columns.add_comment_for_ignored_model_columns
+ end
+ end
+
+ context 'when a post migration does not convert table column' do
+ let(:filename) { 'db/migrate/my_migration.rb' }
+ let(:file_diff) do
+ [
+ "+ restore_conversion_of_integer_to_bigint(TABLE, COLUMNS)",
+ "- cleanup_conversion_of_integer_to_bigint(TABLE, COLUMNS)"
+ ]
+ end
+
+ let(:file_lines) do
+ [
+ ' def up',
+ ' cleanup_conversion_of_integer_to_bigint(TABLE, COLUMNS)',
+ ' end'
+ ]
+ end
+
+ it 'does not add comment' do
+ expect(ignored_model_columns).not_to receive(:markdown)
+
+ ignored_model_columns.add_comment_for_ignored_model_columns
+ end
+ end
+ end
+end
diff --git a/spec/tooling/fixtures/cleanup_conversion_migration.txt b/spec/tooling/fixtures/cleanup_conversion_migration.txt
new file mode 100644
index 00000000000..14a7937b469
--- /dev/null
+++ b/spec/tooling/fixtures/cleanup_conversion_migration.txt
@@ -0,0 +1,44 @@
++# frozen_string_literal: true
++
++class TestMigration < Gitlab::Database::Migration[2.1]
++ disable_ddl_transaction!
++
++ def up
++ cleanup_conversion_of_integer_to_bigint :my_table, :my_column
++ end
++
++ def up
++ cleanup_conversion_of_integer_to_bigint 'my_table', 'my_column'
++ end
++
++ def up
++ cleanup_conversion_of_integer_to_bigint "my_table", "my_column", "new_column"
++ end
++
++ def up
++ cleanup_conversion_of_integer_to_bigint TABLE_NAME, MY_COLUMN
++ end
++
++ def up
++ cleanup_conversion_of_integer_to_bigint(:my_table, :my_column)
++ end
++
++ def up
++ cleanup_conversion_of_integer_to_bigint('my_table', 'my_column')
++ end
++
++ def up
++ cleanup_conversion_of_integer_to_bigint("my_table", "my_column")
++ end
++
++ def up
++ cleanup_conversion_of_integer_to_bigint(TABLE_NAME, MY_COLUMN)
++ end
++
++ def up
++ cleanup_conversion_of_integer_to_bigint(
++ :my_table,
++ :my_column
++ )
++ end
++end
diff --git a/spec/tooling/fixtures/remove_column_migration.txt b/spec/tooling/fixtures/remove_column_migration.txt
new file mode 100644
index 00000000000..885f0060d92
--- /dev/null
+++ b/spec/tooling/fixtures/remove_column_migration.txt
@@ -0,0 +1,84 @@
++# frozen_string_literal: true
++
++class TestMigration < Gitlab::Database::Migration[2.1]
++ disable_ddl_transaction!
++
++ def up
++ remove_column :my_table, :my_column
++ remove_column :my_other_table, :my_column
++ end
++
++ def down
++ remove_column :my_table, :my_column
++ end
++
++ def up
++ remove_column 'my_table', 'my_column'
++ end
++
++ def down
++ remove_column 'my_table', 'my_column'
++ end
++
++ def up
++ remove_column "my_table", "my_column", "new_column"
++ end
++
++ def down
++ remove_column "my_table", "my_column", "new_column"
++ end
++
++ def up
++ remove_column TABLE_NAME, MY_COLUMN
++ end
++
++ def down
++ remove_column TABLE_NAME, MY_COLUMN
++ end
++
++ def up
++ remove_column(:my_table, :my_column)
++ end
++
++ def down
++ remove_column(:my_table, :my_column)
++ end
++
++ def up
++ remove_column('my_table', 'my_column')
++ end
++
++ def down
++ remove_column('my_table', 'my_column')
++ end
++
++ def up
++ remove_column("my_table", "my_column")
++ end
++
++ def down
++ remove_column("my_table", "my_column")
++ end
++
++ def up
++ remove_column(TABLE_NAME, MY_COLUMN)
++ end
++
++ def down
++ remove_column(TABLE_NAME, MY_COLUMN)
++ end
++
++ def up
++ remove_column(
++ :my_table,
++ :my_column
++ )
++ end
++
++ def down
++ remove_column(
++ :my_table,
++ :my_column
++ )
++ end
++end
diff --git a/spec/tooling/fixtures/rename_column_migration.txt b/spec/tooling/fixtures/rename_column_migration.txt
new file mode 100644
index 00000000000..e79029219a5
--- /dev/null
+++ b/spec/tooling/fixtures/rename_column_migration.txt
@@ -0,0 +1,45 @@
++# frozen_string_literal: true
++
++class TestMigration < Gitlab::Database::Migration[2.1]
++ disable_ddl_transaction!
++
++ def up
++ cleanup_concurrent_column_rename :my_table, :old_column, :new_column
++ end
++
++ def up
++ cleanup_concurrent_column_rename 'my_table', 'old_column', 'new_column'
++ end
++
++ def up
++ cleanup_concurrent_column_rename "my_table", "old_column", "new_column"
++ end
++
++ def up
++ cleanup_concurrent_column_rename TABLE_NAME, OLD_COLUMN_NAME, NEW_COLUMN_NAME
++ end
++
++ def up
++ cleanup_concurrent_column_rename(:my_table, :old_column, :new_column)
++ end
++
++ def up
++ cleanup_concurrent_column_rename('my_table', 'old_column', 'new_column')
++ end
++
++ def up
++ cleanup_concurrent_column_rename("my_table", "old_column", "new_column")
++ end
++
++ def up
++ cleanup_concurrent_column_rename(TABLE_NAME, OLD_COLUMN_NAME, NEW_COLUMN_NAME)
++ end
++
++ def up
++ cleanup_concurrent_column_rename(
++ :my_table,
++ :old_column,
++ :new_column
++ )
++ end
++end
diff --git a/spec/uploaders/packages/nuget/symbol_uploader_spec.rb b/spec/uploaders/packages/nuget/symbol_uploader_spec.rb
new file mode 100644
index 00000000000..bdcb5245c1c
--- /dev/null
+++ b/spec/uploaders/packages/nuget/symbol_uploader_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Nuget::SymbolUploader, feature_category: :package_registry do
+ let(:object_storage_key) { 'object/storage/key' }
+ let(:symbol) { build_stubbed(:nuget_symbol, object_storage_key: object_storage_key) }
+
+ subject { described_class.new(symbol, :file) }
+
+ describe '#store_dir' do
+ it 'uses the object_storage_key' do
+ expect(subject.store_dir).to eq(object_storage_key)
+ end
+
+ context 'without the object_storage_key' do
+ let(:object_storage_key) { nil }
+
+ it 'raises the error' do
+ expect { subject.store_dir }
+ .to raise_error(
+ described_class::ObjectNotReadyError,
+ 'Packages::Nuget::Symbol model not ready'
+ )
+ end
+ end
+ end
+end
diff --git a/spec/views/admin/application_settings/general.html.haml_spec.rb b/spec/views/admin/application_settings/general.html.haml_spec.rb
index 3b3a8a675a0..99564003d59 100644
--- a/spec/views/admin/application_settings/general.html.haml_spec.rb
+++ b/spec/views/admin/application_settings/general.html.haml_spec.rb
@@ -125,7 +125,6 @@ RSpec.describe 'admin/application_settings/general.html.haml' do
shared_examples 'does not render the form' do
it 'does not render the form' do
expect(rendered).not_to have_field('application_setting_instance_level_code_suggestions_enabled')
- expect(rendered).not_to have_field('application_setting_ai_access_token')
end
end
diff --git a/spec/views/admin/identities/index.html.haml_spec.rb b/spec/views/admin/identities/index.html.haml_spec.rb
index 3e8def003ae..a4f6579f5ef 100644
--- a/spec/views/admin/identities/index.html.haml_spec.rb
+++ b/spec/views/admin/identities/index.html.haml_spec.rb
@@ -20,18 +20,19 @@ RSpec.describe 'admin/identities/index.html.haml', :aggregate_failures do
it 'shows table headers' do
render
- expect(rendered).to include('<th class="gl-border-t-0!">').exactly(5)
+ expect(rendered).to include('<th class="gl-border-t-0!">').exactly(6)
expect(rendered).to include(_('Provider'))
expect(rendered).to include(s_('Identity|Provider ID'))
expect(rendered).to include(_('Group'))
expect(rendered).to include(_('Identifier'))
+ expect(rendered).to include(_('Active'))
expect(rendered).to include(_('Actions'))
end
it 'shows information text' do
render
- expect(rendered).to include('<td colspan="5">').exactly(1)
+ expect(rendered).to include('<td colspan="6">').exactly(1)
expect(rendered).to include(_('This user has no identities'))
end
end
@@ -41,10 +42,10 @@ RSpec.describe 'admin/identities/index.html.haml', :aggregate_failures do
assign(:identities, ldap_user.identities)
end
- it 'shows exactly 5 columns' do
+ it 'shows exactly 6 columns' do
render
- expect(rendered).to include('</td>').exactly(5)
+ expect(rendered).to include('</td>').exactly(6)
end
it 'shows identity without provider ID or group' do
diff --git a/spec/views/devise/shared/_signin_box.html.haml_spec.rb b/spec/views/devise/shared/_signin_box.html.haml_spec.rb
index e2aa0bb9870..24937cfdd4a 100644
--- a/spec/views/devise/shared/_signin_box.html.haml_spec.rb
+++ b/spec/views/devise/shared/_signin_box.html.haml_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe 'devise/shared/_signin_box' do
it 'renders user_login label' do
render
- expect(rendered).to have_content(_('Username or email'))
+ expect(rendered).to have_content(_('Username or primary email'))
end
end
diff --git a/spec/views/devise/shared/_signup_omniauth_provider_list_spec.rb b/spec/views/devise/shared/_signup_omniauth_provider_list_spec.rb
new file mode 100644
index 00000000000..cc3ee52e73b
--- /dev/null
+++ b/spec/views/devise/shared/_signup_omniauth_provider_list_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'devise/shared/_signup_omniauth_provider_list', feature_category: :system_access do
+ let_it_be(:provider_label) { :github }.freeze
+ let_it_be(:tracking_label) { 'free_registration' }.freeze
+ let_it_be(:tracking_action) { "#{provider_label}_sso" }.freeze
+
+ subject { rendered }
+
+ before do
+ allow(view).to receive(:providers).and_return([provider_label])
+ allow(view).to receive(:tracking_label).and_return(tracking_label)
+ allow(view).to receive(:glm_tracking_params).and_return({})
+ end
+
+ shared_examples 'sso buttons have snowplow tracking' do
+ it 'contains tracking attributes' do
+ css = "[data-track-action='#{tracking_action}']"
+ css += "[data-track-label='#{tracking_label}']"
+
+ expect(rendered).to have_css(css)
+ end
+ end
+
+ context 'when feature flag is true' do
+ before do
+ stub_feature_flags(restyle_login_page: true)
+
+ render
+ end
+
+ it { is_expected.to have_content(_("Register with:")) }
+
+ it_behaves_like 'sso buttons have snowplow tracking'
+ end
+
+ context 'when feature flag is false' do
+ before do
+ stub_feature_flags(restyle_login_page: false)
+
+ render
+ end
+
+ it { is_expected.to have_content(_("Create an account using:")) }
+
+ it_behaves_like 'sso buttons have snowplow tracking'
+ end
+end
diff --git a/spec/views/events/event/_push.html.haml_spec.rb b/spec/views/events/event/_push.html.haml_spec.rb
index f4d3258ff67..fe4357d3ad5 100644
--- a/spec/views/events/event/_push.html.haml_spec.rb
+++ b/spec/views/events/event/_push.html.haml_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'events/event/_push.html.haml' do
let(:event) { build_stubbed(:push_event) }
+ let(:event_presenter) { event.present }
context 'with a branch' do
let(:payload) { build_stubbed(:push_event_payload, event: event) }
@@ -16,14 +17,14 @@ RSpec.describe 'events/event/_push.html.haml' do
allow(event.project.repository).to receive(:branch_exists?).with(event.ref_name).and_return(true)
link = project_commits_path(event.project, event.ref_name)
- render partial: 'events/event/push', locals: { event: event }
+ render partial: 'events/event/push', locals: { event: event_presenter }
expect(rendered).to have_link(event.ref_name, href: link)
end
context 'that has been deleted' do
it 'does not link to the branch' do
- render partial: 'events/event/push', locals: { event: event }
+ render partial: 'events/event/push', locals: { event: event_presenter }
expect(rendered).not_to have_link(event.ref_name)
end
@@ -40,7 +41,7 @@ RSpec.describe 'events/event/_push.html.haml' do
end
it 'includes the count in the text' do
- render partial: 'events/event/push', locals: { event: event }
+ render partial: 'events/event/push', locals: { event: event_presenter }
expect(rendered).to include('4 branches')
end
@@ -58,14 +59,14 @@ RSpec.describe 'events/event/_push.html.haml' do
allow(event.project.repository).to receive(:tag_exists?).with(event.ref_name).and_return(true)
link = project_commits_path(event.project, event.ref_name)
- render partial: 'events/event/push', locals: { event: event }
+ render partial: 'events/event/push', locals: { event: event_presenter }
expect(rendered).to have_link(event.ref_name, href: link)
end
context 'that has been deleted' do
it 'does not link to the tag' do
- render partial: 'events/event/push', locals: { event: event }
+ render partial: 'events/event/push', locals: { event: event_presenter }
expect(rendered).not_to have_link(event.ref_name)
end
@@ -82,7 +83,7 @@ RSpec.describe 'events/event/_push.html.haml' do
end
it 'includes the count in the text' do
- render partial: 'events/event/push', locals: { event: event }
+ render partial: 'events/event/push', locals: { event: event_presenter }
expect(rendered).to include('4 tags')
end
diff --git a/spec/views/layouts/_page.html.haml_spec.rb b/spec/views/layouts/_page.html.haml_spec.rb
new file mode 100644
index 00000000000..da9d086efe0
--- /dev/null
+++ b/spec/views/layouts/_page.html.haml_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'layouts/_page', feature_category: :geo_replication do
+ let_it_be(:user) { build_stubbed(:user) }
+
+ describe '_silent_mode_banner' do
+ before do
+ allow(view).to receive(:current_user).and_return(user)
+ allow(view).to receive(:current_user_mode).and_return(Gitlab::Auth::CurrentUserMode.new(user))
+ end
+
+ describe 'when ::Gitlab::SilentMode.enabled? is true' do
+ before do
+ allow(::Gitlab::SilentMode).to receive(:enabled?).and_return(true)
+ end
+
+ it 'renders silent mode banner' do
+ render
+
+ expect(rendered).to have_text('Silent mode is enabled')
+ end
+ end
+
+ describe 'when ::Gitlab::SilentMode.enabled? is false' do
+ before do
+ allow(::Gitlab::SilentMode).to receive(:enabled?).and_return(false)
+ end
+
+ it 'does not render silent mode banner' do
+ render
+
+ expect(rendered).not_to have_text('Silent mode is enabled')
+ end
+ end
+ end
+end
diff --git a/spec/views/layouts/header/_super_sidebar_logged_out.html.haml_spec.rb b/spec/views/layouts/header/_super_sidebar_logged_out.html.haml_spec.rb
index 89a03d72a90..f81e8c5badf 100644
--- a/spec/views/layouts/header/_super_sidebar_logged_out.html.haml_spec.rb
+++ b/spec/views/layouts/header/_super_sidebar_logged_out.html.haml_spec.rb
@@ -19,6 +19,10 @@ RSpec.describe 'layouts/header/_super_sidebar_logged_out', feature_category: :na
expect(rendered).to have_content('Pricing')
expect(rendered).to have_content('Contact Sales')
end
+
+ it 'renders the free trial button' do
+ expect(rendered).to have_content('Get free trial')
+ end
end
context 'on self-managed' do
diff --git a/spec/views/layouts/organization.html.haml_spec.rb b/spec/views/layouts/organization.html.haml_spec.rb
new file mode 100644
index 00000000000..72a4abd288a
--- /dev/null
+++ b/spec/views/layouts/organization.html.haml_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'layouts/organization', feature_category: :cell do
+ let_it_be(:organization) { build_stubbed(:organization) }
+ let_it_be(:current_user) { build_stubbed(:user, :admin) }
+
+ before do
+ allow(view).to receive(:current_user).and_return(current_user)
+ allow(view).to receive(:current_user_mode).and_return(Gitlab::Auth::CurrentUserMode.new(current_user))
+ allow(view).to receive(:users_path).and_return('/root')
+ end
+
+ subject do
+ render
+
+ rendered
+ end
+
+ describe 'navigation' do
+ context 'when action is #index' do
+ before do
+ allow(view).to receive(:params).and_return({ action: 'index' })
+ end
+
+ it 'renders your_work navigation' do
+ subject
+
+ expect(view.instance_variable_get(:@nav)).to eq('your_work')
+ end
+ end
+
+ context 'when action is #new' do
+ before do
+ allow(view).to receive(:params).and_return({ action: 'new' })
+ end
+
+ it 'renders your_work navigation' do
+ subject
+
+ expect(view.instance_variable_get(:@nav)).to eq('your_work')
+ end
+ end
+
+ context 'when action is #show' do
+ before do
+ allow(view).to receive(:params).and_return({ action: 'show' })
+ view.instance_variable_set(:@organization, organization)
+ end
+
+ it 'renders organization navigation' do
+ subject
+
+ expect(view.instance_variable_get(:@nav)).to eq('organization')
+ end
+ end
+ end
+end
diff --git a/spec/views/layouts/snippets.html.haml_spec.rb b/spec/views/layouts/snippets.html.haml_spec.rb
index 1e6963a6526..b7139f84174 100644
--- a/spec/views/layouts/snippets.html.haml_spec.rb
+++ b/spec/views/layouts/snippets.html.haml_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe 'layouts/snippets', feature_category: :source_code_management do
describe 'sidebar' do
context 'when signed in' do
- let(:user) { build_stubbed(:user) }
+ let(:user) { build_stubbed(:user, :no_super_sidebar) }
it 'renders the "Your work" sidebar' do
render
diff --git a/spec/views/projects/empty.html.haml_spec.rb b/spec/views/projects/empty.html.haml_spec.rb
index 2b19b364365..c478b446864 100644
--- a/spec/views/projects/empty.html.haml_spec.rb
+++ b/spec/views/projects/empty.html.haml_spec.rb
@@ -73,7 +73,7 @@ RSpec.describe 'projects/empty' do
expect(rendered).to have_content('Invite your team')
expect(rendered).to have_content('Add members to this project and start collaborating with your team.')
expect(rendered).to have_selector('.js-invite-members-trigger')
- expect(rendered).to have_selector('[data-trigger-source=project-empty-page]')
+ expect(rendered).to have_selector('[data-trigger-source=project_empty_page]')
end
context 'when user does not have permissions to invite members' do
diff --git a/spec/views/projects/issues/service_desk/_issue.html.haml_spec.rb b/spec/views/projects/issues/service_desk/_issue.html.haml_spec.rb
index ee582ee9927..7f294f12d64 100644
--- a/spec/views/projects/issues/service_desk/_issue.html.haml_spec.rb
+++ b/spec/views/projects/issues/service_desk/_issue.html.haml_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe 'projects/issues/service_desk/_issue.html.haml', feature_category
context 'when issue is service desk issue' do
let_it_be(:email) { 'user@example.com' }
let_it_be(:obfuscated_email) { 'us*****@e*****.c**' }
- let_it_be(:issue) { create(:issue, author: User.support_bot, service_desk_reply_to: email) }
+ let_it_be(:issue) { create(:issue, author: Users::Internal.support_bot, service_desk_reply_to: email) }
context 'with anonymous user' do
it 'obfuscates service_desk_reply_to email for anonymous user' do
diff --git a/spec/views/projects/pages/_pages_settings.html.haml_spec.rb b/spec/views/projects/pages/_pages_settings.html.haml_spec.rb
index 4f54ddbdb60..e790305da5d 100644
--- a/spec/views/projects/pages/_pages_settings.html.haml_spec.rb
+++ b/spec/views/projects/pages/_pages_settings.html.haml_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'projects/pages/_pages_settings', feature_category: :pages do
- let_it_be(:project) { build_stubbed(:project, :repository) }
+ let_it_be(:project) { build_stubbed(:project) }
let_it_be(:user) { build_stubbed(:user) }
before do
@@ -18,4 +18,32 @@ RSpec.describe 'projects/pages/_pages_settings', feature_category: :pages do
expect(rendered).to have_content('Use unique domain')
end
end
+
+ context 'for pages multiple versions' do
+ context 'when current user does not have access to pages multiple versions toggle' do
+ it 'does not show the multiple versions toggle' do
+ allow(view)
+ .to receive(:can?)
+ .with(user, :pages_multiple_versions, project)
+ .and_return(false)
+
+ render
+
+ expect(rendered).not_to have_content('Use multiple versions')
+ end
+ end
+
+ context 'when current user has access to pages multiple versions toggle' do
+ it 'shows the multiple versions toggle' do
+ allow(view)
+ .to receive(:can?)
+ .with(user, :pages_multiple_versions, project)
+ .and_return(true)
+
+ render
+
+ expect(rendered).to have_content('Use multiple versions')
+ end
+ end
+ end
end
diff --git a/spec/views/projects/pipeline_schedules/_pipeline_schedule.html.haml_spec.rb b/spec/views/projects/pipeline_schedules/_pipeline_schedule.html.haml_spec.rb
deleted file mode 100644
index 13ec7207ec9..00000000000
--- a/spec/views/projects/pipeline_schedules/_pipeline_schedule.html.haml_spec.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'projects/pipeline_schedules/_pipeline_schedule' do
- let(:owner) { create(:user) }
- let(:maintainer) { create(:user) }
- let(:project) { create(:project) }
- let(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly, project: project) }
-
- before do
- assign(:project, project)
-
- allow(view).to receive(:current_user).and_return(user)
- allow(view).to receive(:pipeline_schedule).and_return(pipeline_schedule)
-
- allow(view).to receive(:can?).and_return(true)
- end
-
- context 'taking ownership of schedule' do
- context 'when non-owner is signed in' do
- let(:user) { maintainer }
-
- before do
- allow(view).to receive(:can?).with(maintainer, :admin_pipeline_schedule, pipeline_schedule).and_return(true)
- end
-
- it 'non-owner can take ownership of pipeline' do
- render
-
- expect(rendered).to have_button('Take ownership')
- end
- end
-
- context 'when owner is signed in' do
- let(:user) { owner }
-
- before do
- allow(view).to receive(:can?).with(owner, :admin_pipeline_schedule, pipeline_schedule).and_return(false)
- end
-
- it 'owner cannot take ownership of pipeline' do
- render
-
- expect(rendered).not_to have_button('Take ownership')
- end
- end
- end
-end
diff --git a/spec/views/registrations/welcome/show.html.haml_spec.rb b/spec/views/registrations/welcome/show.html.haml_spec.rb
index 4188bd7e956..b652defbd1f 100644
--- a/spec/views/registrations/welcome/show.html.haml_spec.rb
+++ b/spec/views/registrations/welcome/show.html.haml_spec.rb
@@ -17,5 +17,4 @@ RSpec.describe 'registrations/welcome/show', feature_category: :onboarding do
it { is_expected.not_to have_selector('label[for="user_setup_for_company"]') }
it { is_expected.to have_button('Get started!') }
- it { is_expected.not_to have_selector('input[name="user[email_opted_in]"]') }
end
diff --git a/spec/workers/bulk_import_worker_spec.rb b/spec/workers/bulk_import_worker_spec.rb
index ec8550bb3bc..c96e5ace124 100644
--- a/spec/workers/bulk_import_worker_spec.rb
+++ b/spec/workers/bulk_import_worker_spec.rb
@@ -137,5 +137,174 @@ RSpec.describe BulkImportWorker, feature_category: :importers do
end
end
end
+
+ context 'when importing a group' do
+ it 'creates trackers for group entity' do
+ bulk_import = create(:bulk_import)
+ entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
+
+ subject.perform(bulk_import.id)
+
+ expect(entity.trackers.to_a).to include(
+ have_attributes(
+ stage: 0, status_name: :created, relation: BulkImports::Groups::Pipelines::GroupPipeline.to_s
+ ),
+ have_attributes(
+ stage: 1, status_name: :created, relation: BulkImports::Groups::Pipelines::GroupAttributesPipeline.to_s
+ )
+ )
+ end
+ end
+
+ context 'when importing a project' do
+ it 'creates trackers for project entity' do
+ bulk_import = create(:bulk_import)
+ entity = create(:bulk_import_entity, :project_entity, bulk_import: bulk_import)
+
+ subject.perform(bulk_import.id)
+
+ expect(entity.trackers.to_a).to include(
+ have_attributes(
+ stage: 0, status_name: :created, relation: BulkImports::Projects::Pipelines::ProjectPipeline.to_s
+ ),
+ have_attributes(
+ stage: 1, status_name: :created, relation: BulkImports::Projects::Pipelines::RepositoryPipeline.to_s
+ )
+ )
+ end
+ end
+
+ context 'when tracker configuration has a minimum version defined' do
+ before do
+ allow_next_instance_of(BulkImports::Groups::Stage) do |stage|
+ allow(stage).to receive(:config).and_return(
+ {
+ pipeline1: { pipeline: 'PipelineClass1', stage: 0 },
+ pipeline2: { pipeline: 'PipelineClass2', stage: 1, minimum_source_version: '14.10.0' },
+ pipeline3: { pipeline: 'PipelineClass3', stage: 1, minimum_source_version: '15.0.0' },
+ pipeline5: { pipeline: 'PipelineClass4', stage: 1, minimum_source_version: '15.1.0' },
+ pipeline6: { pipeline: 'PipelineClass5', stage: 1, minimum_source_version: '16.0.0' }
+ }
+ )
+ end
+ end
+
+ context 'when the source instance version is older than the tracker minimum version' do
+ let_it_be(:bulk_import) { create(:bulk_import, source_version: '15.0.0') }
+ let_it_be(:entity) { create(:bulk_import_entity, :group_entity, bulk_import: bulk_import) }
+
+ it 'creates trackers as skipped if the version requirement is not met' do
+ subject.perform(bulk_import.id)
+
+ expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
+ [:created, 'PipelineClass1'],
+ [:created, 'PipelineClass2'],
+ [:created, 'PipelineClass3'],
+ [:skipped, 'PipelineClass4'],
+ [:skipped, 'PipelineClass5']
+ )
+ end
+
+ it 'logs an info message for the skipped pipelines' do
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger).to receive(:info).with({
+ message: 'Pipeline skipped as source instance version not compatible with pipeline',
+ bulk_import_entity_id: entity.id,
+ bulk_import_id: entity.bulk_import_id,
+ bulk_import_entity_type: entity.source_type,
+ source_full_path: entity.source_full_path,
+ importer: 'gitlab_migration',
+ pipeline_name: 'PipelineClass4',
+ minimum_source_version: '15.1.0',
+ maximum_source_version: nil,
+ source_version: '15.0.0'
+ })
+
+ expect(logger).to receive(:info).with({
+ message: 'Pipeline skipped as source instance version not compatible with pipeline',
+ bulk_import_entity_id: entity.id,
+ bulk_import_id: entity.bulk_import_id,
+ bulk_import_entity_type: entity.source_type,
+ source_full_path: entity.source_full_path,
+ importer: 'gitlab_migration',
+ pipeline_name: 'PipelineClass5',
+ minimum_source_version: '16.0.0',
+ maximum_source_version: nil,
+ source_version: '15.0.0'
+ })
+ end
+
+ subject.perform(bulk_import.id)
+ end
+ end
+
+ context 'when the source instance version is undefined' do
+ it 'creates trackers as created' do
+ bulk_import = create(:bulk_import, source_version: nil)
+ entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
+
+ subject.perform(bulk_import.id)
+
+ expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
+ [:created, 'PipelineClass1'],
+ [:created, 'PipelineClass2'],
+ [:created, 'PipelineClass3'],
+ [:created, 'PipelineClass4'],
+ [:created, 'PipelineClass5']
+ )
+ end
+ end
+ end
+
+ context 'when tracker configuration has a maximum version defined' do
+ before do
+ allow_next_instance_of(BulkImports::Groups::Stage) do |stage|
+ allow(stage).to receive(:config).and_return(
+ {
+ pipeline1: { pipeline: 'PipelineClass1', stage: 0 },
+ pipeline2: { pipeline: 'PipelineClass2', stage: 1, maximum_source_version: '14.10.0' },
+ pipeline3: { pipeline: 'PipelineClass3', stage: 1, maximum_source_version: '15.0.0' },
+ pipeline5: { pipeline: 'PipelineClass4', stage: 1, maximum_source_version: '15.1.0' },
+ pipeline6: { pipeline: 'PipelineClass5', stage: 1, maximum_source_version: '16.0.0' }
+ }
+ )
+ end
+ end
+
+ context 'when the source instance version is newer than the tracker maximum version' do
+ it 'creates trackers as skipped if the version requirement is not met' do
+ bulk_import = create(:bulk_import, source_version: '15.0.0')
+ entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
+
+ subject.perform(bulk_import.id)
+
+ expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
+ [:created, 'PipelineClass1'],
+ [:skipped, 'PipelineClass2'],
+ [:created, 'PipelineClass3'],
+ [:created, 'PipelineClass4'],
+ [:created, 'PipelineClass5']
+ )
+ end
+ end
+
+ context 'when the source instance version is a patch version' do
+ it 'creates trackers with the same status as the non-patch source version' do
+ bulk_import_1 = create(:bulk_import, source_version: '15.0.1')
+ entity_1 = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import_1)
+
+ bulk_import_2 = create(:bulk_import, source_version: '15.0.0')
+ entity_2 = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import_2)
+
+ described_class.perform_inline(bulk_import_1.id)
+ described_class.perform_inline(bulk_import_2.id)
+
+ trackers_1 = entity_1.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }
+ trackers_2 = entity_2.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }
+
+ expect(trackers_1).to eq(trackers_2)
+ end
+ end
+ end
end
end
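The new bulk import examples above check version gating: a pipeline tracker is created only when the source instance version satisfies the pipeline's minimum_source_version and maximum_source_version, with patch releases compared as their major.minor version. A rough sketch of that comparison under those assumptions (this is an illustration, not the worker's actual implementation):

# Illustrative version gating; truncates to major.minor so 15.0.1 behaves like 15.0.0.
def truncated(version)
  Gem::Version.new(Gem::Version.new(version).segments.first(2).join('.'))
end

def tracker_status(source_version, minimum: nil, maximum: nil)
  return :created if source_version.nil? # an undefined source version is never skipped

  version = truncated(source_version)
  return :skipped if minimum && version < truncated(minimum)
  return :skipped if maximum && version > truncated(maximum)

  :created
end

tracker_status('15.0.0', minimum: '15.1.0')  # => :skipped
tracker_status('15.0.1', maximum: '14.10.0') # => :skipped
tracker_status(nil, minimum: '16.0.0')       # => :created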
diff --git a/spec/workers/bulk_imports/finish_project_import_worker_spec.rb b/spec/workers/bulk_imports/finish_project_import_worker_spec.rb
new file mode 100644
index 00000000000..3f5f8477667
--- /dev/null
+++ b/spec/workers/bulk_imports/finish_project_import_worker_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::FinishProjectImportWorker, feature_category: :importers do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:job_args) { [project.id] }
+
+ describe '#perform' do
+ it_behaves_like 'an idempotent worker' do
+ it 'calls after_import for the project' do
+ expect_next_found_instance_of(Project) do |project|
+ expect(project).to receive(:after_import)
+ end
+
+ described_class.new.perform(project.id)
+ end
+
+ context 'when no project is found' do
+ let(:job_args) { nil }
+
+ it 'returns without error' do
+ expect { described_class.new.perform(project.id) }.not_to raise_error
+ end
+ end
+ end
+ end
+end
diff --git a/spec/workers/bulk_imports/pipeline_batch_worker_spec.rb b/spec/workers/bulk_imports/pipeline_batch_worker_spec.rb
index c10e1b486ab..3c33910b62c 100644
--- a/spec/workers/bulk_imports/pipeline_batch_worker_spec.rb
+++ b/spec/workers/bulk_imports/pipeline_batch_worker_spec.rb
@@ -73,6 +73,16 @@ RSpec.describe BulkImports::PipelineBatchWorker, feature_category: :importers do
end
end
+ context 'when batch status is started' do
+ let(:batch) { create(:bulk_import_batch_tracker, :started, tracker: tracker) }
+
+ it 'runs the given pipeline batch successfully' do
+ subject.perform(batch.id)
+
+ expect(batch.reload).to be_finished
+ end
+ end
+
context 'when exclusive lease cannot be obtained' do
it 'does not run the pipeline' do
expect(subject).to receive(:try_obtain_lease).and_return(false)
diff --git a/spec/workers/click_house/events_sync_worker_spec.rb b/spec/workers/click_house/events_sync_worker_spec.rb
index 8f328839cfd..01267db36a7 100644
--- a/spec/workers/click_house/events_sync_worker_spec.rb
+++ b/spec/workers/click_house/events_sync_worker_spec.rb
@@ -3,48 +3,125 @@
require 'spec_helper'
RSpec.describe ClickHouse::EventsSyncWorker, feature_category: :value_stream_management do
- let(:databases) { { main: :some_db } }
let(:worker) { described_class.new }
- before do
- allow(ClickHouse::Client.configuration).to receive(:databases).and_return(databases)
- end
-
- include_examples 'an idempotent worker' do
- context 'when the event_sync_worker_for_click_house feature flag is on' do
+ it_behaves_like 'an idempotent worker' do
+ context 'when the event_sync_worker_for_click_house feature flag is on', :click_house do
before do
stub_feature_flags(event_sync_worker_for_click_house: true)
end
- it 'returns true' do
- expect(worker).to receive(:log_extra_metadata_on_done).with(:result, { status: :processed })
+ context 'when there is nothing to sync' do
+ it 'adds metadata for the worker' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:result,
+ { status: :processed, records_inserted: 0, reached_end_of_table: true })
- worker.perform
+ worker.perform
+
+ events = ClickHouse::Client.select('SELECT * FROM events', :main)
+ expect(events).to be_empty
+ end
end
- context 'when no ClickHouse databases are configured' do
- let(:databases) { {} }
+ context 'when syncing records' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:project_event2) { create(:event, :closed, project: project, target: issue) }
+ let_it_be(:event_without_parent) { create(:event, :joined, project: nil, group: nil) }
+ let_it_be(:group_event) { create(:event, :created, group: group, project: nil) }
+ let_it_be(:project_event1) { create(:event, :created, project: project, target: issue) }
+ # looks invalid but we have some records like this on PRD
- it 'skips execution' do
- expect(worker).to receive(:log_extra_metadata_on_done).with(:result, { status: :disabled })
+ it 'inserts all records' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:result,
+ { status: :processed, records_inserted: 4, reached_end_of_table: true })
worker.perform
+
+ expected_records = [
+ hash_including('id' => project_event2.id, 'path' => "#{group.id}/#{project.project_namespace.id}/",
+ 'target_type' => 'Issue'),
+ hash_including('id' => event_without_parent.id, 'path' => '', 'target_type' => ''),
+ hash_including('id' => group_event.id, 'path' => "#{group.id}/", 'target_type' => ''),
+ hash_including('id' => project_event1.id, 'path' => "#{group.id}/#{project.project_namespace.id}/",
+ 'target_type' => 'Issue')
+ ]
+
+ events = ClickHouse::Client.select('SELECT * FROM events ORDER BY id', :main)
+
+ expect(events).to match(expected_records)
+
+ last_processed_id = ClickHouse::SyncCursor.cursor_for(:events)
+ expect(last_processed_id).to eq(project_event1.id)
end
- end
- context 'when exclusive lease error happens' do
- it 'skips execution' do
- expect(worker).to receive(:in_lock).and_raise(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
- expect(worker).to receive(:log_extra_metadata_on_done).with(:result, { status: :skipped })
+ context 'when multiple batches are needed' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ stub_const("#{described_class}::INSERT_BATCH_SIZE", 1)
+ end
- worker.perform
+ it 'inserts all records' do
+ worker.perform
+
+ events = ClickHouse::Client.select('SELECT * FROM events', :main)
+ expect(events.size).to eq(4)
+ end
+ end
+
+ context 'when time limit is reached' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ end
+
+ it 'stops the processing' do
+ allow_next_instance_of(Analytics::CycleAnalytics::RuntimeLimiter) do |runtime_limiter|
+ allow(runtime_limiter).to receive(:over_time?).and_return(false, true)
+ end
+
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:result,
+ { status: :processed, records_inserted: 2, reached_end_of_table: false })
+
+ worker.perform
+
+ last_processed_id = ClickHouse::SyncCursor.cursor_for(:events)
+ expect(last_processed_id).to eq(event_without_parent.id)
+ end
+ end
+
+ context 'when syncing from a certain point' do
+ before do
+ ClickHouse::SyncCursor.update_cursor_for(:events, project_event2.id)
+ end
+
+ it 'syncs records after the cursor' do
+ worker.perform
+
+ events = ClickHouse::Client.select('SELECT id FROM events ORDER BY id', :main)
+ expect(events).to eq([{ 'id' => event_without_parent.id }, { 'id' => group_event.id },
+ { 'id' => project_event1.id }])
+ end
+
+ context 'when there is nothing to sync' do
+ it 'does nothing' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:result,
+ { status: :processed, records_inserted: 0, reached_end_of_table: true })
+
+ ClickHouse::SyncCursor.update_cursor_for(:events, project_event1.id)
+ worker.perform
+
+ events = ClickHouse::Client.select('SELECT id FROM events ORDER BY id', :main)
+ expect(events).to be_empty
+ end
+ end
end
end
end
- context 'when the event_sync_worker_for_click_house feature flag is off' do
+ context 'when clickhouse is not configured' do
before do
- stub_feature_flags(event_sync_worker_for_click_house: false)
+ allow(ClickHouse::Client.configuration).to receive(:databases).and_return({})
end
it 'skips execution' do
@@ -54,4 +131,28 @@ RSpec.describe ClickHouse::EventsSyncWorker, feature_category: :value_stream_man
end
end
end
+
+ context 'when exclusive lease error happens' do
+ it 'skips execution' do
+ stub_feature_flags(event_sync_worker_for_click_house: true)
+ allow(ClickHouse::Client.configuration).to receive(:databases).and_return({ main: :some_db })
+
+ expect(worker).to receive(:in_lock).and_raise(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:result, { status: :skipped })
+
+ worker.perform
+ end
+ end
+
+ context 'when the event_sync_worker_for_click_house feature flag is off' do
+ before do
+ stub_feature_flags(event_sync_worker_for_click_house: false)
+ end
+
+ it 'skips execution' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:result, { status: :disabled })
+
+ worker.perform
+ end
+ end
end
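The reworked ClickHouse sync spec above exercises a cursor-driven loop: each run reads events with an id greater than the stored cursor, inserts them in batches, advances the cursor, and stops when the table is exhausted or the runtime limit is hit. A rough sketch of that loop, reusing the SyncCursor calls from the spec; fetch_events_after and insert_events are hypothetical helpers:

# Hedged sketch of cursor-based batch syncing; not the worker's real implementation.
BATCH_SIZE = 500

def sync_events
  inserted = 0
  cursor = ClickHouse::SyncCursor.cursor_for(:events)

  loop do
    batch = fetch_events_after(cursor, limit: BATCH_SIZE) # hypothetical fetch helper
    break if batch.empty?

    insert_events(batch) # hypothetical ClickHouse insert helper
    cursor = batch.last.id
    ClickHouse::SyncCursor.update_cursor_for(:events, cursor)
    inserted += batch.size
  end

  { status: :processed, records_inserted: inserted, reached_end_of_table: true }
end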
diff --git a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
index 3b7bbfc8a7b..27e1077b138 100644
--- a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
@@ -273,7 +273,8 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
.to receive(:notify)
.with(
job['args'].last,
- job['jid']
+ job['jid'],
+ ttl: Gitlab::Import::JOB_WAITER_TTL
)
sidekiq_retries_exhausted
diff --git a/spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb b/spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb
index 6475be0243c..c76ce6b555f 100644
--- a/spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb
@@ -110,7 +110,7 @@ RSpec.describe Gitlab::GithubImport::ReschedulingMethods, feature_category: :imp
expect(Gitlab::JobWaiter)
.to receive(:notify)
- .with('123', 'abc123')
+ .with('123', 'abc123', ttl: Gitlab::Import::JOB_WAITER_TTL)
worker.notify_waiter('123')
end
diff --git a/spec/workers/concerns/gitlab/notify_upon_death_spec.rb b/spec/workers/concerns/gitlab/import/notify_upon_death_spec.rb
index 36faf3ee296..1f760e8542b 100644
--- a/spec/workers/concerns/gitlab/notify_upon_death_spec.rb
+++ b/spec/workers/concerns/gitlab/import/notify_upon_death_spec.rb
@@ -2,11 +2,11 @@
require 'spec_helper'
-RSpec.describe Gitlab::NotifyUponDeath, feature_category: :shared do
+RSpec.describe Gitlab::Import::NotifyUponDeath, feature_category: :importers do
let(:worker_class) do
Class.new do
include Sidekiq::Worker
- include Gitlab::NotifyUponDeath
+ include Gitlab::Import::NotifyUponDeath
end
end
@@ -16,13 +16,13 @@ RSpec.describe Gitlab::NotifyUponDeath, feature_category: :shared do
expect(Gitlab::JobWaiter)
.to receive(:notify)
- .with('123abc', '123')
+ .with('123abc', '123', ttl: Gitlab::Import::JOB_WAITER_TTL)
worker_class.sidekiq_retries_exhausted_block.call(job)
end
it 'does not notify the JobWaiter when only 2 arguments are given' do
- job = { 'args' => [12, {}], 'jid' => '123' }
+ job = { 'args' => [12, '123abc'], 'jid' => '123' }
expect(Gitlab::JobWaiter)
.not_to receive(:notify)
@@ -31,7 +31,7 @@ RSpec.describe Gitlab::NotifyUponDeath, feature_category: :shared do
end
it 'does not notify the JobWaiter when only 1 argument is given' do
- job = { 'args' => [12], 'jid' => '123' }
+ job = { 'args' => ['123abc'], 'jid' => '123' }
expect(Gitlab::JobWaiter)
.not_to receive(:notify)
diff --git a/spec/workers/concerns/limited_capacity/worker_spec.rb b/spec/workers/concerns/limited_capacity/worker_spec.rb
index 65906eef0fa..8092adec3b9 100644
--- a/spec/workers/concerns/limited_capacity/worker_spec.rb
+++ b/spec/workers/concerns/limited_capacity/worker_spec.rb
@@ -57,10 +57,26 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
it 'enqueues jobs' do
expect(worker_class)
.to receive(:bulk_perform_async)
- .with([[:arg], [:arg], [:arg]])
+ .with([[:arg], [:arg], [:arg]]).and_call_original
+
+ expect(Sidekiq::Client).to receive(:push_bulk)
perform_with_capacity
end
+
+ context 'when max_running_jobs is 0' do
+ let(:max_running_jobs) { 0 }
+
+ it 'does not enqueue jobs' do
+ expect(worker_class)
+ .to receive(:bulk_perform_async)
+ .with([]).and_call_original
+
+ expect(Sidekiq::Client).not_to receive(:push_bulk)
+
+ perform_with_capacity
+ end
+ end
end
describe '#perform' do
diff --git a/spec/workers/database/lock_tables_worker_spec.rb b/spec/workers/database/lock_tables_worker_spec.rb
new file mode 100644
index 00000000000..cb720959db0
--- /dev/null
+++ b/spec/workers/database/lock_tables_worker_spec.rb
@@ -0,0 +1,136 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Database::LockTablesWorker, feature_category: :cell do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:worker) { described_class.new }
+ let(:exception_class) { described_class::TableShouldNotBeLocked }
+
+ describe '#perform' do
+ context 'when running with single database' do # this covers both single-db and single-db-ci-connection cases
+ before do
+ skip_if_database_exists(:ci)
+ end
+
+ it 'raises an exception' do
+ expect do
+ worker.perform('ci', %w[ci_pipelines])
+ end.to raise_error(exception_class, 'GitLab is not running in multiple database mode')
+ end
+ end
+
+ context 'when running in decomposed database' do
+ before do
+ skip_if_shared_database(:ci)
+ end
+
+ context 'when the table is wrong' do
+ context 'when trying to lock tables on an unknown database' do
+ it 'raises an exception' do
+ expect do
+ worker.perform('foobar', %w[ci_pipelines])
+ end.to raise_error(exception_class, /does not support locking writes on tables/)
+ end
+ end
+
+ context 'when trying to lock tables on the database that does not support locking' do
+ it 'raises an exception' do
+ expect do
+ worker.perform('geo', %w[ci_pipelines]) # the geo database does not support locking writes
+ end.to raise_error(exception_class, /does not support locking writes on tables/)
+ end
+ end
+
+ context 'when trying to lock tables on the wrong database' do
+ it 'raises an exception' do
+ expect do
+ worker.perform('ci', %w[ci_pipelines]) # ci tables should be locked only on main
+ end.to raise_error(exception_class, "table 'ci_pipelines' should not be locked on the database 'ci'")
+ end
+ end
+
+ context 'when trying to lock shared tables on the database' do
+ it 'raises an exception' do
+ expect do
+ worker.perform('main', %w[loose_foreign_keys_deleted_records])
+ end.to raise_error(exception_class, /should not be locked on the database 'main'/)
+ end
+ end
+ end
+
+ context 'when the table is correct' do
+ context 'when the table is not locked for writes' do
+ where(:database_name, :tables) do
+ :ci | %w[users namespaces]
+ :main | %w[ci_pipelines ci_builds]
+ end
+
+ with_them do
+ it 'locks the tables on the corresponding database' do
+ tables.each do |table_name|
+ unlock_table(database_name, table_name)
+ expect(lock_writes_manager(database_name, table_name).table_locked_for_writes?).to eq(false)
+ end
+
+ expected_log_results = tables.map do |table_name|
+ { action: "locked", database: database_name, dry_run: false, table: table_name }
+ end
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:performed_actions, expected_log_results)
+
+ worker.perform(database_name, tables)
+ tables.each do |table_name|
+ expect(lock_writes_manager(database_name, table_name).table_locked_for_writes?).to eq(true)
+ end
+ end
+ end
+
+ context 'when the table is already locked for writes' do
+ where(:database_name, :tables) do
+ :ci | %w[users namespaces]
+ :main | %w[ci_pipelines ci_builds]
+ end
+
+ with_them do
+ it 'skips locking the tables on the corresponding database' do
+ tables.each do |table_name|
+ lock_table(database_name, table_name)
+ end
+
+ expected_log_results = tables.map do |table_name|
+ { action: 'skipped', database: database_name, dry_run: false, table: table_name }
+ end
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:performed_actions, expected_log_results)
+
+ worker.perform(database_name, tables)
+ tables.each do |table_name|
+ expect(lock_writes_manager(database_name, table_name).table_locked_for_writes?).to eq(true)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ def lock_table(database_name, table_name)
+ lock_writes_manager(database_name, table_name).lock_writes
+ end
+
+ def unlock_table(database_name, table_name)
+ lock_writes_manager(database_name, table_name).unlock_writes
+ end
+
+ def lock_writes_manager(database_name, table_name)
+ connection = Gitlab::Database.database_base_models_with_gitlab_shared[database_name].connection
+ Gitlab::Database::LockWritesManager.new(
+ table_name: table_name,
+ connection: connection,
+ database_name: database_name,
+ with_retries: false,
+ dry_run: false
+ )
+ end
+end
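The helpers at the bottom of the new spec spell out the full LockWritesManager constructor. A sketch of locking a single table by hand under the same assumptions (decomposed main/ci databases, no retries, no dry run):

    # Lock writes to a ci table on the main database, mirroring the spec helpers above.
    connection = Gitlab::Database.database_base_models_with_gitlab_shared[:main].connection

    Gitlab::Database::LockWritesManager.new(
      table_name: 'ci_pipelines',
      connection: connection,
      database_name: :main,
      with_retries: false,
      dry_run: false
    ).lock_writes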
diff --git a/spec/workers/database/monitor_locked_tables_worker_spec.rb b/spec/workers/database/monitor_locked_tables_worker_spec.rb
index 47475a0ad4a..7e900259265 100644
--- a/spec/workers/database/monitor_locked_tables_worker_spec.rb
+++ b/spec/workers/database/monitor_locked_tables_worker_spec.rb
@@ -19,6 +19,10 @@ RSpec.describe Database::MonitorLockedTablesWorker, feature_category: :cell do
end
context 'when running in decomposed database' do
+ before do
+ skip_if_shared_database(:ci)
+ end
+
context 'when the feature flag is disabled' do
before do
stub_feature_flags(monitor_database_locked_tables: false)
@@ -32,7 +36,6 @@ RSpec.describe Database::MonitorLockedTablesWorker, feature_category: :cell do
context 'when the feature flag is enabled' do
before do
- skip_if_shared_database(:ci)
stub_feature_flags(monitor_database_locked_tables: true)
allow(Gitlab::Database::TablesLocker).to receive(:new).and_return(tables_locker)
end
@@ -73,6 +76,56 @@ RSpec.describe Database::MonitorLockedTablesWorker, feature_category: :cell do
worker.perform
end
+
+ context 'with automatically locking the unlocked tables' do
+ context 'when there are no tables to be locked' do
+ before do
+ stub_feature_flags(lock_tables_in_monitoring: true)
+ allow(tables_locker).to receive(:lock_writes).and_return([])
+ end
+
+ it 'does not call the Database::LockTablesWorker' do
+ expect(Database::LockTablesWorker).not_to receive(:perform_async)
+
+ worker.perform
+ end
+ end
+
+ context 'when there are tables to be locked' do
+ before do
+ lock_writes_results = [
+ { table: 'users', database: 'ci', action: 'needs_lock' },
+ { table: 'projects', database: 'ci', action: 'needs_lock' },
+ { table: 'ci_builds', database: 'main', action: 'needs_lock' },
+ { table: 'ci_pipelines', database: 'main', action: 'skipped' }
+ ]
+ allow(tables_locker).to receive(:lock_writes).and_return(lock_writes_results)
+ end
+
+ context 'when feature flag lock_tables_in_monitoring is enabled' do
+ before do
+ stub_feature_flags(lock_tables_in_monitoring: true)
+ end
+
+ it 'locks the tables that need to be locked' do
+ expect(Database::LockTablesWorker).to receive(:perform_async).once.with('ci', %w[users projects])
+ expect(Database::LockTablesWorker).to receive(:perform_async).once.with('main', %w[ci_builds])
+
+ worker.perform
+ end
+ end
+
+ context 'when feature flag lock_tables_in_monitoring is disabled' do
+ before do
+ stub_feature_flags(lock_tables_in_monitoring: false)
+ end
+
+ it 'does not lock the tables that need to be locked' do
+ expect(Database::LockTablesWorker).not_to receive(:perform_async)
+
+ worker.perform
+ end
+ end
+ end
+ end
end
end
end
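The new examples expect the monitoring worker to turn 'needs_lock' rows into one Database::LockTablesWorker job per database. A sketch of the grouping those expectations imply, using the stubbed lock_writes_results shape from above; the worker's real internals may differ:

    results = [
      { table: 'users',        database: 'ci',   action: 'needs_lock' },
      { table: 'ci_builds',    database: 'main', action: 'needs_lock' },
      { table: 'ci_pipelines', database: 'main', action: 'skipped' }
    ]

    results
      .select { |row| row[:action] == 'needs_lock' }
      .group_by { |row| row[:database] }
      .each do |database, rows|
        Database::LockTablesWorker.perform_async(database, rows.map { |row| row[:table] })
      end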
diff --git a/spec/workers/environments/stop_job_success_worker_spec.rb b/spec/workers/environments/stop_job_success_worker_spec.rb
index 3a2db8cfb77..df0acf46bd9 100644
--- a/spec/workers/environments/stop_job_success_worker_spec.rb
+++ b/spec/workers/environments/stop_job_success_worker_spec.rb
@@ -4,39 +4,54 @@ require 'spec_helper'
RSpec.describe Environments::StopJobSuccessWorker, feature_category: :continuous_delivery do
describe '#perform' do
- subject { described_class.new.perform(build.id) }
+ let_it_be_with_refind(:environment) { create(:environment, state: :available) }
- context 'when build exists' do
- context 'when the build will stop an environment' do
- let!(:build) { create(:ci_build, :stop_review_app, environment: environment.name, project: environment.project, status: :success) } # rubocop:disable Layout/LineLength
- let(:environment) { create(:environment, state: :available) }
+ subject { described_class.new.perform(job.id) }
- it 'stops the environment' do
+ shared_examples_for 'stopping an associated environment' do
+ it 'stops the environment' do
+ expect(environment).to be_available
+
+ subject
+
+ expect(environment.reload).to be_stopped
+ end
+
+ context 'when the job fails' do
+ before do
+ job.update!(status: :failed)
+ environment.update!(state: :available)
+ end
+
+ it 'does not stop the environment' do
expect(environment).to be_available
subject
- expect(environment.reload).to be_stopped
+ expect(environment.reload).not_to be_stopped
end
+ end
+ end
- context 'when the build fails' do
- before do
- build.update!(status: :failed)
- environment.update!(state: :available)
- end
-
- it 'does not stop the environment' do
- expect(environment).to be_available
+ context 'with build job' do
+ let!(:job) do
+ create(:ci_build, :stop_review_app, environment: environment.name, project: environment.project,
+ status: :success)
+ end
- subject
+ it_behaves_like 'stopping an associated environment'
+ end
- expect(environment.reload).not_to be_stopped
- end
- end
+ context 'with bridge job' do
+ let!(:job) do
+ create(:ci_bridge, :stop_review_app, environment: environment.name, project: environment.project,
+ status: :success)
end
+
+ it_behaves_like 'stopping an associated environment'
end
- context 'when build does not exist' do
+ context 'when job does not exist' do
it 'does not raise exception' do
expect { described_class.new.perform(123) }
.not_to raise_error
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 3cd030e678d..9a94a836d60 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -142,6 +142,7 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'BulkImports::EntityWorker' => false,
'BulkImports::PipelineWorker' => false,
'BulkImports::PipelineBatchWorker' => false,
+ 'BulkImports::FinishProjectImportWorker' => 5,
'Chaos::CpuSpinWorker' => 3,
'Chaos::DbSpinWorker' => 3,
'Chaos::KillWorker' => false,
@@ -194,6 +195,7 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'CreateGithubWebhookWorker' => 3,
'CreateNoteDiffFileWorker' => 3,
'CreatePipelineWorker' => 3,
+ 'Database::LockTablesWorker' => false,
'Database::BatchedBackgroundMigration::CiExecutionWorker' => 0,
'Database::BatchedBackgroundMigration::MainExecutionWorker' => 0,
'DeleteContainerRepositoryWorker' => 3,
@@ -233,8 +235,6 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'Geo::Batch::ProjectRegistrySchedulerWorker' => 3,
'Geo::Batch::ProjectRegistryWorker' => 3,
'Geo::ContainerRepositorySyncWorker' => 1,
- 'Geo::DesignRepositoryShardSyncWorker' => false,
- 'Geo::DesignRepositorySyncWorker' => 1,
'Geo::DestroyWorker' => 3,
'Geo::EventWorker' => 3,
'Geo::FileRemovalWorker' => 3,
@@ -247,6 +247,7 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'Geo::RepositoryVerification::Secondary::SingleWorker' => false,
'Geo::ReverificationBatchWorker' => 0,
'Geo::BulkMarkPendingBatchWorker' => 0,
+ 'Geo::BulkMarkVerificationPendingBatchWorker' => 0,
'Geo::Scheduler::Primary::SchedulerWorker' => false,
'Geo::Scheduler::SchedulerWorker' => false,
'Geo::Scheduler::Secondary::SchedulerWorker' => false,
@@ -255,6 +256,10 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'Geo::VerificationTimeoutWorker' => false,
'Geo::VerificationWorker' => 3,
'GeoRepositoryDestroyWorker' => 3,
+ 'Gitlab::BitbucketImport::AdvanceStageWorker' => 3,
+ 'Gitlab::BitbucketImport::Stage::FinishImportWorker' => 3,
+ 'Gitlab::BitbucketImport::Stage::ImportPullRequestsWorker' => 3,
+ 'Gitlab::BitbucketImport::Stage::ImportRepositoryWorker' => 3,
'Gitlab::BitbucketServerImport::AdvanceStageWorker' => 3,
'Gitlab::BitbucketServerImport::Stage::FinishImportWorker' => 3,
'Gitlab::BitbucketServerImport::Stage::ImportLfsObjectsWorker' => 3,
@@ -342,6 +347,9 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'JiraConnect::SyncProjectWorker' => 3,
'LdapGroupSyncWorker' => 3,
'Licenses::ResetSubmitLicenseUsageDataBannerWorker' => 13,
+ 'Llm::Embedding::GitlabDocumentation::SetEmbeddingsOnTheRecordWorker' => 5,
+ 'Llm::Embedding::GitlabDocumentation::CreateEmptyEmbeddingsRecordsWorker' => 3,
+ 'Llm::Embedding::GitlabDocumentation::CreateDbEmbeddingsPerDocFileWorker' => 5,
'Llm::TanukiBot::UpdateWorker' => 1,
'Llm::TanukiBot::RecreateRecordsWorker' => 3,
'MailScheduler::IssueDueWorker' => 3,
@@ -366,10 +374,8 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'Onboarding::PipelineCreatedWorker' => 3,
'Onboarding::ProgressWorker' => 3,
'Onboarding::UserAddedWorker' => 3,
- 'Namespaces::FreeUserCap::OverLimitNotificationWorker' => false,
'Namespaces::RootStatisticsWorker' => 3,
'Namespaces::ScheduleAggregationWorker' => 3,
- 'Namespaces::FreeUserCap::NotificationClearingWorker' => false,
'NewEpicWorker' => 3,
'NewIssueWorker' => 3,
'NewMergeRequestWorker' => 3,
diff --git a/spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb b/spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb
new file mode 100644
index 00000000000..16e3a3dc481
--- /dev/null
+++ b/spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb
@@ -0,0 +1,115 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::AdvanceStageWorker, :clean_gitlab_redis_shared_state, feature_category: :importers do
+ let(:project) { create(:project) }
+ let(:import_state) { create(:import_state, project: project, jid: '123') }
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ context 'when the project no longer exists' do
+ it 'does not perform any work' do
+ expect(worker).not_to receive(:wait_for_jobs)
+
+ worker.perform(-1, { '123' => 2 }, :finish)
+ end
+ end
+
+ context 'when there are remaining jobs' do
+ before do
+ allow(worker)
+ .to receive(:find_import_state)
+ .and_return(import_state)
+ end
+
+ it 'reschedules itself' do
+ expect(worker)
+ .to receive(:wait_for_jobs)
+ .with({ '123' => 2 })
+ .and_return({ '123' => 1 })
+
+ expect(described_class)
+ .to receive(:perform_in)
+ .with(described_class::INTERVAL, project.id, { '123' => 1 }, :finish)
+
+ worker.perform(project.id, { '123' => 2 }, :finish)
+ end
+ end
+
+ context 'when there are no remaining jobs' do
+ before do
+ allow(worker)
+ .to receive(:find_import_state)
+ .and_return(import_state)
+
+ allow(worker)
+ .to receive(:wait_for_jobs)
+ .with({ '123' => 2 })
+ .and_return({})
+ end
+
+ it 'schedules the next stage' do
+ expect(import_state)
+ .to receive(:refresh_jid_expiration)
+
+ expect(Gitlab::BitbucketImport::Stage::FinishImportWorker)
+ .to receive(:perform_async)
+ .with(project.id)
+
+ worker.perform(project.id, { '123' => 2 }, :finish)
+ end
+
+ it 'raises KeyError when the stage name is invalid' do
+ expect { worker.perform(project.id, { '123' => 2 }, :kittens) }
+ .to raise_error(KeyError)
+ end
+ end
+ end
+
+ describe '#wait_for_jobs' do
+ it 'waits for jobs to complete and returns a new pair of keys to wait for' do
+ waiter1 = instance_double(Gitlab::JobWaiter, jobs_remaining: 1, key: '123')
+ waiter2 = instance_double(Gitlab::JobWaiter, jobs_remaining: 0, key: '456')
+
+ expect(Gitlab::JobWaiter)
+ .to receive(:new)
+ .ordered
+ .with(2, '123')
+ .and_return(waiter1)
+
+ expect(Gitlab::JobWaiter)
+ .to receive(:new)
+ .ordered
+ .with(1, '456')
+ .and_return(waiter2)
+
+ expect(waiter1)
+ .to receive(:wait)
+ .with(described_class::BLOCKING_WAIT_TIME)
+
+ expect(waiter2)
+ .to receive(:wait)
+ .with(described_class::BLOCKING_WAIT_TIME)
+
+ new_waiters = worker.wait_for_jobs({ '123' => 2, '456' => 1 })
+
+ expect(new_waiters).to eq({ '123' => 1 })
+ end
+ end
+
+ describe '#find_import_state' do
+ it 'returns a ProjectImportState' do
+ import_state.update_column(:status, 'started')
+
+ found = worker.find_import_state(project.id)
+
+ expect(found).to be_an_instance_of(ProjectImportState)
+ expect(found.attributes.keys).to match_array(%w[id jid])
+ end
+
+ it 'returns nil if the project import is not running' do
+ expect(worker.find_import_state(project.id)).to be_nil
+ end
+ end
+end
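The Bitbucket AdvanceStageWorker spec mirrors the GitHub one that is removed further down. A short usage sketch of #wait_for_jobs as described here: each entry maps a Gitlab::JobWaiter key to the number of outstanding jobs, and keys whose counter drops to zero are omitted from the returned hash.

    worker = Gitlab::BitbucketImport::AdvanceStageWorker.new

    # In the spec the JobWaiter instances are doubles; a real call blocks for up to
    # BLOCKING_WAIT_TIME per key before returning the still-pending waiters.
    remaining = worker.wait_for_jobs({ '123' => 2, '456' => 1 })
    # => { '123' => 1 } when one job keyed by '123' is still running and '456' has finished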
diff --git a/spec/workers/gitlab/bitbucket_import/import_pull_request_worker_spec.rb b/spec/workers/gitlab/bitbucket_import/import_pull_request_worker_spec.rb
new file mode 100644
index 00000000000..082499be515
--- /dev/null
+++ b/spec/workers/gitlab/bitbucket_import/import_pull_request_worker_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::ImportPullRequestWorker, feature_category: :importers do
+ subject(:worker) { described_class.new }
+
+ it_behaves_like Gitlab::BitbucketImport::ObjectImporter
+end
diff --git a/spec/workers/gitlab/bitbucket_import/stage/finish_import_worker_spec.rb b/spec/workers/gitlab/bitbucket_import/stage/finish_import_worker_spec.rb
new file mode 100644
index 00000000000..11baa58f1ab
--- /dev/null
+++ b/spec/workers/gitlab/bitbucket_import/stage/finish_import_worker_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::Stage::FinishImportWorker, feature_category: :importers do
+ let_it_be(:project) { create(:project, :import_started) }
+
+ subject(:worker) { described_class.new }
+
+ it_behaves_like Gitlab::BitbucketImport::StageMethods
+
+ it 'does not abort on failure' do
+ expect(worker.abort_on_failure).to be_falsey
+ end
+
+ describe '#perform' do
+ it 'finalises the import process' do
+ expect_next_instance_of(Gitlab::Import::Metrics, :bitbucket_importer, project) do |metric|
+ expect(metric).to receive(:track_finished_import)
+ end
+
+ worker.perform(project.id)
+
+ expect(project.import_state.reload).to be_finished
+ end
+ end
+end
diff --git a/spec/workers/gitlab/bitbucket_import/stage/import_pull_requests_worker_spec.rb b/spec/workers/gitlab/bitbucket_import/stage/import_pull_requests_worker_spec.rb
new file mode 100644
index 00000000000..8f425066160
--- /dev/null
+++ b/spec/workers/gitlab/bitbucket_import/stage/import_pull_requests_worker_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::Stage::ImportPullRequestsWorker, feature_category: :importers do
+ let_it_be(:project) { create(:project, :import_started) }
+
+ subject(:worker) { described_class.new }
+
+ it_behaves_like Gitlab::BitbucketImport::StageMethods
+
+ describe '#perform' do
+ context 'when the import succeeds' do
+ before do
+ allow_next_instance_of(Gitlab::BitbucketImport::Importers::PullRequestsImporter) do |importer|
+ allow(importer).to receive(:execute).and_return(Gitlab::JobWaiter.new(2, '123'))
+ end
+ end
+
+ it 'schedules the next stage' do
+ expect(Gitlab::BitbucketImport::AdvanceStageWorker).to receive(:perform_async)
+ .with(project.id, { '123' => 2 }, :finish)
+
+ worker.perform(project.id)
+ end
+
+ it 'logs stage start and finish' do
+ expect(Gitlab::BitbucketImport::Logger)
+ .to receive(:info).with(hash_including(message: 'starting stage', project_id: project.id))
+ expect(Gitlab::BitbucketImport::Logger)
+ .to receive(:info).with(hash_including(message: 'stage finished', project_id: project.id))
+
+ worker.perform(project.id)
+ end
+ end
+
+ context 'when project does not exist' do
+ it 'does not call the importer' do
+ expect(Gitlab::BitbucketImport::Importers::PullRequestsImporter).not_to receive(:new)
+
+ worker.perform(-1)
+ end
+ end
+
+ context 'when project import state is not `started`' do
+ it 'does not call the importer' do
+ project = create(:project, :import_canceled)
+
+ expect(Gitlab::BitbucketImport::Importers::PullRequestsImporter).not_to receive(:new)
+
+ worker.perform(project.id)
+ end
+ end
+
+ context 'when the importer fails' do
+ it 'does not schedule the next stage and raises error' do
+ exception = StandardError.new('Error')
+
+ allow_next_instance_of(Gitlab::BitbucketImport::Importers::PullRequestsImporter) do |importer|
+ allow(importer).to receive(:execute).and_raise(exception)
+ end
+
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track).with(
+ project_id: project.id,
+ exception: exception,
+ error_source: described_class.name,
+ fail_import: false
+ ).and_call_original
+
+ expect { worker.perform(project.id) }
+ .to change { Gitlab::BitbucketImport::AdvanceStageWorker.jobs.size }.by(0)
+ .and raise_error(exception)
+ end
+ end
+ end
+end
diff --git a/spec/workers/gitlab/bitbucket_import/stage/import_repository_worker_spec.rb b/spec/workers/gitlab/bitbucket_import/stage/import_repository_worker_spec.rb
new file mode 100644
index 00000000000..2234a49d66c
--- /dev/null
+++ b/spec/workers/gitlab/bitbucket_import/stage/import_repository_worker_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::Stage::ImportRepositoryWorker, feature_category: :importers do
+ let_it_be(:project) { create(:project, :import_started) }
+
+ let(:worker) { described_class.new }
+
+ it_behaves_like Gitlab::BitbucketImport::StageMethods
+
+ it 'executes the importer and enqueues ImportPullRequestsWorker' do
+ expect(Gitlab::BitbucketImport::Importers::RepositoryImporter).to receive_message_chain(:new, :execute)
+ .and_return(true)
+
+ expect(Gitlab::BitbucketImport::Stage::ImportPullRequestsWorker).to receive(:perform_async).with(project.id)
+ .and_return(true).once
+
+ worker.perform(project.id)
+ end
+end
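Taken together, the new stage specs describe the Bitbucket import pipeline end to end. The ordering below is inferred from the expectations above (waiter_key and jobs_remaining are placeholders), not read from the workers themselves:

    # Repository first, then pull requests, then advance to the finish stage.
    Gitlab::BitbucketImport::Stage::ImportRepositoryWorker.perform_async(project.id)
    #  -> Gitlab::BitbucketImport::Stage::ImportPullRequestsWorker.perform_async(project.id)
    #  -> Gitlab::BitbucketImport::AdvanceStageWorker.perform_async(project.id, { waiter_key => jobs_remaining }, :finish)
    #  -> Gitlab::BitbucketImport::Stage::FinishImportWorker.perform_async(project.id)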
diff --git a/spec/workers/gitlab/bitbucket_server_import/advance_stage_worker_spec.rb b/spec/workers/gitlab/bitbucket_server_import/advance_stage_worker_spec.rb
new file mode 100644
index 00000000000..14e93440422
--- /dev/null
+++ b/spec/workers/gitlab/bitbucket_server_import/advance_stage_worker_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketServerImport::AdvanceStageWorker, feature_category: :importers do
+ it_behaves_like Gitlab::Import::AdvanceStage, factory: :import_state
+end
diff --git a/spec/workers/gitlab/bitbucket_server_import/import_pull_request_worker_spec.rb b/spec/workers/gitlab/bitbucket_server_import/import_pull_request_worker_spec.rb
index dd3235f846c..376078532cd 100644
--- a/spec/workers/gitlab/bitbucket_server_import/import_pull_request_worker_spec.rb
+++ b/spec/workers/gitlab/bitbucket_server_import/import_pull_request_worker_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe Gitlab::BitbucketServerImport::ImportPullRequestWorker, feature_c
end
it 'notifies job waiter' do
- expect(Gitlab::JobWaiter).to receive(:notify).with(job_waiter_key, 'jid')
+ expect(Gitlab::JobWaiter).to receive(:notify).with(job_waiter_key, 'jid', ttl: Gitlab::Import::JOB_WAITER_TTL)
worker.perform(project.id, {}, job_waiter_key)
end
@@ -44,7 +44,7 @@ RSpec.describe Gitlab::BitbucketServerImport::ImportPullRequestWorker, feature_c
context 'when project does not exists' do
it 'does not call importer and notifies job waiter' do
expect(importer_class).not_to receive(:new)
- expect(Gitlab::JobWaiter).to receive(:notify).with(job_waiter_key, 'jid')
+ expect(Gitlab::JobWaiter).to receive(:notify).with(job_waiter_key, 'jid', ttl: Gitlab::Import::JOB_WAITER_TTL)
worker.perform(-1, {}, job_waiter_key)
end
@@ -55,7 +55,7 @@ RSpec.describe Gitlab::BitbucketServerImport::ImportPullRequestWorker, feature_c
project = create(:project, :import_canceled)
expect(importer_class).not_to receive(:new)
- expect(Gitlab::JobWaiter).to receive(:notify).with(job_waiter_key, 'jid')
+ expect(Gitlab::JobWaiter).to receive(:notify).with(job_waiter_key, 'jid', ttl: Gitlab::Import::JOB_WAITER_TTL)
worker.perform(project.id, {}, job_waiter_key)
end
diff --git a/spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb b/spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb
index 2e89263bcf3..dc715c3026b 100644
--- a/spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb
+++ b/spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb
@@ -68,7 +68,7 @@ RSpec.describe Gitlab::GithubGistsImport::ImportGistWorker, feature_category: :i
.to receive(:info)
.with(log_attributes.merge('message' => 'start importer'))
expect(importer).to receive(:execute).and_return(importer_result)
- expect(Gitlab::JobWaiter).to receive(:notify).with('some_key', subject.jid)
+ expect(Gitlab::JobWaiter).to receive(:notify).with('some_key', subject.jid, ttl: Gitlab::Import::JOB_WAITER_TTL)
expect(Gitlab::GithubImport::Logger)
.to receive(:info)
.with(log_attributes.merge('message' => 'importer finished'))
@@ -114,7 +114,9 @@ RSpec.describe Gitlab::GithubGistsImport::ImportGistWorker, feature_category: :i
expect(Gitlab::GithubImport::Logger)
.to receive(:error)
.with(log_attributes.merge('message' => 'importer failed', 'error.message' => 'error_message'))
- expect(Gitlab::JobWaiter).to receive(:notify).with('some_key', subject.jid)
+ expect(Gitlab::JobWaiter)
+ .to receive(:notify)
+ .with('some_key', subject.jid, ttl: Gitlab::Import::JOB_WAITER_TTL)
subject.perform(user.id, gist_hash, 'some_key')
@@ -189,7 +191,7 @@ RSpec.describe Gitlab::GithubGistsImport::ImportGistWorker, feature_category: :i
it 'notifies the JobWaiter' do
expect(Gitlab::JobWaiter)
.to receive(:notify)
- .with(job['args'].last, job['jid'])
+ .with(job['args'].last, job['jid'], ttl: Gitlab::Import::JOB_WAITER_TTL)
sidekiq_retries_exhausted
end
diff --git a/spec/workers/gitlab/github_import/advance_stage_worker_spec.rb b/spec/workers/gitlab/github_import/advance_stage_worker_spec.rb
index 121f30ea9d5..60c117a2a90 100644
--- a/spec/workers/gitlab/github_import/advance_stage_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/advance_stage_worker_spec.rb
@@ -2,114 +2,6 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::AdvanceStageWorker, :clean_gitlab_redis_shared_state, feature_category: :importers do
- let(:project) { create(:project) }
- let(:import_state) { create(:import_state, project: project, jid: '123') }
- let(:worker) { described_class.new }
-
- describe '#perform' do
- context 'when the project no longer exists' do
- it 'does not perform any work' do
- expect(worker).not_to receive(:wait_for_jobs)
-
- worker.perform(-1, { '123' => 2 }, :finish)
- end
- end
-
- context 'when there are remaining jobs' do
- before do
- allow(worker)
- .to receive(:find_import_state)
- .and_return(import_state)
- end
-
- it 'reschedules itself' do
- expect(worker)
- .to receive(:wait_for_jobs)
- .with({ '123' => 2 })
- .and_return({ '123' => 1 })
-
- expect(described_class)
- .to receive(:perform_in)
- .with(described_class::INTERVAL, project.id, { '123' => 1 }, :finish)
-
- worker.perform(project.id, { '123' => 2 }, :finish)
- end
- end
-
- context 'when there are no remaining jobs' do
- before do
- allow(worker)
- .to receive(:find_import_state)
- .and_return(import_state)
-
- allow(worker)
- .to receive(:wait_for_jobs)
- .with({ '123' => 2 })
- .and_return({})
- end
-
- it 'schedules the next stage' do
- expect(import_state)
- .to receive(:refresh_jid_expiration)
-
- expect(Gitlab::GithubImport::Stage::FinishImportWorker)
- .to receive(:perform_async)
- .with(project.id)
-
- worker.perform(project.id, { '123' => 2 }, :finish)
- end
-
- it 'raises KeyError when the stage name is invalid' do
- expect { worker.perform(project.id, { '123' => 2 }, :kittens) }
- .to raise_error(KeyError)
- end
- end
- end
-
- describe '#wait_for_jobs' do
- it 'waits for jobs to complete and returns a new pair of keys to wait for' do
- waiter1 = double(:waiter1, jobs_remaining: 1, key: '123')
- waiter2 = double(:waiter2, jobs_remaining: 0, key: '456')
-
- expect(Gitlab::JobWaiter)
- .to receive(:new)
- .ordered
- .with(2, '123')
- .and_return(waiter1)
-
- expect(Gitlab::JobWaiter)
- .to receive(:new)
- .ordered
- .with(1, '456')
- .and_return(waiter2)
-
- expect(waiter1)
- .to receive(:wait)
- .with(described_class::BLOCKING_WAIT_TIME)
-
- expect(waiter2)
- .to receive(:wait)
- .with(described_class::BLOCKING_WAIT_TIME)
-
- new_waiters = worker.wait_for_jobs({ '123' => 2, '456' => 1 })
-
- expect(new_waiters).to eq({ '123' => 1 })
- end
- end
-
- describe '#find_import_state' do
- it 'returns a ProjectImportState' do
- import_state.update_column(:status, 'started')
-
- found = worker.find_import_state(project.id)
-
- expect(found).to be_an_instance_of(ProjectImportState)
- expect(found.attributes.keys).to match_array(%w(id jid))
- end
-
- it 'returns nil if the project import is not running' do
- expect(worker.find_import_state(project.id)).to be_nil
- end
- end
+RSpec.describe Gitlab::GithubImport::AdvanceStageWorker, feature_category: :importers do
+ it_behaves_like Gitlab::Import::AdvanceStage, factory: :import_state
end
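The per-importer advance-stage specs collapse into a single shared example parameterised by a factory. A sketch of how such a shared example is commonly declared; only the name and the factory: parameter come from the diff, the body is assumed:

    RSpec.shared_examples Gitlab::Import::AdvanceStage do |factory:|
      let(:import_state) { create(factory, jid: '123') }

      # ...the '#perform', '#wait_for_jobs' and '#find_import_state' examples that
      # were previously duplicated per importer are assumed to live here...
    end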
diff --git a/spec/workers/gitlab/jira_import/advance_stage_worker_spec.rb b/spec/workers/gitlab/jira_import/advance_stage_worker_spec.rb
new file mode 100644
index 00000000000..d7c5d8aba4d
--- /dev/null
+++ b/spec/workers/gitlab/jira_import/advance_stage_worker_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::JiraImport::AdvanceStageWorker, feature_category: :importers do
+ it_behaves_like Gitlab::Import::AdvanceStage, factory: :jira_import_state
+end
diff --git a/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb b/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb
index 5209395923f..6dfab44b228 100644
--- a/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb
+++ b/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb
@@ -12,9 +12,9 @@ RSpec.describe Gitlab::JiraImport::ImportIssueWorker, feature_category: :importe
describe 'modules' do
it { expect(described_class).to include_module(ApplicationWorker) }
- it { expect(described_class).to include_module(Gitlab::NotifyUponDeath) }
it { expect(described_class).to include_module(Gitlab::JiraImport::QueueOptions) }
it { expect(described_class).to include_module(Gitlab::Import::DatabaseHelpers) }
+ it { expect(described_class).to include_module(Gitlab::Import::NotifyUponDeath) }
end
subject { described_class.new }
diff --git a/spec/workers/incident_management/close_incident_worker_spec.rb b/spec/workers/incident_management/close_incident_worker_spec.rb
index 02ca5260fbd..b218bf4ced1 100644
--- a/spec/workers/incident_management/close_incident_worker_spec.rb
+++ b/spec/workers/incident_management/close_incident_worker_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe IncidentManagement::CloseIncidentWorker, feature_category: :incid
subject(:worker) { described_class.new }
describe '#perform' do
- let_it_be(:user) { User.alert_bot }
+ let_it_be(:user) { Users::Internal.alert_bot }
let_it_be(:project) { create(:project) }
let_it_be(:issue, reload: true) { create(:incident, project: project) }
diff --git a/spec/workers/incident_management/process_alert_worker_v2_spec.rb b/spec/workers/incident_management/process_alert_worker_v2_spec.rb
index 476b6f04942..b9d7cd9fee8 100644
--- a/spec/workers/incident_management/process_alert_worker_v2_spec.rb
+++ b/spec/workers/incident_management/process_alert_worker_v2_spec.rb
@@ -19,14 +19,14 @@ RSpec.describe IncidentManagement::ProcessAlertWorkerV2, feature_category: :inci
allow(Gitlab::AppLogger).to receive(:warn).and_call_original
allow(AlertManagement::CreateAlertIssueService)
- .to receive(:new).with(alert, User.alert_bot)
+ .to receive(:new).with(alert, Users::Internal.alert_bot)
.and_call_original
end
shared_examples 'creates issue successfully' do
it 'creates an issue' do
expect(AlertManagement::CreateAlertIssueService)
- .to receive(:new).with(alert, User.alert_bot)
+ .to receive(:new).with(alert, Users::Internal.alert_bot)
expect { perform_worker }.to change { Issue.count }.by(1)
end
diff --git a/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb b/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb
index 2e77f38e221..278efd3406c 100644
--- a/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb
+++ b/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb
@@ -166,4 +166,66 @@ RSpec.describe LooseForeignKeys::CleanupWorker, feature_category: :cell do
end
end
end
+
+ describe 'turbo mode' do
+ context 'when turbo mode is off' do
+ where(:database_name, :feature_flag) do
+ :main | :loose_foreign_keys_turbo_mode_main
+ :ci | :loose_foreign_keys_turbo_mode_ci
+ end
+
+ with_them do
+ before do
+ skip unless Gitlab::Database.has_config?(database_name)
+ stub_feature_flags(feature_flag => false)
+ end
+
+ it 'does not use TurboModificationTracker' do
+ allow_next_instance_of(LooseForeignKeys::TurboModificationTracker) do |instance|
+ expect(instance).not_to receive(:over_limit?)
+ end
+
+ perform_for(db: database_name)
+ end
+
+ it 'logs not using turbo mode' do
+ expect_next_instance_of(LooseForeignKeys::CleanupWorker) do |instance|
+ expect(instance).to receive(:log_extra_metadata_on_done).with(:stats, a_hash_including(turbo_mode: false))
+ end
+
+ perform_for(db: database_name)
+ end
+ end
+ end
+
+ context 'when turbo mode is on' do
+ where(:database_name, :feature_flag) do
+ :main | :loose_foreign_keys_turbo_mode_main
+ :ci | :loose_foreign_keys_turbo_mode_ci
+ end
+
+ with_them do
+ before do
+ skip unless Gitlab::Database.has_config?(database_name)
+ stub_feature_flags(feature_flag => true)
+ end
+
+ it 'uses TurboModificationTracker' do
+ expect_next_instance_of(LooseForeignKeys::TurboModificationTracker) do |instance|
+ expect(instance).to receive(:over_limit?).at_least(:once)
+ end
+
+ perform_for(db: database_name)
+ end
+
+ it 'logs using turbo mode' do
+ expect_next_instance_of(LooseForeignKeys::CleanupWorker) do |instance|
+ expect(instance).to receive(:log_extra_metadata_on_done).with(:stats, a_hash_including(turbo_mode: true))
+ end
+
+ perform_for(db: database_name)
+ end
+ end
+ end
+ end
end
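Turbo mode is toggled per database by a dedicated feature flag, and each row of the where table is skipped when that database is not configured. A compressed sketch of the stubbing pattern the examples rely on:

    # Enable turbo mode for main only; skip when the ci database is not configured locally.
    skip unless Gitlab::Database.has_config?(:ci)

    stub_feature_flags(
      loose_foreign_keys_turbo_mode_main: true,
      loose_foreign_keys_turbo_mode_ci: false
    )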
diff --git a/spec/workers/merge_requests/ensure_prepared_worker_spec.rb b/spec/workers/merge_requests/ensure_prepared_worker_spec.rb
new file mode 100644
index 00000000000..8f599ffe642
--- /dev/null
+++ b/spec/workers/merge_requests/ensure_prepared_worker_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::EnsurePreparedWorker, :sidekiq_inline, feature_category: :code_review_workflow do
+ subject(:worker) { described_class.new }
+
+ let_it_be(:merge_request_1, reload: true) { create(:merge_request, prepared_at: nil) }
+ let_it_be(:merge_request_2, reload: true) { create(:merge_request, prepared_at: Time.current) }
+ let_it_be(:merge_request_3, reload: true) { create(:merge_request, prepared_at: nil) }
+
+ describe '#perform' do
+ context 'when ensure_merge_requests_prepared is enabled' do
+ it 'creates the expected NewMergeRequestWorkers for the unprepared merge requests' do
+ expect(merge_request_1.prepared_at).to eq(nil)
+ expect(merge_request_2.prepared_at).to eq(merge_request_2.prepared_at)
+ expect(merge_request_3.prepared_at).to eq(nil)
+
+ worker.perform
+
+ expect(merge_request_1.reload.prepared_at).not_to eq(nil)
+ expect(merge_request_2.reload.prepared_at).to eq(merge_request_2.prepared_at)
+ expect(merge_request_3.reload.prepared_at).not_to eq(nil)
+ end
+ end
+
+ context 'when ensure_merge_requests_prepared is disabled' do
+ before do
+ stub_feature_flags(ensure_merge_requests_prepared: false)
+ end
+
+ it 'does not prepare any merge requests' do
+ expect(merge_request_1.prepared_at).to eq(nil)
+ expect(merge_request_2.prepared_at).to eq(merge_request_2.prepared_at)
+ expect(merge_request_3.prepared_at).to eq(nil)
+
+ worker.perform
+
+ expect(merge_request_1.prepared_at).to eq(nil)
+ expect(merge_request_2.prepared_at).to eq(merge_request_2.prepared_at)
+ expect(merge_request_3.prepared_at).to eq(nil)
+ end
+ end
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ it 'creates the expected NewMergeRequestWorkers for the unprepared merge requests' do
+ expect(merge_request_1.prepared_at).to eq(nil)
+ expect(merge_request_2.prepared_at).to eq(merge_request_2.prepared_at)
+ expect(merge_request_3.prepared_at).to eq(nil)
+
+ subject
+
+ expect(merge_request_1.reload.prepared_at).not_to eq(nil)
+ expect(merge_request_2.reload.prepared_at).to eq(merge_request_2.prepared_at)
+ expect(merge_request_3.reload.prepared_at).not_to eq(nil)
+ end
+ end
+end
diff --git a/spec/workers/metrics/global_metrics_update_worker_spec.rb b/spec/workers/metrics/global_metrics_update_worker_spec.rb
deleted file mode 100644
index d5bfbcc928a..00000000000
--- a/spec/workers/metrics/global_metrics_update_worker_spec.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ::Metrics::GlobalMetricsUpdateWorker, feature_category: :metrics do
- subject { described_class.new }
-
- describe '#perform' do
- let(:service) { ::Metrics::GlobalMetricsUpdateService.new }
-
- it 'delegates to ::Metrics::GlobalMetricsUpdateService' do
- expect(::Metrics::GlobalMetricsUpdateService).to receive(:new).and_return(service)
- expect(service).to receive(:execute)
-
- subject.perform
- end
-
- context 'for an idempotent worker' do
- include_examples 'an idempotent worker' do
- it 'exports metrics' do
- allow(Gitlab).to receive(:maintenance_mode?).and_return(true).at_least(1).time
-
- perform_multiple
-
- expect(service.maintenance_mode_metric.get).to eq(1)
- end
- end
- end
- end
-end
diff --git a/spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb b/spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb
deleted file mode 100644
index 237b5081bb1..00000000000
--- a/spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Namespaces::InProductMarketingEmailsWorker, '#perform', unless: Gitlab.ee?, feature_category: :experimentation_activation do
- # Running this in EE would call the overridden method, which can't be tested in CE.
- # The EE code is covered in a separate EE spec.
-
- context 'when the in_product_marketing_emails_enabled setting is disabled' do
- before do
- stub_application_setting(in_product_marketing_emails_enabled: false)
- end
-
- it 'does not execute the email service' do
- expect(Namespaces::InProductMarketingEmailsService).not_to receive(:send_for_all_tracks_and_intervals)
-
- subject.perform
- end
- end
-
- context 'when the in_product_marketing_emails_enabled setting is enabled' do
- before do
- stub_application_setting(in_product_marketing_emails_enabled: true)
- end
-
- it 'executes the email service' do
- expect(Namespaces::InProductMarketingEmailsService).to receive(:send_for_all_tracks_and_intervals)
-
- subject.perform
- end
- end
-end
diff --git a/spec/workers/new_merge_request_worker_spec.rb b/spec/workers/new_merge_request_worker_spec.rb
index 58f6792f9a0..4ed9b61a9d7 100644
--- a/spec/workers/new_merge_request_worker_spec.rb
+++ b/spec/workers/new_merge_request_worker_spec.rb
@@ -88,33 +88,25 @@ RSpec.describe NewMergeRequestWorker, feature_category: :code_review_workflow do
worker.perform(merge_request.id, user.id)
end
- context 'when add_prepared_state_to_mr feature flag is off' do
+ context 'when the merge request is prepared' do
before do
- stub_feature_flags(add_prepared_state_to_mr: false)
+ merge_request.update!(prepared_at: Time.current)
end
- it 'calls the create service' do
- expect_next_instance_of(MergeRequests::AfterCreateService, project: merge_request.target_project, current_user: user) do |service|
- expect(service).to receive(:execute).with(merge_request)
- end
+ it 'does not call the create service' do
+ expect(MergeRequests::AfterCreateService).not_to receive(:new)
worker.perform(merge_request.id, user.id)
end
end
- context 'when add_prepared_state_to_mr feature flag is on' do
- before do
- stub_feature_flags(add_prepared_state_to_mr: true)
- end
-
- context 'when the merge request is not prepared' do
- it 'calls the create service' do
- expect_next_instance_of(MergeRequests::AfterCreateService, project: merge_request.target_project, current_user: user) do |service|
- expect(service).to receive(:execute).with(merge_request)
- end
-
- worker.perform(merge_request.id, user.id)
+ context 'when the merge request is not prepared' do
+ it 'calls the create service' do
+ expect_next_instance_of(MergeRequests::AfterCreateService, project: merge_request.target_project, current_user: user) do |service|
+ expect(service).to receive(:execute).with(merge_request).and_call_original
end
+
+ worker.perform(merge_request.id, user.id)
end
end
end
diff --git a/spec/workers/new_note_worker_spec.rb b/spec/workers/new_note_worker_spec.rb
index 651b5742854..3465cffea2d 100644
--- a/spec/workers/new_note_worker_spec.rb
+++ b/spec/workers/new_note_worker_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe NewNoteWorker, feature_category: :team_planning do
end
context 'when Note author has been deleted' do
- let_it_be(:note) { create(:note, author: User.ghost) }
+ let_it_be(:note) { create(:note, author: Users::Internal.ghost) }
it "does not call NotificationService" do
expect(NotificationService).not_to receive(:new)
diff --git a/spec/workers/pages/invalidate_domain_cache_worker_spec.rb b/spec/workers/pages/invalidate_domain_cache_worker_spec.rb
deleted file mode 100644
index b3c81b25a93..00000000000
--- a/spec/workers/pages/invalidate_domain_cache_worker_spec.rb
+++ /dev/null
@@ -1,267 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Pages::InvalidateDomainCacheWorker, feature_category: :pages do
- shared_examples 'clears caches with' do |event_class:, event_data:, caches:|
- include AfterNextHelpers
-
- let(:event) { event_class.new(data: event_data) }
-
- subject { consume_event(subscriber: described_class, event: event) }
-
- it_behaves_like 'subscribes to event'
-
- it 'clears the cache with Gitlab::Pages::CacheControl' do
- caches.each do |cache|
- expect_next(Gitlab::Pages::CacheControl, type: cache[:type], id: cache[:id])
- .to receive(:clear_cache)
- end
-
- subject
- end
- end
-
- context 'when a project have multiple domains' do
- include AfterNextHelpers
-
- let_it_be(:project) { create(:project) }
- let_it_be(:pages_domain) { create(:pages_domain, project: project) }
- let_it_be(:pages_domain2) { create(:pages_domain, project: project) }
-
- let(:event) do
- Pages::PageDeployedEvent.new(
- data: {
- project_id: project.id,
- namespace_id: project.namespace_id,
- root_namespace_id: project.root_ancestor.id
- }
- )
- end
-
- subject { consume_event(subscriber: described_class, event: event) }
-
- it 'clears the cache with Gitlab::Pages::CacheControl' do
- expect_next(Gitlab::Pages::CacheControl, type: :namespace, id: project.namespace_id)
- .to receive(:clear_cache)
- expect_next(Gitlab::Pages::CacheControl, type: :domain, id: pages_domain.id)
- .to receive(:clear_cache)
- expect_next(Gitlab::Pages::CacheControl, type: :domain, id: pages_domain2.id)
- .to receive(:clear_cache)
-
- subject
- end
- end
-
- it_behaves_like 'clears caches with',
- event_class: Pages::PageDeployedEvent,
- event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
- caches: [
- { type: :namespace, id: 3 }
- ]
-
- it_behaves_like 'clears caches with',
- event_class: Pages::PageDeletedEvent,
- event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
- caches: [
- { type: :namespace, id: 3 }
- ]
-
- it_behaves_like 'clears caches with',
- event_class: Projects::ProjectDeletedEvent,
- event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
- caches: [
- { type: :namespace, id: 3 }
- ]
-
- it_behaves_like 'clears caches with',
- event_class: Projects::ProjectCreatedEvent,
- event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
- caches: [
- { type: :namespace, id: 3 }
- ]
-
- it_behaves_like 'clears caches with',
- event_class: Projects::ProjectArchivedEvent,
- event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
- caches: [
- { type: :namespace, id: 3 }
- ]
-
- it_behaves_like 'clears caches with',
- event_class: Projects::ProjectPathChangedEvent,
- event_data: {
- project_id: 1,
- namespace_id: 2,
- root_namespace_id: 3,
- old_path: 'old_path',
- new_path: 'new_path'
- },
- caches: [
- { type: :namespace, id: 3 }
- ]
-
- it_behaves_like 'clears caches with',
- event_class: Projects::ProjectTransferedEvent,
- event_data: {
- project_id: 1,
- old_namespace_id: 2,
- old_root_namespace_id: 3,
- new_namespace_id: 4,
- new_root_namespace_id: 5
- },
- caches: [
- { type: :namespace, id: 3 },
- { type: :namespace, id: 5 }
- ]
-
- it_behaves_like 'clears caches with',
- event_class: Groups::GroupTransferedEvent,
- event_data: {
- group_id: 1,
- old_root_namespace_id: 3,
- new_root_namespace_id: 5
- },
- caches: [
- { type: :namespace, id: 3 },
- { type: :namespace, id: 5 }
- ]
-
- it_behaves_like 'clears caches with',
- event_class: Groups::GroupPathChangedEvent,
- event_data: {
- group_id: 1,
- root_namespace_id: 2,
- old_path: 'old_path',
- new_path: 'new_path'
- },
- caches: [
- { type: :namespace, id: 2 }
- ]
-
- it_behaves_like 'clears caches with',
- event_class: Groups::GroupDeletedEvent,
- event_data: {
- group_id: 1,
- root_namespace_id: 3
- },
- caches: [
- { type: :namespace, id: 3 }
- ]
-
- it_behaves_like 'clears caches with',
- event_class: PagesDomains::PagesDomainDeletedEvent,
- event_data: {
- project_id: 1,
- namespace_id: 2,
- root_namespace_id: 3,
- domain_id: 4,
- domain: 'somedomain.com'
- },
- caches: [
- { type: :domain, id: 4 },
- { type: :namespace, id: 3 }
- ]
-
- it_behaves_like 'clears caches with',
- event_class: PagesDomains::PagesDomainUpdatedEvent,
- event_data: {
- project_id: 1,
- namespace_id: 2,
- root_namespace_id: 3,
- domain_id: 4,
- domain: 'somedomain.com'
- },
- caches: [
- { type: :domain, id: 4 },
- { type: :namespace, id: 3 }
- ]
-
- it_behaves_like 'clears caches with',
- event_class: PagesDomains::PagesDomainCreatedEvent,
- event_data: {
- project_id: 1,
- namespace_id: 2,
- root_namespace_id: 3,
- domain_id: 4,
- domain: 'somedomain.com'
- },
- caches: [
- { type: :domain, id: 4 },
- { type: :namespace, id: 3 }
- ]
-
- context 'when project attributes change' do
- Projects::ProjectAttributesChangedEvent::PAGES_RELATED_ATTRIBUTES.each do |attribute|
- it_behaves_like 'clears caches with',
- event_class: Projects::ProjectAttributesChangedEvent,
- event_data: {
- project_id: 1,
- namespace_id: 2,
- root_namespace_id: 3,
- domain_id: 4,
- attributes: [attribute]
- },
- caches: [
- { type: :domain, id: 4 },
- { type: :namespace, id: 3 }
- ]
- end
-
- it_behaves_like 'ignores the published event' do
- let(:event) do
- Projects::ProjectAttributesChangedEvent.new(
- data: {
- project_id: 1,
- namespace_id: 2,
- root_namespace_id: 3,
- attributes: ['unknown']
- }
- )
- end
- end
- end
-
- context 'when project features change' do
- it_behaves_like 'clears caches with',
- event_class: Projects::ProjectFeaturesChangedEvent,
- event_data: {
- project_id: 1,
- namespace_id: 2,
- root_namespace_id: 3,
- features: ['pages_access_level']
- },
- caches: [
- { type: :namespace, id: 3 }
- ]
-
- it_behaves_like 'ignores the published event' do
- let(:event) do
- Projects::ProjectFeaturesChangedEvent.new(
- data: {
- project_id: 1,
- namespace_id: 2,
- root_namespace_id: 3,
- features: ['unknown']
- }
- )
- end
- end
- end
-
- context 'when namespace based cache keys are duplicated' do
- # de-dups namespace cache keys
- it_behaves_like 'clears caches with',
- event_class: Projects::ProjectTransferedEvent,
- event_data: {
- project_id: 1,
- old_namespace_id: 2,
- old_root_namespace_id: 5,
- new_namespace_id: 4,
- new_root_namespace_id: 5
- },
- caches: [
- { type: :namespace, id: 5 }
- ]
- end
-end
diff --git a/spec/workers/personal_access_tokens/expiring_worker_spec.rb b/spec/workers/personal_access_tokens/expiring_worker_spec.rb
index 01ce4e85fe2..0cc63fdb85e 100644
--- a/spec/workers/personal_access_tokens/expiring_worker_spec.rb
+++ b/spec/workers/personal_access_tokens/expiring_worker_spec.rb
@@ -58,5 +58,32 @@ RSpec.describe PersonalAccessTokens::ExpiringWorker, type: :worker, feature_cate
expect { worker.perform }.not_to change { pat.reload.expire_notification_delivered }
end
end
+
+ context 'when a token is owned by a project bot' do
+ let_it_be(:maintainer1) { create(:user) }
+ let_it_be(:maintainer2) { create(:user) }
+ let_it_be(:project_bot) { create(:user, :project_bot) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:expiring_token) { create(:personal_access_token, user: project_bot, expires_at: 5.days.from_now) }
+
+ before_all do
+ project.add_developer(project_bot)
+ project.add_maintainer(maintainer1)
+ project.add_maintainer(maintainer2)
+ end
+
+ it 'uses notification service to send the email' do
+ expect_next_instance_of(NotificationService) do |notification_service|
+ expect(notification_service).to receive(:resource_access_tokens_about_to_expire)
+ .with(project_bot, match_array([expiring_token.name]))
+ end
+
+ worker.perform
+ end
+
+ it 'marks the notification as delivered' do
+ expect { worker.perform }.to change { expiring_token.reload.expire_notification_delivered }.from(false).to(true)
+ end
+ end
end
end
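For project bot tokens the worker now routes the warning through NotificationService and still flips the delivered flag. A sketch of the two effects the new examples assert, using names from the spec:

    NotificationService.new.resource_access_tokens_about_to_expire(
      project_bot,            # bot user that owns the resource access token
      [expiring_token.name]   # names of the tokens about to expire
    )

    expiring_token.update!(expire_notification_delivered: true)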
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index 5c8a75aca3f..2e0a2535453 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -282,7 +282,7 @@ RSpec.describe PostReceive, feature_category: :source_code_management do
let(:user) { project.creator }
let(:label) { 'counts.source_code_pushes' }
let(:property) { 'source_code_pushes' }
- let(:context) { [Gitlab::Tracking::ServicePingContext.new(data_source: :redis, key_path: label).to_h] }
+ let(:context) { [Gitlab::Usage::MetricDefinition.context_for(label).to_h] }
subject(:post_receive) { perform }
end
diff --git a/spec/workers/projects/record_target_platforms_worker_spec.rb b/spec/workers/projects/record_target_platforms_worker_spec.rb
index ecb6aab7349..116da404112 100644
--- a/spec/workers/projects/record_target_platforms_worker_spec.rb
+++ b/spec/workers/projects/record_target_platforms_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::RecordTargetPlatformsWorker, feature_category: :groups_and_projects do
+RSpec.describe Projects::RecordTargetPlatformsWorker, feature_category: :experimentation_activation do
include ExclusiveLeaseHelpers
let_it_be(:swift) { create(:programming_language, name: 'Swift') }
diff --git a/spec/workers/users/migrate_records_to_ghost_user_in_batches_worker_spec.rb b/spec/workers/users/migrate_records_to_ghost_user_in_batches_worker_spec.rb
index 38ea7c43267..fbbfd44bba6 100644
--- a/spec/workers/users/migrate_records_to_ghost_user_in_batches_worker_spec.rb
+++ b/spec/workers/users/migrate_records_to_ghost_user_in_batches_worker_spec.rb
@@ -34,8 +34,8 @@ RSpec.describe Users::MigrateRecordsToGhostUserInBatchesWorker, feature_category
it 'migrates issue to ghost user' do
subject
- expect(issue.reload.author).to eq(User.ghost)
- expect(issue.last_edited_by).to eq(User.ghost)
+ expect(issue.reload.author).to eq(Users::Internal.ghost)
+ expect(issue.last_edited_by).to eq(Users::Internal.ghost)
end
end
end
diff --git a/spec/workers/users/track_namespace_visits_worker_spec.rb b/spec/workers/users/track_namespace_visits_worker_spec.rb
new file mode 100644
index 00000000000..cfb2b7ab5eb
--- /dev/null
+++ b/spec/workers/users/track_namespace_visits_worker_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::TrackNamespaceVisitsWorker, feature_category: :navigation do
+ describe '#perform' do
+ let_it_be(:user) { create(:user) }
+
+ context 'when tracking a group' do
+ let_it_be(:entity) { create(:group) }
+ let_it_be(:entity_type) { 'groups' }
+ let_it_be(:worker) { described_class.new }
+ let_it_be(:model) { ::Users::GroupVisit }
+
+ it_behaves_like 'namespace visits tracking worker'
+ end
+
+ context 'when tracking a project' do
+ let_it_be(:entity) { create(:project) }
+ let_it_be(:entity_type) { 'projects' }
+ let_it_be(:worker) { described_class.new }
+ let_it_be(:model) { ::Users::ProjectVisit }
+
+ it_behaves_like 'namespace visits tracking worker'
+ end
+ end
+end